mirror of
https://github.com/paperclipai/paperclip
synced 2026-05-07 15:42:06 +02:00
Compare commits
1099 Commits
fix/github
...
paperclip-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0c8b5b852e | ||
|
|
4d870aec37 | ||
|
|
90647f81ff | ||
|
|
f24ca164e4 | ||
|
|
f025c313be | ||
|
|
d5afd4eacb | ||
|
|
b8725c52ef | ||
|
|
5f45712846 | ||
|
|
d4c3899ca4 | ||
|
|
7463479fc8 | ||
|
|
3fa5d25de1 | ||
|
|
c1a02497b0 | ||
|
|
390502736c | ||
|
|
0d87fd9a11 | ||
|
|
6059c665d5 | ||
|
|
f460f744ef | ||
|
|
32a9165ddf | ||
|
|
50cd76d8a3 | ||
|
|
f6ce976544 | ||
|
|
b816809a1e | ||
|
|
d0a8d4e08a | ||
|
|
213bcd8c7a | ||
|
|
7f893ac4ec | ||
|
|
e89076148a | ||
|
|
6e6f538630 | ||
|
|
5d1ed71779 | ||
|
|
76fe736e8e | ||
|
|
d6b06788f6 | ||
|
|
6844226572 | ||
|
|
0cb42f49ea | ||
|
|
e59047187b | ||
|
|
1729e41179 | ||
|
|
11de5ae9c9 | ||
|
|
8e82ac7e38 | ||
|
|
be82a912b2 | ||
|
|
ab5eeca94e | ||
|
|
2172476e84 | ||
|
|
c1bb938519 | ||
|
|
b649bd454f | ||
|
|
a692e37f3e | ||
|
|
96637a1e09 | ||
|
|
a5aed931ab | ||
|
|
a63e847525 | ||
|
|
a7dc88941b | ||
|
|
b6115424b1 | ||
|
|
1f78e55072 | ||
|
|
fcab770518 | ||
|
|
2d8f97feb0 | ||
|
|
03a2cf5c8a | ||
|
|
a77206812e | ||
|
|
6208899d0a | ||
|
|
2a84e53c1b | ||
|
|
e1bf9d66a7 | ||
|
|
b48be80d5d | ||
|
|
45ebecab5a | ||
|
|
dae888cc5d | ||
|
|
aaf42f3a7e | ||
|
|
62d05a7ae2 | ||
|
|
1cd0281b4d | ||
|
|
65480ffab1 | ||
|
|
dc94e3d1df | ||
|
|
0162bb332c | ||
|
|
7ec8716159 | ||
|
|
8cb70d897d | ||
|
|
8bdf4081ee | ||
|
|
958c11699e | ||
|
|
c566a9236c | ||
|
|
dab95740be | ||
|
|
548721248e | ||
|
|
f4a05dc35c | ||
|
|
b00d52c5b6 | ||
|
|
ac664df8e4 | ||
|
|
4477ca2a7e | ||
|
|
724893ad5b | ||
|
|
7c42345177 | ||
|
|
0e87fdbe35 | ||
|
|
4077ccd343 | ||
|
|
44d94d0add | ||
|
|
6d63a4df45 | ||
|
|
3cee1f12da | ||
|
|
03dff1a29a | ||
|
|
5d021583be | ||
|
|
b4a58ba8a6 | ||
|
|
da251e5eab | ||
|
|
264eb34f24 | ||
|
|
0191fabdc6 | ||
|
|
b578bf1f51 | ||
|
|
781d9dcf74 | ||
|
|
c7bf2661c9 | ||
|
|
d607ca0089 | ||
|
|
61ed4ef90c | ||
|
|
ce3bc329fc | ||
|
|
11c3eee66b | ||
|
|
0ed3f56935 | ||
|
|
1ac1dbcb3e | ||
|
|
fe21ab324b | ||
|
|
327eadb45c | ||
|
|
996c7eb727 | ||
|
|
9e8cd28f81 | ||
|
|
a4b05d8831 | ||
|
|
de1cd5858d | ||
|
|
efc1e336b0 | ||
|
|
d82468d6e5 | ||
|
|
2ebbad6561 | ||
|
|
f44c951a22 | ||
|
|
e15b5412ec | ||
|
|
ba5cb34bed | ||
|
|
cbc237311f | ||
|
|
1079f21ac4 | ||
|
|
15b0f11275 | ||
|
|
59d913d04b | ||
|
|
e21e442033 | ||
|
|
296033620f | ||
|
|
fad5634b29 | ||
|
|
69ff793c6a | ||
|
|
ede3206423 | ||
|
|
2cf2a44d68 | ||
|
|
58ae23aa2c | ||
|
|
d3e66c789e | ||
|
|
097f30b138 | ||
|
|
c5ccafbb80 | ||
|
|
28a28d1cb6 | ||
|
|
7edd2f7946 | ||
|
|
1a82646e9d | ||
|
|
f75c0c317c | ||
|
|
d0920da459 | ||
|
|
c830c64727 | ||
|
|
2ec2b1f1eb | ||
|
|
9159b44fcc | ||
|
|
f5a87ab14e | ||
|
|
185195201a | ||
|
|
1e4ccb2b1f | ||
|
|
038dd2bb82 | ||
|
|
bac5afa647 | ||
|
|
db1279dc12 | ||
|
|
30dd2b78e5 | ||
|
|
ee82a4f243 | ||
|
|
1cbb0a5e34 | ||
|
|
93355bae6b | ||
|
|
c6779b570f | ||
|
|
27ec1e0c8b | ||
|
|
26d4cabb2e | ||
|
|
3baebee2df | ||
|
|
8894520ed0 | ||
|
|
ec75cabcd8 | ||
|
|
9eaf72ab31 | ||
|
|
844b061267 | ||
|
|
5640d29ab0 | ||
|
|
1de5fb9316 | ||
|
|
372421ef0b | ||
|
|
8e88577371 | ||
|
|
b1e9215375 | ||
|
|
0d270655ab | ||
|
|
5758aba91e | ||
|
|
482dac7097 | ||
|
|
0937f07c79 | ||
|
|
d00860b12a | ||
|
|
4e20279305 | ||
|
|
4bd62471f7 | ||
|
|
56ee63bfd0 | ||
|
|
87db949d3f | ||
|
|
735c591bad | ||
|
|
46892ded18 | ||
|
|
9f9a8cfa25 | ||
|
|
3264f9c1f6 | ||
|
|
642188f900 | ||
|
|
b7a7dacfa3 | ||
|
|
b1e457365b | ||
|
|
fcbae62baf | ||
|
|
cc44d309c0 | ||
|
|
316790ea0a | ||
|
|
8367c5f406 | ||
|
|
0ff262ca0f | ||
|
|
fa3cbc7fdb | ||
|
|
e3804f792d | ||
|
|
3cfbc350a0 | ||
|
|
667d5a7384 | ||
|
|
950ea065ae | ||
|
|
b5e177df7e | ||
|
|
81b96c6021 | ||
|
|
fe96a2f976 | ||
|
|
92f142f7f8 | ||
|
|
34589ad457 | ||
|
|
7dd3661467 | ||
|
|
e09dfb1a2c | ||
|
|
a57f6f48b4 | ||
|
|
29514606bb | ||
|
|
627fbc80ac | ||
|
|
2a372fbe8a | ||
|
|
d8a7342686 | ||
|
|
3574a3bf49 | ||
|
|
f94fe57d10 | ||
|
|
94652c6079 | ||
|
|
9131cc0355 | ||
|
|
f7410673fe | ||
|
|
4a75d05969 | ||
|
|
8ada49f31b | ||
|
|
f593e116c1 | ||
|
|
3fea60c04c | ||
|
|
73abe4c76e | ||
|
|
9cfa37fce3 | ||
|
|
943b851a5e | ||
|
|
f2a2049d17 | ||
|
|
54f93c1f27 | ||
|
|
f55a5e557d | ||
|
|
50a36beec5 | ||
|
|
f559455d92 | ||
|
|
5ae335c42f | ||
|
|
a13ac0d56f | ||
|
|
b0b85e6ba3 | ||
|
|
cb705c9856 | ||
|
|
bce58d353d | ||
|
|
a0333f3e9d | ||
|
|
25d308186d | ||
|
|
0e80e60665 | ||
|
|
0a5ac9affd | ||
|
|
ff333d6828 | ||
|
|
e7fe02c02f | ||
|
|
71d93c79a5 | ||
|
|
cb6e615186 | ||
|
|
be518529b7 | ||
|
|
2e31fb7c91 | ||
|
|
b3e0c31239 | ||
|
|
4b39b0cc14 | ||
|
|
e10baee84c | ||
|
|
3cd9a54d94 | ||
|
|
6e894f27a2 | ||
|
|
93c7493054 | ||
|
|
391afa627f | ||
|
|
47b025c146 | ||
|
|
8b7dafd218 | ||
|
|
700b41f7e1 | ||
|
|
7e78ce0d7e | ||
|
|
aa18aeb1e9 | ||
|
|
b6fe9ebcbc | ||
|
|
53ffa50638 | ||
|
|
ebd45b62cd | ||
|
|
26ebe3b002 | ||
|
|
60744d8a91 | ||
|
|
3a0e71b080 | ||
|
|
1c1d006c5e | ||
|
|
07987d75ad | ||
|
|
aec88f10dd | ||
|
|
45f18d1bee | ||
|
|
2329a33f32 | ||
|
|
74481b1d1e | ||
|
|
cae7cda463 | ||
|
|
2c2e13eac2 | ||
|
|
502d60b2a8 | ||
|
|
f3e5c55f45 | ||
|
|
448e9f2be3 | ||
|
|
48704c6586 | ||
|
|
e2962e6528 | ||
|
|
3e0ab97b12 | ||
|
|
bb980bfb33 | ||
|
|
1e4d252661 | ||
|
|
ac473820a3 | ||
|
|
2c8cb7f519 | ||
|
|
51414be269 | ||
|
|
1de1393413 | ||
|
|
669e5c87cc | ||
|
|
9a150eee65 | ||
|
|
a3ecc086d9 | ||
|
|
85ca675311 | ||
|
|
622a8e44bf | ||
|
|
d71ff903e4 | ||
|
|
492e49e1c0 | ||
|
|
f1bb175584 | ||
|
|
4b654fc81e | ||
|
|
5136381d8f | ||
|
|
0edac73a68 | ||
|
|
b3b9d99519 | ||
|
|
c414790404 | ||
|
|
1b55474a9b | ||
|
|
bf3fba36f2 | ||
|
|
dc842ff7ea | ||
|
|
758219d53f | ||
|
|
2775a5652b | ||
|
|
bd0f56e523 | ||
|
|
977e9f3e9a | ||
|
|
365b6d9bd8 | ||
|
|
6b4f3b56e4 | ||
|
|
c1d0c52985 | ||
|
|
5d6217b70b | ||
|
|
eda127a2b2 | ||
|
|
93e8e6447d | ||
|
|
13ada98e78 | ||
|
|
54ac2c6fe9 | ||
|
|
962a882799 | ||
|
|
2ac1c62ab1 | ||
|
|
2278d96d5a | ||
|
|
aff56c2297 | ||
|
|
612bab1eb6 | ||
|
|
68499eb2f4 | ||
|
|
e9c8bd4805 | ||
|
|
517fe5093e | ||
|
|
bdc8e27bf4 | ||
|
|
8cdba3ce18 | ||
|
|
1a3aee9ee1 | ||
|
|
9a8a169e95 | ||
|
|
bfa60338cc | ||
|
|
1e76bbe38c | ||
|
|
42b326bcc6 | ||
|
|
8f23270f35 | ||
|
|
97d4ce41b3 | ||
|
|
0a9a8b5a44 | ||
|
|
37d2d5ef02 | ||
|
|
55d756f9a3 | ||
|
|
7e34d6c66b | ||
|
|
8be6fe987b | ||
|
|
15bd2ef349 | ||
|
|
08fea10ce1 | ||
|
|
b74d94ba1e | ||
|
|
8f722c5751 | ||
|
|
b6e40fec54 | ||
|
|
a8d1c4b596 | ||
|
|
cd19834fab | ||
|
|
9e2ccc24bb | ||
|
|
fc8e1d1153 | ||
|
|
f3ad1fc301 | ||
|
|
eefe9f39f1 | ||
|
|
8d20510b9a | ||
|
|
5a252020d5 | ||
|
|
4c01a45d2a | ||
|
|
467f3a749a | ||
|
|
9499d0df97 | ||
|
|
dde4cc070e | ||
|
|
a8638619e5 | ||
|
|
2f73346a64 | ||
|
|
785ce54e5e | ||
|
|
73e7007e7c | ||
|
|
c5f3b8e40a | ||
|
|
47299c511e | ||
|
|
ed97432fae | ||
|
|
0593b9b0c5 | ||
|
|
855d895a12 | ||
|
|
39d001c9b5 | ||
|
|
89ad6767c7 | ||
|
|
c171ff901c | ||
|
|
2d129bfede | ||
|
|
2e09570ce0 | ||
|
|
866032eaaa | ||
|
|
81ff9fb311 | ||
|
|
23eea392c8 | ||
|
|
3513b60dbc | ||
|
|
42989115a7 | ||
|
|
7623f679cf | ||
|
|
9be1b3f8a9 | ||
|
|
b380d6000f | ||
|
|
e23d148be1 | ||
|
|
58a1a20f5b | ||
|
|
12011fa9de | ||
|
|
11643941e6 | ||
|
|
8cdb65febb | ||
|
|
2082bb61fe | ||
|
|
21a1e97a81 | ||
|
|
6c8569156c | ||
|
|
c19208010a | ||
|
|
8ae4c0e765 | ||
|
|
22af797ca3 | ||
|
|
27accb1bdb | ||
|
|
b9b2bf3b5b | ||
|
|
4dea302791 | ||
|
|
b825a121cb | ||
|
|
91e040a696 | ||
|
|
e75960f284 | ||
|
|
94d4a01b76 | ||
|
|
fe61e650c2 | ||
|
|
c89349687f | ||
|
|
f515f2aa12 | ||
|
|
5a9a2a9112 | ||
|
|
65818c3447 | ||
|
|
4993b5338c | ||
|
|
d3401c0518 | ||
|
|
dbb5f0c4a9 | ||
|
|
3d685335eb | ||
|
|
2615450afc | ||
|
|
35f2fc7230 | ||
|
|
d9476abecb | ||
|
|
d12650e5ac | ||
|
|
d202631016 | ||
|
|
cd2be692e9 | ||
|
|
c6d2dc8b56 | ||
|
|
80b81459a7 | ||
|
|
a07237779b | ||
|
|
21dd6acb81 | ||
|
|
b81d765d2e | ||
|
|
4efe018a8f | ||
|
|
0651f48f6c | ||
|
|
01c05b5f1b | ||
|
|
c36ea1de6f | ||
|
|
3c4b8711ec | ||
|
|
ef2cbb838f | ||
|
|
fb3aabb743 | ||
|
|
2a2fa31a03 | ||
|
|
8adae848e4 | ||
|
|
00898e8194 | ||
|
|
199a2178cf | ||
|
|
ed95fc1dda | ||
|
|
c757a07708 | ||
|
|
acfd7c260a | ||
|
|
388650afc7 | ||
|
|
d7a7bda209 | ||
|
|
47f3cdc1bb | ||
|
|
69a1593ff8 | ||
|
|
f884cbab78 | ||
|
|
14d59da316 | ||
|
|
e13c3f7c6c | ||
|
|
f8452a4520 | ||
|
|
68b2fe20bb | ||
|
|
1ce800c158 | ||
|
|
aa256fee03 | ||
|
|
112eeafd62 | ||
|
|
258c7ccd21 | ||
|
|
728fbdd199 | ||
|
|
8e42c6cdac | ||
|
|
2af64b6068 | ||
|
|
9b3ad6e616 | ||
|
|
f749efd412 | ||
|
|
f2925ae0df | ||
|
|
37b6ad42ea | ||
|
|
6d73a8a1cb | ||
|
|
acb2bc6b3b | ||
|
|
21ee44e29c | ||
|
|
58db67c318 | ||
|
|
87d46bba57 | ||
|
|
045a3d54b9 | ||
|
|
f467f3d826 | ||
|
|
2ac40aba56 | ||
|
|
72408642b1 | ||
|
|
8db0c7fd2f | ||
|
|
993a3262f6 | ||
|
|
a13a67de54 | ||
|
|
422dd51a87 | ||
|
|
a80edfd6d9 | ||
|
|
65e0d3d672 | ||
|
|
931678db83 | ||
|
|
dda63a4324 | ||
|
|
43fa9c3a9a | ||
|
|
c9ee8e7a7e | ||
|
|
f843a45a84 | ||
|
|
36049beeea | ||
|
|
c041fee6fc | ||
|
|
82290451d4 | ||
|
|
fb3b57ab1f | ||
|
|
ca8d35fd99 | ||
|
|
81a7f79dfd | ||
|
|
ad1ef6a8c6 | ||
|
|
833842b391 | ||
|
|
fd6cfc7149 | ||
|
|
620a5395d7 | ||
|
|
1350753f5f | ||
|
|
50e9f69010 | ||
|
|
38a0cd275e | ||
|
|
bd6d07d0b4 | ||
|
|
3ab7d52f00 | ||
|
|
909e8cd4c8 | ||
|
|
36376968af | ||
|
|
29d0e82dce | ||
|
|
1c1040e219 | ||
|
|
0ec8257563 | ||
|
|
38833304d4 | ||
|
|
85e6371cb6 | ||
|
|
daea94a2ed | ||
|
|
c18b3cb414 | ||
|
|
af844b778e | ||
|
|
53dbcd185e | ||
|
|
f16de6026d | ||
|
|
34044cdfce | ||
|
|
ca5659f734 | ||
|
|
77faf8c668 | ||
|
|
d12e3e3d1a | ||
|
|
2fca400dd9 | ||
|
|
c0d0d03bce | ||
|
|
3db6bdfc3c | ||
|
|
6524dbe08f | ||
|
|
2c1883fc77 | ||
|
|
4abd53c089 | ||
|
|
3c99ab8d01 | ||
|
|
9d6d159209 | ||
|
|
26069682ee | ||
|
|
1e24e6e84c | ||
|
|
9d89d74d70 | ||
|
|
056a5ee32a | ||
|
|
dedd972e3d | ||
|
|
6a7830b07e | ||
|
|
f9cebe9b73 | ||
|
|
9e1ee925cd | ||
|
|
c424f06263 | ||
|
|
77f854c081 | ||
|
|
6c2c63e0f1 | ||
|
|
461779a960 | ||
|
|
6aa3ead238 | ||
|
|
e0f64c04e7 | ||
|
|
9b238d9644 | ||
|
|
e5b2e8b29b | ||
|
|
62d8b39474 | ||
|
|
420cd4fd8d | ||
|
|
b642d3e06b | ||
|
|
5b479652f2 | ||
|
|
99296f95db | ||
|
|
92e03ac4e3 | ||
|
|
ce8d9eb323 | ||
|
|
06cf00129f | ||
|
|
ebc6888e7d | ||
|
|
9f1bb350fe | ||
|
|
46ce546174 | ||
|
|
90889c12d8 | ||
|
|
761dce559d | ||
|
|
41f261eaf5 | ||
|
|
8427043431 | ||
|
|
19aaa54ae4 | ||
|
|
d134d5f3a1 | ||
|
|
98337f5b03 | ||
|
|
477ef78fed | ||
|
|
b0e0f8cd91 | ||
|
|
ccb5cce4ac | ||
|
|
5575399af1 | ||
|
|
2c75c8a1ec | ||
|
|
d8814e938c | ||
|
|
a7cfbc98f3 | ||
|
|
5e65bb2b92 | ||
|
|
d7d01e9819 | ||
|
|
88e742a129 | ||
|
|
db4e146551 | ||
|
|
9684e7bf30 | ||
|
|
a3e125f796 | ||
|
|
2b18fc4007 | ||
|
|
ec1210caaa | ||
|
|
3c66683169 | ||
|
|
c610192c53 | ||
|
|
4d61dbfd34 | ||
|
|
26a974da17 | ||
|
|
8a368e8721 | ||
|
|
c8ab70f2ce | ||
|
|
29da357c5b | ||
|
|
4120016d30 | ||
|
|
fceefe7f09 | ||
|
|
2d31c71fbe | ||
|
|
b5efd8b435 | ||
|
|
fc2be204e2 | ||
|
|
92ebad3d42 | ||
|
|
5310bbd4d8 | ||
|
|
c54b985d9f | ||
|
|
32fe1056e7 | ||
|
|
8e2148e99d | ||
|
|
70702ce74f | ||
|
|
b1b3408efa | ||
|
|
57357991e4 | ||
|
|
50577b8c63 | ||
|
|
1871a602df | ||
|
|
facf994694 | ||
|
|
403aeff7f6 | ||
|
|
7d81e4cb2a | ||
|
|
44f052f4c5 | ||
|
|
c33dcbd202 | ||
|
|
bc61eb84df | ||
|
|
74687553f3 | ||
|
|
4226e15128 | ||
|
|
cfb7dd4818 | ||
|
|
52bb4ea37a | ||
|
|
3986eb615c | ||
|
|
0f9faa297b | ||
|
|
d917375e35 | ||
|
|
ce4536d1fa | ||
|
|
4fd62a3d91 | ||
|
|
25066c967b | ||
|
|
1534b39ee3 | ||
|
|
826da2973d | ||
|
|
4426d96610 | ||
|
|
c8956094ad | ||
|
|
2ec4ba629e | ||
|
|
182b459235 | ||
|
|
94d6ae4049 | ||
|
|
b3d61a7561 | ||
|
|
d9005405b9 | ||
|
|
e3f07aad55 | ||
|
|
2fea39b814 | ||
|
|
0356040a29 | ||
|
|
caa7550e9f | ||
|
|
84d4c328f5 | ||
|
|
11f08ea5d5 | ||
|
|
1f1fe9c989 | ||
|
|
f1ad07616c | ||
|
|
868cfa8c50 | ||
|
|
6793dde597 | ||
|
|
cadfcd1bc6 | ||
|
|
c114ff4dc6 | ||
|
|
84e35b801c | ||
|
|
cbeefbfa5a | ||
|
|
2de691f023 | ||
|
|
41f2a80aa8 | ||
|
|
bb1732dd11 | ||
|
|
15e0e2ece9 | ||
|
|
b7b5d8dae3 | ||
|
|
0ff778ec29 | ||
|
|
b69f0b7dc4 | ||
|
|
b75ac76b13 | ||
|
|
19b6adc415 | ||
|
|
54b05d6d68 | ||
|
|
f83a77f41f | ||
|
|
a3537a86e3 | ||
|
|
5d538d4792 | ||
|
|
dc3aa8f31f | ||
|
|
c98af52590 | ||
|
|
01fb97e8da | ||
|
|
6a72faf83b | ||
|
|
1fd40920db | ||
|
|
caef115b95 | ||
|
|
17e5322e28 | ||
|
|
582f4ceaf4 | ||
|
|
1583a2d65a | ||
|
|
9a70a4edaa | ||
|
|
0ac01a04e5 | ||
|
|
11ff24cd22 | ||
|
|
a5d47166e2 | ||
|
|
eb8c5d93e7 | ||
|
|
af5b980362 | ||
|
|
2e563ccd50 | ||
|
|
2c406d3b8c | ||
|
|
49c7fb7fbd | ||
|
|
995f5b0b66 | ||
|
|
b34fa3b273 | ||
|
|
9ddf960312 | ||
|
|
a8894799e4 | ||
|
|
76a692c260 | ||
|
|
5913706329 | ||
|
|
b944293eda | ||
|
|
3c1ebed539 | ||
|
|
ab0d04ff7a | ||
|
|
6073ac3145 | ||
|
|
3b329467eb | ||
|
|
aa5b2be907 | ||
|
|
dcb66eeae7 | ||
|
|
874fe5ec7d | ||
|
|
c916626cef | ||
|
|
555f026c24 | ||
|
|
e91da556ee | ||
|
|
ab82e3f022 | ||
|
|
c74cda1851 | ||
|
|
fcf3ba6974 | ||
|
|
ed62d58cb2 | ||
|
|
dd8c1ca3b2 | ||
|
|
5ee4cd98e8 | ||
|
|
a6ca3a9418 | ||
|
|
0fd75aa579 | ||
|
|
eaa765118f | ||
|
|
ed73547fb6 | ||
|
|
692105e202 | ||
|
|
01b550d61a | ||
|
|
c6364149b1 | ||
|
|
b0b9809732 | ||
|
|
844b6dfd70 | ||
|
|
0a32e3838a | ||
|
|
e186449f94 | ||
|
|
4bb42005ea | ||
|
|
66aa65f8f7 | ||
|
|
15f6079c6b | ||
|
|
9e9eec9af6 | ||
|
|
1a4ed8c953 | ||
|
|
bd60ea4909 | ||
|
|
6ebfc0ff3d | ||
|
|
083d7c9ac4 | ||
|
|
80766e589c | ||
|
|
c5c6c62bd7 | ||
|
|
1549799c1e | ||
|
|
af1b08fdf4 | ||
|
|
72bc4ab403 | ||
|
|
4c6b9c190b | ||
|
|
f6ac6e47c4 | ||
|
|
623ab1c3ea | ||
|
|
eeec52ad74 | ||
|
|
db3883d2e7 | ||
|
|
9637351880 | ||
|
|
d0e01d2863 | ||
|
|
cbca599625 | ||
|
|
b1d12d2f37 | ||
|
|
0a952dc93d | ||
|
|
ff8b839f42 | ||
|
|
fea892c8b3 | ||
|
|
1696ff0c3f | ||
|
|
4eecd23ea3 | ||
|
|
4da83296a9 | ||
|
|
0ce4134ce1 | ||
|
|
03f44d0089 | ||
|
|
d38d5e1a7b | ||
|
|
add6ca5648 | ||
|
|
04a07080af | ||
|
|
8bebc9599a | ||
|
|
6250d536a0 | ||
|
|
de5985bb75 | ||
|
|
331e1f0d06 | ||
|
|
58c511af9a | ||
|
|
4b668379bc | ||
|
|
f352f3f514 | ||
|
|
4ff460de38 | ||
|
|
06b85d62b2 | ||
|
|
3447e2087a | ||
|
|
44fbf83106 | ||
|
|
eb73fc747a | ||
|
|
5602576ae1 | ||
|
|
c4838cca6e | ||
|
|
67841a0c6d | ||
|
|
5561a9c17f | ||
|
|
a9dcea023b | ||
|
|
14ffbe30a0 | ||
|
|
98a5e287ef | ||
|
|
2735ef1f4a | ||
|
|
53f0988006 | ||
|
|
730a67bb20 | ||
|
|
59e29afab5 | ||
|
|
fd4df4db48 | ||
|
|
8ae954bb8f | ||
|
|
32c76e0012 | ||
|
|
70bd55a00f | ||
|
|
f92d2c3326 | ||
|
|
a3f4e6f56c | ||
|
|
08bdc3d28e | ||
|
|
7c54b6e9e3 | ||
|
|
a346ad2a73 | ||
|
|
e4e5b61596 | ||
|
|
eeb7e1a91a | ||
|
|
f2637e6972 | ||
|
|
c8f8f6752f | ||
|
|
87b3cacc8f | ||
|
|
4096db8053 | ||
|
|
fa084e1a16 | ||
|
|
22067c7d1d | ||
|
|
85d2c54d53 | ||
|
|
5222a49cc3 | ||
|
|
36574bd9c6 | ||
|
|
2cc2d4420d | ||
|
|
7576c5ecbc | ||
|
|
dd1d9bed80 | ||
|
|
92c29f27c3 | ||
|
|
55b26ed590 | ||
|
|
6960ab1106 | ||
|
|
c3f4e18a5e | ||
|
|
a3f568dec7 | ||
|
|
6f1ce3bd60 | ||
|
|
159c5b4360 | ||
|
|
b5fde733b0 | ||
|
|
f9927bdaaa | ||
|
|
dcead97650 | ||
|
|
9786ebb7ba | ||
|
|
66d84ccfa3 | ||
|
|
56a39fea3d | ||
|
|
2a6e1cf1fc | ||
|
|
c02dc73d3c | ||
|
|
06f5632d1a | ||
|
|
1246ccf250 | ||
|
|
a339b488ae | ||
|
|
ac376d0e5e | ||
|
|
220946b2a1 | ||
|
|
c41dd2e393 | ||
|
|
2e76a2a554 | ||
|
|
8fa4b6a5fb | ||
|
|
d8b408625e | ||
|
|
19154d0fec | ||
|
|
c0c1fd17cb | ||
|
|
2daae758b1 | ||
|
|
43b21c6033 | ||
|
|
0bb1ee3caa | ||
|
|
3b2cb3a699 | ||
|
|
1adfd30b3b | ||
|
|
a315838d43 | ||
|
|
75c7eb3868 | ||
|
|
eac3f3fa69 | ||
|
|
02c779b41d | ||
|
|
5a1e17f27f | ||
|
|
e0d2c4bddf | ||
|
|
d73c8df895 | ||
|
|
e73bc81a73 | ||
|
|
0b960b0739 | ||
|
|
bdecb1bad2 | ||
|
|
e61f00d4c1 | ||
|
|
42c8d9b660 | ||
|
|
bd0b76072b | ||
|
|
db42adf1bf | ||
|
|
0e8e162cd5 | ||
|
|
49ace2faf9 | ||
|
|
8232456ce8 | ||
|
|
cd7c6ee751 | ||
|
|
f8dd4dcb30 | ||
|
|
0b9f00346b | ||
|
|
ef0846e723 | ||
|
|
3a79d94050 | ||
|
|
b5610f66a6 | ||
|
|
119dd0eaa0 | ||
|
|
080c9e415d | ||
|
|
7f9a76411a | ||
|
|
01b6b7e66a | ||
|
|
298713fae7 | ||
|
|
37c2c4acc4 | ||
|
|
1376fc8f44 | ||
|
|
e6801123ca | ||
|
|
f23d611d0c | ||
|
|
5dfdbe91bb | ||
|
|
e6df9fa078 | ||
|
|
5a73556871 | ||
|
|
e204e03fa6 | ||
|
|
8b4850aaea | ||
|
|
f87db64ba9 | ||
|
|
f42aebdff8 | ||
|
|
4ebc12ab5a | ||
|
|
fdb20d5d08 | ||
|
|
5bf6fd1270 | ||
|
|
e3e7a92c77 | ||
|
|
640f527f8c | ||
|
|
49c1b8c2d8 | ||
|
|
93ba78362d | ||
|
|
2fdf953229 | ||
|
|
ebe00359d1 | ||
|
|
036e2b52db | ||
|
|
f4803291b8 | ||
|
|
d47ec56eca | ||
|
|
ae6aac044d | ||
|
|
da2c15905a | ||
|
|
13ca33aa4e | ||
|
|
e37e9df0d1 | ||
|
|
54b99d5096 | ||
|
|
fb63d61ae5 | ||
|
|
73ada45037 | ||
|
|
be911754c5 | ||
|
|
cff06c9a54 | ||
|
|
ad011fbf1e | ||
|
|
28a5f858b7 | ||
|
|
220a5ec5dd | ||
|
|
0ec79d4295 | ||
|
|
5e414ff4df | ||
|
|
a46dc4634b | ||
|
|
df64530333 | ||
|
|
8dc98db717 | ||
|
|
9093cfbe4f | ||
|
|
da9b31e393 | ||
|
|
99eb317600 | ||
|
|
652fa8223e | ||
|
|
4587627f3c | ||
|
|
17b6f6c8f7 | ||
|
|
de10269d10 | ||
|
|
dfb83295de | ||
|
|
61f53b6471 | ||
|
|
e3c92a20f1 | ||
|
|
a290d1d550 | ||
|
|
abf48cbbf9 | ||
|
|
d53714a145 | ||
|
|
07757a59e9 | ||
|
|
f0b5130b80 | ||
|
|
0ca479de9c | ||
|
|
553e7b6b30 | ||
|
|
1830216078 | ||
|
|
5140d7b0c4 | ||
|
|
a62c264ddf | ||
|
|
3db2d33e4c | ||
|
|
360a7fc17b | ||
|
|
13fd656e2b | ||
|
|
47449152ac | ||
|
|
9ee440b8e4 | ||
|
|
5b1e1239fd | ||
|
|
79652da520 | ||
|
|
0f4a5716ea | ||
|
|
8fc399f511 | ||
|
|
dd44f69e2b | ||
|
|
39878fcdfe | ||
|
|
3de7d63ea9 | ||
|
|
581a654748 | ||
|
|
888179f7f0 | ||
|
|
0bb6336eaf | ||
|
|
2d8c8abbfb | ||
|
|
6f7609daac | ||
|
|
b26b9cda7b | ||
|
|
fb760a63ab | ||
|
|
971513d3ae | ||
|
|
d6bb71f324 | ||
|
|
0f45999df9 | ||
|
|
bee814787a | ||
|
|
d22131ad0a | ||
|
|
7930e725af | ||
|
|
5fee484e85 | ||
|
|
d7a08c1db2 | ||
|
|
401b241570 | ||
|
|
bf5cfaaeab | ||
|
|
616a2bc8f9 | ||
|
|
4ab3e4f7ab | ||
|
|
2a33acce3a | ||
|
|
b2c2bbd96f | ||
|
|
b72279afe4 | ||
|
|
4c6e8e6053 | ||
|
|
f2c42aad12 | ||
|
|
6a568662b8 | ||
|
|
d07d86f778 | ||
|
|
8cc8540597 | ||
|
|
5f2b1b63c2 | ||
|
|
4fc80bdc16 | ||
|
|
dfdd3784b9 | ||
|
|
a0a28fce38 | ||
|
|
22b38b1956 | ||
|
|
4ffa2b15dc | ||
|
|
ee85028534 | ||
|
|
c844ca1a40 | ||
|
|
7f3fad64b8 | ||
|
|
d6c6aa5c49 | ||
|
|
f9d685344d | ||
|
|
bcc1d9f3d6 | ||
|
|
25af0a1532 | ||
|
|
72a0e256a8 | ||
|
|
9e21ef879f | ||
|
|
58a3cbd654 | ||
|
|
915a3ff3ce | ||
|
|
9c5a31ed45 | ||
|
|
14ee364190 | ||
|
|
2d7b9e95cb | ||
|
|
b20675b7b5 | ||
|
|
df8cc8136f | ||
|
|
b05d0c560e | ||
|
|
c5f20a9891 | ||
|
|
53249c00cf | ||
|
|
339c05c2d4 | ||
|
|
c7d05096ab | ||
|
|
21765f8118 | ||
|
|
9998cc0683 | ||
|
|
c39758a169 | ||
|
|
e341abb99c | ||
|
|
5caf43349b | ||
|
|
f7c766ff32 | ||
|
|
bdeaaeac9c | ||
|
|
a9802c1962 | ||
|
|
531945cfe2 | ||
|
|
6a7e2d3fce | ||
|
|
035cb8aec2 | ||
|
|
ca3fdb3957 | ||
|
|
301437e169 | ||
|
|
12c6584d30 | ||
|
|
efbcce27e4 | ||
|
|
54dd8f7ac8 | ||
|
|
ce69ebd2ec | ||
|
|
500d926da7 | ||
|
|
b1c4b2e420 | ||
|
|
1d1511e37c | ||
|
|
8f5196f7d6 | ||
|
|
8edff22c0b | ||
|
|
2f076f2add | ||
|
|
fff0600b1d | ||
|
|
16e221d03c | ||
|
|
cace79631e | ||
|
|
05c8a23a75 | ||
|
|
7a652b8998 | ||
|
|
6d564e0539 | ||
|
|
dbc9375256 | ||
|
|
b4e06c63e2 | ||
|
|
01afa92424 | ||
|
|
1cd61601f3 | ||
|
|
6eb9545a72 | ||
|
|
47a6d86174 | ||
|
|
aa854e7efe | ||
|
|
5536e6b91e | ||
|
|
f37e0aa7b3 | ||
|
|
b75e00e05d | ||
|
|
51ca713181 | ||
|
|
685c7549e1 | ||
|
|
8be868f0ab | ||
|
|
e28bcef4ad | ||
|
|
7b4a4f45ed | ||
|
|
87b17de0bd | ||
|
|
9ba47681c6 | ||
|
|
ef60ea0446 | ||
|
|
cd01ebb417 | ||
|
|
6000bb4ee2 | ||
|
|
e99fa66daf | ||
|
|
3b03ac1734 | ||
|
|
6ba5758d30 | ||
|
|
cfc53bf96b | ||
|
|
58d7f59477 | ||
|
|
b0524412c4 | ||
|
|
3689992965 | ||
|
|
55165f116d | ||
|
|
480174367d | ||
|
|
099c37c4b4 | ||
|
|
d84399aebe | ||
|
|
4f49c8a2b9 | ||
|
|
10f26cfad9 | ||
|
|
1e393bedb2 | ||
|
|
1ac85d837a | ||
|
|
9e19f1d005 | ||
|
|
731c9544b3 | ||
|
|
154a4a7ac1 | ||
|
|
5252568825 | ||
|
|
c7d31346e0 | ||
|
|
6b355e1acf | ||
|
|
f98d821213 | ||
|
|
8954512dad | ||
|
|
9d452eb120 | ||
|
|
4fdcfe5515 | ||
|
|
6ba9aea8ba | ||
|
|
e980c2ef64 | ||
|
|
827b09d7a5 | ||
|
|
e2f26f039a | ||
|
|
b5aeae7e22 | ||
|
|
2a7c44d314 | ||
|
|
517e90c13a | ||
|
|
228277d361 | ||
|
|
c539fcde8b | ||
|
|
7a08fbd370 | ||
|
|
71e1bc260d | ||
|
|
78342e384d | ||
|
|
59b1d1551a | ||
|
|
5d1e39b651 | ||
|
|
ceb18c77db | ||
|
|
6a1c198c04 | ||
|
|
dd11e7aa7b | ||
|
|
0cfbc58842 | ||
|
|
79e0915a86 | ||
|
|
56f7807732 | ||
|
|
52978e84ba | ||
|
|
b339f923d6 | ||
|
|
9e843c4dec | ||
|
|
9a26974ba8 | ||
|
|
5890b318c4 | ||
|
|
bb46423969 | ||
|
|
8460fee380 | ||
|
|
cca086b863 | ||
|
|
d77630154a | ||
|
|
10d06bc1ca | ||
|
|
0b76b1aced | ||
|
|
fed94d18f3 | ||
|
|
0763e2eb20 | ||
|
|
1548b73b77 | ||
|
|
cf8bfe8d8e | ||
|
|
5d6dadda83 | ||
|
|
43fa4fc487 | ||
|
|
bf9b057670 | ||
|
|
4a5aba5bac | ||
|
|
0b829ea20b | ||
|
|
86bb3d25cc | ||
|
|
ad494e74ad | ||
|
|
bc8fde5433 | ||
|
|
8d0581ffb4 | ||
|
|
298cb4ab8a | ||
|
|
3572ef230d | ||
|
|
f8249af501 | ||
|
|
140c4e1feb | ||
|
|
617aeaae0e | ||
|
|
b116e04894 | ||
|
|
dc1bf7e9c6 | ||
|
|
5b44dbe9c4 | ||
|
|
3c31e379a1 | ||
|
|
4e146f0075 | ||
|
|
173e7915a7 | ||
|
|
e76fca138d | ||
|
|
45df62652b | ||
|
|
068441b01b | ||
|
|
ca0169eb6c | ||
|
|
448fdaab96 | ||
|
|
b1e2a5615b | ||
|
|
b535860a50 | ||
|
|
2b478764a9 | ||
|
|
88cc8e495c | ||
|
|
88df0fecb0 | ||
|
|
ef652a2766 | ||
|
|
cf30ddb924 | ||
|
|
2f7da835de | ||
|
|
c6ea491000 | ||
|
|
76d30ff835 | ||
|
|
cc40e1f8e9 | ||
|
|
eb647ab2db | ||
|
|
7675fd0856 | ||
|
|
82f253c310 | ||
|
|
5de5fb507a | ||
|
|
269dd6abbe | ||
|
|
2c35be0212 | ||
|
|
c44dbf79cb | ||
|
|
5814249ea9 | ||
|
|
e619e64433 | ||
|
|
b2c0f3f9a5 | ||
|
|
7e43020a28 | ||
|
|
cfa4925075 | ||
|
|
280536092e | ||
|
|
2ba0f5914f | ||
|
|
0bf53bc513 | ||
|
|
2137c2f715 | ||
|
|
58a9259a2e | ||
|
|
1d8f514d10 | ||
|
|
8a201022c0 | ||
|
|
56a34a8f8a | ||
|
|
271c2b9018 | ||
|
|
2975aa950b | ||
|
|
29b70e0c36 | ||
|
|
3f48b61bfa | ||
|
|
dbb5bd48cc | ||
|
|
a39579dad3 | ||
|
|
fbb8d10305 | ||
|
|
3d2abbde72 | ||
|
|
ff02220890 | ||
|
|
bc5b30eccf | ||
|
|
d114927814 | ||
|
|
b41c00a9ef | ||
|
|
432d7e72fa | ||
|
|
666ab53648 | ||
|
|
314288ff82 |
269
.agents/skills/company-creator/SKILL.md
Normal file
269
.agents/skills/company-creator/SKILL.md
Normal file
@@ -0,0 +1,269 @@
|
||||
---
|
||||
name: company-creator
|
||||
description: >
|
||||
Create agent company packages conforming to the Agent Companies specification
|
||||
(agentcompanies/v1). Use when a user wants to create a new agent company from
|
||||
scratch, build a company around an existing git repo or skills collection, or
|
||||
scaffold a team/department of agents. Triggers on: "create a company", "make me
|
||||
a company", "build a company from this repo", "set up an agent company",
|
||||
"create a team of agents", "hire some agents", or when given a repo URL and
|
||||
asked to turn it into a company. Do NOT use for importing an existing company
|
||||
package (use the CLI import command instead) or for modifying a company that
|
||||
is already running in Paperclip.
|
||||
---
|
||||
|
||||
# Company Creator
|
||||
|
||||
Create agent company packages that conform to the Agent Companies specification.
|
||||
|
||||
Spec references:
|
||||
|
||||
- Normative spec: `docs/companies/companies-spec.md` (read this before generating files)
|
||||
- Web spec: https://agentcompanies.io/specification
|
||||
- Protocol site: https://agentcompanies.io/
|
||||
|
||||
## Two Modes
|
||||
|
||||
### Mode 1: Company From Scratch
|
||||
|
||||
The user describes what they want. Interview them to flesh out the vision, then generate the package.
|
||||
|
||||
### Mode 2: Company From a Repo
|
||||
|
||||
The user provides a git repo URL, local path, or tweet. Analyze the repo, then create a company that wraps it.
|
||||
|
||||
See [references/from-repo-guide.md](references/from-repo-guide.md) for detailed repo analysis steps.
|
||||
|
||||
## Process
|
||||
|
||||
### Step 1: Gather Context
|
||||
|
||||
Determine which mode applies:
|
||||
|
||||
- **From scratch**: What kind of company or team? What domain? What should the agents do?
|
||||
- **From repo**: Clone/read the repo. Scan for existing skills, agent configs, README, source structure.
|
||||
|
||||
### Step 2: Interview (Use AskUserQuestion)
|
||||
|
||||
Do not skip this step. Use AskUserQuestion to align with the user before writing any files.
|
||||
|
||||
**For from-scratch companies**, ask about:
|
||||
|
||||
- Company purpose and domain (1-2 sentences is fine)
|
||||
- What agents they need - propose a hiring plan based on what they described
|
||||
- Whether this is a full company (needs a CEO) or a team/department (no CEO required)
|
||||
- Any specific skills the agents should have
|
||||
- How work flows through the organization (see "Workflow" below)
|
||||
- Whether they want projects and starter tasks
|
||||
|
||||
**For from-repo companies**, present your analysis and ask:
|
||||
|
||||
- Confirm the agents you plan to create and their roles
|
||||
- Whether to reference or vendor any discovered skills (default: reference)
|
||||
- Any additional agents or skills beyond what the repo provides
|
||||
- Company name and any customization
|
||||
- Confirm the workflow you inferred from the repo (see "Workflow" below)
|
||||
|
||||
**Workflow — how does work move through this company?**
|
||||
|
||||
A company is not just a list of agents with skills. It's an organization that takes ideas and turns them into work products. You need to understand the workflow so each agent knows:
|
||||
|
||||
- Who gives them work and in what form (a task, a branch, a question, a review request)
|
||||
- What they do with it
|
||||
- Who they hand off to when they're done, and what that handoff looks like
|
||||
- What "done" means for their role
|
||||
|
||||
**Not every company is a pipeline.** Infer the right workflow pattern from context:
|
||||
|
||||
- **Pipeline** — sequential stages, each agent hands off to the next. Use when the repo/domain has a clear linear process (e.g. plan → build → review → ship → QA, or content ideation → draft → edit → publish).
|
||||
- **Hub-and-spoke** — a manager delegates to specialists who report back independently. Use when agents do different kinds of work that don't feed into each other (e.g. a CEO who dispatches to a researcher, a marketer, and an analyst).
|
||||
- **Collaborative** — agents work together on the same things as peers. Use for small teams where everyone contributes to the same output (e.g. a design studio, a brainstorming team).
|
||||
- **On-demand** — agents are summoned as needed with no fixed flow. Use when agents are more like a toolbox of specialists the user calls directly.
|
||||
|
||||
For from-scratch companies, propose a workflow pattern based on what they described and ask if it fits.
|
||||
|
||||
For from-repo companies, infer the pattern from the repo's structure. If skills have a clear sequential dependency (like `plan-ceo-review → plan-eng-review → review → ship → qa`), that's a pipeline. If skills are independent capabilities, it's more likely hub-and-spoke or on-demand. State your inference in the interview so the user can confirm or adjust.
|
||||
|
||||
**Key interviewing principles:**
|
||||
|
||||
- Propose a concrete hiring plan. Don't ask open-ended "what agents do you want?" - suggest specific agents based on context and let the user adjust.
|
||||
- Keep it lean. Most users are new to agent companies. A few agents (3-5) is typical for a startup. Don't suggest 10+ agents unless the scope demands it.
|
||||
- From-scratch companies should start with a CEO who manages everyone. Teams/departments don't need one.
|
||||
- Ask 2-3 focused questions per round, not 10.
|
||||
|
||||
### Step 3: Read the Spec
|
||||
|
||||
Before generating any files, read the normative spec:
|
||||
|
||||
```
|
||||
docs/companies/companies-spec.md
|
||||
```
|
||||
|
||||
Also read the quick reference: [references/companies-spec.md](references/companies-spec.md)
|
||||
|
||||
And the example: [references/example-company.md](references/example-company.md)
|
||||
|
||||
### Step 4: Generate the Package
|
||||
|
||||
Create the directory structure and all files. Follow the spec's conventions exactly.
|
||||
|
||||
**Directory structure:**
|
||||
|
||||
```
|
||||
<company-slug>/
|
||||
├── COMPANY.md
|
||||
├── agents/
|
||||
│ └── <slug>/AGENTS.md
|
||||
├── teams/
|
||||
│ └── <slug>/TEAM.md (if teams are needed)
|
||||
├── projects/
|
||||
│ └── <slug>/PROJECT.md (if projects are needed)
|
||||
├── tasks/
|
||||
│ └── <slug>/TASK.md (if tasks are needed)
|
||||
├── skills/
|
||||
│ └── <slug>/SKILL.md (if custom skills are needed)
|
||||
└── .paperclip.yaml (Paperclip vendor extension)
|
||||
```
|
||||
|
||||
**Rules:**
|
||||
|
||||
- Slugs must be URL-safe, lowercase, hyphenated
|
||||
- COMPANY.md gets `schema: agentcompanies/v1` - other files inherit it
|
||||
- Agent instructions go in the AGENTS.md body, not in .paperclip.yaml
|
||||
- Skills referenced by shortname in AGENTS.md resolve to `skills/<shortname>/SKILL.md`
|
||||
- For external skills, use `sources` with `usage: referenced` (see spec section 12)
|
||||
- Do not export secrets, machine-local paths, or database IDs
|
||||
- Omit empty/default fields
|
||||
- For companies generated from a repo, add a references footer at the bottom of COMPANY.md body:
|
||||
`Generated from [repo-name](repo-url) with the company-creator skill from [Paperclip](https://github.com/paperclipai/paperclip)`
|
||||
|
||||
**Reporting structure:**
|
||||
|
||||
- Every agent except the CEO should have `reportsTo` set to their manager's slug
|
||||
- The CEO has `reportsTo: null`
|
||||
- For teams without a CEO, the top-level agent has `reportsTo: null`
|
||||
|
||||
**Writing workflow-aware agent instructions:**
|
||||
|
||||
Each AGENTS.md body should include not just what the agent does, but how they fit into the organization's workflow. Include:
|
||||
|
||||
1. **Where work comes from** — "You receive feature ideas from the user" or "You pick up tasks assigned to you by the CTO"
|
||||
2. **What you produce** — "You produce a technical plan with architecture diagrams" or "You produce a reviewed, approved branch ready for shipping"
|
||||
3. **Who you hand off to** — "When your plan is locked, hand off to the Staff Engineer for implementation" or "When review passes, hand off to the Release Engineer to ship"
|
||||
4. **What triggers you** — "You are activated when a new feature idea needs product-level thinking" or "You are activated when a branch is ready for pre-landing review"
|
||||
|
||||
This turns a collection of agents into an organization that actually works together. Without workflow context, agents operate in isolation — they do their job but don't know what happens before or after them.
|
||||
|
||||
### Step 5: Confirm Output Location
|
||||
|
||||
Ask the user where to write the package. Common options:
|
||||
|
||||
- A subdirectory in the current repo
|
||||
- A new directory the user specifies
|
||||
- The current directory (if it's empty or they confirm)
|
||||
|
||||
### Step 6: Write README.md and LICENSE
|
||||
|
||||
**README.md** — every company package gets a README. It should be a nice, readable introduction that someone browsing GitHub would appreciate. Include:
|
||||
|
||||
- Company name and what it does
|
||||
- The workflow / how the company operates
|
||||
- Org chart as a markdown list or table showing agents, titles, reporting structure, and skills
|
||||
- Brief description of each agent's role
|
||||
- Citations and references: link to the source repo (if from-repo), link to the Agent Companies spec (https://agentcompanies.io/specification), and link to Paperclip (https://github.com/paperclipai/paperclip)
|
||||
- A "Getting Started" section explaining how to import: `paperclipai company import --from <path>`
|
||||
|
||||
**LICENSE** — include a LICENSE file. The copyright holder is the user creating the company, not the upstream repo author (they made the skills, the user is making the company). Use the same license type as the source repo (if from-repo) or ask the user (if from-scratch). Default to MIT if unclear.
|
||||
|
||||
### Step 7: Write Files and Summarize
|
||||
|
||||
Write all files, then give a brief summary:
|
||||
|
||||
- Company name and what it does
|
||||
- Agent roster with roles and reporting structure
|
||||
- Skills (custom + referenced)
|
||||
- Projects and tasks if any
|
||||
- The output path
|
||||
|
||||
## .paperclip.yaml Guidelines
|
||||
|
||||
The `.paperclip.yaml` file is the Paperclip vendor extension. It configures adapters and env inputs per agent.
|
||||
|
||||
### Adapter Rules
|
||||
|
||||
**Do not specify an adapter unless the repo or user context warrants it.** If you don't know what adapter the user wants, omit the adapter block entirely — Paperclip will use its default. Specifying an unknown adapter type causes an import error.
|
||||
|
||||
Paperclip's supported adapter types (these are the ONLY valid values):
|
||||
- `claude_local` — Claude Code CLI
|
||||
- `codex_local` — Codex CLI
|
||||
- `opencode_local` — OpenCode CLI
|
||||
- `pi_local` — Pi CLI
|
||||
- `cursor` — Cursor
|
||||
- `gemini_local` — Gemini CLI
|
||||
- `openclaw_gateway` — OpenClaw gateway
|
||||
|
||||
Only set an adapter when:
|
||||
- The repo or its skills clearly target a specific runtime (e.g. gstack is built for Claude Code, so `claude_local` is appropriate)
|
||||
- The user explicitly requests a specific adapter
|
||||
- The agent's role requires a specific runtime capability
|
||||
|
||||
### Env Inputs Rules
|
||||
|
||||
**Do not add boilerplate env variables.** Only add env inputs that the agent actually needs based on its skills or role:
|
||||
- `GH_TOKEN` for agents that push code, create PRs, or interact with GitHub
|
||||
- API keys only when a skill explicitly requires them
|
||||
- Never set `ANTHROPIC_API_KEY` as a default empty env variable — the runtime handles this
|
||||
|
||||
Example with adapter (only when warranted):
|
||||
```yaml
|
||||
schema: paperclip/v1
|
||||
agents:
|
||||
release-engineer:
|
||||
adapter:
|
||||
type: claude_local
|
||||
config:
|
||||
model: claude-sonnet-4-6
|
||||
inputs:
|
||||
env:
|
||||
GH_TOKEN:
|
||||
kind: secret
|
||||
requirement: optional
|
||||
```
|
||||
|
||||
Example — only agents with actual overrides appear:
|
||||
```yaml
|
||||
schema: paperclip/v1
|
||||
agents:
|
||||
release-engineer:
|
||||
inputs:
|
||||
env:
|
||||
GH_TOKEN:
|
||||
kind: secret
|
||||
requirement: optional
|
||||
```
|
||||
|
||||
In this example, only `release-engineer` appears because it needs `GH_TOKEN`. The other agents (ceo, cto, etc.) have no overrides, so they are omitted entirely from `.paperclip.yaml`.
|
||||
|
||||
## External Skill References
|
||||
|
||||
When referencing skills from a GitHub repo, always use the references pattern:
|
||||
|
||||
```yaml
|
||||
metadata:
|
||||
sources:
|
||||
- kind: github-file
|
||||
repo: owner/repo
|
||||
path: path/to/SKILL.md
|
||||
commit: <full SHA from git ls-remote or the repo>
|
||||
attribution: Owner or Org Name
|
||||
license: <from the repo's LICENSE>
|
||||
usage: referenced
|
||||
```
|
||||
|
||||
Get the commit SHA with:
|
||||
|
||||
```bash
|
||||
git ls-remote https://github.com/owner/repo HEAD
|
||||
```
|
||||
|
||||
Do NOT copy external skill content into the package unless the user explicitly asks.
|
||||
144
.agents/skills/company-creator/references/companies-spec.md
Normal file
144
.agents/skills/company-creator/references/companies-spec.md
Normal file
@@ -0,0 +1,144 @@
|
||||
# Agent Companies Specification Reference
|
||||
|
||||
The normative specification lives at:
|
||||
|
||||
- Web: https://agentcompanies.io/specification
|
||||
- Local: docs/companies/companies-spec.md
|
||||
|
||||
Read the local spec file before generating any package files. The spec defines the canonical format and all frontmatter fields. Below is a quick-reference summary for common authoring tasks.
|
||||
|
||||
## Package Kinds
|
||||
|
||||
| File | Kind | Purpose |
|
||||
| ---------- | ------- | ------------------------------------------------- |
|
||||
| COMPANY.md | company | Root entrypoint, org boundary and defaults |
|
||||
| TEAM.md | team | Reusable org subtree |
|
||||
| AGENTS.md | agent | One role, instructions, and attached skills |
|
||||
| PROJECT.md | project | Planned work grouping |
|
||||
| TASK.md | task | Portable starter task |
|
||||
| SKILL.md | skill | Agent Skills capability package (do not redefine) |
|
||||
|
||||
## Directory Layout
|
||||
|
||||
```
|
||||
company-package/
|
||||
├── COMPANY.md
|
||||
├── agents/
|
||||
│ └── <slug>/AGENTS.md
|
||||
├── teams/
|
||||
│ └── <slug>/TEAM.md
|
||||
├── projects/
|
||||
│ └── <slug>/
|
||||
│ ├── PROJECT.md
|
||||
│ └── tasks/
|
||||
│ └── <slug>/TASK.md
|
||||
├── tasks/
|
||||
│ └── <slug>/TASK.md
|
||||
├── skills/
|
||||
│ └── <slug>/SKILL.md
|
||||
├── assets/
|
||||
├── scripts/
|
||||
├── references/
|
||||
└── .paperclip.yaml (optional vendor extension)
|
||||
```
|
||||
|
||||
## Common Frontmatter Fields
|
||||
|
||||
```yaml
|
||||
schema: agentcompanies/v1
|
||||
kind: company | team | agent | project | task
|
||||
slug: url-safe-stable-identity
|
||||
name: Human Readable Name
|
||||
description: Short description for discovery
|
||||
version: 0.1.0
|
||||
license: MIT
|
||||
authors:
|
||||
- name: Jane Doe
|
||||
tags: []
|
||||
metadata: {}
|
||||
sources: []
|
||||
```
|
||||
|
||||
- `schema` usually appears only at package root
|
||||
- `kind` is optional when filename makes it obvious
|
||||
- `slug` must be URL-safe and stable
|
||||
- exporters should omit empty or default-valued fields
|
||||
|
||||
## COMPANY.md Required Fields
|
||||
|
||||
```yaml
|
||||
name: Company Name
|
||||
description: What this company does
|
||||
slug: company-slug
|
||||
schema: agentcompanies/v1
|
||||
```
|
||||
|
||||
Optional: `version`, `license`, `authors`, `goals`, `includes`, `requirements.secrets`
|
||||
|
||||
## AGENTS.md Key Fields
|
||||
|
||||
```yaml
|
||||
name: Agent Name
|
||||
title: Role Title
|
||||
reportsTo: <agent-slug or null>
|
||||
skills:
|
||||
- skill-shortname
|
||||
```
|
||||
|
||||
- Body content is the agent's default instructions
|
||||
- Skills resolve by shortname: `skills/<shortname>/SKILL.md`
|
||||
- Do not export machine-specific paths or secrets
|
||||
|
||||
## TEAM.md Key Fields
|
||||
|
||||
```yaml
|
||||
name: Team Name
|
||||
description: What this team does
|
||||
slug: team-slug
|
||||
manager: ../agent-slug/AGENTS.md
|
||||
includes:
|
||||
- ../agent-slug/AGENTS.md
|
||||
- ../../skills/skill-slug/SKILL.md
|
||||
```
|
||||
|
||||
## PROJECT.md Key Fields
|
||||
|
||||
```yaml
|
||||
name: Project Name
|
||||
description: What this project delivers
|
||||
owner: agent-slug
|
||||
```
|
||||
|
||||
## TASK.md Key Fields
|
||||
|
||||
```yaml
|
||||
name: Task Name
|
||||
assignee: agent-slug
|
||||
project: project-slug
|
||||
schedule:
|
||||
timezone: America/Chicago
|
||||
startsAt: 2026-03-16T09:00:00-05:00
|
||||
recurrence:
|
||||
frequency: weekly
|
||||
interval: 1
|
||||
weekdays: [monday]
|
||||
time: { hour: 9, minute: 0 }
|
||||
```
|
||||
|
||||
## Source References (for external skills/content)
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
- kind: github-file
|
||||
repo: owner/repo
|
||||
path: path/to/SKILL.md
|
||||
commit: <full-sha>
|
||||
sha256: <hash>
|
||||
attribution: Owner Name
|
||||
license: MIT
|
||||
usage: referenced
|
||||
```
|
||||
|
||||
Usage modes: `vendored` (bytes included), `referenced` (pointer only), `mirrored` (cached locally)
|
||||
|
||||
Default to `referenced` for third-party content.
|
||||
184
.agents/skills/company-creator/references/example-company.md
Normal file
184
.agents/skills/company-creator/references/example-company.md
Normal file
@@ -0,0 +1,184 @@
|
||||
# Example Company Package
|
||||
|
||||
A minimal but complete example of an agent company package.
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
lean-dev-shop/
|
||||
├── COMPANY.md
|
||||
├── agents/
|
||||
│ ├── ceo/AGENTS.md
|
||||
│ ├── cto/AGENTS.md
|
||||
│ └── engineer/AGENTS.md
|
||||
├── teams/
|
||||
│ └── engineering/TEAM.md
|
||||
├── projects/
|
||||
│ └── q2-launch/
|
||||
│ ├── PROJECT.md
|
||||
│ └── tasks/
|
||||
│ └── monday-review/TASK.md
|
||||
├── tasks/
|
||||
│ └── weekly-standup/TASK.md
|
||||
├── skills/
|
||||
│ └── code-review/SKILL.md
|
||||
└── .paperclip.yaml
|
||||
```
|
||||
|
||||
## COMPANY.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: Lean Dev Shop
|
||||
description: Small engineering-focused AI company that builds and ships software products
|
||||
slug: lean-dev-shop
|
||||
schema: agentcompanies/v1
|
||||
version: 1.0.0
|
||||
license: MIT
|
||||
authors:
|
||||
- name: Example Org
|
||||
goals:
|
||||
- Build and ship software products
|
||||
- Maintain high code quality
|
||||
---
|
||||
|
||||
Lean Dev Shop is a small, focused engineering company. The CEO oversees strategy and coordinates work. The CTO leads the engineering team. Engineers build and ship code.
|
||||
```
|
||||
|
||||
## agents/ceo/AGENTS.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: CEO
|
||||
title: Chief Executive Officer
|
||||
reportsTo: null
|
||||
skills:
|
||||
- paperclip
|
||||
---
|
||||
|
||||
You are the CEO of Lean Dev Shop. You oversee company strategy, coordinate work across the team, and ensure projects ship on time.
|
||||
|
||||
Your responsibilities:
|
||||
|
||||
- Review and prioritize work across projects
|
||||
- Coordinate with the CTO on technical decisions
|
||||
- Ensure the company goals are being met
|
||||
```
|
||||
|
||||
## agents/cto/AGENTS.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: CTO
|
||||
title: Chief Technology Officer
|
||||
reportsTo: ceo
|
||||
skills:
|
||||
- code-review
|
||||
- paperclip
|
||||
---
|
||||
|
||||
You are the CTO of Lean Dev Shop. You lead the engineering team and make technical decisions.
|
||||
|
||||
Your responsibilities:
|
||||
|
||||
- Set technical direction and architecture
|
||||
- Review code and ensure quality standards
|
||||
- Mentor engineers and unblock technical challenges
|
||||
```
|
||||
|
||||
## agents/engineer/AGENTS.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: Engineer
|
||||
title: Software Engineer
|
||||
reportsTo: cto
|
||||
skills:
|
||||
- code-review
|
||||
- paperclip
|
||||
---
|
||||
|
||||
You are a software engineer at Lean Dev Shop. You write code, fix bugs, and ship features.
|
||||
|
||||
Your responsibilities:
|
||||
|
||||
- Implement features and fix bugs
|
||||
- Write tests and documentation
|
||||
- Participate in code reviews
|
||||
```
|
||||
|
||||
## teams/engineering/TEAM.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: Engineering
|
||||
description: Product and platform engineering team
|
||||
slug: engineering
|
||||
schema: agentcompanies/v1
|
||||
manager: ../../agents/cto/AGENTS.md
|
||||
includes:
|
||||
- ../../agents/engineer/AGENTS.md
|
||||
- ../../skills/code-review/SKILL.md
|
||||
tags:
|
||||
- engineering
|
||||
---
|
||||
|
||||
The engineering team builds and maintains all software products.
|
||||
```
|
||||
|
||||
## projects/q2-launch/PROJECT.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: Q2 Launch
|
||||
description: Ship the Q2 product launch
|
||||
slug: q2-launch
|
||||
owner: cto
|
||||
---
|
||||
|
||||
Deliver all features planned for the Q2 launch, including the new dashboard and API improvements.
|
||||
```
|
||||
|
||||
## projects/q2-launch/tasks/monday-review/TASK.md
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: Monday Review
|
||||
assignee: ceo
|
||||
project: q2-launch
|
||||
schedule:
|
||||
timezone: America/Chicago
|
||||
startsAt: 2026-03-16T09:00:00-05:00
|
||||
recurrence:
|
||||
frequency: weekly
|
||||
interval: 1
|
||||
weekdays:
|
||||
- monday
|
||||
time:
|
||||
hour: 9
|
||||
minute: 0
|
||||
---
|
||||
|
||||
Review the status of Q2 Launch project. Check progress on all open tasks, identify blockers, and update priorities for the week.
|
||||
```
|
||||
|
||||
## skills/code-review/SKILL.md (with external reference)
|
||||
|
||||
```markdown
|
||||
---
|
||||
name: code-review
|
||||
description: Thorough code review skill for pull requests and diffs
|
||||
metadata:
|
||||
sources:
|
||||
- kind: github-file
|
||||
repo: anthropics/claude-code
|
||||
path: skills/code-review/SKILL.md
|
||||
commit: abc123def456
|
||||
sha256: 3b7e...9a
|
||||
attribution: Anthropic
|
||||
license: MIT
|
||||
usage: referenced
|
||||
---
|
||||
|
||||
Review code changes for correctness, style, and potential issues.
|
||||
```
|
||||
79
.agents/skills/company-creator/references/from-repo-guide.md
Normal file
79
.agents/skills/company-creator/references/from-repo-guide.md
Normal file
@@ -0,0 +1,79 @@
|
||||
# Creating a Company From an Existing Repository
|
||||
|
||||
When a user provides a git repo (URL, local path, or tweet linking to a repo), analyze it and create a company package that wraps its content.
|
||||
|
||||
## Analysis Steps
|
||||
|
||||
1. **Clone or read the repo** - Use `git clone` for URLs, read directly for local paths
|
||||
2. **Scan for existing agent/skill files** - Look for SKILL.md, AGENTS.md, CLAUDE.md, .claude/ directories, or similar agent configuration
|
||||
3. **Understand the repo's purpose** - Read README, package.json, main source files to understand what the project does
|
||||
4. **Identify natural agent roles** - Based on the repo's structure and purpose, determine what agents would be useful
|
||||
|
||||
## Handling Existing Skills
|
||||
|
||||
Many repos already contain skills (SKILL.md files). When you find them:
|
||||
|
||||
**Default behavior: use references, not copies.**
|
||||
|
||||
Instead of copying skill content into your company package, create a source reference:
|
||||
|
||||
```yaml
|
||||
metadata:
|
||||
sources:
|
||||
- kind: github-file
|
||||
repo: owner/repo
|
||||
path: path/to/SKILL.md
|
||||
commit: <get the current HEAD commit SHA>
|
||||
attribution: <repo owner or org name>
|
||||
license: <from repo's LICENSE file>
|
||||
usage: referenced
|
||||
```
|
||||
|
||||
To get the commit SHA:
|
||||
```bash
|
||||
git ls-remote https://github.com/owner/repo HEAD
|
||||
```
|
||||
|
||||
Only vendor (copy) skills when:
|
||||
- The user explicitly asks to copy them
|
||||
- The skill is very small and tightly coupled to the company
|
||||
- The source repo is private or may become unavailable
|
||||
|
||||
## Handling Existing Agent Configurations
|
||||
|
||||
If the repo has agent configs (CLAUDE.md, .claude/ directories, codex configs, etc.):
|
||||
- Use them as inspiration for AGENTS.md instructions
|
||||
- Don't copy them verbatim - adapt them to the Agent Companies format
|
||||
- Preserve the intent and key instructions
|
||||
|
||||
## Repo-Only Skills (No Agents)
|
||||
|
||||
When a repo contains only skills and no agents:
|
||||
- Create agents that would naturally use those skills
|
||||
- The agents should be minimal - just enough to give the skills a runtime context
|
||||
- A single agent may use multiple skills from the repo
|
||||
- Name agents based on the domain the skills cover
|
||||
|
||||
Example: A repo with `code-review`, `testing`, and `deployment` skills might become:
|
||||
- A "Lead Engineer" agent with all three skills
|
||||
- Or separate "Reviewer", "QA Engineer", and "DevOps" agents if the skills are distinct enough
|
||||
|
||||
## Common Repo Patterns
|
||||
|
||||
### Developer Tools / CLI repos
|
||||
- Create agents for the tool's primary use cases
|
||||
- Reference any existing skills
|
||||
- Add a project maintainer or lead agent
|
||||
|
||||
### Library / Framework repos
|
||||
- Create agents for development, testing, documentation
|
||||
- Skills from the repo become agent capabilities
|
||||
|
||||
### Full Application repos
|
||||
- Map to departments: engineering, product, QA
|
||||
- Create a lean team structure appropriate to the project size
|
||||
|
||||
### Skills Collection repos (e.g. skills.sh repos)
|
||||
- Each skill or skill group gets an agent
|
||||
- Create a lightweight company or team wrapper
|
||||
- Keep the agent count proportional to the skill diversity
|
||||
230
.agents/skills/deal-with-security-advisory/SKILL.md
Normal file
230
.agents/skills/deal-with-security-advisory/SKILL.md
Normal file
@@ -0,0 +1,230 @@
|
||||
---
|
||||
name: deal-with-security-advisory
|
||||
description: >
|
||||
Handle a GitHub Security Advisory response for Paperclip, including
|
||||
confidential fix development in a temporary private fork, human coordination
|
||||
on advisory-thread comments, CVE request, synchronized advisory publication,
|
||||
and immediate security release steps.
|
||||
---
|
||||
|
||||
# Security Vulnerability Response Instructions
|
||||
|
||||
## ⚠️ CRITICAL: This is a security vulnerability. Everything about this process is confidential until the advisory is published. Do not mention the vulnerability details in any public commit message, PR title, branch name, or comment. Do not push anything to a public branch. Do not discuss specifics in any public channel. Assume anything on the public repo is visible to attackers who will exploit the window between disclosure and user upgrades.
|
||||
|
||||
***
|
||||
|
||||
## Context
|
||||
|
||||
A security vulnerability has been reported via GitHub Security Advisory:
|
||||
|
||||
* **Advisory:** {{ghsaId}} (e.g. GHSA-x8hx-rhr2-9rf7)
|
||||
* **Reporter:** {{reporterHandle}}
|
||||
* **Severity:** {{severity}}
|
||||
* **Notes:** {{notes}}
|
||||
|
||||
***
|
||||
|
||||
## Step 0: Fetch the Advisory Details
|
||||
|
||||
Pull the full advisory so you understand the vulnerability before doing anything else:
|
||||
|
||||
```
|
||||
gh api repos/paperclipai/paperclip/security-advisories/{{ghsaId}}
|
||||
|
||||
```
|
||||
|
||||
Read the `description`, `severity`, `cvss`, and `vulnerabilities` fields. Understand the attack vector before writing code.
|
||||
|
||||
## Step 1: Acknowledge the Report
|
||||
|
||||
⚠️ **This step requires a human.** The advisory thread does not have a comment API. Ask the human operator to post a comment on the private advisory thread acknowledging the report. Provide them this template:
|
||||
|
||||
> Thanks for the report, @{{reporterHandle}}. We've confirmed the issue and are working on a fix. We're targeting a patch release within {{timeframe}}. We'll keep you updated here.
|
||||
|
||||
Provide your human operator with this template, then continue to the next step — do not block waiting for the comment to be posted.
|
||||
|
||||
The steps below use `gh` commands — you have access and credentials outside of your sandbox, so use them directly.
|
||||
|
||||
## Step 2: Create the Temporary Private Fork
|
||||
|
||||
This is where all fix development happens. Never push to the public repo.
|
||||
|
||||
```
|
||||
gh api --method POST \
|
||||
repos/paperclipai/paperclip/security-advisories/{{ghsaId}}/forks
|
||||
|
||||
```
|
||||
|
||||
This returns a repository object for the private fork. Save the `full_name` and `clone_url`.
|
||||
|
||||
Clone it and set up your workspace:
|
||||
|
||||
```
|
||||
# Clone the private fork somewhere outside ~/paperclip
|
||||
git clone <clone_url_from_response> ~/security-patch-{{ghsaId}}
|
||||
cd ~/security-patch-{{ghsaId}}
|
||||
git checkout -b security-fix
|
||||
|
||||
```
|
||||
|
||||
**Do not edit `~/paperclip`** — the dev server is running off the `~/paperclip` master branch and we don't want to touch it. All work happens in the private fork clone.
|
||||
|
||||
**TIPS:**
|
||||
|
||||
* Do not commit `pnpm-lock.yaml` — the repo has actions to manage this
|
||||
* Do not use descriptive branch names that leak the vulnerability (e.g., no `fix-dns-rebinding-rce`). Use something generic like `security-fix`
|
||||
* All work stays in the private fork until publication
|
||||
* CI/GitHub Actions will NOT run on the temporary private fork — this is a GitHub limitation by design. You must run tests locally
|
||||
|
||||
## Step 3: Develop and Validate the Fix
|
||||
|
||||
Write the patch. Same content standards as any PR:
|
||||
|
||||
* It must functionally work — **run tests locally** since CI won't run on the private fork
|
||||
* Consider the whole codebase, not just the narrow vulnerability path. A patch that fixes one vector but opens another is worse than no patch
|
||||
* Ensure backwards compatibility for the database, or be explicit about what breaks
|
||||
* Make sure any UI components still look correct if the fix touches them
|
||||
* The fix should be minimal and focused — don't bundle unrelated changes into a security patch. Reviewers (and the reporter) should be able to read the diff and understand exactly what changed and why
|
||||
|
||||
**Specific to security fixes:**
|
||||
|
||||
* Verify the fix actually closes the attack vector described in the advisory. Reproduce the vulnerability first (using the reporter's description), then confirm the patch prevents it
|
||||
* Consider adjacent attack vectors — if DNS rebinding is the issue, are there other endpoints or modes with the same class of problem?
|
||||
* Do not introduce new dependencies unless absolutely necessary — new deps in a security patch raise eyebrows
|
||||
|
||||
Push your fix to the private fork:
|
||||
|
||||
```
|
||||
git add -A
|
||||
git commit -m "Fix security vulnerability"
|
||||
git push origin security-fix
|
||||
|
||||
```
|
||||
|
||||
## Step 4: Coordinate with the Reporter
|
||||
|
||||
⚠️ **This step requires a human.** Ask the human operator to post on the advisory thread letting the reporter know the fix is ready and giving them a chance to review. Provide them this template:
|
||||
|
||||
> @{{reporterHandle}} — fix is ready in the private fork if you'd like to review before we publish. Planning to release within {{timeframe}}.
|
||||
|
||||
Proceed to the next step without waiting for the reporter's review.
|
||||
|
||||
## Step 5: Request a CVE
|
||||
|
||||
This makes vulnerability scanners (npm audit, Snyk, Dependabot) warn users to upgrade. Without it, nobody gets automated notification.
|
||||
|
||||
```
|
||||
gh api --method POST \
|
||||
repos/paperclipai/paperclip/security-advisories/{{ghsaId}}/cve
|
||||
|
||||
```
|
||||
|
||||
GitHub is a CVE Numbering Authority and will assign one automatically. The CVE may take a few hours to propagate after the advisory is published.
|
||||
|
||||
## Step 6: Publish Everything Simultaneously
|
||||
|
||||
This all happens at once — do not stagger these steps. The goal is **zero window** between the vulnerability becoming public knowledge and the fix being available.
|
||||
|
||||
### 6a. Verify reporter credit before publishing
|
||||
|
||||
```
|
||||
gh api repos/paperclipai/paperclip/security-advisories/{{ghsaId}} --jq '.credits'
|
||||
|
||||
```
|
||||
|
||||
If the reporter is not credited, add them:
|
||||
|
||||
```
|
||||
gh api --method PATCH \
|
||||
repos/paperclipai/paperclip/security-advisories/{{ghsaId}} \
|
||||
--input - << 'EOF'
|
||||
{
|
||||
"credits": [
|
||||
{
|
||||
"login": "{{reporterHandle}}",
|
||||
"type": "reporter"
|
||||
}
|
||||
]
|
||||
}
|
||||
EOF
|
||||
|
||||
```
|
||||
|
||||
### 6b. Update the advisory with the patched version and publish
|
||||
|
||||
```
|
||||
gh api --method PATCH \
|
||||
repos/paperclipai/paperclip/security-advisories/{{ghsaId}} \
|
||||
--input - << 'EOF'
|
||||
{
|
||||
"state": "published",
|
||||
"vulnerabilities": [
|
||||
{
|
||||
"package": {
|
||||
"ecosystem": "npm",
|
||||
"name": "paperclip"
|
||||
},
|
||||
"vulnerable_version_range": "< {{patchedVersion}}",
|
||||
"patched_versions": "{{patchedVersion}}"
|
||||
}
|
||||
]
|
||||
}
|
||||
EOF
|
||||
|
||||
```
|
||||
|
||||
Publishing the advisory simultaneously:
|
||||
|
||||
* Makes the GHSA public
|
||||
* Merges the temporary private fork into your repo
|
||||
* Triggers the CVE assignment (if requested in step 5)
|
||||
|
||||
### 6c. Cut a release immediately after merge
|
||||
|
||||
```
|
||||
cd ~/paperclip
|
||||
git pull origin master
|
||||
|
||||
gh release create v{{patchedVersion}} \
|
||||
--repo paperclipai/paperclip \
|
||||
--title "v{{patchedVersion}} — Security Release" \
|
||||
--notes "## Security Release
|
||||
|
||||
This release fixes a critical security vulnerability.
|
||||
|
||||
### What was fixed
|
||||
{{briefDescription}} (e.g., Remote code execution via DNS rebinding in \`local_trusted\` mode)
|
||||
|
||||
### Advisory
|
||||
https://github.com/paperclipai/paperclip/security/advisories/{{ghsaId}}
|
||||
|
||||
### Credit
|
||||
Thanks to @{{reporterHandle}} for responsibly disclosing this vulnerability.
|
||||
|
||||
### Action required
|
||||
All users running versions prior to {{patchedVersion}} should upgrade immediately."
|
||||
|
||||
```
|
||||
|
||||
## Step 7: Post-Publication Verification
|
||||
|
||||
```
|
||||
# Verify the advisory is published and CVE is assigned
|
||||
gh api repos/paperclipai/paperclip/security-advisories/{{ghsaId}} \
|
||||
--jq '{state: .state, cve_id: .cve_id, published_at: .published_at}'
|
||||
|
||||
# Verify the release exists
|
||||
gh release view v{{patchedVersion}} --repo paperclipai/paperclip
|
||||
|
||||
```
|
||||
|
||||
If the CVE hasn't been assigned yet, that's normal — it can take a few hours.
|
||||
|
||||
⚠️ **Human step:** Ask the human operator to post a final comment on the advisory thread confirming publication and thanking the reporter.
|
||||
|
||||
Tell the human operator what you did by posting a comment to this task, including:
|
||||
|
||||
* The published advisory URL: `https://github.com/paperclipai/paperclip/security/advisories/{{ghsaId}}`
|
||||
* The release URL
|
||||
* Whether the CVE has been assigned yet
|
||||
* All URLs to any pull requests or branches
|
||||
209
.agents/skills/prcheckloop/SKILL.md
Normal file
209
.agents/skills/prcheckloop/SKILL.md
Normal file
@@ -0,0 +1,209 @@
|
||||
---
|
||||
name: prcheckloop
|
||||
description: >
|
||||
Iteratively gets a GitHub pull request's checks green. Detects the PR for the
|
||||
current branch or uses a provided PR number, waits for every check on the
|
||||
latest head SHA to appear and finish, investigates failing checks, fixes
|
||||
actionable code or test issues, pushes, and repeats. Escalates with a precise
|
||||
blocker when failures are external, flaky, or not safely fixable. Use when a
|
||||
PR still has unsuccessful checks after review fixes, including after greploop.
|
||||
---
|
||||
|
||||
# PRCheckloop
|
||||
|
||||
Get a GitHub PR to a fully green check state, or exit with a concrete blocker.
|
||||
|
||||
## Scope
|
||||
|
||||
- GitHub PRs only. If the repo is GitLab, stop and use `check-pr`.
|
||||
- Focus on checks for the latest PR head SHA, not old commits.
|
||||
- Focus on CI/status checks, not review comments or PR template cleanup.
|
||||
- If the user also wants review-comment cleanup, pair this with `check-pr`.
|
||||
|
||||
## Inputs
|
||||
|
||||
- **PR number** (optional): If not provided, detect the PR for the current branch.
|
||||
- **Max iterations**: default `5`.
|
||||
|
||||
## Workflow
|
||||
|
||||
### 1. Identify the PR
|
||||
|
||||
If no PR number is provided, detect it from the current branch:
|
||||
|
||||
```bash
|
||||
gh pr view --json number,headRefName,headRefOid,url,isDraft
|
||||
```
|
||||
|
||||
If needed, switch to the PR branch before making changes.
|
||||
|
||||
Stop early if:
|
||||
|
||||
- `gh` is not authenticated
|
||||
- there is no PR for the branch
|
||||
- the repo is not hosted on GitHub
|
||||
|
||||
### 2. Track the latest head SHA
|
||||
|
||||
Always work against the current PR head SHA:
|
||||
|
||||
```bash
|
||||
PR_JSON=$(gh pr view "$PR_NUMBER" --json number,headRefName,headRefOid,url)
|
||||
HEAD_SHA=$(echo "$PR_JSON" | jq -r .headRefOid)
|
||||
PR_URL=$(echo "$PR_JSON" | jq -r .url)
|
||||
```
|
||||
|
||||
Ignore failing checks from older SHAs. After every push, refresh `HEAD_SHA` and
|
||||
restart the inspection loop.
|
||||
|
||||
### 3. Inventory checks for that SHA
|
||||
|
||||
Fetch both GitHub check runs and legacy commit status contexts:
|
||||
|
||||
```bash
|
||||
gh api "repos/{owner}/{repo}/commits/$HEAD_SHA/check-runs?per_page=100"
|
||||
gh api "repos/{owner}/{repo}/commits/$HEAD_SHA/status"
|
||||
```
|
||||
|
||||
For a compact PR-level view, this GraphQL payload is useful:
|
||||
|
||||
```bash
|
||||
gh api graphql -f query='
|
||||
query($owner:String!, $repo:String!, $pr:Int!) {
|
||||
repository(owner:$owner, name:$repo) {
|
||||
pullRequest(number:$pr) {
|
||||
headRefOid
|
||||
url
|
||||
statusCheckRollup {
|
||||
contexts(first:100) {
|
||||
nodes {
|
||||
__typename
|
||||
... on CheckRun { name status conclusion detailsUrl workflowName }
|
||||
... on StatusContext { context state targetUrl description }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}' -F owner=OWNER -F repo=REPO -F pr="$PR_NUMBER"
|
||||
```
|
||||
|
||||
### 4. Wait for checks to actually run
|
||||
|
||||
After a new push, checks can take a moment to appear. Poll every 15-30 seconds
|
||||
until one of these is true:
|
||||
|
||||
- checks have appeared and every item is in a terminal state
|
||||
- checks have appeared and at least one failed
|
||||
- no checks appear after a reasonable wait, usually 2 minutes
|
||||
|
||||
Treat these as terminal success states:
|
||||
|
||||
- check runs: `SUCCESS`, `NEUTRAL`, `SKIPPED`
|
||||
- status contexts: `SUCCESS`
|
||||
|
||||
Treat these as pending:
|
||||
|
||||
- check runs: `QUEUED`, `PENDING`, `WAITING`, `REQUESTED`, `IN_PROGRESS`
|
||||
- status contexts: `PENDING`
|
||||
|
||||
Treat these as failures:
|
||||
|
||||
- check runs: `FAILURE`, `TIMED_OUT`, `CANCELLED`, `ACTION_REQUIRED`, `STARTUP_FAILURE`, `STALE`
|
||||
- status contexts: `FAILURE`, `ERROR`
|
||||
|
||||
If no checks appear for the latest SHA, inspect `.github/workflows/`, workflow
|
||||
path filters, and branch protection expectations. If the missing check cannot be
|
||||
caused or fixed from the repo, escalate.
|
||||
|
||||
### 5. Investigate failing checks
|
||||
|
||||
For GitHub Actions failures, inspect runs and failed logs for the current SHA:
|
||||
|
||||
```bash
|
||||
gh run list --commit "$HEAD_SHA" --json databaseId,workflowName,status,conclusion,url,headSha
|
||||
gh run view <RUN_ID> --json databaseId,name,workflowName,status,conclusion,jobs,url,headSha
|
||||
gh run view <RUN_ID> --log-failed
|
||||
```
|
||||
|
||||
For each failing check, classify it:
|
||||
|
||||
| Failure type | Action |
|
||||
|---|---|
|
||||
| Code/test regression | Reproduce locally, fix, and verify |
|
||||
| Lint/type/build mismatch | Run the matching local command from the workflow and fix it |
|
||||
| Flake or transient infra issue | Rerun once if evidence supports flakiness |
|
||||
| External service/status app failure | Escalate with the details URL and owner guess |
|
||||
| Missing secret/permission/branch protection issue | Escalate immediately |
|
||||
|
||||
Only rerun a failed job once without code changes. Do not loop on reruns.
|
||||
|
||||
### 6. Fix actionable failures
|
||||
|
||||
If the failure is actionable from the checked-out code:
|
||||
|
||||
1. Read the workflow or failing command to identify the real gate.
|
||||
2. Reproduce locally where reasonable.
|
||||
3. Make the smallest correct fix.
|
||||
4. Run focused verification first, then broader verification if needed.
|
||||
5. Commit in a logical commit.
|
||||
6. Push before re-checking the PR.
|
||||
|
||||
Do not stop at a local fix. The loop is only complete when the remote PR checks
|
||||
for the new head SHA are green.
|
||||
|
||||
### 7. Push and repeat
|
||||
|
||||
After each fix:
|
||||
|
||||
```bash
|
||||
git push
|
||||
sleep 5
|
||||
```
|
||||
|
||||
Then refresh the PR metadata, get the new `HEAD_SHA`, and restart from Step 3.
|
||||
|
||||
Exit the loop only when:
|
||||
|
||||
- all checks for the latest head SHA are green, or
|
||||
- a blocker remains after reasonable repair effort, or
|
||||
- the max iteration count is reached
|
||||
|
||||
### 8. Escalate blockers precisely
|
||||
|
||||
If you cannot get the PR green, report:
|
||||
|
||||
- PR URL
|
||||
- latest head SHA
|
||||
- exact failing or missing check names
|
||||
- details URLs
|
||||
- what you already tried
|
||||
- why it is blocked
|
||||
- who should likely unblock it
|
||||
- the next concrete action
|
||||
|
||||
Good blocker examples:
|
||||
|
||||
- external status app outage
|
||||
- missing GitHub secret or permission
|
||||
- required check name mismatch in branch protection
|
||||
- persistent flake after one rerun
|
||||
- failure needs credentials or infrastructure access you do not have
|
||||
|
||||
## Output
|
||||
|
||||
When the skill completes, report:
|
||||
|
||||
- PR URL and branch
|
||||
- final head SHA
|
||||
- green/pending/failing check summary
|
||||
- fixes made and verification run
|
||||
- whether changes were pushed
|
||||
- blocker summary if not fully green
|
||||
|
||||
## Notes
|
||||
|
||||
- This skill is intentionally narrower than `check-pr`: it is a repair loop for
|
||||
PR checks.
|
||||
- This skill complements `greploop`: Greptile can be perfect while CI is still
|
||||
red.
|
||||
1
.claude/skills/company-creator
Symbolic link
1
.claude/skills/company-creator
Symbolic link
@@ -0,0 +1 @@
|
||||
../../.agents/skills/company-creator
|
||||
@@ -1,3 +1,4 @@
|
||||
DATABASE_URL=postgres://paperclip:paperclip@localhost:5432/paperclip
|
||||
PORT=3100
|
||||
SERVE_UI=false
|
||||
BETTER_AUTH_SECRET=paperclip-dev-secret
|
||||
|
||||
7
.github/CODEOWNERS
vendored
7
.github/CODEOWNERS
vendored
@@ -8,3 +8,10 @@ scripts/rollback-latest.sh @cryppadotta @devinfoley
|
||||
doc/RELEASING.md @cryppadotta @devinfoley
|
||||
doc/PUBLISHING.md @cryppadotta @devinfoley
|
||||
doc/RELEASE-AUTOMATION-SETUP.md @cryppadotta @devinfoley
|
||||
|
||||
# Package files — dependency changes require review
|
||||
# package.json matches recursively at all depths (covers root + all workspaces)
|
||||
package.json @cryppadotta @devinfoley
|
||||
pnpm-lock.yaml @cryppadotta @devinfoley
|
||||
pnpm-workspace.yaml @cryppadotta @devinfoley
|
||||
.npmrc @cryppadotta @devinfoley
|
||||
|
||||
65
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
65
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
## Thinking Path
|
||||
|
||||
<!--
|
||||
Required. Trace your reasoning from the top of the project down to this
|
||||
specific change. Start with what Paperclip is, then narrow through the
|
||||
subsystem, the problem, and why this PR exists. Use blockquote style.
|
||||
Aim for 5–8 steps. See CONTRIBUTING.md for full examples.
|
||||
-->
|
||||
|
||||
> - Paperclip orchestrates AI agents for zero-human companies
|
||||
> - [Which subsystem or capability is involved]
|
||||
> - [What problem or gap exists]
|
||||
> - [Why it needs to be addressed]
|
||||
> - This pull request ...
|
||||
> - The benefit is ...
|
||||
|
||||
## What Changed
|
||||
|
||||
<!-- Bullet list of concrete changes. One bullet per logical unit. -->
|
||||
|
||||
-
|
||||
|
||||
## Verification
|
||||
|
||||
<!--
|
||||
How can a reviewer confirm this works? Include test commands, manual
|
||||
steps, or both. For UI changes, include before/after screenshots.
|
||||
-->
|
||||
|
||||
-
|
||||
|
||||
## Risks
|
||||
|
||||
<!--
|
||||
What could go wrong? Mention migration safety, breaking changes,
|
||||
behavioral shifts, or "Low risk" if genuinely minor.
|
||||
-->
|
||||
|
||||
-
|
||||
|
||||
## Model Used
|
||||
|
||||
<!--
|
||||
Required. Specify which AI model was used to produce or assist with
|
||||
this change. Be as descriptive as possible — include:
|
||||
• Provider and model name (e.g., Claude, GPT, Gemini, Codex)
|
||||
• Exact model ID or version (e.g., claude-opus-4-6, gpt-4-turbo-2024-04-09)
|
||||
• Context window size if relevant (e.g., 1M context)
|
||||
• Reasoning/thinking mode if applicable (e.g., extended thinking, chain-of-thought)
|
||||
• Any other relevant capability details (e.g., tool use, code execution)
|
||||
If no AI model was used, write "None — human-authored".
|
||||
-->
|
||||
|
||||
-
|
||||
|
||||
## Checklist
|
||||
|
||||
- [ ] I have included a thinking path that traces from project context to this change
|
||||
- [ ] I have specified the model used (with version and capability details)
|
||||
- [ ] I have run tests locally and they pass
|
||||
- [ ] I have added or updated tests where applicable
|
||||
- [ ] If this change affects the UI, I have included before/after screenshots
|
||||
- [ ] I have updated relevant documentation to reflect my changes
|
||||
- [ ] I have considered and documented any risks above
|
||||
- [ ] I will address all Greptile and reviewer comments before requesting merge
|
||||
55
.github/workflows/docker.yml
vendored
Normal file
55
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
name: Docker
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
concurrency:
|
||||
group: docker-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/${{ github.repository }}
|
||||
tags: |
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=sha
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
49
.github/workflows/pr-policy.yml
vendored
49
.github/workflows/pr-policy.yml
vendored
@@ -1,49 +0,0 @@
|
||||
name: PR Policy
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: pr-policy-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
policy:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
|
||||
- name: Block manual lockfile edits
|
||||
if: github.head_ref != 'chore/refresh-lockfile'
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
if printf '%s\n' "$changed" | grep -qx 'pnpm-lock.yaml'; then
|
||||
echo "Do not commit pnpm-lock.yaml in pull requests. CI owns lockfile updates."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate dependency resolution when manifests change
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
manifest_pattern='(^|/)package\.json$|^pnpm-workspace\.yaml$|^\.npmrc$|^pnpmfile\.(cjs|js|mjs)$'
|
||||
if printf '%s\n' "$changed" | grep -Eq "$manifest_pattern"; then
|
||||
pnpm install --lockfile-only --ignore-scripts --no-frozen-lockfile
|
||||
fi
|
||||
48
.github/workflows/pr-verify.yml
vendored
48
.github/workflows/pr-verify.yml
vendored
@@ -1,48 +0,0 @@
|
||||
name: PR Verify
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: pr-verify-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
verify:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --no-frozen-lockfile
|
||||
|
||||
- name: Typecheck
|
||||
run: pnpm -r typecheck
|
||||
|
||||
- name: Run tests
|
||||
run: pnpm test:run
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
||||
- name: Release canary dry run
|
||||
run: |
|
||||
git checkout -B master HEAD
|
||||
git checkout -- pnpm-lock.yaml
|
||||
./scripts/release.sh canary --skip-verify --dry-run
|
||||
186
.github/workflows/pr.yml
vendored
Normal file
186
.github/workflows/pr.yml
vendored
Normal file
@@ -0,0 +1,186 @@
|
||||
name: PR
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
concurrency:
|
||||
group: pr-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
policy:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Block manual lockfile edits
|
||||
if: github.head_ref != 'chore/refresh-lockfile'
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
if printf '%s\n' "$changed" | grep -qx 'pnpm-lock.yaml'; then
|
||||
echo "Do not commit pnpm-lock.yaml in pull requests. CI owns lockfile updates."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
|
||||
- name: Validate Dockerfile deps stage
|
||||
run: |
|
||||
missing=0
|
||||
|
||||
# Extract only the deps stage from the Dockerfile
|
||||
deps_stage="$(awk '/^FROM .* AS deps$/{found=1; next} found && /^FROM /{exit} found{print}' Dockerfile)"
|
||||
|
||||
if [ -z "$deps_stage" ]; then
|
||||
echo "::error::Could not extract deps stage from Dockerfile (expected 'FROM ... AS deps')"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Derive workspace search roots from pnpm-workspace.yaml (exclude dev-only packages)
|
||||
search_roots="$(grep '^ *- ' pnpm-workspace.yaml | sed 's/^ *- //' | sed 's/\*$//' | grep -v 'examples' | grep -v 'create-paperclip-plugin' | tr '\n' ' ')"
|
||||
|
||||
if [ -z "$search_roots" ]; then
|
||||
echo "::error::Could not derive workspace roots from pnpm-workspace.yaml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check all workspace package.json files are copied in the deps stage
|
||||
for pkg in $(find $search_roots -maxdepth 2 -name package.json -not -path '*/examples/*' -not -path '*/create-paperclip-plugin/*' -not -path '*/node_modules/*' 2>/dev/null | sort -u); do
|
||||
dir="$(dirname "$pkg")"
|
||||
if ! echo "$deps_stage" | grep -q "^COPY ${dir}/package.json"; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY ${pkg} ${dir}/"
|
||||
missing=1
|
||||
fi
|
||||
done
|
||||
|
||||
# Check patches directory is copied if it exists
|
||||
if [ -d patches ] && ! echo "$deps_stage" | grep -q '^COPY patches/'; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY patches/ patches/"
|
||||
missing=1
|
||||
fi
|
||||
|
||||
if [ "$missing" -eq 1 ]; then
|
||||
echo "Dockerfile deps stage is out of sync. Update it to include the missing files."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate dependency resolution when manifests change
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
manifest_pattern='(^|/)package\.json$|^pnpm-workspace\.yaml$|^\.npmrc$|^pnpmfile\.(cjs|js|mjs)$'
|
||||
if printf '%s\n' "$changed" | grep -Eq "$manifest_pattern"; then
|
||||
pnpm install --lockfile-only --ignore-scripts --no-frozen-lockfile
|
||||
fi
|
||||
|
||||
verify:
|
||||
needs: [policy]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Typecheck
|
||||
run: pnpm -r typecheck
|
||||
|
||||
- name: Run tests
|
||||
run: pnpm test:run
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
||||
- name: Release canary dry run
|
||||
run: |
|
||||
git checkout -B master HEAD
|
||||
git checkout -- pnpm-lock.yaml
|
||||
./scripts/release.sh canary --skip-verify --dry-run
|
||||
|
||||
e2e:
|
||||
needs: [policy]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 24
|
||||
cache: pnpm
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
|
||||
- name: Install Playwright
|
||||
run: npx playwright install --with-deps chromium
|
||||
|
||||
- name: Generate Paperclip config
|
||||
run: |
|
||||
mkdir -p ~/.paperclip/instances/default
|
||||
cat > ~/.paperclip/instances/default/config.json << 'CONF'
|
||||
{
|
||||
"$meta": { "version": 1, "updatedAt": "2026-01-01T00:00:00.000Z", "source": "onboard" },
|
||||
"database": { "mode": "embedded-postgres" },
|
||||
"logging": { "mode": "file" },
|
||||
"server": { "deploymentMode": "local_trusted", "host": "127.0.0.1", "port": 3100 },
|
||||
"auth": { "baseUrlMode": "auto" },
|
||||
"storage": { "provider": "local_disk" },
|
||||
"secrets": { "provider": "local_encrypted", "strictMode": false }
|
||||
}
|
||||
CONF
|
||||
|
||||
- name: Run e2e tests
|
||||
env:
|
||||
PAPERCLIP_E2E_SKIP_LLM: "true"
|
||||
run: pnpm run test:e2e
|
||||
|
||||
- name: Upload Playwright report
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: |
|
||||
tests/e2e/playwright-report/
|
||||
tests/e2e/test-results/
|
||||
retention-days: 14
|
||||
31
.github/workflows/refresh-lockfile.yml
vendored
31
.github/workflows/refresh-lockfile.yml
vendored
@@ -51,11 +51,14 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Create or update pull request
|
||||
id: upsert-pr
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPO_OWNER: ${{ github.repository_owner }}
|
||||
run: |
|
||||
if git diff --quiet -- pnpm-lock.yaml; then
|
||||
echo "Lockfile unchanged, nothing to do."
|
||||
echo "pr_url=" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@@ -68,26 +71,26 @@ jobs:
|
||||
git commit -m "chore(lockfile): refresh pnpm-lock.yaml"
|
||||
git push --force origin "$BRANCH"
|
||||
|
||||
# Create PR if one doesn't already exist
|
||||
existing=$(gh pr list --head "$BRANCH" --json number --jq '.[0].number')
|
||||
if [ -z "$existing" ]; then
|
||||
gh pr create \
|
||||
# Only reuse an open PR from this repository owner, not a fork with the same branch name.
|
||||
pr_url="$(
|
||||
gh pr list --state open --head "$BRANCH" --json url,headRepositoryOwner \
|
||||
--jq ".[] | select(.headRepositoryOwner.login == \"$REPO_OWNER\") | .url" |
|
||||
head -n 1
|
||||
)"
|
||||
if [ -z "$pr_url" ]; then
|
||||
pr_url="$(gh pr create \
|
||||
--head "$BRANCH" \
|
||||
--title "chore(lockfile): refresh pnpm-lock.yaml" \
|
||||
--body "Auto-generated lockfile refresh after dependencies changed on master. This PR only updates pnpm-lock.yaml."
|
||||
echo "Created new PR."
|
||||
--body "Auto-generated lockfile refresh after dependencies changed on master. This PR only updates pnpm-lock.yaml.")"
|
||||
echo "Created new PR: $pr_url"
|
||||
else
|
||||
echo "PR #$existing already exists, branch updated via force push."
|
||||
echo "PR already exists: $pr_url"
|
||||
fi
|
||||
echo "pr_url=$pr_url" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Enable auto-merge for lockfile PR
|
||||
if: steps.upsert-pr.outputs.pr_url != ''
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
pr_url="$(gh pr list --head chore/refresh-lockfile --json url --jq '.[0].url')"
|
||||
if [ -z "$pr_url" ]; then
|
||||
echo "Error: lockfile PR was not found." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
gh pr merge --auto --squash --delete-branch "$pr_url"
|
||||
gh pr merge --auto --squash --delete-branch "${{ steps.upsert-pr.outputs.pr_url }}"
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -31,6 +31,7 @@ server/src/**/*.js.map
|
||||
server/src/**/*.d.ts
|
||||
server/src/**/*.d.ts.map
|
||||
tmp/
|
||||
feedback-export-*
|
||||
|
||||
# Editor / tool temp files
|
||||
*.tmp
|
||||
|
||||
4
.mailmap
4
.mailmap
@@ -1 +1,3 @@
|
||||
Dotta <bippadotta@protonmail.com> Forgotten <forgottenrunes@protonmail.com>
|
||||
Dotta <bippadotta@protonmail.com> <34892728+cryppadotta@users.noreply.github.com>
|
||||
Dotta <bippadotta@protonmail.com> <forgottenrunes@protonmail.com>
|
||||
Dotta <bippadotta@protonmail.com> <dotta@example.com>
|
||||
|
||||
77
AGENTS.md
77
AGENTS.md
@@ -26,6 +26,9 @@ Before making changes, read in this order:
|
||||
- `ui/`: React + Vite board UI
|
||||
- `packages/db/`: Drizzle schema, migrations, DB clients
|
||||
- `packages/shared/`: shared types, constants, validators, API path constants
|
||||
- `packages/adapters/`: agent adapter implementations (Claude, Codex, Cursor, etc.)
|
||||
- `packages/adapter-utils/`: shared adapter utilities
|
||||
- `packages/plugins/`: plugin system packages
|
||||
- `doc/`: operational and product docs
|
||||
|
||||
## 4. Dev Setup (Auto DB)
|
||||
@@ -78,8 +81,8 @@ If you change schema/API behavior, update all impacted layers:
|
||||
4. Do not replace strategic docs wholesale unless asked.
|
||||
Prefer additive updates. Keep `doc/SPEC.md` and `doc/SPEC-implementation.md` aligned.
|
||||
|
||||
5. Keep plan docs dated and centralized.
|
||||
New plan documents belong in `doc/plans/` and should use `YYYY-MM-DD-slug.md` filenames.
|
||||
5. Keep repo plan docs dated and centralized.
|
||||
When you are creating a plan file in the repository itself, new plan documents belong in `doc/plans/` and should use `YYYY-MM-DD-slug.md` filenames. This does not replace Paperclip issue planning: if a Paperclip issue asks for a plan, update the issue `plan` document per the `paperclip` skill instead of creating a repo markdown file.
|
||||
|
||||
## 6. Database Change Workflow
|
||||
|
||||
@@ -105,6 +108,21 @@ Notes:
|
||||
|
||||
## 7. Verification Before Hand-off
|
||||
|
||||
Default local/agent test path:
|
||||
|
||||
```sh
|
||||
pnpm test
|
||||
```
|
||||
|
||||
This is the cheap default and only runs the Vitest suite. Browser suites stay opt-in:
|
||||
|
||||
```sh
|
||||
pnpm test:e2e
|
||||
pnpm test:release-smoke
|
||||
```
|
||||
|
||||
Run the browser suites only when your change touches them or when you are explicitly verifying CI/release flows.
|
||||
|
||||
Run this full check before claiming done:
|
||||
|
||||
```sh
|
||||
@@ -135,7 +153,18 @@ When adding endpoints:
|
||||
- Use company selection context for company-scoped pages
|
||||
- Surface failures clearly; do not silently ignore API errors
|
||||
|
||||
## 10. Definition of Done
|
||||
## 10. Pull Request Requirements
|
||||
|
||||
When creating a pull request (via `gh pr create` or any other method), you **must** read and fill in every section of [`.github/PULL_REQUEST_TEMPLATE.md`](.github/PULL_REQUEST_TEMPLATE.md). Do not craft ad-hoc PR bodies — use the template as the structure for your PR description. Required sections:
|
||||
|
||||
- **Thinking Path** — trace reasoning from project context to this change (see `CONTRIBUTING.md` for examples)
|
||||
- **What Changed** — bullet list of concrete changes
|
||||
- **Verification** — how a reviewer can confirm it works
|
||||
- **Risks** — what could go wrong
|
||||
- **Model Used** — the AI model that produced or assisted with the change (provider, exact model ID, context window, capabilities). Write "None — human-authored" if no AI was used.
|
||||
- **Checklist** — all items checked
|
||||
|
||||
## 11. Definition of Done
|
||||
|
||||
A change is done when all are true:
|
||||
|
||||
@@ -143,3 +172,45 @@ A change is done when all are true:
|
||||
2. Typecheck, tests, and build pass
|
||||
3. Contracts are synced across db/shared/server/ui
|
||||
4. Docs updated when behavior or commands change
|
||||
5. PR description follows the [PR template](.github/PULL_REQUEST_TEMPLATE.md) with all sections filled in (including Model Used)
|
||||
|
||||
## 11. Fork-Specific: HenkDz/paperclip
|
||||
|
||||
This is a fork of `paperclipai/paperclip` with QoL patches and an **external-only** Hermes adapter story on branch `feat/externalize-hermes-adapter` ([tree](https://github.com/HenkDz/paperclip/tree/feat/externalize-hermes-adapter)).
|
||||
|
||||
### Branch Strategy
|
||||
|
||||
- `feat/externalize-hermes-adapter` → core has **no** `hermes-paperclip-adapter` dependency and **no** built-in `hermes_local` registration. Install Hermes via the Adapter Plugin manager (`@henkey/hermes-paperclip-adapter` or a `file:` path).
|
||||
- Older fork branches may still document built-in Hermes; treat this file as authoritative for the externalize branch.
|
||||
|
||||
### Hermes (plugin only)
|
||||
|
||||
- Register through **Board → Adapter manager** (same as Droid). Type remains `hermes_local` once the package is loaded.
|
||||
- UI uses generic **config-schema** + **ui-parser.js** from the package — no Hermes imports in `server/` or `ui/` source.
|
||||
- Optional: `file:` entry in `~/.paperclip/adapter-plugins.json` for local dev of the adapter repo.
|
||||
|
||||
### Local Dev
|
||||
|
||||
- Fork runs on port 3101+ (auto-detects if 3100 is taken by upstream instance)
|
||||
- `npx vite build` hangs on NTFS — use `node node_modules/vite/bin/vite.js build` instead
|
||||
- Server startup from NTFS takes 30-60s — don't assume failure immediately
|
||||
- Kill ALL paperclip processes before starting: `pkill -f "paperclip"; pkill -f "tsx.*index.ts"`
|
||||
- Vite cache survives `rm -rf dist` — delete both: `rm -rf ui/dist ui/node_modules/.vite`
|
||||
|
||||
### Fork QoL Patches (not in upstream)
|
||||
|
||||
These are local modifications in the fork's UI. If re-copying source, these must be re-applied:
|
||||
|
||||
1. **stderr_group** — amber accordion for MCP init noise in `RunTranscriptView.tsx`
|
||||
2. **tool_group** — accordion for consecutive non-terminal tools (write, read, search, browser)
|
||||
3. **Dashboard excerpt** — `LatestRunCard` strips markdown, shows first 3 lines/280 chars
|
||||
|
||||
### Plugin System
|
||||
|
||||
PR #2218 (`feat/external-adapter-phase1`) adds external adapter support. See root `AGENTS.md` for full details.
|
||||
|
||||
- Adapters can be loaded as external plugins via `~/.paperclip/adapter-plugins.json`
|
||||
- The plugin-loader should have ZERO hardcoded adapter imports — pure dynamic loading
|
||||
- `createServerAdapter()` must include ALL optional fields (especially `detectModel`)
|
||||
- Built-in UI adapters can shadow external plugin parsers — remove built-in when fully externalizing
|
||||
- Reference external adapters: Hermes (`@henkey/hermes-paperclip-adapter` or `file:`) and Droid (npm)
|
||||
|
||||
@@ -11,8 +11,9 @@ We really appreciate both small fixes and thoughtful larger changes.
|
||||
- Pick **one** clear thing to fix/improve
|
||||
- Touch the **smallest possible number of files**
|
||||
- Make sure the change is very targeted and easy to review
|
||||
- All automated checks pass (including Greptile comments)
|
||||
- No new lint/test failures
|
||||
- All tests pass and CI is green
|
||||
- Greptile score is 5/5 with all comments addressed
|
||||
- Use the [PR template](.github/PULL_REQUEST_TEMPLATE.md)
|
||||
|
||||
These almost always get merged quickly when they're clean.
|
||||
|
||||
@@ -26,11 +27,30 @@ These almost always get merged quickly when they're clean.
|
||||
- Before / After screenshots (or short video if UI/behavior change)
|
||||
- Clear description of what & why
|
||||
- Proof it works (manual testing notes)
|
||||
- All tests passing
|
||||
- All Greptile + other PR comments addressed
|
||||
- All tests passing and CI green
|
||||
- Greptile score 5/5 with all comments addressed
|
||||
- [PR template](.github/PULL_REQUEST_TEMPLATE.md) fully filled out
|
||||
|
||||
PRs that follow this path are **much** more likely to be accepted, even when they're large.
|
||||
|
||||
## PR Requirements (all PRs)
|
||||
|
||||
### Use the PR Template
|
||||
|
||||
Every pull request **must** follow the PR template at [`.github/PULL_REQUEST_TEMPLATE.md`](.github/PULL_REQUEST_TEMPLATE.md). If you create a PR via the GitHub API or other tooling that bypasses the template, copy its contents into your PR description manually. The template includes required sections: Thinking Path, What Changed, Verification, Risks, Model Used, and a Checklist.
|
||||
|
||||
### Model Used (Required)
|
||||
|
||||
Every PR must include a **Model Used** section specifying which AI model produced or assisted with the change. Include the provider, exact model ID/version, context window size, and any relevant capability details (e.g., reasoning mode, tool use). If no AI was used, write "None — human-authored". This applies to all contributors — human and AI alike.
|
||||
|
||||
### Tests Must Pass
|
||||
|
||||
All tests must pass before a PR can be merged. Run them locally first and verify CI is green after pushing.
|
||||
|
||||
### Greptile Review
|
||||
|
||||
We use [Greptile](https://greptile.com) for automated code review. Your PR must achieve a **5/5 Greptile score** with **all Greptile comments addressed** before it can be merged. If Greptile leaves comments, fix or respond to each one and request a re-review.
|
||||
|
||||
## General Rules (both paths)
|
||||
|
||||
- Write clear commit messages
|
||||
@@ -41,7 +61,7 @@ PRs that follow this path are **much** more likely to be accepted, even when the
|
||||
|
||||
## Writing a Good PR message
|
||||
|
||||
Please include a "thinking path" at the top of your PR message that explains from the top of the project down to what you fixed. E.g.:
|
||||
Your PR description must follow the [PR template](.github/PULL_REQUEST_TEMPLATE.md). All sections are required. The "thinking path" at the top explains from the top of the project down to what you fixed. E.g.:
|
||||
|
||||
### Thinking Path Example 1:
|
||||
|
||||
|
||||
37
Dockerfile
37
Dockerfile
@@ -1,8 +1,23 @@
|
||||
FROM node:lts-trixie-slim AS base
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=1000
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates curl git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
RUN corepack enable
|
||||
&& apt-get install -y --no-install-recommends ca-certificates gosu curl git wget ripgrep python3 \
|
||||
&& mkdir -p -m 755 /etc/apt/keyrings \
|
||||
&& wget -nv -O/etc/apt/keyrings/githubcli-archive-keyring.gpg https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||
&& echo "20e0125d6f6e077a9ad46f03371bc26d90b04939fb95170f5a1905099cc6bcc0 /etc/apt/keyrings/githubcli-archive-keyring.gpg" | sha256sum -c - \
|
||||
&& chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||
&& mkdir -p -m 755 /etc/apt/sources.list.d \
|
||||
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" > /etc/apt/sources.list.d/github-cli.list \
|
||||
&& apt-get update \
|
||||
&& apt-get install -y --no-install-recommends gh \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& corepack enable
|
||||
|
||||
# Modify the existing node user/group to have the specified UID/GID to match host user
|
||||
RUN usermod -u $USER_UID --non-unique node \
|
||||
&& groupmod -g $USER_GID --non-unique node \
|
||||
&& usermod -g $USER_GID -d /paperclip node
|
||||
|
||||
FROM base AS deps
|
||||
WORKDIR /app
|
||||
@@ -13,6 +28,7 @@ COPY ui/package.json ui/
|
||||
COPY packages/shared/package.json packages/shared/
|
||||
COPY packages/db/package.json packages/db/
|
||||
COPY packages/adapter-utils/package.json packages/adapter-utils/
|
||||
COPY packages/mcp-server/package.json packages/mcp-server/
|
||||
COPY packages/adapters/claude-local/package.json packages/adapters/claude-local/
|
||||
COPY packages/adapters/codex-local/package.json packages/adapters/codex-local/
|
||||
COPY packages/adapters/cursor-local/package.json packages/adapters/cursor-local/
|
||||
@@ -20,6 +36,8 @@ COPY packages/adapters/gemini-local/package.json packages/adapters/gemini-local/
|
||||
COPY packages/adapters/openclaw-gateway/package.json packages/adapters/openclaw-gateway/
|
||||
COPY packages/adapters/opencode-local/package.json packages/adapters/opencode-local/
|
||||
COPY packages/adapters/pi-local/package.json packages/adapters/pi-local/
|
||||
COPY packages/plugins/sdk/package.json packages/plugins/sdk/
|
||||
COPY patches/ patches/
|
||||
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
@@ -28,16 +46,22 @@ WORKDIR /app
|
||||
COPY --from=deps /app /app
|
||||
COPY . .
|
||||
RUN pnpm --filter @paperclipai/ui build
|
||||
RUN pnpm --filter @paperclipai/plugin-sdk build
|
||||
RUN pnpm --filter @paperclipai/server build
|
||||
RUN test -f server/dist/index.js || (echo "ERROR: server build output missing" && exit 1)
|
||||
|
||||
FROM base AS production
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=1000
|
||||
WORKDIR /app
|
||||
COPY --chown=node:node --from=build /app /app
|
||||
RUN npm install --global --omit=dev @anthropic-ai/claude-code@latest @openai/codex@latest opencode-ai \
|
||||
&& mkdir -p /paperclip \
|
||||
&& chown node:node /paperclip
|
||||
|
||||
COPY scripts/docker-entrypoint.sh /usr/local/bin/
|
||||
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
|
||||
|
||||
ENV NODE_ENV=production \
|
||||
HOME=/paperclip \
|
||||
HOST=0.0.0.0 \
|
||||
@@ -45,12 +69,15 @@ ENV NODE_ENV=production \
|
||||
SERVE_UI=true \
|
||||
PAPERCLIP_HOME=/paperclip \
|
||||
PAPERCLIP_INSTANCE_ID=default \
|
||||
USER_UID=${USER_UID} \
|
||||
USER_GID=${USER_GID} \
|
||||
PAPERCLIP_CONFIG=/paperclip/instances/default/config.json \
|
||||
PAPERCLIP_DEPLOYMENT_MODE=authenticated \
|
||||
PAPERCLIP_DEPLOYMENT_EXPOSURE=private
|
||||
PAPERCLIP_DEPLOYMENT_EXPOSURE=private \
|
||||
OPENCODE_ALLOW_ALL_MODELS=true
|
||||
|
||||
VOLUME ["/paperclip"]
|
||||
EXPOSE 3100
|
||||
|
||||
USER node
|
||||
ENTRYPOINT ["docker-entrypoint.sh"]
|
||||
CMD ["node", "--import", "./server/node_modules/tsx/dist/loader.mjs", "server/dist/index.js"]
|
||||
|
||||
61
README.md
61
README.md
@@ -177,6 +177,16 @@ Open source. Self-hosted. No Paperclip account required.
|
||||
npx paperclipai onboard --yes
|
||||
```
|
||||
|
||||
That quickstart path now defaults to trusted local loopback mode for the fastest first run. To start in authenticated/private mode instead, choose a bind preset explicitly:
|
||||
|
||||
```bash
|
||||
npx paperclipai onboard --yes --bind lan
|
||||
# or:
|
||||
npx paperclipai onboard --yes --bind tailnet
|
||||
```
|
||||
|
||||
If you already have Paperclip configured, rerunning `onboard` keeps the existing config in place. Use `paperclipai configure` to edit settings.
|
||||
|
||||
Or manually:
|
||||
|
||||
```bash
|
||||
@@ -223,27 +233,62 @@ pnpm dev:once # Full dev without file watching
|
||||
pnpm dev:server # Server only
|
||||
pnpm build # Build all
|
||||
pnpm typecheck # Type checking
|
||||
pnpm test:run # Run tests
|
||||
pnpm test # Cheap default test run (Vitest only)
|
||||
pnpm test:watch # Vitest watch mode
|
||||
pnpm test:e2e # Playwright browser suite
|
||||
pnpm db:generate # Generate DB migration
|
||||
pnpm db:migrate # Apply migrations
|
||||
```
|
||||
|
||||
`pnpm test` does not run Playwright. Browser suites stay separate and are typically run only when working on those flows or in CI.
|
||||
|
||||
See [doc/DEVELOPING.md](doc/DEVELOPING.md) for the full development guide.
|
||||
|
||||
<br/>
|
||||
|
||||
## Roadmap
|
||||
|
||||
- ⚪ Get OpenClaw onboarding easier
|
||||
- ⚪ Get cloud agents working e.g. Cursor / e2b agents
|
||||
- ⚪ ClipMart - buy and sell entire agent companies
|
||||
- ⚪ Easy agent configurations / easier to understand
|
||||
- ⚪ Better support for harness engineering
|
||||
- 🟢 Plugin system (e.g. if you want to add a knowledgebase, custom tracing, queues, etc)
|
||||
- ⚪ Better docs
|
||||
- ✅ Plugin system (e.g. add a knowledge base, custom tracing, queues, etc)
|
||||
- ✅ Get OpenClaw / claw-style agent employees
|
||||
- ✅ companies.sh - import and export entire organizations
|
||||
- ✅ Easy AGENTS.md configurations
|
||||
- ✅ Skills Manager
|
||||
- ✅ Scheduled Routines
|
||||
- ✅ Better Budgeting
|
||||
- ✅ Agent Reviews and Approvals
|
||||
- ⚪ Multiple Human Users
|
||||
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
- ⚪ Artifacts & Work Products
|
||||
- ⚪ Memory & Knowledge
|
||||
- ⚪ Enforced Outcomes
|
||||
- ⚪ MAXIMIZER MODE
|
||||
- ⚪ Deep Planning
|
||||
- ⚪ Work Queues
|
||||
- ⚪ Self-Organization
|
||||
- ⚪ Automatic Organizational Learning
|
||||
- ⚪ CEO Chat
|
||||
- ⚪ Cloud deployments
|
||||
- ⚪ Desktop App
|
||||
|
||||
<br/>
|
||||
|
||||
## Community & Plugins
|
||||
|
||||
Find Plugins and more at [awesome-paperclip](https://github.com/gsxdsm/awesome-paperclip)
|
||||
|
||||
## Telemetry
|
||||
|
||||
Paperclip collects anonymous usage telemetry to help us understand how the product is used and improve it. No personal information, issue content, prompts, file paths, or secrets are ever collected. Private repository references are hashed with a per-install salt before being sent.
|
||||
|
||||
Telemetry is **enabled by default** and can be disabled with any of the following:
|
||||
|
||||
| Method | How |
|
||||
| -------------------- | ------------------------------------------------------- |
|
||||
| Environment variable | `PAPERCLIP_TELEMETRY_DISABLED=1` |
|
||||
| Standard convention | `DO_NOT_TRACK=1` |
|
||||
| CI environments | Automatically disabled when `CI=true` |
|
||||
| Config file | Set `telemetry.enabled: false` in your Paperclip config |
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome contributions. See the [contributing guide](CONTRIBUTING.md) for details.
|
||||
|
||||
8
SECURITY.md
Normal file
8
SECURITY.md
Normal file
@@ -0,0 +1,8 @@
|
||||
# Security Policy
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
Please report security vulnerabilities through GitHub's Security Advisory feature:
|
||||
[https://github.com/paperclipai/paperclip/security/advisories/new](https://github.com/paperclipai/paperclip/security/advisories/new)
|
||||
|
||||
Do not open public issues for security vulnerabilities.
|
||||
143
adapter-plugin.md
Normal file
143
adapter-plugin.md
Normal file
@@ -0,0 +1,143 @@
|
||||
- Created branch: feat/external-adapter-phase1
|
||||
|
||||
I started phase 1 in the most merge-friendly way I could: small central changes, reusing existing registry patterns instead of inventing a whole new plugin system up front.
|
||||
|
||||
What I changed
|
||||
|
||||
1. Server adapter registry is now mutable
|
||||
Files:
|
||||
- server/src/adapters/registry.ts
|
||||
- server/src/adapters/index.ts
|
||||
|
||||
Added:
|
||||
- registerServerAdapter(adapter)
|
||||
- unregisterServerAdapter(type)
|
||||
- requireServerAdapter(type)
|
||||
|
||||
Kept the existing built-in registry shape, but changed initialization so built-ins are registered into a mutable map on startup.
|
||||
|
||||
Why this is merge-friendly:
|
||||
- existing built-in adapter definitions stay where they already are
|
||||
- existing lookup helpers still exist
|
||||
- no big architectural rewrite yet
|
||||
|
||||
2. Runtime adapter validation moved to server routes
|
||||
File:
|
||||
- server/src/routes/agents.ts
|
||||
|
||||
Added:
|
||||
- assertKnownAdapterType(...)
|
||||
|
||||
Used it in:
|
||||
- /companies/:companyId/adapters/:type/models
|
||||
- /companies/:companyId/adapters/:type/detect-model
|
||||
- /companies/:companyId/adapters/:type/test-environment
|
||||
- POST /companies/:companyId/agents
|
||||
- POST /companies/:companyId/agent-hires
|
||||
- PATCH /agents/:id when adapterType is touched
|
||||
|
||||
Why:
|
||||
- shared schemas can now allow external adapter strings
|
||||
- server becomes the real source of truth for “is this adapter actually registered?”
|
||||
|
||||
3. Shared adapterType validation is now open-ended for inputs
|
||||
Files:
|
||||
- packages/shared/src/adapter-type.ts
|
||||
- packages/shared/src/validators/agent.ts
|
||||
- packages/shared/src/validators/access.ts
|
||||
- packages/shared/src/index.ts
|
||||
|
||||
Changed input validation from hardcoded z.enum(AGENT_ADAPTER_TYPES) to:
|
||||
- agentAdapterTypeSchema
|
||||
- optionalAgentAdapterTypeSchema
|
||||
|
||||
These accept any non-empty string.
|
||||
|
||||
Important:
|
||||
- I did not remove AGENT_ADAPTER_TYPES from constants
|
||||
- I did not change the built-in type unions yet
|
||||
- this keeps the change minimal and limits blast radius
|
||||
|
||||
So:
|
||||
- input payloads can carry external adapter types
|
||||
- actual acceptance still depends on server registry validation
|
||||
|
||||
4. UI adapter registry is now mutable too
|
||||
Files:
|
||||
- ui/src/adapters/registry.ts
|
||||
- ui/src/adapters/index.ts
|
||||
|
||||
Added:
|
||||
- registerUIAdapter(adapter)
|
||||
- unregisterUIAdapter(type)
|
||||
- findUIAdapter(type)
|
||||
|
||||
Built-ins are still registered from the same file, same pattern as server.
|
||||
|
||||
5. Began switching UI adapter lists toward the registry
|
||||
Files:
|
||||
- ui/src/adapters/metadata.ts
|
||||
- ui/src/components/AgentConfigForm.tsx
|
||||
- ui/src/pages/NewAgent.tsx
|
||||
|
||||
Changed:
|
||||
- AgentConfigForm adapter list now comes from registered UI adapters via metadata helper
|
||||
- NewAgent supported adapter set now derives from listUIAdapters()
|
||||
|
||||
Why:
|
||||
- phase 1 should start reducing hardcoded UI lists
|
||||
- but without rewriting every page at once
|
||||
|
||||
6. Tests added first, then implementation
|
||||
New tests:
|
||||
- server/src/__tests__/adapter-registry.test.ts
|
||||
- server/src/__tests__/agent-adapter-validation-routes.test.ts
|
||||
- ui/src/adapters/registry.test.ts
|
||||
- packages/shared/src/adapter-types.test.ts
|
||||
|
||||
Small test mock fix:
|
||||
- server/src/__tests__/agent-skills-routes.test.ts
|
||||
|
||||
Current status
|
||||
|
||||
The branch is created and the phase-1 code is started.
|
||||
|
||||
I also followed TDD in spirit:
|
||||
- wrote failing tests first for missing register/unregister APIs
|
||||
- then implemented the registry changes
|
||||
|
||||
What is not finished yet
|
||||
|
||||
I have not finished the green/stabilization pass yet.
|
||||
|
||||
I still need to:
|
||||
- run the targeted tests again after the implementation changes
|
||||
- fix any TS / mocking fallout
|
||||
- decide how far to take phase-1 UI wiring in this branch before stopping
|
||||
|
||||
Also, I intentionally did not yet touch the bigger hardcoded surfaces like:
|
||||
- NewAgentDialog
|
||||
- OnboardingWizard
|
||||
- InviteLanding
|
||||
- duplicated adapterLabels maps
|
||||
- plugin manifest / plugin loader adapter contributions
|
||||
|
||||
That is deliberate.
|
||||
For merge safety, I started with the smallest useful seam:
|
||||
- dynamic server registry
|
||||
- dynamic UI registry
|
||||
- runtime validation based on registry
|
||||
- first couple of UI consumers
|
||||
|
||||
One note
|
||||
There was already an unrelated dirty change in:
|
||||
- server/scripts/dev-watch.ts
|
||||
|
||||
I left that alone and did not make phase-1 changes there.
|
||||
|
||||
Recommended next move
|
||||
I should keep going on this same branch and do the stabilization pass:
|
||||
1. rerun server/ui/shared targeted tests
|
||||
2. fix failures
|
||||
3. run typechecks
|
||||
4. then extend the same registry-driven approach to the next safest UI surfaces
|
||||
304
cli/README.md
Normal file
304
cli/README.md
Normal file
@@ -0,0 +1,304 @@
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/header.png" alt="Paperclip — runs your business" width="720" />
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="#quickstart"><strong>Quickstart</strong></a> ·
|
||||
<a href="https://paperclip.ing/docs"><strong>Docs</strong></a> ·
|
||||
<a href="https://github.com/paperclipai/paperclip"><strong>GitHub</strong></a> ·
|
||||
<a href="https://discord.gg/m4HZY7xNG3"><strong>Discord</strong></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/paperclipai/paperclip/blob/master/LICENSE"><img src="https://img.shields.io/badge/license-MIT-blue" alt="MIT License" /></a>
|
||||
<a href="https://github.com/paperclipai/paperclip/stargazers"><img src="https://img.shields.io/github/stars/paperclipai/paperclip?style=flat" alt="Stars" /></a>
|
||||
<a href="https://discord.gg/m4HZY7xNG3"><img src="https://img.shields.io/badge/discord-join%20chat-5865F2?logo=discord&logoColor=white" alt="Discord" /></a>
|
||||
</p>
|
||||
|
||||
<br/>
|
||||
|
||||
<div align="center">
|
||||
<video src="https://github.com/user-attachments/assets/773bdfb2-6d1e-4e30-8c5f-3487d5b70c8f" width="600" controls></video>
|
||||
</div>
|
||||
|
||||
<br/>
|
||||
|
||||
## What is Paperclip?
|
||||
|
||||
# Open-source orchestration for zero-human companies
|
||||
|
||||
**If OpenClaw is an _employee_, Paperclip is the _company_**
|
||||
|
||||
Paperclip is a Node.js server and React UI that orchestrates a team of AI agents to run a business. Bring your own agents, assign goals, and track your agents' work and costs from one dashboard.
|
||||
|
||||
It looks like a task manager — but under the hood it has org charts, budgets, governance, goal alignment, and agent coordination.
|
||||
|
||||
**Manage business goals, not pull requests.**
|
||||
|
||||
| | Step | Example |
|
||||
| ------ | --------------- | ------------------------------------------------------------------ |
|
||||
| **01** | Define the goal | _"Build the #1 AI note-taking app to $1M MRR."_ |
|
||||
| **02** | Hire the team | CEO, CTO, engineers, designers, marketers — any bot, any provider. |
|
||||
| **03** | Approve and run | Review strategy. Set budgets. Hit go. Monitor from the dashboard. |
|
||||
|
||||
<br/>
|
||||
|
||||
> **COMING SOON: Clipmart** — Download and run entire companies with one click. Browse pre-built company templates — full org structures, agent configs, and skills — and import them into your Paperclip instance in seconds.
|
||||
|
||||
<br/>
|
||||
|
||||
<div align="center">
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center"><strong>Works<br/>with</strong></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/openclaw.svg" width="32" alt="OpenClaw" /><br/><sub>OpenClaw</sub></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/claude.svg" width="32" alt="Claude" /><br/><sub>Claude Code</sub></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/codex.svg" width="32" alt="Codex" /><br/><sub>Codex</sub></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/cursor.svg" width="32" alt="Cursor" /><br/><sub>Cursor</sub></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/bash.svg" width="32" alt="Bash" /><br/><sub>Bash</sub></td>
|
||||
<td align="center"><img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/logos/http.svg" width="32" alt="HTTP" /><br/><sub>HTTP</sub></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<em>If it can receive a heartbeat, it's hired.</em>
|
||||
|
||||
</div>
|
||||
|
||||
<br/>
|
||||
|
||||
## Paperclip is right for you if
|
||||
|
||||
- ✅ You want to build **autonomous AI companies**
|
||||
- ✅ You **coordinate many different agents** (OpenClaw, Codex, Claude, Cursor) toward a common goal
|
||||
- ✅ You have **20 simultaneous Claude Code terminals** open and lose track of what everyone is doing
|
||||
- ✅ You want agents running **autonomously 24/7**, but still want to audit work and chime in when needed
|
||||
- ✅ You want to **monitor costs** and enforce budgets
|
||||
- ✅ You want a process for managing agents that **feels like using a task manager**
|
||||
- ✅ You want to manage your autonomous businesses **from your phone**
|
||||
|
||||
<br/>
|
||||
|
||||
## Features
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center" width="33%">
|
||||
<h3>🔌 Bring Your Own Agent</h3>
|
||||
Any agent, any runtime, one org chart. If it can receive a heartbeat, it's hired.
|
||||
</td>
|
||||
<td align="center" width="33%">
|
||||
<h3>🎯 Goal Alignment</h3>
|
||||
Every task traces back to the company mission. Agents know <em>what</em> to do and <em>why</em>.
|
||||
</td>
|
||||
<td align="center" width="33%">
|
||||
<h3>💓 Heartbeats</h3>
|
||||
Agents wake on a schedule, check work, and act. Delegation flows up and down the org chart.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<h3>💰 Cost Control</h3>
|
||||
Monthly budgets per agent. When they hit the limit, they stop. No runaway costs.
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>🏢 Multi-Company</h3>
|
||||
One deployment, many companies. Complete data isolation. One control plane for your portfolio.
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>🎫 Ticket System</h3>
|
||||
Every conversation traced. Every decision explained. Full tool-call tracing and immutable audit log.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<h3>🛡️ Governance</h3>
|
||||
You're the board. Approve hires, override strategy, pause or terminate any agent — at any time.
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>📊 Org Chart</h3>
|
||||
Hierarchies, roles, reporting lines. Your agents have a boss, a title, and a job description.
|
||||
</td>
|
||||
<td align="center">
|
||||
<h3>📱 Mobile Ready</h3>
|
||||
Monitor and manage your autonomous businesses from anywhere.
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<br/>
|
||||
|
||||
## Problems Paperclip solves
|
||||
|
||||
| Without Paperclip | With Paperclip |
|
||||
| ------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| ❌ You have 20 Claude Code tabs open and can't track which one does what. On reboot you lose everything. | ✅ Tasks are ticket-based, conversations are threaded, sessions persist across reboots. |
|
||||
| ❌ You manually gather context from several places to remind your bot what you're actually doing. | ✅ Context flows from the task up through the project and company goals — your agent always knows what to do and why. |
|
||||
| ❌ Folders of agent configs are disorganized and you're re-inventing task management, communication, and coordination between agents. | ✅ Paperclip gives you org charts, ticketing, delegation, and governance out of the box — so you run a company, not a pile of scripts. |
|
||||
| ❌ Runaway loops waste hundreds of dollars of tokens and max your quota before you even know what happened. | ✅ Cost tracking surfaces token budgets and throttles agents when they're out. Management prioritizes with budgets. |
|
||||
| ❌ You have recurring jobs (customer support, social, reports) and have to remember to manually kick them off. | ✅ Heartbeats handle regular work on a schedule. Management supervises. |
|
||||
| ❌ You have an idea, you have to find your repo, fire up Claude Code, keep a tab open, and babysit it. | ✅ Add a task in Paperclip. Your coding agent works on it until it's done. Management reviews their work. |
|
||||
|
||||
<br/>
|
||||
|
||||
## Why Paperclip is special
|
||||
|
||||
Paperclip handles the hard orchestration details correctly.
|
||||
|
||||
| | |
|
||||
| --------------------------------- | ------------------------------------------------------------------------------------------------------------- |
|
||||
| **Atomic execution.** | Task checkout and budget enforcement are atomic, so no double-work and no runaway spend. |
|
||||
| **Persistent agent state.** | Agents resume the same task context across heartbeats instead of restarting from scratch. |
|
||||
| **Runtime skill injection.** | Agents can learn Paperclip workflows and project context at runtime, without retraining. |
|
||||
| **Governance with rollback.** | Approval gates are enforced, config changes are revisioned, and bad changes can be rolled back safely. |
|
||||
| **Goal-aware execution.** | Tasks carry full goal ancestry so agents consistently see the "why," not just a title. |
|
||||
| **Portable company templates.** | Export/import orgs, agents, and skills with secret scrubbing and collision handling. |
|
||||
| **True multi-company isolation.** | Every entity is company-scoped, so one deployment can run many companies with separate data and audit trails. |
|
||||
|
||||
<br/>
|
||||
|
||||
## What Paperclip is not
|
||||
|
||||
| | |
|
||||
| ---------------------------- | -------------------------------------------------------------------------------------------------------------------- |
|
||||
| **Not a chatbot.** | Agents have jobs, not chat windows. |
|
||||
| **Not an agent framework.** | We don't tell you how to build agents. We tell you how to run a company made of them. |
|
||||
| **Not a workflow builder.** | No drag-and-drop pipelines. Paperclip models companies — with org charts, goals, budgets, and governance. |
|
||||
| **Not a prompt manager.** | Agents bring their own prompts, models, and runtimes. Paperclip manages the organization they work in. |
|
||||
| **Not a single-agent tool.** | This is for teams. If you have one agent, you probably don't need Paperclip. If you have twenty — you definitely do. |
|
||||
| **Not a code review tool.** | Paperclip orchestrates work, not pull requests. Bring your own review process. |
|
||||
|
||||
<br/>
|
||||
|
||||
## Quickstart
|
||||
|
||||
Open source. Self-hosted. No Paperclip account required.
|
||||
|
||||
```bash
|
||||
npx paperclipai onboard --yes
|
||||
```
|
||||
|
||||
That quickstart path now defaults to trusted local loopback mode for the fastest first run. To start in authenticated/private mode instead, choose a bind preset explicitly:
|
||||
|
||||
```bash
|
||||
npx paperclipai onboard --yes --bind lan
|
||||
# or:
|
||||
npx paperclipai onboard --yes --bind tailnet
|
||||
```
|
||||
|
||||
If you already have Paperclip configured, rerunning `onboard` keeps the existing config in place. Use `paperclipai configure` to edit settings.
|
||||
|
||||
Or manually:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/paperclipai/paperclip.git
|
||||
cd paperclip
|
||||
pnpm install
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
This starts the API server at `http://localhost:3100`. An embedded PostgreSQL database is created automatically — no setup required.
|
||||
|
||||
> **Requirements:** Node.js 20+, pnpm 9.15+
|
||||
|
||||
<br/>
|
||||
|
||||
## FAQ
|
||||
|
||||
**What does a typical setup look like?**
|
||||
Locally, a single Node.js process manages an embedded Postgres and local file storage. For production, point it at your own Postgres and deploy however you like. Configure projects, agents, and goals — the agents take care of the rest.
|
||||
|
||||
If you're a solo entrepreneur you can use Tailscale to access Paperclip on the go. Then later you can deploy to e.g. Vercel when you need it.
|
||||
|
||||
**Can I run multiple companies?**
|
||||
Yes. A single deployment can run an unlimited number of companies with complete data isolation.
|
||||
|
||||
**How is Paperclip different from agents like OpenClaw or Claude Code?**
|
||||
Paperclip _uses_ those agents. It orchestrates them into a company — with org charts, budgets, goals, governance, and accountability.
|
||||
|
||||
**Why should I use Paperclip instead of just pointing my OpenClaw to Asana or Trello?**
|
||||
Agent orchestration has subtleties — coordinating who has work checked out, maintaining sessions, monitoring costs, establishing governance — Paperclip does this for you.
|
||||
|
||||
(Bring-your-own-ticket-system is on the Roadmap)
|
||||
|
||||
**Do agents run continuously?**
|
||||
By default, agents run on scheduled heartbeats and event-based triggers (task assignment, @-mentions). You can also hook in continuous agents like OpenClaw. You bring your agent and Paperclip coordinates.
|
||||
|
||||
<br/>
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
pnpm dev # Full dev (API + UI, watch mode)
|
||||
pnpm dev:once # Full dev without file watching
|
||||
pnpm dev:server # Server only
|
||||
pnpm build # Build all
|
||||
pnpm typecheck # Type checking
|
||||
pnpm test # Cheap default test run (Vitest only)
|
||||
pnpm test:watch # Vitest watch mode
|
||||
pnpm test:e2e # Playwright browser suite
|
||||
pnpm db:generate # Generate DB migration
|
||||
pnpm db:migrate # Apply migrations
|
||||
```
|
||||
|
||||
`pnpm test` does not run Playwright. Browser suites stay separate and are typically run only when working on those flows or in CI.
|
||||
|
||||
See [doc/DEVELOPING.md](https://github.com/paperclipai/paperclip/blob/master/doc/DEVELOPING.md) for the full development guide.
|
||||
|
||||
<br/>
|
||||
|
||||
## Roadmap
|
||||
|
||||
- ✅ Plugin system (e.g. add a knowledge base, custom tracing, queues, etc)
|
||||
- ✅ Get OpenClaw / claw-style agent employees
|
||||
- ✅ companies.sh - import and export entire organizations
|
||||
- ✅ Easy AGENTS.md configurations
|
||||
- ✅ Skills Manager
|
||||
- ✅ Scheduled Routines
|
||||
- ✅ Better Budgeting
|
||||
- ⚪ Artifacts & Deployments
|
||||
- ⚪ CEO Chat
|
||||
- ⚪ MAXIMIZER MODE
|
||||
- ⚪ Multiple Human Users
|
||||
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
- ⚪ Cloud deployments
|
||||
- ⚪ Desktop App
|
||||
|
||||
<br/>
|
||||
|
||||
## Community & Plugins
|
||||
|
||||
Find Plugins and more at [awesome-paperclip](https://github.com/gsxdsm/awesome-paperclip)
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome contributions. See the [contributing guide](https://github.com/paperclipai/paperclip/blob/master/CONTRIBUTING.md) for details.
|
||||
|
||||
<br/>
|
||||
|
||||
## Community
|
||||
|
||||
- [Discord](https://discord.gg/m4HZY7xNG3) — Join the community
|
||||
- [GitHub Issues](https://github.com/paperclipai/paperclip/issues) — bugs and feature requests
|
||||
- [GitHub Discussions](https://github.com/paperclipai/paperclip/discussions) — ideas and RFCs
|
||||
|
||||
<br/>
|
||||
|
||||
## License
|
||||
|
||||
MIT © 2026 Paperclip
|
||||
|
||||
## Star History
|
||||
|
||||
[](https://www.star-history.com/?repos=paperclipai%2Fpaperclip&type=date&legend=top-left)
|
||||
|
||||
<br/>
|
||||
|
||||
---
|
||||
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/paperclipai/paperclip/master/doc/assets/footer.jpg" alt="" width="720" />
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<sub>Open source under MIT. Built for people who want to run companies, not babysit agents.</sub>
|
||||
</p>
|
||||
@@ -44,6 +44,9 @@ function writeBaseConfig(configPath: string) {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: { baseDir: "/tmp/paperclip-storage" },
|
||||
|
||||
16
cli/src/__tests__/auth-command-registration.test.ts
Normal file
16
cli/src/__tests__/auth-command-registration.test.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { Command } from "commander";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { registerClientAuthCommands } from "../commands/client/auth.js";
|
||||
|
||||
describe("registerClientAuthCommands", () => {
|
||||
it("registers auth commands without duplicate company-id flags", () => {
|
||||
const program = new Command();
|
||||
const auth = program.command("auth");
|
||||
|
||||
expect(() => registerClientAuthCommands(auth)).not.toThrow();
|
||||
|
||||
const login = auth.commands.find((command) => command.name() === "login");
|
||||
expect(login).toBeDefined();
|
||||
expect(login?.options.filter((option) => option.long === "--company-id")).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
53
cli/src/__tests__/board-auth.test.ts
Normal file
53
cli/src/__tests__/board-auth.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
getStoredBoardCredential,
|
||||
readBoardAuthStore,
|
||||
removeStoredBoardCredential,
|
||||
setStoredBoardCredential,
|
||||
} from "../client/board-auth.js";
|
||||
|
||||
function createTempAuthPath(): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-cli-auth-"));
|
||||
return path.join(dir, "auth.json");
|
||||
}
|
||||
|
||||
describe("board auth store", () => {
|
||||
it("returns an empty store when the file does not exist", () => {
|
||||
const authPath = createTempAuthPath();
|
||||
expect(readBoardAuthStore(authPath)).toEqual({
|
||||
version: 1,
|
||||
credentials: {},
|
||||
});
|
||||
});
|
||||
|
||||
it("stores and retrieves credentials by normalized api base", () => {
|
||||
const authPath = createTempAuthPath();
|
||||
setStoredBoardCredential({
|
||||
apiBase: "http://localhost:3100/",
|
||||
token: "token-123",
|
||||
userId: "user-1",
|
||||
storePath: authPath,
|
||||
});
|
||||
|
||||
expect(getStoredBoardCredential("http://localhost:3100", authPath)).toMatchObject({
|
||||
apiBase: "http://localhost:3100",
|
||||
token: "token-123",
|
||||
userId: "user-1",
|
||||
});
|
||||
});
|
||||
|
||||
it("removes stored credentials", () => {
|
||||
const authPath = createTempAuthPath();
|
||||
setStoredBoardCredential({
|
||||
apiBase: "http://localhost:3100",
|
||||
token: "token-123",
|
||||
storePath: authPath,
|
||||
});
|
||||
|
||||
expect(removeStoredBoardCredential("http://localhost:3100", authPath)).toBe(true);
|
||||
expect(getStoredBoardCredential("http://localhost:3100", authPath)).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -15,6 +15,10 @@ function makeCompany(overrides: Partial<Company>): Company {
|
||||
budgetMonthlyCents: 0,
|
||||
spentMonthlyCents: 0,
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
feedbackDataSharingEnabled: false,
|
||||
feedbackDataSharingConsentAt: null,
|
||||
feedbackDataSharingConsentByUserId: null,
|
||||
feedbackDataSharingTermsVersion: null,
|
||||
brandColor: null,
|
||||
logoAssetId: null,
|
||||
logoUrl: null,
|
||||
|
||||
502
cli/src/__tests__/company-import-export-e2e.test.ts
Normal file
502
cli/src/__tests__/company-import-export-e2e.test.ts
Normal file
@@ -0,0 +1,502 @@
|
||||
import { execFile, spawn } from "node:child_process";
|
||||
import { mkdirSync, mkdtempSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { promisify } from "node:util";
|
||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { createStoredZipArchive } from "./helpers/zip.js";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
type ServerProcess = ReturnType<typeof spawn>;
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres company import/export e2e tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
function writeTestConfig(configPath: string, tempRoot: string, port: number, connectionString: string) {
|
||||
const config = {
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: new Date().toISOString(),
|
||||
source: "doctor",
|
||||
},
|
||||
database: {
|
||||
mode: "postgres",
|
||||
connectionString,
|
||||
embeddedPostgresDataDir: path.join(tempRoot, "embedded-db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: false,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(tempRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(tempRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port,
|
||||
allowedHostnames: [],
|
||||
serveUi: false,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(tempRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(tempRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
function createServerEnv(configPath: string, port: number, connectionString: string) {
|
||||
const env = { ...process.env };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (key.startsWith("PAPERCLIP_")) {
|
||||
delete env[key];
|
||||
}
|
||||
}
|
||||
delete env.DATABASE_URL;
|
||||
delete env.PORT;
|
||||
delete env.HOST;
|
||||
delete env.SERVE_UI;
|
||||
delete env.HEARTBEAT_SCHEDULER_ENABLED;
|
||||
|
||||
env.PAPERCLIP_CONFIG = configPath;
|
||||
env.DATABASE_URL = connectionString;
|
||||
env.HOST = "127.0.0.1";
|
||||
env.PORT = String(port);
|
||||
env.SERVE_UI = "false";
|
||||
env.PAPERCLIP_DB_BACKUP_ENABLED = "false";
|
||||
env.HEARTBEAT_SCHEDULER_ENABLED = "false";
|
||||
env.PAPERCLIP_MIGRATION_AUTO_APPLY = "true";
|
||||
env.PAPERCLIP_UI_DEV_MIDDLEWARE = "false";
|
||||
|
||||
return env;
|
||||
}
|
||||
|
||||
function createCliEnv() {
|
||||
const env = { ...process.env };
|
||||
for (const key of Object.keys(env)) {
|
||||
if (key.startsWith("PAPERCLIP_")) {
|
||||
delete env[key];
|
||||
}
|
||||
}
|
||||
delete env.DATABASE_URL;
|
||||
delete env.PORT;
|
||||
delete env.HOST;
|
||||
delete env.SERVE_UI;
|
||||
delete env.PAPERCLIP_DB_BACKUP_ENABLED;
|
||||
delete env.HEARTBEAT_SCHEDULER_ENABLED;
|
||||
delete env.PAPERCLIP_MIGRATION_AUTO_APPLY;
|
||||
delete env.PAPERCLIP_UI_DEV_MIDDLEWARE;
|
||||
return env;
|
||||
}
|
||||
|
||||
function collectTextFiles(root: string, current: string, files: Record<string, string>) {
|
||||
for (const entry of readdirSync(current, { withFileTypes: true })) {
|
||||
const absolutePath = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
collectTextFiles(root, absolutePath, files);
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile()) continue;
|
||||
const relativePath = path.relative(root, absolutePath).replace(/\\/g, "/");
|
||||
files[relativePath] = readFileSync(absolutePath, "utf8");
|
||||
}
|
||||
}
|
||||
|
||||
async function stopServerProcess(child: ServerProcess | null) {
|
||||
if (!child || child.exitCode !== null) return;
|
||||
child.kill("SIGTERM");
|
||||
await new Promise<void>((resolve) => {
|
||||
child.once("exit", () => resolve());
|
||||
setTimeout(() => {
|
||||
if (child.exitCode === null) {
|
||||
child.kill("SIGKILL");
|
||||
}
|
||||
}, 5_000);
|
||||
});
|
||||
}
|
||||
|
||||
async function api<T>(baseUrl: string, pathname: string, init?: RequestInit): Promise<T> {
|
||||
const res = await fetch(`${baseUrl}${pathname}`, init);
|
||||
const text = await res.text();
|
||||
if (!res.ok) {
|
||||
throw new Error(`Request failed ${res.status} ${pathname}: ${text}`);
|
||||
}
|
||||
return text ? JSON.parse(text) as T : (null as T);
|
||||
}
|
||||
|
||||
async function runCliJson<T>(args: string[], opts: { apiBase: string; configPath: string }) {
|
||||
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
|
||||
const result = await execFileAsync(
|
||||
"pnpm",
|
||||
["--silent", "paperclipai", ...args, "--api-base", opts.apiBase, "--config", opts.configPath, "--json"],
|
||||
{
|
||||
cwd: repoRoot,
|
||||
env: createCliEnv(),
|
||||
maxBuffer: 10 * 1024 * 1024,
|
||||
},
|
||||
);
|
||||
const stdout = result.stdout.trim();
|
||||
const jsonStart = stdout.search(/[\[{]/);
|
||||
if (jsonStart === -1) {
|
||||
throw new Error(`CLI did not emit JSON.\nstdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
|
||||
}
|
||||
return JSON.parse(stdout.slice(jsonStart)) as T;
|
||||
}
|
||||
|
||||
async function waitForServer(
|
||||
apiBase: string,
|
||||
child: ServerProcess,
|
||||
output: { stdout: string[]; stderr: string[] },
|
||||
) {
|
||||
const startedAt = Date.now();
|
||||
while (Date.now() - startedAt < 30_000) {
|
||||
if (child.exitCode !== null) {
|
||||
throw new Error(
|
||||
`paperclipai run exited before healthcheck succeeded.\nstdout:\n${output.stdout.join("")}\nstderr:\n${output.stderr.join("")}`,
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await fetch(`${apiBase}/api/health`);
|
||||
if (res.ok) return;
|
||||
} catch {
|
||||
// Server is still starting.
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 250));
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Timed out waiting for ${apiBase}/api/health.\nstdout:\n${output.stdout.join("")}\nstderr:\n${output.stderr.join("")}`,
|
||||
);
|
||||
}
|
||||
|
||||
// End-to-end suite: boots a real server against an embedded Postgres, then
// round-trips a company (agents/projects/issues) through the CLI's
// export/import commands, including directory and zip-archive sources.
describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
  let tempRoot = "";
  let configPath = "";
  let exportDir = "";
  let apiBase = "";
  let serverProcess: ServerProcess | null = null;
  let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;

  beforeAll(async () => {
    // Isolated scratch directory for config + export artifacts.
    tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-e2e-"));
    configPath = path.join(tempRoot, "config", "config.json");
    exportDir = path.join(tempRoot, "exported-company");

    tempDb = await startEmbeddedPostgresTestDatabase("paperclip-company-cli-db-");

    const port = await getAvailablePort();
    writeTestConfig(configPath, tempRoot, port, tempDb.connectionString);
    apiBase = `http://127.0.0.1:${port}`;

    // Launch the server via the CLI's "run" command, capturing output so
    // failures can include the server logs.
    const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
    const output = { stdout: [] as string[], stderr: [] as string[] };
    const child = spawn(
      "pnpm",
      ["paperclipai", "run", "--config", configPath],
      {
        cwd: repoRoot,
        env: createServerEnv(configPath, port, tempDb.connectionString),
        stdio: ["ignore", "pipe", "pipe"],
      },
    );
    serverProcess = child;
    child.stdout?.on("data", (chunk) => {
      output.stdout.push(String(chunk));
    });
    child.stderr?.on("data", (chunk) => {
      output.stderr.push(String(chunk));
    });

    // Block until the healthcheck responds (or fail with captured logs).
    await waitForServer(apiBase, child, output);
  }, 60_000);

  afterAll(async () => {
    // Teardown order: server first, then database, then scratch files.
    await stopServerProcess(serverProcess);
    await tempDb?.cleanup();
    if (tempRoot) {
      rmSync(tempRoot, { recursive: true, force: true });
    }
  });

  it("exports a company package and imports it into new and existing companies", async () => {
    expect(serverProcess).not.toBeNull();

    // Seed a source company with one agent, one project, and one issue.
    const sourceCompany = await api<{ id: string; name: string; issuePrefix: string }>(apiBase, "/api/companies", {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: JSON.stringify({ name: `CLI Export Source ${Date.now()}` }),
    });

    const sourceAgent = await api<{ id: string; name: string }>(
      apiBase,
      `/api/companies/${sourceCompany.id}/agents`,
      {
        method: "POST",
        headers: { "content-type": "application/json" },
        body: JSON.stringify({
          name: "Export Engineer",
          role: "engineer",
          adapterType: "claude_local",
          adapterConfig: {
            promptTemplate: "You verify company portability.",
          },
        }),
      },
    );

    const sourceProject = await api<{ id: string; name: string }>(
      apiBase,
      `/api/companies/${sourceCompany.id}/projects`,
      {
        method: "POST",
        headers: { "content-type": "application/json" },
        body: JSON.stringify({
          name: "Portability Verification",
          status: "in_progress",
        }),
      },
    );

    // Oversized description exercises large-payload handling end to end.
    const largeIssueDescription = `Round-trip the company package through the CLI.\n\n${"portable-data ".repeat(12_000)}`;

    const sourceIssue = await api<{ id: string; title: string; identifier: string }>(
      apiBase,
      `/api/companies/${sourceCompany.id}/issues`,
      {
        method: "POST",
        headers: { "content-type": "application/json" },
        body: JSON.stringify({
          title: "Validate company import/export",
          description: largeIssueDescription,
          status: "todo",
          projectId: sourceProject.id,
          assigneeAgentId: sourceAgent.id,
        }),
      },
    );

    // Step 1: export the source company to a directory package.
    const exportResult = await runCliJson<{
      ok: boolean;
      out: string;
      filesWritten: number;
    }>(
      [
        "company",
        "export",
        sourceCompany.id,
        "--out",
        exportDir,
        "--include",
        "company,agents,projects,issues",
      ],
      { apiBase, configPath },
    );

    expect(exportResult.ok).toBe(true);
    expect(exportResult.filesWritten).toBeGreaterThan(0);
    expect(readFileSync(path.join(exportDir, "COMPANY.md"), "utf8")).toContain(sourceCompany.name);
    expect(readFileSync(path.join(exportDir, ".paperclip.yaml"), "utf8")).toContain('schema: "paperclip/v1"');

    // Step 2: import the package as a brand-new company.
    const importedNew = await runCliJson<{
      company: { id: string; name: string; action: string };
      agents: Array<{ id: string | null; action: string; name: string }>;
    }>(
      [
        "company",
        "import",
        exportDir,
        "--target",
        "new",
        "--new-company-name",
        `Imported ${sourceCompany.name}`,
        "--include",
        "company,agents,projects,issues",
        "--yes",
      ],
      { apiBase, configPath },
    );

    expect(importedNew.company.action).toBe("created");
    expect(importedNew.agents).toHaveLength(1);
    expect(importedNew.agents[0]?.action).toBe("created");

    // Verify the imported company's contents match the source names/titles.
    const importedAgents = await api<Array<{ id: string; name: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/agents`,
    );
    const importedProjects = await api<Array<{ id: string; name: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/projects`,
    );
    const importedIssues = await api<Array<{ id: string; title: string; identifier: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/issues`,
    );

    expect(importedAgents.map((agent) => agent.name)).toContain(sourceAgent.name);
    expect(importedProjects.map((project) => project.name)).toContain(sourceProject.name);
    expect(importedIssues.map((issue) => issue.title)).toContain(sourceIssue.title);

    // Step 3: dry-run import into the existing (just-created) company.
    const previewExisting = await runCliJson<{
      errors: string[];
      plan: {
        companyAction: string;
        agentPlans: Array<{ action: string }>;
        projectPlans: Array<{ action: string }>;
        issuePlans: Array<{ action: string }>;
      };
    }>(
      [
        "company",
        "import",
        exportDir,
        "--target",
        "existing",
        "--company-id",
        importedNew.company.id,
        "--include",
        "company,agents,projects,issues",
        "--collision",
        "rename",
        "--dry-run",
      ],
      { apiBase, configPath },
    );

    expect(previewExisting.errors).toEqual([]);
    expect(previewExisting.plan.companyAction).toBe("none");
    expect(previewExisting.plan.agentPlans.some((plan) => plan.action === "create")).toBe(true);
    expect(previewExisting.plan.projectPlans.some((plan) => plan.action === "create")).toBe(true);
    expect(previewExisting.plan.issuePlans.some((plan) => plan.action === "create")).toBe(true);

    // Step 4: actually import into the existing company with rename-on-collision.
    const importedExisting = await runCliJson<{
      company: { id: string; action: string };
      agents: Array<{ id: string | null; action: string; name: string }>;
    }>(
      [
        "company",
        "import",
        exportDir,
        "--target",
        "existing",
        "--company-id",
        importedNew.company.id,
        "--include",
        "company,agents,projects,issues",
        "--collision",
        "rename",
        "--yes",
      ],
      { apiBase, configPath },
    );

    expect(importedExisting.company.action).toBe("unchanged");
    expect(importedExisting.agents.some((agent) => agent.action === "created")).toBe(true);

    // After the second import, each entity type should be duplicated (renamed).
    const twiceImportedAgents = await api<Array<{ id: string; name: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/agents`,
    );
    const twiceImportedProjects = await api<Array<{ id: string; name: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/projects`,
    );
    const twiceImportedIssues = await api<Array<{ id: string; title: string; identifier: string }>>(
      apiBase,
      `/api/companies/${importedNew.company.id}/issues`,
    );

    expect(twiceImportedAgents).toHaveLength(2);
    expect(new Set(twiceImportedAgents.map((agent) => agent.name)).size).toBe(2);
    expect(twiceImportedProjects).toHaveLength(2);
    expect(twiceImportedIssues).toHaveLength(2);

    // Step 5: repackage the export directory as a zip and import from it.
    const zipPath = path.join(tempRoot, "exported-company.zip");
    const portableFiles: Record<string, string> = {};
    collectTextFiles(exportDir, exportDir, portableFiles);
    writeFileSync(zipPath, createStoredZipArchive(portableFiles, "paperclip-demo"));

    const importedFromZip = await runCliJson<{
      company: { id: string; name: string; action: string };
      agents: Array<{ id: string | null; action: string; name: string }>;
    }>(
      [
        "company",
        "import",
        zipPath,
        "--target",
        "new",
        "--new-company-name",
        `Zip Imported ${sourceCompany.name}`,
        "--include",
        "company,agents,projects,issues",
        "--yes",
      ],
      { apiBase, configPath },
    );

    expect(importedFromZip.company.action).toBe("created");
    expect(importedFromZip.agents.some((agent) => agent.action === "created")).toBe(true);
  }, 60_000);
});
|
||||
74
cli/src/__tests__/company-import-url.test.ts
Normal file
74
cli/src/__tests__/company-import-url.test.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
isGithubShorthand,
|
||||
looksLikeRepoUrl,
|
||||
isHttpUrl,
|
||||
normalizeGithubImportSource,
|
||||
} from "../commands/client/company.js";
|
||||
|
||||
// isHttpUrl should accept only http/https URLs, never filesystem paths.
describe("isHttpUrl", () => {
  it("matches http URLs", () => {
    expect(isHttpUrl("http://example.com/foo")).toBe(true);
  });

  it("matches https URLs", () => {
    expect(isHttpUrl("https://example.com/foo")).toBe(true);
  });

  it("rejects local paths", () => {
    // Both absolute and relative paths must be treated as non-URLs.
    expect(isHttpUrl("/tmp/my-company")).toBe(false);
    expect(isHttpUrl("./relative")).toBe(false);
  });
});
|
||||
|
||||
// looksLikeRepoUrl requires a GitHub-style owner/repo path, not just any URL.
describe("looksLikeRepoUrl", () => {
  it("matches GitHub URLs", () => {
    expect(looksLikeRepoUrl("https://github.com/org/repo")).toBe(true);
  });

  it("rejects URLs without owner/repo path", () => {
    // A plain host + single path segment is not a repo URL.
    expect(looksLikeRepoUrl("https://example.com/foo")).toBe(false);
  });

  it("rejects local paths", () => {
    expect(looksLikeRepoUrl("/tmp/my-company")).toBe(false);
  });
});
|
||||
|
||||
// isGithubShorthand recognizes "owner/repo" and "owner/repo/path" strings
// while rejecting anything that looks like a filesystem path.
describe("isGithubShorthand", () => {
  it("matches owner/repo/path shorthands", () => {
    expect(isGithubShorthand("paperclipai/companies/gstack")).toBe(true);
    expect(isGithubShorthand("paperclipai/companies")).toBe(true);
  });

  it("rejects local-looking paths", () => {
    // Relative, absolute POSIX, and Windows drive paths must all be rejected.
    expect(isGithubShorthand("./exports/acme")).toBe(false);
    expect(isGithubShorthand("/tmp/acme")).toBe(false);
    expect(isGithubShorthand("C:\\temp\\acme")).toBe(false);
  });
});
|
||||
|
||||
// normalizeGithubImportSource canonicalizes shorthand and tree URLs into
// "https://github.com/<owner>/<repo>?ref=<ref>&path=<subdir>" form,
// URL-encoding refs that contain slashes.
describe("normalizeGithubImportSource", () => {
  it("normalizes shorthand imports to canonical GitHub sources", () => {
    // With no explicit ref, the shorthand defaults to ref=main.
    expect(normalizeGithubImportSource("paperclipai/companies/gstack")).toBe(
      "https://github.com/paperclipai/companies?ref=main&path=gstack",
    );
  });

  it("applies --ref to shorthand imports", () => {
    // "feature/demo" must be percent-encoded as feature%2Fdemo in the query.
    expect(normalizeGithubImportSource("paperclipai/companies/gstack", "feature/demo")).toBe(
      "https://github.com/paperclipai/companies?ref=feature%2Fdemo&path=gstack",
    );
  });

  it("applies --ref to existing GitHub tree URLs without losing the package path", () => {
    // The explicit ref overrides the "main" embedded in the /tree/ URL while
    // the trailing package directory ("gstack") is preserved.
    expect(
      normalizeGithubImportSource(
        "https://github.com/paperclipai/companies/tree/main/gstack",
        "release/2026-03-23",
      ),
    ).toBe(
      "https://github.com/paperclipai/companies?ref=release%2F2026-03-23&path=gstack",
    );
  });
});
|
||||
44
cli/src/__tests__/company-import-zip.test.ts
Normal file
44
cli/src/__tests__/company-import-zip.test.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { mkdtemp, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { resolveInlineSourceFromPath } from "../commands/client/company.js";
|
||||
import { createStoredZipArchive } from "./helpers/zip.js";
|
||||
|
||||
// Temp directories created by tests; drained and deleted after each test.
const tempDirs: string[] = [];

afterEach(async () => {
  // splice(0) empties the list while iterating its removed contents, so a
  // failed rm on one dir does not leave stale entries for the next test.
  for (const dir of tempDirs.splice(0)) {
    await rm(dir, { recursive: true, force: true });
  }
});
|
||||
|
||||
// resolveInlineSourceFromPath, given a .zip path, should read entries from
// inside the archive (filtering to portable files) rather than scanning the
// directory that contains the archive.
describe("resolveInlineSourceFromPath", () => {
  it("imports portable files from a zip archive instead of scanning the parent directory", async () => {
    const tempDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-company-import-zip-"));
    tempDirs.push(tempDir);

    // Build a stored (uncompressed) zip whose entries live under the
    // "paperclip-demo" root; "notes/todo.txt" is a non-portable extra.
    const archivePath = path.join(tempDir, "paperclip-demo.zip");
    const archive = createStoredZipArchive(
      {
        "COMPANY.md": "# Company\n",
        ".paperclip.yaml": "schema: paperclip/v1\n",
        "agents/ceo/AGENT.md": "# CEO\n",
        "notes/todo.txt": "ignore me\n",
      },
      "paperclip-demo",
    );
    await writeFile(archivePath, archive);

    const resolved = await resolveInlineSourceFromPath(archivePath);

    // The .txt entry is excluded; only portable package files survive.
    expect(resolved).toEqual({
      rootPath: "paperclip-demo",
      files: {
        "COMPANY.md": "# Company\n",
        ".paperclip.yaml": "schema: paperclip/v1\n",
        "agents/ceo/AGENT.md": "# CEO\n",
      },
    });
  });
});
|
||||
599
cli/src/__tests__/company.test.ts
Normal file
599
cli/src/__tests__/company.test.ts
Normal file
@@ -0,0 +1,599 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import type { CompanyPortabilityPreviewResult } from "@paperclipai/shared";
|
||||
import {
|
||||
buildCompanyDashboardUrl,
|
||||
buildDefaultImportAdapterOverrides,
|
||||
buildDefaultImportSelectionState,
|
||||
buildImportSelectionCatalog,
|
||||
buildSelectedFilesFromImportSelection,
|
||||
renderCompanyImportPreview,
|
||||
renderCompanyImportResult,
|
||||
resolveCompanyImportApplyConfirmationMode,
|
||||
resolveCompanyImportApiPath,
|
||||
} from "../commands/client/company.js";
|
||||
|
||||
// Route selection for imports: existing-company imports use company-scoped
// /imports/{preview,apply} routes; new-company imports use global routes.
describe("resolveCompanyImportApiPath", () => {
  it("uses company-scoped preview route for existing-company dry runs", () => {
    expect(
      resolveCompanyImportApiPath({
        dryRun: true,
        targetMode: "existing_company",
        companyId: "company-123",
      }),
    ).toBe("/api/companies/company-123/imports/preview");
  });

  it("uses company-scoped apply route for existing-company imports", () => {
    expect(
      resolveCompanyImportApiPath({
        dryRun: false,
        targetMode: "existing_company",
        companyId: "company-123",
      }),
    ).toBe("/api/companies/company-123/imports/apply");
  });

  it("keeps global routes for new-company imports", () => {
    expect(
      resolveCompanyImportApiPath({
        dryRun: true,
        targetMode: "new_company",
      }),
    ).toBe("/api/companies/import/preview");

    expect(
      resolveCompanyImportApiPath({
        dryRun: false,
        targetMode: "new_company",
      }),
    ).toBe("/api/companies/import");
  });

  it("throws when an existing-company import is missing a company id", () => {
    // A whitespace-only id must be treated as missing, not as a valid id.
    expect(() =>
      resolveCompanyImportApiPath({
        dryRun: true,
        targetMode: "existing_company",
        companyId: " ",
      })
    ).toThrow(/require a companyId/i);
  });
});
|
||||
|
||||
// Confirmation policy for "company import" apply: --yes skips the prompt;
// interactive terminals prompt; non-interactive or --json runs hard-require
// --yes so nothing applies silently or hangs waiting for input.
describe("resolveCompanyImportApplyConfirmationMode", () => {
  it("skips confirmation when --yes is set", () => {
    expect(
      resolveCompanyImportApplyConfirmationMode({
        yes: true,
        interactive: false,
        json: false,
      }),
    ).toBe("skip");
  });

  it("prompts in interactive text mode when --yes is not set", () => {
    expect(
      resolveCompanyImportApplyConfirmationMode({
        yes: false,
        interactive: true,
        json: false,
      }),
    ).toBe("prompt");
  });

  it("requires --yes for non-interactive apply", () => {
    expect(() =>
      resolveCompanyImportApplyConfirmationMode({
        yes: false,
        interactive: false,
        json: false,
      })
    ).toThrow(/non-interactive terminal requires --yes/i);
  });

  it("requires --yes for json apply", () => {
    expect(() =>
      resolveCompanyImportApplyConfirmationMode({
        yes: false,
        interactive: false,
        json: true,
      })
    ).toThrow(/with --json requires --yes/i);
  });
});
|
||||
|
||||
// Dashboard URLs must keep any base-path segment ("/app/") from the
// configured server URL instead of resolving against the host root.
describe("buildCompanyDashboardUrl", () => {
  it("preserves the configured base path when building a dashboard URL", () => {
    expect(buildCompanyDashboardUrl("https://paperclip.example/app/", "PAP")).toBe(
      "https://paperclip.example/app/PAP/dashboard",
    );
  });
});
|
||||
|
||||
// renderCompanyImportPreview turns a full preview result into human-readable
// text. This fixture has 7 agents (to force "+1 more" truncation), 1 project,
// 1 issue, 1 skill, plus one warning and one error.
describe("renderCompanyImportPreview", () => {
  it("summarizes the preview with counts, selection info, and truncated examples", () => {
    const preview: CompanyPortabilityPreviewResult = {
      include: {
        company: true,
        agents: true,
        projects: true,
        issues: true,
        skills: true,
      },
      targetCompanyId: "company-123",
      targetCompanyName: "Imported Co",
      collisionStrategy: "rename",
      selectedAgentSlugs: ["ceo", "cto", "eng-1", "eng-2", "eng-3", "eng-4", "eng-5"],
      plan: {
        companyAction: "update",
        // Mixed actions so the summary exercises create/update/skip wording.
        agentPlans: [
          { slug: "ceo", action: "create", plannedName: "CEO", existingAgentId: null, reason: null },
          { slug: "cto", action: "update", plannedName: "CTO", existingAgentId: "agent-2", reason: "replace strategy" },
          { slug: "eng-1", action: "skip", plannedName: "Engineer 1", existingAgentId: "agent-3", reason: "skip strategy" },
          { slug: "eng-2", action: "create", plannedName: "Engineer 2", existingAgentId: null, reason: null },
          { slug: "eng-3", action: "create", plannedName: "Engineer 3", existingAgentId: null, reason: null },
          { slug: "eng-4", action: "create", plannedName: "Engineer 4", existingAgentId: null, reason: null },
          { slug: "eng-5", action: "create", plannedName: "Engineer 5", existingAgentId: null, reason: null },
        ],
        projectPlans: [
          { slug: "alpha", action: "create", plannedName: "Alpha", existingProjectId: null, reason: null },
        ],
        issuePlans: [
          { slug: "kickoff", action: "create", plannedTitle: "Kickoff", reason: null },
        ],
      },
      // Minimal but schema-complete manifest for the source company.
      manifest: {
        schemaVersion: 1,
        generatedAt: "2026-03-23T17:00:00.000Z",
        source: {
          companyId: "company-src",
          companyName: "Source Co",
        },
        includes: {
          company: true,
          agents: true,
          projects: true,
          issues: true,
          skills: true,
        },
        company: {
          path: "COMPANY.md",
          name: "Source Co",
          description: null,
          brandColor: null,
          logoPath: null,
          requireBoardApprovalForNewAgents: false,
          feedbackDataSharingEnabled: false,
          feedbackDataSharingConsentAt: null,
          feedbackDataSharingConsentByUserId: null,
          feedbackDataSharingTermsVersion: null,
        },
        sidebar: {
          agents: ["ceo"],
          projects: ["alpha"],
        },
        agents: [
          {
            slug: "ceo",
            name: "CEO",
            path: "agents/ceo/AGENT.md",
            skills: [],
            role: "ceo",
            title: null,
            icon: null,
            capabilities: null,
            reportsToSlug: null,
            adapterType: "codex_local",
            adapterConfig: {},
            runtimeConfig: {},
            permissions: {},
            budgetMonthlyCents: 0,
            metadata: null,
          },
        ],
        skills: [
          {
            key: "skill-a",
            slug: "skill-a",
            name: "Skill A",
            path: "skills/skill-a/SKILL.md",
            description: null,
            sourceType: "inline",
            sourceLocator: null,
            sourceRef: null,
            trustLevel: null,
            compatibility: null,
            metadata: null,
            fileInventory: [],
          },
        ],
        projects: [
          {
            slug: "alpha",
            name: "Alpha",
            path: "projects/alpha/PROJECT.md",
            description: null,
            ownerAgentSlug: null,
            leadAgentSlug: null,
            targetDate: null,
            color: null,
            status: null,
            executionWorkspacePolicy: null,
            workspaces: [],
            env: null,
            metadata: null,
          },
        ],
        issues: [
          {
            slug: "kickoff",
            identifier: null,
            title: "Kickoff",
            path: "projects/alpha/issues/kickoff/TASK.md",
            projectSlug: "alpha",
            projectWorkspaceKey: null,
            assigneeAgentSlug: "ceo",
            description: null,
            recurring: false,
            routine: null,
            legacyRecurrence: null,
            status: null,
            priority: null,
            labelIds: [],
            billingCode: null,
            executionWorkspaceSettings: null,
            assigneeAdapterOverrides: null,
            metadata: null,
          },
        ],
        envInputs: [
          {
            key: "OPENAI_API_KEY",
            description: null,
            agentSlug: "ceo",
            projectSlug: null,
            kind: "secret",
            requirement: "required",
            defaultValue: null,
            portability: "portable",
          },
        ],
      },
      files: {
        "COMPANY.md": "# Source Co",
      },
      envInputs: [
        {
          key: "OPENAI_API_KEY",
          description: null,
          agentSlug: "ceo",
          projectSlug: null,
          kind: "secret",
          requirement: "required",
          defaultValue: null,
          portability: "portable",
        },
      ],
      warnings: ["One warning"],
      errors: ["One error"],
    };

    const rendered = renderCompanyImportPreview(preview, {
      sourceLabel: "GitHub: https://github.com/paperclipai/companies/demo",
      targetLabel: "Imported Co (company-123)",
      infoMessages: ["Using claude-local adapter"],
    });

    // Substring checks only: the exact layout is free to change as long as
    // counts, truncation ("+1 more"), info, warnings, and errors appear.
    expect(rendered).toContain("Include");
    expect(rendered).toContain("company, projects, tasks, agents, skills");
    expect(rendered).toContain("7 agents total");
    expect(rendered).toContain("1 project total");
    expect(rendered).toContain("1 task total");
    expect(rendered).toContain("skills: 1 skill packaged");
    expect(rendered).toContain("+1 more");
    expect(rendered).toContain("Using claude-local adapter");
    expect(rendered).toContain("Warnings");
    expect(rendered).toContain("Errors");
  });
});
|
||||
|
||||
// renderCompanyImportResult summarizes an applied import: per-entity counts
// broken down by created/updated/skipped, plus target URL, info, and warnings.
describe("renderCompanyImportResult", () => {
  it("summarizes import results with created, updated, and skipped counts", () => {
    const rendered = renderCompanyImportResult(
      {
        company: {
          id: "company-123",
          name: "Imported Co",
          action: "updated",
        },
        // One of each action so each bucket count is exactly 1.
        agents: [
          { slug: "ceo", id: "agent-1", action: "created", name: "CEO", reason: null },
          { slug: "cto", id: "agent-2", action: "updated", name: "CTO", reason: "replace strategy" },
          { slug: "ops", id: null, action: "skipped", name: "Ops", reason: "skip strategy" },
        ],
        projects: [
          { slug: "app", id: "project-1", action: "created", name: "App", reason: null },
          { slug: "ops", id: "project-2", action: "updated", name: "Operations", reason: "replace strategy" },
          { slug: "archive", id: null, action: "skipped", name: "Archive", reason: "skip strategy" },
        ],
        envInputs: [],
        warnings: ["Review API keys"],
      },
      {
        targetLabel: "Imported Co (company-123)",
        companyUrl: "https://paperclip.example/PAP/dashboard",
        infoMessages: ["Using claude-local adapter"],
      },
    );

    expect(rendered).toContain("Company");
    expect(rendered).toContain("https://paperclip.example/PAP/dashboard");
    expect(rendered).toContain("3 agents total (1 created, 1 updated, 1 skipped)");
    expect(rendered).toContain("3 projects total (1 created, 1 updated, 1 skipped)");
    expect(rendered).toContain("Agent results");
    expect(rendered).toContain("Project results");
    expect(rendered).toContain("Using claude-local adapter");
    expect(rendered).toContain("Review API keys");
  });
});
|
||||
|
||||
// Interactive import selection: the catalog is built from a preview, default
// state selects everything, and deselecting tasks must drop task files while
// keeping the parent project's own files.
describe("import selection catalog", () => {
  it("defaults to everything and keeps project selection separate from task selection", () => {
    const preview: CompanyPortabilityPreviewResult = {
      include: {
        company: true,
        agents: true,
        projects: true,
        issues: true,
        skills: true,
      },
      targetCompanyId: "company-123",
      targetCompanyName: "Imported Co",
      collisionStrategy: "rename",
      selectedAgentSlugs: ["ceo"],
      plan: {
        companyAction: "create",
        agentPlans: [],
        projectPlans: [],
        issuePlans: [],
      },
      manifest: {
        schemaVersion: 1,
        generatedAt: "2026-03-23T18:00:00.000Z",
        source: {
          companyId: "company-src",
          companyName: "Source Co",
        },
        includes: {
          company: true,
          agents: true,
          projects: true,
          issues: true,
          skills: true,
        },
        company: {
          path: "COMPANY.md",
          name: "Source Co",
          description: null,
          brandColor: null,
          // The logo references a binary file entry in `files` below.
          logoPath: "images/company-logo.png",
          requireBoardApprovalForNewAgents: false,
          feedbackDataSharingEnabled: false,
          feedbackDataSharingConsentAt: null,
          feedbackDataSharingConsentByUserId: null,
          feedbackDataSharingTermsVersion: null,
        },
        sidebar: {
          agents: ["ceo"],
          projects: ["alpha"],
        },
        agents: [
          {
            slug: "ceo",
            name: "CEO",
            path: "agents/ceo/AGENT.md",
            skills: [],
            role: "ceo",
            title: null,
            icon: null,
            capabilities: null,
            reportsToSlug: null,
            adapterType: "codex_local",
            adapterConfig: {},
            runtimeConfig: {},
            permissions: {},
            budgetMonthlyCents: 0,
            metadata: null,
          },
        ],
        skills: [
          {
            key: "skill-a",
            slug: "skill-a",
            name: "Skill A",
            path: "skills/skill-a/SKILL.md",
            description: null,
            sourceType: "inline",
            sourceLocator: null,
            sourceRef: null,
            trustLevel: null,
            compatibility: null,
            metadata: null,
            fileInventory: [{ path: "skills/skill-a/helper.md", kind: "doc" }],
          },
        ],
        projects: [
          {
            slug: "alpha",
            name: "Alpha",
            path: "projects/alpha/PROJECT.md",
            description: null,
            ownerAgentSlug: null,
            leadAgentSlug: null,
            targetDate: null,
            color: null,
            status: null,
            executionWorkspacePolicy: null,
            workspaces: [],
            env: null,
            metadata: null,
          },
        ],
        issues: [
          {
            slug: "kickoff",
            identifier: null,
            title: "Kickoff",
            path: "projects/alpha/issues/kickoff/TASK.md",
            projectSlug: "alpha",
            projectWorkspaceKey: null,
            assigneeAgentSlug: "ceo",
            description: null,
            recurring: false,
            routine: null,
            legacyRecurrence: null,
            status: null,
            priority: null,
            labelIds: [],
            billingCode: null,
            executionWorkspaceSettings: null,
            assigneeAdapterOverrides: null,
            metadata: null,
          },
        ],
        envInputs: [],
      },
      // Files mix company-, project-, issue-, agent-, and skill-scoped paths
      // so deselection can be asserted per scope.
      files: {
        "COMPANY.md": "# Source Co",
        "README.md": "# Readme",
        ".paperclip.yaml": "schema: paperclip/v1\n",
        "images/company-logo.png": {
          encoding: "base64",
          data: "",
          contentType: "image/png",
        },
        "projects/alpha/PROJECT.md": "# Alpha",
        "projects/alpha/notes.md": "project notes",
        "projects/alpha/issues/kickoff/TASK.md": "# Kickoff",
        "projects/alpha/issues/kickoff/details.md": "task details",
        "agents/ceo/AGENT.md": "# CEO",
        "agents/ceo/prompt.md": "prompt",
        "skills/skill-a/SKILL.md": "# Skill A",
        "skills/skill-a/helper.md": "helper",
      },
      envInputs: [],
      warnings: [],
      errors: [],
    };

    const catalog = buildImportSelectionCatalog(preview);
    const state = buildDefaultImportSelectionState(catalog);

    // Default state: everything in the catalog is selected.
    expect(state.company).toBe(true);
    expect(state.projects.has("alpha")).toBe(true);
    expect(state.issues.has("kickoff")).toBe(true);
    expect(state.agents.has("ceo")).toBe(true);
    expect(state.skills.has("skill-a")).toBe(true);

    // Deselect everything except the project.
    state.company = false;
    state.issues.clear();
    state.agents.clear();
    state.skills.clear();

    const selectedFiles = buildSelectedFilesFromImportSelection(catalog, state);

    // Project files survive; issue files under the same project are dropped,
    // proving project selection does not implicitly select its tasks' files.
    expect(selectedFiles).toContain(".paperclip.yaml");
    expect(selectedFiles).toContain("projects/alpha/PROJECT.md");
    expect(selectedFiles).toContain("projects/alpha/notes.md");
    expect(selectedFiles).not.toContain("projects/alpha/issues/kickoff/TASK.md");
    expect(selectedFiles).not.toContain("projects/alpha/issues/kickoff/details.md");
  });
});
|
||||
|
||||
describe("default adapter overrides", () => {
|
||||
it("maps process-only imported agents to claude_local", () => {
|
||||
const preview: CompanyPortabilityPreviewResult = {
|
||||
include: {
|
||||
company: false,
|
||||
agents: true,
|
||||
projects: false,
|
||||
issues: false,
|
||||
skills: false,
|
||||
},
|
||||
targetCompanyId: null,
|
||||
targetCompanyName: null,
|
||||
collisionStrategy: "rename",
|
||||
selectedAgentSlugs: ["legacy-agent", "explicit-agent"],
|
||||
plan: {
|
||||
companyAction: "none",
|
||||
agentPlans: [],
|
||||
projectPlans: [],
|
||||
issuePlans: [],
|
||||
},
|
||||
manifest: {
|
||||
schemaVersion: 1,
|
||||
generatedAt: "2026-03-23T18:20:00.000Z",
|
||||
source: null,
|
||||
includes: {
|
||||
company: false,
|
||||
agents: true,
|
||||
projects: false,
|
||||
issues: false,
|
||||
skills: false,
|
||||
},
|
||||
company: null,
|
||||
sidebar: null,
|
||||
agents: [
|
||||
{
|
||||
slug: "legacy-agent",
|
||||
name: "Legacy Agent",
|
||||
path: "agents/legacy-agent/AGENT.md",
|
||||
skills: [],
|
||||
role: "agent",
|
||||
title: null,
|
||||
icon: null,
|
||||
capabilities: null,
|
||||
reportsToSlug: null,
|
||||
adapterType: "process",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {},
|
||||
permissions: {},
|
||||
budgetMonthlyCents: 0,
|
||||
metadata: null,
|
||||
},
|
||||
{
|
||||
slug: "explicit-agent",
|
||||
name: "Explicit Agent",
|
||||
path: "agents/explicit-agent/AGENT.md",
|
||||
skills: [],
|
||||
role: "agent",
|
||||
title: null,
|
||||
icon: null,
|
||||
capabilities: null,
|
||||
reportsToSlug: null,
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {},
|
||||
permissions: {},
|
||||
budgetMonthlyCents: 0,
|
||||
metadata: null,
|
||||
},
|
||||
],
|
||||
skills: [],
|
||||
projects: [],
|
||||
issues: [],
|
||||
envInputs: [],
|
||||
},
|
||||
files: {},
|
||||
envInputs: [],
|
||||
warnings: [],
|
||||
errors: [],
|
||||
};
|
||||
|
||||
expect(buildDefaultImportAdapterOverrides(preview)).toEqual({
|
||||
"legacy-agent": {
|
||||
adapterType: "claude_local",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -46,6 +46,9 @@ function createTempConfig(): string {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
|
||||
177
cli/src/__tests__/feedback.test.ts
Normal file
177
cli/src/__tests__/feedback.test.ts
Normal file
@@ -0,0 +1,177 @@
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { mkdtemp, readFile } from "node:fs/promises";
|
||||
import { Command } from "commander";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import type { FeedbackTrace } from "@paperclipai/shared";
|
||||
import { readZipArchive } from "../commands/client/zip.js";
|
||||
import {
|
||||
buildFeedbackTraceQuery,
|
||||
registerFeedbackCommands,
|
||||
renderFeedbackReport,
|
||||
summarizeFeedbackTraces,
|
||||
writeFeedbackExportBundle,
|
||||
} from "../commands/client/feedback.js";
|
||||
|
||||
function makeTrace(overrides: Partial<FeedbackTrace> = {}): FeedbackTrace {
|
||||
return {
|
||||
id: "trace-12345678",
|
||||
companyId: "company-123",
|
||||
feedbackVoteId: "vote-12345678",
|
||||
issueId: "issue-123",
|
||||
projectId: "project-123",
|
||||
issueIdentifier: "PAP-123",
|
||||
issueTitle: "Fix the feedback command",
|
||||
authorUserId: "user-123",
|
||||
targetType: "issue_comment",
|
||||
targetId: "comment-123",
|
||||
vote: "down",
|
||||
status: "pending",
|
||||
destination: "paperclip_labs_feedback_v1",
|
||||
exportId: null,
|
||||
consentVersion: "feedback-data-sharing-v1",
|
||||
schemaVersion: "1",
|
||||
bundleVersion: "1",
|
||||
payloadVersion: "1",
|
||||
payloadDigest: null,
|
||||
payloadSnapshot: {
|
||||
vote: {
|
||||
value: "down",
|
||||
reason: "Needed more detail",
|
||||
},
|
||||
},
|
||||
targetSummary: {
|
||||
label: "Comment",
|
||||
excerpt: "The first answer was too vague.",
|
||||
authorAgentId: "agent-123",
|
||||
authorUserId: null,
|
||||
createdAt: new Date("2026-03-31T12:00:00.000Z"),
|
||||
documentKey: null,
|
||||
documentTitle: null,
|
||||
revisionNumber: null,
|
||||
},
|
||||
redactionSummary: null,
|
||||
attemptCount: 0,
|
||||
lastAttemptedAt: null,
|
||||
exportedAt: null,
|
||||
failureReason: null,
|
||||
createdAt: new Date("2026-03-31T12:01:00.000Z"),
|
||||
updatedAt: new Date("2026-03-31T12:02:00.000Z"),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("registerFeedbackCommands", () => {
|
||||
it("registers the top-level feedback commands", () => {
|
||||
const program = new Command();
|
||||
|
||||
expect(() => registerFeedbackCommands(program)).not.toThrow();
|
||||
|
||||
const feedback = program.commands.find((command) => command.name() === "feedback");
|
||||
expect(feedback).toBeDefined();
|
||||
expect(feedback?.commands.map((command) => command.name())).toEqual(["report", "export"]);
|
||||
expect(feedback?.commands[0]?.options.filter((option) => option.long === "--company-id")).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildFeedbackTraceQuery", () => {
|
||||
it("encodes all supported filters", () => {
|
||||
expect(
|
||||
buildFeedbackTraceQuery({
|
||||
targetType: "issue_comment",
|
||||
vote: "down",
|
||||
status: "pending",
|
||||
projectId: "project-123",
|
||||
issueId: "issue-123",
|
||||
from: "2026-03-31T00:00:00.000Z",
|
||||
to: "2026-03-31T23:59:59.999Z",
|
||||
sharedOnly: true,
|
||||
}),
|
||||
).toBe(
|
||||
"?targetType=issue_comment&vote=down&status=pending&projectId=project-123&issueId=issue-123&from=2026-03-31T00%3A00%3A00.000Z&to=2026-03-31T23%3A59%3A59.999Z&sharedOnly=true&includePayload=true",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("renderFeedbackReport", () => {
|
||||
it("includes summary counts and the optional reason", () => {
|
||||
const traces = [
|
||||
makeTrace(),
|
||||
makeTrace({
|
||||
id: "trace-87654321",
|
||||
feedbackVoteId: "vote-87654321",
|
||||
vote: "up",
|
||||
status: "local_only",
|
||||
payloadSnapshot: {
|
||||
vote: {
|
||||
value: "up",
|
||||
reason: null,
|
||||
},
|
||||
},
|
||||
}),
|
||||
];
|
||||
|
||||
const report = renderFeedbackReport({
|
||||
apiBase: "http://127.0.0.1:3100",
|
||||
companyId: "company-123",
|
||||
traces,
|
||||
summary: summarizeFeedbackTraces(traces),
|
||||
includePayloads: false,
|
||||
});
|
||||
|
||||
expect(report).toContain("Paperclip Feedback Report");
|
||||
expect(report).toContain("thumbs up");
|
||||
expect(report).toContain("thumbs down");
|
||||
expect(report).toContain("Needed more detail");
|
||||
});
|
||||
});
|
||||
|
||||
describe("writeFeedbackExportBundle", () => {
|
||||
it("writes votes, traces, a manifest, and a zip archive", async () => {
|
||||
const tempDir = await mkdtemp(path.join(os.tmpdir(), "paperclip-feedback-export-"));
|
||||
const outputDir = path.join(tempDir, "feedback-export");
|
||||
const traces = [
|
||||
makeTrace(),
|
||||
makeTrace({
|
||||
id: "trace-abcdef12",
|
||||
feedbackVoteId: "vote-abcdef12",
|
||||
issueIdentifier: "PAP-124",
|
||||
issueId: "issue-124",
|
||||
vote: "up",
|
||||
status: "local_only",
|
||||
payloadSnapshot: {
|
||||
vote: {
|
||||
value: "up",
|
||||
reason: null,
|
||||
},
|
||||
},
|
||||
}),
|
||||
];
|
||||
|
||||
const exported = await writeFeedbackExportBundle({
|
||||
apiBase: "http://127.0.0.1:3100",
|
||||
companyId: "company-123",
|
||||
traces,
|
||||
outputDir,
|
||||
});
|
||||
|
||||
expect(exported.manifest.summary.total).toBe(2);
|
||||
expect(exported.manifest.summary.withReason).toBe(1);
|
||||
|
||||
const manifest = JSON.parse(await readFile(path.join(outputDir, "index.json"), "utf8")) as {
|
||||
files: { votes: string[]; traces: string[]; zip: string };
|
||||
};
|
||||
expect(manifest.files.votes).toHaveLength(2);
|
||||
expect(manifest.files.traces).toHaveLength(2);
|
||||
|
||||
const archive = await readFile(exported.zipPath);
|
||||
const zip = await readZipArchive(archive);
|
||||
expect(Object.keys(zip.files)).toEqual(
|
||||
expect.arrayContaining([
|
||||
"index.json",
|
||||
`votes/${manifest.files.votes[0]}`,
|
||||
`traces/${manifest.files.traces[0]}`,
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
6
cli/src/__tests__/helpers/embedded-postgres.ts
Normal file
6
cli/src/__tests__/helpers/embedded-postgres.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestSupport,
|
||||
} from "@paperclipai/db";
|
||||
87
cli/src/__tests__/helpers/zip.ts
Normal file
87
cli/src/__tests__/helpers/zip.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
function writeUint16(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
}
|
||||
|
||||
function writeUint32(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
target[offset + 2] = (value >>> 16) & 0xff;
|
||||
target[offset + 3] = (value >>> 24) & 0xff;
|
||||
}
|
||||
|
||||
function crc32(bytes: Uint8Array) {
|
||||
let crc = 0xffffffff;
|
||||
for (const byte of bytes) {
|
||||
crc ^= byte;
|
||||
for (let bit = 0; bit < 8; bit += 1) {
|
||||
crc = (crc & 1) === 1 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
|
||||
}
|
||||
}
|
||||
return (crc ^ 0xffffffff) >>> 0;
|
||||
}
|
||||
|
||||
export function createStoredZipArchive(files: Record<string, string>, rootPath: string) {
|
||||
const encoder = new TextEncoder();
|
||||
const localChunks: Uint8Array[] = [];
|
||||
const centralChunks: Uint8Array[] = [];
|
||||
let localOffset = 0;
|
||||
let entryCount = 0;
|
||||
|
||||
for (const [relativePath, content] of Object.entries(files).sort(([left], [right]) => left.localeCompare(right))) {
|
||||
const fileName = encoder.encode(`${rootPath}/${relativePath}`);
|
||||
const body = encoder.encode(content);
|
||||
const checksum = crc32(body);
|
||||
|
||||
const localHeader = new Uint8Array(30 + fileName.length);
|
||||
writeUint32(localHeader, 0, 0x04034b50);
|
||||
writeUint16(localHeader, 4, 20);
|
||||
writeUint16(localHeader, 6, 0x0800);
|
||||
writeUint16(localHeader, 8, 0);
|
||||
writeUint32(localHeader, 14, checksum);
|
||||
writeUint32(localHeader, 18, body.length);
|
||||
writeUint32(localHeader, 22, body.length);
|
||||
writeUint16(localHeader, 26, fileName.length);
|
||||
localHeader.set(fileName, 30);
|
||||
|
||||
const centralHeader = new Uint8Array(46 + fileName.length);
|
||||
writeUint32(centralHeader, 0, 0x02014b50);
|
||||
writeUint16(centralHeader, 4, 20);
|
||||
writeUint16(centralHeader, 6, 20);
|
||||
writeUint16(centralHeader, 8, 0x0800);
|
||||
writeUint16(centralHeader, 10, 0);
|
||||
writeUint32(centralHeader, 16, checksum);
|
||||
writeUint32(centralHeader, 20, body.length);
|
||||
writeUint32(centralHeader, 24, body.length);
|
||||
writeUint16(centralHeader, 28, fileName.length);
|
||||
writeUint32(centralHeader, 42, localOffset);
|
||||
centralHeader.set(fileName, 46);
|
||||
|
||||
localChunks.push(localHeader, body);
|
||||
centralChunks.push(centralHeader);
|
||||
localOffset += localHeader.length + body.length;
|
||||
entryCount += 1;
|
||||
}
|
||||
|
||||
const centralDirectoryLength = centralChunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const archive = new Uint8Array(
|
||||
localChunks.reduce((sum, chunk) => sum + chunk.length, 0) + centralDirectoryLength + 22,
|
||||
);
|
||||
let offset = 0;
|
||||
for (const chunk of localChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
const centralDirectoryOffset = offset;
|
||||
for (const chunk of centralChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
writeUint32(archive, offset, 0x06054b50);
|
||||
writeUint16(archive, offset + 8, entryCount);
|
||||
writeUint16(archive, offset + 10, entryCount);
|
||||
writeUint32(archive, offset + 12, centralDirectoryLength);
|
||||
writeUint32(archive, offset + 16, centralDirectoryOffset);
|
||||
|
||||
return archive;
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { ApiRequestError, PaperclipApiClient } from "../client/http.js";
|
||||
import { ApiConnectionError, ApiRequestError, PaperclipApiClient } from "../client/http.js";
|
||||
|
||||
describe("PaperclipApiClient", () => {
|
||||
afterEach(() => {
|
||||
@@ -58,4 +58,49 @@ describe("PaperclipApiClient", () => {
|
||||
details: { issueId: "1" },
|
||||
} satisfies Partial<ApiRequestError>);
|
||||
});
|
||||
|
||||
it("throws ApiConnectionError with recovery guidance when fetch fails", async () => {
|
||||
const fetchMock = vi.fn().mockRejectedValue(new TypeError("fetch failed"));
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
|
||||
const client = new PaperclipApiClient({ apiBase: "http://localhost:3100" });
|
||||
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toBeInstanceOf(ApiConnectionError);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toMatchObject({
|
||||
url: "http://localhost:3100/api/companies/import/preview",
|
||||
method: "POST",
|
||||
causeMessage: "fetch failed",
|
||||
} satisfies Partial<ApiConnectionError>);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/Could not reach the Paperclip API\./,
|
||||
);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/curl http:\/\/localhost:3100\/api\/health/,
|
||||
);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/pnpm dev|pnpm paperclipai run/,
|
||||
);
|
||||
});
|
||||
|
||||
it("retries once after interactive auth recovery", async () => {
|
||||
const fetchMock = vi
|
||||
.fn()
|
||||
.mockResolvedValueOnce(new Response(JSON.stringify({ error: "Board access required" }), { status: 403 }))
|
||||
.mockResolvedValueOnce(new Response(JSON.stringify({ ok: true }), { status: 200 }));
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
|
||||
const recoverAuth = vi.fn().mockResolvedValue("board-token-123");
|
||||
const client = new PaperclipApiClient({
|
||||
apiBase: "http://localhost:3100",
|
||||
recoverAuth,
|
||||
});
|
||||
|
||||
const result = await client.post<{ ok: boolean }>("/api/test", { hello: "world" });
|
||||
|
||||
expect(result).toEqual({ ok: true });
|
||||
expect(recoverAuth).toHaveBeenCalledOnce();
|
||||
expect(fetchMock).toHaveBeenCalledTimes(2);
|
||||
const retryHeaders = fetchMock.mock.calls[1]?.[1]?.headers as Record<string, string>;
|
||||
expect(retryHeaders.authorization).toBe("Bearer board-token-123");
|
||||
});
|
||||
});
|
||||
|
||||
62
cli/src/__tests__/network-bind.test.ts
Normal file
62
cli/src/__tests__/network-bind.test.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveRuntimeBind, validateConfiguredBindMode } from "@paperclipai/shared";
|
||||
import { buildPresetServerConfig } from "../config/server-bind.js";
|
||||
|
||||
describe("network bind helpers", () => {
|
||||
it("rejects non-loopback bind modes in local_trusted", () => {
|
||||
expect(
|
||||
validateConfiguredBindMode({
|
||||
deploymentMode: "local_trusted",
|
||||
deploymentExposure: "private",
|
||||
bind: "lan",
|
||||
host: "0.0.0.0",
|
||||
}),
|
||||
).toContain("local_trusted requires server.bind=loopback");
|
||||
});
|
||||
|
||||
it("resolves tailnet bind using the detected tailscale address", () => {
|
||||
const resolved = resolveRuntimeBind({
|
||||
bind: "tailnet",
|
||||
host: "127.0.0.1",
|
||||
tailnetBindHost: "100.64.0.8",
|
||||
});
|
||||
|
||||
expect(resolved.errors).toEqual([]);
|
||||
expect(resolved.host).toBe("100.64.0.8");
|
||||
});
|
||||
|
||||
it("requires a custom bind host when bind=custom", () => {
|
||||
const resolved = resolveRuntimeBind({
|
||||
bind: "custom",
|
||||
host: "127.0.0.1",
|
||||
});
|
||||
|
||||
expect(resolved.errors).toContain("server.customBindHost is required when server.bind=custom");
|
||||
});
|
||||
|
||||
it("stores the detected tailscale address for tailnet presets", () => {
|
||||
process.env.PAPERCLIP_TAILNET_BIND_HOST = "100.64.0.8";
|
||||
|
||||
const preset = buildPresetServerConfig("tailnet", {
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
});
|
||||
|
||||
expect(preset.server.host).toBe("100.64.0.8");
|
||||
|
||||
delete process.env.PAPERCLIP_TAILNET_BIND_HOST;
|
||||
});
|
||||
|
||||
it("falls back to loopback when no tailscale address is available for tailnet presets", () => {
|
||||
delete process.env.PAPERCLIP_TAILNET_BIND_HOST;
|
||||
|
||||
const preset = buildPresetServerConfig("tailnet", {
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
});
|
||||
|
||||
expect(preset.server.host).toBe("127.0.0.1");
|
||||
});
|
||||
});
|
||||
166
cli/src/__tests__/onboard.test.ts
Normal file
166
cli/src/__tests__/onboard.test.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
import { onboard } from "../commands/onboard.js";
|
||||
import type { PaperclipConfig } from "../config/schema.js";
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
|
||||
function createExistingConfigFixture() {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-onboard-"));
|
||||
const runtimeRoot = path.join(root, "runtime");
|
||||
const configPath = path.join(root, ".paperclip", "config.json");
|
||||
const config: PaperclipConfig = {
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: "2026-03-29T00:00:00.000Z",
|
||||
source: "configure",
|
||||
},
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(runtimeRoot, "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(runtimeRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(runtimeRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(runtimeRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(runtimeRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, { mode: 0o600 });
|
||||
|
||||
return { configPath, configText: fs.readFileSync(configPath, "utf8") };
|
||||
}
|
||||
|
||||
function createFreshConfigPath() {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-onboard-fresh-"));
|
||||
return path.join(root, ".paperclip", "config.json");
|
||||
}
|
||||
|
||||
describe("onboard", () => {
|
||||
beforeEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
delete process.env.PAPERCLIP_AGENT_JWT_SECRET;
|
||||
delete process.env.PAPERCLIP_SECRETS_MASTER_KEY;
|
||||
delete process.env.PAPERCLIP_SECRETS_MASTER_KEY_FILE;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
});
|
||||
|
||||
it("preserves an existing config when rerun without flags", async () => {
|
||||
const fixture = createExistingConfigFixture();
|
||||
|
||||
await onboard({ config: fixture.configPath });
|
||||
|
||||
expect(fs.readFileSync(fixture.configPath, "utf8")).toBe(fixture.configText);
|
||||
expect(fs.existsSync(`${fixture.configPath}.backup`)).toBe(false);
|
||||
expect(fs.existsSync(path.join(path.dirname(fixture.configPath), ".env"))).toBe(true);
|
||||
});
|
||||
|
||||
it("preserves an existing config when rerun with --yes", async () => {
|
||||
const fixture = createExistingConfigFixture();
|
||||
|
||||
await onboard({ config: fixture.configPath, yes: true, invokedByRun: true });
|
||||
|
||||
expect(fs.readFileSync(fixture.configPath, "utf8")).toBe(fixture.configText);
|
||||
expect(fs.existsSync(`${fixture.configPath}.backup`)).toBe(false);
|
||||
expect(fs.existsSync(path.join(path.dirname(fixture.configPath), ".env"))).toBe(true);
|
||||
});
|
||||
|
||||
it("keeps --yes onboarding on local trusted loopback defaults", async () => {
|
||||
const configPath = createFreshConfigPath();
|
||||
process.env.HOST = "0.0.0.0";
|
||||
process.env.PAPERCLIP_BIND = "lan";
|
||||
|
||||
await onboard({ config: configPath, yes: true, invokedByRun: true });
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(configPath, "utf8")) as PaperclipConfig;
|
||||
expect(raw.server.deploymentMode).toBe("local_trusted");
|
||||
expect(raw.server.exposure).toBe("private");
|
||||
expect(raw.server.bind).toBe("loopback");
|
||||
expect(raw.server.host).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("supports authenticated/private quickstart bind presets", async () => {
|
||||
const configPath = createFreshConfigPath();
|
||||
process.env.PAPERCLIP_TAILNET_BIND_HOST = "100.64.0.8";
|
||||
|
||||
await onboard({ config: configPath, yes: true, invokedByRun: true, bind: "tailnet" });
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(configPath, "utf8")) as PaperclipConfig;
|
||||
expect(raw.server.deploymentMode).toBe("authenticated");
|
||||
expect(raw.server.exposure).toBe("private");
|
||||
expect(raw.server.bind).toBe("tailnet");
|
||||
expect(raw.server.host).toBe("100.64.0.8");
|
||||
});
|
||||
|
||||
it("keeps tailnet quickstart on loopback until tailscale is available", async () => {
|
||||
const configPath = createFreshConfigPath();
|
||||
delete process.env.PAPERCLIP_TAILNET_BIND_HOST;
|
||||
|
||||
await onboard({ config: configPath, yes: true, invokedByRun: true, bind: "tailnet" });
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(configPath, "utf8")) as PaperclipConfig;
|
||||
expect(raw.server.deploymentMode).toBe("authenticated");
|
||||
expect(raw.server.exposure).toBe("private");
|
||||
expect(raw.server.bind).toBe("tailnet");
|
||||
expect(raw.server.host).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("ignores deployment env overrides during --yes quickstart", async () => {
|
||||
const configPath = createFreshConfigPath();
|
||||
process.env.PAPERCLIP_DEPLOYMENT_MODE = "authenticated";
|
||||
|
||||
await onboard({ config: configPath, yes: true, invokedByRun: true });
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(configPath, "utf8")) as PaperclipConfig;
|
||||
expect(raw.server.deploymentMode).toBe("local_trusted");
|
||||
expect(raw.server.exposure).toBe("private");
|
||||
expect(raw.server.bind).toBe("loopback");
|
||||
expect(raw.server.host).toBe("127.0.0.1");
|
||||
});
|
||||
});
|
||||
249
cli/src/__tests__/routines.test.ts
Normal file
249
cli/src/__tests__/routines.test.ts
Normal file
@@ -0,0 +1,249 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
|
||||
import { eq } from "drizzle-orm";
|
||||
import {
|
||||
agents,
|
||||
companies,
|
||||
createDb,
|
||||
projects,
|
||||
routines,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { disableAllRoutinesInConfig } from "../commands/routines.js";
|
||||
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres routines CLI tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
function writeTestConfig(configPath: string, tempRoot: string, connectionString: string) {
|
||||
const config = {
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: new Date().toISOString(),
|
||||
source: "doctor" as const,
|
||||
},
|
||||
database: {
|
||||
mode: "postgres" as const,
|
||||
connectionString,
|
||||
embeddedPostgresDataDir: path.join(tempRoot, "embedded-db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: false,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(tempRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file" as const,
|
||||
logDir: path.join(tempRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted" as const,
|
||||
exposure: "private" as const,
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: false,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto" as const,
|
||||
disableSignUp: false,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk" as const,
|
||||
localDisk: {
|
||||
baseDir: path.join(tempRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted" as const,
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(tempRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
describeEmbeddedPostgres("disableAllRoutinesInConfig", () => {
|
||||
let db!: ReturnType<typeof createDb>;
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
let tempRoot = "";
|
||||
let configPath = "";
|
||||
|
||||
beforeAll(async () => {
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-routines-cli-db-");
|
||||
db = createDb(tempDb.connectionString);
|
||||
tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-routines-cli-config-"));
|
||||
configPath = path.join(tempRoot, "config.json");
|
||||
writeTestConfig(configPath, tempRoot, tempDb.connectionString);
|
||||
}, 20_000);
|
||||
|
||||
afterEach(async () => {
|
||||
await db.delete(routines);
|
||||
await db.delete(projects);
|
||||
await db.delete(agents);
|
||||
await db.delete(companies);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await tempDb?.cleanup();
|
||||
if (tempRoot) {
|
||||
rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("pauses only non-archived routines for the selected company", async () => {
|
||||
const companyId = randomUUID();
|
||||
const otherCompanyId = randomUUID();
|
||||
const projectId = randomUUID();
|
||||
const otherProjectId = randomUUID();
|
||||
const agentId = randomUUID();
|
||||
const otherAgentId = randomUUID();
|
||||
const activeRoutineId = randomUUID();
|
||||
const pausedRoutineId = randomUUID();
|
||||
const archivedRoutineId = randomUUID();
|
||||
const otherCompanyRoutineId = randomUUID();
|
||||
|
||||
await db.insert(companies).values([
|
||||
{
|
||||
id: companyId,
|
||||
name: "Paperclip",
|
||||
issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
},
|
||||
{
|
||||
id: otherCompanyId,
|
||||
name: "Other company",
|
||||
issuePrefix: `T${otherCompanyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
},
|
||||
]);
|
||||
|
||||
await db.insert(agents).values([
|
||||
{
|
||||
id: agentId,
|
||||
companyId,
|
||||
name: "Coder",
|
||||
adapterType: "process",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {},
|
||||
permissions: {},
|
||||
},
|
||||
{
|
||||
id: otherAgentId,
|
||||
companyId: otherCompanyId,
|
||||
name: "Other coder",
|
||||
adapterType: "process",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {},
|
||||
permissions: {},
|
||||
},
|
||||
]);
|
||||
|
||||
await db.insert(projects).values([
|
||||
{
|
||||
id: projectId,
|
||||
companyId,
|
||||
name: "Project",
|
||||
status: "in_progress",
|
||||
},
|
||||
{
|
||||
id: otherProjectId,
|
||||
companyId: otherCompanyId,
|
||||
name: "Other project",
|
||||
status: "in_progress",
|
||||
},
|
||||
]);
|
||||
|
||||
await db.insert(routines).values([
|
||||
{
|
||||
id: activeRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Active routine",
|
||||
status: "active",
|
||||
},
|
||||
{
|
||||
id: pausedRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Paused routine",
|
||||
status: "paused",
|
||||
},
|
||||
{
|
||||
id: archivedRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Archived routine",
|
||||
status: "archived",
|
||||
},
|
||||
{
|
||||
id: otherCompanyRoutineId,
|
||||
companyId: otherCompanyId,
|
||||
projectId: otherProjectId,
|
||||
assigneeAgentId: otherAgentId,
|
||||
title: "Other company routine",
|
||||
status: "active",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await disableAllRoutinesInConfig({
|
||||
config: configPath,
|
||||
companyId,
|
||||
});
|
||||
|
||||
expect(result).toMatchObject({
|
||||
companyId,
|
||||
totalRoutines: 3,
|
||||
pausedCount: 1,
|
||||
alreadyPausedCount: 1,
|
||||
archivedCount: 1,
|
||||
});
|
||||
|
||||
const companyRoutines = await db
|
||||
.select({
|
||||
id: routines.id,
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.companyId, companyId));
|
||||
const statusById = new Map(companyRoutines.map((routine) => [routine.id, routine.status]));
|
||||
|
||||
expect(statusById.get(activeRoutineId)).toBe("paused");
|
||||
expect(statusById.get(pausedRoutineId)).toBe("paused");
|
||||
expect(statusById.get(archivedRoutineId)).toBe("archived");
|
||||
|
||||
const otherCompanyRoutine = await db
|
||||
.select({
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.id, otherCompanyRoutineId));
|
||||
expect(otherCompanyRoutine[0]?.status).toBe("active");
|
||||
});
|
||||
});
|
||||
117
cli/src/__tests__/telemetry.test.ts
Normal file
117
cli/src/__tests__/telemetry.test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const CI_ENV_VARS = ["CI", "CONTINUOUS_INTEGRATION", "BUILD_NUMBER", "GITHUB_ACTIONS", "GITLAB_CI"];
|
||||
|
||||
function makeConfigPath(root: string, enabled: boolean): string {
|
||||
const configPath = path.join(root, ".paperclip", "config.json");
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, JSON.stringify({
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: "2026-03-31T00:00:00.000Z",
|
||||
source: "configure",
|
||||
},
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(root, "runtime", "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(root, "runtime", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(root, "runtime", "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(root, "runtime", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(root, "runtime", "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
}, null, 2));
|
||||
return configPath;
|
||||
}
|
||||
|
||||
describe("cli telemetry", () => {
|
||||
beforeEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
for (const key of CI_ENV_VARS) {
|
||||
delete process.env[key];
|
||||
}
|
||||
vi.stubGlobal("fetch", vi.fn(async () => ({ ok: true })));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
vi.unstubAllGlobals();
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
it("respects telemetry.enabled=false from the config file", async () => {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-cli-telemetry-"));
|
||||
const configPath = makeConfigPath(root, false);
|
||||
process.env.PAPERCLIP_HOME = path.join(root, "home");
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "telemetry-test";
|
||||
|
||||
const { initTelemetryFromConfigFile } = await import("../telemetry.js");
|
||||
const client = initTelemetryFromConfigFile(configPath);
|
||||
|
||||
expect(client).toBeNull();
|
||||
expect(fs.existsSync(path.join(root, "home", "instances", "telemetry-test", "telemetry", "state.json"))).toBe(false);
|
||||
});
|
||||
|
||||
it("creates telemetry state only after the first event is tracked", async () => {
|
||||
const root = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-cli-telemetry-"));
|
||||
process.env.PAPERCLIP_HOME = path.join(root, "home");
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "telemetry-test";
|
||||
|
||||
const { initTelemetry, flushTelemetry } = await import("../telemetry.js");
|
||||
const client = initTelemetry({ enabled: true });
|
||||
const statePath = path.join(root, "home", "instances", "telemetry-test", "telemetry", "state.json");
|
||||
|
||||
expect(client).not.toBeNull();
|
||||
expect(fs.existsSync(statePath)).toBe(false);
|
||||
|
||||
client!.track("install.started", { setupMode: "quickstart" });
|
||||
|
||||
expect(fs.existsSync(statePath)).toBe(true);
|
||||
|
||||
await flushTelemetry();
|
||||
});
|
||||
});
|
||||
492
cli/src/__tests__/worktree-merge-history.test.ts
Normal file
492
cli/src/__tests__/worktree-merge-history.test.ts
Normal file
@@ -0,0 +1,492 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { buildWorktreeMergePlan, parseWorktreeMergeScopes } from "../commands/worktree-merge-history-lib.js";
|
||||
|
||||
function makeIssue(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "issue-1",
|
||||
companyId: "company-1",
|
||||
projectId: null,
|
||||
projectWorkspaceId: null,
|
||||
goalId: "goal-1",
|
||||
parentId: null,
|
||||
title: "Issue",
|
||||
description: null,
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
assigneeAgentId: null,
|
||||
assigneeUserId: null,
|
||||
checkoutRunId: null,
|
||||
executionRunId: null,
|
||||
executionAgentNameKey: null,
|
||||
executionLockedAt: null,
|
||||
createdByAgentId: null,
|
||||
createdByUserId: "local-board",
|
||||
issueNumber: 1,
|
||||
identifier: "PAP-1",
|
||||
requestDepth: 0,
|
||||
billingCode: null,
|
||||
assigneeAdapterOverrides: null,
|
||||
executionWorkspaceId: null,
|
||||
executionWorkspacePreference: null,
|
||||
executionWorkspaceSettings: null,
|
||||
startedAt: null,
|
||||
completedAt: null,
|
||||
cancelledAt: null,
|
||||
hiddenAt: null,
|
||||
createdAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeComment(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "comment-1",
|
||||
companyId: "company-1",
|
||||
issueId: "issue-1",
|
||||
authorAgentId: null,
|
||||
authorUserId: "local-board",
|
||||
body: "hello",
|
||||
createdAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeIssueDocument(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "issue-document-1",
|
||||
companyId: "company-1",
|
||||
issueId: "issue-1",
|
||||
documentId: "document-1",
|
||||
key: "plan",
|
||||
linkCreatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
linkUpdatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
title: "Plan",
|
||||
format: "markdown",
|
||||
latestBody: "# Plan",
|
||||
latestRevisionId: "revision-1",
|
||||
latestRevisionNumber: 1,
|
||||
createdByAgentId: null,
|
||||
createdByUserId: "local-board",
|
||||
updatedByAgentId: null,
|
||||
updatedByUserId: "local-board",
|
||||
documentCreatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
documentUpdatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeDocumentRevision(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "revision-1",
|
||||
companyId: "company-1",
|
||||
documentId: "document-1",
|
||||
revisionNumber: 1,
|
||||
body: "# Plan",
|
||||
changeSummary: null,
|
||||
createdByAgentId: null,
|
||||
createdByUserId: "local-board",
|
||||
createdAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeAttachment(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "attachment-1",
|
||||
companyId: "company-1",
|
||||
issueId: "issue-1",
|
||||
issueCommentId: null,
|
||||
assetId: "asset-1",
|
||||
provider: "local_disk",
|
||||
objectKey: "company-1/issues/issue-1/2026/03/20/asset.png",
|
||||
contentType: "image/png",
|
||||
byteSize: 12,
|
||||
sha256: "deadbeef",
|
||||
originalFilename: "asset.png",
|
||||
createdByAgentId: null,
|
||||
createdByUserId: "local-board",
|
||||
assetCreatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
assetUpdatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
attachmentCreatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
attachmentUpdatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeProject(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "project-1",
|
||||
companyId: "company-1",
|
||||
goalId: null,
|
||||
name: "Project",
|
||||
description: null,
|
||||
status: "in_progress",
|
||||
leadAgentId: null,
|
||||
targetDate: null,
|
||||
color: "#22c55e",
|
||||
pauseReason: null,
|
||||
pausedAt: null,
|
||||
executionWorkspacePolicy: null,
|
||||
archivedAt: null,
|
||||
createdAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
function makeProjectWorkspace(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "workspace-1",
|
||||
companyId: "company-1",
|
||||
projectId: "project-1",
|
||||
name: "Workspace",
|
||||
sourceType: "local_path",
|
||||
cwd: "/tmp/project",
|
||||
repoUrl: "https://github.com/example/project.git",
|
||||
repoRef: "main",
|
||||
defaultRef: "main",
|
||||
visibility: "default",
|
||||
setupCommand: null,
|
||||
cleanupCommand: null,
|
||||
remoteProvider: null,
|
||||
remoteWorkspaceRef: null,
|
||||
sharedWorkspaceKey: null,
|
||||
metadata: null,
|
||||
isPrimary: true,
|
||||
createdAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00.000Z"),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
describe("worktree merge history planner", () => {
|
||||
it("parses default scopes", () => {
|
||||
expect(parseWorktreeMergeScopes(undefined)).toEqual(["issues", "comments"]);
|
||||
expect(parseWorktreeMergeScopes("issues")).toEqual(["issues"]);
|
||||
});
|
||||
|
||||
it("dedupes nested worktree issues by preserved source uuid", () => {
|
||||
const sharedIssue = makeIssue({ id: "issue-a", identifier: "PAP-10", title: "Shared" });
|
||||
const branchOneIssue = makeIssue({
|
||||
id: "issue-b",
|
||||
identifier: "PAP-22",
|
||||
title: "Branch one issue",
|
||||
createdAt: new Date("2026-03-20T01:00:00.000Z"),
|
||||
});
|
||||
const branchTwoIssue = makeIssue({
|
||||
id: "issue-c",
|
||||
identifier: "PAP-23",
|
||||
title: "Branch two issue",
|
||||
createdAt: new Date("2026-03-20T02:00:00.000Z"),
|
||||
});
|
||||
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 500,
|
||||
scopes: ["issues", "comments"],
|
||||
sourceIssues: [sharedIssue, branchOneIssue, branchTwoIssue],
|
||||
targetIssues: [sharedIssue, branchOneIssue],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
});
|
||||
|
||||
expect(plan.counts.issuesToInsert).toBe(1);
|
||||
expect(plan.issuePlans.filter((item) => item.action === "insert").map((item) => item.source.id)).toEqual(["issue-c"]);
|
||||
expect(plan.issuePlans.find((item) => item.source.id === "issue-c" && item.action === "insert")).toMatchObject({
|
||||
previewIdentifier: "PAP-501",
|
||||
});
|
||||
});
|
||||
|
||||
it("clears missing references and coerces in_progress without an assignee", () => {
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues"],
|
||||
sourceIssues: [
|
||||
makeIssue({
|
||||
id: "issue-x",
|
||||
identifier: "PAP-99",
|
||||
status: "in_progress",
|
||||
assigneeAgentId: "agent-missing",
|
||||
projectId: "project-missing",
|
||||
projectWorkspaceId: "workspace-missing",
|
||||
goalId: "goal-missing",
|
||||
}),
|
||||
],
|
||||
targetIssues: [],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [],
|
||||
});
|
||||
|
||||
const insert = plan.issuePlans[0] as any;
|
||||
expect(insert.targetStatus).toBe("todo");
|
||||
expect(insert.targetAssigneeAgentId).toBeNull();
|
||||
expect(insert.targetProjectId).toBeNull();
|
||||
expect(insert.targetProjectWorkspaceId).toBeNull();
|
||||
expect(insert.targetGoalId).toBeNull();
|
||||
expect(insert.adjustments).toEqual([
|
||||
"clear_assignee_agent",
|
||||
"clear_project",
|
||||
"clear_project_workspace",
|
||||
"clear_goal",
|
||||
"coerce_in_progress_to_todo",
|
||||
]);
|
||||
});
|
||||
|
||||
it("applies an explicit project mapping override instead of clearing the project", () => {
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues"],
|
||||
sourceIssues: [
|
||||
makeIssue({
|
||||
id: "issue-project-map",
|
||||
identifier: "PAP-77",
|
||||
projectId: "source-project-1",
|
||||
projectWorkspaceId: "source-workspace-1",
|
||||
}),
|
||||
],
|
||||
targetIssues: [],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
targetAgents: [],
|
||||
targetProjects: [{ id: "target-project-1", name: "Mapped project", status: "in_progress" }] as any,
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
projectIdOverrides: {
|
||||
"source-project-1": "target-project-1",
|
||||
},
|
||||
});
|
||||
|
||||
const insert = plan.issuePlans[0] as any;
|
||||
expect(insert.targetProjectId).toBe("target-project-1");
|
||||
expect(insert.projectResolution).toBe("mapped");
|
||||
expect(insert.mappedProjectName).toBe("Mapped project");
|
||||
expect(insert.targetProjectWorkspaceId).toBeNull();
|
||||
expect(insert.adjustments).toEqual(["clear_project_workspace"]);
|
||||
});
|
||||
|
||||
it("plans selected project imports and preserves project workspace links", () => {
|
||||
const sourceProject = makeProject({
|
||||
id: "source-project-1",
|
||||
name: "Paperclip Evals",
|
||||
goalId: "goal-1",
|
||||
});
|
||||
const sourceWorkspace = makeProjectWorkspace({
|
||||
id: "source-workspace-1",
|
||||
projectId: "source-project-1",
|
||||
cwd: "/Users/dotta/paperclip-evals",
|
||||
repoUrl: "https://github.com/paperclipai/paperclip-evals.git",
|
||||
});
|
||||
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues"],
|
||||
sourceIssues: [
|
||||
makeIssue({
|
||||
id: "issue-project-import",
|
||||
identifier: "PAP-88",
|
||||
projectId: "source-project-1",
|
||||
projectWorkspaceId: "source-workspace-1",
|
||||
}),
|
||||
],
|
||||
targetIssues: [],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
sourceProjects: [sourceProject],
|
||||
sourceProjectWorkspaces: [sourceWorkspace],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
importProjectIds: ["source-project-1"],
|
||||
});
|
||||
|
||||
expect(plan.counts.projectsToImport).toBe(1);
|
||||
expect(plan.projectImports[0]).toMatchObject({
|
||||
source: { id: "source-project-1", name: "Paperclip Evals" },
|
||||
targetGoalId: "goal-1",
|
||||
workspaces: [{ id: "source-workspace-1" }],
|
||||
});
|
||||
|
||||
const insert = plan.issuePlans[0] as any;
|
||||
expect(insert.targetProjectId).toBe("source-project-1");
|
||||
expect(insert.targetProjectWorkspaceId).toBe("source-workspace-1");
|
||||
expect(insert.projectResolution).toBe("imported");
|
||||
expect(insert.mappedProjectName).toBe("Paperclip Evals");
|
||||
expect(insert.adjustments).toEqual([]);
|
||||
});
|
||||
|
||||
it("imports comments onto shared or newly imported issues while skipping existing comments", () => {
|
||||
const sharedIssue = makeIssue({ id: "issue-a", identifier: "PAP-10" });
|
||||
const newIssue = makeIssue({
|
||||
id: "issue-b",
|
||||
identifier: "PAP-11",
|
||||
createdAt: new Date("2026-03-20T01:00:00.000Z"),
|
||||
});
|
||||
const existingComment = makeComment({ id: "comment-existing", issueId: "issue-a" });
|
||||
const sharedIssueComment = makeComment({ id: "comment-shared", issueId: "issue-a" });
|
||||
const newIssueComment = makeComment({
|
||||
id: "comment-new-issue",
|
||||
issueId: "issue-b",
|
||||
authorAgentId: "missing-agent",
|
||||
createdAt: new Date("2026-03-20T01:05:00.000Z"),
|
||||
});
|
||||
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues", "comments"],
|
||||
sourceIssues: [sharedIssue, newIssue],
|
||||
targetIssues: [sharedIssue],
|
||||
sourceComments: [existingComment, sharedIssueComment, newIssueComment],
|
||||
targetComments: [existingComment],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
});
|
||||
|
||||
expect(plan.counts.commentsToInsert).toBe(2);
|
||||
expect(plan.counts.commentsExisting).toBe(1);
|
||||
expect(plan.commentPlans.filter((item) => item.action === "insert").map((item) => item.source.id)).toEqual([
|
||||
"comment-shared",
|
||||
"comment-new-issue",
|
||||
]);
|
||||
expect(plan.adjustments.clear_author_agent).toBe(1);
|
||||
});
|
||||
|
||||
it("merges document revisions onto an existing shared document and renumbers conflicts", () => {
|
||||
const sharedIssue = makeIssue({ id: "issue-a", identifier: "PAP-10" });
|
||||
const sourceDocument = makeIssueDocument({
|
||||
issueId: "issue-a",
|
||||
documentId: "document-a",
|
||||
latestBody: "# Branch plan",
|
||||
latestRevisionId: "revision-branch-2",
|
||||
latestRevisionNumber: 2,
|
||||
documentUpdatedAt: new Date("2026-03-20T02:00:00.000Z"),
|
||||
linkUpdatedAt: new Date("2026-03-20T02:00:00.000Z"),
|
||||
});
|
||||
const targetDocument = makeIssueDocument({
|
||||
issueId: "issue-a",
|
||||
documentId: "document-a",
|
||||
latestBody: "# Main plan",
|
||||
latestRevisionId: "revision-main-2",
|
||||
latestRevisionNumber: 2,
|
||||
documentUpdatedAt: new Date("2026-03-20T01:00:00.000Z"),
|
||||
linkUpdatedAt: new Date("2026-03-20T01:00:00.000Z"),
|
||||
});
|
||||
const sourceRevisionOne = makeDocumentRevision({ documentId: "document-a", id: "revision-1" });
|
||||
const sourceRevisionTwo = makeDocumentRevision({
|
||||
documentId: "document-a",
|
||||
id: "revision-branch-2",
|
||||
revisionNumber: 2,
|
||||
body: "# Branch plan",
|
||||
createdAt: new Date("2026-03-20T02:00:00.000Z"),
|
||||
});
|
||||
const targetRevisionOne = makeDocumentRevision({ documentId: "document-a", id: "revision-1" });
|
||||
const targetRevisionTwo = makeDocumentRevision({
|
||||
documentId: "document-a",
|
||||
id: "revision-main-2",
|
||||
revisionNumber: 2,
|
||||
body: "# Main plan",
|
||||
createdAt: new Date("2026-03-20T01:00:00.000Z"),
|
||||
});
|
||||
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues", "comments"],
|
||||
sourceIssues: [sharedIssue],
|
||||
targetIssues: [sharedIssue],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
sourceDocuments: [sourceDocument],
|
||||
targetDocuments: [targetDocument],
|
||||
sourceDocumentRevisions: [sourceRevisionOne, sourceRevisionTwo],
|
||||
targetDocumentRevisions: [targetRevisionOne, targetRevisionTwo],
|
||||
sourceAttachments: [],
|
||||
targetAttachments: [],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
});
|
||||
|
||||
expect(plan.counts.documentsToMerge).toBe(1);
|
||||
expect(plan.counts.documentRevisionsToInsert).toBe(1);
|
||||
expect(plan.documentPlans[0]).toMatchObject({
|
||||
action: "merge_existing",
|
||||
latestRevisionId: "revision-branch-2",
|
||||
latestRevisionNumber: 3,
|
||||
});
|
||||
const mergePlan = plan.documentPlans[0] as any;
|
||||
expect(mergePlan.revisionsToInsert).toHaveLength(1);
|
||||
expect(mergePlan.revisionsToInsert[0]).toMatchObject({
|
||||
source: { id: "revision-branch-2" },
|
||||
targetRevisionNumber: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it("imports attachments while clearing missing comment and author references", () => {
|
||||
const sharedIssue = makeIssue({ id: "issue-a", identifier: "PAP-10" });
|
||||
const attachment = makeAttachment({
|
||||
issueId: "issue-a",
|
||||
issueCommentId: "comment-missing",
|
||||
createdByAgentId: "agent-missing",
|
||||
});
|
||||
|
||||
const plan = buildWorktreeMergePlan({
|
||||
companyId: "company-1",
|
||||
companyName: "Paperclip",
|
||||
issuePrefix: "PAP",
|
||||
previewIssueCounterStart: 10,
|
||||
scopes: ["issues"],
|
||||
sourceIssues: [sharedIssue],
|
||||
targetIssues: [sharedIssue],
|
||||
sourceComments: [],
|
||||
targetComments: [],
|
||||
sourceDocuments: [],
|
||||
targetDocuments: [],
|
||||
sourceDocumentRevisions: [],
|
||||
targetDocumentRevisions: [],
|
||||
sourceAttachments: [attachment],
|
||||
targetAttachments: [],
|
||||
targetAgents: [],
|
||||
targetProjects: [],
|
||||
targetProjectWorkspaces: [],
|
||||
targetGoals: [{ id: "goal-1" }] as any,
|
||||
});
|
||||
|
||||
expect(plan.counts.attachmentsToInsert).toBe(1);
|
||||
expect(plan.adjustments.clear_attachment_agent).toBe(1);
|
||||
expect(plan.attachmentPlans[0]).toMatchObject({
|
||||
action: "insert",
|
||||
targetIssueCommentId: null,
|
||||
targetCreatedByAgentId: null,
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -2,16 +2,31 @@ import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
agents,
|
||||
companies,
|
||||
createDb,
|
||||
projects,
|
||||
routines,
|
||||
routineTriggers,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
copyGitHooksToWorktreeGitDir,
|
||||
copySeededSecretsKey,
|
||||
pauseSeededScheduledRoutines,
|
||||
readSourceAttachmentBody,
|
||||
rebindWorkspaceCwd,
|
||||
resolveSourceConfigPath,
|
||||
resolveWorktreeReseedSource,
|
||||
resolveWorktreeReseedTargetPaths,
|
||||
resolveGitWorktreeAddArgs,
|
||||
resolveWorktreeMakeTargetPath,
|
||||
worktreeRepairCommand,
|
||||
worktreeInitCommand,
|
||||
worktreeMakeCommand,
|
||||
worktreeReseedCommand,
|
||||
} from "../commands/worktree.js";
|
||||
import {
|
||||
buildWorktreeConfig,
|
||||
@@ -24,9 +39,21 @@ import {
|
||||
sanitizeWorktreeInstanceId,
|
||||
} from "../commands/worktree-lib.js";
|
||||
import type { PaperclipConfig } from "../config/schema.js";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
|
||||
const ORIGINAL_CWD = process.cwd();
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres worktree CLI tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(ORIGINAL_CWD);
|
||||
@@ -74,6 +101,9 @@ function buildSourceConfig(): PaperclipConfig {
|
||||
publicBaseUrl: "http://127.0.0.1:3100",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
@@ -195,6 +225,43 @@ describe("worktree helpers", () => {
|
||||
expect(formatShellExports(env)).toContain("export PAPERCLIP_INSTANCE_ID='feature-worktree-support'");
|
||||
});
|
||||
|
||||
it("falls back across storage roots before skipping a missing attachment object", async () => {
|
||||
const missingErr = Object.assign(new Error("missing"), { code: "ENOENT" });
|
||||
const expected = Buffer.from("image-bytes");
|
||||
await expect(
|
||||
readSourceAttachmentBody(
|
||||
[
|
||||
{
|
||||
getObject: vi.fn().mockRejectedValue(missingErr),
|
||||
},
|
||||
{
|
||||
getObject: vi.fn().mockResolvedValue(expected),
|
||||
},
|
||||
],
|
||||
"company-1",
|
||||
"company-1/issues/issue-1/missing.png",
|
||||
),
|
||||
).resolves.toEqual(expected);
|
||||
});
|
||||
|
||||
it("returns null when an attachment object is missing from every lookup storage", async () => {
|
||||
const missingErr = Object.assign(new Error("missing"), { code: "ENOENT" });
|
||||
await expect(
|
||||
readSourceAttachmentBody(
|
||||
[
|
||||
{
|
||||
getObject: vi.fn().mockRejectedValue(missingErr),
|
||||
},
|
||||
{
|
||||
getObject: vi.fn().mockRejectedValue(Object.assign(new Error("missing"), { status: 404 })),
|
||||
},
|
||||
],
|
||||
"company-1",
|
||||
"company-1/issues/issue-1/missing.png",
|
||||
),
|
||||
).resolves.toBeNull();
|
||||
});
|
||||
|
||||
it("generates vivid worktree colors as hex", () => {
|
||||
expect(generateWorktreeColor()).toMatch(/^#[0-9a-f]{6}$/);
|
||||
});
|
||||
@@ -306,6 +373,87 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("avoids ports already claimed by sibling worktree instance configs", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-claimed-ports-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const homeDir = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const siblingInstanceRoot = path.join(homeDir, "instances", "existing-worktree");
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
fs.mkdirSync(siblingInstanceRoot, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(siblingInstanceRoot, "config.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildSourceConfig(),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(siblingInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(siblingInstanceRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(siblingInstanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "authenticated",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: ["localhost"],
|
||||
serveUi: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(siblingInstanceRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(siblingInstanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
);
|
||||
|
||||
process.chdir(repoRoot);
|
||||
await worktreeInitCommand({
|
||||
seed: false,
|
||||
fromConfig: path.join(tempRoot, "missing", "config.json"),
|
||||
home: homeDir,
|
||||
});
|
||||
|
||||
const config = JSON.parse(fs.readFileSync(path.join(repoRoot, ".paperclip", "config.json"), "utf8"));
|
||||
expect(config.server.port).toBeGreaterThan(3101);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(54330);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(config.server.port);
|
||||
expect(config.database.embeddedPostgresPort).toBeGreaterThan(54330);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("defaults the seed source config to the current repo-local Paperclip config", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-source-config-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
@@ -359,6 +507,234 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("requires an explicit reseed source", () => {
|
||||
expect(() => resolveWorktreeReseedSource({})).toThrow(
|
||||
"Pass --from <worktree> or --from-config/--from-instance explicitly so the reseed source is unambiguous.",
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects mixed reseed source selectors", () => {
|
||||
expect(() => resolveWorktreeReseedSource({
|
||||
from: "current",
|
||||
fromInstance: "default",
|
||||
})).toThrow(
|
||||
"Use either --from <worktree> or --from-config/--from-data-dir/--from-instance, not both.",
|
||||
);
|
||||
});
|
||||
|
||||
it("derives worktree reseed target paths from the adjacent env file", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-reseed-target-"));
|
||||
const worktreeRoot = path.join(tempRoot, "repo");
|
||||
const configPath = path.join(worktreeRoot, ".paperclip", "config.json");
|
||||
const envPath = path.join(worktreeRoot, ".paperclip", ".env");
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, JSON.stringify(buildSourceConfig()), "utf8");
|
||||
fs.writeFileSync(
|
||||
envPath,
|
||||
[
|
||||
"PAPERCLIP_HOME=/tmp/paperclip-worktrees",
|
||||
"PAPERCLIP_INSTANCE_ID=pap-1132-chat",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
expect(
|
||||
resolveWorktreeReseedTargetPaths({
|
||||
configPath,
|
||||
rootPath: worktreeRoot,
|
||||
}),
|
||||
).toMatchObject({
|
||||
cwd: worktreeRoot,
|
||||
homeDir: "/tmp/paperclip-worktrees",
|
||||
instanceId: "pap-1132-chat",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects reseed targets without worktree env metadata", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-reseed-target-missing-"));
|
||||
const worktreeRoot = path.join(tempRoot, "repo");
|
||||
const configPath = path.join(worktreeRoot, ".paperclip", "config.json");
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, JSON.stringify(buildSourceConfig()), "utf8");
|
||||
fs.writeFileSync(path.join(worktreeRoot, ".paperclip", ".env"), "", "utf8");
|
||||
|
||||
expect(() =>
|
||||
resolveWorktreeReseedTargetPaths({
|
||||
configPath,
|
||||
rootPath: worktreeRoot,
|
||||
})).toThrow("does not look like a worktree-local Paperclip instance");
|
||||
} finally {
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("reseed preserves the current worktree ports, instance id, and branding", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-reseed-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const sourceRoot = path.join(tempRoot, "source");
|
||||
const homeDir = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const currentInstanceId = "existing-worktree";
|
||||
const currentPaths = resolveWorktreeLocalPaths({
|
||||
cwd: repoRoot,
|
||||
homeDir,
|
||||
instanceId: currentInstanceId,
|
||||
});
|
||||
const sourcePaths = resolveWorktreeLocalPaths({
|
||||
cwd: sourceRoot,
|
||||
homeDir: path.join(tempRoot, ".paperclip-source"),
|
||||
instanceId: "default",
|
||||
});
|
||||
const originalCwd = process.cwd();
|
||||
const originalPaperclipConfig = process.env.PAPERCLIP_CONFIG;
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(currentPaths.configPath), { recursive: true });
|
||||
fs.mkdirSync(path.dirname(sourcePaths.configPath), { recursive: true });
|
||||
fs.mkdirSync(path.dirname(sourcePaths.secretsKeyFilePath), { recursive: true });
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
fs.mkdirSync(sourceRoot, { recursive: true });
|
||||
|
||||
const currentConfig = buildWorktreeConfig({
|
||||
sourceConfig: buildSourceConfig(),
|
||||
paths: currentPaths,
|
||||
serverPort: 3114,
|
||||
databasePort: 54341,
|
||||
});
|
||||
const sourceConfig = buildWorktreeConfig({
|
||||
sourceConfig: buildSourceConfig(),
|
||||
paths: sourcePaths,
|
||||
serverPort: 3200,
|
||||
databasePort: 54400,
|
||||
});
|
||||
fs.writeFileSync(currentPaths.configPath, JSON.stringify(currentConfig, null, 2), "utf8");
|
||||
fs.writeFileSync(sourcePaths.configPath, JSON.stringify(sourceConfig, null, 2), "utf8");
|
||||
fs.writeFileSync(sourcePaths.secretsKeyFilePath, "source-secret", "utf8");
|
||||
fs.writeFileSync(
|
||||
currentPaths.envPath,
|
||||
[
|
||||
`PAPERCLIP_HOME=${homeDir}`,
|
||||
`PAPERCLIP_INSTANCE_ID=${currentInstanceId}`,
|
||||
"PAPERCLIP_WORKTREE_NAME=existing-name",
|
||||
"PAPERCLIP_WORKTREE_COLOR=\"#112233\"",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
process.chdir(repoRoot);
|
||||
|
||||
await worktreeReseedCommand({
|
||||
fromConfig: sourcePaths.configPath,
|
||||
yes: true,
|
||||
});
|
||||
|
||||
const rewrittenConfig = JSON.parse(fs.readFileSync(currentPaths.configPath, "utf8"));
|
||||
const rewrittenEnv = fs.readFileSync(currentPaths.envPath, "utf8");
|
||||
|
||||
expect(rewrittenConfig.server.port).toBe(3114);
|
||||
expect(rewrittenConfig.database.embeddedPostgresPort).toBe(54341);
|
||||
expect(rewrittenConfig.database.embeddedPostgresDataDir).toBe(currentPaths.embeddedPostgresDataDir);
|
||||
expect(rewrittenEnv).toContain(`PAPERCLIP_INSTANCE_ID=${currentInstanceId}`);
|
||||
expect(rewrittenEnv).toContain("PAPERCLIP_WORKTREE_NAME=existing-name");
|
||||
expect(rewrittenEnv).toContain("PAPERCLIP_WORKTREE_COLOR=\"#112233\"");
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
if (originalPaperclipConfig === undefined) {
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
} else {
|
||||
process.env.PAPERCLIP_CONFIG = originalPaperclipConfig;
|
||||
}
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
}, 20_000);
|
||||
|
||||
it("restores the current worktree config and instance data if reseed fails", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-reseed-rollback-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const sourceRoot = path.join(tempRoot, "source");
|
||||
const homeDir = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const currentInstanceId = "rollback-worktree";
|
||||
const currentPaths = resolveWorktreeLocalPaths({
|
||||
cwd: repoRoot,
|
||||
homeDir,
|
||||
instanceId: currentInstanceId,
|
||||
});
|
||||
const sourcePaths = resolveWorktreeLocalPaths({
|
||||
cwd: sourceRoot,
|
||||
homeDir: path.join(tempRoot, ".paperclip-source"),
|
||||
instanceId: "default",
|
||||
});
|
||||
const originalCwd = process.cwd();
|
||||
const originalPaperclipConfig = process.env.PAPERCLIP_CONFIG;
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(currentPaths.configPath), { recursive: true });
|
||||
fs.mkdirSync(path.dirname(sourcePaths.configPath), { recursive: true });
|
||||
fs.mkdirSync(currentPaths.instanceRoot, { recursive: true });
|
||||
fs.mkdirSync(path.dirname(sourcePaths.secretsKeyFilePath), { recursive: true });
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
fs.mkdirSync(sourceRoot, { recursive: true });
|
||||
|
||||
const currentConfig = buildWorktreeConfig({
|
||||
sourceConfig: buildSourceConfig(),
|
||||
paths: currentPaths,
|
||||
serverPort: 3114,
|
||||
databasePort: 54341,
|
||||
});
|
||||
const sourceConfig = {
|
||||
...buildSourceConfig(),
|
||||
database: {
|
||||
mode: "postgres",
|
||||
connectionString: "",
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: sourcePaths.secretsKeyFilePath,
|
||||
},
|
||||
},
|
||||
} as PaperclipConfig;
|
||||
|
||||
fs.writeFileSync(currentPaths.configPath, JSON.stringify(currentConfig, null, 2), "utf8");
|
||||
fs.writeFileSync(currentPaths.envPath, `PAPERCLIP_HOME=${homeDir}\nPAPERCLIP_INSTANCE_ID=${currentInstanceId}\n`, "utf8");
|
||||
fs.writeFileSync(path.join(currentPaths.instanceRoot, "marker.txt"), "keep me", "utf8");
|
||||
fs.writeFileSync(sourcePaths.configPath, JSON.stringify(sourceConfig, null, 2), "utf8");
|
||||
fs.writeFileSync(sourcePaths.secretsKeyFilePath, "source-secret", "utf8");
|
||||
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
process.chdir(repoRoot);
|
||||
|
||||
await expect(worktreeReseedCommand({
|
||||
fromConfig: sourcePaths.configPath,
|
||||
yes: true,
|
||||
})).rejects.toThrow("Source instance uses postgres mode but has no connection string");
|
||||
|
||||
const restoredConfig = JSON.parse(fs.readFileSync(currentPaths.configPath, "utf8"));
|
||||
const restoredEnv = fs.readFileSync(currentPaths.envPath, "utf8");
|
||||
const restoredMarker = fs.readFileSync(path.join(currentPaths.instanceRoot, "marker.txt"), "utf8");
|
||||
|
||||
expect(restoredConfig.server.port).toBe(3114);
|
||||
expect(restoredConfig.database.embeddedPostgresPort).toBe(54341);
|
||||
expect(restoredEnv).toContain(`PAPERCLIP_INSTANCE_ID=${currentInstanceId}`);
|
||||
expect(restoredMarker).toBe("keep me");
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
if (originalPaperclipConfig === undefined) {
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
} else {
|
||||
process.env.PAPERCLIP_CONFIG = originalPaperclipConfig;
|
||||
}
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("rebinds same-repo workspace paths onto the current worktree root", () => {
|
||||
expect(
|
||||
rebindWorkspaceCwd({
|
||||
@@ -469,4 +845,246 @@ describe("worktree helpers", () => {
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
}, 20_000);
|
||||
|
||||
it("no-ops on the primary checkout unless --branch is provided", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-repair-primary-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
execFileSync("git", ["init"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.email", "test@example.com"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.name", "Test User"], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.writeFileSync(path.join(repoRoot, "README.md"), "# temp\n", "utf8");
|
||||
execFileSync("git", ["add", "README.md"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["commit", "-m", "Initial commit"], { cwd: repoRoot, stdio: "ignore" });
|
||||
|
||||
process.chdir(repoRoot);
|
||||
await worktreeRepairCommand({});
|
||||
|
||||
expect(fs.existsSync(path.join(repoRoot, ".paperclip", "config.json"))).toBe(false);
|
||||
expect(fs.existsSync(path.join(repoRoot, ".paperclip", "worktrees"))).toBe(false);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("repairs the current linked worktree when Paperclip metadata is missing", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-repair-current-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const worktreePath = path.join(repoRoot, ".paperclip", "worktrees", "repair-me");
|
||||
const sourceConfigPath = path.join(tempRoot, "source-config.json");
|
||||
const worktreeHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const worktreePaths = resolveWorktreeLocalPaths({
|
||||
cwd: worktreePath,
|
||||
homeDir: worktreeHome,
|
||||
instanceId: sanitizeWorktreeInstanceId(path.basename(worktreePath)),
|
||||
});
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
execFileSync("git", ["init"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.email", "test@example.com"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.name", "Test User"], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.writeFileSync(path.join(repoRoot, "README.md"), "# temp\n", "utf8");
|
||||
execFileSync("git", ["add", "README.md"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["commit", "-m", "Initial commit"], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.mkdirSync(path.dirname(worktreePath), { recursive: true });
|
||||
execFileSync("git", ["worktree", "add", "-b", "repair-me", worktreePath, "HEAD"], {
|
||||
cwd: repoRoot,
|
||||
stdio: "ignore",
|
||||
});
|
||||
|
||||
fs.writeFileSync(sourceConfigPath, JSON.stringify(buildSourceConfig(), null, 2), "utf8");
|
||||
fs.mkdirSync(worktreePaths.instanceRoot, { recursive: true });
|
||||
fs.writeFileSync(path.join(worktreePaths.instanceRoot, "marker.txt"), "stale", "utf8");
|
||||
|
||||
process.chdir(worktreePath);
|
||||
await worktreeRepairCommand({
|
||||
fromConfig: sourceConfigPath,
|
||||
home: worktreeHome,
|
||||
noSeed: true,
|
||||
});
|
||||
|
||||
expect(fs.existsSync(path.join(worktreePath, ".paperclip", "config.json"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(worktreePath, ".paperclip", ".env"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(worktreePaths.instanceRoot, "marker.txt"))).toBe(false);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
}, 20_000);
|
||||
|
||||
it("creates and repairs a missing branch worktree when --branch is provided", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-repair-branch-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const sourceConfigPath = path.join(tempRoot, "source-config.json");
|
||||
const worktreeHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const originalCwd = process.cwd();
|
||||
const expectedWorktreePath = path.join(repoRoot, ".paperclip", "worktrees", "feature-repair-me");
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
execFileSync("git", ["init"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.email", "test@example.com"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["config", "user.name", "Test User"], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.writeFileSync(path.join(repoRoot, "README.md"), "# temp\n", "utf8");
|
||||
execFileSync("git", ["add", "README.md"], { cwd: repoRoot, stdio: "ignore" });
|
||||
execFileSync("git", ["commit", "-m", "Initial commit"], { cwd: repoRoot, stdio: "ignore" });
|
||||
fs.writeFileSync(sourceConfigPath, JSON.stringify(buildSourceConfig(), null, 2), "utf8");
|
||||
|
||||
process.chdir(repoRoot);
|
||||
await worktreeRepairCommand({
|
||||
branch: "feature/repair-me",
|
||||
fromConfig: sourceConfigPath,
|
||||
home: worktreeHome,
|
||||
noSeed: true,
|
||||
});
|
||||
|
||||
expect(fs.existsSync(path.join(expectedWorktreePath, ".git"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(expectedWorktreePath, ".paperclip", "config.json"))).toBe(true);
|
||||
expect(fs.existsSync(path.join(expectedWorktreePath, ".paperclip", ".env"))).toBe(true);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
}, 20_000);
|
||||
});
|
||||
|
||||
describeEmbeddedPostgres("pauseSeededScheduledRoutines", () => {
|
||||
it("pauses only routines with enabled schedule triggers", async () => {
|
||||
const tempDb = await startEmbeddedPostgresTestDatabase("paperclip-worktree-routines-");
|
||||
const db = createDb(tempDb.connectionString);
|
||||
const companyId = randomUUID();
|
||||
const projectId = randomUUID();
|
||||
const agentId = randomUUID();
|
||||
const activeScheduledRoutineId = randomUUID();
|
||||
const activeApiRoutineId = randomUUID();
|
||||
const pausedScheduledRoutineId = randomUUID();
|
||||
const archivedScheduledRoutineId = randomUUID();
|
||||
const disabledScheduleRoutineId = randomUUID();
|
||||
|
||||
try {
|
||||
await db.insert(companies).values({
|
||||
id: companyId,
|
||||
name: "Paperclip",
|
||||
issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
});
|
||||
await db.insert(agents).values({
|
||||
id: agentId,
|
||||
companyId,
|
||||
name: "Coder",
|
||||
adapterType: "process",
|
||||
adapterConfig: {},
|
||||
runtimeConfig: {},
|
||||
permissions: {},
|
||||
});
|
||||
await db.insert(projects).values({
|
||||
id: projectId,
|
||||
companyId,
|
||||
name: "Project",
|
||||
status: "in_progress",
|
||||
});
|
||||
await db.insert(routines).values([
|
||||
{
|
||||
id: activeScheduledRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Active scheduled",
|
||||
status: "active",
|
||||
},
|
||||
{
|
||||
id: activeApiRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Active API",
|
||||
status: "active",
|
||||
},
|
||||
{
|
||||
id: pausedScheduledRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Paused scheduled",
|
||||
status: "paused",
|
||||
},
|
||||
{
|
||||
id: archivedScheduledRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Archived scheduled",
|
||||
status: "archived",
|
||||
},
|
||||
{
|
||||
id: disabledScheduleRoutineId,
|
||||
companyId,
|
||||
projectId,
|
||||
assigneeAgentId: agentId,
|
||||
title: "Disabled schedule",
|
||||
status: "active",
|
||||
},
|
||||
]);
|
||||
await db.insert(routineTriggers).values([
|
||||
{
|
||||
companyId,
|
||||
routineId: activeScheduledRoutineId,
|
||||
kind: "schedule",
|
||||
enabled: true,
|
||||
cronExpression: "0 9 * * *",
|
||||
timezone: "UTC",
|
||||
},
|
||||
{
|
||||
companyId,
|
||||
routineId: activeApiRoutineId,
|
||||
kind: "api",
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
companyId,
|
||||
routineId: pausedScheduledRoutineId,
|
||||
kind: "schedule",
|
||||
enabled: true,
|
||||
cronExpression: "0 10 * * *",
|
||||
timezone: "UTC",
|
||||
},
|
||||
{
|
||||
companyId,
|
||||
routineId: archivedScheduledRoutineId,
|
||||
kind: "schedule",
|
||||
enabled: true,
|
||||
cronExpression: "0 11 * * *",
|
||||
timezone: "UTC",
|
||||
},
|
||||
{
|
||||
companyId,
|
||||
routineId: disabledScheduleRoutineId,
|
||||
kind: "schedule",
|
||||
enabled: false,
|
||||
cronExpression: "0 12 * * *",
|
||||
timezone: "UTC",
|
||||
},
|
||||
]);
|
||||
|
||||
const pausedCount = await pauseSeededScheduledRoutines(tempDb.connectionString);
|
||||
expect(pausedCount).toBe(1);
|
||||
|
||||
const rows = await db.select({ id: routines.id, status: routines.status }).from(routines);
|
||||
const statusById = new Map(rows.map((row) => [row.id, row.status]));
|
||||
expect(statusById.get(activeScheduledRoutineId)).toBe("paused");
|
||||
expect(statusById.get(activeApiRoutineId)).toBe("active");
|
||||
expect(statusById.get(pausedScheduledRoutineId)).toBe("paused");
|
||||
expect(statusById.get(archivedScheduledRoutineId)).toBe("archived");
|
||||
expect(statusById.get(disabledScheduleRoutineId)).toBe("active");
|
||||
} finally {
|
||||
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
await tempDb.cleanup();
|
||||
}
|
||||
}, 20_000);
|
||||
});
|
||||
|
||||
@@ -1,24 +1,21 @@
|
||||
import { inferBindModeFromHost } from "@paperclipai/shared";
|
||||
import type { PaperclipConfig } from "../config/schema.js";
|
||||
import type { CheckResult } from "./index.js";
|
||||
|
||||
function isLoopbackHost(host: string) {
|
||||
const normalized = host.trim().toLowerCase();
|
||||
return normalized === "127.0.0.1" || normalized === "localhost" || normalized === "::1";
|
||||
}
|
||||
|
||||
export function deploymentAuthCheck(config: PaperclipConfig): CheckResult {
|
||||
const mode = config.server.deploymentMode;
|
||||
const exposure = config.server.exposure;
|
||||
const auth = config.auth;
|
||||
const bind = config.server.bind ?? inferBindModeFromHost(config.server.host);
|
||||
|
||||
if (mode === "local_trusted") {
|
||||
if (!isLoopbackHost(config.server.host)) {
|
||||
if (bind !== "loopback") {
|
||||
return {
|
||||
name: "Deployment/auth mode",
|
||||
status: "fail",
|
||||
message: `local_trusted requires loopback host binding (found ${config.server.host})`,
|
||||
message: `local_trusted requires loopback binding (found ${bind})`,
|
||||
canRepair: false,
|
||||
repairHint: "Run `paperclipai configure --section server` and set host to 127.0.0.1",
|
||||
repairHint: "Run `paperclipai configure --section server` and choose Local trusted / loopback reachability",
|
||||
};
|
||||
}
|
||||
return {
|
||||
@@ -86,6 +83,6 @@ export function deploymentAuthCheck(config: PaperclipConfig): CheckResult {
|
||||
return {
|
||||
name: "Deployment/auth mode",
|
||||
status: "pass",
|
||||
message: `Mode ${mode}/${exposure} with auth URL mode ${auth.baseUrlMode}`,
|
||||
message: `Mode ${mode}/${exposure} with bind ${bind} and auth URL mode ${auth.baseUrlMode}`,
|
||||
};
|
||||
}
|
||||
|
||||
282
cli/src/client/board-auth.ts
Normal file
282
cli/src/client/board-auth.ts
Normal file
@@ -0,0 +1,282 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import pc from "picocolors";
|
||||
import { buildCliCommandLabel } from "./command-label.js";
|
||||
import { resolveDefaultCliAuthPath } from "../config/home.js";
|
||||
|
||||
type RequestedAccess = "board" | "instance_admin_required";
|
||||
|
||||
interface BoardAuthCredential {
|
||||
apiBase: string;
|
||||
token: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
userId?: string | null;
|
||||
}
|
||||
|
||||
interface BoardAuthStore {
|
||||
version: 1;
|
||||
credentials: Record<string, BoardAuthCredential>;
|
||||
}
|
||||
|
||||
interface CreateChallengeResponse {
|
||||
id: string;
|
||||
token: string;
|
||||
boardApiToken: string;
|
||||
approvalPath: string;
|
||||
approvalUrl: string | null;
|
||||
pollPath: string;
|
||||
expiresAt: string;
|
||||
suggestedPollIntervalMs: number;
|
||||
}
|
||||
|
||||
interface ChallengeStatusResponse {
|
||||
id: string;
|
||||
status: "pending" | "approved" | "cancelled" | "expired";
|
||||
command: string;
|
||||
clientName: string | null;
|
||||
requestedAccess: RequestedAccess;
|
||||
requestedCompanyId: string | null;
|
||||
requestedCompanyName: string | null;
|
||||
approvedAt: string | null;
|
||||
cancelledAt: string | null;
|
||||
expiresAt: string;
|
||||
approvedByUser: { id: string; name: string; email: string } | null;
|
||||
}
|
||||
|
||||
function defaultBoardAuthStore(): BoardAuthStore {
|
||||
return {
|
||||
version: 1,
|
||||
credentials: {},
|
||||
};
|
||||
}
|
||||
|
||||
function toStringOrNull(value: unknown): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function normalizeApiBase(apiBase: string): string {
|
||||
return apiBase.trim().replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
export function resolveBoardAuthStorePath(overridePath?: string): string {
|
||||
if (overridePath?.trim()) return path.resolve(overridePath.trim());
|
||||
if (process.env.PAPERCLIP_AUTH_STORE?.trim()) return path.resolve(process.env.PAPERCLIP_AUTH_STORE.trim());
|
||||
return resolveDefaultCliAuthPath();
|
||||
}
|
||||
|
||||
export function readBoardAuthStore(storePath?: string): BoardAuthStore {
|
||||
const filePath = resolveBoardAuthStorePath(storePath);
|
||||
if (!fs.existsSync(filePath)) return defaultBoardAuthStore();
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(filePath, "utf8")) as Partial<BoardAuthStore> | null;
|
||||
const credentials = raw?.credentials && typeof raw.credentials === "object" ? raw.credentials : {};
|
||||
const normalized: Record<string, BoardAuthCredential> = {};
|
||||
|
||||
for (const [key, value] of Object.entries(credentials)) {
|
||||
if (typeof value !== "object" || value === null) continue;
|
||||
const record = value as unknown as Record<string, unknown>;
|
||||
const apiBase = toStringOrNull(record.apiBase);
|
||||
const token = toStringOrNull(record.token);
|
||||
const createdAt = toStringOrNull(record.createdAt);
|
||||
const updatedAt = toStringOrNull(record.updatedAt);
|
||||
if (!apiBase || !token || !createdAt || !updatedAt) continue;
|
||||
normalized[normalizeApiBase(key)] = {
|
||||
apiBase,
|
||||
token,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
userId: toStringOrNull(record.userId),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
version: 1,
|
||||
credentials: normalized,
|
||||
};
|
||||
}
|
||||
|
||||
export function writeBoardAuthStore(store: BoardAuthStore, storePath?: string): void {
|
||||
const filePath = resolveBoardAuthStorePath(storePath);
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.writeFileSync(filePath, `${JSON.stringify(store, null, 2)}\n`, { mode: 0o600 });
|
||||
}
|
||||
|
||||
export function getStoredBoardCredential(apiBase: string, storePath?: string): BoardAuthCredential | null {
|
||||
const store = readBoardAuthStore(storePath);
|
||||
return store.credentials[normalizeApiBase(apiBase)] ?? null;
|
||||
}
|
||||
|
||||
export function setStoredBoardCredential(input: {
|
||||
apiBase: string;
|
||||
token: string;
|
||||
userId?: string | null;
|
||||
storePath?: string;
|
||||
}): BoardAuthCredential {
|
||||
const normalizedApiBase = normalizeApiBase(input.apiBase);
|
||||
const store = readBoardAuthStore(input.storePath);
|
||||
const now = new Date().toISOString();
|
||||
const existing = store.credentials[normalizedApiBase];
|
||||
const credential: BoardAuthCredential = {
|
||||
apiBase: normalizedApiBase,
|
||||
token: input.token.trim(),
|
||||
createdAt: existing?.createdAt ?? now,
|
||||
updatedAt: now,
|
||||
userId: input.userId ?? existing?.userId ?? null,
|
||||
};
|
||||
store.credentials[normalizedApiBase] = credential;
|
||||
writeBoardAuthStore(store, input.storePath);
|
||||
return credential;
|
||||
}
|
||||
|
||||
export function removeStoredBoardCredential(apiBase: string, storePath?: string): boolean {
|
||||
const normalizedApiBase = normalizeApiBase(apiBase);
|
||||
const store = readBoardAuthStore(storePath);
|
||||
if (!store.credentials[normalizedApiBase]) return false;
|
||||
delete store.credentials[normalizedApiBase];
|
||||
writeBoardAuthStore(store, storePath);
|
||||
return true;
|
||||
}
|
||||
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function requestJson<T>(url: string, init?: RequestInit): Promise<T> {
|
||||
const headers = new Headers(init?.headers ?? undefined);
|
||||
if (init?.body !== undefined && !headers.has("content-type")) {
|
||||
headers.set("content-type", "application/json");
|
||||
}
|
||||
if (!headers.has("accept")) {
|
||||
headers.set("accept", "application/json");
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await response.json().catch(() => null);
|
||||
const message =
|
||||
body && typeof body === "object" && typeof (body as { error?: unknown }).error === "string"
|
||||
? (body as { error: string }).error
|
||||
: `Request failed: ${response.status}`;
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
return response.json() as Promise<T>;
|
||||
}
|
||||
|
||||
export function openUrl(url: string): boolean {
|
||||
const platform = process.platform;
|
||||
try {
|
||||
if (platform === "darwin") {
|
||||
const child = spawn("open", [url], { detached: true, stdio: "ignore" });
|
||||
child.unref();
|
||||
return true;
|
||||
}
|
||||
if (platform === "win32") {
|
||||
const child = spawn("cmd", ["/c", "start", "", url], { detached: true, stdio: "ignore" });
|
||||
child.unref();
|
||||
return true;
|
||||
}
|
||||
const child = spawn("xdg-open", [url], { detached: true, stdio: "ignore" });
|
||||
child.unref();
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function loginBoardCli(params: {
|
||||
apiBase: string;
|
||||
requestedAccess: RequestedAccess;
|
||||
requestedCompanyId?: string | null;
|
||||
clientName?: string | null;
|
||||
command?: string;
|
||||
storePath?: string;
|
||||
print?: boolean;
|
||||
}): Promise<{ token: string; approvalUrl: string; userId?: string | null }> {
|
||||
const apiBase = normalizeApiBase(params.apiBase);
|
||||
const createUrl = `${apiBase}/api/cli-auth/challenges`;
|
||||
const command = params.command?.trim() || buildCliCommandLabel();
|
||||
|
||||
const challenge = await requestJson<CreateChallengeResponse>(createUrl, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
command,
|
||||
clientName: params.clientName?.trim() || "paperclipai cli",
|
||||
requestedAccess: params.requestedAccess,
|
||||
requestedCompanyId: params.requestedCompanyId?.trim() || null,
|
||||
}),
|
||||
});
|
||||
|
||||
const approvalUrl = challenge.approvalUrl ?? `${apiBase}${challenge.approvalPath}`;
|
||||
if (params.print !== false) {
|
||||
console.error(pc.bold("Board authentication required"));
|
||||
console.error(`Open this URL in your browser to approve CLI access:\n${approvalUrl}`);
|
||||
}
|
||||
|
||||
const opened = openUrl(approvalUrl);
|
||||
if (params.print !== false && opened) {
|
||||
console.error(pc.dim("Opened the approval page in your browser."));
|
||||
}
|
||||
|
||||
const expiresAtMs = Date.parse(challenge.expiresAt);
|
||||
const pollMs = Math.max(500, challenge.suggestedPollIntervalMs || 1000);
|
||||
|
||||
while (Number.isFinite(expiresAtMs) ? Date.now() < expiresAtMs : true) {
|
||||
const status = await requestJson<ChallengeStatusResponse>(
|
||||
`${apiBase}/api${challenge.pollPath}?token=${encodeURIComponent(challenge.token)}`,
|
||||
);
|
||||
|
||||
if (status.status === "approved") {
|
||||
const me = await requestJson<{ userId: string; user?: { id: string } | null }>(
|
||||
`${apiBase}/api/cli-auth/me`,
|
||||
{
|
||||
headers: {
|
||||
authorization: `Bearer ${challenge.boardApiToken}`,
|
||||
},
|
||||
},
|
||||
);
|
||||
setStoredBoardCredential({
|
||||
apiBase,
|
||||
token: challenge.boardApiToken,
|
||||
userId: me.userId ?? me.user?.id ?? null,
|
||||
storePath: params.storePath,
|
||||
});
|
||||
return {
|
||||
token: challenge.boardApiToken,
|
||||
approvalUrl,
|
||||
userId: me.userId ?? me.user?.id ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
if (status.status === "cancelled") {
|
||||
throw new Error("CLI auth challenge was cancelled.");
|
||||
}
|
||||
if (status.status === "expired") {
|
||||
throw new Error("CLI auth challenge expired before approval.");
|
||||
}
|
||||
|
||||
await sleep(pollMs);
|
||||
}
|
||||
|
||||
throw new Error("CLI auth challenge expired before approval.");
|
||||
}
|
||||
|
||||
export async function revokeStoredBoardCredential(params: {
|
||||
apiBase: string;
|
||||
token: string;
|
||||
}): Promise<void> {
|
||||
const apiBase = normalizeApiBase(params.apiBase);
|
||||
await requestJson<{ revoked: boolean }>(`${apiBase}/api/cli-auth/revoke-current`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
authorization: `Bearer ${params.token}`,
|
||||
},
|
||||
body: JSON.stringify({}),
|
||||
});
|
||||
}
|
||||
4
cli/src/client/command-label.ts
Normal file
4
cli/src/client/command-label.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export function buildCliCommandLabel(): string {
|
||||
const args = process.argv.slice(2);
|
||||
return args.length > 0 ? `paperclipai ${args.join(" ")}` : "paperclipai";
|
||||
}
|
||||
@@ -13,25 +13,54 @@ export class ApiRequestError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
export class ApiConnectionError extends Error {
|
||||
url: string;
|
||||
method: string;
|
||||
causeMessage?: string;
|
||||
|
||||
constructor(input: {
|
||||
apiBase: string;
|
||||
path: string;
|
||||
method: string;
|
||||
cause?: unknown;
|
||||
}) {
|
||||
const url = buildUrl(input.apiBase, input.path);
|
||||
const causeMessage = formatConnectionCause(input.cause);
|
||||
super(buildConnectionErrorMessage({ apiBase: input.apiBase, url, method: input.method, causeMessage }));
|
||||
this.url = url;
|
||||
this.method = input.method;
|
||||
this.causeMessage = causeMessage;
|
||||
}
|
||||
}
|
||||
|
||||
interface RequestOptions {
|
||||
ignoreNotFound?: boolean;
|
||||
}
|
||||
|
||||
interface RecoverAuthInput {
|
||||
path: string;
|
||||
method: string;
|
||||
error: ApiRequestError;
|
||||
}
|
||||
|
||||
interface ApiClientOptions {
|
||||
apiBase: string;
|
||||
apiKey?: string;
|
||||
runId?: string;
|
||||
recoverAuth?: (input: RecoverAuthInput) => Promise<string | null>;
|
||||
}
|
||||
|
||||
export class PaperclipApiClient {
|
||||
readonly apiBase: string;
|
||||
readonly apiKey?: string;
|
||||
apiKey?: string;
|
||||
readonly runId?: string;
|
||||
readonly recoverAuth?: (input: RecoverAuthInput) => Promise<string | null>;
|
||||
|
||||
constructor(opts: ApiClientOptions) {
|
||||
this.apiBase = opts.apiBase.replace(/\/+$/, "");
|
||||
this.apiKey = opts.apiKey?.trim() || undefined;
|
||||
this.runId = opts.runId?.trim() || undefined;
|
||||
this.recoverAuth = opts.recoverAuth;
|
||||
}
|
||||
|
||||
get<T>(path: string, opts?: RequestOptions): Promise<T | null> {
|
||||
@@ -56,8 +85,18 @@ export class PaperclipApiClient {
|
||||
return this.request<T>(path, { method: "DELETE" }, opts);
|
||||
}
|
||||
|
||||
private async request<T>(path: string, init: RequestInit, opts?: RequestOptions): Promise<T | null> {
|
||||
setApiKey(apiKey: string | undefined) {
|
||||
this.apiKey = apiKey?.trim() || undefined;
|
||||
}
|
||||
|
||||
private async request<T>(
|
||||
path: string,
|
||||
init: RequestInit,
|
||||
opts?: RequestOptions,
|
||||
hasRetriedAuth = false,
|
||||
): Promise<T | null> {
|
||||
const url = buildUrl(this.apiBase, path);
|
||||
const method = String(init.method ?? "GET").toUpperCase();
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
accept: "application/json",
|
||||
@@ -76,17 +115,39 @@ export class PaperclipApiClient {
|
||||
headers["x-paperclip-run-id"] = this.runId;
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
let response: Response;
|
||||
try {
|
||||
response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new ApiConnectionError({
|
||||
apiBase: this.apiBase,
|
||||
path,
|
||||
method,
|
||||
cause: error,
|
||||
});
|
||||
}
|
||||
|
||||
if (opts?.ignoreNotFound && response.status === 404) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
throw await toApiError(response);
|
||||
const apiError = await toApiError(response);
|
||||
if (!hasRetriedAuth && this.recoverAuth) {
|
||||
const recoveredToken = await this.recoverAuth({
|
||||
path,
|
||||
method,
|
||||
error: apiError,
|
||||
});
|
||||
if (recoveredToken) {
|
||||
this.setApiKey(recoveredToken);
|
||||
return this.request<T>(path, init, opts, true);
|
||||
}
|
||||
}
|
||||
throw apiError;
|
||||
}
|
||||
|
||||
if (response.status === 204) {
|
||||
@@ -136,6 +197,50 @@ async function toApiError(response: Response): Promise<ApiRequestError> {
|
||||
return new ApiRequestError(response.status, `Request failed with status ${response.status}`, undefined, parsed);
|
||||
}
|
||||
|
||||
function buildConnectionErrorMessage(input: {
|
||||
apiBase: string;
|
||||
url: string;
|
||||
method: string;
|
||||
causeMessage?: string;
|
||||
}): string {
|
||||
const healthUrl = buildHealthCheckUrl(input.url);
|
||||
const lines = [
|
||||
"Could not reach the Paperclip API.",
|
||||
"",
|
||||
`Request: ${input.method} ${input.url}`,
|
||||
];
|
||||
if (input.causeMessage) {
|
||||
lines.push(`Cause: ${input.causeMessage}`);
|
||||
}
|
||||
lines.push(
|
||||
"",
|
||||
"This usually means the Paperclip server is not running, the configured URL is wrong, or the request is being blocked before it reaches Paperclip.",
|
||||
"",
|
||||
"Try:",
|
||||
"- Start Paperclip with `pnpm dev` or `pnpm paperclipai run`.",
|
||||
`- Verify the server is reachable with \`curl ${healthUrl}\`.`,
|
||||
`- If Paperclip is running elsewhere, pass \`--api-base ${input.apiBase.replace(/\/+$/, "")}\` or set \`PAPERCLIP_API_URL\`.`,
|
||||
);
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function buildHealthCheckUrl(requestUrl: string): string {
|
||||
const url = new URL(requestUrl);
|
||||
url.pathname = `${url.pathname.replace(/\/+$/, "").replace(/\/api(?:\/.*)?$/, "")}/api/health`;
|
||||
url.search = "";
|
||||
url.hash = "";
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
function formatConnectionCause(error: unknown): string | undefined {
|
||||
if (!error) return undefined;
|
||||
if (error instanceof Error) {
|
||||
return error.message.trim() || error.name;
|
||||
}
|
||||
const message = String(error).trim();
|
||||
return message || undefined;
|
||||
}
|
||||
|
||||
function toStringRecord(headers: HeadersInit | undefined): Record<string, string> {
|
||||
if (!headers) return {};
|
||||
if (Array.isArray(headers)) {
|
||||
|
||||
@@ -3,6 +3,7 @@ import * as p from "@clack/prompts";
|
||||
import pc from "picocolors";
|
||||
import { and, eq, gt, isNull } from "drizzle-orm";
|
||||
import { createDb, instanceUserRoles, invites } from "@paperclipai/db";
|
||||
import { inferBindModeFromHost } from "@paperclipai/shared";
|
||||
import { loadPaperclipEnvFile } from "../config/env.js";
|
||||
import { readConfig, resolveConfigPath } from "../config/store.js";
|
||||
|
||||
@@ -40,9 +41,13 @@ function resolveBaseUrl(configPath?: string, explicitBaseUrl?: string) {
|
||||
if (config?.auth.baseUrlMode === "explicit" && config.auth.publicBaseUrl) {
|
||||
return config.auth.publicBaseUrl.replace(/\/+$/, "");
|
||||
}
|
||||
const host = config?.server.host ?? "localhost";
|
||||
const bind = config?.server.bind ?? inferBindModeFromHost(config?.server.host);
|
||||
const host =
|
||||
bind === "custom"
|
||||
? config?.server.customBindHost ?? config?.server.host ?? "localhost"
|
||||
: config?.server.host ?? "localhost";
|
||||
const port = config?.server.port ?? 3100;
|
||||
const publicHost = host === "0.0.0.0" ? "localhost" : host;
|
||||
const publicHost = host === "0.0.0.0" || bind === "lan" ? "localhost" : host;
|
||||
return `http://${publicHost}:${port}`;
|
||||
}
|
||||
|
||||
|
||||
113
cli/src/commands/client/auth.ts
Normal file
113
cli/src/commands/client/auth.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import type { Command } from "commander";
|
||||
import {
|
||||
getStoredBoardCredential,
|
||||
loginBoardCli,
|
||||
removeStoredBoardCredential,
|
||||
revokeStoredBoardCredential,
|
||||
} from "../../client/board-auth.js";
|
||||
import {
|
||||
addCommonClientOptions,
|
||||
handleCommandError,
|
||||
printOutput,
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
} from "./common.js";
|
||||
|
||||
interface AuthLoginOptions extends BaseClientOptions {
|
||||
instanceAdmin?: boolean;
|
||||
}
|
||||
|
||||
interface AuthLogoutOptions extends BaseClientOptions {}
|
||||
interface AuthWhoamiOptions extends BaseClientOptions {}
|
||||
|
||||
export function registerClientAuthCommands(auth: Command): void {
|
||||
addCommonClientOptions(
|
||||
auth
|
||||
.command("login")
|
||||
.description("Authenticate the CLI for board-user access")
|
||||
.option("--instance-admin", "Request instance-admin approval instead of plain board access", false)
|
||||
.action(async (opts: AuthLoginOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const login = await loginBoardCli({
|
||||
apiBase: ctx.api.apiBase,
|
||||
requestedAccess: opts.instanceAdmin ? "instance_admin_required" : "board",
|
||||
requestedCompanyId: ctx.companyId ?? null,
|
||||
command: "paperclipai auth login",
|
||||
});
|
||||
printOutput(
|
||||
{
|
||||
ok: true,
|
||||
apiBase: ctx.api.apiBase,
|
||||
userId: login.userId ?? null,
|
||||
approvalUrl: login.approvalUrl,
|
||||
},
|
||||
{ json: ctx.json },
|
||||
);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: true },
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
auth
|
||||
.command("logout")
|
||||
.description("Remove the stored board-user credential for this API base")
|
||||
.action(async (opts: AuthLogoutOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const credential = getStoredBoardCredential(ctx.api.apiBase);
|
||||
if (!credential) {
|
||||
printOutput({ ok: true, apiBase: ctx.api.apiBase, revoked: false, removedLocalCredential: false }, { json: ctx.json });
|
||||
return;
|
||||
}
|
||||
let revoked = false;
|
||||
try {
|
||||
await revokeStoredBoardCredential({
|
||||
apiBase: ctx.api.apiBase,
|
||||
token: credential.token,
|
||||
});
|
||||
revoked = true;
|
||||
} catch {
|
||||
// Remove the local credential even if the server-side revoke fails.
|
||||
}
|
||||
const removedLocalCredential = removeStoredBoardCredential(ctx.api.apiBase);
|
||||
printOutput(
|
||||
{
|
||||
ok: true,
|
||||
apiBase: ctx.api.apiBase,
|
||||
revoked,
|
||||
removedLocalCredential,
|
||||
},
|
||||
{ json: ctx.json },
|
||||
);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
auth
|
||||
.command("whoami")
|
||||
.description("Show the current board-user identity for this API base")
|
||||
.action(async (opts: AuthWhoamiOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const me = await ctx.api.get<{
|
||||
user: { id: string; name: string; email: string } | null;
|
||||
userId: string;
|
||||
isInstanceAdmin: boolean;
|
||||
companyIds: string[];
|
||||
source: string;
|
||||
keyId: string | null;
|
||||
}>("/api/cli-auth/me");
|
||||
printOutput(me, { json: ctx.json });
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
import pc from "picocolors";
|
||||
import type { Command } from "commander";
|
||||
import { getStoredBoardCredential, loginBoardCli } from "../../client/board-auth.js";
|
||||
import { buildCliCommandLabel } from "../../client/command-label.js";
|
||||
import { readConfig } from "../../config/store.js";
|
||||
import { readContext, resolveProfile, type ClientContextProfile } from "../../client/context.js";
|
||||
import { ApiRequestError, PaperclipApiClient } from "../../client/http.js";
|
||||
@@ -53,10 +55,12 @@ export function resolveCommandContext(
|
||||
profile.apiBase ||
|
||||
inferApiBaseFromConfig(options.config);
|
||||
|
||||
const apiKey =
|
||||
const explicitApiKey =
|
||||
options.apiKey?.trim() ||
|
||||
process.env.PAPERCLIP_API_KEY?.trim() ||
|
||||
readKeyFromProfileEnv(profile);
|
||||
const storedBoardCredential = explicitApiKey ? null : getStoredBoardCredential(apiBase);
|
||||
const apiKey = explicitApiKey || storedBoardCredential?.token;
|
||||
|
||||
const companyId =
|
||||
options.companyId?.trim() ||
|
||||
@@ -69,7 +73,27 @@ export function resolveCommandContext(
|
||||
);
|
||||
}
|
||||
|
||||
const api = new PaperclipApiClient({ apiBase, apiKey });
|
||||
const api = new PaperclipApiClient({
|
||||
apiBase,
|
||||
apiKey,
|
||||
recoverAuth: explicitApiKey || !canAttemptInteractiveBoardAuth()
|
||||
? undefined
|
||||
: async ({ error }) => {
|
||||
const requestedAccess = error.message.includes("Instance admin required")
|
||||
? "instance_admin_required"
|
||||
: "board";
|
||||
if (!shouldRecoverBoardAuth(error)) {
|
||||
return null;
|
||||
}
|
||||
const login = await loginBoardCli({
|
||||
apiBase,
|
||||
requestedAccess,
|
||||
requestedCompanyId: companyId ?? null,
|
||||
command: buildCliCommandLabel(),
|
||||
});
|
||||
return login.token;
|
||||
},
|
||||
});
|
||||
return {
|
||||
api,
|
||||
companyId,
|
||||
@@ -79,6 +103,16 @@ export function resolveCommandContext(
|
||||
};
|
||||
}
|
||||
|
||||
function shouldRecoverBoardAuth(error: ApiRequestError): boolean {
|
||||
if (error.status === 401) return true;
|
||||
if (error.status !== 403) return false;
|
||||
return error.message.includes("Board access required") || error.message.includes("Instance admin required");
|
||||
}
|
||||
|
||||
function canAttemptInteractiveBoardAuth(): boolean {
|
||||
return Boolean(process.stdin.isTTY && process.stdout.isTTY);
|
||||
}
|
||||
|
||||
export function printOutput(data: unknown, opts: { json?: boolean; label?: string } = {}): void {
|
||||
if (opts.json) {
|
||||
console.log(JSON.stringify(data, null, 2));
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
645
cli/src/commands/client/feedback.ts
Normal file
645
cli/src/commands/client/feedback.ts
Normal file
@@ -0,0 +1,645 @@
|
||||
import { mkdir, readdir, readFile, stat, writeFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import pc from "picocolors";
|
||||
import { Command } from "commander";
|
||||
import type { Company, FeedbackTrace, FeedbackTraceBundle } from "@paperclipai/shared";
|
||||
import {
|
||||
addCommonClientOptions,
|
||||
handleCommandError,
|
||||
printOutput,
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
type ResolvedClientContext,
|
||||
} from "./common.js";
|
||||
|
||||
interface FeedbackFilterOptions extends BaseClientOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
projectId?: string;
|
||||
issueId?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
}
|
||||
|
||||
export interface FeedbackTraceQueryOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
projectId?: string;
|
||||
issueId?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
}
|
||||
|
||||
interface FeedbackReportOptions extends FeedbackFilterOptions {
|
||||
payloads?: boolean;
|
||||
}
|
||||
|
||||
interface FeedbackExportOptions extends FeedbackFilterOptions {
|
||||
out?: string;
|
||||
}
|
||||
|
||||
interface FeedbackSummary {
|
||||
total: number;
|
||||
thumbsUp: number;
|
||||
thumbsDown: number;
|
||||
withReason: number;
|
||||
statuses: Record<string, number>;
|
||||
}
|
||||
|
||||
interface FeedbackExportManifest {
|
||||
exportedAt: string;
|
||||
serverUrl: string;
|
||||
companyId: string;
|
||||
summary: FeedbackSummary & {
|
||||
uniqueIssues: number;
|
||||
issues: string[];
|
||||
};
|
||||
files: {
|
||||
votes: string[];
|
||||
traces: string[];
|
||||
fullTraces: string[];
|
||||
zip: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface FeedbackExportResult {
|
||||
outputDir: string;
|
||||
zipPath: string;
|
||||
manifest: FeedbackExportManifest;
|
||||
}
|
||||
|
||||
export function registerFeedbackCommands(program: Command): void {
|
||||
const feedback = program.command("feedback").description("Inspect and export local feedback traces");
|
||||
|
||||
addCommonClientOptions(
|
||||
feedback
|
||||
.command("report")
|
||||
.description("Render a terminal report for company feedback traces")
|
||||
.option("-C, --company-id <id>", "Company ID (overrides context default)")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--payloads", "Include raw payload dumps in the terminal report", false)
|
||||
.action(async (opts: FeedbackReportOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const companyId = await resolveFeedbackCompanyId(ctx, opts.companyId);
|
||||
const traces = await fetchCompanyFeedbackTraces(ctx, companyId, opts);
|
||||
const summary = summarizeFeedbackTraces(traces);
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
summary,
|
||||
traces,
|
||||
},
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(renderFeedbackReport({
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
traces,
|
||||
summary,
|
||||
includePayloads: Boolean(opts.payloads),
|
||||
}));
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
feedback
|
||||
.command("export")
|
||||
.description("Export feedback votes and raw trace bundles into a folder plus zip archive")
|
||||
.option("-C, --company-id <id>", "Company ID (overrides context default)")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--project-id <id>", "Filter by project ID")
|
||||
.option("--issue-id <id>", "Filter by issue ID")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--out <path>", "Output directory (default: ./feedback-export-<timestamp>)")
|
||||
.action(async (opts: FeedbackExportOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const companyId = await resolveFeedbackCompanyId(ctx, opts.companyId);
|
||||
const traces = await fetchCompanyFeedbackTraces(ctx, companyId, opts);
|
||||
const outputDir = path.resolve(opts.out?.trim() || defaultFeedbackExportDirName());
|
||||
const exported = await writeFeedbackExportBundle({
|
||||
apiBase: ctx.api.apiBase,
|
||||
companyId,
|
||||
traces,
|
||||
outputDir,
|
||||
traceBundleFetcher: (trace) => fetchFeedbackTraceBundle(ctx, trace.id),
|
||||
});
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{
|
||||
companyId,
|
||||
outputDir: exported.outputDir,
|
||||
zipPath: exported.zipPath,
|
||||
summary: exported.manifest.summary,
|
||||
},
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(renderFeedbackExportSummary(exported));
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
{ includeCompany: false },
|
||||
);
|
||||
}
|
||||
|
||||
export async function resolveFeedbackCompanyId(
|
||||
ctx: ResolvedClientContext,
|
||||
explicitCompanyId?: string,
|
||||
): Promise<string> {
|
||||
const direct = explicitCompanyId?.trim() || ctx.companyId?.trim();
|
||||
if (direct) return direct;
|
||||
const companies = (await ctx.api.get<Company[]>("/api/companies")) ?? [];
|
||||
const companyId = companies[0]?.id?.trim();
|
||||
if (!companyId) {
|
||||
throw new Error(
|
||||
"Company ID is required. Pass --company-id, set PAPERCLIP_COMPANY_ID, or configure a CLI context default.",
|
||||
);
|
||||
}
|
||||
return companyId;
|
||||
}
|
||||
|
||||
export function buildFeedbackTraceQuery(opts: FeedbackTraceQueryOptions, includePayload = true): string {
|
||||
const params = new URLSearchParams();
|
||||
if (opts.targetType) params.set("targetType", opts.targetType);
|
||||
if (opts.vote) params.set("vote", opts.vote);
|
||||
if (opts.status) params.set("status", opts.status);
|
||||
if (opts.projectId) params.set("projectId", opts.projectId);
|
||||
if (opts.issueId) params.set("issueId", opts.issueId);
|
||||
if (opts.from) params.set("from", opts.from);
|
||||
if (opts.to) params.set("to", opts.to);
|
||||
if (opts.sharedOnly) params.set("sharedOnly", "true");
|
||||
if (includePayload) params.set("includePayload", "true");
|
||||
const query = params.toString();
|
||||
return query ? `?${query}` : "";
|
||||
}
|
||||
|
||||
export function normalizeFeedbackTraceExportFormat(value: string | undefined): "json" | "ndjson" {
|
||||
if (!value || value === "ndjson") return "ndjson";
|
||||
if (value === "json") return "json";
|
||||
throw new Error(`Unsupported export format: ${value}`);
|
||||
}
|
||||
|
||||
export function serializeFeedbackTraces(traces: FeedbackTrace[], format: string | undefined): string {
|
||||
if (normalizeFeedbackTraceExportFormat(format) === "json") {
|
||||
return JSON.stringify(traces, null, 2);
|
||||
}
|
||||
return traces.map((trace) => JSON.stringify(trace)).join("\n");
|
||||
}
|
||||
|
||||
export async function fetchCompanyFeedbackTraces(
|
||||
ctx: ResolvedClientContext,
|
||||
companyId: string,
|
||||
opts: FeedbackFilterOptions,
|
||||
): Promise<FeedbackTrace[]> {
|
||||
return (
|
||||
(await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/companies/${companyId}/feedback-traces${buildFeedbackTraceQuery(opts, true)}`,
|
||||
)) ?? []
|
||||
);
|
||||
}
|
||||
|
||||
export async function fetchFeedbackTraceBundle(
|
||||
ctx: ResolvedClientContext,
|
||||
traceId: string,
|
||||
): Promise<FeedbackTraceBundle> {
|
||||
const bundle = await ctx.api.get<FeedbackTraceBundle>(`/api/feedback-traces/${traceId}/bundle`);
|
||||
if (!bundle) {
|
||||
throw new Error(`Feedback trace bundle ${traceId} not found`);
|
||||
}
|
||||
return bundle;
|
||||
}
|
||||
|
||||
export function summarizeFeedbackTraces(traces: FeedbackTrace[]): FeedbackSummary {
|
||||
const statuses: Record<string, number> = {};
|
||||
let thumbsUp = 0;
|
||||
let thumbsDown = 0;
|
||||
let withReason = 0;
|
||||
|
||||
for (const trace of traces) {
|
||||
if (trace.vote === "up") thumbsUp += 1;
|
||||
if (trace.vote === "down") thumbsDown += 1;
|
||||
if (readFeedbackReason(trace)) withReason += 1;
|
||||
statuses[trace.status] = (statuses[trace.status] ?? 0) + 1;
|
||||
}
|
||||
|
||||
return {
|
||||
total: traces.length,
|
||||
thumbsUp,
|
||||
thumbsDown,
|
||||
withReason,
|
||||
statuses,
|
||||
};
|
||||
}
|
||||
|
||||
export function renderFeedbackReport(input: {
|
||||
apiBase: string;
|
||||
companyId: string;
|
||||
traces: FeedbackTrace[];
|
||||
summary: FeedbackSummary;
|
||||
includePayloads: boolean;
|
||||
}): string {
|
||||
const lines: string[] = [];
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.magenta("Paperclip Feedback Report")));
|
||||
lines.push(pc.dim(new Date().toISOString()));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(`${pc.dim("Server:")} ${input.apiBase}`);
|
||||
lines.push(`${pc.dim("Company:")} ${input.companyId}`);
|
||||
lines.push("");
|
||||
|
||||
if (input.traces.length === 0) {
|
||||
lines.push(pc.yellow("[!!] No feedback traces found."));
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
lines.push(pc.bold(pc.cyan("Summary")));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(` ${pc.green(pc.bold(String(input.summary.thumbsUp)))} thumbs up`);
|
||||
lines.push(` ${pc.red(pc.bold(String(input.summary.thumbsDown)))} thumbs down`);
|
||||
lines.push(` ${pc.yellow(pc.bold(String(input.summary.withReason)))} downvotes with a reason`);
|
||||
lines.push(` ${pc.bold(String(input.summary.total))} total traces`);
|
||||
lines.push("");
|
||||
lines.push(pc.dim("Export status:"));
|
||||
for (const status of ["pending", "sent", "local_only", "failed"]) {
|
||||
lines.push(` ${padRight(status, 10)} ${input.summary.statuses[status] ?? 0}`);
|
||||
}
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.cyan("Trace Details")));
|
||||
lines.push(horizontalRule());
|
||||
|
||||
for (const trace of input.traces) {
|
||||
const voteColor = trace.vote === "up" ? pc.green : pc.red;
|
||||
const voteIcon = trace.vote === "up" ? "^" : "v";
|
||||
const issueRef = trace.issueIdentifier ?? trace.issueId;
|
||||
const label = trace.targetSummary.label?.trim() || trace.targetType;
|
||||
const excerpt = compactText(trace.targetSummary.excerpt);
|
||||
const reason = readFeedbackReason(trace);
|
||||
lines.push(
|
||||
` ${voteColor(voteIcon)} ${pc.bold(issueRef)} ${pc.dim(compactText(trace.issueTitle, 64))}`,
|
||||
);
|
||||
lines.push(
|
||||
` ${pc.dim("Trace:")} ${trace.id.slice(0, 8)} ${pc.dim("Status:")} ${trace.status} ${pc.dim("Date:")} ${formatTimestamp(trace.createdAt)}`,
|
||||
);
|
||||
lines.push(` ${pc.dim("Target:")} ${label}`);
|
||||
if (excerpt) {
|
||||
lines.push(` ${pc.dim("Excerpt:")} ${excerpt}`);
|
||||
}
|
||||
if (reason) {
|
||||
lines.push(` ${pc.yellow(pc.bold("Reason:"))} ${pc.yellow(reason)}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
|
||||
if (input.includePayloads) {
|
||||
lines.push(pc.bold(pc.cyan("Raw Payloads")));
|
||||
lines.push(horizontalRule());
|
||||
for (const trace of input.traces) {
|
||||
if (!trace.payloadSnapshot) continue;
|
||||
const issueRef = trace.issueIdentifier ?? trace.issueId;
|
||||
lines.push(` ${pc.bold(`${issueRef} (${trace.id.slice(0, 8)})`)}`);
|
||||
const body = JSON.stringify(trace.payloadSnapshot, null, 2)?.split("\n") ?? [];
|
||||
for (const line of body) {
|
||||
lines.push(` ${pc.dim(line)}`);
|
||||
}
|
||||
lines.push("");
|
||||
}
|
||||
}
|
||||
|
||||
lines.push(horizontalRule());
|
||||
lines.push(pc.dim(`Report complete. ${input.traces.length} trace(s) displayed.`));
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
export async function writeFeedbackExportBundle(input: {
|
||||
apiBase: string;
|
||||
companyId: string;
|
||||
traces: FeedbackTrace[];
|
||||
outputDir: string;
|
||||
traceBundleFetcher?: (trace: FeedbackTrace) => Promise<FeedbackTraceBundle>;
|
||||
}): Promise<FeedbackExportResult> {
|
||||
await ensureEmptyOutputDirectory(input.outputDir);
|
||||
await mkdir(path.join(input.outputDir, "votes"), { recursive: true });
|
||||
await mkdir(path.join(input.outputDir, "traces"), { recursive: true });
|
||||
await mkdir(path.join(input.outputDir, "full-traces"), { recursive: true });
|
||||
|
||||
const summary = summarizeFeedbackTraces(input.traces);
|
||||
const voteFiles: string[] = [];
|
||||
const traceFiles: string[] = [];
|
||||
const fullTraceDirs: string[] = [];
|
||||
const fullTraceFiles: string[] = [];
|
||||
const issueSet = new Set<string>();
|
||||
|
||||
for (const trace of input.traces) {
|
||||
const issueRef = sanitizeFileSegment(trace.issueIdentifier ?? trace.issueId);
|
||||
const voteRecord = buildFeedbackVoteRecord(trace);
|
||||
const voteFileName = `${issueRef}-${trace.feedbackVoteId.slice(0, 8)}.json`;
|
||||
const traceFileName = `${issueRef}-${trace.id.slice(0, 8)}.json`;
|
||||
voteFiles.push(voteFileName);
|
||||
traceFiles.push(traceFileName);
|
||||
issueSet.add(trace.issueIdentifier ?? trace.issueId);
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "votes", voteFileName),
|
||||
`${JSON.stringify(voteRecord, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "traces", traceFileName),
|
||||
`${JSON.stringify(trace, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
|
||||
if (input.traceBundleFetcher) {
|
||||
const bundle = await input.traceBundleFetcher(trace);
|
||||
const bundleDirName = `${issueRef}-${trace.id.slice(0, 8)}`;
|
||||
const bundleDir = path.join(input.outputDir, "full-traces", bundleDirName);
|
||||
await mkdir(bundleDir, { recursive: true });
|
||||
fullTraceDirs.push(bundleDirName);
|
||||
await writeFile(
|
||||
path.join(bundleDir, "bundle.json"),
|
||||
`${JSON.stringify(bundle, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
fullTraceFiles.push(path.posix.join("full-traces", bundleDirName, "bundle.json"));
|
||||
for (const file of bundle.files) {
|
||||
const targetPath = path.join(bundleDir, file.path);
|
||||
await mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await writeFile(targetPath, file.contents, "utf8");
|
||||
fullTraceFiles.push(path.posix.join("full-traces", bundleDirName, file.path.replace(/\\/g, "/")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const zipPath = `${input.outputDir}.zip`;
|
||||
const manifest: FeedbackExportManifest = {
|
||||
exportedAt: new Date().toISOString(),
|
||||
serverUrl: input.apiBase,
|
||||
companyId: input.companyId,
|
||||
summary: {
|
||||
...summary,
|
||||
uniqueIssues: issueSet.size,
|
||||
issues: Array.from(issueSet).sort((left, right) => left.localeCompare(right)),
|
||||
},
|
||||
files: {
|
||||
votes: voteFiles.slice().sort((left, right) => left.localeCompare(right)),
|
||||
traces: traceFiles.slice().sort((left, right) => left.localeCompare(right)),
|
||||
fullTraces: fullTraceDirs.slice().sort((left, right) => left.localeCompare(right)),
|
||||
zip: path.basename(zipPath),
|
||||
},
|
||||
};
|
||||
|
||||
await writeFile(
|
||||
path.join(input.outputDir, "index.json"),
|
||||
`${JSON.stringify(manifest, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
const archiveFiles = await collectJsonFilesForArchive(input.outputDir, [
|
||||
"index.json",
|
||||
...manifest.files.votes.map((file) => path.posix.join("votes", file)),
|
||||
...manifest.files.traces.map((file) => path.posix.join("traces", file)),
|
||||
...fullTraceFiles,
|
||||
]);
|
||||
await writeFile(zipPath, createStoredZipArchive(archiveFiles, path.basename(input.outputDir)));
|
||||
|
||||
return {
|
||||
outputDir: input.outputDir,
|
||||
zipPath,
|
||||
manifest,
|
||||
};
|
||||
}
|
||||
|
||||
export function renderFeedbackExportSummary(exported: FeedbackExportResult): string {
|
||||
const lines: string[] = [];
|
||||
lines.push("");
|
||||
lines.push(pc.bold(pc.magenta("Paperclip Feedback Export")));
|
||||
lines.push(pc.dim(exported.manifest.exportedAt));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(`${pc.dim("Company:")} ${exported.manifest.companyId}`);
|
||||
lines.push(`${pc.dim("Output:")} ${exported.outputDir}`);
|
||||
lines.push(`${pc.dim("Archive:")} ${exported.zipPath}`);
|
||||
lines.push("");
|
||||
lines.push(pc.bold("Export Summary"));
|
||||
lines.push(horizontalRule());
|
||||
lines.push(` ${pc.green(pc.bold(String(exported.manifest.summary.thumbsUp)))} thumbs up`);
|
||||
lines.push(` ${pc.red(pc.bold(String(exported.manifest.summary.thumbsDown)))} thumbs down`);
|
||||
lines.push(` ${pc.yellow(pc.bold(String(exported.manifest.summary.withReason)))} with reason`);
|
||||
lines.push(` ${pc.bold(String(exported.manifest.summary.uniqueIssues))} unique issues`);
|
||||
lines.push("");
|
||||
lines.push(pc.dim("Files:"));
|
||||
lines.push(` ${path.join(exported.outputDir, "index.json")}`);
|
||||
lines.push(` ${path.join(exported.outputDir, "votes")} (${exported.manifest.files.votes.length} files)`);
|
||||
lines.push(` ${path.join(exported.outputDir, "traces")} (${exported.manifest.files.traces.length} files)`);
|
||||
lines.push(` ${path.join(exported.outputDir, "full-traces")} (${exported.manifest.files.fullTraces.length} bundles)`);
|
||||
lines.push(` ${exported.zipPath}`);
|
||||
lines.push("");
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function readFeedbackReason(trace: FeedbackTrace): string | null {
|
||||
const payload = asRecord(trace.payloadSnapshot);
|
||||
const vote = asRecord(payload?.vote);
|
||||
const reason = vote?.reason;
|
||||
return typeof reason === "string" && reason.trim() ? reason.trim() : null;
|
||||
}
|
||||
|
||||
function buildFeedbackVoteRecord(trace: FeedbackTrace) {
|
||||
return {
|
||||
voteId: trace.feedbackVoteId,
|
||||
traceId: trace.id,
|
||||
issueId: trace.issueId,
|
||||
issueIdentifier: trace.issueIdentifier,
|
||||
issueTitle: trace.issueTitle,
|
||||
vote: trace.vote,
|
||||
targetType: trace.targetType,
|
||||
targetId: trace.targetId,
|
||||
targetSummary: trace.targetSummary,
|
||||
status: trace.status,
|
||||
consentVersion: trace.consentVersion,
|
||||
createdAt: trace.createdAt,
|
||||
updatedAt: trace.updatedAt,
|
||||
reason: readFeedbackReason(trace),
|
||||
};
|
||||
}
|
||||
|
||||
function asRecord(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== "object" || Array.isArray(value)) return null;
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function compactText(value: string | null | undefined, maxLength = 88): string | null {
|
||||
if (!value) return null;
|
||||
const compact = value.replace(/\s+/g, " ").trim();
|
||||
if (!compact) return null;
|
||||
if (compact.length <= maxLength) return compact;
|
||||
return `${compact.slice(0, maxLength - 3)}...`;
|
||||
}
|
||||
|
||||
function formatTimestamp(value: unknown): string {
|
||||
if (value instanceof Date) return value.toISOString().slice(0, 19).replace("T", " ");
|
||||
if (typeof value === "string") return value.slice(0, 19).replace("T", " ");
|
||||
return "-";
|
||||
}
|
||||
|
||||
function horizontalRule(): string {
|
||||
return pc.dim("-".repeat(72));
|
||||
}
|
||||
|
||||
function padRight(value: string, width: number): string {
|
||||
return `${value}${" ".repeat(Math.max(0, width - value.length))}`;
|
||||
}
|
||||
|
||||
function defaultFeedbackExportDirName(): string {
|
||||
const iso = new Date().toISOString().replace(/[-:]/g, "").replace(/\.\d{3}Z$/, "Z");
|
||||
return `feedback-export-${iso}`;
|
||||
}
|
||||
|
||||
async function ensureEmptyOutputDirectory(outputDir: string): Promise<void> {
|
||||
try {
|
||||
const info = await stat(outputDir);
|
||||
if (!info.isDirectory()) {
|
||||
throw new Error(`Output path already exists and is not a directory: ${outputDir}`);
|
||||
}
|
||||
const entries = await readdir(outputDir);
|
||||
if (entries.length > 0) {
|
||||
throw new Error(`Output directory already exists and is not empty: ${outputDir}`);
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "";
|
||||
if (/ENOENT/.test(message)) {
|
||||
await mkdir(outputDir, { recursive: true });
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function collectJsonFilesForArchive(
|
||||
outputDir: string,
|
||||
relativePaths: string[],
|
||||
): Promise<Record<string, string>> {
|
||||
const files: Record<string, string> = {};
|
||||
for (const relativePath of relativePaths) {
|
||||
const normalized = relativePath.replace(/\\/g, "/");
|
||||
files[normalized] = await readFile(path.join(outputDir, normalized), "utf8");
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
function sanitizeFileSegment(value: string): string {
|
||||
return value.replace(/[^a-zA-Z0-9._-]+/g, "-").replace(/^-+|-+$/g, "") || "feedback";
|
||||
}
|
||||
|
||||
function writeUint16(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
}
|
||||
|
||||
function writeUint32(target: Uint8Array, offset: number, value: number) {
|
||||
target[offset] = value & 0xff;
|
||||
target[offset + 1] = (value >>> 8) & 0xff;
|
||||
target[offset + 2] = (value >>> 16) & 0xff;
|
||||
target[offset + 3] = (value >>> 24) & 0xff;
|
||||
}
|
||||
|
||||
function crc32(bytes: Uint8Array) {
|
||||
let crc = 0xffffffff;
|
||||
for (const byte of bytes) {
|
||||
crc ^= byte;
|
||||
for (let bit = 0; bit < 8; bit += 1) {
|
||||
crc = (crc & 1) === 1 ? (crc >>> 1) ^ 0xedb88320 : crc >>> 1;
|
||||
}
|
||||
}
|
||||
return (crc ^ 0xffffffff) >>> 0;
|
||||
}
|
||||
|
||||
function createStoredZipArchive(files: Record<string, string>, rootPath: string): Uint8Array {
|
||||
const encoder = new TextEncoder();
|
||||
const localChunks: Uint8Array[] = [];
|
||||
const centralChunks: Uint8Array[] = [];
|
||||
let localOffset = 0;
|
||||
let entryCount = 0;
|
||||
|
||||
for (const [relativePath, content] of Object.entries(files).sort(([left], [right]) => left.localeCompare(right))) {
|
||||
const fileName = encoder.encode(`${rootPath}/${relativePath}`);
|
||||
const body = encoder.encode(content);
|
||||
const checksum = crc32(body);
|
||||
|
||||
const localHeader = new Uint8Array(30 + fileName.length);
|
||||
writeUint32(localHeader, 0, 0x04034b50);
|
||||
writeUint16(localHeader, 4, 20);
|
||||
writeUint16(localHeader, 6, 0x0800);
|
||||
writeUint16(localHeader, 8, 0);
|
||||
writeUint32(localHeader, 14, checksum);
|
||||
writeUint32(localHeader, 18, body.length);
|
||||
writeUint32(localHeader, 22, body.length);
|
||||
writeUint16(localHeader, 26, fileName.length);
|
||||
localHeader.set(fileName, 30);
|
||||
|
||||
const centralHeader = new Uint8Array(46 + fileName.length);
|
||||
writeUint32(centralHeader, 0, 0x02014b50);
|
||||
writeUint16(centralHeader, 4, 20);
|
||||
writeUint16(centralHeader, 6, 20);
|
||||
writeUint16(centralHeader, 8, 0x0800);
|
||||
writeUint16(centralHeader, 10, 0);
|
||||
writeUint32(centralHeader, 16, checksum);
|
||||
writeUint32(centralHeader, 20, body.length);
|
||||
writeUint32(centralHeader, 24, body.length);
|
||||
writeUint16(centralHeader, 28, fileName.length);
|
||||
writeUint32(centralHeader, 42, localOffset);
|
||||
centralHeader.set(fileName, 46);
|
||||
|
||||
localChunks.push(localHeader, body);
|
||||
centralChunks.push(centralHeader);
|
||||
localOffset += localHeader.length + body.length;
|
||||
entryCount += 1;
|
||||
}
|
||||
|
||||
const centralDirectoryLength = centralChunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const archive = new Uint8Array(
|
||||
localChunks.reduce((sum, chunk) => sum + chunk.length, 0) + centralDirectoryLength + 22,
|
||||
);
|
||||
let offset = 0;
|
||||
for (const chunk of localChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
const centralDirectoryOffset = offset;
|
||||
for (const chunk of centralChunks) {
|
||||
archive.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
writeUint32(archive, offset, 0x06054b50);
|
||||
writeUint16(archive, offset + 8, entryCount);
|
||||
writeUint16(archive, offset + 10, entryCount);
|
||||
writeUint32(archive, offset + 12, centralDirectoryLength);
|
||||
writeUint32(archive, offset + 16, centralDirectoryOffset);
|
||||
return archive;
|
||||
}
|
||||
@@ -1,8 +1,10 @@
|
||||
import { Command } from "commander";
|
||||
import { writeFile } from "node:fs/promises";
|
||||
import {
|
||||
addIssueCommentSchema,
|
||||
checkoutIssueSchema,
|
||||
createIssueSchema,
|
||||
type FeedbackTrace,
|
||||
updateIssueSchema,
|
||||
type Issue,
|
||||
type IssueComment,
|
||||
@@ -15,6 +17,11 @@ import {
|
||||
resolveCommandContext,
|
||||
type BaseClientOptions,
|
||||
} from "./common.js";
|
||||
import {
|
||||
buildFeedbackTraceQuery,
|
||||
normalizeFeedbackTraceExportFormat,
|
||||
serializeFeedbackTraces,
|
||||
} from "./feedback.js";
|
||||
|
||||
interface IssueBaseOptions extends BaseClientOptions {
|
||||
status?: string;
|
||||
@@ -61,6 +68,18 @@ interface IssueCheckoutOptions extends BaseClientOptions {
|
||||
expectedStatuses?: string;
|
||||
}
|
||||
|
||||
interface IssueFeedbackOptions extends BaseClientOptions {
|
||||
targetType?: string;
|
||||
vote?: string;
|
||||
status?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
sharedOnly?: boolean;
|
||||
includePayload?: boolean;
|
||||
out?: string;
|
||||
format?: string;
|
||||
}
|
||||
|
||||
export function registerIssueCommands(program: Command): void {
|
||||
const issue = program.command("issue").description("Issue operations");
|
||||
|
||||
@@ -237,6 +256,85 @@ export function registerIssueCommands(program: Command): void {
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("feedback:list")
|
||||
.description("List feedback traces for an issue")
|
||||
.argument("<issueId>", "Issue ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the response")
|
||||
.action(async (issueId: string, opts: IssueFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/issues/${issueId}/feedback-traces${buildFeedbackTraceQuery(opts)}`,
|
||||
)) ?? [];
|
||||
if (ctx.json) {
|
||||
printOutput(traces, { json: true });
|
||||
return;
|
||||
}
|
||||
printOutput(
|
||||
traces.map((trace) => ({
|
||||
id: trace.id,
|
||||
issue: trace.issueIdentifier ?? trace.issueId,
|
||||
vote: trace.vote,
|
||||
status: trace.status,
|
||||
targetType: trace.targetType,
|
||||
target: trace.targetSummary.label,
|
||||
})),
|
||||
{ json: false },
|
||||
);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("feedback:export")
|
||||
.description("Export feedback traces for an issue")
|
||||
.argument("<issueId>", "Issue ID")
|
||||
.option("--target-type <type>", "Filter by target type")
|
||||
.option("--vote <vote>", "Filter by vote value")
|
||||
.option("--status <status>", "Filter by trace status")
|
||||
.option("--from <iso8601>", "Only include traces created at or after this timestamp")
|
||||
.option("--to <iso8601>", "Only include traces created at or before this timestamp")
|
||||
.option("--shared-only", "Only include traces eligible for sharing/export")
|
||||
.option("--include-payload", "Include stored payload snapshots in the export")
|
||||
.option("--out <path>", "Write export to a file path instead of stdout")
|
||||
.option("--format <format>", "Export format: json or ndjson", "ndjson")
|
||||
.action(async (issueId: string, opts: IssueFeedbackOptions) => {
|
||||
try {
|
||||
const ctx = resolveCommandContext(opts);
|
||||
const traces = (await ctx.api.get<FeedbackTrace[]>(
|
||||
`/api/issues/${issueId}/feedback-traces${buildFeedbackTraceQuery(opts, opts.includePayload ?? true)}`,
|
||||
)) ?? [];
|
||||
const serialized = serializeFeedbackTraces(traces, opts.format);
|
||||
if (opts.out?.trim()) {
|
||||
await writeFile(opts.out, serialized, "utf8");
|
||||
if (ctx.json) {
|
||||
printOutput(
|
||||
{ out: opts.out, count: traces.length, format: normalizeFeedbackTraceExportFormat(opts.format) },
|
||||
{ json: true },
|
||||
);
|
||||
return;
|
||||
}
|
||||
console.log(`Wrote ${traces.length} feedback trace(s) to ${opts.out}`);
|
||||
return;
|
||||
}
|
||||
process.stdout.write(`${serialized}${serialized.endsWith("\n") ? "" : "\n"}`);
|
||||
} catch (err) {
|
||||
handleCommandError(err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
addCommonClientOptions(
|
||||
issue
|
||||
.command("checkout")
|
||||
|
||||
129
cli/src/commands/client/zip.ts
Normal file
129
cli/src/commands/client/zip.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { inflateRawSync } from "node:zlib";
|
||||
import path from "node:path";
|
||||
import type { CompanyPortabilityFileEntry } from "@paperclipai/shared";
|
||||
|
||||
const textDecoder = new TextDecoder();
|
||||
|
||||
export const binaryContentTypeByExtension: Record<string, string> = {
|
||||
".gif": "image/gif",
|
||||
".jpeg": "image/jpeg",
|
||||
".jpg": "image/jpeg",
|
||||
".png": "image/png",
|
||||
".svg": "image/svg+xml",
|
||||
".webp": "image/webp",
|
||||
};
|
||||
|
||||
function normalizeArchivePath(pathValue: string) {
|
||||
return pathValue
|
||||
.replace(/\\/g, "/")
|
||||
.split("/")
|
||||
.filter(Boolean)
|
||||
.join("/");
|
||||
}
|
||||
|
||||
function readUint16(source: Uint8Array, offset: number) {
|
||||
return source[offset]! | (source[offset + 1]! << 8);
|
||||
}
|
||||
|
||||
function readUint32(source: Uint8Array, offset: number) {
|
||||
return (
|
||||
source[offset]! |
|
||||
(source[offset + 1]! << 8) |
|
||||
(source[offset + 2]! << 16) |
|
||||
(source[offset + 3]! << 24)
|
||||
) >>> 0;
|
||||
}
|
||||
|
||||
function sharedArchiveRoot(paths: string[]) {
|
||||
if (paths.length === 0) return null;
|
||||
const firstSegments = paths
|
||||
.map((entry) => normalizeArchivePath(entry).split("/").filter(Boolean))
|
||||
.filter((parts) => parts.length > 0);
|
||||
if (firstSegments.length === 0) return null;
|
||||
const candidate = firstSegments[0]![0]!;
|
||||
return firstSegments.every((parts) => parts.length > 1 && parts[0] === candidate)
|
||||
? candidate
|
||||
: null;
|
||||
}
|
||||
|
||||
function bytesToPortableFileEntry(pathValue: string, bytes: Uint8Array): CompanyPortabilityFileEntry {
|
||||
const contentType = binaryContentTypeByExtension[path.extname(pathValue).toLowerCase()];
|
||||
if (!contentType) return textDecoder.decode(bytes);
|
||||
return {
|
||||
encoding: "base64",
|
||||
data: Buffer.from(bytes).toString("base64"),
|
||||
contentType,
|
||||
};
|
||||
}
|
||||
|
||||
async function inflateZipEntry(compressionMethod: number, bytes: Uint8Array) {
|
||||
if (compressionMethod === 0) return bytes;
|
||||
if (compressionMethod !== 8) {
|
||||
throw new Error("Unsupported zip archive: only STORE and DEFLATE entries are supported.");
|
||||
}
|
||||
return new Uint8Array(inflateRawSync(bytes));
|
||||
}
|
||||
|
||||
export async function readZipArchive(source: ArrayBuffer | Uint8Array): Promise<{
|
||||
rootPath: string | null;
|
||||
files: Record<string, CompanyPortabilityFileEntry>;
|
||||
}> {
|
||||
const bytes = source instanceof Uint8Array ? source : new Uint8Array(source);
|
||||
const entries: Array<{ path: string; body: CompanyPortabilityFileEntry }> = [];
|
||||
let offset = 0;
|
||||
|
||||
while (offset + 4 <= bytes.length) {
|
||||
const signature = readUint32(bytes, offset);
|
||||
if (signature === 0x02014b50 || signature === 0x06054b50) break;
|
||||
if (signature !== 0x04034b50) {
|
||||
throw new Error("Invalid zip archive: unsupported local file header.");
|
||||
}
|
||||
|
||||
if (offset + 30 > bytes.length) {
|
||||
throw new Error("Invalid zip archive: truncated local file header.");
|
||||
}
|
||||
|
||||
const generalPurposeFlag = readUint16(bytes, offset + 6);
|
||||
const compressionMethod = readUint16(bytes, offset + 8);
|
||||
const compressedSize = readUint32(bytes, offset + 18);
|
||||
const fileNameLength = readUint16(bytes, offset + 26);
|
||||
const extraFieldLength = readUint16(bytes, offset + 28);
|
||||
|
||||
if ((generalPurposeFlag & 0x0008) !== 0) {
|
||||
throw new Error("Unsupported zip archive: data descriptors are not supported.");
|
||||
}
|
||||
|
||||
const nameOffset = offset + 30;
|
||||
const bodyOffset = nameOffset + fileNameLength + extraFieldLength;
|
||||
const bodyEnd = bodyOffset + compressedSize;
|
||||
if (bodyEnd > bytes.length) {
|
||||
throw new Error("Invalid zip archive: truncated file contents.");
|
||||
}
|
||||
|
||||
const rawArchivePath = textDecoder.decode(bytes.slice(nameOffset, nameOffset + fileNameLength));
|
||||
const archivePath = normalizeArchivePath(rawArchivePath);
|
||||
const isDirectoryEntry = /\/$/.test(rawArchivePath.replace(/\\/g, "/"));
|
||||
if (archivePath && !isDirectoryEntry) {
|
||||
const entryBytes = await inflateZipEntry(compressionMethod, bytes.slice(bodyOffset, bodyEnd));
|
||||
entries.push({
|
||||
path: archivePath,
|
||||
body: bytesToPortableFileEntry(archivePath, entryBytes),
|
||||
});
|
||||
}
|
||||
|
||||
offset = bodyEnd;
|
||||
}
|
||||
|
||||
const rootPath = sharedArchiveRoot(entries.map((entry) => entry.path));
|
||||
const files: Record<string, CompanyPortabilityFileEntry> = {};
|
||||
for (const entry of entries) {
|
||||
const normalizedPath =
|
||||
rootPath && entry.path.startsWith(`${rootPath}/`)
|
||||
? entry.path.slice(rootPath.length + 1)
|
||||
: entry.path;
|
||||
if (!normalizedPath) continue;
|
||||
files[normalizedPath] = entry.body;
|
||||
}
|
||||
|
||||
return { rootPath, files };
|
||||
}
|
||||
@@ -54,6 +54,7 @@ function defaultConfig(): PaperclipConfig {
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
bind: "loopback",
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
@@ -63,6 +64,9 @@ function defaultConfig(): PaperclipConfig {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage: defaultStorageConfig(),
|
||||
secrets: defaultSecretsConfig(),
|
||||
};
|
||||
|
||||
@@ -73,7 +73,7 @@ export async function dbBackupCommand(opts: DbBackupOptions): Promise<void> {
|
||||
const result = await runDatabaseBackup({
|
||||
connectionString: connection.value,
|
||||
backupDir,
|
||||
retentionDays,
|
||||
retention: { dailyDays: retentionDays, weeklyWeeks: 4, monthlyMonths: 1 },
|
||||
filenamePrefix,
|
||||
});
|
||||
spinner.stop(`Backup saved: ${formatDatabaseBackupResult(result)}`);
|
||||
|
||||
@@ -3,10 +3,14 @@ import path from "node:path";
|
||||
import pc from "picocolors";
|
||||
import {
|
||||
AUTH_BASE_URL_MODES,
|
||||
BIND_MODES,
|
||||
DEPLOYMENT_EXPOSURES,
|
||||
DEPLOYMENT_MODES,
|
||||
SECRET_PROVIDERS,
|
||||
STORAGE_PROVIDERS,
|
||||
inferBindModeFromHost,
|
||||
resolveRuntimeBind,
|
||||
type BindMode,
|
||||
type AuthBaseUrlMode,
|
||||
type DeploymentExposure,
|
||||
type DeploymentMode,
|
||||
@@ -23,6 +27,7 @@ import { promptLogging } from "../prompts/logging.js";
|
||||
import { defaultSecretsConfig } from "../prompts/secrets.js";
|
||||
import { defaultStorageConfig, promptStorage } from "../prompts/storage.js";
|
||||
import { promptServer } from "../prompts/server.js";
|
||||
import { buildPresetServerConfig } from "../config/server-bind.js";
|
||||
import {
|
||||
describeLocalInstancePaths,
|
||||
expandHomePrefix,
|
||||
@@ -33,6 +38,11 @@ import {
|
||||
} from "../config/home.js";
|
||||
import { bootstrapCeoInvite } from "./auth-bootstrap-ceo.js";
|
||||
import { printPaperclipCliBanner } from "../utils/banner.js";
|
||||
import {
|
||||
getTelemetryClient,
|
||||
trackInstallStarted,
|
||||
trackInstallCompleted,
|
||||
} from "../telemetry.js";
|
||||
|
||||
type SetupMode = "quickstart" | "advanced";
|
||||
|
||||
@@ -41,10 +51,14 @@ type OnboardOptions = {
|
||||
run?: boolean;
|
||||
yes?: boolean;
|
||||
invokedByRun?: boolean;
|
||||
bind?: BindMode;
|
||||
};
|
||||
|
||||
type OnboardDefaults = Pick<PaperclipConfig, "database" | "logging" | "server" | "auth" | "storage" | "secrets">;
|
||||
|
||||
const TAILNET_BIND_WARNING =
|
||||
"No Tailscale address was detected during setup. The saved config will stay on loopback until Tailscale is available or PAPERCLIP_TAILNET_BIND_HOST is set.";
|
||||
|
||||
const ONBOARD_ENV_KEYS = [
|
||||
"PAPERCLIP_PUBLIC_URL",
|
||||
"DATABASE_URL",
|
||||
@@ -54,6 +68,9 @@ const ONBOARD_ENV_KEYS = [
|
||||
"PAPERCLIP_DB_BACKUP_DIR",
|
||||
"PAPERCLIP_DEPLOYMENT_MODE",
|
||||
"PAPERCLIP_DEPLOYMENT_EXPOSURE",
|
||||
"PAPERCLIP_BIND",
|
||||
"PAPERCLIP_BIND_HOST",
|
||||
"PAPERCLIP_TAILNET_BIND_HOST",
|
||||
"HOST",
|
||||
"PORT",
|
||||
"SERVE_UI",
|
||||
@@ -99,29 +116,62 @@ function resolvePathFromEnv(rawValue: string | undefined): string | null {
|
||||
return path.resolve(expandHomePrefix(rawValue.trim()));
|
||||
}
|
||||
|
||||
function quickstartDefaultsFromEnv(): {
|
||||
function describeServerBinding(server: Pick<PaperclipConfig["server"], "bind" | "customBindHost" | "host" | "port">): string {
|
||||
const bind = server.bind ?? inferBindModeFromHost(server.host);
|
||||
const detail =
|
||||
bind === "custom"
|
||||
? server.customBindHost ?? server.host
|
||||
: bind === "tailnet"
|
||||
? "detected tailscale address"
|
||||
: server.host;
|
||||
return `${bind}${detail ? ` (${detail})` : ""}:${server.port}`;
|
||||
}
|
||||
|
||||
function quickstartDefaultsFromEnv(opts?: { preferTrustedLocal?: boolean }): {
|
||||
defaults: OnboardDefaults;
|
||||
usedEnvKeys: string[];
|
||||
ignoredEnvKeys: Array<{ key: string; reason: string }>;
|
||||
} {
|
||||
const preferTrustedLocal = opts?.preferTrustedLocal ?? false;
|
||||
const instanceId = resolvePaperclipInstanceId();
|
||||
const defaultStorage = defaultStorageConfig();
|
||||
const defaultSecrets = defaultSecretsConfig();
|
||||
const databaseUrl = process.env.DATABASE_URL?.trim() || undefined;
|
||||
const publicUrl =
|
||||
process.env.PAPERCLIP_PUBLIC_URL?.trim() ||
|
||||
process.env.PAPERCLIP_AUTH_PUBLIC_BASE_URL?.trim() ||
|
||||
process.env.BETTER_AUTH_URL?.trim() ||
|
||||
process.env.BETTER_AUTH_BASE_URL?.trim() ||
|
||||
undefined;
|
||||
const deploymentMode =
|
||||
parseEnumFromEnv<DeploymentMode>(process.env.PAPERCLIP_DEPLOYMENT_MODE, DEPLOYMENT_MODES) ?? "local_trusted";
|
||||
const publicUrl = preferTrustedLocal
|
||||
? undefined
|
||||
: (
|
||||
process.env.PAPERCLIP_PUBLIC_URL?.trim() ||
|
||||
process.env.PAPERCLIP_AUTH_PUBLIC_BASE_URL?.trim() ||
|
||||
process.env.BETTER_AUTH_URL?.trim() ||
|
||||
process.env.BETTER_AUTH_BASE_URL?.trim() ||
|
||||
undefined
|
||||
);
|
||||
const deploymentMode = preferTrustedLocal
|
||||
? "local_trusted"
|
||||
: (parseEnumFromEnv<DeploymentMode>(process.env.PAPERCLIP_DEPLOYMENT_MODE, DEPLOYMENT_MODES) ?? "local_trusted");
|
||||
const deploymentExposureFromEnv = parseEnumFromEnv<DeploymentExposure>(
|
||||
process.env.PAPERCLIP_DEPLOYMENT_EXPOSURE,
|
||||
DEPLOYMENT_EXPOSURES,
|
||||
);
|
||||
const deploymentExposure =
|
||||
deploymentMode === "local_trusted" ? "private" : (deploymentExposureFromEnv ?? "private");
|
||||
const bindFromEnv = parseEnumFromEnv<BindMode>(process.env.PAPERCLIP_BIND, BIND_MODES);
|
||||
const customBindHostFromEnv = process.env.PAPERCLIP_BIND_HOST?.trim() || undefined;
|
||||
const hostFromEnv = process.env.HOST?.trim() || undefined;
|
||||
const configuredBindHost = customBindHostFromEnv ?? hostFromEnv;
|
||||
const bind = preferTrustedLocal
|
||||
? "loopback"
|
||||
: (
|
||||
deploymentMode === "local_trusted"
|
||||
? "loopback"
|
||||
: (bindFromEnv ?? (configuredBindHost ? inferBindModeFromHost(configuredBindHost) : "lan"))
|
||||
);
|
||||
const resolvedBind = resolveRuntimeBind({
|
||||
bind,
|
||||
host: hostFromEnv ?? (bind === "loopback" ? "127.0.0.1" : "0.0.0.0"),
|
||||
customBindHost: customBindHostFromEnv,
|
||||
tailnetBindHost: process.env.PAPERCLIP_TAILNET_BIND_HOST?.trim(),
|
||||
});
|
||||
const authPublicBaseUrl = publicUrl;
|
||||
const authBaseUrlModeFromEnv = parseEnumFromEnv<AuthBaseUrlMode>(
|
||||
process.env.PAPERCLIP_AUTH_BASE_URL_MODE,
|
||||
@@ -178,7 +228,9 @@ function quickstartDefaultsFromEnv(): {
|
||||
server: {
|
||||
deploymentMode,
|
||||
exposure: deploymentExposure,
|
||||
host: process.env.HOST ?? "127.0.0.1",
|
||||
bind: resolvedBind.bind,
|
||||
...(resolvedBind.customBindHost ? { customBindHost: resolvedBind.customBindHost } : {}),
|
||||
host: resolvedBind.host,
|
||||
port: Number(process.env.PORT) || 3100,
|
||||
allowedHostnames: Array.from(new Set([...allowedHostnamesFromEnv, ...(hostnameFromPublicUrl ? [hostnameFromPublicUrl] : [])])),
|
||||
serveUi: parseBooleanFromEnv(process.env.SERVE_UI) ?? true,
|
||||
@@ -215,12 +267,49 @@ function quickstartDefaultsFromEnv(): {
|
||||
},
|
||||
};
|
||||
const ignoredEnvKeys: Array<{ key: string; reason: string }> = [];
|
||||
if (preferTrustedLocal) {
|
||||
const forcedLocalReason = "Ignored because --yes quickstart forces trusted local loopback defaults";
|
||||
for (const key of [
|
||||
"PAPERCLIP_DEPLOYMENT_MODE",
|
||||
"PAPERCLIP_DEPLOYMENT_EXPOSURE",
|
||||
"PAPERCLIP_BIND",
|
||||
"PAPERCLIP_BIND_HOST",
|
||||
"HOST",
|
||||
"PAPERCLIP_AUTH_BASE_URL_MODE",
|
||||
"PAPERCLIP_AUTH_PUBLIC_BASE_URL",
|
||||
"PAPERCLIP_PUBLIC_URL",
|
||||
"BETTER_AUTH_URL",
|
||||
"BETTER_AUTH_BASE_URL",
|
||||
] as const) {
|
||||
if (process.env[key] !== undefined) {
|
||||
ignoredEnvKeys.push({ key, reason: forcedLocalReason });
|
||||
}
|
||||
}
|
||||
}
|
||||
if (deploymentMode === "local_trusted" && process.env.PAPERCLIP_DEPLOYMENT_EXPOSURE !== undefined) {
|
||||
ignoredEnvKeys.push({
|
||||
key: "PAPERCLIP_DEPLOYMENT_EXPOSURE",
|
||||
reason: "Ignored because deployment mode local_trusted always forces private exposure",
|
||||
});
|
||||
}
|
||||
if (deploymentMode === "local_trusted" && process.env.PAPERCLIP_BIND !== undefined) {
|
||||
ignoredEnvKeys.push({
|
||||
key: "PAPERCLIP_BIND",
|
||||
reason: "Ignored because deployment mode local_trusted always uses loopback reachability",
|
||||
});
|
||||
}
|
||||
if (deploymentMode === "local_trusted" && process.env.PAPERCLIP_BIND_HOST !== undefined) {
|
||||
ignoredEnvKeys.push({
|
||||
key: "PAPERCLIP_BIND_HOST",
|
||||
reason: "Ignored because deployment mode local_trusted always uses loopback reachability",
|
||||
});
|
||||
}
|
||||
if (deploymentMode === "local_trusted" && process.env.HOST !== undefined) {
|
||||
ignoredEnvKeys.push({
|
||||
key: "HOST",
|
||||
reason: "Ignored because deployment mode local_trusted always uses loopback reachability",
|
||||
});
|
||||
}
|
||||
|
||||
const ignoredKeySet = new Set(ignoredEnvKeys.map((entry) => entry.key));
|
||||
const usedEnvKeys = ONBOARD_ENV_KEYS.filter(
|
||||
@@ -234,6 +323,10 @@ function canCreateBootstrapInviteImmediately(config: Pick<PaperclipConfig, "data
|
||||
}
|
||||
|
||||
export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
if (opts.bind && !["loopback", "lan", "tailnet"].includes(opts.bind)) {
|
||||
throw new Error(`Unsupported bind preset for onboard: ${opts.bind}. Use loopback, lan, or tailnet.`);
|
||||
}
|
||||
|
||||
printPaperclipCliBanner();
|
||||
p.intro(pc.bgCyan(pc.black(" paperclipai onboard ")));
|
||||
const configPath = resolveConfigPath(opts.config);
|
||||
@@ -244,11 +337,12 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
),
|
||||
);
|
||||
|
||||
let existingConfig: PaperclipConfig | null = null;
|
||||
if (configExists(opts.config)) {
|
||||
p.log.message(pc.dim(`${configPath} exists, updating config`));
|
||||
p.log.message(pc.dim(`${configPath} exists`));
|
||||
|
||||
try {
|
||||
readConfig(opts.config);
|
||||
existingConfig = readConfig(opts.config);
|
||||
} catch (err) {
|
||||
p.log.message(
|
||||
pc.yellow(
|
||||
@@ -258,9 +352,85 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
}
|
||||
}
|
||||
|
||||
if (existingConfig) {
|
||||
p.log.message(
|
||||
pc.dim("Existing Paperclip install detected; keeping the current configuration unchanged."),
|
||||
);
|
||||
p.log.message(pc.dim(`Use ${pc.cyan("paperclipai configure")} if you want to change settings.`));
|
||||
|
||||
const jwtSecret = ensureAgentJwtSecret(configPath);
|
||||
const envFilePath = resolveAgentJwtEnvFile(configPath);
|
||||
if (jwtSecret.created) {
|
||||
p.log.success(`Created ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} in ${pc.dim(envFilePath)}`);
|
||||
} else if (process.env.PAPERCLIP_AGENT_JWT_SECRET?.trim()) {
|
||||
p.log.info(`Using existing ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} from environment`);
|
||||
} else {
|
||||
p.log.info(`Using existing ${pc.cyan("PAPERCLIP_AGENT_JWT_SECRET")} in ${pc.dim(envFilePath)}`);
|
||||
}
|
||||
|
||||
const keyResult = ensureLocalSecretsKeyFile(existingConfig, configPath);
|
||||
if (keyResult.status === "created") {
|
||||
p.log.success(`Created local secrets key file at ${pc.dim(keyResult.path)}`);
|
||||
} else if (keyResult.status === "existing") {
|
||||
p.log.message(pc.dim(`Using existing local secrets key file at ${keyResult.path}`));
|
||||
}
|
||||
|
||||
p.note(
|
||||
[
|
||||
"Existing config preserved",
|
||||
`Database: ${existingConfig.database.mode}`,
|
||||
existingConfig.llm ? `LLM: ${existingConfig.llm.provider}` : "LLM: not configured",
|
||||
`Logging: ${existingConfig.logging.mode} -> ${existingConfig.logging.logDir}`,
|
||||
`Server: ${existingConfig.server.deploymentMode}/${existingConfig.server.exposure} @ ${describeServerBinding(existingConfig.server)}`,
|
||||
`Allowed hosts: ${existingConfig.server.allowedHostnames.length > 0 ? existingConfig.server.allowedHostnames.join(", ") : "(loopback only)"}`,
|
||||
`Auth URL mode: ${existingConfig.auth.baseUrlMode}${existingConfig.auth.publicBaseUrl ? ` (${existingConfig.auth.publicBaseUrl})` : ""}`,
|
||||
`Storage: ${existingConfig.storage.provider}`,
|
||||
`Secrets: ${existingConfig.secrets.provider} (strict mode ${existingConfig.secrets.strictMode ? "on" : "off"})`,
|
||||
"Agent auth: PAPERCLIP_AGENT_JWT_SECRET configured",
|
||||
].join("\n"),
|
||||
"Configuration ready",
|
||||
);
|
||||
|
||||
p.note(
|
||||
[
|
||||
`Run: ${pc.cyan("paperclipai run")}`,
|
||||
`Reconfigure later: ${pc.cyan("paperclipai configure")}`,
|
||||
`Diagnose setup: ${pc.cyan("paperclipai doctor")}`,
|
||||
].join("\n"),
|
||||
"Next commands",
|
||||
);
|
||||
|
||||
let shouldRunNow = opts.run === true || opts.yes === true;
|
||||
if (!shouldRunNow && !opts.invokedByRun && process.stdin.isTTY && process.stdout.isTTY) {
|
||||
const answer = await p.confirm({
|
||||
message: "Start Paperclip now?",
|
||||
initialValue: true,
|
||||
});
|
||||
if (!p.isCancel(answer)) {
|
||||
shouldRunNow = answer;
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldRunNow && !opts.invokedByRun) {
|
||||
process.env.PAPERCLIP_OPEN_ON_LISTEN = "true";
|
||||
const { runCommand } = await import("./run.js");
|
||||
await runCommand({ config: configPath, repair: true, yes: true });
|
||||
return;
|
||||
}
|
||||
|
||||
p.outro("Existing Paperclip setup is ready.");
|
||||
return;
|
||||
}
|
||||
|
||||
let setupMode: SetupMode = "quickstart";
|
||||
if (opts.yes) {
|
||||
p.log.message(pc.dim("`--yes` enabled: using Quickstart defaults."));
|
||||
p.log.message(
|
||||
pc.dim(
|
||||
opts.bind
|
||||
? `\`--yes\` enabled: using Quickstart defaults with bind=${opts.bind}.`
|
||||
: "`--yes` enabled: using Quickstart defaults.",
|
||||
),
|
||||
);
|
||||
} else {
|
||||
const setupModeChoice = await p.select({
|
||||
message: "Choose setup path",
|
||||
@@ -285,8 +455,13 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
setupMode = setupModeChoice as SetupMode;
|
||||
}
|
||||
|
||||
const tc = getTelemetryClient();
|
||||
if (tc) trackInstallStarted(tc);
|
||||
|
||||
let llm: PaperclipConfig["llm"] | undefined;
|
||||
const { defaults: derivedDefaults, usedEnvKeys, ignoredEnvKeys } = quickstartDefaultsFromEnv();
|
||||
const { defaults: derivedDefaults, usedEnvKeys, ignoredEnvKeys } = quickstartDefaultsFromEnv({
|
||||
preferTrustedLocal: opts.yes === true && !opts.bind,
|
||||
});
|
||||
let {
|
||||
database,
|
||||
logging,
|
||||
@@ -296,6 +471,19 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
secrets,
|
||||
} = derivedDefaults;
|
||||
|
||||
if (opts.bind === "loopback" || opts.bind === "lan" || opts.bind === "tailnet") {
|
||||
const preset = buildPresetServerConfig(opts.bind, {
|
||||
port: server.port,
|
||||
allowedHostnames: server.allowedHostnames,
|
||||
serveUi: server.serveUi,
|
||||
});
|
||||
server = preset.server;
|
||||
auth = preset.auth;
|
||||
if (opts.bind === "tailnet" && server.host === "127.0.0.1") {
|
||||
p.log.warn(TAILNET_BIND_WARNING);
|
||||
}
|
||||
}
|
||||
|
||||
if (setupMode === "advanced") {
|
||||
p.log.step(pc.bold("Database"));
|
||||
database = await promptDatabase(database);
|
||||
@@ -383,7 +571,13 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
);
|
||||
} else {
|
||||
p.log.step(pc.bold("Quickstart"));
|
||||
p.log.message(pc.dim("Using quickstart defaults."));
|
||||
p.log.message(
|
||||
pc.dim(
|
||||
opts.bind
|
||||
? `Using quickstart defaults with bind=${opts.bind}.`
|
||||
: `Using quickstart defaults: ${server.deploymentMode}/${server.exposure} @ ${describeServerBinding(server)}.`,
|
||||
),
|
||||
);
|
||||
if (usedEnvKeys.length > 0) {
|
||||
p.log.message(pc.dim(`Environment-aware defaults active (${usedEnvKeys.length} env var(s) detected).`));
|
||||
} else {
|
||||
@@ -417,6 +611,9 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
logging,
|
||||
server,
|
||||
auth,
|
||||
telemetry: {
|
||||
enabled: true,
|
||||
},
|
||||
storage,
|
||||
secrets,
|
||||
};
|
||||
@@ -430,12 +627,16 @@ export async function onboard(opts: OnboardOptions): Promise<void> {
|
||||
|
||||
writeConfig(config, opts.config);
|
||||
|
||||
if (tc) trackInstallCompleted(tc, {
|
||||
adapterType: server.deploymentMode,
|
||||
});
|
||||
|
||||
p.note(
|
||||
[
|
||||
`Database: ${database.mode}`,
|
||||
llm ? `LLM: ${llm.provider}` : "LLM: not configured",
|
||||
`Logging: ${logging.mode} -> ${logging.logDir}`,
|
||||
`Server: ${server.deploymentMode}/${server.exposure} @ ${server.host}:${server.port}`,
|
||||
`Server: ${server.deploymentMode}/${server.exposure} @ ${describeServerBinding(server)}`,
|
||||
`Allowed hosts: ${server.allowedHostnames.length > 0 ? server.allowedHostnames.join(", ") : "(loopback only)"}`,
|
||||
`Auth URL mode: ${auth.baseUrlMode}${auth.publicBaseUrl ? ` (${auth.publicBaseUrl})` : ""}`,
|
||||
`Storage: ${storage.provider}`,
|
||||
|
||||
352
cli/src/commands/routines.ts
Normal file
352
cli/src/commands/routines.ts
Normal file
@@ -0,0 +1,352 @@
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import path from "node:path";
|
||||
import { Command } from "commander";
|
||||
import pc from "picocolors";
|
||||
import {
|
||||
applyPendingMigrations,
|
||||
createDb,
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
ensurePostgresDatabase,
|
||||
formatEmbeddedPostgresError,
|
||||
routines,
|
||||
} from "@paperclipai/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { loadPaperclipEnvFile } from "../config/env.js";
|
||||
import { readConfig, resolveConfigPath } from "../config/store.js";
|
||||
|
||||
type RoutinesDisableAllOptions = {
|
||||
config?: string;
|
||||
dataDir?: string;
|
||||
companyId?: string;
|
||||
json?: boolean;
|
||||
};
|
||||
|
||||
type DisableAllRoutinesResult = {
|
||||
companyId: string;
|
||||
totalRoutines: number;
|
||||
pausedCount: number;
|
||||
alreadyPausedCount: number;
|
||||
archivedCount: number;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
type EmbeddedPostgresHandle = {
|
||||
port: number;
|
||||
startedByThisProcess: boolean;
|
||||
stop: () => Promise<void>;
|
||||
};
|
||||
|
||||
type ClosableDb = ReturnType<typeof createDb> & {
|
||||
$client?: {
|
||||
end?: (options?: { timeout?: number }) => Promise<void>;
|
||||
};
|
||||
};
|
||||
|
||||
function nonEmpty(value: string | null | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
async function isPortAvailable(port: number): Promise<boolean> {
|
||||
return await new Promise<boolean>((resolve) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.once("error", () => resolve(false));
|
||||
server.listen(port, "127.0.0.1", () => {
|
||||
server.close(() => resolve(true));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function findAvailablePort(preferredPort: number): Promise<number> {
|
||||
let port = Math.max(1, Math.trunc(preferredPort));
|
||||
while (!(await isPortAvailable(port))) {
|
||||
port += 1;
|
||||
}
|
||||
return port;
|
||||
}
|
||||
|
||||
function readPidFilePort(postmasterPidFile: string): number | null {
|
||||
if (!fs.existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
const lines = fs.readFileSync(postmasterPidFile, "utf8").split("\n");
|
||||
const port = Number(lines[3]?.trim());
|
||||
return Number.isInteger(port) && port > 0 ? port : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function readRunningPostmasterPid(postmasterPidFile: string): number | null {
|
||||
if (!fs.existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
const pid = Number(fs.readFileSync(postmasterPidFile, "utf8").split("\n")[0]?.trim());
|
||||
if (!Number.isInteger(pid) || pid <= 0) return null;
|
||||
process.kill(pid, 0);
|
||||
return pid;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): Promise<EmbeddedPostgresHandle> {
|
||||
const moduleName = "embedded-postgres";
|
||||
let EmbeddedPostgres: EmbeddedPostgresCtor;
|
||||
try {
|
||||
const mod = await import(moduleName);
|
||||
EmbeddedPostgres = mod.default as EmbeddedPostgresCtor;
|
||||
} catch {
|
||||
throw new Error(
|
||||
"Embedded PostgreSQL support requires dependency `embedded-postgres`. Reinstall dependencies and try again.",
|
||||
);
|
||||
}
|
||||
|
||||
const postmasterPidFile = path.resolve(dataDir, "postmaster.pid");
|
||||
const runningPid = readRunningPostmasterPid(postmasterPidFile);
|
||||
if (runningPid) {
|
||||
return {
|
||||
port: readPidFilePort(postmasterPidFile) ?? preferredPort,
|
||||
startedByThisProcess: false,
|
||||
stop: async () => {},
|
||||
};
|
||||
}
|
||||
|
||||
const port = await findAvailablePort(preferredPort);
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!fs.existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (fs.existsSync(postmasterPidFile)) {
|
||||
fs.rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
port,
|
||||
startedByThisProcess: true,
|
||||
stop: async () => {
|
||||
await instance.stop();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function closeDb(db: ClosableDb): Promise<void> {
|
||||
await db.$client?.end?.({ timeout: 5 }).catch(() => undefined);
|
||||
}
|
||||
|
||||
async function openConfiguredDb(configPath: string): Promise<{
|
||||
db: ClosableDb;
|
||||
stop: () => Promise<void>;
|
||||
}> {
|
||||
const config = readConfig(configPath);
|
||||
if (!config) {
|
||||
throw new Error(`Config not found at ${configPath}.`);
|
||||
}
|
||||
|
||||
let embeddedHandle: EmbeddedPostgresHandle | null = null;
|
||||
try {
|
||||
if (config.database.mode === "embedded-postgres") {
|
||||
embeddedHandle = await ensureEmbeddedPostgres(
|
||||
config.database.embeddedPostgresDataDir,
|
||||
config.database.embeddedPostgresPort,
|
||||
);
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
const db = createDb(connectionString) as ClosableDb;
|
||||
return {
|
||||
db,
|
||||
stop: async () => {
|
||||
await closeDb(db);
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const connectionString = nonEmpty(config.database.connectionString);
|
||||
if (!connectionString) {
|
||||
throw new Error(`Config at ${configPath} does not define a database connection string.`);
|
||||
}
|
||||
|
||||
await applyPendingMigrations(connectionString);
|
||||
const db = createDb(connectionString) as ClosableDb;
|
||||
return {
|
||||
db,
|
||||
stop: async () => {
|
||||
await closeDb(db);
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function disableAllRoutinesInConfig(
|
||||
options: Pick<RoutinesDisableAllOptions, "config" | "companyId">,
|
||||
): Promise<DisableAllRoutinesResult> {
|
||||
const configPath = resolveConfigPath(options.config);
|
||||
loadPaperclipEnvFile(configPath);
|
||||
const companyId =
|
||||
nonEmpty(options.companyId)
|
||||
?? nonEmpty(process.env.PAPERCLIP_COMPANY_ID)
|
||||
?? null;
|
||||
if (!companyId) {
|
||||
throw new Error("Company ID is required. Pass --company-id or set PAPERCLIP_COMPANY_ID.");
|
||||
}
|
||||
|
||||
const config = readConfig(configPath);
|
||||
if (!config) {
|
||||
throw new Error(`Config not found at ${configPath}.`);
|
||||
}
|
||||
|
||||
let embeddedHandle: EmbeddedPostgresHandle | null = null;
|
||||
let db: ClosableDb | null = null;
|
||||
try {
|
||||
if (config.database.mode === "embedded-postgres") {
|
||||
embeddedHandle = await ensureEmbeddedPostgres(
|
||||
config.database.embeddedPostgresDataDir,
|
||||
config.database.embeddedPostgresPort,
|
||||
);
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${embeddedHandle.port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
db = createDb(connectionString) as ClosableDb;
|
||||
} else {
|
||||
const connectionString = nonEmpty(config.database.connectionString);
|
||||
if (!connectionString) {
|
||||
throw new Error(`Config at ${configPath} does not define a database connection string.`);
|
||||
}
|
||||
await applyPendingMigrations(connectionString);
|
||||
db = createDb(connectionString) as ClosableDb;
|
||||
}
|
||||
|
||||
const existing = await db
|
||||
.select({
|
||||
id: routines.id,
|
||||
status: routines.status,
|
||||
})
|
||||
.from(routines)
|
||||
.where(eq(routines.companyId, companyId));
|
||||
|
||||
const alreadyPausedCount = existing.filter((routine) => routine.status === "paused").length;
|
||||
const archivedCount = existing.filter((routine) => routine.status === "archived").length;
|
||||
const idsToPause = existing
|
||||
.filter((routine) => routine.status !== "paused" && routine.status !== "archived")
|
||||
.map((routine) => routine.id);
|
||||
|
||||
if (idsToPause.length > 0) {
|
||||
await db
|
||||
.update(routines)
|
||||
.set({
|
||||
status: "paused",
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(inArray(routines.id, idsToPause));
|
||||
}
|
||||
|
||||
return {
|
||||
companyId,
|
||||
totalRoutines: existing.length,
|
||||
pausedCount: idsToPause.length,
|
||||
alreadyPausedCount,
|
||||
archivedCount,
|
||||
};
|
||||
} finally {
|
||||
if (db) {
|
||||
await closeDb(db);
|
||||
}
|
||||
if (embeddedHandle?.startedByThisProcess) {
|
||||
await embeddedHandle.stop().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function disableAllRoutinesCommand(options: RoutinesDisableAllOptions): Promise<void> {
|
||||
const result = await disableAllRoutinesInConfig(options);
|
||||
|
||||
if (options.json) {
|
||||
console.log(JSON.stringify(result, null, 2));
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.totalRoutines === 0) {
|
||||
console.log(pc.dim(`No routines found for company ${result.companyId}.`));
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Paused ${result.pausedCount} routine(s) for company ${result.companyId} ` +
|
||||
`(${result.alreadyPausedCount} already paused, ${result.archivedCount} archived).`,
|
||||
);
|
||||
}
|
||||
|
||||
export function registerRoutineCommands(program: Command): void {
|
||||
const routinesCommand = program.command("routines").description("Local routine maintenance commands");
|
||||
|
||||
routinesCommand
|
||||
.command("disable-all")
|
||||
.description("Pause all non-archived routines in the configured local instance for one company")
|
||||
.option("-c, --config <path>", "Path to config file")
|
||||
.option("-d, --data-dir <path>", "Paperclip data directory root (isolates state from ~/.paperclip)")
|
||||
.option("-C, --company-id <id>", "Company ID")
|
||||
.option("--json", "Output raw JSON")
|
||||
.action(async (opts: RoutinesDisableAllOptions) => {
|
||||
try {
|
||||
await disableAllRoutinesCommand(opts);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(pc.red(message));
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
import * as p from "@clack/prompts";
|
||||
import pc from "picocolors";
|
||||
@@ -21,6 +22,7 @@ interface RunOptions {
|
||||
instance?: string;
|
||||
repair?: boolean;
|
||||
yes?: boolean;
|
||||
bind?: "loopback" | "lan" | "tailnet";
|
||||
}
|
||||
|
||||
interface StartedServer {
|
||||
@@ -57,7 +59,7 @@ export async function runCommand(opts: RunOptions): Promise<void> {
|
||||
}
|
||||
|
||||
p.log.step("No config found. Starting onboarding...");
|
||||
await onboard({ config: configPath, invokedByRun: true });
|
||||
await onboard({ config: configPath, invokedByRun: true, bind: opts.bind });
|
||||
}
|
||||
|
||||
p.log.step("Running doctor checks...");
|
||||
@@ -146,11 +148,35 @@ function maybeEnableUiDevMiddleware(entrypoint: string): void {
|
||||
}
|
||||
}
|
||||
|
||||
function ensureDevWorkspaceBuildDeps(projectRoot: string): void {
|
||||
const buildScript = path.resolve(projectRoot, "scripts/ensure-plugin-build-deps.mjs");
|
||||
if (!fs.existsSync(buildScript)) return;
|
||||
|
||||
const result = spawnSync(process.execPath, [buildScript], {
|
||||
cwd: projectRoot,
|
||||
stdio: "inherit",
|
||||
timeout: 120_000,
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
throw new Error(
|
||||
`Failed to prepare workspace build artifacts before starting the Paperclip dev server.\n${formatError(result.error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
if ((result.status ?? 1) !== 0) {
|
||||
throw new Error(
|
||||
"Failed to prepare workspace build artifacts before starting the Paperclip dev server.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function importServerEntry(): Promise<StartedServer> {
|
||||
// Dev mode: try local workspace path (monorepo with tsx)
|
||||
const projectRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
|
||||
const devEntry = path.resolve(projectRoot, "server/src/index.ts");
|
||||
if (fs.existsSync(devEntry)) {
|
||||
ensureDevWorkspaceBuildDeps(projectRoot);
|
||||
maybeEnableUiDevMiddleware(devEntry);
|
||||
const mod = await import(pathToFileURL(devEntry).href);
|
||||
return await startServerFromModule(mod, devEntry);
|
||||
|
||||
@@ -214,6 +214,8 @@ export function buildWorktreeConfig(input: {
|
||||
server: {
|
||||
deploymentMode: source?.server.deploymentMode ?? "local_trusted",
|
||||
exposure: source?.server.exposure ?? "private",
|
||||
...(source?.server.bind ? { bind: source.server.bind } : {}),
|
||||
...(source?.server.customBindHost ? { customBindHost: source.server.customBindHost } : {}),
|
||||
host: source?.server.host ?? "127.0.0.1",
|
||||
port: serverPort,
|
||||
allowedHostnames: source?.server.allowedHostnames ?? [],
|
||||
@@ -224,6 +226,9 @@ export function buildWorktreeConfig(input: {
|
||||
...(authPublicBaseUrl ? { publicBaseUrl: authPublicBaseUrl } : {}),
|
||||
disableSignUp: source?.auth.disableSignUp ?? false,
|
||||
},
|
||||
telemetry: {
|
||||
enabled: source?.telemetry?.enabled ?? true,
|
||||
},
|
||||
storage: {
|
||||
provider: source?.storage.provider ?? "local_disk",
|
||||
localDisk: {
|
||||
|
||||
764
cli/src/commands/worktree-merge-history-lib.ts
Normal file
764
cli/src/commands/worktree-merge-history-lib.ts
Normal file
@@ -0,0 +1,764 @@
|
||||
import {
|
||||
agents,
|
||||
assets,
|
||||
documentRevisions,
|
||||
goals,
|
||||
issueAttachments,
|
||||
issueComments,
|
||||
issueDocuments,
|
||||
issues,
|
||||
projects,
|
||||
projectWorkspaces,
|
||||
} from "@paperclipai/db";
|
||||
|
||||
type IssueRow = typeof issues.$inferSelect;
|
||||
type CommentRow = typeof issueComments.$inferSelect;
|
||||
type AgentRow = typeof agents.$inferSelect;
|
||||
type ProjectRow = typeof projects.$inferSelect;
|
||||
type ProjectWorkspaceRow = typeof projectWorkspaces.$inferSelect;
|
||||
type GoalRow = typeof goals.$inferSelect;
|
||||
type IssueDocumentLinkRow = typeof issueDocuments.$inferSelect;
|
||||
type DocumentRevisionTableRow = typeof documentRevisions.$inferSelect;
|
||||
type IssueAttachmentTableRow = typeof issueAttachments.$inferSelect;
|
||||
type AssetRow = typeof assets.$inferSelect;
|
||||
|
||||
export const WORKTREE_MERGE_SCOPES = ["issues", "comments"] as const;
|
||||
export type WorktreeMergeScope = (typeof WORKTREE_MERGE_SCOPES)[number];
|
||||
|
||||
export type ImportAdjustment =
|
||||
| "clear_assignee_agent"
|
||||
| "clear_project"
|
||||
| "clear_project_workspace"
|
||||
| "clear_goal"
|
||||
| "clear_author_agent"
|
||||
| "coerce_in_progress_to_todo"
|
||||
| "clear_document_agent"
|
||||
| "clear_document_revision_agent"
|
||||
| "clear_attachment_agent";
|
||||
|
||||
export type IssueMergeAction = "skip_existing" | "insert";
|
||||
export type CommentMergeAction = "skip_existing" | "skip_missing_parent" | "insert";
|
||||
|
||||
export type PlannedIssueInsert = {
|
||||
source: IssueRow;
|
||||
action: "insert";
|
||||
previewIssueNumber: number;
|
||||
previewIdentifier: string;
|
||||
targetStatus: string;
|
||||
targetAssigneeAgentId: string | null;
|
||||
targetCreatedByAgentId: string | null;
|
||||
targetProjectId: string | null;
|
||||
targetProjectWorkspaceId: string | null;
|
||||
targetGoalId: string | null;
|
||||
projectResolution: "preserved" | "cleared" | "mapped" | "imported";
|
||||
mappedProjectName: string | null;
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedIssueSkip = {
|
||||
source: IssueRow;
|
||||
action: "skip_existing";
|
||||
driftKeys: string[];
|
||||
};
|
||||
|
||||
export type PlannedCommentInsert = {
|
||||
source: CommentRow;
|
||||
action: "insert";
|
||||
targetAuthorAgentId: string | null;
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedCommentSkip = {
|
||||
source: CommentRow;
|
||||
action: "skip_existing" | "skip_missing_parent";
|
||||
};
|
||||
|
||||
export type IssueDocumentRow = {
|
||||
id: IssueDocumentLinkRow["id"];
|
||||
companyId: IssueDocumentLinkRow["companyId"];
|
||||
issueId: IssueDocumentLinkRow["issueId"];
|
||||
documentId: IssueDocumentLinkRow["documentId"];
|
||||
key: IssueDocumentLinkRow["key"];
|
||||
linkCreatedAt: IssueDocumentLinkRow["createdAt"];
|
||||
linkUpdatedAt: IssueDocumentLinkRow["updatedAt"];
|
||||
title: string | null;
|
||||
format: string;
|
||||
latestBody: string;
|
||||
latestRevisionId: string | null;
|
||||
latestRevisionNumber: number;
|
||||
createdByAgentId: string | null;
|
||||
createdByUserId: string | null;
|
||||
updatedByAgentId: string | null;
|
||||
updatedByUserId: string | null;
|
||||
documentCreatedAt: Date;
|
||||
documentUpdatedAt: Date;
|
||||
};
|
||||
|
||||
export type DocumentRevisionRow = {
|
||||
id: DocumentRevisionTableRow["id"];
|
||||
companyId: DocumentRevisionTableRow["companyId"];
|
||||
documentId: DocumentRevisionTableRow["documentId"];
|
||||
revisionNumber: DocumentRevisionTableRow["revisionNumber"];
|
||||
body: DocumentRevisionTableRow["body"];
|
||||
changeSummary: DocumentRevisionTableRow["changeSummary"];
|
||||
createdByAgentId: string | null;
|
||||
createdByUserId: string | null;
|
||||
createdAt: Date;
|
||||
};
|
||||
|
||||
export type IssueAttachmentRow = {
|
||||
id: IssueAttachmentTableRow["id"];
|
||||
companyId: IssueAttachmentTableRow["companyId"];
|
||||
issueId: IssueAttachmentTableRow["issueId"];
|
||||
issueCommentId: IssueAttachmentTableRow["issueCommentId"];
|
||||
assetId: IssueAttachmentTableRow["assetId"];
|
||||
provider: AssetRow["provider"];
|
||||
objectKey: AssetRow["objectKey"];
|
||||
contentType: AssetRow["contentType"];
|
||||
byteSize: AssetRow["byteSize"];
|
||||
sha256: AssetRow["sha256"];
|
||||
originalFilename: AssetRow["originalFilename"];
|
||||
createdByAgentId: string | null;
|
||||
createdByUserId: string | null;
|
||||
assetCreatedAt: Date;
|
||||
assetUpdatedAt: Date;
|
||||
attachmentCreatedAt: Date;
|
||||
attachmentUpdatedAt: Date;
|
||||
};
|
||||
|
||||
export type PlannedDocumentRevisionInsert = {
|
||||
source: DocumentRevisionRow;
|
||||
targetRevisionNumber: number;
|
||||
targetCreatedByAgentId: string | null;
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedIssueDocumentInsert = {
|
||||
source: IssueDocumentRow;
|
||||
action: "insert";
|
||||
targetCreatedByAgentId: string | null;
|
||||
targetUpdatedByAgentId: string | null;
|
||||
latestRevisionId: string | null;
|
||||
latestRevisionNumber: number;
|
||||
revisionsToInsert: PlannedDocumentRevisionInsert[];
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedIssueDocumentMerge = {
|
||||
source: IssueDocumentRow;
|
||||
action: "merge_existing";
|
||||
targetCreatedByAgentId: string | null;
|
||||
targetUpdatedByAgentId: string | null;
|
||||
latestRevisionId: string | null;
|
||||
latestRevisionNumber: number;
|
||||
revisionsToInsert: PlannedDocumentRevisionInsert[];
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedIssueDocumentSkip = {
|
||||
source: IssueDocumentRow;
|
||||
action: "skip_existing" | "skip_missing_parent" | "skip_conflicting_key";
|
||||
};
|
||||
|
||||
export type PlannedAttachmentInsert = {
|
||||
source: IssueAttachmentRow;
|
||||
action: "insert";
|
||||
targetIssueCommentId: string | null;
|
||||
targetCreatedByAgentId: string | null;
|
||||
adjustments: ImportAdjustment[];
|
||||
};
|
||||
|
||||
export type PlannedAttachmentSkip = {
|
||||
source: IssueAttachmentRow;
|
||||
action: "skip_existing" | "skip_missing_parent";
|
||||
};
|
||||
|
||||
export type PlannedProjectImport = {
|
||||
source: ProjectRow;
|
||||
targetLeadAgentId: string | null;
|
||||
targetGoalId: string | null;
|
||||
workspaces: ProjectWorkspaceRow[];
|
||||
};
|
||||
|
||||
export type WorktreeMergePlan = {
|
||||
companyId: string;
|
||||
companyName: string;
|
||||
issuePrefix: string;
|
||||
previewIssueCounterStart: number;
|
||||
scopes: WorktreeMergeScope[];
|
||||
projectImports: PlannedProjectImport[];
|
||||
issuePlans: Array<PlannedIssueInsert | PlannedIssueSkip>;
|
||||
commentPlans: Array<PlannedCommentInsert | PlannedCommentSkip>;
|
||||
documentPlans: Array<PlannedIssueDocumentInsert | PlannedIssueDocumentMerge | PlannedIssueDocumentSkip>;
|
||||
attachmentPlans: Array<PlannedAttachmentInsert | PlannedAttachmentSkip>;
|
||||
counts: {
|
||||
projectsToImport: number;
|
||||
issuesToInsert: number;
|
||||
issuesExisting: number;
|
||||
issueDrift: number;
|
||||
commentsToInsert: number;
|
||||
commentsExisting: number;
|
||||
commentsMissingParent: number;
|
||||
documentsToInsert: number;
|
||||
documentsToMerge: number;
|
||||
documentsExisting: number;
|
||||
documentsConflictingKey: number;
|
||||
documentsMissingParent: number;
|
||||
documentRevisionsToInsert: number;
|
||||
attachmentsToInsert: number;
|
||||
attachmentsExisting: number;
|
||||
attachmentsMissingParent: number;
|
||||
};
|
||||
adjustments: Record<ImportAdjustment, number>;
|
||||
};
|
||||
|
||||
function compareIssueCoreFields(source: IssueRow, target: IssueRow): string[] {
|
||||
const driftKeys: string[] = [];
|
||||
if (source.title !== target.title) driftKeys.push("title");
|
||||
if ((source.description ?? null) !== (target.description ?? null)) driftKeys.push("description");
|
||||
if (source.status !== target.status) driftKeys.push("status");
|
||||
if (source.priority !== target.priority) driftKeys.push("priority");
|
||||
if ((source.parentId ?? null) !== (target.parentId ?? null)) driftKeys.push("parentId");
|
||||
if ((source.projectId ?? null) !== (target.projectId ?? null)) driftKeys.push("projectId");
|
||||
if ((source.projectWorkspaceId ?? null) !== (target.projectWorkspaceId ?? null)) driftKeys.push("projectWorkspaceId");
|
||||
if ((source.goalId ?? null) !== (target.goalId ?? null)) driftKeys.push("goalId");
|
||||
if ((source.assigneeAgentId ?? null) !== (target.assigneeAgentId ?? null)) driftKeys.push("assigneeAgentId");
|
||||
if ((source.assigneeUserId ?? null) !== (target.assigneeUserId ?? null)) driftKeys.push("assigneeUserId");
|
||||
return driftKeys;
|
||||
}
|
||||
|
||||
function incrementAdjustment(
|
||||
counts: Record<ImportAdjustment, number>,
|
||||
adjustment: ImportAdjustment,
|
||||
): void {
|
||||
counts[adjustment] += 1;
|
||||
}
|
||||
|
||||
function groupBy<T>(rows: T[], keyFor: (row: T) => string): Map<string, T[]> {
|
||||
const out = new Map<string, T[]>();
|
||||
for (const row of rows) {
|
||||
const key = keyFor(row);
|
||||
const existing = out.get(key);
|
||||
if (existing) {
|
||||
existing.push(row);
|
||||
} else {
|
||||
out.set(key, [row]);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function sameDate(left: Date, right: Date): boolean {
|
||||
return left.getTime() === right.getTime();
|
||||
}
|
||||
|
||||
function sortDocumentRows(rows: IssueDocumentRow[]): IssueDocumentRow[] {
|
||||
return [...rows].sort((left, right) => {
|
||||
const createdDelta = left.documentCreatedAt.getTime() - right.documentCreatedAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
const linkDelta = left.linkCreatedAt.getTime() - right.linkCreatedAt.getTime();
|
||||
if (linkDelta !== 0) return linkDelta;
|
||||
return left.documentId.localeCompare(right.documentId);
|
||||
});
|
||||
}
|
||||
|
||||
function sortDocumentRevisions(rows: DocumentRevisionRow[]): DocumentRevisionRow[] {
|
||||
return [...rows].sort((left, right) => {
|
||||
const revisionDelta = left.revisionNumber - right.revisionNumber;
|
||||
if (revisionDelta !== 0) return revisionDelta;
|
||||
const createdDelta = left.createdAt.getTime() - right.createdAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
return left.id.localeCompare(right.id);
|
||||
});
|
||||
}
|
||||
|
||||
function sortAttachments(rows: IssueAttachmentRow[]): IssueAttachmentRow[] {
|
||||
return [...rows].sort((left, right) => {
|
||||
const createdDelta = left.attachmentCreatedAt.getTime() - right.attachmentCreatedAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
return left.id.localeCompare(right.id);
|
||||
});
|
||||
}
|
||||
|
||||
function sortIssuesForImport(sourceIssues: IssueRow[]): IssueRow[] {
|
||||
const byId = new Map(sourceIssues.map((issue) => [issue.id, issue]));
|
||||
const memoDepth = new Map<string, number>();
|
||||
|
||||
const depthFor = (issue: IssueRow, stack = new Set<string>()): number => {
|
||||
const memoized = memoDepth.get(issue.id);
|
||||
if (memoized !== undefined) return memoized;
|
||||
if (!issue.parentId) {
|
||||
memoDepth.set(issue.id, 0);
|
||||
return 0;
|
||||
}
|
||||
if (stack.has(issue.id)) {
|
||||
memoDepth.set(issue.id, 0);
|
||||
return 0;
|
||||
}
|
||||
const parent = byId.get(issue.parentId);
|
||||
if (!parent) {
|
||||
memoDepth.set(issue.id, 0);
|
||||
return 0;
|
||||
}
|
||||
stack.add(issue.id);
|
||||
const depth = depthFor(parent, stack) + 1;
|
||||
stack.delete(issue.id);
|
||||
memoDepth.set(issue.id, depth);
|
||||
return depth;
|
||||
};
|
||||
|
||||
return [...sourceIssues].sort((left, right) => {
|
||||
const depthDelta = depthFor(left) - depthFor(right);
|
||||
if (depthDelta !== 0) return depthDelta;
|
||||
const createdDelta = left.createdAt.getTime() - right.createdAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
return left.id.localeCompare(right.id);
|
||||
});
|
||||
}
|
||||
|
||||
export function parseWorktreeMergeScopes(rawValue: string | undefined): WorktreeMergeScope[] {
|
||||
if (!rawValue || rawValue.trim().length === 0) {
|
||||
return ["issues", "comments"];
|
||||
}
|
||||
|
||||
const parsed = rawValue
|
||||
.split(",")
|
||||
.map((value) => value.trim().toLowerCase())
|
||||
.filter((value): value is WorktreeMergeScope =>
|
||||
(WORKTREE_MERGE_SCOPES as readonly string[]).includes(value),
|
||||
);
|
||||
|
||||
if (parsed.length === 0) {
|
||||
throw new Error(
|
||||
`Invalid scope "${rawValue}". Expected a comma-separated list of: ${WORKTREE_MERGE_SCOPES.join(", ")}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return [...new Set(parsed)];
|
||||
}
|
||||
|
||||
export function buildWorktreeMergePlan(input: {
|
||||
companyId: string;
|
||||
companyName: string;
|
||||
issuePrefix: string;
|
||||
previewIssueCounterStart: number;
|
||||
scopes: WorktreeMergeScope[];
|
||||
sourceIssues: IssueRow[];
|
||||
targetIssues: IssueRow[];
|
||||
sourceComments: CommentRow[];
|
||||
targetComments: CommentRow[];
|
||||
sourceProjects?: ProjectRow[];
|
||||
sourceProjectWorkspaces?: ProjectWorkspaceRow[];
|
||||
sourceDocuments?: IssueDocumentRow[];
|
||||
targetDocuments?: IssueDocumentRow[];
|
||||
sourceDocumentRevisions?: DocumentRevisionRow[];
|
||||
targetDocumentRevisions?: DocumentRevisionRow[];
|
||||
sourceAttachments?: IssueAttachmentRow[];
|
||||
targetAttachments?: IssueAttachmentRow[];
|
||||
targetAgents: AgentRow[];
|
||||
targetProjects: ProjectRow[];
|
||||
targetProjectWorkspaces: ProjectWorkspaceRow[];
|
||||
targetGoals: GoalRow[];
|
||||
importProjectIds?: Iterable<string>;
|
||||
projectIdOverrides?: Record<string, string | null | undefined>;
|
||||
}): WorktreeMergePlan {
|
||||
const targetIssuesById = new Map(input.targetIssues.map((issue) => [issue.id, issue]));
|
||||
const targetCommentIds = new Set(input.targetComments.map((comment) => comment.id));
|
||||
const targetAgentIds = new Set(input.targetAgents.map((agent) => agent.id));
|
||||
const targetProjectIds = new Set(input.targetProjects.map((project) => project.id));
|
||||
const targetProjectsById = new Map(input.targetProjects.map((project) => [project.id, project]));
|
||||
const targetProjectWorkspaceIds = new Set(input.targetProjectWorkspaces.map((workspace) => workspace.id));
|
||||
const targetGoalIds = new Set(input.targetGoals.map((goal) => goal.id));
|
||||
const sourceProjectsById = new Map((input.sourceProjects ?? []).map((project) => [project.id, project]));
|
||||
const sourceProjectWorkspaces = input.sourceProjectWorkspaces ?? [];
|
||||
const sourceProjectWorkspacesByProjectId = groupBy(sourceProjectWorkspaces, (workspace) => workspace.projectId);
|
||||
const importProjectIds = new Set(input.importProjectIds ?? []);
|
||||
const scopes = new Set(input.scopes);
|
||||
|
||||
const adjustmentCounts: Record<ImportAdjustment, number> = {
|
||||
clear_assignee_agent: 0,
|
||||
clear_project: 0,
|
||||
clear_project_workspace: 0,
|
||||
clear_goal: 0,
|
||||
clear_author_agent: 0,
|
||||
coerce_in_progress_to_todo: 0,
|
||||
clear_document_agent: 0,
|
||||
clear_document_revision_agent: 0,
|
||||
clear_attachment_agent: 0,
|
||||
};
|
||||
|
||||
const projectImports: PlannedProjectImport[] = [];
|
||||
for (const projectId of importProjectIds) {
|
||||
if (targetProjectIds.has(projectId)) continue;
|
||||
const sourceProject = sourceProjectsById.get(projectId);
|
||||
if (!sourceProject) continue;
|
||||
projectImports.push({
|
||||
source: sourceProject,
|
||||
targetLeadAgentId:
|
||||
sourceProject.leadAgentId && targetAgentIds.has(sourceProject.leadAgentId)
|
||||
? sourceProject.leadAgentId
|
||||
: null,
|
||||
targetGoalId:
|
||||
sourceProject.goalId && targetGoalIds.has(sourceProject.goalId)
|
||||
? sourceProject.goalId
|
||||
: null,
|
||||
workspaces: [...(sourceProjectWorkspacesByProjectId.get(projectId) ?? [])].sort((left, right) => {
|
||||
const primaryDelta = Number(right.isPrimary) - Number(left.isPrimary);
|
||||
if (primaryDelta !== 0) return primaryDelta;
|
||||
const createdDelta = left.createdAt.getTime() - right.createdAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
return left.id.localeCompare(right.id);
|
||||
}),
|
||||
});
|
||||
}
|
||||
const importedProjectWorkspaceIds = new Set(
|
||||
projectImports.flatMap((project) => project.workspaces.map((workspace) => workspace.id)),
|
||||
);
|
||||
|
||||
const issuePlans: Array<PlannedIssueInsert | PlannedIssueSkip> = [];
|
||||
let nextPreviewIssueNumber = input.previewIssueCounterStart;
|
||||
for (const issue of sortIssuesForImport(input.sourceIssues)) {
|
||||
const existing = targetIssuesById.get(issue.id);
|
||||
if (existing) {
|
||||
issuePlans.push({
|
||||
source: issue,
|
||||
action: "skip_existing",
|
||||
driftKeys: compareIssueCoreFields(issue, existing),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
nextPreviewIssueNumber += 1;
|
||||
const adjustments: ImportAdjustment[] = [];
|
||||
const targetAssigneeAgentId =
|
||||
issue.assigneeAgentId && targetAgentIds.has(issue.assigneeAgentId) ? issue.assigneeAgentId : null;
|
||||
if (issue.assigneeAgentId && !targetAssigneeAgentId) {
|
||||
adjustments.push("clear_assignee_agent");
|
||||
incrementAdjustment(adjustmentCounts, "clear_assignee_agent");
|
||||
}
|
||||
|
||||
const targetCreatedByAgentId =
|
||||
issue.createdByAgentId && targetAgentIds.has(issue.createdByAgentId) ? issue.createdByAgentId : null;
|
||||
|
||||
let targetProjectId =
|
||||
issue.projectId && targetProjectIds.has(issue.projectId) ? issue.projectId : null;
|
||||
let projectResolution: PlannedIssueInsert["projectResolution"] = targetProjectId ? "preserved" : "cleared";
|
||||
let mappedProjectName: string | null = null;
|
||||
const overrideProjectId =
|
||||
issue.projectId && input.projectIdOverrides
|
||||
? input.projectIdOverrides[issue.projectId] ?? null
|
||||
: null;
|
||||
if (!targetProjectId && overrideProjectId && targetProjectIds.has(overrideProjectId)) {
|
||||
targetProjectId = overrideProjectId;
|
||||
projectResolution = "mapped";
|
||||
mappedProjectName = targetProjectsById.get(overrideProjectId)?.name ?? null;
|
||||
}
|
||||
if (!targetProjectId && issue.projectId && importProjectIds.has(issue.projectId)) {
|
||||
const sourceProject = sourceProjectsById.get(issue.projectId);
|
||||
if (sourceProject) {
|
||||
targetProjectId = sourceProject.id;
|
||||
projectResolution = "imported";
|
||||
mappedProjectName = sourceProject.name;
|
||||
}
|
||||
}
|
||||
if (issue.projectId && !targetProjectId) {
|
||||
adjustments.push("clear_project");
|
||||
incrementAdjustment(adjustmentCounts, "clear_project");
|
||||
}
|
||||
|
||||
const targetProjectWorkspaceId =
|
||||
targetProjectId
|
||||
&& targetProjectId === issue.projectId
|
||||
&& issue.projectWorkspaceId
|
||||
&& (targetProjectWorkspaceIds.has(issue.projectWorkspaceId)
|
||||
|| importedProjectWorkspaceIds.has(issue.projectWorkspaceId))
|
||||
? issue.projectWorkspaceId
|
||||
: null;
|
||||
if (issue.projectWorkspaceId && !targetProjectWorkspaceId) {
|
||||
adjustments.push("clear_project_workspace");
|
||||
incrementAdjustment(adjustmentCounts, "clear_project_workspace");
|
||||
}
|
||||
|
||||
const targetGoalId =
|
||||
issue.goalId && targetGoalIds.has(issue.goalId) ? issue.goalId : null;
|
||||
if (issue.goalId && !targetGoalId) {
|
||||
adjustments.push("clear_goal");
|
||||
incrementAdjustment(adjustmentCounts, "clear_goal");
|
||||
}
|
||||
|
||||
let targetStatus = issue.status;
|
||||
if (
|
||||
targetStatus === "in_progress"
|
||||
&& !targetAssigneeAgentId
|
||||
&& !(issue.assigneeUserId && issue.assigneeUserId.trim().length > 0)
|
||||
) {
|
||||
targetStatus = "todo";
|
||||
adjustments.push("coerce_in_progress_to_todo");
|
||||
incrementAdjustment(adjustmentCounts, "coerce_in_progress_to_todo");
|
||||
}
|
||||
|
||||
issuePlans.push({
|
||||
source: issue,
|
||||
action: "insert",
|
||||
previewIssueNumber: nextPreviewIssueNumber,
|
||||
previewIdentifier: `${input.issuePrefix}-${nextPreviewIssueNumber}`,
|
||||
targetStatus,
|
||||
targetAssigneeAgentId,
|
||||
targetCreatedByAgentId,
|
||||
targetProjectId,
|
||||
targetProjectWorkspaceId,
|
||||
targetGoalId,
|
||||
projectResolution,
|
||||
mappedProjectName,
|
||||
adjustments,
|
||||
});
|
||||
}
|
||||
|
||||
const issueIdsAvailableAfterImport = new Set<string>([
|
||||
...input.targetIssues.map((issue) => issue.id),
|
||||
...issuePlans.filter((plan): plan is PlannedIssueInsert => plan.action === "insert").map((plan) => plan.source.id),
|
||||
]);
|
||||
|
||||
const commentPlans: Array<PlannedCommentInsert | PlannedCommentSkip> = [];
|
||||
if (scopes.has("comments")) {
|
||||
const sortedComments = [...input.sourceComments].sort((left, right) => {
|
||||
const createdDelta = left.createdAt.getTime() - right.createdAt.getTime();
|
||||
if (createdDelta !== 0) return createdDelta;
|
||||
return left.id.localeCompare(right.id);
|
||||
});
|
||||
|
||||
for (const comment of sortedComments) {
|
||||
if (targetCommentIds.has(comment.id)) {
|
||||
commentPlans.push({ source: comment, action: "skip_existing" });
|
||||
continue;
|
||||
}
|
||||
if (!issueIdsAvailableAfterImport.has(comment.issueId)) {
|
||||
commentPlans.push({ source: comment, action: "skip_missing_parent" });
|
||||
continue;
|
||||
}
|
||||
|
||||
const adjustments: ImportAdjustment[] = [];
|
||||
const targetAuthorAgentId =
|
||||
comment.authorAgentId && targetAgentIds.has(comment.authorAgentId) ? comment.authorAgentId : null;
|
||||
if (comment.authorAgentId && !targetAuthorAgentId) {
|
||||
adjustments.push("clear_author_agent");
|
||||
incrementAdjustment(adjustmentCounts, "clear_author_agent");
|
||||
}
|
||||
|
||||
commentPlans.push({
|
||||
source: comment,
|
||||
action: "insert",
|
||||
targetAuthorAgentId,
|
||||
adjustments,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const sourceDocuments = input.sourceDocuments ?? [];
|
||||
const targetDocuments = input.targetDocuments ?? [];
|
||||
const sourceDocumentRevisions = input.sourceDocumentRevisions ?? [];
|
||||
const targetDocumentRevisions = input.targetDocumentRevisions ?? [];
|
||||
|
||||
const targetDocumentsById = new Map(targetDocuments.map((document) => [document.documentId, document]));
|
||||
const targetDocumentsByIssueKey = new Map(targetDocuments.map((document) => [`${document.issueId}:${document.key}`, document]));
|
||||
const sourceRevisionsByDocumentId = groupBy(sourceDocumentRevisions, (revision) => revision.documentId);
|
||||
const targetRevisionsByDocumentId = groupBy(targetDocumentRevisions, (revision) => revision.documentId);
|
||||
const commentIdsAvailableAfterImport = new Set<string>([
|
||||
...input.targetComments.map((comment) => comment.id),
|
||||
...commentPlans.filter((plan): plan is PlannedCommentInsert => plan.action === "insert").map((plan) => plan.source.id),
|
||||
]);
|
||||
|
||||
const documentPlans: Array<PlannedIssueDocumentInsert | PlannedIssueDocumentMerge | PlannedIssueDocumentSkip> = [];
|
||||
for (const document of sortDocumentRows(sourceDocuments)) {
|
||||
if (!issueIdsAvailableAfterImport.has(document.issueId)) {
|
||||
documentPlans.push({ source: document, action: "skip_missing_parent" });
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingDocument = targetDocumentsById.get(document.documentId);
|
||||
const conflictingIssueKeyDocument = targetDocumentsByIssueKey.get(`${document.issueId}:${document.key}`);
|
||||
if (!existingDocument && conflictingIssueKeyDocument && conflictingIssueKeyDocument.documentId !== document.documentId) {
|
||||
documentPlans.push({ source: document, action: "skip_conflicting_key" });
|
||||
continue;
|
||||
}
|
||||
|
||||
const adjustments: ImportAdjustment[] = [];
|
||||
const targetCreatedByAgentId =
|
||||
document.createdByAgentId && targetAgentIds.has(document.createdByAgentId) ? document.createdByAgentId : null;
|
||||
const targetUpdatedByAgentId =
|
||||
document.updatedByAgentId && targetAgentIds.has(document.updatedByAgentId) ? document.updatedByAgentId : null;
|
||||
if (
|
||||
(document.createdByAgentId && !targetCreatedByAgentId)
|
||||
|| (document.updatedByAgentId && !targetUpdatedByAgentId)
|
||||
) {
|
||||
adjustments.push("clear_document_agent");
|
||||
incrementAdjustment(adjustmentCounts, "clear_document_agent");
|
||||
}
|
||||
|
||||
const sourceRevisions = sortDocumentRevisions(sourceRevisionsByDocumentId.get(document.documentId) ?? []);
|
||||
const targetRevisions = sortDocumentRevisions(targetRevisionsByDocumentId.get(document.documentId) ?? []);
|
||||
const existingRevisionIds = new Set(targetRevisions.map((revision) => revision.id));
|
||||
const usedRevisionNumbers = new Set(targetRevisions.map((revision) => revision.revisionNumber));
|
||||
let nextRevisionNumber = targetRevisions.reduce(
|
||||
(maxValue, revision) => Math.max(maxValue, revision.revisionNumber),
|
||||
0,
|
||||
) + 1;
|
||||
|
||||
const targetRevisionNumberById = new Map<string, number>(
|
||||
targetRevisions.map((revision) => [revision.id, revision.revisionNumber]),
|
||||
);
|
||||
const revisionsToInsert: PlannedDocumentRevisionInsert[] = [];
|
||||
|
||||
for (const revision of sourceRevisions) {
|
||||
if (existingRevisionIds.has(revision.id)) continue;
|
||||
let targetRevisionNumber = revision.revisionNumber;
|
||||
if (usedRevisionNumbers.has(targetRevisionNumber)) {
|
||||
while (usedRevisionNumbers.has(nextRevisionNumber)) {
|
||||
nextRevisionNumber += 1;
|
||||
}
|
||||
targetRevisionNumber = nextRevisionNumber;
|
||||
nextRevisionNumber += 1;
|
||||
}
|
||||
usedRevisionNumbers.add(targetRevisionNumber);
|
||||
targetRevisionNumberById.set(revision.id, targetRevisionNumber);
|
||||
|
||||
const revisionAdjustments: ImportAdjustment[] = [];
|
||||
const targetCreatedByAgentId =
|
||||
revision.createdByAgentId && targetAgentIds.has(revision.createdByAgentId) ? revision.createdByAgentId : null;
|
||||
if (revision.createdByAgentId && !targetCreatedByAgentId) {
|
||||
revisionAdjustments.push("clear_document_revision_agent");
|
||||
incrementAdjustment(adjustmentCounts, "clear_document_revision_agent");
|
||||
}
|
||||
|
||||
revisionsToInsert.push({
|
||||
source: revision,
|
||||
targetRevisionNumber,
|
||||
targetCreatedByAgentId,
|
||||
adjustments: revisionAdjustments,
|
||||
});
|
||||
}
|
||||
|
||||
const latestRevisionId = document.latestRevisionId ?? existingDocument?.latestRevisionId ?? null;
|
||||
const latestRevisionNumber =
|
||||
(latestRevisionId ? targetRevisionNumberById.get(latestRevisionId) : undefined)
|
||||
?? document.latestRevisionNumber
|
||||
?? existingDocument?.latestRevisionNumber
|
||||
?? 0;
|
||||
|
||||
if (!existingDocument) {
|
||||
documentPlans.push({
|
||||
source: document,
|
||||
action: "insert",
|
||||
targetCreatedByAgentId,
|
||||
targetUpdatedByAgentId,
|
||||
latestRevisionId,
|
||||
latestRevisionNumber,
|
||||
revisionsToInsert,
|
||||
adjustments,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const documentAlreadyMatches =
|
||||
existingDocument.key === document.key
|
||||
&& existingDocument.title === document.title
|
||||
&& existingDocument.format === document.format
|
||||
&& existingDocument.latestBody === document.latestBody
|
||||
&& (existingDocument.latestRevisionId ?? null) === latestRevisionId
|
||||
&& existingDocument.latestRevisionNumber === latestRevisionNumber
|
||||
&& (existingDocument.updatedByAgentId ?? null) === targetUpdatedByAgentId
|
||||
&& (existingDocument.updatedByUserId ?? null) === (document.updatedByUserId ?? null)
|
||||
&& sameDate(existingDocument.documentUpdatedAt, document.documentUpdatedAt)
|
||||
&& sameDate(existingDocument.linkUpdatedAt, document.linkUpdatedAt)
|
||||
&& revisionsToInsert.length === 0;
|
||||
|
||||
if (documentAlreadyMatches) {
|
||||
documentPlans.push({ source: document, action: "skip_existing" });
|
||||
continue;
|
||||
}
|
||||
|
||||
documentPlans.push({
|
||||
source: document,
|
||||
action: "merge_existing",
|
||||
targetCreatedByAgentId,
|
||||
targetUpdatedByAgentId,
|
||||
latestRevisionId,
|
||||
latestRevisionNumber,
|
||||
revisionsToInsert,
|
||||
adjustments,
|
||||
});
|
||||
}
|
||||
|
||||
const sourceAttachments = input.sourceAttachments ?? [];
|
||||
const targetAttachmentIds = new Set((input.targetAttachments ?? []).map((attachment) => attachment.id));
|
||||
const attachmentPlans: Array<PlannedAttachmentInsert | PlannedAttachmentSkip> = [];
|
||||
for (const attachment of sortAttachments(sourceAttachments)) {
|
||||
if (targetAttachmentIds.has(attachment.id)) {
|
||||
attachmentPlans.push({ source: attachment, action: "skip_existing" });
|
||||
continue;
|
||||
}
|
||||
if (!issueIdsAvailableAfterImport.has(attachment.issueId)) {
|
||||
attachmentPlans.push({ source: attachment, action: "skip_missing_parent" });
|
||||
continue;
|
||||
}
|
||||
|
||||
const adjustments: ImportAdjustment[] = [];
|
||||
const targetCreatedByAgentId =
|
||||
attachment.createdByAgentId && targetAgentIds.has(attachment.createdByAgentId)
|
||||
? attachment.createdByAgentId
|
||||
: null;
|
||||
if (attachment.createdByAgentId && !targetCreatedByAgentId) {
|
||||
adjustments.push("clear_attachment_agent");
|
||||
incrementAdjustment(adjustmentCounts, "clear_attachment_agent");
|
||||
}
|
||||
|
||||
attachmentPlans.push({
|
||||
source: attachment,
|
||||
action: "insert",
|
||||
targetIssueCommentId:
|
||||
attachment.issueCommentId && commentIdsAvailableAfterImport.has(attachment.issueCommentId)
|
||||
? attachment.issueCommentId
|
||||
: null,
|
||||
targetCreatedByAgentId,
|
||||
adjustments,
|
||||
});
|
||||
}
|
||||
|
||||
const counts = {
|
||||
projectsToImport: projectImports.length,
|
||||
issuesToInsert: issuePlans.filter((plan) => plan.action === "insert").length,
|
||||
issuesExisting: issuePlans.filter((plan) => plan.action === "skip_existing").length,
|
||||
issueDrift: issuePlans.filter((plan) => plan.action === "skip_existing" && plan.driftKeys.length > 0).length,
|
||||
commentsToInsert: commentPlans.filter((plan) => plan.action === "insert").length,
|
||||
commentsExisting: commentPlans.filter((plan) => plan.action === "skip_existing").length,
|
||||
commentsMissingParent: commentPlans.filter((plan) => plan.action === "skip_missing_parent").length,
|
||||
documentsToInsert: documentPlans.filter((plan) => plan.action === "insert").length,
|
||||
documentsToMerge: documentPlans.filter((plan) => plan.action === "merge_existing").length,
|
||||
documentsExisting: documentPlans.filter((plan) => plan.action === "skip_existing").length,
|
||||
documentsConflictingKey: documentPlans.filter((plan) => plan.action === "skip_conflicting_key").length,
|
||||
documentsMissingParent: documentPlans.filter((plan) => plan.action === "skip_missing_parent").length,
|
||||
documentRevisionsToInsert: documentPlans.reduce(
|
||||
(sum, plan) =>
|
||||
sum + (plan.action === "insert" || plan.action === "merge_existing" ? plan.revisionsToInsert.length : 0),
|
||||
0,
|
||||
),
|
||||
attachmentsToInsert: attachmentPlans.filter((plan) => plan.action === "insert").length,
|
||||
attachmentsExisting: attachmentPlans.filter((plan) => plan.action === "skip_existing").length,
|
||||
attachmentsMissingParent: attachmentPlans.filter((plan) => plan.action === "skip_missing_parent").length,
|
||||
};
|
||||
|
||||
return {
|
||||
companyId: input.companyId,
|
||||
companyName: input.companyName,
|
||||
issuePrefix: input.issuePrefix,
|
||||
previewIssueCounterStart: input.previewIssueCounterStart,
|
||||
scopes: input.scopes,
|
||||
projectImports,
|
||||
issuePlans,
|
||||
commentPlans,
|
||||
documentPlans,
|
||||
attachmentPlans,
|
||||
counts,
|
||||
adjustments: adjustmentCounts,
|
||||
};
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -33,6 +33,10 @@ export function resolveDefaultContextPath(): string {
|
||||
return path.resolve(resolvePaperclipHomeDir(), "context.json");
|
||||
}
|
||||
|
||||
export function resolveDefaultCliAuthPath(): string {
|
||||
return path.resolve(resolvePaperclipHomeDir(), "auth.json");
|
||||
}
|
||||
|
||||
export function resolveDefaultEmbeddedPostgresDir(instanceId?: string): string {
|
||||
return path.resolve(resolvePaperclipInstanceRoot(instanceId), "db");
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ export {
|
||||
loggingConfigSchema,
|
||||
serverConfigSchema,
|
||||
authConfigSchema,
|
||||
telemetryConfigSchema,
|
||||
storageConfigSchema,
|
||||
storageLocalDiskConfigSchema,
|
||||
storageS3ConfigSchema,
|
||||
@@ -19,10 +20,11 @@ export {
|
||||
type LoggingConfig,
|
||||
type ServerConfig,
|
||||
type AuthConfig,
|
||||
type TelemetryConfig,
|
||||
type StorageConfig,
|
||||
type StorageLocalDiskConfig,
|
||||
type StorageS3Config,
|
||||
type SecretsConfig,
|
||||
type SecretsLocalEncryptedConfig,
|
||||
type ConfigMeta,
|
||||
} from "@paperclipai/shared";
|
||||
} from "../../../packages/shared/src/config-schema.js";
|
||||
|
||||
183
cli/src/config/server-bind.ts
Normal file
183
cli/src/config/server-bind.ts
Normal file
@@ -0,0 +1,183 @@
|
||||
import { execFileSync } from "node:child_process";
|
||||
import {
|
||||
ALL_INTERFACES_BIND_HOST,
|
||||
LOOPBACK_BIND_HOST,
|
||||
inferBindModeFromHost,
|
||||
isAllInterfacesHost,
|
||||
isLoopbackHost,
|
||||
type BindMode,
|
||||
type DeploymentExposure,
|
||||
type DeploymentMode,
|
||||
} from "@paperclipai/shared";
|
||||
import type { AuthConfig, ServerConfig } from "./schema.js";
|
||||
|
||||
const TAILSCALE_DETECT_TIMEOUT_MS = 3000;
|
||||
|
||||
type BaseServerInput = {
|
||||
port: number;
|
||||
allowedHostnames: string[];
|
||||
serveUi: boolean;
|
||||
};
|
||||
|
||||
export function inferConfiguredBind(server?: Partial<ServerConfig>): BindMode {
|
||||
if (server?.bind) return server.bind;
|
||||
return inferBindModeFromHost(server?.customBindHost ?? server?.host);
|
||||
}
|
||||
|
||||
export function detectTailnetBindHost(): string | undefined {
|
||||
const explicit = process.env.PAPERCLIP_TAILNET_BIND_HOST?.trim();
|
||||
if (explicit) return explicit;
|
||||
|
||||
try {
|
||||
const stdout = execFileSync("tailscale", ["ip", "-4"], {
|
||||
encoding: "utf8",
|
||||
stdio: ["ignore", "pipe", "ignore"],
|
||||
timeout: TAILSCALE_DETECT_TIMEOUT_MS,
|
||||
});
|
||||
return stdout
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.find(Boolean);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export function buildPresetServerConfig(
|
||||
bind: Exclude<BindMode, "custom">,
|
||||
input: BaseServerInput,
|
||||
): { server: ServerConfig; auth: AuthConfig } {
|
||||
const host =
|
||||
bind === "loopback"
|
||||
? LOOPBACK_BIND_HOST
|
||||
: bind === "tailnet"
|
||||
? (detectTailnetBindHost() ?? LOOPBACK_BIND_HOST)
|
||||
: ALL_INTERFACES_BIND_HOST;
|
||||
|
||||
return {
|
||||
server: {
|
||||
deploymentMode: bind === "loopback" ? "local_trusted" : "authenticated",
|
||||
exposure: "private",
|
||||
bind,
|
||||
customBindHost: undefined,
|
||||
host,
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function buildCustomServerConfig(input: BaseServerInput & {
|
||||
deploymentMode: DeploymentMode;
|
||||
exposure: DeploymentExposure;
|
||||
host: string;
|
||||
publicBaseUrl?: string;
|
||||
}): { server: ServerConfig; auth: AuthConfig } {
|
||||
const normalizedHost = input.host.trim();
|
||||
const bind = isLoopbackHost(normalizedHost)
|
||||
? "loopback"
|
||||
: isAllInterfacesHost(normalizedHost)
|
||||
? "lan"
|
||||
: "custom";
|
||||
|
||||
return {
|
||||
server: {
|
||||
deploymentMode: input.deploymentMode,
|
||||
exposure: input.deploymentMode === "local_trusted" ? "private" : input.exposure,
|
||||
bind,
|
||||
customBindHost: bind === "custom" ? normalizedHost : undefined,
|
||||
host: normalizedHost,
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
},
|
||||
auth:
|
||||
input.deploymentMode === "authenticated" && input.exposure === "public"
|
||||
? {
|
||||
baseUrlMode: "explicit",
|
||||
disableSignUp: false,
|
||||
publicBaseUrl: input.publicBaseUrl,
|
||||
}
|
||||
: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function resolveQuickstartServerConfig(input: {
|
||||
bind?: BindMode | null;
|
||||
deploymentMode?: DeploymentMode | null;
|
||||
exposure?: DeploymentExposure | null;
|
||||
host?: string | null;
|
||||
port: number;
|
||||
allowedHostnames: string[];
|
||||
serveUi: boolean;
|
||||
publicBaseUrl?: string;
|
||||
}): { server: ServerConfig; auth: AuthConfig } {
|
||||
const trimmedHost = input.host?.trim();
|
||||
const explicitBind = input.bind ?? null;
|
||||
|
||||
if (explicitBind === "loopback" || explicitBind === "lan" || explicitBind === "tailnet") {
|
||||
return buildPresetServerConfig(explicitBind, {
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
});
|
||||
}
|
||||
|
||||
if (explicitBind === "custom") {
|
||||
return buildCustomServerConfig({
|
||||
deploymentMode: input.deploymentMode ?? "authenticated",
|
||||
exposure: input.exposure ?? "private",
|
||||
host: trimmedHost || LOOPBACK_BIND_HOST,
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
publicBaseUrl: input.publicBaseUrl,
|
||||
});
|
||||
}
|
||||
|
||||
if (trimmedHost) {
|
||||
return buildCustomServerConfig({
|
||||
deploymentMode: input.deploymentMode ?? (isLoopbackHost(trimmedHost) ? "local_trusted" : "authenticated"),
|
||||
exposure: input.exposure ?? "private",
|
||||
host: trimmedHost,
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
publicBaseUrl: input.publicBaseUrl,
|
||||
});
|
||||
}
|
||||
|
||||
if (input.deploymentMode === "authenticated") {
|
||||
if (input.exposure === "public") {
|
||||
return buildCustomServerConfig({
|
||||
deploymentMode: "authenticated",
|
||||
exposure: "public",
|
||||
host: ALL_INTERFACES_BIND_HOST,
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
publicBaseUrl: input.publicBaseUrl,
|
||||
});
|
||||
}
|
||||
|
||||
return buildPresetServerConfig("lan", {
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
});
|
||||
}
|
||||
|
||||
return buildPresetServerConfig("loopback", {
|
||||
port: input.port,
|
||||
allowedHostnames: input.allowedHostnames,
|
||||
serveUi: input.serveUi,
|
||||
});
|
||||
}
|
||||
@@ -15,10 +15,15 @@ import { registerAgentCommands } from "./commands/client/agent.js";
|
||||
import { registerApprovalCommands } from "./commands/client/approval.js";
|
||||
import { registerActivityCommands } from "./commands/client/activity.js";
|
||||
import { registerDashboardCommands } from "./commands/client/dashboard.js";
|
||||
import { registerRoutineCommands } from "./commands/routines.js";
|
||||
import { registerFeedbackCommands } from "./commands/client/feedback.js";
|
||||
import { applyDataDirOverride, type DataDirOptionLike } from "./config/data-dir.js";
|
||||
import { loadPaperclipEnvFile } from "./config/env.js";
|
||||
import { initTelemetryFromConfigFile, flushTelemetry } from "./telemetry.js";
|
||||
import { registerWorktreeCommands } from "./commands/worktree.js";
|
||||
import { registerPluginCommands } from "./commands/client/plugin.js";
|
||||
import { registerClientAuthCommands } from "./commands/client/auth.js";
|
||||
import { cliVersion } from "./version.js";
|
||||
|
||||
const program = new Command();
|
||||
const DATA_DIR_OPTION_HELP =
|
||||
@@ -27,7 +32,7 @@ const DATA_DIR_OPTION_HELP =
|
||||
program
|
||||
.name("paperclipai")
|
||||
.description("Paperclip CLI — setup, diagnose, and configure your instance")
|
||||
.version("0.2.7");
|
||||
.version(cliVersion);
|
||||
|
||||
program.hook("preAction", (_thisCommand, actionCommand) => {
|
||||
const options = actionCommand.optsWithGlobals() as DataDirOptionLike;
|
||||
@@ -37,6 +42,7 @@ program.hook("preAction", (_thisCommand, actionCommand) => {
|
||||
hasContextOption: optionNames.has("context"),
|
||||
});
|
||||
loadPaperclipEnvFile(options.config);
|
||||
initTelemetryFromConfigFile(options.config);
|
||||
});
|
||||
|
||||
program
|
||||
@@ -44,7 +50,8 @@ program
|
||||
.description("Interactive first-run setup wizard")
|
||||
.option("-c, --config <path>", "Path to config file")
|
||||
.option("-d, --data-dir <path>", DATA_DIR_OPTION_HELP)
|
||||
.option("-y, --yes", "Accept defaults (quickstart + start immediately)", false)
|
||||
.option("--bind <mode>", "Quickstart reachability preset (loopback, lan, tailnet)")
|
||||
.option("-y, --yes", "Accept quickstart defaults (trusted local loopback unless --bind is set) and start immediately", false)
|
||||
.option("--run", "Start Paperclip immediately after saving config", false)
|
||||
.action(onboard);
|
||||
|
||||
@@ -102,6 +109,7 @@ program
|
||||
.option("-c, --config <path>", "Path to config file")
|
||||
.option("-d, --data-dir <path>", DATA_DIR_OPTION_HELP)
|
||||
.option("-i, --instance <id>", "Local instance id (default: default)")
|
||||
.option("--bind <mode>", "On first run, use onboarding reachability preset (loopback, lan, tailnet)")
|
||||
.option("--repair", "Attempt automatic repairs during doctor", true)
|
||||
.option("--no-repair", "Disable automatic repairs during doctor")
|
||||
.action(runCommand);
|
||||
@@ -136,6 +144,8 @@ registerAgentCommands(program);
|
||||
registerApprovalCommands(program);
|
||||
registerActivityCommands(program);
|
||||
registerDashboardCommands(program);
|
||||
registerRoutineCommands(program);
|
||||
registerFeedbackCommands(program);
|
||||
registerWorktreeCommands(program);
|
||||
registerPluginCommands(program);
|
||||
|
||||
@@ -151,7 +161,22 @@ auth
|
||||
.option("--base-url <url>", "Public base URL used to print invite link")
|
||||
.action(bootstrapCeoInvite);
|
||||
|
||||
program.parseAsync().catch((err) => {
|
||||
console.error(err instanceof Error ? err.message : String(err));
|
||||
process.exit(1);
|
||||
});
|
||||
registerClientAuthCommands(auth);
|
||||
|
||||
async function main(): Promise<void> {
|
||||
let failed = false;
|
||||
try {
|
||||
await program.parseAsync();
|
||||
} catch (err) {
|
||||
failed = true;
|
||||
console.error(err instanceof Error ? err.message : String(err));
|
||||
} finally {
|
||||
await flushTelemetry();
|
||||
}
|
||||
|
||||
if (failed) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
void main();
|
||||
|
||||
@@ -1,6 +1,16 @@
|
||||
import * as p from "@clack/prompts";
|
||||
import { isLoopbackHost, type BindMode } from "@paperclipai/shared";
|
||||
import type { AuthConfig, ServerConfig } from "../config/schema.js";
|
||||
import { parseHostnameCsv } from "../config/hostnames.js";
|
||||
import { buildCustomServerConfig, buildPresetServerConfig, inferConfiguredBind } from "../config/server-bind.js";
|
||||
|
||||
const TAILNET_BIND_WARNING =
|
||||
"No Tailscale address was detected during setup. The saved config will stay on loopback until Tailscale is available or PAPERCLIP_TAILNET_BIND_HOST is set.";
|
||||
|
||||
function cancelled(): never {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
export async function promptServer(opts?: {
|
||||
currentServer?: Partial<ServerConfig>;
|
||||
@@ -8,69 +18,37 @@ export async function promptServer(opts?: {
|
||||
}): Promise<{ server: ServerConfig; auth: AuthConfig }> {
|
||||
const currentServer = opts?.currentServer;
|
||||
const currentAuth = opts?.currentAuth;
|
||||
const currentBind = inferConfiguredBind(currentServer);
|
||||
|
||||
const deploymentModeSelection = await p.select({
|
||||
message: "Deployment mode",
|
||||
const bindSelection = await p.select({
|
||||
message: "Reachability",
|
||||
options: [
|
||||
{
|
||||
value: "local_trusted",
|
||||
label: "Local trusted",
|
||||
hint: "Easiest for local setup (no login, localhost-only)",
|
||||
value: "loopback" as const,
|
||||
label: "Trusted local",
|
||||
hint: "Recommended for first run: localhost only, no login friction",
|
||||
},
|
||||
{
|
||||
value: "authenticated",
|
||||
label: "Authenticated",
|
||||
hint: "Login required; use for private network or public hosting",
|
||||
value: "lan" as const,
|
||||
label: "Private network",
|
||||
hint: "Broad private bind for LAN, VPN, or legacy --tailscale-auth style access",
|
||||
},
|
||||
{
|
||||
value: "tailnet" as const,
|
||||
label: "Tailnet",
|
||||
hint: "Private authenticated access using the machine's detected Tailscale address",
|
||||
},
|
||||
{
|
||||
value: "custom" as const,
|
||||
label: "Custom",
|
||||
hint: "Choose exact auth mode, exposure, and host manually",
|
||||
},
|
||||
],
|
||||
initialValue: currentServer?.deploymentMode ?? "local_trusted",
|
||||
initialValue: currentBind,
|
||||
});
|
||||
|
||||
if (p.isCancel(deploymentModeSelection)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
const deploymentMode = deploymentModeSelection as ServerConfig["deploymentMode"];
|
||||
|
||||
let exposure: ServerConfig["exposure"] = "private";
|
||||
if (deploymentMode === "authenticated") {
|
||||
const exposureSelection = await p.select({
|
||||
message: "Exposure profile",
|
||||
options: [
|
||||
{
|
||||
value: "private",
|
||||
label: "Private network",
|
||||
hint: "Private access (for example Tailscale), lower setup friction",
|
||||
},
|
||||
{
|
||||
value: "public",
|
||||
label: "Public internet",
|
||||
hint: "Internet-facing deployment with stricter requirements",
|
||||
},
|
||||
],
|
||||
initialValue: currentServer?.exposure ?? "private",
|
||||
});
|
||||
if (p.isCancel(exposureSelection)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
exposure = exposureSelection as ServerConfig["exposure"];
|
||||
}
|
||||
|
||||
const hostDefault = deploymentMode === "local_trusted" ? "127.0.0.1" : "0.0.0.0";
|
||||
const hostStr = await p.text({
|
||||
message: "Bind host",
|
||||
defaultValue: currentServer?.host ?? hostDefault,
|
||||
placeholder: hostDefault,
|
||||
validate: (val) => {
|
||||
if (!val.trim()) return "Host is required";
|
||||
},
|
||||
});
|
||||
|
||||
if (p.isCancel(hostStr)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
if (p.isCancel(bindSelection)) cancelled();
|
||||
const bind = bindSelection as BindMode;
|
||||
|
||||
const portStr = await p.text({
|
||||
message: "Server port",
|
||||
@@ -84,15 +62,113 @@ export async function promptServer(opts?: {
|
||||
},
|
||||
});
|
||||
|
||||
if (p.isCancel(portStr)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
if (p.isCancel(portStr)) cancelled();
|
||||
const port = Number(portStr) || 3100;
|
||||
const serveUi = currentServer?.serveUi ?? true;
|
||||
|
||||
if (bind === "loopback") {
|
||||
return buildPresetServerConfig("loopback", {
|
||||
port,
|
||||
allowedHostnames: [],
|
||||
serveUi,
|
||||
});
|
||||
}
|
||||
|
||||
if (bind === "lan" || bind === "tailnet") {
|
||||
const allowedHostnamesInput = await p.text({
|
||||
message: "Allowed private hostnames (comma-separated, optional)",
|
||||
defaultValue: (currentServer?.allowedHostnames ?? []).join(", "),
|
||||
placeholder:
|
||||
bind === "tailnet"
|
||||
? "your-machine.tailnet.ts.net"
|
||||
: "dotta-macbook-pro, host.docker.internal",
|
||||
validate: (val) => {
|
||||
try {
|
||||
parseHostnameCsv(val);
|
||||
return;
|
||||
} catch (err) {
|
||||
return err instanceof Error ? err.message : "Invalid hostname list";
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
if (p.isCancel(allowedHostnamesInput)) cancelled();
|
||||
|
||||
const preset = buildPresetServerConfig(bind, {
|
||||
port,
|
||||
allowedHostnames: parseHostnameCsv(allowedHostnamesInput),
|
||||
serveUi,
|
||||
});
|
||||
if (bind === "tailnet" && isLoopbackHost(preset.server.host)) {
|
||||
p.log.warn(TAILNET_BIND_WARNING);
|
||||
}
|
||||
return preset;
|
||||
}
|
||||
|
||||
const deploymentModeSelection = await p.select({
|
||||
message: "Auth mode",
|
||||
options: [
|
||||
{
|
||||
value: "local_trusted",
|
||||
label: "Local trusted",
|
||||
hint: "No login required; only safe with loopback-only or similarly trusted access",
|
||||
},
|
||||
{
|
||||
value: "authenticated",
|
||||
label: "Authenticated",
|
||||
hint: "Login required; supports both private-network and public deployments",
|
||||
},
|
||||
],
|
||||
initialValue: currentServer?.deploymentMode ?? "authenticated",
|
||||
});
|
||||
|
||||
if (p.isCancel(deploymentModeSelection)) cancelled();
|
||||
const deploymentMode = deploymentModeSelection as ServerConfig["deploymentMode"];
|
||||
|
||||
let exposure: ServerConfig["exposure"] = "private";
|
||||
if (deploymentMode === "authenticated") {
|
||||
const exposureSelection = await p.select({
|
||||
message: "Exposure profile",
|
||||
options: [
|
||||
{
|
||||
value: "private",
|
||||
label: "Private network",
|
||||
hint: "Private access only, with automatic URL handling",
|
||||
},
|
||||
{
|
||||
value: "public",
|
||||
label: "Public internet",
|
||||
hint: "Internet-facing deployment with explicit public URL requirements",
|
||||
},
|
||||
],
|
||||
initialValue: currentServer?.exposure ?? "private",
|
||||
});
|
||||
if (p.isCancel(exposureSelection)) cancelled();
|
||||
exposure = exposureSelection as ServerConfig["exposure"];
|
||||
}
|
||||
|
||||
const defaultHost =
|
||||
currentServer?.customBindHost ??
|
||||
currentServer?.host ??
|
||||
(deploymentMode === "local_trusted" ? "127.0.0.1" : "0.0.0.0");
|
||||
const host = await p.text({
|
||||
message: "Bind host",
|
||||
defaultValue: defaultHost,
|
||||
placeholder: defaultHost,
|
||||
validate: (val) => {
|
||||
if (!val.trim()) return "Host is required";
|
||||
if (deploymentMode === "local_trusted" && !isLoopbackHost(val.trim())) {
|
||||
return "Local trusted mode requires a loopback host such as 127.0.0.1";
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
if (p.isCancel(host)) cancelled();
|
||||
|
||||
let allowedHostnames: string[] = [];
|
||||
if (deploymentMode === "authenticated" && exposure === "private") {
|
||||
const allowedHostnamesInput = await p.text({
|
||||
message: "Allowed hostnames (comma-separated, optional)",
|
||||
message: "Allowed private hostnames (comma-separated, optional)",
|
||||
defaultValue: (currentServer?.allowedHostnames ?? []).join(", "),
|
||||
placeholder: "dotta-macbook-pro, your-host.tailnet.ts.net",
|
||||
validate: (val) => {
|
||||
@@ -105,15 +181,11 @@ export async function promptServer(opts?: {
|
||||
},
|
||||
});
|
||||
|
||||
if (p.isCancel(allowedHostnamesInput)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
if (p.isCancel(allowedHostnamesInput)) cancelled();
|
||||
allowedHostnames = parseHostnameCsv(allowedHostnamesInput);
|
||||
}
|
||||
|
||||
const port = Number(portStr) || 3100;
|
||||
let auth: AuthConfig = { baseUrlMode: "auto", disableSignUp: false };
|
||||
let publicBaseUrl: string | undefined;
|
||||
if (deploymentMode === "authenticated" && exposure === "public") {
|
||||
const urlInput = await p.text({
|
||||
message: "Public base URL",
|
||||
@@ -133,32 +205,17 @@ export async function promptServer(opts?: {
|
||||
}
|
||||
},
|
||||
});
|
||||
if (p.isCancel(urlInput)) {
|
||||
p.cancel("Setup cancelled.");
|
||||
process.exit(0);
|
||||
}
|
||||
auth = {
|
||||
baseUrlMode: "explicit",
|
||||
disableSignUp: false,
|
||||
publicBaseUrl: urlInput.trim().replace(/\/+$/, ""),
|
||||
};
|
||||
} else if (currentAuth?.baseUrlMode === "explicit" && currentAuth.publicBaseUrl) {
|
||||
auth = {
|
||||
baseUrlMode: "explicit",
|
||||
disableSignUp: false,
|
||||
publicBaseUrl: currentAuth.publicBaseUrl,
|
||||
};
|
||||
if (p.isCancel(urlInput)) cancelled();
|
||||
publicBaseUrl = urlInput.trim().replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
return {
|
||||
server: {
|
||||
deploymentMode,
|
||||
exposure,
|
||||
host: hostStr.trim(),
|
||||
port,
|
||||
allowedHostnames,
|
||||
serveUi: currentServer?.serveUi ?? true,
|
||||
},
|
||||
auth,
|
||||
};
|
||||
return buildCustomServerConfig({
|
||||
deploymentMode,
|
||||
exposure,
|
||||
host: host.trim(),
|
||||
port,
|
||||
allowedHostnames,
|
||||
serveUi,
|
||||
publicBaseUrl,
|
||||
});
|
||||
}
|
||||
|
||||
49
cli/src/telemetry.ts
Normal file
49
cli/src/telemetry.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import path from "node:path";
|
||||
import {
|
||||
TelemetryClient,
|
||||
resolveTelemetryConfig,
|
||||
loadOrCreateState,
|
||||
trackInstallStarted,
|
||||
trackInstallCompleted,
|
||||
trackCompanyImported,
|
||||
} from "../../packages/shared/src/telemetry/index.js";
|
||||
import { resolvePaperclipInstanceRoot } from "./config/home.js";
|
||||
import { readConfig } from "./config/store.js";
|
||||
import { cliVersion } from "./version.js";
|
||||
|
||||
let client: TelemetryClient | null = null;
|
||||
|
||||
export function initTelemetry(fileConfig?: { enabled?: boolean }): TelemetryClient | null {
|
||||
if (client) return client;
|
||||
|
||||
const config = resolveTelemetryConfig(fileConfig);
|
||||
if (!config.enabled) return null;
|
||||
|
||||
const stateDir = path.join(resolvePaperclipInstanceRoot(), "telemetry");
|
||||
client = new TelemetryClient(config, () => loadOrCreateState(stateDir, cliVersion), cliVersion);
|
||||
return client;
|
||||
}
|
||||
|
||||
export function initTelemetryFromConfigFile(configPath?: string): TelemetryClient | null {
|
||||
try {
|
||||
return initTelemetry(readConfig(configPath)?.telemetry);
|
||||
} catch {
|
||||
return initTelemetry();
|
||||
}
|
||||
}
|
||||
|
||||
export function getTelemetryClient(): TelemetryClient | null {
|
||||
return client;
|
||||
}
|
||||
|
||||
export async function flushTelemetry(): Promise<void> {
|
||||
if (client) {
|
||||
await client.flush();
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
trackInstallStarted,
|
||||
trackInstallCompleted,
|
||||
trackCompanyImported,
|
||||
};
|
||||
10
cli/src/version.ts
Normal file
10
cli/src/version.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { createRequire } from "node:module";
|
||||
|
||||
type PackageJson = {
|
||||
version?: string;
|
||||
};
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const pkg = require("../package.json") as PackageJson;
|
||||
|
||||
export const cliVersion = pkg.version ?? "0.0.0";
|
||||
@@ -2,7 +2,7 @@
|
||||
"extends": "../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
"rootDir": ".."
|
||||
},
|
||||
"include": ["src"]
|
||||
"include": ["src", "../packages/shared/src"]
|
||||
}
|
||||
|
||||
115
doc/AGENTCOMPANIES_SPEC_INVENTORY.md
Normal file
115
doc/AGENTCOMPANIES_SPEC_INVENTORY.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# Agent Companies Spec Inventory
|
||||
|
||||
This document indexes every part of the Paperclip codebase that touches the [Agent Companies Specification](docs/companies/companies-spec.md) (`agentcompanies/v1-draft`).
|
||||
|
||||
Use it when you need to:
|
||||
|
||||
1. **Update the spec** — know which implementation code must change in lockstep.
|
||||
2. **Change code that involves the spec** — find all related files quickly.
|
||||
3. **Keep things aligned** — audit whether implementation matches the spec.
|
||||
|
||||
---
|
||||
|
||||
## 1. Specification & Design Documents
|
||||
|
||||
| File | Role |
|
||||
|---|---|
|
||||
| `docs/companies/companies-spec.md` | **Normative spec** — defines the markdown-first package format (COMPANY.md, TEAM.md, AGENTS.md, PROJECT.md, TASK.md, SKILL.md), reserved files, frontmatter schemas, and vendor extension conventions (`.paperclip.yaml`). |
|
||||
| `doc/plans/2026-03-13-company-import-export-v2.md` | Implementation plan for the markdown-first package model cutover — phases, API changes, UI plan, and rollout strategy. |
|
||||
| `doc/SPEC-implementation.md` | V1 implementation contract; references the portability system and `.paperclip.yaml` sidecar format. |
|
||||
| `docs/specs/cliphub-plan.md` | Earlier blueprint bundle plan; partially superseded by the markdown-first spec (noted in the v2 plan). |
|
||||
| `doc/plans/2026-02-16-module-system.md` | Module system plan; JSON-only company template sections superseded by the markdown-first model. |
|
||||
| `doc/plans/2026-03-14-skills-ui-product-plan.md` | Skills UI plan; references portable skill files and `.paperclip.yaml`. |
|
||||
| `doc/plans/2026-03-14-adapter-skill-sync-rollout.md` | Adapter skill sync rollout; companion to the v2 import/export plan. |
|
||||
|
||||
## 2. Shared Types & Validators
|
||||
|
||||
These define the contract between server, CLI, and UI.
|
||||
|
||||
| File | What it defines |
|
||||
|---|---|
|
||||
| `packages/shared/src/types/company-portability.ts` | TypeScript interfaces: `CompanyPortabilityManifest`, `CompanyPortabilityFileEntry`, `CompanyPortabilityEnvInput`, export/import/preview request and result types, manifest entry types for agents, skills, projects, issues, recurring routines, companies. |
|
||||
| `packages/shared/src/validators/company-portability.ts` | Zod schemas for all portability request/response shapes — used by both server routes and CLI. |
|
||||
| `packages/shared/src/types/index.ts` | Re-exports portability types. |
|
||||
| `packages/shared/src/validators/index.ts` | Re-exports portability validators. |
|
||||
|
||||
## 3. Server — Services
|
||||
|
||||
| File | Responsibility |
|
||||
|---|---|
|
||||
| `server/src/services/company-portability.ts` | **Core portability service.** Export (manifest generation, markdown file emission, `.paperclip.yaml` sidecars), import (graph resolution, collision handling, entity creation), preview (planned-action summary). Handles skill key derivation, recurring task <-> routine mapping, legacy recurrence migration, and package README generation. References `agentcompanies/v1` version string. |
|
||||
| `server/src/services/routines.ts` | Paperclip routine runtime service. Portability now exports routines as recurring `TASK.md` entries and imports recurring tasks back through this service. |
|
||||
| `server/src/services/company-export-readme.ts` | Generates `README.md` and Mermaid org-chart for exported company packages. |
|
||||
| `server/src/services/index.ts` | Re-exports `companyPortabilityService`. |
|
||||
|
||||
## 4. Server — Routes
|
||||
|
||||
| File | Endpoints |
|
||||
|---|---|
|
||||
| `server/src/routes/companies.ts` | `POST /api/companies/:companyId/export` — legacy export bundle<br>`POST /api/companies/:companyId/exports/preview` — export preview<br>`POST /api/companies/:companyId/exports` — export package<br>`POST /api/companies/import/preview` — import preview<br>`POST /api/companies/import` — perform import |
|
||||
|
||||
Route registration lives in `server/src/app.ts` via `companyRoutes(db, storage)`.
|
||||
|
||||
## 5. Server — Tests
|
||||
|
||||
| File | Coverage |
|
||||
|---|---|
|
||||
| `server/src/__tests__/company-portability.test.ts` | Unit tests for the portability service (export, import, preview, manifest shape, `agentcompanies/v1` version). |
|
||||
| `server/src/__tests__/company-portability-routes.test.ts` | Integration tests for the portability HTTP endpoints. |
|
||||
|
||||
## 6. CLI
|
||||
|
||||
| File | Commands |
|
||||
|---|---|
|
||||
| `cli/src/commands/client/company.ts` | `company export` — exports a company package to disk (flags: `--out`, `--include`, `--projects`, `--issues`, `--projectIssues`).<br>`company import <fromPathOrUrl>` — imports a company package from a file or folder (flags: positional source path/URL or GitHub shorthand, `--include`, `--target`, `--companyId`, `--newCompanyName`, `--agents`, `--collision`, `--ref`, `--dryRun`).<br>Reads/writes portable file entries and handles `.paperclip.yaml` filtering. |
|
||||
|
||||
## 7. UI — Pages
|
||||
|
||||
| File | Role |
|
||||
|---|---|
|
||||
| `ui/src/pages/CompanyExport.tsx` | Export UI: preview, manifest display, file tree visualization, ZIP archive creation and download. Filters `.paperclip.yaml` based on selection. Shows manifest and README in editor. |
|
||||
| `ui/src/pages/CompanyImport.tsx` | Import UI: source input (upload/folder/GitHub URL/generic URL), ZIP reading, preview pane with dependency tree, entity selection checkboxes, trust/licensing warnings, secrets requirements, collision strategy, adapter config. |
|
||||
|
||||
## 8. UI — Components
|
||||
|
||||
| File | Role |
|
||||
|---|---|
|
||||
| `ui/src/components/PackageFileTree.tsx` | Reusable file tree component for both import and export. Builds tree from `CompanyPortabilityFileEntry` items, parses frontmatter, shows action indicators (create/update/skip), and maps frontmatter field labels. |
|
||||
|
||||
## 9. UI — Libraries
|
||||
|
||||
| File | Role |
|
||||
|---|---|
|
||||
| `ui/src/lib/portable-files.ts` | Helpers for portable file entries: `getPortableFileText`, `getPortableFileDataUrl`, `getPortableFileContentType`, `isPortableImageFile`. |
|
||||
| `ui/src/lib/zip.ts` | ZIP archive creation (`createZipArchive`) and reading (`readZipArchive`) — implements ZIP format from scratch for company packages. CRC32, DOS date/time encoding. |
|
||||
| `ui/src/lib/zip.test.ts` | Tests for ZIP utilities; exercises round-trip with portability file entries and `.paperclip.yaml` content. |
|
||||
|
||||
## 10. UI — API Client
|
||||
|
||||
| File | Functions |
|
||||
|---|---|
|
||||
| `ui/src/api/companies.ts` | `companiesApi.exportBundle`, `companiesApi.exportPreview`, `companiesApi.exportPackage`, `companiesApi.importPreview`, `companiesApi.importBundle` — typed fetch wrappers for the portability endpoints. |
|
||||
|
||||
## 11. Skills & Agent Instructions
|
||||
|
||||
| File | Relevance |
|
||||
|---|---|
|
||||
| `skills/paperclip/references/company-skills.md` | Reference doc for company skill library workflow — install, inspect, update, assign. Skill packages are a subset of the agent companies spec. |
|
||||
| `server/src/services/company-skills.ts` | Company skill management service — handles SKILL.md-based imports and company-level skill library. |
|
||||
| `server/src/services/agent-instructions.ts` | Agent instructions service — resolves AGENTS.md paths for agent instruction loading. |
|
||||
|
||||
## 12. Quick Cross-Reference by Spec Concept
|
||||
|
||||
| Spec concept | Primary implementation files |
|
||||
|---|---|
|
||||
| `COMPANY.md` frontmatter & body | `company-portability.ts` (export emitter + import parser) |
|
||||
| `AGENTS.md` frontmatter & body | `company-portability.ts`, `agent-instructions.ts` |
|
||||
| `PROJECT.md` frontmatter & body | `company-portability.ts` |
|
||||
| `TASK.md` frontmatter & body | `company-portability.ts` |
|
||||
| `SKILL.md` packages | `company-portability.ts`, `company-skills.ts` |
|
||||
| `.paperclip.yaml` vendor sidecar | `company-portability.ts`, `routines.ts`, `CompanyExport.tsx`, `company.ts` (CLI) |
|
||||
| `manifest.json` | `company-portability.ts` (generation), shared types (schema) |
|
||||
| ZIP package format | `zip.ts` (UI), `company.ts` (CLI file I/O) |
|
||||
| Collision resolution | `company-portability.ts` (server), `CompanyImport.tsx` (UI) |
|
||||
| Env/secrets declarations | shared types (`CompanyPortabilityEnvInput`), `CompanyImport.tsx` (UI) |
|
||||
| README + org chart | `company-export-readme.ts` |
|
||||
@@ -32,10 +32,12 @@ Mode taxonomy and design intent are documented in `doc/DEPLOYMENT-MODES.md`.
|
||||
Current CLI behavior:
|
||||
|
||||
- `paperclipai onboard` and `paperclipai configure --section server` set deployment mode in config
|
||||
- server onboarding/configure ask for reachability intent and write `server.bind`
|
||||
- `paperclipai run --bind <loopback|lan|tailnet>` passes a quickstart bind preset into first-run onboarding when config is missing
|
||||
- runtime can override mode with `PAPERCLIP_DEPLOYMENT_MODE`
|
||||
- `paperclipai run` and `paperclipai doctor` do not yet expose a direct `--mode` flag
|
||||
- `paperclipai run` and `paperclipai doctor` still do not expose a direct low-level `--mode` flag
|
||||
|
||||
Target behavior (planned) is documented in `doc/DEPLOYMENT-MODES.md` section 5.
|
||||
Canonical behavior is documented in `doc/DEPLOYMENT-MODES.md`.
|
||||
|
||||
Allow an authenticated/private hostname (for example custom Tailscale DNS):
|
||||
|
||||
|
||||
@@ -17,6 +17,11 @@ Paperclip supports two runtime modes:
|
||||
|
||||
This keeps one authenticated auth stack while still separating low-friction private-network defaults from internet-facing hardening requirements.
|
||||
|
||||
Paperclip now treats **bind** as a separate concern from auth:
|
||||
|
||||
- auth model: `local_trusted` vs `authenticated`, plus `private/public`
|
||||
- reachability model: `server.bind = loopback | lan | tailnet | custom`
|
||||
|
||||
## 2. Canonical Model
|
||||
|
||||
| Runtime Mode | Exposure | Human auth | Primary use |
|
||||
@@ -25,6 +30,15 @@ This keeps one authenticated auth stack while still separating low-friction priv
|
||||
| `authenticated` | `private` | Login required | Private-network access (for example Tailscale/VPN/LAN) |
|
||||
| `authenticated` | `public` | Login required | Internet-facing/cloud deployment |
|
||||
|
||||
## Reachability Model
|
||||
|
||||
| Bind | Meaning | Typical use |
|
||||
|---|---|---|
|
||||
| `loopback` | Listen on localhost only | default local usage, reverse-proxy deployments |
|
||||
| `lan` | Listen on all interfaces (`0.0.0.0`) | LAN/VPN/private-network access |
|
||||
| `tailnet` | Listen on a detected Tailscale IP | Tailscale-only access |
|
||||
| `custom` | Listen on an explicit host/IP | advanced interface-specific setups |
|
||||
|
||||
## 3. Security Policy
|
||||
|
||||
## `local_trusted`
|
||||
@@ -38,12 +52,14 @@ This keeps one authenticated auth stack while still separating low-friction priv
|
||||
- login required
|
||||
- low-friction URL handling (`auto` base URL mode)
|
||||
- private-host trust policy required
|
||||
- bind can be `loopback`, `lan`, `tailnet`, or `custom`
|
||||
|
||||
## `authenticated + public`
|
||||
|
||||
- login required
|
||||
- explicit public URL required
|
||||
- stricter deployment checks and failures in doctor
|
||||
- recommended bind is `loopback` behind a reverse proxy; direct `lan/custom` is advanced
|
||||
|
||||
## 4. Onboarding UX Contract
|
||||
|
||||
@@ -55,14 +71,22 @@ pnpm paperclipai onboard
|
||||
|
||||
Server prompt behavior:
|
||||
|
||||
1. ask mode, default `local_trusted`
|
||||
2. option copy:
|
||||
- `local_trusted`: "Easiest for local setup (no login, localhost-only)"
|
||||
- `authenticated`: "Login required; use for private network or public hosting"
|
||||
3. if `authenticated`, ask exposure:
|
||||
- `private`: "Private network access (for example Tailscale), lower setup friction"
|
||||
- `public`: "Internet-facing deployment, stricter security requirements"
|
||||
4. ask explicit public URL only for `authenticated + public`
|
||||
1. quickstart `--yes` defaults to `server.bind=loopback` and therefore `local_trusted/private`
|
||||
2. advanced server setup asks reachability first:
|
||||
- `Trusted local` → `bind=loopback`, `local_trusted/private`
|
||||
- `Private network` → `bind=lan`, `authenticated/private`
|
||||
- `Tailnet` → `bind=tailnet`, `authenticated/private`
|
||||
- `Custom` → manual mode/exposure/host entry
|
||||
3. raw host entry is only required for the `Custom` path
|
||||
4. explicit public URL is only required for `authenticated + public`
|
||||
|
||||
Examples:
|
||||
|
||||
```sh
|
||||
pnpm paperclipai onboard --yes
|
||||
pnpm paperclipai onboard --yes --bind lan
|
||||
pnpm paperclipai run --bind tailnet
|
||||
```
|
||||
|
||||
`configure --section server` follows the same interactive behavior.
|
||||
|
||||
|
||||
@@ -39,13 +39,39 @@ This starts:
|
||||
|
||||
`pnpm dev` runs the server in watch mode and restarts on changes from workspace packages (including adapter packages). Use `pnpm dev:once` to run without file watching.
|
||||
|
||||
`pnpm dev:once` auto-applies pending local migrations by default before starting the dev server.
|
||||
|
||||
`pnpm dev` and `pnpm dev:once` are now idempotent for the current repo and instance: if the matching Paperclip dev runner is already alive, Paperclip reports the existing process instead of starting a duplicate.
|
||||
|
||||
Inspect or stop the current repo's managed dev runner:
|
||||
|
||||
```sh
|
||||
pnpm dev:list
|
||||
pnpm dev:stop
|
||||
```
|
||||
|
||||
`pnpm dev:once` now tracks backend-relevant file changes and pending migrations. When the current boot is stale, the board UI shows a `Restart required` banner. You can also enable guarded auto-restart in `Instance Settings > Experimental`, which waits for queued/running local agent runs to finish before restarting the dev server.
|
||||
|
||||
Tailscale/private-auth dev mode:
|
||||
|
||||
```sh
|
||||
pnpm dev --tailscale-auth
|
||||
pnpm dev --bind lan
|
||||
```
|
||||
|
||||
This runs dev as `authenticated/private` and binds the server to `0.0.0.0` for private-network access.
|
||||
This runs dev as `authenticated/private` with a private-network bind preset.
|
||||
|
||||
For Tailscale-only reachability on a detected tailnet address:
|
||||
|
||||
```sh
|
||||
pnpm dev --bind tailnet
|
||||
```
|
||||
|
||||
Legacy aliases still map to the old broad private-network behavior:
|
||||
|
||||
```sh
|
||||
pnpm dev --tailscale-auth
|
||||
pnpm dev --authenticated-private
|
||||
```
|
||||
|
||||
Allow additional private hostnames (for example custom Tailscale hostnames):
|
||||
|
||||
@@ -53,6 +79,29 @@ Allow additional private hostnames (for example custom Tailscale hostnames):
|
||||
pnpm paperclipai allowed-hostname dotta-macbook-pro
|
||||
```
|
||||
|
||||
## Test Commands
|
||||
|
||||
Use the cheap local default unless you are specifically working on browser flows:
|
||||
|
||||
```sh
|
||||
pnpm test
|
||||
```
|
||||
|
||||
`pnpm test` runs the Vitest suite only. For interactive Vitest watch mode use:
|
||||
|
||||
```sh
|
||||
pnpm test:watch
|
||||
```
|
||||
|
||||
Browser suites stay separate:
|
||||
|
||||
```sh
|
||||
pnpm test:e2e
|
||||
pnpm test:release-smoke
|
||||
```
|
||||
|
||||
These browser suites are intended for targeted local verification and CI, not the default agent/human test command.
|
||||
|
||||
## One-Command Local Run
|
||||
|
||||
For a first-time local install, you can bootstrap and run in one command:
|
||||
@@ -84,7 +133,7 @@ docker run --name paperclip \
|
||||
Or use Compose:
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.quickstart.yml up --build
|
||||
docker compose -f docker/docker-compose.quickstart.yml up --build
|
||||
```
|
||||
|
||||
See `doc/DOCKER.md` for API key wiring (`OPENAI_API_KEY` / `ANTHROPIC_API_KEY`) and persistence details.
|
||||
@@ -128,6 +177,12 @@ When a local agent run has no resolved project/session workspace, Paperclip fall
|
||||
|
||||
This path honors `PAPERCLIP_HOME` and `PAPERCLIP_INSTANCE_ID` in non-default setups.
|
||||
|
||||
For `codex_local`, Paperclip also manages a per-company Codex home under the instance root and seeds it from the shared Codex login/config home (`$CODEX_HOME` or `~/.codex`):
|
||||
|
||||
- `~/.paperclip/instances/default/companies/<company-id>/codex-home`
|
||||
|
||||
If the `codex` CLI is not installed or not on `PATH`, `codex_local` agent runs fail at execution time with a clear adapter error. Quota polling uses a short-lived `codex app-server` subprocess: when `codex` cannot be spawned, that provider reports `ok: false` in aggregated quota results and the API server keeps running (it must not exit on a missing binary).
|
||||
|
||||
## Worktree-local Instances
|
||||
|
||||
When developing from multiple git worktrees, do not point two Paperclip servers at the same embedded PostgreSQL data directory.
|
||||
@@ -156,6 +211,10 @@ Seed modes:
|
||||
|
||||
After `worktree init`, both the server and the CLI auto-load the repo-local `.paperclip/.env` when run inside that worktree, so normal commands like `pnpm dev`, `paperclipai doctor`, and `paperclipai db:backup` stay scoped to the worktree instance.
|
||||
|
||||
`pnpm dev` now fails fast in a linked git worktree when `.paperclip/.env` is missing, instead of silently booting against the default instance/port. If that happens, run `paperclipai worktree init` in the worktree first.
|
||||
|
||||
Provisioned git worktrees also pause seeded routines that still have enabled schedule triggers in the isolated worktree database by default. This prevents copied daily/cron routines from firing unexpectedly inside the new workspace instance during development without disabling webhook/API-only routines.
|
||||
|
||||
That repo-local env also sets:
|
||||
|
||||
- `PAPERCLIP_IN_WORKTREE=true`
|
||||
@@ -200,6 +259,77 @@ paperclipai worktree init --from-data-dir ~/.paperclip
|
||||
paperclipai worktree init --force
|
||||
```
|
||||
|
||||
Repair an already-created repo-managed worktree and reseed its isolated instance from the main default install:
|
||||
|
||||
```sh
|
||||
cd /path/to/paperclip/.paperclip/worktrees/PAP-884-ai-commits-component
|
||||
pnpm paperclipai worktree init --force --seed-mode minimal \
|
||||
--name PAP-884-ai-commits-component \
|
||||
--from-config ~/.paperclip/instances/default/config.json
|
||||
```
|
||||
|
||||
That rewrites the worktree-local `.paperclip/config.json` + `.paperclip/.env`, recreates the isolated instance under `~/.paperclip-worktrees/instances/<worktree-id>/`, and preserves the git worktree contents themselves.
|
||||
|
||||
For an already-created worktree where you want the CLI to decide whether to rebuild missing worktree metadata or just reseed the isolated DB, use `worktree repair`.
|
||||
|
||||
**`pnpm paperclipai worktree repair [options]`** — Repair the current linked worktree by default, or create/repair a named linked worktree under `.paperclip/worktrees/` when `--branch` is provided. The command never targets the primary checkout unless you explicitly pass `--branch`.
|
||||
|
||||
| Option | Description |
|
||||
|---|---|
|
||||
| `--branch <name>` | Existing branch/worktree selector to repair, or a branch name to create under `.paperclip/worktrees` |
|
||||
| `--home <path>` | Home root for worktree instances (default: `~/.paperclip-worktrees`) |
|
||||
| `--from-config <path>` | Source config.json to seed from |
|
||||
| `--from-data-dir <path>` | Source `PAPERCLIP_HOME` used when deriving the source config |
|
||||
| `--from-instance <id>` | Source instance id when deriving the source config (default: `default`) |
|
||||
| `--seed-mode <mode>` | Seed profile: `minimal` or `full` (default: `minimal`) |
|
||||
| `--no-seed` | Repair metadata only when bootstrapping a missing worktree config |
|
||||
| `--allow-live-target` | Override the guard that requires the target worktree DB to be stopped first |
|
||||
|
||||
Examples:
|
||||
|
||||
```sh
|
||||
# From inside a linked worktree, rebuild missing .paperclip metadata and reseed it from the default instance.
|
||||
cd /path/to/paperclip/.paperclip/worktrees/PAP-1132-assistant-ui-pap-1131-make-issues-comments-be-like-a-chat
|
||||
pnpm paperclipai worktree repair
|
||||
|
||||
# From the primary checkout, create or repair a linked worktree for a branch under .paperclip/worktrees/.
|
||||
cd /path/to/paperclip
|
||||
pnpm paperclipai worktree repair --branch PAP-1132-assistant-ui-pap-1131-make-issues-comments-be-like-a-chat
|
||||
```
|
||||
|
||||
For an already-created worktree where you want to keep the existing repo-local config/env and only overwrite the isolated database, use `worktree reseed` instead. Stop the target worktree's Paperclip server first so the command can replace the DB safely.
|
||||
|
||||
**`pnpm paperclipai worktree reseed [options]`** — Re-seed an existing worktree-local instance from another Paperclip instance or worktree while preserving the target worktree's current config, ports, and instance identity.
|
||||
|
||||
| Option | Description |
|
||||
|---|---|
|
||||
| `--from <worktree>` | Source worktree path, directory name, branch name, or `current` |
|
||||
| `--to <worktree>` | Target worktree path, directory name, branch name, or `current` (defaults to `current`) |
|
||||
| `--from-config <path>` | Source config.json to seed from |
|
||||
| `--from-data-dir <path>` | Source `PAPERCLIP_HOME` used when deriving the source config |
|
||||
| `--from-instance <id>` | Source instance id when deriving the source config |
|
||||
| `--seed-mode <mode>` | Seed profile: `minimal` or `full` (default: `full`) |
|
||||
| `--yes` | Skip the destructive confirmation prompt |
|
||||
| `--allow-live-target` | Override the guard that requires the target worktree DB to be stopped first |
|
||||
|
||||
Examples:
|
||||
|
||||
```sh
|
||||
# From the main repo, reseed a worktree from the current default/master instance.
|
||||
cd /path/to/paperclip
|
||||
pnpm paperclipai worktree reseed \
|
||||
--from current \
|
||||
--to PAP-1132-assistant-ui-pap-1131-make-issues-comments-be-like-a-chat \
|
||||
--seed-mode full \
|
||||
--yes
|
||||
|
||||
# From inside a worktree, reseed it from the default instance config.
|
||||
cd /path/to/paperclip/.paperclip/worktrees/PAP-1132-assistant-ui-pap-1131-make-issues-comments-be-like-a-chat
|
||||
pnpm paperclipai worktree reseed \
|
||||
--from-instance default \
|
||||
--seed-mode full
|
||||
```
|
||||
|
||||
**`pnpm paperclipai worktree:make <name> [options]`** — Create `~/NAME` as a git worktree, then initialize an isolated Paperclip instance inside it. This combines `git worktree add` with `worktree init` in a single step.
|
||||
|
||||
| Option | Description |
|
||||
|
||||
132
doc/DOCKER.md
132
doc/DOCKER.md
@@ -2,6 +2,28 @@
|
||||
|
||||
Run Paperclip in Docker without installing Node or pnpm locally.
|
||||
|
||||
All commands below assume you are in the **project root** (the directory containing `package.json`), not inside `docker/`.
|
||||
|
||||
## Building the image
|
||||
|
||||
```sh
|
||||
docker build -t paperclip-local .
|
||||
```
|
||||
|
||||
The Dockerfile installs common agent tools (`git`, `gh`, `curl`, `wget`, `ripgrep`, `python3`) and the Claude, Codex, and OpenCode CLIs.
|
||||
|
||||
Build arguments:
|
||||
|
||||
| Arg | Default | Purpose |
|
||||
|-----|---------|---------|
|
||||
| `USER_UID` | `1000` | UID for the container `node` user (match your host UID to avoid permission issues on bind mounts) |
|
||||
| `USER_GID` | `1000` | GID for the container `node` group |
|
||||
|
||||
```sh
|
||||
docker build -t paperclip-local \
|
||||
--build-arg USER_UID=$(id -u) --build-arg USER_GID=$(id -g) .
|
||||
```
|
||||
|
||||
## One-liner (build + run)
|
||||
|
||||
```sh
|
||||
@@ -10,6 +32,7 @@ docker run --name paperclip \
|
||||
-p 3100:3100 \
|
||||
-e HOST=0.0.0.0 \
|
||||
-e PAPERCLIP_HOME=/paperclip \
|
||||
-e BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
-v "$(pwd)/data/docker-paperclip:/paperclip" \
|
||||
paperclip-local
|
||||
```
|
||||
@@ -25,10 +48,15 @@ Data persistence:
|
||||
|
||||
All persisted under your bind mount (`./data/docker-paperclip` in the example above).
|
||||
|
||||
## Compose Quickstart
|
||||
## Docker Compose
|
||||
|
||||
### Quickstart (embedded SQLite)
|
||||
|
||||
Single container, no external database. Data persists via a bind mount.
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.quickstart.yml up --build
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
docker compose -f docker/docker-compose.quickstart.yml up --build
|
||||
```
|
||||
|
||||
Defaults:
|
||||
@@ -39,11 +67,36 @@ Defaults:
|
||||
Optional overrides:
|
||||
|
||||
```sh
|
||||
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=./data/pc docker compose -f docker-compose.quickstart.yml up --build
|
||||
PAPERCLIP_PORT=3200 PAPERCLIP_DATA_DIR=../data/pc \
|
||||
docker compose -f docker/docker-compose.quickstart.yml up --build
|
||||
```
|
||||
|
||||
**Note:** `PAPERCLIP_DATA_DIR` is resolved relative to the compose file (`docker/`), so `../data/pc` maps to `data/pc` in the project root.
|
||||
|
||||
If you change host port or use a non-local domain, set `PAPERCLIP_PUBLIC_URL` to the external URL you will use in browser/auth flows.
|
||||
|
||||
Pass `OPENAI_API_KEY` and/or `ANTHROPIC_API_KEY` to enable local adapter runs.
|
||||
|
||||
### Full stack (with PostgreSQL)
|
||||
|
||||
Paperclip server + PostgreSQL 17. The database is health-checked before the server starts.
|
||||
|
||||
```sh
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32) \
|
||||
docker compose -f docker/docker-compose.yml up --build
|
||||
```
|
||||
|
||||
PostgreSQL data persists in a named Docker volume (`pgdata`). Paperclip data persists in `paperclip-data`.
|
||||
|
||||
### Untrusted PR review
|
||||
|
||||
Isolated container for reviewing untrusted pull requests with Codex or Claude, without exposing your host machine. See `doc/UNTRUSTED-PR-REVIEW.md` for the full workflow.
|
||||
|
||||
```sh
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
```
|
||||
|
||||
## Authenticated Compose (Single Public URL)
|
||||
|
||||
For authenticated deployments, set one canonical public URL and let Paperclip derive auth/callback defaults:
|
||||
@@ -93,11 +146,71 @@ Notes:
|
||||
- Without API keys, the app still runs normally.
|
||||
- Adapter environment checks in Paperclip will surface missing auth/CLI prerequisites.
|
||||
|
||||
## Untrusted PR Review Container
|
||||
## Podman Quadlet (systemd)
|
||||
|
||||
If you want a separate Docker environment for reviewing untrusted pull requests with `codex` or `claude`, use the dedicated review workflow in `doc/UNTRUSTED-PR-REVIEW.md`.
|
||||
The `docker/quadlet/` directory contains unit files to run Paperclip + PostgreSQL as systemd services via Podman Quadlet.
|
||||
|
||||
That setup keeps CLI auth state in Docker volumes instead of your host home directory and uses a separate scratch workspace for PR checkouts and preview runs.
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `docker/quadlet/paperclip.pod` | Pod definition — groups containers into a shared network namespace |
|
||||
| `docker/quadlet/paperclip.container` | Paperclip server — joins the pod, connects to Postgres at `127.0.0.1` |
|
||||
| `docker/quadlet/paperclip-db.container` | PostgreSQL 17 — joins the pod, health-checked |
|
||||
|
||||
### Setup
|
||||
|
||||
1. Build the image (see above).
|
||||
|
||||
2. Copy quadlet files to your systemd directory:
|
||||
|
||||
```sh
|
||||
# Rootless (recommended)
|
||||
cp docker/quadlet/*.pod docker/quadlet/*.container \
|
||||
~/.config/containers/systemd/
|
||||
|
||||
# Or rootful
|
||||
sudo cp docker/quadlet/*.pod docker/quadlet/*.container \
|
||||
/etc/containers/systemd/
|
||||
```
|
||||
|
||||
3. Create a secrets env file (keep out of version control):
|
||||
|
||||
```sh
|
||||
cat > ~/.config/containers/systemd/paperclip.env <<EOL
|
||||
BETTER_AUTH_SECRET=$(openssl rand -hex 32)
|
||||
POSTGRES_USER=paperclip
|
||||
POSTGRES_PASSWORD=paperclip
|
||||
POSTGRES_DB=paperclip
|
||||
DATABASE_URL=postgres://paperclip:paperclip@127.0.0.1:5432/paperclip
|
||||
# OPENAI_API_KEY=sk-...
|
||||
# ANTHROPIC_API_KEY=sk-...
|
||||
EOL
|
||||
```
|
||||
|
||||
4. Create the data directory and start:
|
||||
|
||||
```sh
|
||||
mkdir -p ~/.local/share/paperclip
|
||||
systemctl --user daemon-reload
|
||||
systemctl --user start paperclip-pod
|
||||
```
|
||||
|
||||
### Quadlet management
|
||||
|
||||
```sh
|
||||
journalctl --user -u paperclip -f # App logs
|
||||
journalctl --user -u paperclip-db -f # DB logs
|
||||
systemctl --user status paperclip-pod # Pod status
|
||||
systemctl --user restart paperclip-pod # Restart all
|
||||
systemctl --user stop paperclip-pod # Stop all
|
||||
```
|
||||
|
||||
### Quadlet notes
|
||||
|
||||
- **First boot**: Unlike Docker Compose's `condition: service_healthy`, Quadlet's `After=` only waits for the DB unit to *start*, not for PostgreSQL to be ready. On a cold first boot you may see one or two restart attempts in `journalctl --user -u paperclip` while PostgreSQL initialises — this is expected and resolves automatically via `Restart=on-failure`.
|
||||
- Containers in a pod share `localhost`, so Paperclip reaches Postgres at `127.0.0.1:5432`.
|
||||
- PostgreSQL data persists in the `paperclip-pgdata` named volume.
|
||||
- Paperclip data persists at `~/.local/share/paperclip`.
|
||||
- For rootful quadlet deployment, remove `%h` prefixes and use absolute paths.
|
||||
|
||||
## Onboard Smoke Test (Ubuntu + npm only)
|
||||
|
||||
@@ -133,4 +246,9 @@ Notes:
|
||||
- In authenticated mode, the smoke script defaults `SMOKE_AUTO_BOOTSTRAP=true` and drives the real bootstrap path automatically: it signs up a real user, runs `paperclipai auth bootstrap-ceo` inside the container to mint a real bootstrap invite, accepts that invite over HTTP, and verifies board session access.
|
||||
- Run the script in the foreground to watch the onboarding flow; stop with `Ctrl+C` after validation.
|
||||
- Set `SMOKE_DETACH=true` to leave the container running for automation and optionally write shell-ready metadata to `SMOKE_METADATA_FILE`.
|
||||
- The image definition is in `Dockerfile.onboard-smoke`.
|
||||
- The image definition is in `docker/Dockerfile.onboard-smoke`.
|
||||
|
||||
## General Notes
|
||||
|
||||
- The `docker-entrypoint.sh` adjusts the container `node` user UID/GID at startup to match the values passed via `USER_UID`/`USER_GID`, avoiding permission issues on bind-mounted volumes.
|
||||
- Paperclip data persists via Docker volumes/bind mounts (compose) or at `~/.local/share/paperclip` (quadlet).
|
||||
|
||||
@@ -3,7 +3,7 @@ Use this exact checklist.
|
||||
1. Start Paperclip in auth mode.
|
||||
```bash
|
||||
cd <paperclip-repo-root>
|
||||
pnpm dev --tailscale-auth
|
||||
pnpm dev --bind lan
|
||||
```
|
||||
Then verify:
|
||||
```bash
|
||||
|
||||
@@ -51,10 +51,9 @@ Public packages are discovered from:
|
||||
|
||||
- `packages/`
|
||||
- `server/`
|
||||
- `ui/`
|
||||
- `cli/`
|
||||
|
||||
`ui/` is ignored because it is private.
|
||||
|
||||
The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs), which:
|
||||
|
||||
- finds all public packages
|
||||
@@ -65,6 +64,89 @@ The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts
|
||||
|
||||
Those rewrites are temporary. The working tree is restored after publish or dry-run.
|
||||
|
||||
## `@paperclipai/ui` packaging
|
||||
|
||||
The UI package publishes prebuilt static assets, not the source workspace.
|
||||
|
||||
The `ui` package uses [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs) during `prepack` to swap in a lean publish manifest that:
|
||||
|
||||
- keeps the release-managed `name` and `version`
|
||||
- publishes only `dist/`
|
||||
- omits the source-only dependency graph from downstream installs
|
||||
|
||||
After packing or publishing, `postpack` restores the development manifest automatically.
|
||||
|
||||
### Manual first publish for `@paperclipai/ui`
|
||||
|
||||
If you need to publish only the UI package once by hand, use the real package name:
|
||||
|
||||
- `@paperclipai/ui`
|
||||
|
||||
Recommended flow from the repo root:
|
||||
|
||||
```bash
|
||||
# optional sanity check: this 404s until the first publish exists
|
||||
npm view @paperclipai/ui version
|
||||
|
||||
# make sure the dist payload is fresh
|
||||
pnpm --filter @paperclipai/ui build
|
||||
|
||||
# confirm your local npm auth before the real publish
|
||||
npm whoami
|
||||
|
||||
# safe preview of the exact publish payload
|
||||
cd ui
|
||||
pnpm publish --dry-run --no-git-checks --access public
|
||||
|
||||
# real publish
|
||||
pnpm publish --no-git-checks --access public
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- Publish from `ui/`, not the repo root.
|
||||
- `prepack` automatically rewrites `ui/package.json` to the lean publish manifest, and `postpack` restores the dev manifest after the command finishes.
|
||||
- If `npm view @paperclipai/ui version` already returns the same version that is in [`ui/package.json`](../ui/package.json), do not republish. Bump the version or use the normal repo-wide release flow in [`scripts/release.sh`](../scripts/release.sh).
|
||||
|
||||
If the first real publish returns npm `E404`, check npm-side prerequisites before retrying:
|
||||
|
||||
- `npm whoami` must succeed first. An expired or missing npm login will block the publish.
|
||||
- For an organization-scoped package like `@paperclipai/ui`, the `paperclipai` npm organization must exist and the publisher must be a member with permission to publish to that scope.
|
||||
- The initial publish must include `--access public` for a public scoped package.
|
||||
- npm also requires either account 2FA for publishing or a granular token that is allowed to bypass 2FA.
|
||||
|
||||
### Manual first publish for `@paperclipai/mcp-server`
|
||||
|
||||
If you need to publish only the MCP server package once by hand, use:
|
||||
|
||||
- `@paperclipai/mcp-server`
|
||||
|
||||
Recommended flow from the repo root:
|
||||
|
||||
```bash
|
||||
# optional sanity check: this 404s until the first publish exists
|
||||
npm view @paperclipai/mcp-server version
|
||||
|
||||
# make sure the build output is fresh
|
||||
pnpm --filter @paperclipai/mcp-server build
|
||||
|
||||
# confirm your local npm auth before the real publish
|
||||
npm whoami
|
||||
|
||||
# safe preview of the exact publish payload
|
||||
cd packages/mcp-server
|
||||
pnpm publish --dry-run --no-git-checks --access public
|
||||
|
||||
# real publish
|
||||
pnpm publish --no-git-checks --access public
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- Publish from `packages/mcp-server/`, not the repo root.
|
||||
- If `npm view @paperclipai/mcp-server version` already returns the same version that is in [`packages/mcp-server/package.json`](../packages/mcp-server/package.json), do not republish. Bump the version or use the normal repo-wide release flow in [`scripts/release.sh`](../scripts/release.sh).
|
||||
- The same npm-side prerequisites apply as above: valid npm auth, permission to publish to the `@paperclipai` scope, `--access public`, and the required publish auth/2FA policy.
|
||||
|
||||
## Version formats
|
||||
|
||||
Paperclip uses calendar versions:
|
||||
@@ -135,6 +217,7 @@ This is the fastest way to restore the default install path if a stable release
|
||||
|
||||
- [`scripts/build-npm.sh`](../scripts/build-npm.sh)
|
||||
- [`scripts/generate-npm-package-json.mjs`](../scripts/generate-npm-package-json.mjs)
|
||||
- [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs)
|
||||
- [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs)
|
||||
- [`cli/esbuild.config.mjs`](../cli/esbuild.config.mjs)
|
||||
- [`doc/RELEASING.md`](RELEASING.md)
|
||||
|
||||
@@ -35,6 +35,7 @@ At minimum that includes:
|
||||
|
||||
- `paperclipai`
|
||||
- `@paperclipai/server`
|
||||
- `@paperclipai/ui`
|
||||
- public packages under `packages/`
|
||||
|
||||
### 2.1. In npm, open each package settings page
|
||||
|
||||
@@ -184,6 +184,11 @@ Invariant: at least one root `company` level goal per company.
|
||||
- `status` enum: `backlog | planned | in_progress | completed | cancelled`
|
||||
- `lead_agent_id` uuid fk `agents.id` null
|
||||
- `target_date` date null
|
||||
- `env` jsonb null (same secret-aware env binding format used by agent config)
|
||||
|
||||
Invariant:
|
||||
|
||||
- project env is merged into run environment for issues in that project and overrides conflicting agent env keys before Paperclip runtime-owned keys are injected
|
||||
|
||||
## 7.6 `issues` (core task entity)
|
||||
|
||||
@@ -390,6 +395,8 @@ Side effects:
|
||||
- entering `done` sets `completed_at`
|
||||
- entering `cancelled` sets `cancelled_at`
|
||||
|
||||
Detailed ownership, execution, blocker, and crash-recovery semantics are documented in `doc/execution-semantics.md`.
|
||||
|
||||
## 8.3 Approval Status
|
||||
|
||||
- `pending -> approved | rejected | cancelled`
|
||||
@@ -441,6 +448,7 @@ All endpoints are under `/api` and return JSON.
|
||||
- `POST /companies`
|
||||
- `GET /companies/:companyId`
|
||||
- `PATCH /companies/:companyId`
|
||||
- `PATCH /companies/:companyId/branding`
|
||||
- `POST /companies/:companyId/archive`
|
||||
|
||||
## 10.2 Goals
|
||||
@@ -490,7 +498,7 @@ All endpoints are under `/api` and return JSON.
|
||||
```json
|
||||
{
|
||||
"agentId": "uuid",
|
||||
"expectedStatuses": ["todo", "backlog", "blocked"]
|
||||
"expectedStatuses": ["todo", "backlog", "blocked", "in_review"]
|
||||
}
|
||||
```
|
||||
|
||||
@@ -843,20 +851,31 @@ V1 is complete only when all criteria are true:
|
||||
|
||||
V1 supports company import/export using a portable package contract:
|
||||
|
||||
- exactly one JSON entrypoint: `paperclip.manifest.json`
|
||||
- all other package files are markdown with frontmatter
|
||||
- agent convention:
|
||||
- `agents/<slug>/AGENTS.md` (required for V1 export/import)
|
||||
- `agents/<slug>/HEARTBEAT.md` (optional, import accepted)
|
||||
- `agents/<slug>/*.md` (optional, import accepted)
|
||||
- markdown-first package rooted at `COMPANY.md`
|
||||
- implicit folder discovery by convention
|
||||
- `.paperclip.yaml` sidecar for Paperclip-specific fidelity
|
||||
- canonical base package is vendor-neutral and aligned with `docs/companies/companies-spec.md`
|
||||
- common conventions:
|
||||
- `agents/<slug>/AGENTS.md`
|
||||
- `teams/<slug>/TEAM.md`
|
||||
- `projects/<slug>/PROJECT.md`
|
||||
- `projects/<slug>/tasks/<slug>/TASK.md`
|
||||
- `tasks/<slug>/TASK.md`
|
||||
- `skills/<slug>/SKILL.md`
|
||||
|
||||
Export/import behavior in V1:
|
||||
|
||||
- export includes company metadata and/or agents based on selection
|
||||
- export strips environment-specific paths (`cwd`, local instruction file paths)
|
||||
- export never includes secret values; secret requirements are reported
|
||||
- export emits a clean vendor-neutral markdown package plus `.paperclip.yaml`
|
||||
- projects and starter tasks are opt-in export content rather than default package content
|
||||
- recurring `TASK.md` entries use `recurring: true` in the base package and Paperclip routine fidelity in `.paperclip.yaml`
|
||||
- Paperclip imports recurring task packages as routines instead of downgrading them to one-time issues
|
||||
- export strips environment-specific paths (`cwd`, local instruction file paths, inline prompt duplication) while preserving portable project repo/workspace metadata such as `repoUrl`, refs, and workspace-policy references keyed in `.paperclip.yaml`
|
||||
- export never includes secret values; env inputs are reported as portable declarations instead
|
||||
- import supports target modes:
|
||||
- create a new company
|
||||
- import into an existing company
|
||||
- import recreates exported project workspaces and remaps portable workspace keys back to target-local workspace ids
|
||||
- import forces imported agent timer heartbeats off so packages never start scheduled runs implicitly
|
||||
- import supports collision strategies: `rename`, `skip`, `replace`
|
||||
- import supports preview (dry-run) before apply
|
||||
- GitHub imports warn on unpinned refs instead of blocking
|
||||
|
||||
33
doc/SPEC.md
33
doc/SPEC.md
@@ -186,17 +186,21 @@ The heartbeat is a protocol, not a runtime. Paperclip defines how to initiate an
|
||||
|
||||
### Execution Adapters
|
||||
|
||||
Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Initial adapters:
|
||||
Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Built-in adapters include:
|
||||
|
||||
| Adapter | Mechanism | Example |
|
||||
| -------------------- | ----------------------- | --------------------------------------------- |
|
||||
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
|
||||
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
|
||||
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
|
||||
| `gemini_local` | Gemini CLI process | Local Gemini CLI with sandbox and approval |
|
||||
| `hermes_local` | Hermes agent process | Local Hermes agent |
|
||||
| Adapter | Mechanism | Example |
|
||||
| ---------------- | -------------------------- | -------------------------------------------------- |
|
||||
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
|
||||
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
|
||||
| `claude_local` | Local Claude Code process | Claude Code heartbeat worker |
|
||||
| `codex_local` | Local Codex process | Codex CLI heartbeat worker |
|
||||
| `opencode_local` | Local OpenCode process | OpenCode heartbeat worker |
|
||||
| `pi_local` | Local Pi process | Pi CLI heartbeat worker |
|
||||
| `cursor` | Cursor API/CLI bridge | Cursor-integrated heartbeat worker |
|
||||
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
|
||||
| `hermes_local` | Local Hermes process | Hermes agent heartbeat worker |
|
||||
|
||||
The `process` and `http` adapters ship as defaults. Additional adapters have been added for specific agent runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
|
||||
The `process` and `http` adapters ship as generic defaults. Additional built-in adapters cover common local coding runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
|
||||
|
||||
### Adapter Interface
|
||||
|
||||
@@ -376,7 +380,7 @@ Flow:
|
||||
| Layer | Technology |
|
||||
| -------- | ------------------------------------------------------------ |
|
||||
| Frontend | React + Vite |
|
||||
| Backend | TypeScript + Hono (REST API, not tRPC — need non-TS clients) |
|
||||
| Backend | TypeScript + Express (REST API, not tRPC — need non-TS clients) |
|
||||
| Database | PostgreSQL (see [doc/DATABASE.md](./doc/DATABASE.md) for details — PGlite embedded for dev, Docker or hosted Supabase for production) |
|
||||
| Auth | [Better Auth](https://www.better-auth.com/) |
|
||||
|
||||
@@ -406,7 +410,7 @@ No separate "agent API" vs. "board API." Same endpoints, different authorization
|
||||
|
||||
### Work Artifacts
|
||||
|
||||
Paperclip does **not** manage work artifacts (code repos, file systems, deployments, documents). That's entirely the agent's domain. Paperclip tracks tasks and costs. Where and how work gets done is outside scope.
|
||||
Paperclip manages task-linked work artifacts: issue documents (rich-text plans, specs, notes attached to issues) and file attachments. Agents read and write these through the API as part of normal task execution. Full delivery infrastructure (code repos, deployments, production runtime) remains the agent's domain — Paperclip orchestrates the work, not the build pipeline.
|
||||
|
||||
### Open Questions
|
||||
|
||||
@@ -476,15 +480,14 @@ Each is a distinct page/route:
|
||||
- [ ] **Default agent** — basic Claude Code/Codex loop with Paperclip skill
|
||||
- [ ] **Default CEO** — strategic planning, delegation, board communication
|
||||
- [ ] **Paperclip skill (SKILL.md)** — teaches agents to interact with the API
|
||||
- [ ] **REST API** — full API for agent interaction (Hono)
|
||||
- [ ] **REST API** — full API for agent interaction (Express)
|
||||
- [ ] **Web UI** — React/Vite: org chart, task board, dashboard, cost views
|
||||
- [ ] **Agent auth** — connection string generation with URL + key + instructions
|
||||
- [ ] **One-command dev setup** — embedded PGlite, everything local
|
||||
- [ ] **Multiple Adapter types** (HTTP Adapter, OpenClaw Adapter)
|
||||
- [ ] **Multiple Adapter types** (HTTP, OpenClaw gateway, and local coding adapters)
|
||||
|
||||
### Not V1
|
||||
|
||||
- Template export/import
|
||||
- Knowledge base - a future plugin
|
||||
- Advanced governance models (hiring budgets, multi-member boards)
|
||||
- Revenue/expense tracking beyond token costs - a future plugin
|
||||
@@ -509,7 +512,7 @@ Things Paperclip explicitly does **not** do:
|
||||
- **Not a SaaS** — single-tenant, self-hosted
|
||||
- **Not opinionated about Agent implementation** — any language, any framework, any runtime
|
||||
- **Not automatically self-healing** — surfaces problems, doesn't silently fix them
|
||||
- **Does not manage work artifacts** — no repo management, no deployment, no file systems
|
||||
- **Does not manage delivery infrastructure** — no repo management, no deployment, no file systems (but does manage task-linked documents and attachments)
|
||||
- **Does not auto-reassign work** — stale tasks are surfaced, not silently redistributed
|
||||
- **Does not track external revenue/expenses** — that's a future plugin. Token/LLM cost budgeting is core.
|
||||
|
||||
|
||||
@@ -16,14 +16,14 @@ By default this workflow does **not** mount your host repo checkout, your host h
|
||||
## Files
|
||||
|
||||
- `docker/untrusted-review/Dockerfile`
|
||||
- `docker-compose.untrusted-review.yml`
|
||||
- `docker/docker-compose.untrusted-review.yml`
|
||||
- `review-checkout-pr` inside the container
|
||||
|
||||
## Build and start a shell
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml build
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml run --rm --service-ports review
|
||||
```
|
||||
|
||||
That opens an interactive shell in the review container with:
|
||||
@@ -47,7 +47,7 @@ claude login
|
||||
If you prefer API-key auth instead of CLI login, pass keys through Compose env:
|
||||
|
||||
```sh
|
||||
OPENAI_API_KEY=... ANTHROPIC_API_KEY=... docker compose -f docker-compose.untrusted-review.yml run --rm review
|
||||
OPENAI_API_KEY=... ANTHROPIC_API_KEY=... docker compose -f docker/docker-compose.untrusted-review.yml run --rm review
|
||||
```
|
||||
|
||||
## Check out a PR safely
|
||||
@@ -117,7 +117,7 @@ Notes:
|
||||
Remove the review container volumes when you want a clean environment:
|
||||
|
||||
```sh
|
||||
docker compose -f docker-compose.untrusted-review.yml down -v
|
||||
docker compose -f docker/docker-compose.untrusted-review.yml down -v
|
||||
```
|
||||
|
||||
That deletes:
|
||||
|
||||
252
doc/execution-semantics.md
Normal file
252
doc/execution-semantics.md
Normal file
@@ -0,0 +1,252 @@
|
||||
# Execution Semantics
|
||||
|
||||
Status: Current implementation guide
|
||||
Date: 2026-04-13
|
||||
Audience: Product and engineering
|
||||
|
||||
This document explains how Paperclip interprets issue assignment, issue status, execution runs, wakeups, parent/sub-issue structure, and blocker relationships.
|
||||
|
||||
`doc/SPEC-implementation.md` remains the V1 contract. This document is the detailed execution model behind that contract.
|
||||
|
||||
## 1. Core Model
|
||||
|
||||
Paperclip separates four concepts that are easy to blur together:
|
||||
|
||||
1. structure: parent/sub-issue relationships
|
||||
2. dependency: blocker relationships
|
||||
3. ownership: who is responsible for the issue now
|
||||
4. execution: whether the control plane currently has a live path to move the issue forward
|
||||
|
||||
The system works best when those are kept separate.
|
||||
|
||||
## 2. Assignee Semantics
|
||||
|
||||
An issue has at most one assignee.
|
||||
|
||||
- `assigneeAgentId` means the issue is owned by an agent
|
||||
- `assigneeUserId` means the issue is owned by a human board user
|
||||
- both cannot be set at the same time
|
||||
|
||||
This is a hard invariant. Paperclip is single-assignee by design.
|
||||
|
||||
## 3. Status Semantics
|
||||
|
||||
Paperclip issue statuses are not just UI labels. They imply different expectations about ownership and execution.
|
||||
|
||||
### `backlog`
|
||||
|
||||
The issue is not ready for active work.
|
||||
|
||||
- no execution expectation
|
||||
- no pickup expectation
|
||||
- safe resting state for future work
|
||||
|
||||
### `todo`
|
||||
|
||||
The issue is actionable but not actively claimed.
|
||||
|
||||
- it may be assigned or unassigned
|
||||
- no checkout/execution lock is required yet
|
||||
- for agent-assigned work, Paperclip may still need a wake path to ensure the assignee actually sees it
|
||||
|
||||
### `in_progress`
|
||||
|
||||
The issue is actively owned work.
|
||||
|
||||
- requires an assignee
|
||||
- for agent-owned issues, this is a strict execution-backed state
|
||||
- for user-owned issues, this is a human ownership state and is not backed by heartbeat execution
|
||||
|
||||
For agent-owned issues, `in_progress` should not be allowed to become a silent dead state.
|
||||
|
||||
### `blocked`
|
||||
|
||||
The issue cannot proceed until something external changes.
|
||||
|
||||
This is the right state for:
|
||||
|
||||
- waiting on another issue
|
||||
- waiting on a human decision
|
||||
- waiting on an external dependency or system
|
||||
- work that automatic recovery could not safely continue
|
||||
|
||||
### `in_review`
|
||||
|
||||
Execution work is paused because the next move belongs to a reviewer or approver, not the current executor.
|
||||
|
||||
### `done`
|
||||
|
||||
The work is complete and terminal.
|
||||
|
||||
### `cancelled`
|
||||
|
||||
The work will not continue and is terminal.
|
||||
|
||||
## 4. Agent-Owned vs User-Owned Execution
|
||||
|
||||
The execution model differs depending on assignee type.
|
||||
|
||||
### Agent-owned issues
|
||||
|
||||
Agent-owned issues are part of the control plane's execution loop.
|
||||
|
||||
- Paperclip can wake the assignee
|
||||
- Paperclip can track runs linked to the issue
|
||||
- Paperclip can recover some lost execution state after crashes/restarts
|
||||
|
||||
### User-owned issues
|
||||
|
||||
User-owned issues are not executed by the heartbeat scheduler.
|
||||
|
||||
- Paperclip can track the ownership and status
|
||||
- Paperclip cannot rely on heartbeat/run semantics to keep them moving
|
||||
- stranded-work reconciliation does not apply to them
|
||||
|
||||
This is why `in_progress` can be strict for agents without forcing the same runtime rules onto human-held work.
|
||||
|
||||
## 5. Checkout and Active Execution
|
||||
|
||||
Checkout is the bridge from issue ownership to active agent execution.
|
||||
|
||||
- checkout is required to move an issue into agent-owned `in_progress`
|
||||
- `checkoutRunId` represents issue-ownership lock for the current agent run
|
||||
- `executionRunId` represents the currently active execution path for the issue
|
||||
|
||||
These are related but not identical:
|
||||
|
||||
- `checkoutRunId` answers who currently owns execution rights for the issue
|
||||
- `executionRunId` answers which run is actually live right now
|
||||
|
||||
Paperclip already clears stale execution locks and can adopt some stale checkout locks when the original run is gone.
|
||||
|
||||
## 6. Parent/Sub-Issue vs Blockers
|
||||
|
||||
Paperclip uses two different relationships for different jobs.
|
||||
|
||||
### Parent/Sub-Issue (`parentId`)
|
||||
|
||||
This is structural.
|
||||
|
||||
Use it for:
|
||||
|
||||
- work breakdown
|
||||
- rollup context
|
||||
- explaining why a child issue exists
|
||||
- waking the parent assignee when all direct children become terminal
|
||||
|
||||
Do not treat `parentId` as execution dependency by itself.
|
||||
|
||||
### Blockers (`blockedByIssueIds`)
|
||||
|
||||
This is dependency semantics.
|
||||
|
||||
Use it for:
|
||||
|
||||
- "this issue cannot continue until that issue changes state"
|
||||
- explicit waiting relationships
|
||||
- automatic wakeups when all blockers resolve
|
||||
|
||||
If a parent is truly waiting on a child, model that with blockers. Do not rely on the parent/child relationship alone.
|
||||
|
||||
## 7. Consistent Execution Path Rules
|
||||
|
||||
For agent-assigned, non-terminal, actionable issues, Paperclip should not leave work in a state where nobody is working it and nothing will wake it.
|
||||
|
||||
The relevant execution path depends on status.
|
||||
|
||||
### Agent-assigned `todo`
|
||||
|
||||
This is dispatch state: ready to start, not yet actively claimed.
|
||||
|
||||
A healthy dispatch state means at least one of these is true:
|
||||
|
||||
- the issue already has a queued/running wake path
|
||||
- the issue is intentionally resting in `todo` after a successful agent heartbeat, not after an interrupted dispatch
|
||||
- the issue has been explicitly surfaced as stranded
|
||||
|
||||
### Agent-assigned `in_progress`
|
||||
|
||||
This is active-work state.
|
||||
|
||||
A healthy active-work state means at least one of these is true:
|
||||
|
||||
- there is an active run for the issue
|
||||
- there is already a queued continuation wake
|
||||
- the issue has been explicitly surfaced as stranded
|
||||
|
||||
## 8. Crash and Restart Recovery
|
||||
|
||||
Paperclip now treats crash/restart recovery as a stranded-assigned-work problem, not just a stranded-run problem.
|
||||
|
||||
There are two distinct failure modes.
|
||||
|
||||
### 8.1 Stranded assigned `todo`
|
||||
|
||||
Example:
|
||||
|
||||
- issue is assigned to an agent
|
||||
- status is `todo`
|
||||
- the original wake/run died during or after dispatch
|
||||
- after restart there is no queued wake and nothing picks the issue back up
|
||||
|
||||
Recovery rule:
|
||||
|
||||
- if the latest issue-linked run failed/timed out/cancelled and no live execution path remains, Paperclip queues one automatic assignment recovery wake
|
||||
- if that recovery wake also finishes and the issue is still stranded, Paperclip moves the issue to `blocked` and posts a visible comment
|
||||
|
||||
This is a dispatch recovery, not a continuation recovery.
|
||||
|
||||
### 8.2 Stranded assigned `in_progress`
|
||||
|
||||
Example:
|
||||
|
||||
- issue is assigned to an agent
|
||||
- status is `in_progress`
|
||||
- the live run disappeared
|
||||
- after restart there is no active run and no queued continuation
|
||||
|
||||
Recovery rule:
|
||||
|
||||
- Paperclip queues one automatic continuation wake
|
||||
- if that continuation wake also finishes and the issue is still stranded, Paperclip moves the issue to `blocked` and posts a visible comment
|
||||
|
||||
This is an active-work continuity recovery.
|
||||
|
||||
## 9. Startup and Periodic Reconciliation
|
||||
|
||||
Startup recovery and periodic recovery are different from normal wakeup delivery.
|
||||
|
||||
On startup and on the periodic recovery loop, Paperclip now does three things in sequence:
|
||||
|
||||
1. reap orphaned `running` runs
|
||||
2. resume persisted `queued` runs
|
||||
3. reconcile stranded assigned work
|
||||
|
||||
That last step is what closes the gap where issue state survives a crash but the wake/run path does not.
|
||||
|
||||
## 10. What This Does Not Mean
|
||||
|
||||
These semantics do not change V1 into an auto-reassignment system.
|
||||
|
||||
Paperclip still does not:
|
||||
|
||||
- automatically reassign work to a different agent
|
||||
- infer dependency semantics from `parentId` alone
|
||||
- treat human-held work as heartbeat-managed execution
|
||||
|
||||
The recovery model is intentionally conservative:
|
||||
|
||||
- preserve ownership
|
||||
- retry once when the control plane lost execution continuity
|
||||
- escalate visibly when the system cannot safely keep going
|
||||
|
||||
## 11. Practical Interpretation
|
||||
|
||||
For a board operator, the intended meaning is:
|
||||
|
||||
- agent-owned `in_progress` should mean \"this is live work or clearly surfaced as a problem\"
|
||||
- agent-owned `todo` should not stay assigned forever after a crash with no remaining wake path
|
||||
- parent/sub-issue explains structure
|
||||
- blockers explain waiting
|
||||
|
||||
That is the execution contract Paperclip should present to operators.
|
||||
@@ -22,6 +22,7 @@ The question is not "which memory project wins?" The question is "what is the sm
|
||||
### Hosted memory APIs
|
||||
|
||||
- `mem0`
|
||||
- `AWS Bedrock AgentCore Memory`
|
||||
- `supermemory`
|
||||
- `Memori`
|
||||
|
||||
@@ -49,6 +50,7 @@ These emphasize local persistence, inspectability, and low operational overhead.
|
||||
|---|---|---|---|---|
|
||||
| [nuggets](https://github.com/NeoVertex1/nuggets) | local memory engine + messaging gateway | topic-scoped HRR memory with `remember`, `recall`, `forget`, fact promotion into `MEMORY.md` | good example of lightweight local memory and automatic promotion | very specific architecture; not a general multi-tenant service |
|
||||
| [mem0](https://github.com/mem0ai/mem0) | hosted + OSS SDK | `add`, `search`, `getAll`, `get`, `update`, `delete`, `deleteAll`; entity partitioning via `user_id`, `agent_id`, `run_id`, `app_id` | closest to a clean provider API with identities and metadata filters | provider owns extraction heavily; Paperclip should not assume every backend behaves like mem0 |
|
||||
| [AWS Bedrock AgentCore Memory](https://docs.aws.amazon.com/bedrock-agentcore/latest/devguide/memory.html) | AWS-managed memory service | explicit short-term and long-term memories, actor/session/event APIs, memory strategies, namespace templates, optional self-managed extraction pipeline | strong example of provider-managed memory with clear scoped ids, retention controls, and standalone API access outside a single agent framework | AWS-hosted and IAM-centric; Paperclip would still need its own company/run/comment provenance, cost rollups, and likely a plugin wrapper instead of baking AWS semantics into core |
|
||||
| [MemOS](https://github.com/MemTensor/MemOS) | memory OS / framework | unified add-retrieve-edit-delete, memory cubes, multimodal memory, tool memory, async scheduler, feedback/correction | strong source for optional capabilities beyond plain search | much broader than the minimal contract Paperclip should standardize first |
|
||||
| [supermemory](https://github.com/supermemoryai/supermemory) | hosted memory + context API | `add`, `profile`, `search.memories`, `search.documents`, document upload, settings; automatic profile building and forgetting | strong example of "context bundle" rather than raw search results | heavily productized around its own ontology and hosted flow |
|
||||
| [memU](https://github.com/NevaMind-AI/memU) | proactive agent memory framework | file-system metaphor, proactive loop, intent prediction, always-on companion model | good source for when memory should trigger agent behavior, not just retrieval | proactive assistant framing is broader than Paperclip's task-centric control plane |
|
||||
@@ -77,6 +79,7 @@ These differences are exactly why Paperclip needs a layered contract instead of
|
||||
### 1. Who owns extraction?
|
||||
|
||||
- `mem0`, `supermemory`, and `Memori` expect the provider to infer memories from conversations.
|
||||
- `AWS Bedrock AgentCore Memory` supports both provider-managed extraction and self-managed pipelines where the host writes curated long-term memory records.
|
||||
- `memsearch` expects the host to decide what markdown to write, then indexes it.
|
||||
- `MemOS`, `memU`, `EverMemOS`, and `OpenViking` sit somewhere in between and often expose richer memory construction pipelines.
|
||||
|
||||
@@ -104,6 +107,7 @@ Paperclip should make plain search the minimum contract and richer outputs optio
|
||||
### 4. Is memory synchronous or asynchronous?
|
||||
|
||||
- local tools often work synchronously in-process.
|
||||
- `AWS Bedrock AgentCore Memory` is synchronous at the API edge, but its long-term memory path includes background extraction/indexing behavior and retention policies managed by the provider.
|
||||
- larger systems add schedulers, background indexing, compaction, or sync jobs.
|
||||
|
||||
Paperclip needs both direct request/response operations and background maintenance hooks.
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
# Paperclip Module System
|
||||
|
||||
> Supersession note: the company-template/package-format direction in this document is no longer current. For the current markdown-first company import/export plan, see `doc/plans/2026-03-13-company-import-export-v2.md` and `docs/companies/companies-spec.md`.
|
||||
|
||||
## Overview
|
||||
|
||||
Paperclip's module system lets you extend the control plane with new capabilities — revenue tracking, observability, notifications, dashboards — without forking core. Modules are self-contained packages that register routes, UI pages, database tables, and lifecycle hooks.
|
||||
|
||||
644
doc/plans/2026-03-13-company-import-export-v2.md
Normal file
644
doc/plans/2026-03-13-company-import-export-v2.md
Normal file
@@ -0,0 +1,644 @@
|
||||
# 2026-03-13 Company Import / Export V2 Plan
|
||||
|
||||
Status: Proposed implementation plan
|
||||
Date: 2026-03-13
|
||||
Audience: Product and engineering
|
||||
Supersedes for package-format direction:
|
||||
- `doc/plans/2026-02-16-module-system.md` sections that describe company templates as JSON-only
|
||||
- `docs/specs/cliphub-plan.md` assumptions about blueprint bundle shape where they conflict with the markdown-first package model
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This document defines the next-stage plan for Paperclip company import/export.
|
||||
|
||||
The core shift is:
|
||||
|
||||
- move from a Paperclip-specific JSON-first portability package toward a markdown-first package format
|
||||
- make GitHub repositories first-class package sources
|
||||
- treat the company package model as an extension of the existing Agent Skills ecosystem instead of inventing a separate skill format
|
||||
- support company, team, agent, and skill reuse without requiring a central registry
|
||||
|
||||
The normative package format draft lives in:
|
||||
|
||||
- `docs/companies/companies-spec.md`
|
||||
|
||||
This plan is about implementation and rollout inside Paperclip.
|
||||
|
||||
Adapter-wide skill rollout details live in:
|
||||
|
||||
- `doc/plans/2026-03-14-adapter-skill-sync-rollout.md`
|
||||
|
||||
## 2. Executive Summary
|
||||
|
||||
Paperclip already has portability primitives in the repo:
|
||||
|
||||
- server import/export/preview APIs
|
||||
- CLI import/export commands
|
||||
- shared portability types and validators
|
||||
|
||||
Those primitives are being cut over to the new package model rather than extended for backward compatibility.
|
||||
|
||||
The new direction is:
|
||||
|
||||
1. markdown-first package authoring
|
||||
2. GitHub repo or local folder as the default source of truth
|
||||
3. a vendor-neutral base package spec for agent-company runtimes, not just Paperclip
|
||||
4. the company package model is explicitly an extension of Agent Skills
|
||||
5. no future dependency on `paperclip.manifest.json`
|
||||
6. implicit folder discovery by convention for the common case
|
||||
7. an always-emitted `.paperclip.yaml` sidecar for high-fidelity Paperclip-specific details
|
||||
8. package graph resolution at import time
|
||||
9. entity-level import UI with dependency-aware tree selection
|
||||
10. `skills.sh` compatibility is a V1 requirement for skill packages and skill installation flows
|
||||
11. adapter-aware skill sync surfaces so Paperclip can read, diff, enable, disable, and reconcile skills where the adapter supports it
|
||||
|
||||
## 3. Product Goals
|
||||
|
||||
### 3.1 Goals
|
||||
|
||||
- A user can point Paperclip at a local folder or GitHub repo and import a company package without any registry.
|
||||
- A package is readable and writable by humans with normal git workflows.
|
||||
- A package can contain:
|
||||
- company definition
|
||||
- org subtree / team definition
|
||||
- agent definitions
|
||||
- optional starter projects and tasks
|
||||
- reusable skills
|
||||
- V1 skill support is compatible with the existing `skills.sh` / Agent Skills ecosystem.
|
||||
- A user can import into:
|
||||
- a new company
|
||||
- an existing company
|
||||
- Import preview shows:
|
||||
- what will be created
|
||||
- what will be updated
|
||||
- what is skipped
|
||||
- what is referenced externally
|
||||
- what needs secrets or approvals
|
||||
- Export preserves attribution, licensing, and pinned upstream references.
|
||||
- Export produces a clean vendor-neutral package plus a Paperclip sidecar.
|
||||
- `companies.sh` can later act as a discovery/index layer over repos implementing this format.
|
||||
|
||||
### 3.2 Non-Goals
|
||||
|
||||
- No central registry is required for package validity.
|
||||
- This is not full database backup/restore.
|
||||
- This does not attempt to export runtime state like:
|
||||
- heartbeat runs
|
||||
- API keys
|
||||
- spend totals
|
||||
- run sessions
|
||||
- transient workspaces
|
||||
- This does not require a first-class runtime `teams` table before team portability ships.
|
||||
|
||||
## 4. Current State In Repo
|
||||
|
||||
Current implementation exists here:
|
||||
|
||||
- shared types: `packages/shared/src/types/company-portability.ts`
|
||||
- shared validators: `packages/shared/src/validators/company-portability.ts`
|
||||
- server routes: `server/src/routes/companies.ts`
|
||||
- server service: `server/src/services/company-portability.ts`
|
||||
- CLI commands: `cli/src/commands/client/company.ts`
|
||||
|
||||
Current product limitations:
|
||||
|
||||
1. Import/export UX still needs deeper tree-selection and skill/package management polish.
|
||||
2. Adapter-specific skill sync remains uneven across adapters and must degrade cleanly when unsupported.
|
||||
3. Projects and starter tasks should stay opt-in on export rather than default package content.
|
||||
4. Import/export still needs stronger coverage around attribution, pin verification, and executable-package warnings.
|
||||
5. The current markdown frontmatter parser is intentionally lightweight and should stay constrained to the documented shape.
|
||||
|
||||
## 5. Canonical Package Direction
|
||||
|
||||
### 5.1 Canonical Authoring Format
|
||||
|
||||
The canonical authoring format becomes a markdown-first package rooted in one of:
|
||||
|
||||
- `COMPANY.md`
|
||||
- `TEAM.md`
|
||||
- `AGENTS.md`
|
||||
- `PROJECT.md`
|
||||
- `TASK.md`
|
||||
- `SKILL.md`
|
||||
|
||||
The normative draft is:
|
||||
|
||||
- `docs/companies/companies-spec.md`
|
||||
|
||||
### 5.2 Relationship To Agent Skills
|
||||
|
||||
Paperclip must not redefine `SKILL.md`.
|
||||
|
||||
Rules:
|
||||
|
||||
- `SKILL.md` stays Agent Skills compatible
|
||||
- the company package model is an extension of Agent Skills
|
||||
- the base package is vendor-neutral and intended for any agent-company runtime
|
||||
- Paperclip-specific fidelity lives in `.paperclip.yaml`
|
||||
- Paperclip may resolve and install `SKILL.md` packages, but it must not require a Paperclip-only skill format
|
||||
- `skills.sh` compatibility is a V1 requirement, not a future nice-to-have
|
||||
|
||||
### 5.3 Agent-To-Skill Association
|
||||
|
||||
`AGENTS.md` should associate skills by skill shortname or slug, not by verbose path in the common case.
|
||||
|
||||
Preferred example:
|
||||
|
||||
- `skills: [review, react-best-practices]`
|
||||
|
||||
Resolution model:
|
||||
|
||||
- `review` resolves to `skills/review/SKILL.md` by package convention
|
||||
- if the skill is external or referenced, the skill package owns that complexity
|
||||
- exporters should prefer shortname-based associations in `AGENTS.md`
|
||||
- importers should resolve the shortname against local package skills first, then referenced or installed company skills
|
||||
### 5.4 Base Package Vs Paperclip Extension
|
||||
|
||||
The repo format should have two layers:
|
||||
|
||||
- base package:
|
||||
- minimal, readable, social, vendor-neutral
|
||||
- implicit folder discovery by convention
|
||||
- no Paperclip-only runtime fields by default
|
||||
- Paperclip extension:
|
||||
- `.paperclip.yaml`
|
||||
- adapter/runtime/permissions/budget/workspace fidelity
|
||||
- emitted by Paperclip tools as a sidecar while the base package stays readable
|
||||
|
||||
### 5.5 Relationship To Current V1 Manifest
|
||||
|
||||
`paperclip.manifest.json` is not part of the future package direction.
|
||||
|
||||
This should be treated as a hard cutover in product direction.
|
||||
|
||||
- markdown-first repo layout is the target
|
||||
- no new work should deepen investment in the old manifest model
|
||||
- future portability APIs and UI should target the markdown-first model only
|
||||
|
||||
## 6. Package Graph Model
|
||||
|
||||
### 6.1 Entity Kinds
|
||||
|
||||
Paperclip import/export should support these entity kinds:
|
||||
|
||||
- company
|
||||
- team
|
||||
- agent
|
||||
- project
|
||||
- task
|
||||
- skill
|
||||
|
||||
### 6.2 Team Semantics
|
||||
|
||||
`team` is a package concept first, not a database-table requirement.
|
||||
|
||||
In Paperclip V2 portability:
|
||||
|
||||
- a team is an importable org subtree
|
||||
- it is rooted at a manager agent
|
||||
- it can be attached under a target manager in an existing company
|
||||
|
||||
This avoids blocking portability on a future runtime `teams` model.
|
||||
|
||||
Imported-team tracking should initially be package/provenance-based:
|
||||
|
||||
- if a team package was imported, the imported agents should carry enough provenance to reconstruct that grouping
|
||||
- Paperclip can treat “this set of agents came from team package X” as the imported-team model
|
||||
- provenance grouping is the intended near- and medium-term team model for import/export
|
||||
- only add a first-class runtime `teams` table later if product needs move beyond what provenance grouping can express
|
||||
|
||||
### 6.3 Dependency Graph
|
||||
|
||||
Import should operate on an entity graph, not raw file selection.
|
||||
|
||||
Examples:
|
||||
|
||||
- selecting an agent auto-selects its required docs and skill refs
|
||||
- selecting a team auto-selects its subtree
|
||||
- selecting a company auto-selects all included entities by default
|
||||
- selecting a project auto-selects its starter tasks
|
||||
|
||||
The preview output should reflect graph resolution explicitly.
|
||||
|
||||
## 7. External References, Pinning, And Attribution
|
||||
|
||||
### 7.1 Why This Matters
|
||||
|
||||
Some packages will:
|
||||
|
||||
- reference upstream files we do not want to republish
|
||||
- include third-party work where attribution must remain visible
|
||||
- need protection from branch hot-swapping
|
||||
|
||||
### 7.2 Policy
|
||||
|
||||
Paperclip should support source references in package metadata with:
|
||||
|
||||
- repo
|
||||
- path
|
||||
- commit sha
|
||||
- optional blob sha
|
||||
- optional sha256
|
||||
- attribution
|
||||
- license
|
||||
- usage mode
|
||||
|
||||
Usage modes:
|
||||
|
||||
- `vendored`
|
||||
- `referenced`
|
||||
- `mirrored`
|
||||
|
||||
Default exporter behavior for third-party content should be:
|
||||
|
||||
- prefer `referenced`
|
||||
- preserve attribution
|
||||
- do not silently inline third-party content into exports
|
||||
|
||||
### 7.3 Trust Model
|
||||
|
||||
Imported package content should be classified by trust level:
|
||||
|
||||
- markdown-only
|
||||
- markdown + assets
|
||||
- markdown + scripts/executables
|
||||
|
||||
The UI and CLI should surface this clearly before apply.
|
||||
|
||||
## 8. Import Behavior
|
||||
|
||||
### 8.1 Supported Sources
|
||||
|
||||
- local folder
|
||||
- local package root file
|
||||
- GitHub repo URL
|
||||
- GitHub subtree URL
|
||||
- direct URL to markdown/package root
|
||||
|
||||
Registry-based discovery may be added later, but must remain optional.
|
||||
|
||||
### 8.2 Import Targets
|
||||
|
||||
- new company
|
||||
- existing company
|
||||
|
||||
For existing company imports, the preview must support:
|
||||
|
||||
- collision handling
|
||||
- attach-point selection for team imports
|
||||
- selective entity import
|
||||
|
||||
### 8.3 Collision Strategy
|
||||
|
||||
Current `rename | skip | replace` support remains, but matching should improve over time.
|
||||
|
||||
Preferred matching order:
|
||||
|
||||
1. prior install provenance
|
||||
2. stable package entity identity
|
||||
3. slug
|
||||
4. human name as weak fallback
|
||||
|
||||
Slug-only matching is acceptable only as a transitional strategy.
|
||||
|
||||
### 8.4 Required Preview Output
|
||||
|
||||
Every import preview should surface:
|
||||
|
||||
- target company action
|
||||
- entity-level create/update/skip plan
|
||||
- referenced external content
|
||||
- missing files
|
||||
- hash mismatch or pinning issues
|
||||
- env inputs, including required vs optional and default values when present
|
||||
- unsupported content types
|
||||
- trust/licensing warnings
|
||||
|
||||
### 8.5 Adapter Skill Sync Surface
|
||||
|
||||
People want skill management in the UI, but skills are adapter-dependent.
|
||||
|
||||
That means portability and UI planning must include an adapter capability model for skills.
|
||||
|
||||
Paperclip should define a new adapter surface area around skills:
|
||||
|
||||
- list currently enabled skills for an agent
|
||||
- report how those skills are represented by the adapter
|
||||
- install or enable a skill
|
||||
- disable or remove a skill
|
||||
- report sync state between desired package config and actual adapter state
|
||||
|
||||
Examples:
|
||||
|
||||
- Claude Code / Codex style adapters may manage skills as local filesystem packages or adapter-owned skill directories
|
||||
- OpenClaw-style adapters may expose currently enabled skills through an API or a reflected config surface
|
||||
- some adapters may be read-only and only report what they have
|
||||
|
||||
Planned adapter capability shape:
|
||||
|
||||
- `supportsSkillRead`
|
||||
- `supportsSkillWrite`
|
||||
- `supportsSkillRemove`
|
||||
- `supportsSkillSync`
|
||||
- `skillStorageKind` such as `filesystem`, `remote_api`, `inline_config`, or `unknown`
|
||||
|
||||
Baseline adapter interface:
|
||||
|
||||
- `listSkills(agent)`
|
||||
- `applySkills(agent, desiredSkills)`
|
||||
- `removeSkill(agent, skillId)` optional
|
||||
- `getSkillSyncState(agent, desiredSkills)` optional
|
||||
|
||||
Planned Paperclip behavior:
|
||||
|
||||
- if an adapter supports read, Paperclip should show current skills in the UI
|
||||
- if an adapter supports write, Paperclip should let the user enable/disable imported skills
|
||||
- if an adapter supports sync, Paperclip should compute desired vs actual state and offer reconcile actions
|
||||
- if an adapter does not support these capabilities, the UI should still show the package-level desired skills but mark them unmanaged
|
||||
|
||||
## 9. Export Behavior
|
||||
|
||||
### 9.1 Default Export Target
|
||||
|
||||
Default export target should become a markdown-first folder structure.
|
||||
|
||||
Example:
|
||||
|
||||
```text
|
||||
my-company/
|
||||
├── COMPANY.md
|
||||
├── agents/
|
||||
├── teams/
|
||||
└── skills/
|
||||
```
|
||||
|
||||
### 9.2 Export Rules
|
||||
|
||||
Exports should:
|
||||
|
||||
- omit machine-local ids
|
||||
- omit timestamps and counters unless explicitly needed
|
||||
- omit secret values
|
||||
- omit local absolute paths
|
||||
- omit duplicated inline prompt content from `.paperclip.yaml` when `AGENTS.md` already carries the instructions
|
||||
- preserve references and attribution
|
||||
- emit `.paperclip.yaml` alongside the base package
|
||||
- express adapter env/secrets as portable env input declarations rather than exported secret binding ids
|
||||
- preserve compatible `SKILL.md` content as-is
|
||||
|
||||
Projects and issues should not be exported by default.
|
||||
|
||||
They should be opt-in through selectors such as:
|
||||
|
||||
- `--projects project-shortname-1,project-shortname-2`
|
||||
- `--issues PAP-1,PAP-3`
|
||||
- `--project-issues project-shortname-1,project-shortname-2`
|
||||
|
||||
This supports “clean public company package” workflows where a maintainer exports a follower-facing company package without bundling active work items every time.
|
||||
|
||||
### 9.3 Export Units
|
||||
|
||||
Initial export units:
|
||||
|
||||
- company package
|
||||
- team package
|
||||
- single agent package
|
||||
|
||||
Later optional units:
|
||||
|
||||
- skill pack export
|
||||
- seed projects/tasks bundle
|
||||
|
||||
## 10. Storage Model Inside Paperclip
|
||||
|
||||
### 10.1 Short-Term
|
||||
|
||||
In the first phase, imported entities can continue mapping onto current runtime tables:
|
||||
|
||||
- company -> companies
|
||||
- agent -> agents
|
||||
- team -> imported agent subtree attachment plus package provenance grouping
|
||||
- skill -> company-scoped reusable package metadata plus agent-scoped desired-skill attachment state where supported
|
||||
|
||||
### 10.2 Medium-Term
|
||||
|
||||
Paperclip should add managed package/provenance records so imports are not anonymous one-off copies.
|
||||
|
||||
Needed capabilities:
|
||||
|
||||
- remember install origin
|
||||
- support re-import / upgrade
|
||||
- distinguish local edits from upstream package state
|
||||
- preserve external refs and package-level metadata
|
||||
- preserve imported team grouping without requiring a runtime `teams` table immediately
|
||||
- preserve desired-skill state separately from adapter runtime state
|
||||
- support both company-scoped reusable skills and agent-scoped skill attachments
|
||||
|
||||
Suggested future tables:
|
||||
|
||||
- package_installs
|
||||
- package_install_entities
|
||||
- package_sources
|
||||
- agent_skill_desires
|
||||
- adapter_skill_snapshots
|
||||
|
||||
This is not required for phase 1 UI, but it is required for a robust long-term system.
|
||||
|
||||
## 11. API Plan
|
||||
|
||||
### 11.1 Keep Existing Endpoints Initially
|
||||
|
||||
Retain:
|
||||
|
||||
- `POST /api/companies/:companyId/export`
|
||||
- `POST /api/companies/import/preview`
|
||||
- `POST /api/companies/import`
|
||||
|
||||
But evolve payloads toward the markdown-first graph model.
|
||||
|
||||
### 11.2 New API Capabilities
|
||||
|
||||
Add support for:
|
||||
|
||||
- package root resolution from local/GitHub inputs
|
||||
- graph resolution preview
|
||||
- source pin and hash verification results
|
||||
- entity-level selection
|
||||
- team attach target selection
|
||||
- provenance-aware collision planning
|
||||
|
||||
### 11.3 Parsing Changes
|
||||
|
||||
Replace the current ad hoc markdown frontmatter parser with a real parser that can handle:
|
||||
|
||||
- nested YAML
|
||||
- arrays/objects reliably
|
||||
- consistent round-tripping
|
||||
|
||||
This is a prerequisite for the new package model.
|
||||
|
||||
## 12. CLI Plan
|
||||
|
||||
The CLI should continue to support direct import/export without a registry.
|
||||
|
||||
Target commands:
|
||||
|
||||
- `paperclipai company export <company-id> --out <path>`
|
||||
- `paperclipai company import <path-or-url> --dry-run`
|
||||
- `paperclipai company import <path-or-url> --target existing -C <company-id>`
|
||||
|
||||
Planned additions:
|
||||
|
||||
- `--package-kind company|team|agent`
|
||||
- `--attach-under <agent-id-or-slug>` for team imports
|
||||
- `--strict-pins`
|
||||
- `--allow-unpinned`
|
||||
- `--materialize-references`
|
||||
- `--sync-skills`
|
||||
|
||||
## 13. UI Plan
|
||||
|
||||
### 13.1 Company Settings Import / Export
|
||||
|
||||
Add a real import/export section to Company Settings.
|
||||
|
||||
Export UI:
|
||||
|
||||
- export package kind selector
|
||||
- include options
|
||||
- local download/export destination guidance
|
||||
- attribution/reference summary
|
||||
|
||||
Import UI:
|
||||
|
||||
- source entry:
|
||||
- upload/folder where supported
|
||||
- GitHub URL
|
||||
- generic URL
|
||||
- preview pane with:
|
||||
- resolved package root
|
||||
- dependency tree
|
||||
- checkboxes by entity
|
||||
- trust/licensing warnings
|
||||
- secrets requirements
|
||||
- collision plan
|
||||
|
||||
### 13.2 Team Import UX
|
||||
|
||||
If importing a team into an existing company:
|
||||
|
||||
- show the subtree structure
|
||||
- require the user to choose where to attach it
|
||||
- preview manager/reporting updates before apply
|
||||
- preserve imported-team provenance so the UI can later say “these agents came from team package X”
|
||||
|
||||
### 13.3 Skills UX
|
||||
|
||||
See also:
|
||||
|
||||
- `doc/plans/2026-03-14-skills-ui-product-plan.md`
|
||||
|
||||
If importing skills:
|
||||
|
||||
- show whether each skill is local, vendored, or referenced
|
||||
- show whether it contains scripts/assets
|
||||
- preserve Agent Skills compatibility in presentation and export
|
||||
- preserve `skills.sh` compatibility in both import and install flows
|
||||
- show agent skill attachments by shortname/slug rather than noisy file paths
|
||||
- treat agent skills as a dedicated agent tab, not just another subsection of configuration
|
||||
- show current adapter-reported skills when supported
|
||||
- show desired package skills separately from actual adapter state
|
||||
- offer reconcile actions when the adapter supports sync
|
||||
|
||||
## 14. Rollout Phases
|
||||
|
||||
### Phase 1: Stabilize Current V1 Portability
|
||||
|
||||
- add tests for current portability flows
|
||||
- replace the frontmatter parser
|
||||
- add Company Settings UI for current import/export capabilities
|
||||
- start cutover work toward the markdown-first package reader
|
||||
|
||||
### Phase 2: Markdown-First Package Reader
|
||||
|
||||
- support `COMPANY.md` / `TEAM.md` / `AGENTS.md` root detection
|
||||
- build internal graph from markdown-first packages
|
||||
- support local folder and GitHub repo inputs natively
|
||||
- support agent skill references by shortname/slug
|
||||
- resolve local `skills/<slug>/SKILL.md` packages by convention
|
||||
- support `skills.sh`-compatible skill repos as V1 package sources
|
||||
|
||||
### Phase 3: Graph-Based Import UX And Skill Surfaces
|
||||
|
||||
- entity tree preview
|
||||
- checkbox selection
|
||||
- team subtree attach flow
|
||||
- licensing/trust/reference warnings
|
||||
- company skill library groundwork
|
||||
- dedicated agent `Skills` tab groundwork
|
||||
- adapter skill read/sync UI groundwork
|
||||
|
||||
### Phase 4: New Export Model
|
||||
|
||||
- export markdown-first folder structure by default
|
||||
|
||||
### Phase 5: Provenance And Upgrades
|
||||
|
||||
- persist install provenance
|
||||
- support package-aware re-import and upgrades
|
||||
- improve collision matching beyond slug-only
|
||||
- add imported-team provenance grouping
|
||||
- add desired-vs-actual skill sync state
|
||||
|
||||
### Phase 6: Optional Seed Content
|
||||
|
||||
- goals
|
||||
- projects
|
||||
- starter issues/tasks
|
||||
|
||||
This phase is intentionally after the structural model is stable.
|
||||
|
||||
## 15. Documentation Plan
|
||||
|
||||
Primary docs:
|
||||
|
||||
- `docs/companies/companies-spec.md` as the package-format draft
|
||||
- this implementation plan for rollout sequencing
|
||||
|
||||
Docs to update later as implementation lands:
|
||||
|
||||
- `doc/SPEC-implementation.md`
|
||||
- `docs/api/companies.md`
|
||||
- `docs/cli/control-plane-commands.md`
|
||||
- board operator docs for Company Settings import/export
|
||||
|
||||
## 16. Open Questions
|
||||
|
||||
1. Should imported skill packages be stored as managed package files in Paperclip storage, or only referenced at import time?
|
||||
Decision: store them as managed package files, so they can support both company-scoped reuse and agent-scoped attachment.
|
||||
2. What is the minimum adapter skill interface needed to make the UI useful across Claude Code, Codex, OpenClaw, and future adapters?
|
||||
Decision: use the baseline interface in section 8.5.
|
||||
3. Should Paperclip support direct local folder selection in the web UI, or keep that CLI-only initially?
|
||||
4. Do we want optional generated lock files in phase 2, or defer them until provenance work?
|
||||
5. How strict should pinning be by default for GitHub references:
|
||||
- warn on unpinned
|
||||
- or block in normal mode
|
||||
6. Is package-provenance grouping enough for imported teams, or do we expect product requirements soon that would justify a first-class runtime `teams` table?
|
||||
Decision: provenance grouping is enough for the import/export product model for now.
|
||||
|
||||
## 17. Recommendation
|
||||
|
||||
Engineering should treat this as the current plan of record for company import/export beyond the existing V1 portability feature.
|
||||
|
||||
Immediate next steps:
|
||||
|
||||
1. accept `docs/companies/companies-spec.md` as the package-format draft
|
||||
2. implement phase 1 stabilization work
|
||||
3. build phase 2 markdown-first package reader before expanding ClipHub or `companies.sh`
|
||||
4. treat the old manifest-based format as deprecated and not part of the future surface
|
||||
|
||||
This keeps Paperclip aligned with:
|
||||
|
||||
- GitHub-native distribution
|
||||
- Agent Skills compatibility
|
||||
- a registry-optional ecosystem model
|
||||
399
doc/plans/2026-03-14-adapter-skill-sync-rollout.md
Normal file
399
doc/plans/2026-03-14-adapter-skill-sync-rollout.md
Normal file
@@ -0,0 +1,399 @@
|
||||
# 2026-03-14 Adapter Skill Sync Rollout
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-03-14
|
||||
Audience: Product and engineering
|
||||
Related:
|
||||
- `doc/plans/2026-03-14-skills-ui-product-plan.md`
|
||||
- `doc/plans/2026-03-13-company-import-export-v2.md`
|
||||
- `docs/companies/companies-spec.md`
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This document defines the rollout plan for adapter-wide skill support in Paperclip.
|
||||
|
||||
The goal is not just “show a skills tab.” The goal is:
|
||||
|
||||
- every adapter has a deliberate skill-sync truth model
|
||||
- the UI tells the truth for that adapter
|
||||
- Paperclip stores desired skill state consistently even when the adapter cannot fully reconcile it
|
||||
- unsupported adapters degrade clearly and safely
|
||||
|
||||
## 2. Current Adapter Matrix
|
||||
|
||||
Paperclip currently has these adapters:
|
||||
|
||||
- `claude_local`
|
||||
- `codex_local`
|
||||
- `cursor_local`
|
||||
- `gemini_local`
|
||||
- `opencode_local`
|
||||
- `pi_local`
|
||||
- `openclaw_gateway`
|
||||
|
||||
The current skill API supports:
|
||||
|
||||
- `unsupported`
|
||||
- `persistent`
|
||||
- `ephemeral`
|
||||
|
||||
Current implementation state:
|
||||
|
||||
- `codex_local`: implemented, `persistent`
|
||||
- `claude_local`: implemented, `ephemeral`
|
||||
- `cursor_local`: not yet implemented, but technically suited to `persistent`
|
||||
- `gemini_local`: not yet implemented, but technically suited to `persistent`
|
||||
- `pi_local`: not yet implemented, but technically suited to `persistent`
|
||||
- `opencode_local`: not yet implemented; likely `persistent`, but with special handling because it currently injects into Claude’s shared skills home
|
||||
- `openclaw_gateway`: not yet implemented; blocked on gateway protocol support, so `unsupported` for now
|
||||
|
||||
## 3. Product Principles
|
||||
|
||||
1. Desired skills live in Paperclip for every adapter.
|
||||
2. Adapters may expose different truth models, and the UI must reflect that honestly.
|
||||
3. Persistent adapters should read and reconcile actual installed state.
|
||||
4. Ephemeral adapters should report effective runtime state, not pretend they own a persistent install.
|
||||
5. Shared-home adapters need stronger safeguards than isolated-home adapters.
|
||||
6. Gateway or cloud adapters must not fake local filesystem sync.
|
||||
|
||||
## 4. Adapter Classification
|
||||
|
||||
### 4.1 Persistent local-home adapters
|
||||
|
||||
These adapters have a stable local skills directory that Paperclip can read and manage.
|
||||
|
||||
Candidates:
|
||||
|
||||
- `codex_local`
|
||||
- `cursor_local`
|
||||
- `gemini_local`
|
||||
- `pi_local`
|
||||
- `opencode_local` with caveats
|
||||
|
||||
Expected UX:
|
||||
|
||||
- show actual installed skills
|
||||
- show managed vs external skills
|
||||
- support `sync`
|
||||
- support stale removal
|
||||
- preserve unknown external skills
|
||||
|
||||
### 4.2 Ephemeral mount adapters
|
||||
|
||||
These adapters do not have a meaningful Paperclip-owned persistent install state.
|
||||
|
||||
Current adapter:
|
||||
|
||||
- `claude_local`
|
||||
|
||||
Expected UX:
|
||||
|
||||
- show desired Paperclip skills
|
||||
- show any discoverable external dirs if available
|
||||
- say “mounted on next run” instead of “installed”
|
||||
- do not imply a persistent adapter-owned install state
|
||||
|
||||
### 4.3 Unsupported / remote adapters
|
||||
|
||||
These adapters cannot support skill sync without new external capabilities.
|
||||
|
||||
Current adapter:
|
||||
|
||||
- `openclaw_gateway`
|
||||
|
||||
Expected UX:
|
||||
|
||||
- company skill library still works
|
||||
- agent attachment UI still works at the desired-state level
|
||||
- actual adapter state is `unsupported`
|
||||
- sync button is disabled or replaced with explanatory text
|
||||
|
||||
## 5. Per-Adapter Plan
|
||||
|
||||
### 5.1 Codex Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `persistent`
|
||||
|
||||
Current state:
|
||||
|
||||
- already implemented
|
||||
|
||||
Requirements to finish:
|
||||
|
||||
- keep as reference implementation
|
||||
- tighten tests around external custom skills and stale removal
|
||||
- ensure imported company skills can be attached and synced without manual path work
|
||||
|
||||
Success criteria:
|
||||
|
||||
- list installed managed and external skills
|
||||
- sync desired skills into `CODEX_HOME/skills`
|
||||
- preserve external user-managed skills
|
||||
|
||||
### 5.2 Claude Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `ephemeral`
|
||||
|
||||
Current state:
|
||||
|
||||
- already implemented
|
||||
|
||||
Requirements to finish:
|
||||
|
||||
- polish status language in UI
|
||||
- clearly distinguish “desired” from “mounted on next run”
|
||||
- optionally surface configured external skill dirs if Claude exposes them
|
||||
|
||||
Success criteria:
|
||||
|
||||
- desired skills stored in Paperclip
|
||||
- selected skills mounted per run
|
||||
- no misleading “installed” language
|
||||
|
||||
### 5.3 Cursor Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `persistent`
|
||||
|
||||
Technical basis:
|
||||
|
||||
- runtime already injects Paperclip skills into `~/.cursor/skills`
|
||||
|
||||
Implementation work:
|
||||
|
||||
1. Add `listSkills` for Cursor.
|
||||
2. Add `syncSkills` for Cursor.
|
||||
3. Reuse the same managed-symlink pattern as Codex.
|
||||
4. Distinguish:
|
||||
- managed Paperclip skills
|
||||
- external skills already present
|
||||
- missing desired skills
|
||||
- stale managed skills
|
||||
|
||||
Testing:
|
||||
|
||||
- unit tests for discovery
|
||||
- unit tests for sync and stale removal
|
||||
- verify shared auth/session setup is not disturbed
|
||||
|
||||
Success criteria:
|
||||
|
||||
- Cursor agents show real installed state
|
||||
- syncing from the agent Skills tab works
|
||||
|
||||
### 5.4 Gemini Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `persistent`
|
||||
|
||||
Technical basis:
|
||||
|
||||
- runtime already injects Paperclip skills into `~/.gemini/skills`
|
||||
|
||||
Implementation work:
|
||||
|
||||
1. Add `listSkills` for Gemini.
|
||||
2. Add `syncSkills` for Gemini.
|
||||
3. Reuse managed-symlink conventions from Codex/Cursor.
|
||||
4. Verify auth remains untouched while skills are reconciled.
|
||||
|
||||
Potential caveat:
|
||||
|
||||
- if Gemini treats that skills directory as shared user state, the UI should warn before removing stale managed skills
|
||||
|
||||
Success criteria:
|
||||
|
||||
- Gemini agents can reconcile desired vs actual skill state
|
||||
|
||||
### 5.5 Pi Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `persistent`
|
||||
|
||||
Technical basis:
|
||||
|
||||
- runtime already injects Paperclip skills into `~/.pi/agent/skills`
|
||||
|
||||
Implementation work:
|
||||
|
||||
1. Add `listSkills` for Pi.
|
||||
2. Add `syncSkills` for Pi.
|
||||
3. Reuse managed-symlink helpers.
|
||||
4. Verify session-file behavior remains independent from skill sync.
|
||||
|
||||
Success criteria:
|
||||
|
||||
- Pi agents expose actual installed skill state
|
||||
- Paperclip can sync desired skills into Pi’s persistent home
|
||||
|
||||
### 5.6 OpenCode Local
|
||||
|
||||
Target mode:
|
||||
|
||||
- `persistent`
|
||||
|
||||
Special case:
|
||||
|
||||
- OpenCode currently injects Paperclip skills into `~/.claude/skills`
|
||||
|
||||
This is product-risky because:
|
||||
|
||||
- it shares state with Claude
|
||||
- Paperclip may accidentally imply the skills belong only to OpenCode when the home is shared
|
||||
|
||||
Plan:
|
||||
|
||||
Phase 1:
|
||||
|
||||
- implement `listSkills` and `syncSkills`
|
||||
- treat it as `persistent`
|
||||
- explicitly label the home as shared in UI copy
|
||||
- only remove stale managed Paperclip skills that are clearly marked as Paperclip-managed
|
||||
|
||||
Phase 2:
|
||||
|
||||
- investigate whether OpenCode supports its own isolated skills home
|
||||
- if yes, migrate to an adapter-specific home and remove the shared-home caveat
|
||||
|
||||
Success criteria:
|
||||
|
||||
- OpenCode agents show real state
|
||||
- shared-home risk is visible and bounded
|
||||
|
||||
### 5.7 OpenClaw Gateway
|
||||
|
||||
Target mode:
|
||||
|
||||
- `unsupported` until gateway protocol support exists
|
||||
|
||||
Required external work:
|
||||
|
||||
- gateway API to list installed/available skills
|
||||
- gateway API to install/remove or otherwise reconcile skills
|
||||
- gateway metadata for whether state is persistent or ephemeral
|
||||
|
||||
Until then:
|
||||
|
||||
- Paperclip stores desired skills only
|
||||
- UI shows unsupported actual state
|
||||
- no fake sync implementation
|
||||
|
||||
Future target:
|
||||
|
||||
- likely a fourth truth model eventually, such as remote-managed persistent state
|
||||
- for now, keep the current API and treat gateway as unsupported
|
||||
|
||||
## 6. API Plan
|
||||
|
||||
### 6.1 Keep the current minimal adapter API
|
||||
|
||||
Near-term adapter contract remains:
|
||||
|
||||
- `listSkills(ctx)`
|
||||
- `syncSkills(ctx, desiredSkills)`
|
||||
|
||||
This is enough for all local adapters.
|
||||
|
||||
### 6.2 Optional extension points
|
||||
|
||||
Add only if needed after the first broad rollout:
|
||||
|
||||
- `skillHomeLabel`
|
||||
- `sharedHome: boolean`
|
||||
- `supportsExternalDiscovery: boolean`
|
||||
- `supportsDestructiveSync: boolean`
|
||||
|
||||
These should be optional metadata additions to the snapshot, not required new adapter methods.
|
||||
|
||||
## 7. UI Plan
|
||||
|
||||
The company-level skill library can stay adapter-neutral.
|
||||
|
||||
The agent-level Skills tab must become adapter-aware by copy and status:
|
||||
|
||||
- `persistent`: installed / missing / stale / external
|
||||
- `ephemeral`: mounted on next run / external / desired only
|
||||
- `unsupported`: desired only, adapter cannot report actual state
|
||||
|
||||
Additional UI requirement for shared-home adapters:
|
||||
|
||||
- show a small warning that the adapter uses a shared user skills home
|
||||
- avoid destructive wording unless Paperclip can prove a skill is Paperclip-managed
|
||||
|
||||
## 8. Rollout Phases
|
||||
|
||||
### Phase 1: Finish the local filesystem family
|
||||
|
||||
Ship:
|
||||
|
||||
- `cursor_local`
|
||||
- `gemini_local`
|
||||
- `pi_local`
|
||||
|
||||
Rationale:
|
||||
|
||||
- these are the closest to Codex in architecture
|
||||
- they already inject into stable local skill homes
|
||||
|
||||
### Phase 2: OpenCode shared-home support
|
||||
|
||||
Ship:
|
||||
|
||||
- `opencode_local`
|
||||
|
||||
Rationale:
|
||||
|
||||
- technically feasible now
|
||||
- needs slightly more careful product language because of the shared Claude skills home
|
||||
|
||||
### Phase 3: Gateway support decision
|
||||
|
||||
Decide:
|
||||
|
||||
- keep `openclaw_gateway` unsupported for V1
|
||||
- or extend the gateway protocol for remote skill management
|
||||
|
||||
My recommendation:
|
||||
|
||||
- do not block V1 on gateway support
|
||||
- keep it explicitly unsupported until the remote protocol exists
|
||||
|
||||
## 9. Definition Of Done
|
||||
|
||||
Adapter-wide skill support is ready when all are true:
|
||||
|
||||
1. Every adapter has an explicit truth model:
|
||||
- `persistent`
|
||||
- `ephemeral`
|
||||
- `unsupported`
|
||||
2. The UI copy matches that truth model.
|
||||
3. All local persistent adapters implement:
|
||||
- `listSkills`
|
||||
- `syncSkills`
|
||||
4. Tests cover:
|
||||
- desired-state storage
|
||||
- actual-state discovery
|
||||
- managed vs external distinctions
|
||||
- stale managed-skill cleanup where supported
|
||||
5. `openclaw_gateway` is either:
|
||||
- explicitly unsupported with clean UX
|
||||
- or backed by a real remote skill API
|
||||
|
||||
## 10. Recommendation
|
||||
|
||||
The recommended immediate order is:
|
||||
|
||||
1. `cursor_local`
|
||||
2. `gemini_local`
|
||||
3. `pi_local`
|
||||
4. `opencode_local`
|
||||
5. defer `openclaw_gateway`
|
||||
|
||||
That gets Paperclip from “skills work for Codex and Claude” to “skills work for the whole local-adapter family,” which is the meaningful V1 milestone.
|
||||
729
doc/plans/2026-03-14-skills-ui-product-plan.md
Normal file
729
doc/plans/2026-03-14-skills-ui-product-plan.md
Normal file
@@ -0,0 +1,729 @@
|
||||
# 2026-03-14 Skills UI Product Plan
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-03-14
|
||||
Audience: Product and engineering
|
||||
Related:
|
||||
- `doc/plans/2026-03-13-company-import-export-v2.md`
|
||||
- `doc/plans/2026-03-14-adapter-skill-sync-rollout.md`
|
||||
- `docs/companies/companies-spec.md`
|
||||
- `ui/src/pages/AgentDetail.tsx`
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This document defines the product and UI plan for skill management in Paperclip.
|
||||
|
||||
The goal is to make skills understandable and manageable in the website without pretending that all adapters behave the same way.
|
||||
|
||||
This plan assumes:
|
||||
|
||||
- `SKILL.md` remains Agent Skills compatible
|
||||
- `skills.sh` compatibility is a V1 requirement
|
||||
- Paperclip company import/export can include skills as package content
|
||||
- adapters may support persistent skill sync, ephemeral skill mounting, read-only skill discovery, or no skill integration at all
|
||||
|
||||
## 2. Current State
|
||||
|
||||
There is already a first-pass agent-level skill sync UI on `AgentDetail`.
|
||||
|
||||
Today it supports:
|
||||
|
||||
- loading adapter skill sync state
|
||||
- showing unsupported adapters clearly
|
||||
- showing managed skills as checkboxes
|
||||
- showing external skills separately
|
||||
- syncing desired skills for adapters that implement the new API
|
||||
|
||||
Current limitations:
|
||||
|
||||
1. There is no company-level skill library UI.
|
||||
2. There is no package import flow for skills in the website.
|
||||
3. There is no distinction between skill package management and per-agent skill attachment.
|
||||
4. There is no multi-agent desired-vs-actual view.
|
||||
5. The current UI is adapter-sync-oriented, not package-oriented.
|
||||
6. Unsupported adapters degrade safely, but not elegantly.
|
||||
|
||||
### 2.1 V1 Decisions
|
||||
|
||||
For V1, this plan assumes the following product decisions are already made:
|
||||
|
||||
1. `skills.sh` compatibility is required.
|
||||
2. Agent-to-skill association in `AGENTS.md` is by shortname or slug.
|
||||
3. Company skills and agent skill attachments are separate concepts.
|
||||
4. Agent skills should move to their own tab rather than living inside configuration.
|
||||
5. Company import/export should eventually round-trip skill packages and agent skill attachments.
|
||||
|
||||
## 3. Product Principles
|
||||
|
||||
1. Skills are company assets first, agent attachments second.
|
||||
2. Package management and adapter sync are different concerns and should not be conflated in one screen.
|
||||
3. The UI must always tell the truth about what Paperclip knows:
|
||||
- desired state in Paperclip
|
||||
- actual state reported by the adapter
|
||||
- whether the adapter can reconcile the two
|
||||
4. Agent Skills compatibility must remain visible in the product model.
|
||||
5. Agent-to-skill associations should be human-readable and shortname-based wherever possible.
|
||||
6. Unsupported adapters should still have a useful UI, not just a dead end.
|
||||
|
||||
## 4. User Model
|
||||
|
||||
Paperclip should treat skills at two scopes:
|
||||
|
||||
### 4.1 Company skills
|
||||
|
||||
These are reusable skills known to the company.
|
||||
|
||||
Examples:
|
||||
|
||||
- imported from a GitHub repo
|
||||
- added from a local folder
|
||||
- installed from a `skills.sh`-compatible repo
|
||||
- created locally inside Paperclip later
|
||||
|
||||
These should have:
|
||||
|
||||
- name
|
||||
- description
|
||||
- slug or package identity
|
||||
- source/provenance
|
||||
- trust level
|
||||
- compatibility status
|
||||
|
||||
### 4.2 Agent skills
|
||||
|
||||
These are skill attachments for a specific agent.
|
||||
|
||||
Each attachment should have:
|
||||
|
||||
- shortname
|
||||
- desired state in Paperclip
|
||||
- actual state in the adapter when readable
|
||||
- sync status
|
||||
- origin
|
||||
|
||||
Agent attachments should normally reference skills by shortname or slug, for example:
|
||||
|
||||
- `review`
|
||||
- `react-best-practices`
|
||||
|
||||
not by noisy relative file path.
|
||||
|
||||
### 4.3 Primary user jobs
|
||||
|
||||
The UI should support these jobs cleanly:
|
||||
|
||||
1. “Show me what skills this company has.”
|
||||
2. “Import a skill from GitHub or a local folder.”
|
||||
3. “See whether a skill is safe, compatible, and who uses it.”
|
||||
4. “Attach skills to an agent.”
|
||||
5. “See whether the adapter actually has those skills.”
|
||||
6. “Reconcile desired vs actual skill state.”
|
||||
7. “Understand what Paperclip knows vs what the adapter knows.”
|
||||
|
||||
## 5. Core UI Surfaces
|
||||
|
||||
The product should have two primary skill surfaces.
|
||||
|
||||
### 5.1 Company Skills page
|
||||
|
||||
Add a company-level page, likely:
|
||||
|
||||
- `/companies/:companyId/skills`
|
||||
|
||||
Purpose:
|
||||
|
||||
- manage the company skill library
|
||||
- import and inspect skill packages
|
||||
- understand provenance and trust
|
||||
- see which agents use which skills
|
||||
|
||||
#### Route
|
||||
|
||||
- `/companies/:companyId/skills`
|
||||
|
||||
#### Primary actions
|
||||
|
||||
- import skill
|
||||
- inspect skill
|
||||
- attach to agents
|
||||
- detach from agents
|
||||
- export selected skills later
|
||||
|
||||
#### Empty state
|
||||
|
||||
When the company has no managed skills:
|
||||
|
||||
- explain what skills are
|
||||
- explain `skills.sh` / Agent Skills compatibility
|
||||
- offer `Import from GitHub` and `Import from folder`
|
||||
- optionally show adapter-discovered skills as a secondary “not managed yet” section
|
||||
|
||||
#### A. Skill library list
|
||||
|
||||
Each skill row should show:
|
||||
|
||||
- name
|
||||
- short description
|
||||
- source badge
|
||||
- trust badge
|
||||
- compatibility badge
|
||||
- number of attached agents
|
||||
|
||||
Suggested source states:
|
||||
|
||||
- local
|
||||
- github
|
||||
- imported package
|
||||
- external reference
|
||||
- adapter-discovered only
|
||||
|
||||
Suggested compatibility states:
|
||||
|
||||
- compatible
|
||||
- paperclip-extension
|
||||
- unknown
|
||||
- invalid
|
||||
|
||||
Suggested trust states:
|
||||
|
||||
- markdown-only
|
||||
- assets
|
||||
- scripts/executables
|
||||
|
||||
Suggested list affordances:
|
||||
|
||||
- search by name or slug
|
||||
- filter by source
|
||||
- filter by trust level
|
||||
- filter by usage
|
||||
- sort by name, recent import, usage count
|
||||
|
||||
#### B. Import actions
|
||||
|
||||
Allow:
|
||||
|
||||
- import from local folder
|
||||
- import from GitHub URL
|
||||
- import from direct URL
|
||||
|
||||
Future:
|
||||
|
||||
- install from `companies.sh`
|
||||
- install from `skills.sh`
|
||||
|
||||
V1 requirement:
|
||||
|
||||
- importing from a `skills.sh`-compatible source should work without requiring a Paperclip-specific package layout
|
||||
|
||||
#### C. Skill detail drawer or page
|
||||
|
||||
Each skill should have a detail view showing:
|
||||
|
||||
- rendered `SKILL.md`
|
||||
- package source and pinning
|
||||
- included files
|
||||
- trust and licensing warnings
|
||||
- who uses it
|
||||
- adapter compatibility notes
|
||||
|
||||
Recommended route:
|
||||
|
||||
- `/companies/:companyId/skills/:skillId`
|
||||
|
||||
Recommended sections:
|
||||
|
||||
- Overview
|
||||
- Contents
|
||||
- Usage
|
||||
- Source
|
||||
- Trust / licensing
|
||||
|
||||
#### D. Usage view
|
||||
|
||||
Each company skill should show which agents use it.
|
||||
|
||||
Suggested columns:
|
||||
|
||||
- agent
|
||||
- desired state
|
||||
- actual state
|
||||
- adapter
|
||||
- sync mode
|
||||
- last sync status
|
||||
|
||||
### 5.2 Agent Skills tab
|
||||
|
||||
Keep and evolve the existing `AgentDetail` skill sync UI, but move it out of configuration.
|
||||
|
||||
Purpose:
|
||||
|
||||
- attach/detach company skills to one agent
|
||||
- inspect adapter reality for that agent
|
||||
- reconcile desired vs actual state
|
||||
- keep the association format readable and aligned with `AGENTS.md`
|
||||
|
||||
#### Route
|
||||
|
||||
- `/agents/:agentId/skills`
|
||||
|
||||
#### Agent tabs
|
||||
|
||||
The intended agent-level tab model becomes:
|
||||
|
||||
- `dashboard`
|
||||
- `configuration`
|
||||
- `skills`
|
||||
- `runs`
|
||||
|
||||
This is preferable to hiding skills inside configuration because:
|
||||
|
||||
- skills are not just adapter config
|
||||
- skills need their own sync/status language
|
||||
- skills are a reusable company asset, not merely one agent field
|
||||
- the screen needs room for desired vs actual state, warnings, and external skill adoption
|
||||
|
||||
#### Tab layout
|
||||
|
||||
The `Skills` tab should have three stacked sections:
|
||||
|
||||
1. Summary
|
||||
2. Managed skills
|
||||
3. External / discovered skills
|
||||
|
||||
Summary should show:
|
||||
|
||||
- adapter sync support
|
||||
- sync mode
|
||||
- number of managed skills
|
||||
- number of external skills
|
||||
- drift or warning count
|
||||
|
||||
#### A. Desired skills
|
||||
|
||||
Show company-managed skills attached to the agent.
|
||||
|
||||
Each row should show:
|
||||
|
||||
- skill name
|
||||
- shortname
|
||||
- sync state
|
||||
- source
|
||||
- last adapter observation if available
|
||||
|
||||
Each row should support:
|
||||
|
||||
- enable / disable
|
||||
- open skill detail
|
||||
- see source badge
|
||||
- see sync badge
|
||||
|
||||
#### B. External or discovered skills
|
||||
|
||||
Show skills reported by the adapter that are not company-managed.
|
||||
|
||||
This matters because Codex and similar adapters may already have local skills that Paperclip did not install.
|
||||
|
||||
These should be clearly marked:
|
||||
|
||||
- external
|
||||
- not managed by Paperclip
|
||||
|
||||
Each external row should support:
|
||||
|
||||
- inspect
|
||||
- adopt into company library later
|
||||
- attach as managed skill later if appropriate
|
||||
|
||||
#### C. Sync controls
|
||||
|
||||
Support:
|
||||
|
||||
- sync
|
||||
- reset draft
|
||||
- detach
|
||||
|
||||
Future:
|
||||
|
||||
- import external skill into company library
|
||||
- promote ad hoc local skill into a managed company skill
|
||||
|
||||
Recommended footer actions:
|
||||
|
||||
- `Sync skills`
|
||||
- `Reset`
|
||||
- `Refresh adapter state`
|
||||
|
||||
## 6. Skill State Model In The UI
|
||||
|
||||
Each skill attachment should have a user-facing state.
|
||||
|
||||
Suggested states:
|
||||
|
||||
- `in_sync`
|
||||
- `desired_only`
|
||||
- `external`
|
||||
- `drifted`
|
||||
- `unmanaged`
|
||||
- `unknown`
|
||||
|
||||
Definitions:
|
||||
|
||||
- `in_sync`: desired and actual match
|
||||
- `desired_only`: Paperclip wants it, adapter does not show it yet
|
||||
- `external`: adapter has it but Paperclip does not manage it
|
||||
- `drifted`: adapter has a conflicting or unexpected version/location
|
||||
- `unmanaged`: adapter does not support sync, Paperclip only tracks desired state
|
||||
- `unknown`: adapter read failed or state cannot be trusted
|
||||
|
||||
Suggested badge copy:
|
||||
|
||||
- `In sync`
|
||||
- `Needs sync`
|
||||
- `External`
|
||||
- `Drifted`
|
||||
- `Unmanaged`
|
||||
- `Unknown`
|
||||
|
||||
## 7. Adapter Presentation Rules
|
||||
|
||||
The UI should not describe all adapters the same way.
|
||||
|
||||
### 7.1 Persistent adapters
|
||||
|
||||
Example:
|
||||
|
||||
- Codex local
|
||||
|
||||
Language:
|
||||
|
||||
- installed
|
||||
- synced into adapter home
|
||||
- external skills detected
|
||||
|
||||
### 7.2 Ephemeral adapters
|
||||
|
||||
Example:
|
||||
|
||||
- Claude local
|
||||
|
||||
Language:
|
||||
|
||||
- will be mounted on next run
|
||||
- effective runtime skills
|
||||
- not globally installed
|
||||
|
||||
### 7.3 Unsupported adapters
|
||||
|
||||
Language:
|
||||
|
||||
- this adapter does not implement skill sync yet
|
||||
- Paperclip can still track desired skills
|
||||
- actual adapter state is unavailable
|
||||
|
||||
This state should still allow:
|
||||
|
||||
- attaching company skills to the agent as desired state
|
||||
- export/import of those desired attachments
|
||||
|
||||
## 7.4 Read-only adapters
|
||||
|
||||
Some adapters may be able to list skills but not mutate them.
|
||||
|
||||
Language:
|
||||
|
||||
- Paperclip can see adapter skills
|
||||
- this adapter does not support applying changes
|
||||
- desired state can be tracked, but reconciliation is manual
|
||||
|
||||
## 8. Information Architecture
|
||||
|
||||
Recommended navigation:
|
||||
|
||||
- company nav adds `Skills`
|
||||
- agent detail adds `Skills` as its own tab
|
||||
- company skill detail gets its own route when the company library ships
|
||||
|
||||
Recommended separation:
|
||||
|
||||
- Company Skills page answers: “What skills do we have?”
|
||||
- Agent Skills tab answers: “What does this agent use, and is it synced?”
|
||||
|
||||
### 8.1 Proposed route map
|
||||
|
||||
- `/companies/:companyId/skills`
|
||||
- `/companies/:companyId/skills/:skillId`
|
||||
- `/agents/:agentId/skills`
|
||||
|
||||
### 8.2 Nav and discovery
|
||||
|
||||
Recommended entry points:
|
||||
|
||||
- company sidebar: `Skills`
|
||||
- agent page tabs: `Skills`
|
||||
- company import preview: link imported skills to company skills page later
|
||||
- agent skills rows: link to company skill detail
|
||||
|
||||
## 9. Import / Export Integration
|
||||
|
||||
Skill UI and package portability should meet in the company skill library.
|
||||
|
||||
Import behavior:
|
||||
|
||||
- importing a company package with `SKILL.md` content should create or update company skills
|
||||
- agent attachments should primarily come from `AGENTS.md` shortname associations
|
||||
- `.paperclip.yaml` may add Paperclip-specific fidelity, but should not replace the base shortname association model
|
||||
- referenced third-party skills should keep provenance visible
|
||||
|
||||
Export behavior:
|
||||
|
||||
- exporting a company should include company-managed skills when selected
|
||||
- `AGENTS.md` should emit skill associations by shortname or slug
|
||||
- `.paperclip.yaml` may add Paperclip-specific skill fidelity later if needed, but should not be required for ordinary agent-to-skill association
|
||||
- adapter-only external skills should not be silently exported as managed company skills
|
||||
|
||||
### 9.1 Import workflows
|
||||
|
||||
V1 workflows should support:
|
||||
|
||||
1. import one or more skills from a local folder
|
||||
2. import one or more skills from a GitHub repo
|
||||
3. import a company package that contains skills
|
||||
4. attach imported skills to one or more agents
|
||||
|
||||
Import preview for skills should show:
|
||||
|
||||
- skills discovered
|
||||
- source and pinning
|
||||
- trust level
|
||||
- licensing warnings
|
||||
- whether an existing company skill will be created, updated, or skipped
|
||||
|
||||
### 9.2 Export workflows
|
||||
|
||||
V1 should support:
|
||||
|
||||
1. export a company with managed skills included when selected
|
||||
2. export an agent whose `AGENTS.md` contains shortname skill associations
|
||||
3. preserve Agent Skills compatibility for each `SKILL.md`
|
||||
|
||||
Out of scope for V1:
|
||||
|
||||
- exporting adapter-only external skills as managed packages automatically
|
||||
|
||||
## 10. Data And API Shape
|
||||
|
||||
This plan implies a clean split in backend concepts.
|
||||
|
||||
### 10.1 Company skill records
|
||||
|
||||
Paperclip should have a company-scoped skill model or managed package model representing:
|
||||
|
||||
- identity
|
||||
- source
|
||||
- files
|
||||
- provenance
|
||||
- trust and licensing metadata
|
||||
|
||||
### 10.2 Agent skill attachments
|
||||
|
||||
Paperclip should separately store:
|
||||
|
||||
- agent id
|
||||
- skill identity
|
||||
- desired enabled state
|
||||
- optional ordering or metadata later
|
||||
|
||||
### 10.3 Adapter sync snapshot
|
||||
|
||||
Adapter reads should return:
|
||||
|
||||
- supported flag
|
||||
- sync mode
|
||||
- entries
|
||||
- warnings
|
||||
- desired skills
|
||||
|
||||
This already exists in rough form and should be the basis for the UI.
|
||||
|
||||
### 10.4 UI-facing API needs
|
||||
|
||||
The complete UI implies these API surfaces:
|
||||
|
||||
- list company-managed skills
|
||||
- import company skills from path/URL/GitHub
|
||||
- get one company skill detail
|
||||
- list agents using a given skill
|
||||
- attach/detach company skills for an agent
|
||||
- list adapter sync snapshot for an agent
|
||||
- apply desired skills for an agent
|
||||
|
||||
Existing agent-level skill sync APIs can remain the base for the agent tab.
|
||||
The company-level library APIs still need to be designed and implemented.
|
||||
|
||||
## 11. Page-by-page UX
|
||||
|
||||
### 11.1 Company Skills list page
|
||||
|
||||
Header:
|
||||
|
||||
- title
|
||||
- short explanation of compatibility with Agent Skills / `skills.sh`
|
||||
- import button
|
||||
|
||||
Body:
|
||||
|
||||
- filters
|
||||
- skill table or cards
|
||||
- empty state when none
|
||||
|
||||
Secondary content:
|
||||
|
||||
- warnings panel for untrusted or incompatible skills
|
||||
|
||||
### 11.2 Company Skill detail page
|
||||
|
||||
Header:
|
||||
|
||||
- skill name
|
||||
- shortname
|
||||
- source badge
|
||||
- trust badge
|
||||
- compatibility badge
|
||||
|
||||
Sections:
|
||||
|
||||
- rendered `SKILL.md`
|
||||
- files and references
|
||||
- usage by agents
|
||||
- source / provenance
|
||||
- trust and licensing warnings
|
||||
|
||||
Actions:
|
||||
|
||||
- attach to agent
|
||||
- remove from company library later
|
||||
- export later
|
||||
|
||||
### 11.3 Agent Skills tab
|
||||
|
||||
Header:
|
||||
|
||||
- adapter support summary
|
||||
- sync mode
|
||||
- refresh and sync actions
|
||||
|
||||
Body:
|
||||
|
||||
- managed skills list
|
||||
- external/discovered skills list
|
||||
- warnings / unsupported state block
|
||||
|
||||
## 12. States And Empty Cases
|
||||
|
||||
### 12.1 Company Skills page
|
||||
|
||||
States:
|
||||
|
||||
- empty
|
||||
- loading
|
||||
- loaded
|
||||
- import in progress
|
||||
- import failed
|
||||
|
||||
### 12.2 Company Skill detail
|
||||
|
||||
States:
|
||||
|
||||
- loading
|
||||
- not found
|
||||
- incompatible
|
||||
- loaded
|
||||
|
||||
### 12.3 Agent Skills tab
|
||||
|
||||
States:
|
||||
|
||||
- loading snapshot
|
||||
- unsupported adapter
|
||||
- read-only adapter
|
||||
- sync-capable adapter
|
||||
- sync failed
|
||||
- stale draft
|
||||
|
||||
## 13. Permissions And Governance
|
||||
|
||||
Suggested V1 policy:
|
||||
|
||||
- board users can manage company skills
|
||||
- board users can attach skills to agents
|
||||
- agents themselves do not mutate company skill library by default
|
||||
- later, certain agents may get scoped permissions for skill attachment or sync
|
||||
|
||||
## 14. UI Phases
|
||||
|
||||
### Phase A: Stabilize current agent skill sync UI
|
||||
|
||||
Goals:
|
||||
|
||||
- move skills to an `AgentDetail` tab
|
||||
- improve status language
|
||||
- support desired-only state even on unsupported adapters
|
||||
- polish copy for persistent vs ephemeral adapters
|
||||
|
||||
### Phase B: Add Company Skills page
|
||||
|
||||
Goals:
|
||||
|
||||
- company-level skill library
|
||||
- import from GitHub/local folder
|
||||
- basic detail view
|
||||
- usage counts by agent
|
||||
- `skills.sh`-compatible import path
|
||||
|
||||
### Phase C: Connect skills to portability
|
||||
|
||||
Goals:
|
||||
|
||||
- importing company packages creates company skills
|
||||
- exporting selected skills works cleanly
|
||||
- agent attachments round-trip primarily through `AGENTS.md` shortnames
|
||||
|
||||
### Phase D: External skill adoption flow
|
||||
|
||||
Goals:
|
||||
|
||||
- detect adapter external skills
|
||||
- allow importing them into company-managed state where possible
|
||||
- make provenance explicit
|
||||
|
||||
### Phase E: Advanced sync and drift UX
|
||||
|
||||
Goals:
|
||||
|
||||
- desired-vs-actual diffing
|
||||
- drift resolution actions
|
||||
- multi-agent skill usage and sync reporting
|
||||
|
||||
## 15. Design Risks
|
||||
|
||||
1. Overloading the agent page with package management will make the feature confusing.
|
||||
2. Treating unsupported adapters as broken rather than unmanaged will make the product feel inconsistent.
|
||||
3. Mixing external adapter-discovered skills with company-managed skills without clear labels will erode trust.
|
||||
4. If company skill records do not exist, import/export and UI will remain loosely coupled and round-trip fidelity will stay weak.
|
||||
5. If agent skill associations are path-based instead of shortname-based, the format will feel too technical and too Paperclip-specific.
|
||||
|
||||
## 16. Recommendation
|
||||
|
||||
The next product step should be:
|
||||
|
||||
1. move skills out of agent configuration and into a dedicated `Skills` tab
|
||||
2. add a dedicated company-level `Skills` page as the library and package-management surface
|
||||
3. make company import/export target that company skill library, not the agent page directly
|
||||
4. preserve adapter-aware truth in the UI by clearly separating:
|
||||
- desired
|
||||
- actual
|
||||
- external
|
||||
- unmanaged
|
||||
5. keep agent-to-skill associations shortname-based in `AGENTS.md`
|
||||
|
||||
That gives Paperclip one coherent skill story instead of forcing package management, adapter sync, and agent configuration into the same screen.
|
||||
@@ -7,10 +7,10 @@ Define a Paperclip memory service and surface API that can sit above multiple me
|
||||
- company scoping
|
||||
- auditability
|
||||
- provenance back to Paperclip work objects
|
||||
- budget / cost visibility
|
||||
- budget and cost visibility
|
||||
- plugin-first extensibility
|
||||
|
||||
This plan is based on the external landscape summarized in `doc/memory-landscape.md` and on the current Paperclip architecture in:
|
||||
This plan is based on the external landscape summarized in `doc/memory-landscape.md`, the AWS AgentCore comparison captured in [PAP-1274](/PAP/issues/PAP-1274), and the current Paperclip architecture in:
|
||||
|
||||
- `doc/SPEC-implementation.md`
|
||||
- `doc/plugins/PLUGIN_SPEC.md`
|
||||
@@ -19,23 +19,26 @@ This plan is based on the external landscape summarized in `doc/memory-landscape
|
||||
|
||||
## Recommendation In One Sentence
|
||||
|
||||
Paperclip should not embed one opinionated memory engine into core. It should add a company-scoped memory control plane with a small normalized adapter contract, then let built-ins and plugins implement the provider-specific behavior.
|
||||
Paperclip should add a company-scoped memory control plane with company default plus agent override resolution, shared hook delivery, and full operation attribution, while leaving extraction and storage semantics to built-ins and plugins.
|
||||
|
||||
## Product Decisions
|
||||
|
||||
### 1. Memory is company-scoped by default
|
||||
### 1. Memory resolution is company default plus agent override
|
||||
|
||||
Every memory binding belongs to exactly one company.
|
||||
|
||||
That binding can then be:
|
||||
Resolution order in V1:
|
||||
|
||||
- the company default
|
||||
- an agent override
|
||||
- a project override later if we need it
|
||||
- company default binding
|
||||
- optional per-agent override
|
||||
|
||||
There is no per-project override in V1.
|
||||
|
||||
Project context can still appear in scope and provenance so providers can use it for retrieval and partitioning, but projects do not participate in binding selection.
|
||||
|
||||
No cross-company memory sharing in the initial design.
|
||||
|
||||
### 2. Providers are selected by key
|
||||
### 2. Providers are selected by stable binding key
|
||||
|
||||
Each configured memory provider gets a stable key inside a company, for example:
|
||||
|
||||
@@ -44,36 +47,53 @@ Each configured memory provider gets a stable key inside a company, for example:
|
||||
- `local-markdown`
|
||||
- `research-kb`
|
||||
|
||||
Agents and services resolve the active provider by key, not by hard-coded vendor logic.
|
||||
Agents, tools, and background hooks resolve the active provider by key, not by hard-coded vendor logic.
|
||||
|
||||
### 3. Plugins are the primary provider path
|
||||
|
||||
Built-ins are useful for a zero-config local path, but most providers should arrive through the existing Paperclip plugin runtime.
|
||||
|
||||
That keeps the core small and matches the current direction that optional knowledge-like systems live at the edges.
|
||||
That keeps the core small and matches the broader Paperclip direction that specialized knowledge systems live at the edges.
|
||||
|
||||
### 4. Paperclip owns routing, provenance, and accounting
|
||||
### 4. Paperclip owns routing, provenance, and policy
|
||||
|
||||
Providers should not decide how Paperclip entities map to governance.
|
||||
|
||||
Paperclip core should own:
|
||||
|
||||
- binding resolution
|
||||
- who is allowed to call a memory operation
|
||||
- which company / agent / project scope is active
|
||||
- what issue / run / comment / document the operation belongs to
|
||||
- how usage gets recorded
|
||||
- which company, agent, issue, project, run, and subject scope is active
|
||||
- what source object the operation belongs to
|
||||
- how usage and costs are attributed
|
||||
- how operators inspect what happened
|
||||
|
||||
### 5. Automatic memory should be narrow at first
|
||||
### 5. Paperclip exposes shared hooks, providers own extraction
|
||||
|
||||
Paperclip should emit a common set of memory hooks that built-ins, third-party adapters, and plugins can all use.
|
||||
|
||||
Those hooks should pass structured Paperclip source objects plus normalized metadata. The provider then decides how to extract from those objects.
|
||||
|
||||
Paperclip should not force one extraction pipeline or one canonical "memory text" transform before the provider sees the input.
|
||||
|
||||
### 6. Automatic memory should start narrow, but the hook surface should be general
|
||||
|
||||
Automatic capture is useful, but broad silent capture is dangerous.
|
||||
|
||||
Initial automatic hooks should be:
|
||||
Initial built-in automatic hooks should be:
|
||||
|
||||
- pre-run hydrate for agent context recall
|
||||
- post-run capture from agent runs
|
||||
- issue comment / document capture when the binding enables it
|
||||
- pre-run recall for agent context hydration
|
||||
- optional issue comment capture
|
||||
- optional issue document capture
|
||||
|
||||
Everything else should start explicit.
|
||||
The hook registry itself should be general enough that other providers can subscribe to the same events without core changes.
|
||||
|
||||
### 7. No approval gate for binding changes in the open-source product
|
||||
|
||||
For the open-source version, changing memory bindings should not require approvals.
|
||||
|
||||
Paperclip should still log those changes in activity and preserve full auditability. Approval-gated memory governance can remain an enterprise or future policy layer.
|
||||
|
||||
## Proposed Concepts
|
||||
|
||||
@@ -83,7 +103,7 @@ A built-in or plugin-supplied implementation that stores and retrieves memory.
|
||||
|
||||
Examples:
|
||||
|
||||
- local markdown + vector index
|
||||
- local markdown plus semantic index
|
||||
- mem0 adapter
|
||||
- supermemory adapter
|
||||
- MemOS adapter
|
||||
@@ -94,6 +114,15 @@ A company-scoped configuration record that points to a provider and carries prov
|
||||
|
||||
This is the object selected by key.
|
||||
|
||||
### Memory binding target
|
||||
|
||||
A mapping from a Paperclip target to a binding.
|
||||
|
||||
V1 targets:
|
||||
|
||||
- `company`
|
||||
- `agent`
|
||||
|
||||
### Memory scope
|
||||
|
||||
The normalized Paperclip scope passed into a provider request.
|
||||
@@ -105,7 +134,9 @@ At minimum:
|
||||
- optional `projectId`
|
||||
- optional `issueId`
|
||||
- optional `runId`
|
||||
- optional `subjectId` for external/user identity
|
||||
- optional `subjectId` for external or user identity
|
||||
- optional `sessionKey` for providers that organize memory around sessions
|
||||
- optional `namespace` for providers that need an explicit partition hint
|
||||
|
||||
### Memory source reference
|
||||
|
||||
@@ -121,24 +152,36 @@ Supported source kinds should include:
|
||||
- `manual_note`
|
||||
- `external_document`
|
||||
|
||||
### Memory hook
|
||||
|
||||
A normalized trigger emitted by Paperclip when something memory-relevant happens.
|
||||
|
||||
Initial hook kinds:
|
||||
|
||||
- `pre_run_hydrate`
|
||||
- `post_run_capture`
|
||||
- `issue_comment_capture`
|
||||
- `issue_document_capture`
|
||||
- `manual_capture`
|
||||
|
||||
### Memory operation
|
||||
|
||||
A normalized write, query, browse, or delete action performed through Paperclip.
|
||||
A normalized capture, record-write, query, browse, get, correction, or delete action performed through Paperclip.
|
||||
|
||||
Paperclip should log every operation, whether the provider is local or external.
|
||||
Paperclip should log every memory operation whether the provider is local, plugin-backed, or external.
|
||||
|
||||
## Required Adapter Contract
|
||||
|
||||
The required core should be small enough to fit `memsearch`, `mem0`, `Memori`, `MemOS`, or `OpenViking`.
|
||||
The required core should be small enough to fit `memsearch`, `mem0`, `Memori`, `MemOS`, or `OpenViking`, but strong enough to satisfy Paperclip's attribution and inspectability requirements.
|
||||
|
||||
```ts
|
||||
export interface MemoryAdapterCapabilities {
|
||||
profile?: boolean;
|
||||
browse?: boolean;
|
||||
correction?: boolean;
|
||||
asyncIngestion?: boolean;
|
||||
multimodal?: boolean;
|
||||
providerManagedExtraction?: boolean;
|
||||
asyncExtraction?: boolean;
|
||||
providerNativeBrowse?: boolean;
|
||||
}
|
||||
|
||||
export interface MemoryScope {
|
||||
@@ -148,6 +191,8 @@ export interface MemoryScope {
|
||||
issueId?: string;
|
||||
runId?: string;
|
||||
subjectId?: string;
|
||||
sessionKey?: string;
|
||||
namespace?: string;
|
||||
}
|
||||
|
||||
export interface MemorySourceRef {
|
||||
@@ -168,10 +213,34 @@ export interface MemorySourceRef {
|
||||
externalRef?: string;
|
||||
}
|
||||
|
||||
export interface MemoryHookContext {
|
||||
hookKind:
|
||||
| "pre_run_hydrate"
|
||||
| "post_run_capture"
|
||||
| "issue_comment_capture"
|
||||
| "issue_document_capture"
|
||||
| "manual_capture";
|
||||
hookId: string;
|
||||
triggeredAt: string;
|
||||
actorAgentId?: string;
|
||||
heartbeatRunId?: string;
|
||||
}
|
||||
|
||||
export interface MemorySourcePayload {
|
||||
text?: string;
|
||||
mimeType?: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
object?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemoryUsage {
|
||||
provider: string;
|
||||
biller?: string;
|
||||
model?: string;
|
||||
billingType?: "metered_api" | "subscription_included" | "subscription_overage" | "unknown";
|
||||
attributionMode?: "billed_directly" | "included_in_run" | "external_invoice" | "untracked";
|
||||
inputTokens?: number;
|
||||
cachedInputTokens?: number;
|
||||
outputTokens?: number;
|
||||
embeddingTokens?: number;
|
||||
costCents?: number;
|
||||
@@ -179,20 +248,32 @@ export interface MemoryUsage {
|
||||
details?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemoryWriteRequest {
|
||||
bindingKey: string;
|
||||
scope: MemoryScope;
|
||||
source: MemorySourceRef;
|
||||
content: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
mode?: "append" | "upsert" | "summarize";
|
||||
}
|
||||
|
||||
export interface MemoryRecordHandle {
|
||||
providerKey: string;
|
||||
providerRecordId: string;
|
||||
}
|
||||
|
||||
export interface MemoryCaptureRequest {
|
||||
bindingKey: string;
|
||||
scope: MemoryScope;
|
||||
source: MemorySourceRef;
|
||||
payload: MemorySourcePayload;
|
||||
hook?: MemoryHookContext;
|
||||
mode?: "capture_residue" | "capture_record";
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemoryRecordWriteRequest {
|
||||
bindingKey: string;
|
||||
scope: MemoryScope;
|
||||
source?: MemorySourceRef;
|
||||
records: Array<{
|
||||
text: string;
|
||||
summary?: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
}>;
|
||||
}
|
||||
|
||||
export interface MemoryQueryRequest {
|
||||
bindingKey: string;
|
||||
scope: MemoryScope;
|
||||
@@ -202,6 +283,14 @@ export interface MemoryQueryRequest {
|
||||
metadataFilter?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemoryListRequest {
|
||||
bindingKey: string;
|
||||
scope: MemoryScope;
|
||||
cursor?: string;
|
||||
limit?: number;
|
||||
metadataFilter?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemorySnippet {
|
||||
handle: MemoryRecordHandle;
|
||||
text: string;
|
||||
@@ -217,30 +306,149 @@ export interface MemoryContextBundle {
|
||||
usage?: MemoryUsage[];
|
||||
}
|
||||
|
||||
export interface MemoryListPage {
|
||||
items: MemorySnippet[];
|
||||
nextCursor?: string;
|
||||
usage?: MemoryUsage[];
|
||||
}
|
||||
|
||||
export interface MemoryExtractionJob {
|
||||
providerJobId: string;
|
||||
status: "queued" | "running" | "succeeded" | "failed" | "cancelled";
|
||||
hookKind?: MemoryHookContext["hookKind"];
|
||||
source?: MemorySourceRef;
|
||||
error?: string;
|
||||
submittedAt?: string;
|
||||
startedAt?: string;
|
||||
finishedAt?: string;
|
||||
}
|
||||
|
||||
export interface MemoryAdapter {
|
||||
key: string;
|
||||
capabilities: MemoryAdapterCapabilities;
|
||||
write(req: MemoryWriteRequest): Promise<{
|
||||
capture(req: MemoryCaptureRequest): Promise<{
|
||||
records?: MemoryRecordHandle[];
|
||||
jobs?: MemoryExtractionJob[];
|
||||
usage?: MemoryUsage[];
|
||||
}>;
|
||||
upsertRecords(req: MemoryRecordWriteRequest): Promise<{
|
||||
records?: MemoryRecordHandle[];
|
||||
usage?: MemoryUsage[];
|
||||
}>;
|
||||
query(req: MemoryQueryRequest): Promise<MemoryContextBundle>;
|
||||
list(req: MemoryListRequest): Promise<MemoryListPage>;
|
||||
get(handle: MemoryRecordHandle, scope: MemoryScope): Promise<MemorySnippet | null>;
|
||||
forget(handles: MemoryRecordHandle[], scope: MemoryScope): Promise<{ usage?: MemoryUsage[] }>;
|
||||
}
|
||||
```
|
||||
|
||||
This contract intentionally does not force a provider to expose its internal graph, filesystem, or ontology.
|
||||
This contract intentionally does not force a provider to expose its internal graph, file tree, or ontology. It does require enough structure for Paperclip to browse, attribute, and audit what happened.
|
||||
|
||||
## Optional Adapter Surfaces
|
||||
|
||||
These should be capability-gated, not required:
|
||||
|
||||
- `browse(scope, filters)` for file-system / graph / timeline inspection
|
||||
- `correct(handle, patch)` for natural-language correction flows
|
||||
- `profile(scope)` when the provider can synthesize stable preferences or summaries
|
||||
- `sync(source)` for connectors or background ingestion
|
||||
- `listExtractionJobs(scope, cursor)` when async extraction needs richer operator visibility
|
||||
- `retryExtractionJob(jobId)` when a provider supports re-drive
|
||||
- `explain(queryResult)` for providers that can expose retrieval traces
|
||||
- provider-native browse or graph surfaces exposed through plugin UI
|
||||
|
||||
## Lessons From AWS AgentCore Memory API
|
||||
|
||||
AWS AgentCore Memory is a useful check on whether this plan is too abstract or missing important operational surfaces.
|
||||
|
||||
The broad direction still looks right:
|
||||
|
||||
- AWS splits memory into a control plane (`CreateMemory`, `UpdateMemory`, `ListMemories`) and a data plane (`CreateEvent`, `RetrieveMemoryRecords`, `GetMemoryRecord`, `ListMemoryRecords`)
|
||||
- AWS separates raw interaction capture from curated long-term memory records
|
||||
- AWS supports both provider-managed extraction and self-managed pipelines
|
||||
- AWS treats browse and list operations as first-class APIs, not ad hoc debugging helpers
|
||||
- AWS exposes extraction jobs instead of hiding asynchronous maintenance completely
|
||||
|
||||
That lines up with the Paperclip plan at a high level: provider configuration, scoped writes, scoped retrieval, provider-managed extraction as a capability, and a browse and inspect surface.
|
||||
|
||||
The concrete changes Paperclip should take from AWS are:
|
||||
|
||||
### 1. Keep config APIs separate from runtime traffic
|
||||
|
||||
The rollout should preserve a clean separation between:
|
||||
|
||||
- control-plane APIs for binding CRUD, defaults, overrides, and capability metadata
|
||||
- runtime APIs and tools for capture, record writes, query, list, get, forget, and extraction status
|
||||
|
||||
This keeps governance changes distinct from high-volume memory traffic.
|
||||
|
||||
### 2. Distinguish capture from curated record writes
|
||||
|
||||
AWS does not flatten everything into one write primitive. It distinguishes captured events from durable memory records.
|
||||
|
||||
Paperclip should do the same:
|
||||
|
||||
- `capture(...)` for raw run, comment, document, or activity residue
|
||||
- `upsertRecords(...)` for curated durable facts and notes
|
||||
|
||||
That is a better fit for provider-managed extraction and for manual curation flows.
|
||||
|
||||
### 3. Make list and browse first-class
|
||||
|
||||
AWS exposes list and retrieve surfaces directly. Paperclip should not make browse optional at the portable layer.
|
||||
|
||||
The minimum portable surface should include:
|
||||
|
||||
- `query`
|
||||
- `list`
|
||||
- `get`
|
||||
|
||||
Provider-native graph or file browsing can remain optional beyond that.
|
||||
|
||||
### 4. Add pagination and cursors for operator inspection
|
||||
|
||||
AWS consistently uses pagination on browse-heavy APIs.
|
||||
|
||||
Paperclip should add cursor-based pagination to:
|
||||
|
||||
- record listing
|
||||
- extraction job listing
|
||||
- memory operation explorer APIs
|
||||
|
||||
Prompt hydration can continue to use `topK`, but operator surfaces need cursors.
|
||||
|
||||
### 5. Add explicit session and namespace hints
|
||||
|
||||
AWS uses `actorId`, `sessionId`, `namespace`, and `memoryStrategyId` heavily.
|
||||
|
||||
Paperclip should keep its own control-plane-centric model, but the adapter contract needs obvious places to map those concepts:
|
||||
|
||||
- `sessionKey`
|
||||
- `namespace`
|
||||
|
||||
The provider adapter can map them to AWS or other vendor-specific identifiers without leaking those identifiers into core.
|
||||
|
||||
### 6. Treat asynchronous extraction as a real operational surface
|
||||
|
||||
AWS exposes extraction jobs explicitly. Paperclip should too.
|
||||
|
||||
Operators should be able to see:
|
||||
|
||||
- pending extraction work
|
||||
- failed extraction work
|
||||
- which hook or source caused the work
|
||||
- whether a retry is available
|
||||
|
||||
### 7. Keep Paperclip provenance primary
|
||||
|
||||
Paperclip should continue to center:
|
||||
|
||||
- `companyId`
|
||||
- `agentId`
|
||||
- `projectId`
|
||||
- `issueId`
|
||||
- `runId`
|
||||
- issue comments, documents, and activity as sources
|
||||
|
||||
The lesson from AWS is to support clean mapping into provider-specific models, not to let provider identifiers take over the core product model.
|
||||
|
||||
## What Paperclip Should Persist
|
||||
|
||||
@@ -248,39 +456,67 @@ Paperclip should not mirror the full provider memory corpus into Postgres unless
|
||||
|
||||
Paperclip core should persist:
|
||||
|
||||
- memory bindings and overrides
|
||||
- memory bindings
|
||||
- company default and agent override resolution targets
|
||||
- provider keys and capability metadata
|
||||
- normalized memory operation logs
|
||||
- provider record handles returned by operations when available
|
||||
- source references back to issue comments, documents, runs, and activity
|
||||
- usage and cost data
|
||||
- provider record handles returned by operations when available
|
||||
- hook delivery records and extraction job state
|
||||
- usage and cost attribution
|
||||
|
||||
For external providers, the memory payload itself can remain in the provider.
|
||||
For external providers, the actual memory payload can remain in the provider.
|
||||
|
||||
## Hook Model
|
||||
|
||||
### Automatic hooks
|
||||
### Shared hook surface
|
||||
|
||||
Paperclip should expose one shared hook system for memory.
|
||||
|
||||
That same system must be available to:
|
||||
|
||||
- built-in memory providers
|
||||
- plugin-based memory providers
|
||||
- third-party adapter integrations that want to use memory hooks
|
||||
|
||||
### What a hook delivers
|
||||
|
||||
Each hook delivery should include:
|
||||
|
||||
- resolved binding key
|
||||
- normalized `MemoryScope`
|
||||
- `MemorySourceRef`
|
||||
- structured source payload
|
||||
- hook metadata such as hook kind, trigger time, and related run id
|
||||
|
||||
The payload should include structured objects where possible so the provider can decide how to extract and chunk.
|
||||
|
||||
### Initial automatic hooks
|
||||
|
||||
These should be low-risk and easy to reason about:
|
||||
|
||||
1. `pre-run hydrate`
|
||||
1. `pre_run_hydrate`
|
||||
Before an agent run starts, Paperclip may call `query(... intent = "agent_preamble")` using the active binding.
|
||||
|
||||
2. `post-run capture`
|
||||
After a run finishes, Paperclip may write a summary or transcript-derived note tied to the run.
|
||||
2. `post_run_capture`
|
||||
After a run finishes, Paperclip may call `capture(...)` with structured run output, excerpts, and provenance.
|
||||
|
||||
3. `issue comment / document capture`
|
||||
When enabled on the binding, Paperclip may capture selected issue comments or issue documents as memory sources.
|
||||
3. `issue_comment_capture`
|
||||
When enabled on the binding, Paperclip may call `capture(...)` for selected issue comments.
|
||||
|
||||
### Explicit hooks
|
||||
4. `issue_document_capture`
|
||||
When enabled on the binding, Paperclip may call `capture(...)` for selected issue documents.
|
||||
|
||||
These should be tool- or UI-driven first:
|
||||
### Explicit tools and APIs
|
||||
|
||||
These should be tool-driven or UI-driven first:
|
||||
|
||||
- `memory.search`
|
||||
- `memory.note`
|
||||
- `memory.forget`
|
||||
- `memory.correct`
|
||||
- `memory.browse`
|
||||
- memory record list and get
|
||||
- extraction-job inspection
|
||||
|
||||
### Not automatic in the first version
|
||||
|
||||
@@ -309,34 +545,69 @@ The initial browse surface should support:
|
||||
|
||||
- active binding by company and agent
|
||||
- recent memory operations
|
||||
- recent write sources
|
||||
- recent write and capture sources
|
||||
- record list and record detail with source backlinks
|
||||
- query results with source backlinks
|
||||
- filters by agent, issue, run, source kind, and date
|
||||
- provider usage / cost / latency summaries
|
||||
- extraction job status
|
||||
- filters by agent, issue, project, run, source kind, and date
|
||||
- provider usage, cost, and latency summaries
|
||||
|
||||
When a provider supports richer browsing, the plugin can add deeper views through the existing plugin UI surfaces.
|
||||
|
||||
## Cost And Evaluation
|
||||
|
||||
Every adapter response should be able to return usage records.
|
||||
Paperclip should treat memory accounting as two related but distinct concerns:
|
||||
|
||||
Paperclip should roll up:
|
||||
### 1. `memory_operations` is the authoritative audit trail
|
||||
|
||||
- memory inference tokens
|
||||
- embedding tokens
|
||||
- external provider cost
|
||||
Every memory action should create a normalized operation record that captures:
|
||||
|
||||
- binding
|
||||
- scope
|
||||
- source provenance
|
||||
- operation type
|
||||
- success or failure
|
||||
- latency
|
||||
- query count
|
||||
- write count
|
||||
- usage details reported by the provider
|
||||
- attribution mode
|
||||
- related run, issue, and agent when available
|
||||
|
||||
It should also record evaluation-oriented metrics where possible:
|
||||
This is where operators answer "what memory work happened and why?"
|
||||
|
||||
### 2. `cost_events` remains the canonical spend ledger for billable metered usage
|
||||
|
||||
The current `cost_events` model is already the canonical cost ledger for token and model spend, and `agent_runtime_state` plus `heartbeat_runs.usageJson` already roll up and summarize run usage.
|
||||
|
||||
The recommendation is:
|
||||
|
||||
- if a memory operation runs inside a normal Paperclip agent heartbeat and the model usage is already counted on that run, do not create a duplicate `cost_event`
|
||||
- instead, store the memory operation with `attributionMode = "included_in_run"` and link it to the related `heartbeatRunId`
|
||||
- if a memory provider makes a direct metered model call outside the agent run accounting path, the provider must report usage and Paperclip should create a `cost_event`
|
||||
- that direct `cost_event` should still link back to the memory operation, agent, company, and issue or run context when possible
|
||||
|
||||
### 3. `finance_events` should carry flat subscription or invoice-style costs
|
||||
|
||||
If a memory service incurs:
|
||||
|
||||
- monthly subscription cost
|
||||
- storage invoices
|
||||
- provider platform charges not tied to one request
|
||||
|
||||
those should be represented as `finance_events`, not as synthetic per-query memory operations.
|
||||
|
||||
That keeps usage telemetry separate from accounting entries like invoices and flat fees.
|
||||
|
||||
### 4. Evaluation metrics still matter
|
||||
|
||||
Paperclip should record evaluation-oriented metrics where possible:
|
||||
|
||||
- recall hit rate
|
||||
- empty query rate
|
||||
- manual correction count
|
||||
- per-binding success / failure counts
|
||||
- extraction failure count
|
||||
- per-binding success and failure counts
|
||||
|
||||
This is important because a memory system that "works" but silently burns budget is not acceptable in Paperclip.
|
||||
This is important because a memory system that "works" but silently burns budget or silently fails extraction is not acceptable in Paperclip.
|
||||
|
||||
## Suggested Data Model Additions
|
||||
|
||||
@@ -344,23 +615,36 @@ At the control-plane level, the likely new core tables are:
|
||||
|
||||
- `memory_bindings`
|
||||
- company-scoped key
|
||||
- provider id / plugin id
|
||||
- provider id or plugin id
|
||||
- config blob
|
||||
- enabled status
|
||||
|
||||
- `memory_binding_targets`
|
||||
- target type (`company`, `agent`, later `project`)
|
||||
- target type (`company`, `agent`)
|
||||
- target id
|
||||
- binding id
|
||||
|
||||
- `memory_operations`
|
||||
- company id
|
||||
- binding id
|
||||
- operation type (`write`, `query`, `forget`, `browse`, `correct`)
|
||||
- operation type (`capture`, `record_upsert`, `query`, `list`, `get`, `forget`, `correct`)
|
||||
- scope fields
|
||||
- source refs
|
||||
- usage / latency / cost
|
||||
- success / error
|
||||
- usage, latency, and attribution mode
|
||||
- related heartbeat run id
|
||||
- related cost event id
|
||||
- success or error
|
||||
|
||||
- `memory_extraction_jobs`
|
||||
- company id
|
||||
- binding id
|
||||
- operation id
|
||||
- provider job id
|
||||
- hook kind
|
||||
- status
|
||||
- source refs
|
||||
- error
|
||||
- submitted, started, and finished timestamps
|
||||
|
||||
Provider-specific long-form state should stay in plugin state or the provider itself unless a built-in local provider needs its own schema.
|
||||
|
||||
@@ -382,45 +666,46 @@ The design should still treat that built-in as just another provider behind the
|
||||
### Phase 1: Control-plane contract
|
||||
|
||||
- add memory binding models and API types
|
||||
- add plugin capability / registration surface for memory providers
|
||||
- add operation logging and usage reporting
|
||||
- add company default plus agent override resolution
|
||||
- add plugin capability and registration surface for memory providers
|
||||
|
||||
### Phase 2: One built-in + one plugin example
|
||||
### Phase 2: Hook delivery and operation audit
|
||||
|
||||
- add shared memory hook emission in core
|
||||
- add operation logging, extraction job state, and usage attribution
|
||||
- add direct-provider cost and finance-event linkage rules
|
||||
|
||||
### Phase 3: One built-in plus one plugin example
|
||||
|
||||
- ship a local markdown-first provider
|
||||
- ship one hosted adapter example to validate the external-provider path
|
||||
|
||||
### Phase 3: UI inspection
|
||||
### Phase 4: UI inspection
|
||||
|
||||
- add company / agent memory settings
|
||||
- add company and agent memory settings
|
||||
- add a memory operation explorer
|
||||
- add record list and detail surfaces
|
||||
- add source backlinks to issues and runs
|
||||
|
||||
### Phase 4: Automatic hooks
|
||||
|
||||
- pre-run hydrate
|
||||
- post-run capture
|
||||
- selected issue comment / document capture
|
||||
|
||||
### Phase 5: Rich capabilities
|
||||
|
||||
- correction flows
|
||||
- provider-native browse / graph views
|
||||
- project-level overrides if needed
|
||||
- provider-native browse or graph views
|
||||
- evaluation dashboards
|
||||
- retention and quota controls
|
||||
|
||||
## Open Questions
|
||||
## Remaining Open Questions
|
||||
|
||||
- Should project overrides exist in V1 of the memory service, or should we force company default plus agent override first?
|
||||
- Do we want Paperclip-managed extraction pipelines at all, or should built-ins be the only place where Paperclip owns extraction?
|
||||
- Should memory usage extend the current `cost_events` model directly, or should memory operations keep a parallel usage log and roll up into `cost_events` secondarily?
|
||||
- Do we want provider install / binding changes to require approvals for some companies?
|
||||
- Which built-in local provider should ship first: pure markdown, markdown plus embeddings, or a lightweight local vector store?
|
||||
- How much source payload should Paperclip snapshot inside `memory_operations` for debugging without duplicating large transcripts?
|
||||
- Should correction flows mutate provider state directly, create superseding records, or both depending on provider capability?
|
||||
- What default retention and size limits should the local built-in enforce?
|
||||
|
||||
## Bottom Line
|
||||
|
||||
The right abstraction is:
|
||||
|
||||
- Paperclip owns memory bindings, scopes, provenance, governance, and usage reporting.
|
||||
- Paperclip owns bindings, resolution, hooks, provenance, policy, and attribution.
|
||||
- Providers own extraction, ranking, storage, and provider-native memory semantics.
|
||||
|
||||
That gives Paperclip a stable "memory service" without locking the product to one memory philosophy or one vendor.
|
||||
That gives Paperclip a stable memory service without locking the product to one memory philosophy or one vendor, and it integrates the AWS lessons without importing AWS's model into core.
|
||||
|
||||
362
doc/plans/2026-04-06-smart-model-routing.md
Normal file
362
doc/plans/2026-04-06-smart-model-routing.md
Normal file
@@ -0,0 +1,362 @@
|
||||
# 2026-04-06 Smart Model Routing
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-04-06
|
||||
Audience: Product and engineering
|
||||
Related:
|
||||
- `doc/SPEC-implementation.md`
|
||||
- `doc/PRODUCT.md`
|
||||
- `doc/plans/2026-03-14-adapter-skill-sync-rollout.md`
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This document defines a V1 plan for "smart model routing" in Paperclip.
|
||||
|
||||
The goal is not to build a generic cross-provider router in the server. The goal is:
|
||||
|
||||
- let supported adapters use a cheaper model for lightweight heartbeat orchestration work
|
||||
- keep the main task execution on the adapter's normal primary model
|
||||
- preserve Paperclip's existing task, session, and audit invariants
|
||||
- report cost and model usage truthfully when more than one model participates in a single heartbeat
|
||||
|
||||
The motivating use case is a local coding adapter where a cheap model can handle the first fast pass:
|
||||
|
||||
- read the wake context
|
||||
- orient to the task and workspace
|
||||
- leave an immediate progress comment when appropriate
|
||||
- perform bounded lightweight triage
|
||||
|
||||
Then the primary model does the substantive work.
|
||||
|
||||
## 2. Hermes Findings
|
||||
|
||||
Hermes does have a real "smart model routing" feature, but it is narrower than the name suggests.
|
||||
|
||||
Observed behavior:
|
||||
|
||||
- `agent/smart_model_routing.py` implements a conservative classifier for "simple" turns
|
||||
- the cheap path only triggers for short, single-line, non-code, non-URL, non-tool-heavy messages
|
||||
- complexity is detected with hardcoded thresholds plus a keyword denylist like `debug`, `implement`, `test`, `plan`, `tool`, `docker`, and similar terms
|
||||
- if the cheap route cannot be resolved, Hermes silently falls back to the primary model
|
||||
|
||||
Important architectural detail:
|
||||
|
||||
- Hermes applies this routing before constructing the agent for that turn
|
||||
- the route is resolved in `cron/scheduler.py` and passed into agent creation as the active provider/model/runtime
|
||||
|
||||
More useful than the routing heuristic itself is Hermes' broader model-slot design:
|
||||
|
||||
- main conversational model
|
||||
- fallback model for failover
|
||||
- auxiliary model slots for side tasks like compression and classification
|
||||
|
||||
That separation is a better fit for Paperclip than copying Hermes' exact keyword heuristic.
|
||||
|
||||
## 3. Current Paperclip State
|
||||
|
||||
Paperclip already has the right execution shape for adapter-specific routing, but it currently assumes one model per heartbeat run.
|
||||
|
||||
Current implementation facts:
|
||||
|
||||
- `server/src/services/heartbeat.ts` builds rich run context, including `paperclipWake`, workspace metadata, and session handoff context
|
||||
- each adapter receives a single resolved `config` object and executes once
|
||||
- built-in local adapters read one `config.model` and pass it directly to the underlying CLI
|
||||
- UI config today exposes one main `model` field plus adapter-specific thinking-effort controls
|
||||
- cost accounting currently records one provider/model tuple per run via `AdapterExecutionResult`
|
||||
|
||||
What this means:
|
||||
|
||||
- there is no shared routing layer in the server today
|
||||
- model choice already lives at the adapter boundary, which is good
|
||||
- multi-model execution in a single heartbeat needs explicit contract work or cost reporting will become misleading
|
||||
|
||||
## 4. Product Decision
|
||||
|
||||
Paperclip should implement smart model routing as an adapter-local, opt-in execution pattern.
|
||||
|
||||
V1 decision:
|
||||
|
||||
1. Do not add a global server-side router that tries to understand every adapter.
|
||||
2. Do not copy Hermes' prompt-keyword classifier as Paperclip's default routing policy.
|
||||
3. Add an adapter-specific "cheap preflight" phase for supported adapters.
|
||||
4. Keep the primary model as the canonical work model.
|
||||
5. Persist only the primary session unless an adapter can prove that cross-model session resume is safe.
|
||||
|
||||
Rationale:
|
||||
|
||||
- Paperclip heartbeats are structured, issue-scoped, and already include wake metadata
|
||||
- routing by execution phase is more reliable than routing by free-text prompt complexity
|
||||
- session semantics differ by adapter, so resume behavior must stay adapter-owned
|
||||
|
||||
## 5. Proposed V1 Behavior
|
||||
|
||||
## 5.1 Config shape
|
||||
|
||||
Supported adapters should add an optional routing block to `adapterConfig`.
|
||||
|
||||
Proposed shape:
|
||||
|
||||
```ts
|
||||
smartModelRouting?: {
|
||||
enabled: boolean;
|
||||
cheapModel: string;
|
||||
cheapThinkingEffort?: string;
|
||||
maxPreflightTurns?: number;
|
||||
allowInitialProgressComment?: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- keep existing `model` as the primary model
|
||||
- `cheapModel` is adapter-specific, not global
|
||||
- adapters that cannot safely support this block simply ignore it
|
||||
|
||||
For adapters with provider-specific model fields later, the shape can expand to include provider/base-url overrides. V1 should start simple.
|
||||
|
||||
## 5.2 Routing policy
|
||||
|
||||
Supported adapters should run cheap preflight only when all are true:
|
||||
|
||||
- `smartModelRouting.enabled` is true
|
||||
- `cheapModel` is configured
|
||||
- the run is issue-scoped
|
||||
- the adapter is starting a fresh session, not resuming a persisted one
|
||||
- the run is expected to do real task work rather than just resume an existing thread
|
||||
|
||||
Supported adapters should skip cheap preflight when any are true:
|
||||
|
||||
- a persisted task session already exists
|
||||
- the adapter cannot safely isolate preflight from the primary session
|
||||
- the issue or wake type implies the task is already mid-flight and continuity matters more than first-response speed
|
||||
|
||||
This is intentionally phase-based, not text-heuristic-based.
|
||||
|
||||
## 5.3 Cheap preflight responsibilities
|
||||
|
||||
The cheap phase should be narrow and bounded.
|
||||
|
||||
Allowed responsibilities:
|
||||
|
||||
- ingest wake context and issue summary
|
||||
- inspect the workspace at a shallow level
|
||||
- leave a short "starting investigation" style comment when appropriate
|
||||
- collect a compact handoff summary for the primary phase
|
||||
|
||||
Not allowed in V1:
|
||||
|
||||
- long tool loops
|
||||
- risky file mutations
|
||||
- being the canonical persisted task session
|
||||
- deciding final completion without either explicit adapter support or a trivial success case
|
||||
|
||||
Implementation detail:
|
||||
|
||||
- the adapter should inject an explicit preflight prompt telling the model this is a bounded orchestration pass
|
||||
- preflight should use a very small turn budget, for example 1-2 turns
|
||||
|
||||
## 5.4 Primary execution responsibilities
|
||||
|
||||
After preflight, the adapter launches the normal primary execution using the existing prompt and primary model.
|
||||
|
||||
The primary phase should receive:
|
||||
|
||||
- the normal Paperclip prompt
|
||||
- any preflight-generated handoff summary
|
||||
- normal workspace and wake context
|
||||
|
||||
The primary phase remains the source of truth for:
|
||||
|
||||
- persisted session state
|
||||
- final task completion
|
||||
- most file changes
|
||||
- most cost
|
||||
|
||||
## 6. Required Contract Changes
|
||||
|
||||
The current `AdapterExecutionResult` is too narrow for truthful multi-model accounting.
|
||||
|
||||
Add an optional segmented execution report, for example:
|
||||
|
||||
```ts
|
||||
executionSegments?: Array<{
|
||||
phase: "cheap_preflight" | "primary";
|
||||
provider?: string | null;
|
||||
biller?: string | null;
|
||||
model?: string | null;
|
||||
billingType?: AdapterBillingType | null;
|
||||
usage?: UsageSummary;
|
||||
costUsd?: number | null;
|
||||
summary?: string | null;
|
||||
}>
|
||||
```
|
||||
|
||||
V1 server behavior:
|
||||
|
||||
- if `executionSegments` is absent, keep current single-result behavior unchanged
|
||||
- if present, write one `cost_events` row per segment that has cost or token usage
|
||||
- store the segment array in run usage/result metadata for later UI inspection
|
||||
- keep the existing top-level `provider` / `model` fields as a summary, preferably the primary phase when present
|
||||
|
||||
This avoids breaking existing adapters while giving routed adapters truthful reporting.
|
||||
|
||||
## 7. Adapter Rollout Plan
|
||||
|
||||
## 7.1 Phase 1: contract and server plumbing
|
||||
|
||||
Work:
|
||||
|
||||
1. Extend adapter result types with segmented execution metadata.
|
||||
2. Update heartbeat cost recording to emit multiple cost events when segments are present.
|
||||
3. Include segment summaries in run metadata for transcript/debug views.
|
||||
|
||||
Success criteria:
|
||||
|
||||
- existing adapters behave exactly as before
|
||||
- a routed adapter can report cheap plus primary usage without collapsing them into one fake model
|
||||
|
||||
## 7.2 Phase 2: `codex_local`
|
||||
|
||||
Why first:
|
||||
|
||||
- Codex already has rich prompt/handoff handling
|
||||
- the adapter already injects Paperclip skills and workspace metadata cleanly
|
||||
- the current implementation already distinguishes bootstrap, wake delta, and handoff prompt sections
|
||||
|
||||
Implementation work:
|
||||
|
||||
1. Add config support for `smartModelRouting`.
|
||||
2. Add a cheap-preflight prompt builder.
|
||||
3. Run cheap preflight only on fresh sessions.
|
||||
4. Pass a compact preflight handoff note into the primary prompt.
|
||||
5. Report segmented usage and model metadata.
|
||||
|
||||
Important guardrail:
|
||||
|
||||
- do not resume the cheap-model session as the primary session in V1
|
||||
|
||||
## 7.3 Phase 3: `claude_local`
|
||||
|
||||
Implementation work is similar, but cross-model session switching carries even more risk here, making it even less attractive.
|
||||
|
||||
Same rule:
|
||||
|
||||
- cheap preflight is ephemeral
|
||||
- primary Claude session remains canonical
|
||||
|
||||
## 7.4 Phase 4: other adapters
|
||||
|
||||
Candidates:
|
||||
|
||||
- `cursor`
|
||||
- `gemini_local`
|
||||
- `opencode_local`
|
||||
- external plugin adapters through `createServerAdapter()`
|
||||
|
||||
These should come later because each runtime has different session and model-switch semantics.
|
||||
|
||||
## 8. UI and Config Changes
|
||||
|
||||
For supported built-in adapters, the agent config UI should expose:
|
||||
|
||||
- `model` as the primary model
|
||||
- `smart model routing` toggle
|
||||
- `cheap model`
|
||||
- optional cheap thinking effort
|
||||
- optional `allow initial progress comment` toggle
|
||||
|
||||
The run detail UI should also show when routing occurred, for example:
|
||||
|
||||
- cheap preflight model
|
||||
- primary model
|
||||
- token/cost split
|
||||
|
||||
This matters because Paperclip's board UI is supposed to make cost and behavior legible.
|
||||
|
||||
## 9. Why Not Copy Hermes Exactly
|
||||
|
||||
Hermes' cheap-route heuristic is useful precedent, but Paperclip should not start there.
|
||||
|
||||
Reasons:
|
||||
|
||||
- Hermes is optimizing free-form conversational turns
|
||||
- Paperclip agents run structured, issue-scoped heartbeats with explicit task and workspace context
|
||||
- Paperclip already knows whether a run is fresh vs resumed, issue-scoped vs approval follow-up, and what workspace/session exists
|
||||
- those execution facts are stronger routing signals than prompt keyword matching
|
||||
|
||||
If Paperclip later wants a cheap-only completion path for trivial runs, that can be a second-stage feature built on observed run data, not the first implementation.
|
||||
|
||||
## 10. Risks
|
||||
|
||||
## 10.1 Duplicate or noisy comments
|
||||
|
||||
If the cheap phase posts an update and the primary phase posts another near-identical update, the issue thread gets worse.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- keep cheap comments optional
|
||||
- make the preflight prompt explicitly avoid repeating status if a useful comment was already posted
|
||||
|
||||
## 10.2 Misleading cost reporting
|
||||
|
||||
If we only record the primary model, the board loses visibility into the routing cost tradeoff.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- add segmented execution reporting before shipping adapter behavior
|
||||
|
||||
## 10.3 Session corruption
|
||||
|
||||
Cross-model session reuse may fail or degrade context quality.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- V1 does not persist or resume cheap preflight sessions
|
||||
|
||||
## 10.4 Cheap model overreach
|
||||
|
||||
A cheap model with full tools and permissions may do too much low-quality work.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- hard cap preflight turns
|
||||
- use an explicit orchestration-only prompt
|
||||
- start with supported adapters where we can test the behavior well
|
||||
|
||||
## 11. Verification Plan
|
||||
|
||||
Required tests:
|
||||
|
||||
- adapter unit tests for route eligibility
|
||||
- adapter unit tests for "fresh session -> cheap preflight + primary"
|
||||
- adapter unit tests for "resumed session -> primary only"
|
||||
- heartbeat tests for segmented cost-event creation
|
||||
- UI tests for config save/load of cheap-model fields
|
||||
|
||||
Manual checks:
|
||||
|
||||
- create a fresh issue for a routed Codex or Claude agent
|
||||
- verify the run metadata shows both phases
|
||||
- verify only the primary session is persisted
|
||||
- verify cost rows reflect both models
|
||||
- verify the issue thread does not get duplicate kickoff comments
|
||||
|
||||
## 12. Recommended Sequence
|
||||
|
||||
1. Add segmented execution reporting to the adapter/server contract.
|
||||
2. Implement `codex_local` cheap preflight.
|
||||
3. Validate cost visibility and transcript UX.
|
||||
4. Implement `claude_local` cheap preflight.
|
||||
5. Decide later whether any adapters need Hermes-style text heuristics in addition to phase-based routing.
|
||||
|
||||
## 13. Recommendation
|
||||
|
||||
Paperclip should ship smart model routing as:
|
||||
|
||||
- adapter-specific
|
||||
- opt-in
|
||||
- phase-based
|
||||
- session-safe
|
||||
- cost-truthful
|
||||
|
||||
The right V1 is not "choose the cheapest model for simple prompts." The right V1 is "use a cheap model for bounded orchestration work on fresh runs, then hand off to the primary model for the real task."
|
||||
209
doc/plans/2026-04-06-subissue-creation-on-issue-detail.md
Normal file
209
doc/plans/2026-04-06-subissue-creation-on-issue-detail.md
Normal file
@@ -0,0 +1,209 @@
|
||||
# 2026-04-06 Sub-issue Creation On Issue Detail Plan
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-04-06
|
||||
Audience: Product and engineering
|
||||
Related:
|
||||
- `ui/src/pages/IssueDetail.tsx`
|
||||
- `ui/src/components/IssueProperties.tsx`
|
||||
- `ui/src/components/NewIssueDialog.tsx`
|
||||
- `ui/src/context/DialogContext.tsx`
|
||||
- `packages/shared/src/validators/issue.ts`
|
||||
- `server/src/services/issues.ts`
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This document defines the implementation plan for adding manual sub-issue creation from the issue detail page.
|
||||
|
||||
Requested UX:
|
||||
|
||||
- the `Sub-issues` tab should always show an `Add sub-issue` action, even when there are no children yet
|
||||
- the properties pane should also expose a `Sub-issues` section with the same `Add sub-issue` entry point
|
||||
- both entry points should open the existing new-issue dialog in a "create sub-issue" mode
|
||||
- the dialog should only show sub-issue-specific UI when it was opened from one of those entry points
|
||||
|
||||
This is a UI-first change. The backend already supports child issue creation with `parentId`.
|
||||
|
||||
## 2. Current State
|
||||
|
||||
### 2.1 Existing child issue display
|
||||
|
||||
`ui/src/pages/IssueDetail.tsx` already derives `childIssues` by filtering the company issue list on `parentId === issue.id`.
|
||||
|
||||
Current limitation:
|
||||
|
||||
- the `Sub-issues` tab only renders the empty state or the child issue list
|
||||
- there is no action to create a child issue from that tab
|
||||
|
||||
### 2.2 Existing properties pane
|
||||
|
||||
`ui/src/components/IssueProperties.tsx` shows `Blocked by`, `Blocking`, and `Parent`, but it has no sub-issue section or child issue affordance.
|
||||
|
||||
### 2.3 Existing dialog state
|
||||
|
||||
`ui/src/context/DialogContext.tsx` can open the global new-issue dialog with defaults such as status, priority, project, assignee, title, and description.
|
||||
|
||||
Current limitation:
|
||||
|
||||
- there is no way to pass sub-issue context like `parentId`
|
||||
- `ui/src/components/NewIssueDialog.tsx` therefore cannot submit a child issue or render parent-specific context
|
||||
|
||||
### 2.4 Backend contract already exists
|
||||
|
||||
The create-issue validator already accepts `parentId`.
|
||||
|
||||
`server/src/services/issues.ts` already uses:
|
||||
|
||||
- `parentId` for parent-child issue relationships
|
||||
- `parentId` as the default workspace inheritance source when `inheritExecutionWorkspaceFromIssueId` is not provided
|
||||
|
||||
That means the required API and workspace inheritance behavior already exist. No server or schema change is required for the first pass.
|
||||
|
||||
## 3. Proposed Implementation
|
||||
|
||||
### 3.1 Extend dialog defaults for sub-issue context
|
||||
|
||||
Extend `NewIssueDefaults` in `ui/src/context/DialogContext.tsx` with:
|
||||
|
||||
- `parentId?: string`
|
||||
- optional parent display metadata for the dialog header, for example:
|
||||
- `parentIdentifier?: string`
|
||||
- `parentTitle?: string`
|
||||
|
||||
This keeps the dialog self-contained and avoids re-fetching parent context purely for presentation.
|
||||
|
||||
### 3.2 Add issue-detail entry points
|
||||
|
||||
Use `openNewIssue(...)` from `ui/src/pages/IssueDetail.tsx` in two places:
|
||||
|
||||
1. `Sub-issues` tab
|
||||
2. properties pane via props passed into `IssueProperties`
|
||||
|
||||
Both entry points should pass:
|
||||
|
||||
- `parentId: issue.id`
|
||||
- `parentIdentifier: issue.identifier ?? issue.id`
|
||||
- `parentTitle: issue.title`
|
||||
- `projectId: issue.projectId ?? undefined`
|
||||
|
||||
Using the current issue's `projectId` preserves the common expectation that sub-issues stay inside the same project unless the operator changes it in the dialog.
|
||||
|
||||
No special assignee default should be forced in V1.
|
||||
|
||||
### 3.3 Add a dedicated properties-pane section
|
||||
|
||||
Extend `IssueProperties` to accept:
|
||||
|
||||
- `childIssues: Issue[]`
|
||||
- `onCreateSubissue: () => void`
|
||||
|
||||
Render a new `Sub-issues` section near `Blocked by` / `Blocking`:
|
||||
|
||||
- if children exist, show compact links or pills to the existing sub-issues
|
||||
- always show an `Add sub-issue` button
|
||||
|
||||
This keeps the child issue affordance visible in the property area without requiring a generic parent selector.
|
||||
|
||||
### 3.4 Update the sub-issues tab layout
|
||||
|
||||
Refactor the `Sub-issues` tab in `IssueDetail` to render:
|
||||
|
||||
- a small header row with child count
|
||||
- an `Add sub-issue` button
|
||||
- the existing empty state or child issue list beneath it
|
||||
|
||||
This satisfies the requirement that the action is visible whether or not sub-issues already exist.
|
||||
|
||||
### 3.5 Add sub-issue mode to the new-issue dialog
|
||||
|
||||
Update `ui/src/components/NewIssueDialog.tsx` so that when `newIssueDefaults.parentId` is present:
|
||||
|
||||
- the dialog submits `parentId`
|
||||
- the header/button copy can switch to `New sub-issue` / `Create sub-issue`
|
||||
- a compact parent context row is shown, for example `Parent: PAP-1150 add the ability...`
|
||||
|
||||
Important constraint:
|
||||
|
||||
- this parent context row should only render when the dialog was opened with sub-issue defaults
|
||||
- opening the dialog from global create actions should remain unchanged and should not expose a generic parent control
|
||||
|
||||
That preserves the requested UX boundary: sub-issue creation is intentional, not part of the default create-issue surface.
|
||||
|
||||
### 3.6 Query invalidation and refresh behavior
|
||||
|
||||
No new data-fetch path is needed.
|
||||
|
||||
The existing create success handler in `NewIssueDialog` already invalidates:
|
||||
|
||||
- `queryKeys.issues.list(companyId)`
|
||||
- issue-related list badges
|
||||
|
||||
That should be enough for the parent `IssueDetail` view to recompute `childIssues` after creation because it derives children from the company issue list query.
|
||||
|
||||
If the detail page ever moves away from the full company issue list, this should be revisited, but it does not require additional work for the current architecture.
|
||||
|
||||
## 4. Implementation Order
|
||||
|
||||
1. Extend `DialogContext` issue defaults with sub-issue fields.
|
||||
2. Wire `IssueDetail` to open the dialog in sub-issue mode from the `Sub-issues` tab.
|
||||
3. Extend `IssueProperties` to display child issues and the `Add sub-issue` action.
|
||||
4. Update `NewIssueDialog` submission and header UI for sub-issue mode.
|
||||
5. Add UI tests for the new entry points and payload behavior.
|
||||
|
||||
## 5. Testing Plan
|
||||
|
||||
Add focused UI tests covering:
|
||||
|
||||
1. `IssueDetail`
|
||||
- `Sub-issues` tab shows `Add sub-issue` when there are zero children
|
||||
- clicking the action opens the dialog with parent defaults
|
||||
|
||||
2. `IssueProperties`
|
||||
- the properties pane renders the sub-issue section
|
||||
- `Add sub-issue` remains available when there are no child issues
|
||||
|
||||
3. `NewIssueDialog`
|
||||
- when opened with `parentId`, submit payload includes `parentId`
|
||||
- sub-issue-specific copy appears only in that mode
|
||||
- when opened normally, no parent UI is shown and payload is unchanged
|
||||
|
||||
No backend test expansion is required unless implementation discovers a client/server contract gap.
|
||||
|
||||
## 6. Risks And Decisions
|
||||
|
||||
### 6.1 Parent metadata source
|
||||
|
||||
Decision: pass parent label metadata through dialog defaults instead of making `NewIssueDialog` fetch the parent issue.
|
||||
|
||||
Reason:
|
||||
|
||||
- less coupling
|
||||
- no loading state inside the dialog
|
||||
- simpler tests
|
||||
|
||||
### 6.2 Project inheritance
|
||||
|
||||
Decision: prefill `projectId` from the parent issue, but keep it editable.
|
||||
|
||||
Reason:
|
||||
|
||||
- matches expected operator behavior
|
||||
- avoids silently moving a sub-issue outside the current project by default
|
||||
|
||||
### 6.3 Keep parent selection out of the generic dialog
|
||||
|
||||
Decision: do not add a freeform parent picker in this change.
|
||||
|
||||
Reason:
|
||||
|
||||
- the request explicitly wants sub-issue controls only when the flow starts from a sub-issue action
|
||||
- this keeps the default issue creation surface simpler
|
||||
|
||||
## 7. Success Criteria
|
||||
|
||||
This plan is complete when an operator can:
|
||||
|
||||
1. open any issue detail page
|
||||
2. click `Add sub-issue` from either the `Sub-issues` tab or the properties pane
|
||||
3. land in the existing new-issue dialog with clear parent context
|
||||
4. create the child issue and see it appear under the parent without a page reload
|
||||
@@ -0,0 +1,302 @@
|
||||
# 2026-04-07 Issue Detail Speed And Optimistic Inventory
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-04-07
|
||||
Audience: Product and engineering
|
||||
Related:
|
||||
- `ui/src/pages/IssueDetail.tsx`
|
||||
- `ui/src/components/IssueProperties.tsx`
|
||||
- `ui/src/api/issues.ts`
|
||||
- `ui/src/lib/queryKeys.ts`
|
||||
- `server/src/routes/issues.ts`
|
||||
- `server/src/services/issues.ts`
|
||||
- [PAP-1192](/PAP/issues/PAP-1192)
|
||||
- [PAP-1191](/PAP/issues/PAP-1191)
|
||||
- [PAP-1188](/PAP/issues/PAP-1188)
|
||||
- [PAP-1119](/PAP/issues/PAP-1119)
|
||||
- [PAP-945](/PAP/issues/PAP-945)
|
||||
- [PAP-1165](/PAP/issues/PAP-1165)
|
||||
- [PAP-890](/PAP/issues/PAP-890)
|
||||
- [PAP-254](/PAP/issues/PAP-254)
|
||||
- [PAP-138](/PAP/issues/PAP-138)
|
||||
|
||||
## 1. Purpose
|
||||
|
||||
This note inventories the Paperclip issues that point to the same UX class of problem:
|
||||
|
||||
- pages feel slow because they over-fetch or refetch too much
|
||||
- actions feel slow because the UI waits for the round trip before reflecting obvious local intent
|
||||
- optimistic updates exist in some places, but not in a consistent system
|
||||
|
||||
The immediate trigger is [PAP-1192](/PAP/issues/PAP-1192): the issue detail page now feels very slow.
|
||||
|
||||
## 2. Short Answer
|
||||
|
||||
The issue detail page is not obviously blocked by one pathological endpoint. The main problem is the shape of the page:
|
||||
|
||||
- `IssueDetail` fans out into many independent queries on mount
|
||||
- some of those queries fetch full company-wide collections for data that is local to one issue
|
||||
- common mutations invalidate almost every issue-related query, which creates avoidable refetch storms
|
||||
- the page has only a minimal top-level `Loading...` fallback and very little staged or sectional loading UX
|
||||
|
||||
Measured against the current assigned issue (`PAP-1191`) on local dev, the slowest single request was the full company issues list:
|
||||
|
||||
- `GET /api/issues/:id` about `18ms`
|
||||
- `GET /api/issues/:id/comments|activity|approvals|attachments` about `6-8ms`
|
||||
- `GET /api/companies/:companyId/agents|projects` about `9-11ms`
|
||||
- `GET /api/companies/:companyId/issues` about `76ms`
|
||||
|
||||
That strongly suggests the current pain is aggregate client fan-out plus over-broad invalidation, not one obviously broken endpoint.
|
||||
|
||||
## 3. Similar Issue Inventory
|
||||
|
||||
## 3.1 Issue-detail and issue-action siblings
|
||||
|
||||
- [PAP-1192](/PAP/issues/PAP-1192): issue page feels like it loads forever
|
||||
- [PAP-1188](/PAP/issues/PAP-1188): assignee changes in the issue properties pane were slow and needed optimistic UI
|
||||
- [PAP-945](/PAP/issues/PAP-945): optimistic comment rendering
|
||||
- [PAP-1003](/PAP/issues/PAP-1003): optimistic comments had duplicate draft/pending behavior
|
||||
- [PAP-947](/PAP/issues/PAP-947): follow-up breakage from optimistic comments
|
||||
- [PAP-254](/PAP/issues/PAP-254): long issue threads become sluggish when adding comments
|
||||
- [PAP-189](/PAP/issues/PAP-189): comment semantics while an issue has a live run
|
||||
|
||||
Pattern: the issue page already has a history of needing both optimistic behavior and bounded thread/loading behavior. `PAP-1192` is the same family, not a new category.
|
||||
|
||||
## 3.2 Inbox and list-view siblings
|
||||
|
||||
- [PAP-1119](/PAP/issues/PAP-1119): optimistic archive had fade-out then snap-back
|
||||
- [PAP-1165](/PAP/issues/PAP-1165): issue search slow
|
||||
- [PAP-890](/PAP/issues/PAP-890): issue search slow, make it very fast
|
||||
- [PAP-138](/PAP/issues/PAP-138): inbox loading feels stuck
|
||||
- [PAP-470](/PAP/issues/PAP-470): create-issue save state felt slow and awkward
|
||||
|
||||
Pattern: Paperclip already has several places where the right fix was "show intent immediately, then reconcile," not "wait for refetch."
|
||||
|
||||
## 3.3 Broader app-loading siblings
|
||||
|
||||
- [PAP-472](/PAP/issues/PAP-472): dashboard charts load very slowly
|
||||
- [PAP-797](/PAP/issues/PAP-797): reduce loading states through static generation/caching where possible
|
||||
- [PAP-799](/PAP/issues/PAP-799): embed company data at build time to eliminate loading states
|
||||
- [PAP-703](/PAP/issues/PAP-703): faster chat and better visual feedback
|
||||
|
||||
Pattern: the product has recurring pressure to reduce blank/loading states across the app, so the issue-detail work should fit that broader direction.
|
||||
|
||||
## 4. Current Issue Detail Findings
|
||||
|
||||
## 4.1 Mount query fan-out is high
|
||||
|
||||
`ui/src/pages/IssueDetail.tsx` mounts all of these data sources up front:
|
||||
|
||||
- issue detail
|
||||
- comments
|
||||
- activity
|
||||
- linked runs
|
||||
- linked approvals
|
||||
- attachments
|
||||
- live runs
|
||||
- active run
|
||||
- full company issues list
|
||||
- agents list
|
||||
- auth session
|
||||
- projects list
|
||||
- feedback votes
|
||||
- instance general settings
|
||||
- plugin slots
|
||||
|
||||
This is too much for the initial view of a single issue.
|
||||
|
||||
## 4.2 The page fetches full company issue data just to derive child issues
|
||||
|
||||
`IssueDetail` currently does:
|
||||
|
||||
- `issuesApi.list(selectedCompanyId!)`
|
||||
- then filters client-side for `parentId === issue.id`
|
||||
|
||||
That is expensive relative to the need.
|
||||
|
||||
Important detail:
|
||||
|
||||
- the server route already supports `parentId`
|
||||
- `server/src/services/issues.ts` already supports `parentId`
|
||||
- but `ui/src/api/issues.ts` does not expose `parentId` in the filter type
|
||||
|
||||
So the client is missing an already-supported narrow query path.
|
||||
|
||||
## 4.3 Comments are still fetched as full-thread loads
|
||||
|
||||
`server/src/routes/issues.ts` and `server/src/services/issues.ts` already support:
|
||||
|
||||
- `after`
|
||||
- `order`
|
||||
- `limit`
|
||||
|
||||
But `IssueDetail` still calls `issuesApi.listComments(issueId)` with no cursor or limit and then re-invalidates the full thread after common comment actions.
|
||||
|
||||
That means we already have the server-side building blocks for incremental comment loading, but the page is not using them.
|
||||
|
||||
## 4.4 Cache invalidation is broader than necessary
|
||||
|
||||
`invalidateIssue()` in `IssueDetail` invalidates:
|
||||
|
||||
- detail
|
||||
- activity
|
||||
- runs
|
||||
- approvals
|
||||
- feedback votes
|
||||
- attachments
|
||||
- documents
|
||||
- live runs
|
||||
- active run
|
||||
- multiple issue collections
|
||||
- sidebar badges
|
||||
|
||||
That is acceptable for correctness, but it is expensive for perceived speed and makes optimistic work feel less stable because the page keeps re-painting from fresh network results.
|
||||
|
||||
## 4.5 Live run state is fetched twice
|
||||
|
||||
The page polls both:
|
||||
|
||||
- `issues.liveRuns(issueId)` every 3s
|
||||
- `issues.activeRun(issueId)` every 3s
|
||||
|
||||
That is duplicate polling for closely related state.
|
||||
|
||||
## 4.6 Properties panel duplicates more list fetching
|
||||
|
||||
`ui/src/components/IssueProperties.tsx` fetches:
|
||||
|
||||
- session
|
||||
- agents list
|
||||
- projects list
|
||||
- labels
|
||||
- and, when the blocker picker opens, the full company issues list
|
||||
|
||||
The page and panel are each doing their own list work instead of sharing a narrower issue-detail data model.
|
||||
|
||||
## 4.7 The perceived loading UX is too thin
|
||||
|
||||
`IssueDetail` only shows:
|
||||
|
||||
- plain `Loading...` while the main issue query is pending
|
||||
|
||||
After that, many sub-sections can appear empty or incomplete until their own queries resolve. That makes the page feel slower than the raw request times suggest.
|
||||
|
||||
## 5. Recommended Plan
|
||||
|
||||
## 5.1 Phase 1: Fix perceived speed first
|
||||
|
||||
Ship UX changes that make the page feel immediate before deeper backend reshaping:
|
||||
|
||||
- replace the plain `Loading...` state with an issue-detail skeleton
|
||||
- give comments, activity, attachments, and sub-issues their own skeleton/empty/loading states
|
||||
- preserve visible stale data during refetch instead of clearing sections
|
||||
- show explicit pending state for local actions that are already optimistic
|
||||
|
||||
Why first:
|
||||
|
||||
- it improves the user-facing feel immediately
|
||||
- it reduces the risk that later data-layer changes still feel slow simply because the page flashes blank while refetching
|
||||
|
||||
## 5.2 Phase 2: Stop fetching the full company issues list for child issues
|
||||
|
||||
Add `parentId` to the `issuesApi.list(...)` filter type and switch `IssueDetail` to:
|
||||
|
||||
- fetch child issues only
|
||||
- stop loading the full company issue collection on page mount
|
||||
|
||||
This is the highest-confidence narrow win because the server path already exists.
|
||||
|
||||
## 5.3 Phase 3: Convert comments to a bounded + incremental model
|
||||
|
||||
Use the existing server support for:
|
||||
|
||||
- latest comment cursor from heartbeat context or issue bootstrap
|
||||
- incremental fetch with `after`
|
||||
- bounded initial fetch with `limit`
|
||||
|
||||
Suggested behavior:
|
||||
|
||||
- first load: fetch the latest N comments
|
||||
- offer `load earlier` for long threads
|
||||
- after posting or on live updates: append incrementally instead of invalidating the whole thread
|
||||
|
||||
This should address the same performance family as [PAP-254](/PAP/issues/PAP-254).
|
||||
|
||||
## 5.4 Phase 4: Reduce duplicate polling and invalidation
|
||||
|
||||
Tighten the runtime side of the page:
|
||||
|
||||
- collapse `liveRuns` and `activeRun` into one client source if possible
|
||||
- stop invalidating unrelated issue collections after mutations that only affect the current issue
|
||||
- merge server responses into cache where we already have enough information
|
||||
|
||||
Examples:
|
||||
|
||||
- posting a comment should not force a broad company issue list refetch unless list-visible metadata changed
|
||||
- attachment changes should not invalidate approvals or unrelated live-run queries
|
||||
|
||||
## 5.5 Phase 5: Consider an issue-detail bootstrap contract
|
||||
|
||||
If the page is still too chatty after the client fixes, add one tailored bootstrap surface for the issue detail page.
|
||||
|
||||
Potential bootstrap payload:
|
||||
|
||||
- issue core data
|
||||
- child issue summaries
|
||||
- latest comment cursor and recent comment page
|
||||
- live run summary
|
||||
- attachment summaries
|
||||
- approval summaries
|
||||
- any lightweight mention/selector metadata truly needed at first paint
|
||||
|
||||
This should happen after the obvious client overfetch fixes, not before.
|
||||
|
||||
## 6. Concrete Opportunities By Surface
|
||||
|
||||
## 6.1 Issue detail page
|
||||
|
||||
- narrow child issue fetch from full list to `parentId`
|
||||
- stage loading section by section instead of presenting an all-or-nothing loading state
|
||||
- bound initial comments payload
|
||||
- reduce duplicate live-run polling
|
||||
- replace broad invalidation with targeted cache writes
|
||||
|
||||
## 6.2 Issue properties panel
|
||||
|
||||
- reuse page-level agents/projects data where possible
|
||||
- fetch blockers lazily and narrowly
|
||||
- keep local optimistic field updates without broad page invalidation
|
||||
|
||||
## 6.3 Thread/comment UX
|
||||
|
||||
- append optimistic comments directly into the visible thread
|
||||
- keep queued/pending comment state stable during reconciliation
|
||||
- fetch only new comments after the last known cursor
|
||||
|
||||
## 6.4 Cross-app optimistic consistency
|
||||
|
||||
The same standards should apply to:
|
||||
|
||||
- issue archive/unarchive
|
||||
- issue property edits
|
||||
- create issue/sub-issue flows
|
||||
- comment posting
|
||||
- attachment/document actions where the local result is obvious
|
||||
|
||||
## 7. Suggested Execution Order
|
||||
|
||||
1. `PAP-1192`: issue-detail skeletons and staged loading
|
||||
2. add `parentId` support to `ui/src/api/issues.ts` and switch child-issue fetching to a narrow query
|
||||
3. move comments to bounded initial load plus incremental updates
|
||||
4. shrink invalidation and polling scope
|
||||
5. only then decide whether a new issue-detail bootstrap endpoint is still needed
|
||||
|
||||
## 8. Success Criteria
|
||||
|
||||
This inventory is successful if the follow-up implementation makes the issue page behave like this:
|
||||
|
||||
1. navigating to an issue shows a shaped skeleton immediately, not plain text
|
||||
2. the page no longer fetches the full company issue list just to render sub-issues
|
||||
3. long threads do not require full-thread fetches on every load or comment mutation
|
||||
4. local actions feel immediate and do not snap back because of broad invalidation
|
||||
5. the issue page feels faster even when absolute backend timings are already reasonable
|
||||
248
doc/plans/2026-04-07-pi-hooks-survey.md
Normal file
248
doc/plans/2026-04-07-pi-hooks-survey.md
Normal file
@@ -0,0 +1,248 @@
|
||||
# Pi Hook Survey
|
||||
|
||||
Status: investigation note
|
||||
Date: 2026-04-07
|
||||
|
||||
## Why this exists
|
||||
|
||||
We were asked to find the hook surfaces exposed by `pi` and `pi-mono`, then decide which ideas transfer cleanly into Paperclip.
|
||||
|
||||
This note is based on direct source inspection of:
|
||||
|
||||
- `badlogic/pi` default branch and `pi2` branch
|
||||
- `badlogic/pi-mono` `packages/coding-agent`
|
||||
- current Paperclip plugin and adapter surfaces in this repo
|
||||
|
||||
## Short answer
|
||||
|
||||
- Current `pi` does not expose a comparable extension hook API. What it exposes today is a JSON event stream from `pi-agent`.
|
||||
- `pi-mono` does expose a real extension hook system. It is broad, typed, and intentionally allows mutation of agent/runtime behavior.
|
||||
- Paperclip should copy only the safe subset:
|
||||
- typed event subscriptions
|
||||
- read-only run lifecycle events
|
||||
- explicit worker lifecycle hooks
|
||||
- plugin-to-plugin events
|
||||
- Paperclip should not copy the dangerous subset:
|
||||
- arbitrary mutation hooks on core control-plane decisions
|
||||
- project-local plugin loading
|
||||
- built-in tool shadowing by name collision
|
||||
|
||||
## What `pi` has today
|
||||
|
||||
Current `badlogic/pi` is primarily a GPU pod manager plus a lightweight agent runner. It does not expose a `pi.on(...)`-style extension API like `pi-mono`.
|
||||
|
||||
The closest thing to hooks is the `pi-agent --json` event stream:
|
||||
|
||||
- `session_start`
|
||||
- `user_message`
|
||||
- `assistant_start`
|
||||
- `assistant_message`
|
||||
- `thinking`
|
||||
- `tool_call`
|
||||
- `tool_result`
|
||||
- `token_usage`
|
||||
- `error`
|
||||
- `interrupted`
|
||||
|
||||
That makes `pi` useful as an event producer, but not as a host for third-party runtime interception.
|
||||
|
||||
## What `pi-mono` has
|
||||
|
||||
`pi-mono` exposes a real extension API through `packages/coding-agent/src/core/extensions/types.ts`.
|
||||
|
||||
### Extension event hooks
|
||||
|
||||
Verified `pi.on(...)` hook names:
|
||||
|
||||
- `resources_discover`
|
||||
- `session_start`
|
||||
- `session_before_switch`
|
||||
- `session_before_fork`
|
||||
- `session_before_compact`
|
||||
- `session_compact`
|
||||
- `session_shutdown`
|
||||
- `session_before_tree`
|
||||
- `session_tree`
|
||||
- `context`
|
||||
- `before_provider_request`
|
||||
- `before_agent_start`
|
||||
- `agent_start`
|
||||
- `agent_end`
|
||||
- `turn_start`
|
||||
- `turn_end`
|
||||
- `message_start`
|
||||
- `message_update`
|
||||
- `message_end`
|
||||
- `tool_execution_start`
|
||||
- `tool_execution_update`
|
||||
- `tool_execution_end`
|
||||
- `model_select`
|
||||
- `tool_call`
|
||||
- `tool_result`
|
||||
- `user_bash`
|
||||
- `input`
|
||||
|
||||
### Other extension surfaces
|
||||
|
||||
`pi-mono` extensions can also:
|
||||
|
||||
- `registerTool(...)`
|
||||
- `registerCommand(...)`
|
||||
- `registerShortcut(...)`
|
||||
- `registerFlag(...)`
|
||||
- `registerMessageRenderer(...)`
|
||||
- `registerProvider(...)`
|
||||
- `unregisterProvider(...)`
|
||||
- use an inter-extension event bus via `pi.events`
|
||||
|
||||
### Important behavior
|
||||
|
||||
`pi-mono` hooks are not just observers. Several can actively mutate behavior:
|
||||
|
||||
- `before_agent_start` can rewrite the effective system prompt and inject messages
|
||||
- `context` can replace the message set before an LLM call
|
||||
- `before_provider_request` can rewrite the serialized provider payload
|
||||
- `tool_call` can mutate tool inputs and block execution
|
||||
- `tool_result` can rewrite tool output
|
||||
- `user_bash` can replace shell execution entirely
|
||||
- `input` can transform or fully handle user input before normal processing
|
||||
|
||||
That is a good fit for a local coding harness. It is not automatically a good fit for a company control plane.
|
||||
|
||||
## What Paperclip already has
|
||||
|
||||
Paperclip already has several hook-like surfaces, but they are much narrower and safer:
|
||||
|
||||
- plugin worker lifecycle hooks such as `setup()` and `onHealth()`
|
||||
- declared webhook endpoints for plugins
|
||||
- scheduled jobs
|
||||
- a typed plugin event bus with filtering and plugin namespacing
|
||||
- adapter runtime hooks for logs/status/usage in the run pipeline
|
||||
|
||||
The plugin event bus is already pointed in the right direction:
|
||||
|
||||
- core domain events can be subscribed to
|
||||
- filters are applied server-side
|
||||
- plugin-emitted events are namespaced under `plugin.<pluginId>.*`
|
||||
- plugins do not override core behavior by name collision
|
||||
|
||||
## What transfers well to Paperclip
|
||||
|
||||
These ideas from `pi-mono` fit Paperclip with little conceptual risk:
|
||||
|
||||
### 1. Read-only run lifecycle subscriptions
|
||||
|
||||
Paperclip should continue exposing run and transcript events to plugins, for example:
|
||||
|
||||
- run started / finished
|
||||
- tool started / finished
|
||||
- usage reported
|
||||
- issue comment created
|
||||
|
||||
This matches Paperclip's control-plane posture: observe, react, automate.
|
||||
|
||||
### 2. Plugin-to-plugin events
|
||||
|
||||
Paperclip already has this. It is worth keeping and extending.
|
||||
|
||||
This is the clean replacement for many ad hoc hook chains.
|
||||
|
||||
### 3. Explicit worker lifecycle hooks
|
||||
|
||||
Paperclip already has `setup()` and `onHealth()`. That is the right shape.
|
||||
|
||||
If more lifecycle is needed, it should stay explicit and host-controlled.
|
||||
|
||||
### 4. Trusted adapter-level prompt/runtime middleware
|
||||
|
||||
Some `pi-mono` ideas do belong in Paperclip, but only inside trusted adapter/runtime code:
|
||||
|
||||
- prompt shaping before a run starts
|
||||
- provider request customization
|
||||
- tool execution wrappers for local coding adapters
|
||||
|
||||
This should be an adapter surface, not a general company plugin surface.
|
||||
|
||||
## What should not transfer directly
|
||||
|
||||
These `pi-mono` capabilities are a bad fit for Paperclip core:
|
||||
|
||||
### 1. Arbitrary mutation hooks on control-plane decisions
|
||||
|
||||
Paperclip should not let general plugins rewrite:
|
||||
|
||||
- issue checkout semantics
|
||||
- approval outcomes
|
||||
- budget enforcement
|
||||
- assignment rules
|
||||
- company scoping
|
||||
|
||||
Those are core invariants.
|
||||
|
||||
### 2. Tool shadowing by name collision
|
||||
|
||||
`pi-mono`'s low-friction override model is great for a personal coding harness.
|
||||
|
||||
Paperclip should keep plugin tools namespaced and non-shadowing.
|
||||
|
||||
### 3. Project-local plugin loading
|
||||
|
||||
Paperclip is an operator-controlled control plane. Repo-local plugin auto-loading would make behavior too implicit and too hard to govern.
|
||||
|
||||
### 4. UI-session-specific hooks as first-class product surface
|
||||
|
||||
Hooks like:
|
||||
|
||||
- `session_before_switch`
|
||||
- `session_before_fork`
|
||||
- `session_before_tree`
|
||||
- `model_select`
|
||||
- `input`
|
||||
- `user_bash`
|
||||
|
||||
are tied to `pi-mono` being an interactive terminal coding harness.
|
||||
|
||||
They do not map directly to Paperclip's board-and-issues model.
|
||||
|
||||
## Recommended Paperclip direction
|
||||
|
||||
If we want a "hooks" story inspired by `pi-mono`, it should split into two layers:
|
||||
|
||||
### Layer 1: safe control-plane plugins
|
||||
|
||||
Allowed surfaces:
|
||||
|
||||
- typed domain event subscriptions
|
||||
- jobs
|
||||
- webhooks
|
||||
- plugin-to-plugin events
|
||||
- UI slots and bridge actions
|
||||
- plugin-owned tools and data endpoints
|
||||
|
||||
Disallowed:
|
||||
|
||||
- mutation of core issue/approval/budget invariants
|
||||
|
||||
### Layer 2: trusted runtime middleware
|
||||
|
||||
For adapters and other trusted runtime packages only:
|
||||
|
||||
- prompt assembly hooks
|
||||
- provider payload hooks
|
||||
- tool execution wrappers
|
||||
- transcript rendering helpers
|
||||
|
||||
This is where the best `pi-mono` runtime ideas belong.
|
||||
|
||||
## Bottom line
|
||||
|
||||
If the question is "what hooks do `pi` and `pi-mono` have?":
|
||||
|
||||
- `pi`: JSON output events, not a general extension hook system
|
||||
- `pi-mono`: a broad extension hook API with 27 named event hooks plus tool/command/provider registration
|
||||
|
||||
If the question is "what works for Paperclip too?":
|
||||
|
||||
- yes: typed event subscriptions, worker lifecycle hooks, namespaced plugin events, read-only run lifecycle events
|
||||
- maybe, but trusted-only: prompt/provider/tool middleware around adapter execution
|
||||
- no: arbitrary mutation hooks on control-plane invariants, project-local plugin loading, tool shadowing
|
||||
238
doc/plans/2026-04-08-agent-browser-process-cleanup-plan.md
Normal file
238
doc/plans/2026-04-08-agent-browser-process-cleanup-plan.md
Normal file
@@ -0,0 +1,238 @@
|
||||
# PAP-1231 Agent Browser Process Cleanup Plan
|
||||
|
||||
Status: Proposed
|
||||
Date: 2026-04-08
|
||||
Related issue: `PAP-1231`
|
||||
Audience: Engineering
|
||||
|
||||
## Goal
|
||||
|
||||
Explain why browser processes accumulate during local agent runs and define a cleanup plan that fixes the general process-ownership problem rather than treating `agent-browser` as a one-off.
|
||||
|
||||
## Short answer
|
||||
|
||||
Yes, there is a likely root cause in Paperclip's local execution model.
|
||||
|
||||
Today, heartbeat-run local adapters persist and manage only the top-level spawned PID. Their timeout/cancel path uses direct `child.kill()` semantics. That is weaker than the runtime-service path, which already tracks and terminates whole process groups.
|
||||
|
||||
If Codex, Claude, Cursor, or a skill launched through them starts Chrome or Chromium helpers, Paperclip can lose ownership of those descendants even when it still believes it handled the run correctly.
|
||||
|
||||
## Observed implementation facts
|
||||
|
||||
### 1. Heartbeat-run local adapters track only one PID
|
||||
|
||||
`packages/adapter-utils/src/server-utils.ts`
|
||||
|
||||
- `runChildProcess()` spawns the adapter command and records only `child.pid`
|
||||
- timeout handling sends `SIGTERM` and then `SIGKILL` to the direct child
|
||||
- there is no process-group creation or process-group kill path there today
|
||||
|
||||
`packages/db/src/schema/heartbeat_runs.ts`
|
||||
|
||||
- `heartbeat_runs` stores `process_pid`
|
||||
- there is no persisted `process_group_id`
|
||||
|
||||
`server/src/services/heartbeat.ts`
|
||||
|
||||
- cancellation logic uses the in-memory child handle and calls `child.kill()`
|
||||
- orphaned-run recovery checks whether the recorded direct PID is alive
|
||||
- the recovery model is built around one tracked process, not a descendant tree
|
||||
|
||||
### 2. Workspace runtime already uses stronger ownership
|
||||
|
||||
`server/src/services/workspace-runtime.ts`
|
||||
|
||||
- runtime services are spawned with `detached: process.platform !== "win32"`
|
||||
- the service record stores `processGroupId`
|
||||
- shutdown calls `terminateLocalService()` with group-aware killing
|
||||
|
||||
`server/src/services/local-service-supervisor.ts`
|
||||
|
||||
- `terminateLocalService()` prefers `process.kill(-processGroupId, signal)` on POSIX
|
||||
- it escalates from `SIGTERM` to `SIGKILL`
|
||||
|
||||
This is the clearest internal comparison point: Paperclip already has one local-process subsystem that treats process-group ownership as the right abstraction.
|
||||
|
||||
### 3. The current recovery path explains why leaks would be visible but hard to reason about
|
||||
|
||||
If the direct adapter process exits, hangs, or is cancelled after launching a browser subtree:
|
||||
|
||||
- Paperclip may think it cancelled the run because the parent process is gone
|
||||
- descendant Chrome helpers may still be running
|
||||
- orphan recovery has no persisted process-group identity to reconcile or reap later
|
||||
|
||||
That makes the failure look like an `agent-browser` problem when the more general bug is "executor descendants are not owned strongly enough."
|
||||
|
||||
## Why `agent-browser` makes the problem obvious
|
||||
|
||||
Inference:
|
||||
|
||||
- Chromium is intentionally multi-process
|
||||
- browser automation often leaves a browser process plus renderer, GPU, utility, and crashpad/helper children
|
||||
- skills that open browsers repeatedly amplify the symptom because each run can produce several descendant processes
|
||||
|
||||
So `agent-browser` is probably not the root cause. It is the workload that exposes the weak ownership model fastest.
|
||||
|
||||
## Success condition
|
||||
|
||||
This work is successful when Paperclip can:
|
||||
|
||||
1. start a local adapter run and own the full descendant tree it created
|
||||
2. cancel, timeout, or recover that run without leaving Chrome descendants behind on POSIX
|
||||
3. detect and clean up stale local descendants after server restarts
|
||||
4. expose enough metadata that operators can see which run owns which spawned process tree
|
||||
|
||||
## Non-goals
|
||||
|
||||
Do not:
|
||||
|
||||
- special-case `agent-browser` only
|
||||
- depend on manual `pkill chrome` cleanup as the primary fix
|
||||
- require every skill author to add bespoke browser teardown logic before Paperclip can clean up correctly
|
||||
- change remote/http adapter behavior as part of the first pass
|
||||
|
||||
## Proposed plan
|
||||
|
||||
### Phase 0: reproduce and instrument
|
||||
|
||||
Objective:
|
||||
|
||||
- make the leak measurable from Paperclip's side before changing execution semantics
|
||||
|
||||
Work:
|
||||
|
||||
- add a reproducible local test script or fixture that launches a child process which itself launches descendants and ignores normal parent exit
|
||||
- capture parent PID, descendant PIDs, and run ID in logs during local adapter execution
|
||||
- document current behavior separately for:
|
||||
- normal completion
|
||||
- timeout
|
||||
- explicit cancellation
|
||||
- server restart during run
|
||||
|
||||
Deliverable:
|
||||
|
||||
- one short repro note attached to the implementation issue or child issue
|
||||
|
||||
### Phase 1: give heartbeat-run local adapters process-group ownership
|
||||
|
||||
Objective:
|
||||
|
||||
- align adapter-run execution with the stronger runtime-service model
|
||||
|
||||
Work:
|
||||
|
||||
- update `runChildProcess()` to create a dedicated process group on POSIX
|
||||
- persist both:
|
||||
- direct PID
|
||||
- process-group ID
|
||||
- update the run cancellation and timeout paths to kill the group first, then escalate
|
||||
- keep direct-PID fallback behavior for platforms where group kill is not available
|
||||
|
||||
Likely touched surfaces:
|
||||
|
||||
- `packages/adapter-utils/src/server-utils.ts`
|
||||
- `packages/db/src/schema/heartbeat_runs.ts`
|
||||
- `packages/shared/src/types/heartbeat.ts`
|
||||
- `server/src/services/heartbeat.ts`
|
||||
|
||||
Important design choice:
|
||||
|
||||
- use the same ownership model for all local child-process adapters, not just Codex or Claude
|
||||
|
||||
### Phase 2: make restart recovery group-aware
|
||||
|
||||
Objective:
|
||||
|
||||
- prevent stale descendants from surviving server crashes or restarts indefinitely
|
||||
|
||||
Work:
|
||||
|
||||
- teach orphan reconciliation to inspect the persisted process-group ID, not only the direct PID
|
||||
- if the direct parent is gone but the group still exists, mark the run as detached-orphaned with clearer metadata
|
||||
- decide whether restart recovery should:
|
||||
- adopt the still-running group, or
|
||||
- terminate it as unrecoverable
|
||||
|
||||
Recommendation:
|
||||
|
||||
- for heartbeat runs, prefer terminating unrecoverable orphan groups rather than adopting them unless we can prove the adapter session remains safe and observable
|
||||
|
||||
Reason:
|
||||
|
||||
- runtime services are long-lived and adoptable
|
||||
- heartbeat runs are task executions with stricter audit and cancellation semantics
|
||||
|
||||
### Phase 3: add operator-visible cleanup tools
|
||||
|
||||
Objective:
|
||||
|
||||
- make the system diagnosable when ownership still fails
|
||||
|
||||
Work:
|
||||
|
||||
- surface the tracked process metadata in run details or debug endpoints
|
||||
- add a control-plane cleanup action or CLI utility for stale local run processes owned by Paperclip
|
||||
- scope cleanup by run/agent/company instead of broad browser-name matching
|
||||
|
||||
This should replace ad hoc scripts as the general-purpose escape hatch.
|
||||
|
||||
### Phase 4: cover platform and regression cases
|
||||
|
||||
Objective:
|
||||
|
||||
- keep the fix from regressing and define platform behavior explicitly
|
||||
|
||||
Tests to add:
|
||||
|
||||
- unit tests around process-group-aware cancellation in adapter execution utilities
|
||||
- heartbeat recovery tests for:
|
||||
- surviving descendant tree after parent loss
|
||||
- timeout cleanup
|
||||
- cancellation cleanup
|
||||
- platform-conditional behavior notes for Windows, where negative-PID group kill does not apply
|
||||
|
||||
## Recommended first implementation slice
|
||||
|
||||
The first shipping slice should be narrow:
|
||||
|
||||
1. introduce process-group ownership for local heartbeat-run adapters on POSIX
|
||||
2. persist group metadata on `heartbeat_runs`
|
||||
3. switch timeout/cancel paths from direct-child kill to group kill
|
||||
4. add one regression test that proves descendants die with the parent run
|
||||
|
||||
That should address the main Chrome accumulation path without taking on the full restart-recovery design in the same patch.
|
||||
|
||||
## Risks
|
||||
|
||||
### 1. Over-killing unrelated processes
|
||||
|
||||
If process-group boundaries are created incorrectly, cleanup could terminate more than the run owns.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- create a fresh process group only for the spawned adapter command
|
||||
- persist and target that exact group
|
||||
|
||||
### 2. Cross-platform differences
|
||||
|
||||
Windows does not support the POSIX negative-PID kill pattern used elsewhere in the repo.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- ship POSIX-first
|
||||
- keep direct-child fallback on Windows
|
||||
- document Windows as partial until job-object or equivalent handling is designed
|
||||
|
||||
### 3. Session recovery complexity
|
||||
|
||||
Adopting a still-running orphaned group may look attractive but can break observability if stdout/stderr pipes are already gone.
|
||||
|
||||
Mitigation:
|
||||
|
||||
- default to deterministic cleanup for heartbeat runs unless adoption is explicitly proven safe
|
||||
|
||||
## Recommendation
|
||||
|
||||
Treat this as a Paperclip executor ownership bug, not an `agent-browser` bug.
|
||||
|
||||
`agent-browser` should remain a useful repro case, but the implementation should be shared across all local child-process adapters so any descendant process tree spawned by Codex, Claude, Cursor, Gemini, Pi, or OpenCode is owned and cleaned up consistently.
|
||||
261
doc/plans/2026-04-08-agent-os-follow-up-plan.md
Normal file
261
doc/plans/2026-04-08-agent-os-follow-up-plan.md
Normal file
@@ -0,0 +1,261 @@
|
||||
# PAP-1229 Agent OS Follow-up Plan
|
||||
|
||||
Date: 2026-04-08
|
||||
Related issue: `PAP-1229`
|
||||
Companion analysis: `doc/plans/2026-04-08-agent-os-technical-report.md`
|
||||
|
||||
## Goal
|
||||
|
||||
Turn the `agent-os` research into a low-risk Paperclip execution plan that preserves Paperclip's control-plane model while testing the few runtime ideas that appear worth adopting.
|
||||
|
||||
## Decision summary
|
||||
|
||||
Paperclip should not absorb `agent-os` as a product model or orchestration layer.
|
||||
|
||||
Paperclip should evaluate `agent-os` in three narrow areas:
|
||||
|
||||
1. optional agent runtime for selected local adapters
|
||||
2. capability-based runtime permission vocabulary
|
||||
3. snapshot-backed disposable execution roots
|
||||
|
||||
Everything else should stay out of scope unless those three experiments produce strong evidence.
|
||||
|
||||
## Success condition
|
||||
|
||||
This work is successful when Paperclip has:
|
||||
|
||||
- a clear yes/no answer on whether `agent-os` is worth supporting as an execution substrate
|
||||
- a concrete adapter/runtime experiment with measurable results
|
||||
- a proposed runtime capability model that fits current Paperclip adapters
|
||||
- a clear decision on whether snapshot-backed execution roots are worth integrating
|
||||
|
||||
## Non-goals
|
||||
|
||||
Do not:
|
||||
|
||||
- replace Paperclip heartbeats, issues, comments, approvals, or budgets with `agent-os` primitives
|
||||
- introduce Rust/sidecar requirements for all local execution paths
|
||||
- migrate all adapters at once
|
||||
- add runtime workflow/queue abstractions to Paperclip core
|
||||
|
||||
## Existing Paperclip integration points
|
||||
|
||||
The plan should stay anchored to these existing surfaces:
|
||||
|
||||
- `packages/adapter-utils/src/types.ts`
|
||||
- adapter contract, runtime service reporting, session metadata, and capability normalization targets
|
||||
- `server/src/services/heartbeat.ts`
|
||||
- execution entry point, log capture, issue comment summaries, and cost reporting
|
||||
- `server/src/services/execution-workspaces.ts`
|
||||
- current workspace lifecycle and git-oriented cleanup/readiness model
|
||||
- `server/src/services/plugin-loader.ts`
|
||||
- typed host capability boundary and extension loading patterns
|
||||
- local adapter implementations in `packages/adapters/*/src/server/`
|
||||
- current execution behavior to compare against an `agent-os`-backed path
|
||||
|
||||
## Phase plan
|
||||
|
||||
### Phase 0: constraints and experiment design
|
||||
|
||||
Objective:
|
||||
|
||||
- make the evaluation falsifiable before writing integration code
|
||||
|
||||
Deliverables:
|
||||
|
||||
- short experiment brief added to this document or a child issue
|
||||
- chosen first runtime target: `pi_local` or `opencode_local`
|
||||
- baseline metrics definition
|
||||
|
||||
Questions to lock down:
|
||||
|
||||
- what exact developer experience should improve
|
||||
- what security/isolation property we expect to gain
|
||||
- what failure modes are unacceptable
|
||||
- whether the prototype is adapter-only or a deeper internal runtime abstraction spike
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- a single first target chosen
|
||||
- measurable comparison criteria agreed on
|
||||
|
||||
Recommended metrics:
|
||||
|
||||
- cold start latency
|
||||
- session resume reliability across heartbeats
|
||||
- transcript/log quality
|
||||
- implementation complexity
|
||||
- operational complexity on local dev machines
|
||||
|
||||
### Phase 1: `agentos_local` spike
|
||||
|
||||
Objective:
|
||||
|
||||
- prove that Paperclip can drive one local agent through an `agent-os` runtime without breaking heartbeat semantics
|
||||
|
||||
Suggested scope:
|
||||
|
||||
- implement a new experimental adapter, `agentos_local`, or a feature-flagged runtime path under one existing adapter
|
||||
- start with `pi_local` or `opencode_local`
|
||||
- keep Paperclip's existing heartbeat, issue, workspace, and comment flow authoritative
|
||||
|
||||
Minimum implementation shape:
|
||||
|
||||
- adapter accepts model/runtime config
|
||||
- `server/src/services/heartbeat.ts` still owns run lifecycle
|
||||
- execution result still maps into existing `AdapterExecutionResult`
|
||||
- session state still fits current `sessionParams` / `sessionDisplayId` flow
|
||||
|
||||
What to verify:
|
||||
|
||||
- checkout and heartbeat flow still work end to end
|
||||
- resume across multiple heartbeats works
|
||||
- logs/transcripts remain readable in the UI
|
||||
- failure paths surface cleanly in issue comments and run logs
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- one agent type can run reliably through the new path
|
||||
- documented comparison against the existing local adapter path
|
||||
- explicit recommendation: continue, pause, or abandon
|
||||
|
||||
### Phase 2: capability-based runtime permissions
|
||||
|
||||
Objective:
|
||||
|
||||
- introduce a Paperclip-native capability vocabulary without coupling the product to `agent-os`
|
||||
|
||||
Suggested scope:
|
||||
|
||||
- extend adapter config schema vocabulary for runtime permissions
|
||||
- prototype normalized capabilities such as:
|
||||
- `fs.read`
|
||||
- `fs.write`
|
||||
- `network.fetch`
|
||||
- `network.listen`
|
||||
- `process.spawn`
|
||||
- `env.read`
|
||||
|
||||
Integration targets:
|
||||
|
||||
- `packages/adapter-utils/src/types.ts`
|
||||
- adapter config-schema support
|
||||
- server-side runtime config validation
|
||||
- future board-facing UI for permissions, if needed
|
||||
|
||||
What to avoid:
|
||||
|
||||
- building a full human policy UI before the vocabulary is proven useful
|
||||
- forcing every adapter to implement capability enforcement immediately
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- documented capability schema
|
||||
- one adapter path using it meaningfully
|
||||
- clear compatibility story for non-`agent-os` adapters
|
||||
|
||||
### Phase 3: snapshot-backed execution root experiment
|
||||
|
||||
Objective:
|
||||
|
||||
- determine whether a layered/snapshotted root model improves some Paperclip workloads
|
||||
|
||||
Suggested scope:
|
||||
|
||||
- evaluate it only for disposable or non-repo-heavy tasks first
|
||||
- keep git worktree-based repo editing as the default for codebase tasks
|
||||
|
||||
Promising use cases:
|
||||
|
||||
- routine-style runs
|
||||
- ephemeral preview/test environments
|
||||
- isolated document/artifact generation
|
||||
- tasks that do not need full git history or branch semantics
|
||||
|
||||
Integration targets:
|
||||
|
||||
- `server/src/services/execution-workspaces.ts`
|
||||
- workspace realization paths called from `server/src/services/heartbeat.ts`
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- clear statement on which workload classes benefit
|
||||
- clear statement on which workloads should stay on worktrees
|
||||
- go/no-go decision for broader implementation
|
||||
|
||||
### Phase 4: typed host tool evaluation
|
||||
|
||||
Objective:
|
||||
|
||||
- identify where Paperclip should prefer explicit typed tools over ambient shell access
|
||||
|
||||
Suggested scope:
|
||||
|
||||
- compare `agent-os` host-toolkit ideas with existing plugin and runtime-service surfaces
|
||||
- choose one or two sensitive operations that should become typed tools
|
||||
|
||||
Good candidates:
|
||||
|
||||
- git metadata/status inspection
|
||||
- runtime service inspection
|
||||
- deployment/preview status retrieval
|
||||
- generated artifact publishing
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- one concrete proposal for typed-tool adoption in Paperclip
|
||||
- clear statement on whether this belongs in plugins, adapters, or core services
|
||||
|
||||
## Recommended sequencing
|
||||
|
||||
Recommended order:
|
||||
|
||||
1. Phase 0
|
||||
2. Phase 1
|
||||
3. Phase 2
|
||||
4. Phase 3
|
||||
5. Phase 4
|
||||
|
||||
Reasoning:
|
||||
|
||||
- Phase 1 is the fastest way to invalidate or validate the entire `agent-os` direction
|
||||
- Phase 2 is valuable even if Phase 1 is abandoned
|
||||
- Phase 3 should wait until there is confidence that the runtime approach is operationally worthwhile
|
||||
- Phase 4 is useful independently but should be informed by what Phase 1 and Phase 2 expose
|
||||
|
||||
## Risks
|
||||
|
||||
### Technical risk
|
||||
|
||||
- `agent-os` introduces Rust sidecar and packaging complexity that may outweigh runtime benefits
|
||||
|
||||
### Product risk
|
||||
|
||||
- runtime experimentation could blur the boundary between Paperclip as control plane and Paperclip as execution platform
|
||||
|
||||
### Integration risk
|
||||
|
||||
- session semantics, log formatting, and failure behavior may degrade relative to current local adapters
|
||||
|
||||
### Scope risk
|
||||
|
||||
- a small runtime spike could expand into an adapter-system rewrite if not kept tightly bounded
|
||||
|
||||
## Guardrails
|
||||
|
||||
To keep this effort controlled:
|
||||
|
||||
- keep all experiments behind a clearly experimental adapter or feature flag
|
||||
- do not change issue/comment/approval/budget semantics to suit the runtime
|
||||
- measure against current local adapters instead of judging in isolation
|
||||
- stop after Phase 1 if the operational burden is already clearly too high
|
||||
|
||||
## Proposed next action
|
||||
|
||||
The next concrete action should be a small implementation spike issue:
|
||||
|
||||
- title: `Prototype experimental agentos_local runtime for one local adapter`
|
||||
- target adapter: `opencode_local` unless `pi_local` is materially easier
|
||||
- expected output: code spike, short verification notes, and a continue/stop recommendation
|
||||
|
||||
If leadership wants planning only and no spike yet, this document is the handoff artifact for that decision.
|
||||
397
doc/plans/2026-04-08-agent-os-technical-report.md
Normal file
397
doc/plans/2026-04-08-agent-os-technical-report.md
Normal file
@@ -0,0 +1,397 @@
|
||||
# Agent OS Technical Report for Paperclip
|
||||
|
||||
Date: 2026-04-08
|
||||
Analyzed upstream: `rivet-dev/agent-os` at commit `0063cdccd1dcb1c8e211670cd05482d70d26a5c4` (`0063cdc`), dated 2026-04-06
|
||||
|
||||
## Executive summary
|
||||
|
||||
`agent-os` is not a competitor to Paperclip's core product. It is an execution substrate: an embedded, VM-like runtime for agents, tools, filesystems, and session orchestration. Paperclip is a control plane: company scoping, task hierarchy, approvals, budgets, activity logs, workspaces, and governance.
|
||||
|
||||
The strongest takeaway is not "copy agent-os wholesale." The strongest takeaway is that Paperclip could selectively use its runtime ideas to improve local agent execution safety, reproducibility, and portability while keeping all company/task/governance logic in Paperclip.
|
||||
|
||||
My recommendation is:
|
||||
|
||||
1. Do not merge agent-os concepts into the Paperclip core product model.
|
||||
2. Do evaluate an optional `agentos_local` execution adapter or internal runtime experiment.
|
||||
3. Borrow a few design patterns aggressively:
|
||||
- layered/snapshotted execution filesystems
|
||||
- explicit capability-based runtime permissions
|
||||
- a better host-tools bridge for controlled tool execution
|
||||
- a normalized session capability model for agent adapters
|
||||
4. Do not import its workflow/cron/queue abstractions into Paperclip core until they are reconciled with Paperclip's issue/comment/governance model.
|
||||
|
||||
## What agent-os actually is
|
||||
|
||||
From the repo layout and implementation, `agent-os` is a mixed TypeScript/Rust system that provides:
|
||||
|
||||
- an `AgentOs` TypeScript API for creating isolated agent VMs
|
||||
- a Rust kernel/sidecar that virtualizes filesystem, processes, PTYs, pipes, permissions, and networking
|
||||
- an ACP-based session model for agent runtimes such as Pi, OpenCode, and Claude-style adapters
|
||||
- a registry of WASM command packages and mount plugins
|
||||
- optional host toolkits, cron scheduling, and filesystem mounts
|
||||
|
||||
The repo is substantial already:
|
||||
|
||||
- monorepo with `packages/`, `crates/`, and `registry/`
|
||||
- roughly 1,200 files just across `packages/`, `crates/`, and `registry/`
|
||||
- mixed implementation model: TypeScript public API plus Rust kernel/sidecar internals
|
||||
|
||||
## Architecture notes
|
||||
|
||||
### 1. Public runtime surface
|
||||
|
||||
The main API lives in `packages/core/src/agent-os.ts` and exports an `AgentOs` class with methods such as:
|
||||
|
||||
- `create()`
|
||||
- `createSession()`
|
||||
- `prompt()`
|
||||
- `exec()`
|
||||
- `spawn()`
|
||||
- `snapshotRootFilesystem()`
|
||||
- cron scheduling helpers
|
||||
|
||||
This is an execution API, not a coordination API.
|
||||
|
||||
### 2. Virtualized kernel model
|
||||
|
||||
The kernel is implemented in Rust under `crates/kernel/src/`. It models:
|
||||
|
||||
- virtual filesystem
|
||||
- process table
|
||||
- PTYs and pipes
|
||||
- resource accounting
|
||||
- permissioned filesystem access
|
||||
- network permission checks
|
||||
|
||||
That gives `agent-os` a much stronger isolation story than Paperclip's current "launch a host CLI in a workspace" local adapter approach.
|
||||
|
||||
### 3. Layered filesystem and snapshots
|
||||
|
||||
The filesystem design is one of the most reusable ideas. `agent-os` uses:
|
||||
|
||||
- a bundled base filesystem
|
||||
- a writable overlay
|
||||
- optional mounted filesystems
|
||||
- snapshot export/import for reusing root states
|
||||
|
||||
This is cleaner than treating every execution workspace as a mutable checkout plus ad hoc cleanup. It enables reproducible starting states and cheap isolation.
|
||||
|
||||
### 4. Capability-based permissions
|
||||
|
||||
The kernel-level permission vocabulary is strong and concrete:
|
||||
|
||||
- filesystem operations
|
||||
- network operations
|
||||
- child-process execution
|
||||
- environment access
|
||||
|
||||
The Rust kernel defaults are deny-oriented, but the high-level JS API currently serializes permissive defaults unless the caller provides a policy. That is an important nuance: the primitive is security-minded, but the product surface is still convenience-first.
|
||||
|
||||
### 5. Host-tools bridge
|
||||
|
||||
`agent-os` exposes host-side tools via a toolkit abstraction (`hostTool`, `toolKit`) and a local RPC bridge. This is a strong pattern because it gives the agent explicit, typed tools rather than ambient shell access to everything on the host.
|
||||
|
||||
### 6. ACP session abstraction
|
||||
|
||||
The session model is more uniform than most agent wrappers. It includes:
|
||||
|
||||
- capabilities
|
||||
- mode/config options
|
||||
- permission requests
|
||||
- sequenced session events
|
||||
- JSON-RPC transport through ACP adapters
|
||||
|
||||
This is directly relevant to Paperclip because our adapter layer still normalizes each CLI agent in a fairly bespoke way.
|
||||
|
||||
## Paperclip anchor points
|
||||
|
||||
The most relevant current Paperclip surfaces for any future `agent-os` integration are:
|
||||
|
||||
- `packages/adapter-utils/src/types.ts`
|
||||
- shared adapter contract, session metadata, runtime service reporting, environment tests, and optional `detectModel()`
|
||||
- `server/src/services/heartbeat.ts`
|
||||
- heartbeat execution, adapter invocation, cost capture, workspace realization, and issue-comment summaries
|
||||
- `server/src/services/execution-workspaces.ts`
|
||||
- execution workspace lifecycle and git readiness/cleanup logic
|
||||
- `server/src/services/plugin-loader.ts`
|
||||
- dynamic plugin activation, host capability boundaries, and runtime extension loading
|
||||
- local adapters such as `packages/adapters/codex-local/src/server/execute.ts` and peers
|
||||
- current host-CLI execution model that an `agent-os` runtime experiment would complement or replace for selected agents
|
||||
|
||||
## What Paperclip can learn from it
|
||||
|
||||
### 1. A safer local execution substrate
|
||||
|
||||
Paperclip's local adapters currently run host CLIs in managed workspaces and rely on adapter-specific behavior plus process-level controls. That is pragmatic, but weakly isolated.
|
||||
|
||||
`agent-os` shows a path toward:
|
||||
|
||||
- running local agent tooling in a constrained runtime
|
||||
- applying explicit network/filesystem/env policies
|
||||
- reducing accidental host leakage
|
||||
- making adapter behavior more portable across machines
|
||||
|
||||
Best use in Paperclip:
|
||||
|
||||
- as an optional runtime beneath local adapters
|
||||
- or as a new adapter family for agents that can run inside ACP-compatible `agent-os` sessions
|
||||
|
||||
This fits Paperclip because it improves execution safety without changing the control-plane model.
|
||||
|
||||
### 2. Snapshotted execution roots instead of only mutable workspaces
|
||||
|
||||
Paperclip already has strong execution-workspace concepts, but they are repo/worktree-centric. `agent-os` adds a stronger "start from known lower layers, write into a disposable upper layer" model.
|
||||
|
||||
That could improve:
|
||||
|
||||
- reproducible issue starts
|
||||
- disposable task sandboxes
|
||||
- faster reset/cleanup
|
||||
- "resume from snapshot" behavior for recurring routines
|
||||
- safe preview environments for risky agent operations
|
||||
|
||||
This is especially interesting for tasks that do not need a full git worktree.
|
||||
|
||||
### 3. A capability vocabulary for runtime governance
|
||||
|
||||
Paperclip has governance at the company/task level:
|
||||
|
||||
- approvals
|
||||
- budgets
|
||||
- activity logs
|
||||
- actor permissions
|
||||
- company scoping
|
||||
|
||||
It has less structure at the runtime capability level. `agent-os` offers a clear vocabulary that Paperclip could adopt even without adopting the runtime itself:
|
||||
|
||||
- `fs.read`, `fs.write`, `fs.mount_sensitive`
|
||||
- `network.fetch`, `network.http`, `network.listen`, `network.dns`
|
||||
- child process execution
|
||||
- env access
|
||||
|
||||
That vocabulary would improve:
|
||||
|
||||
- adapter configuration schemas
|
||||
- policy UIs
|
||||
- execution review surfaces
|
||||
- future approval gates for governed actions
|
||||
|
||||
### 4. Typed host tools instead of shelling out for everything
|
||||
|
||||
Paperclip's plugin system and adapters already have the beginnings of a controlled extension surface. `agent-os` reinforces the value of exposing capabilities as typed tools rather than raw shell access.
|
||||
|
||||
Concrete Paperclip uses:
|
||||
|
||||
- board-approved toolkits for sensitive operations
|
||||
- company-scoped service tools
|
||||
- plugin-defined tools with explicit schemas
|
||||
- safer execution for common actions like git metadata inspection, preview lookups, deployment status checks, or document generation
|
||||
|
||||
This aligns well with Paperclip's governance story.
|
||||
|
||||
### 5. Better adapter normalization around sessions and capabilities
|
||||
|
||||
Paperclip's adapter contract already supports execution results, session params, environment tests, skill syncing, quota windows, and optional `detectModel()`. But much of the per-agent behavior is still adapter-specific.
|
||||
|
||||
`agent-os` suggests a cleaner normalization target:
|
||||
|
||||
- a standard capability map
|
||||
- a consistent event stream model
|
||||
- explicit mode/config surfaces
|
||||
- explicit permission request semantics
|
||||
|
||||
Paperclip does not need ACP everywhere, but it would benefit from a more formal internal session capability model inspired by this.
|
||||
|
||||
### 6. On-demand heavy sandbox escalation
|
||||
|
||||
One of the best architectural choices in `agent-os` is that it does not pretend every workload fits the lightweight runtime. It has a sandbox extension for workloads that need a fuller environment.
|
||||
|
||||
Paperclip can adopt that philosophy directly:
|
||||
|
||||
- lightweight execution by default
|
||||
- escalate to full worktree / container / remote sandbox only when needed
|
||||
- keep the escalation explicit in the issue/run model
|
||||
|
||||
That is better than forcing all tasks into the heaviest environment up front.
|
||||
|
||||
## What does not fit Paperclip well
|
||||
|
||||
### 1. Its built-in orchestration primitives overlap the wrong layer
|
||||
|
||||
`agent-os` includes cron/session/workflow style primitives inside the runtime package. Paperclip already has higher-level orchestration concepts:
|
||||
|
||||
- issues/comments
|
||||
- heartbeat runs
|
||||
- approvals
|
||||
- company/org structure
|
||||
- execution workspaces
|
||||
- budget enforcement
|
||||
|
||||
If Paperclip copied `agent-os` cron/workflow/queue ideas directly into core, we would likely duplicate orchestration across two layers. That would blur ownership and make debugging harder.
|
||||
|
||||
Paperclip should keep orchestration authoritative at the control-plane layer.
|
||||
|
||||
### 2. It is not company-scoped or governance-native
|
||||
|
||||
`agent-os` is runtime-first, not company-first. It has no native concepts for:
|
||||
|
||||
- company boundaries
|
||||
- board/operator actor types
|
||||
- audit logs for business actions
|
||||
- issue hierarchy
|
||||
- approval routing
|
||||
- budget hard-stop behavior
|
||||
|
||||
Those are Paperclip's differentiators. They should not be displaced by runtime abstractions.
|
||||
|
||||
### 3. It introduces meaningful implementation complexity
|
||||
|
||||
Adopting `agent-os` deeply would add:
|
||||
|
||||
- Rust build/runtime complexity
|
||||
- sidecar lifecycle management
|
||||
- new failure modes across JS/Rust boundaries
|
||||
- more packaging and platform compatibility work
|
||||
- another abstraction layer for debugging already-complex local adapters
|
||||
|
||||
This is justified only if we want stronger local isolation or portability. It is not justified as a general refactor.
|
||||
|
||||
### 4. Its security model is not a drop-in governance solution
|
||||
|
||||
The permission model is good, but it is low-level. Paperclip would still need to answer:
|
||||
|
||||
- who can authorize a capability
|
||||
- how approval decisions are logged
|
||||
- how policies are scoped by company/project/issue/agent
|
||||
- how runtime permissions interact with budgets and task status
|
||||
|
||||
In other words, `agent-os` can supply enforcement primitives, not the control policy system itself.
|
||||
|
||||
### 5. The agent compatibility story is still selective
|
||||
|
||||
The repo is explicit that some runtimes are planned, partial, or still being adapted. In practice this means:
|
||||
|
||||
- good ideas for ACP-native or compatible agents
|
||||
- less certainty for every CLI agent we support today
|
||||
- real integration work for Codex/Cursor/Gemini-style Paperclip adapters
|
||||
|
||||
So the main near-term value is not universal replacement. It is selective use where compatibility is strong.
|
||||
|
||||
## Concrete recommendations for Paperclip
|
||||
|
||||
### Recommendation A: prototype an optional `agentos_local` adapter
|
||||
|
||||
This is the highest-value experiment.
|
||||
|
||||
Goal:
|
||||
|
||||
- run one supported agent type inside `agent-os`
|
||||
- keep Paperclip heartbeat/task/workspace/budget logic unchanged
|
||||
- evaluate startup time, isolation, transcript quality, and operational complexity
|
||||
|
||||
Good first target:
|
||||
|
||||
- `pi_local` or `opencode_local`
|
||||
|
||||
Why not start with Codex:
|
||||
|
||||
- Paperclip's Codex adapter is already important and carries repo-specific behavior
|
||||
- the Codex story in `agent-os` is present in the registry/docs, but the safest path is to validate the runtime on a less central adapter first
|
||||
|
||||
Success criteria:
|
||||
|
||||
- heartbeat can invoke the adapter reliably
|
||||
- session resume works across heartbeats
|
||||
- Paperclip still records logs, summaries, cost metadata, and issue comments normally
|
||||
- runtime permissions can be configured without breaking common tasks
|
||||
|
||||
### Recommendation B: adopt capability vocabulary into adapter configs
|
||||
|
||||
Even without using `agent-os`, Paperclip should consider standardizing adapter/runtime permissions around a vocabulary like:
|
||||
|
||||
- filesystem
|
||||
- network
|
||||
- subprocess/tool execution
|
||||
- environment access
|
||||
|
||||
This would improve:
|
||||
|
||||
- schema-driven adapter UIs
|
||||
- future approvals
|
||||
- observability
|
||||
- policy portability across adapters
|
||||
|
||||
### Recommendation C: explore snapshot-backed execution workspaces
|
||||
|
||||
Paperclip should evaluate whether some execution workspaces can be backed by:
|
||||
|
||||
- a reusable lower snapshot
|
||||
- a disposable upper layer
|
||||
- optional mounts for project data or artifacts
|
||||
|
||||
This is most valuable for:
|
||||
|
||||
- non-repo tasks
|
||||
- repeatable routines
|
||||
- preview/test environments
|
||||
- isolation-heavy local execution
|
||||
|
||||
It is less urgent for full repo editing flows that already benefit from git worktrees.
|
||||
|
||||
### Recommendation D: strengthen typed tool surfaces
|
||||
|
||||
Paperclip plugins and adapters should continue moving toward explicit typed tools over ad hoc shell access. `agent-os` confirms that this is the right direction.
|
||||
|
||||
This is a good fit for:
|
||||
|
||||
- plugin tools
|
||||
- workspace runtime services
|
||||
- governed operations that need approval or auditability
|
||||
|
||||
### Recommendation E: do not import runtime-level workflows into Paperclip core
|
||||
|
||||
Paperclip should not copy `agent-os` cron/workflow/queue concepts into core orchestration yet.
|
||||
|
||||
If we want them later, they must map cleanly onto:
|
||||
|
||||
- issues
|
||||
- comments
|
||||
- heartbeats
|
||||
- approvals
|
||||
- budgets
|
||||
- activity logs
|
||||
|
||||
Without that mapping, they would create a second orchestration system inside the product.
|
||||
|
||||
## A practical integration map
|
||||
|
||||
### Best near-term fits
|
||||
|
||||
- optional local adapter runtime
|
||||
- runtime capability schema
|
||||
- typed host-tool ideas for plugins/adapters
|
||||
- snapshot ideas for disposable execution roots
|
||||
|
||||
### Medium-term fits
|
||||
|
||||
- stronger session capability normalization across adapters
|
||||
- policy-aware runtime permission UI
|
||||
- selective ACP-inspired event normalization
|
||||
|
||||
### Poor fits right now
|
||||
|
||||
- moving Paperclip orchestration into agent-os workflows
|
||||
- replacing company/task/governance models with runtime constructs
|
||||
- making Rust sidecars a mandatory dependency for all local execution
|
||||
|
||||
## Bottom line
|
||||
|
||||
`agent-os` is useful to Paperclip as an execution technology reference, not as a product model.
|
||||
|
||||
Paperclip should treat it the same way it treats sandboxes or agent CLIs:
|
||||
|
||||
- execution substrate underneath the control plane
|
||||
- optional where the tradeoff is worth it
|
||||
- never the source of truth for company/task/governance state
|
||||
|
||||
If we do one thing from this report, it should be a narrowly scoped `agentos_local` experiment plus a design pass on capability-based runtime permissions. Those two ideas have the best upside and the lowest architectural risk.
|
||||
382
doc/plans/2026-04-12-vscode-task-interoperability-plan.md
Normal file
382
doc/plans/2026-04-12-vscode-task-interoperability-plan.md
Normal file
@@ -0,0 +1,382 @@
|
||||
# VS Code Task Interoperability Plan
|
||||
|
||||
Status: planning only, no code changes
|
||||
Date: 2026-04-12
|
||||
Related issue: `PAP-1377`
|
||||
|
||||
## Summary
|
||||
|
||||
Paperclip should not replace its workspace runtime service model with VS Code tasks.
|
||||
It should add a narrow interoperability layer that can discover and adopt supported entries from `.vscode/tasks.json`.
|
||||
|
||||
The core product model should stay:
|
||||
|
||||
- Paperclip owns long-running workspace services and their desired state
|
||||
- Paperclip shows operators exactly which named thing they are starting or stopping
|
||||
- Paperclip distinguishes long-running services from one-shot jobs
|
||||
|
||||
VS Code tasks should be treated as:
|
||||
|
||||
- an import/discovery format for workspace commands
|
||||
- a convenience for repos that already maintain `tasks.json`
|
||||
- a partial compatibility layer, not a full execution model
|
||||
|
||||
## Current State
|
||||
|
||||
The current implementation is already service-oriented:
|
||||
|
||||
- project workspaces and execution workspaces can store `workspaceRuntime` config plus `desiredState` and per-service `serviceStates`
|
||||
- the UI renders one control row per configured service and persists start/stop intent
|
||||
- the backend supervises long-running local processes, reuses eligible services, and restores desired services on startup
|
||||
|
||||
Relevant files:
|
||||
|
||||
- `packages/shared/src/types/workspace-runtime.ts`
|
||||
- `server/src/services/workspace-runtime.ts`
|
||||
- `server/src/services/project-workspace-runtime-config.ts`
|
||||
- `ui/src/components/WorkspaceRuntimeControls.tsx`
|
||||
- `ui/src/pages/ProjectWorkspaceDetail.tsx`
|
||||
- `ui/src/pages/ExecutionWorkspaceDetail.tsx`
|
||||
|
||||
This is directionally correct for Paperclip because it gives the control plane an explicit model for service lifecycle, health, reuse, and restart behavior.
|
||||
|
||||
## Problem To Solve
|
||||
|
||||
The current UX is still too raw:
|
||||
|
||||
- operators have to hand-author runtime JSON
|
||||
- a workspace can have multiple attached services, but the higher-level intent is not obvious
|
||||
- start/stop controls are visible in multiple places, which makes it easy to lose track of what is being controlled
|
||||
- there is no interoperability with repos that already define useful local workflows in `.vscode/tasks.json`
|
||||
|
||||
The issue is not that services are the wrong abstraction.
|
||||
The issue is that the configuration surface is too low-level and Paperclip does not yet leverage existing workspace metadata.
|
||||
|
||||
## Recommendation
|
||||
|
||||
Keep Paperclip runtime services as the source of truth for service supervision.
|
||||
Add a new workspace command model above the raw JSON layer, with VS Code task discovery as one input.
|
||||
|
||||
The product model should become:
|
||||
|
||||
1. `Workspace command`
|
||||
A named runnable thing attached to a workspace.
|
||||
|
||||
2. `Workspace service`
|
||||
A workspace command that is expected to stay alive and be supervised.
|
||||
|
||||
3. `Workspace job`
|
||||
A workspace command that runs once and exits.
|
||||
|
||||
4. `Runtime service instance`
|
||||
The live process record that already exists today in Paperclip.
|
||||
|
||||
In that model, VS Code tasks are a way to populate workspace commands.
|
||||
Only commands that map cleanly to Paperclip service or job semantics should become runnable in Paperclip.
|
||||
|
||||
## Why Not Fully Adopt VS Code Tasks
|
||||
|
||||
VS Code tasks are broader than Paperclip runtime services.
|
||||
They include shell/process tasks, compound tasks, background/watch tasks, presentation settings, extension/task-provider types, variable substitution, and problem-matcher-driven lifecycle.
|
||||
|
||||
That creates a bad fit if Paperclip tries to use `tasks.json` as its only runtime model:
|
||||
|
||||
- many tasks are one-shot jobs, not long-running services
|
||||
- some tasks depend on VS Code task providers or editor-only variable resolution
|
||||
- compound task graphs are useful, but they are not the same thing as a supervised service
|
||||
- problem-matcher readiness is useful metadata, but it is not enough to replace Paperclip's persisted service lifecycle model
|
||||
|
||||
The right boundary is interoperability, not replacement.
|
||||
|
||||
## Interoperability Contract
|
||||
|
||||
Paperclip should support a conservative subset of VS Code tasks and clearly mark unsupported entries.
|
||||
|
||||
### Supported in phase 1
|
||||
|
||||
- `shell` and `process` tasks with a concrete command Paperclip can resolve
|
||||
- optional task `options.cwd`
|
||||
- optional task environment values that can be flattened safely
|
||||
- task labels and detail text for naming and display
|
||||
- `dependsOn` for import-time expansion or display-only dependency hints
|
||||
- background/watch-oriented tasks that can reasonably be treated as long-running services
|
||||
|
||||
### Maybe supported in later phases
|
||||
|
||||
- grouping and default task metadata for better UX
|
||||
- selected variable substitution when Paperclip can resolve it safely from workspace context
|
||||
- mapping task metadata into Paperclip readiness/expose hints
|
||||
- limited compound-task launch flows
|
||||
|
||||
### Not supported initially
|
||||
|
||||
- extension-provided task types Paperclip cannot execute directly
|
||||
- arbitrary VS Code variable substitution semantics
|
||||
- problem-matcher parsing as the main source of service health
|
||||
- full parity with VS Code task execution behavior
|
||||
|
||||
## Long-Running Service Detection
|
||||
|
||||
Paperclip needs an explicit classification layer instead of assuming every VS Code task is a service.
|
||||
|
||||
Recommended classification:
|
||||
|
||||
- `service`
|
||||
Explicitly marked by Paperclip metadata, or confidently inferred from background/watch task semantics
|
||||
|
||||
- `job`
|
||||
One-shot command expected to exit
|
||||
|
||||
- `unsupported`
|
||||
Present in `tasks.json`, but not safely runnable by Paperclip
|
||||
|
||||
The important product decision is that service classification must be visible and editable by the operator.
|
||||
Inference can help, but it should not be the only source of truth.
|
||||
|
||||
## Proposed Product Shape
|
||||
|
||||
### 1. Replace raw-first editing with command-first editing
|
||||
|
||||
Project and execution workspace pages should stop making raw runtime JSON the primary editing surface.
|
||||
|
||||
Default UI should show:
|
||||
|
||||
- workspace commands
|
||||
- command type: service or job
|
||||
- source: Paperclip or VS Code
|
||||
- exact command and cwd
|
||||
- current state for services
|
||||
- explicit start, stop, restart, and run-now actions
|
||||
|
||||
Raw JSON should remain available behind an advanced section.
|
||||
|
||||
### 2. Add VS Code task discovery on workspaces
|
||||
|
||||
For a workspace with `cwd`, Paperclip should look for `.vscode/tasks.json`.
|
||||
|
||||
The workspace UI should show:
|
||||
|
||||
- whether a `tasks.json` file was found
|
||||
- last parse time
|
||||
- supported commands discovered
|
||||
- unsupported tasks with reasons
|
||||
- whether commands are inherited into execution workspaces
|
||||
|
||||
### 3. Make the controlled thing explicit
|
||||
|
||||
Start and stop UI should always name the exact entry being controlled.
|
||||
|
||||
Examples:
|
||||
|
||||
- `Start web`
|
||||
- `Stop api`
|
||||
- `Run db:migrate`
|
||||
|
||||
Avoid generic workspace-level labels when multiple commands exist.
|
||||
|
||||
### 4. Separate services from jobs in the UI
|
||||
|
||||
Do not mix one-shot jobs and long-running services into one undifferentiated list.
|
||||
|
||||
Recommended sections:
|
||||
|
||||
- `Services`
|
||||
- `Jobs`
|
||||
- `Unsupported imported tasks`
|
||||
|
||||
That resolves the ambiguity called out in the issue.
|
||||
|
||||
## Data Model Direction
|
||||
|
||||
Do not replace `workspaceRuntime` immediately.
|
||||
Instead add a higher-level representation that can compile down to the existing runtime-service machinery.
|
||||
|
||||
Suggested workspace metadata shape:
|
||||
|
||||
```ts
|
||||
type WorkspaceCommandSource =
|
||||
| { type: "paperclip" }
|
||||
| { type: "vscode_task"; taskLabel: string; taskPath: ".vscode/tasks.json" };
|
||||
|
||||
type WorkspaceCommandKind = "service" | "job";
|
||||
|
||||
type WorkspaceCommandDefinition = {
|
||||
id: string;
|
||||
name: string;
|
||||
kind: WorkspaceCommandKind;
|
||||
source: WorkspaceCommandSource;
|
||||
command: string | null;
|
||||
cwd: string | null;
|
||||
env?: Record<string, string> | null;
|
||||
autoStart?: boolean;
|
||||
serviceConfig?: {
|
||||
lifecycle?: "shared" | "ephemeral";
|
||||
reuseScope?: "project_workspace" | "execution_workspace" | "run";
|
||||
readiness?: Record<string, unknown> | null;
|
||||
expose?: Record<string, unknown> | null;
|
||||
} | null;
|
||||
importWarnings?: string[];
|
||||
disabledReason?: string | null;
|
||||
};
|
||||
```
|
||||
|
||||
`workspaceRuntime` can then become a derived or advanced representation for service-type commands until the rest of the system is migrated.
|
||||
|
||||
## VS Code Mapping Rules
|
||||
|
||||
Paperclip should map imported tasks with explicit, documented rules.
|
||||
|
||||
Recommended rules:
|
||||
|
||||
1. A task becomes a `job` by default.
|
||||
2. A task becomes a `service` only when:
|
||||
- Paperclip metadata marks it as a service, or
|
||||
- the task clearly represents a background/watch process and the operator confirms the classification.
|
||||
3. Unsupported tasks stay visible but disabled.
|
||||
4. Task labels become default command names.
|
||||
5. `dependsOn` is preserved as metadata, not silently flattened into hidden behavior.
|
||||
|
||||
Paperclip-specific metadata can live in a namespaced field on the imported task definition, for example:
|
||||
|
||||
```json
|
||||
{
|
||||
"label": "web",
|
||||
"type": "shell",
|
||||
"command": "pnpm dev",
|
||||
"isBackground": true,
|
||||
"paperclip": {
|
||||
"kind": "service",
|
||||
"readiness": {
|
||||
"type": "http",
|
||||
"urlTemplate": "http://127.0.0.1:${port}"
|
||||
},
|
||||
"expose": {
|
||||
"type": "url",
|
||||
"urlTemplate": "http://127.0.0.1:${port}"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
That gives us interoperability without depending on VS Code-only semantics for service readiness and exposure.
|
||||
|
||||
## Execution Policy
|
||||
|
||||
Project workspaces should be the main place where imported commands are discovered and curated.
|
||||
Execution workspaces should inherit that curated command set by default, with optional issue-level overrides.
|
||||
|
||||
Recommended precedence:
|
||||
|
||||
1. execution workspace override
|
||||
2. project workspace command set
|
||||
3. imported VS Code tasks from the linked workspace
|
||||
4. advanced raw runtime fallback
|
||||
|
||||
This matches the existing direction in `doc/plans/2026-03-10-workspace-strategy-and-git-worktrees.md`.
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Phase 1: Discovery and read-only visibility
|
||||
|
||||
Goal:
|
||||
show imported VS Code tasks in the workspace UI without changing runtime behavior.
|
||||
|
||||
Work:
|
||||
|
||||
- parse `.vscode/tasks.json` for project workspaces with local `cwd`
|
||||
- derive a list of candidate commands plus unsupported items
|
||||
- show source, label, command, cwd, and classification
|
||||
- show parse warnings and unsupported reasons
|
||||
|
||||
Success condition:
|
||||
an operator can see what Paperclip would import and why.
|
||||
|
||||
### Phase 2: Command model and explicit classification
|
||||
|
||||
Goal:
|
||||
introduce a first-class workspace command layer above raw runtime JSON.
|
||||
|
||||
Work:
|
||||
|
||||
- add a persisted command definition model in workspace metadata or a dedicated table
|
||||
- allow operator edits to imported command classification
|
||||
- separate `service` and `job` in UI
|
||||
- keep existing runtime-service storage for live supervised processes
|
||||
|
||||
Success condition:
|
||||
the workspace UI is command-first, and raw runtime JSON is advanced-only.
|
||||
|
||||
### Phase 3: Service execution backed by existing runtime supervisor
|
||||
|
||||
Goal:
|
||||
run supported imported service commands through the current Paperclip supervisor.
|
||||
|
||||
Work:
|
||||
|
||||
- compile service commands into the existing runtime service start/stop path
|
||||
- persist desired state per named command
|
||||
- keep startup restoration behavior for service commands
|
||||
- make the active command name explicit everywhere control actions appear
|
||||
|
||||
Success condition:
|
||||
imported service commands behave like native Paperclip services once adopted.
|
||||
|
||||
### Phase 4: Job execution and optional dependency handling
|
||||
|
||||
Goal:
|
||||
support one-shot imported commands without pretending they are services.
|
||||
|
||||
Work:
|
||||
|
||||
- add `Run` actions for jobs
|
||||
- record output in workspace operations
|
||||
- optionally support simple `dependsOn` execution for jobs with clear logging
|
||||
|
||||
Success condition:
|
||||
one-shot tasks are runnable, but they are not mixed into the service lifecycle model.
|
||||
|
||||
### Phase 5: Adapter and execution workspace integration
|
||||
|
||||
Goal:
|
||||
let agents and issue-scoped workspaces consume the curated command model consistently.
|
||||
|
||||
Work:
|
||||
|
||||
- expose inherited workspace commands to execution workspaces
|
||||
- allow issue-level selection of a default service command when relevant
|
||||
- make service selection explicit in issue and workspace views
|
||||
|
||||
Success condition:
|
||||
agents, operators, and workspaces all refer to the same named commands.
|
||||
|
||||
## Non-Goals
|
||||
|
||||
- full VS Code task-runner parity
|
||||
- support for every VS Code task type
|
||||
- removal of Paperclip's own runtime supervision model
|
||||
- editor-dependent execution semantics inside the control plane
|
||||
|
||||
## Risks
|
||||
|
||||
- overfitting Paperclip to VS Code and making the model worse for non-VS-Code repos
|
||||
- misclassifying watch tasks as durable services
|
||||
- hiding too much detail and making debugging harder
|
||||
- allowing imported task graphs to become implicit magic
|
||||
|
||||
These risks are manageable if the import layer stays explicit, conservative, and operator-editable.
|
||||
|
||||
## Decision
|
||||
|
||||
Paperclip should adopt VS Code tasks as an optional workspace command source, not as the canonical runtime model.
|
||||
|
||||
The main UX change should be:
|
||||
|
||||
- move from raw runtime JSON to named workspace commands
|
||||
- separate services from jobs
|
||||
- make the exact controlled command explicit
|
||||
- let `.vscode/tasks.json` pre-populate those commands when available
|
||||
|
||||
## External References
|
||||
|
||||
- VS Code tasks documentation: https://code.visualstudio.com/docs/debugtest/tasks
|
||||
- Existing Paperclip workspace plan: `doc/plans/2026-03-10-workspace-strategy-and-git-worktrees.md`
|
||||
@@ -249,7 +249,7 @@ Runs local `claude` CLI directly.
|
||||
"cwd": "/absolute/or/relative/path",
|
||||
"promptTemplate": "You are agent {{agent.id}} ...",
|
||||
"model": "optional-model-id",
|
||||
"maxTurnsPerRun": 300,
|
||||
"maxTurnsPerRun": 1000,
|
||||
"dangerouslySkipPermissions": true,
|
||||
"env": {"KEY": "VALUE"},
|
||||
"extraArgs": [],
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
services:
|
||||
paperclip:
|
||||
build:
|
||||
context: .
|
||||
context: ..
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "${PAPERCLIP_PORT:-3100}:3100"
|
||||
@@ -15,4 +15,4 @@ services:
|
||||
PAPERCLIP_PUBLIC_URL: "${PAPERCLIP_PUBLIC_URL:-http://localhost:3100}"
|
||||
BETTER_AUTH_SECRET: "${BETTER_AUTH_SECRET:?BETTER_AUTH_SECRET must be set}"
|
||||
volumes:
|
||||
- "${PAPERCLIP_DATA_DIR:-./data/docker-paperclip}:/paperclip"
|
||||
- "${PAPERCLIP_DATA_DIR:-../data/docker-paperclip}:/paperclip"
|
||||
@@ -1,7 +1,7 @@
|
||||
services:
|
||||
review:
|
||||
build:
|
||||
context: .
|
||||
context: ..
|
||||
dockerfile: docker/untrusted-review/Dockerfile
|
||||
init: true
|
||||
tty: true
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user