mirror of
https://github.com/paperclipai/paperclip
synced 2026-05-05 06:32:10 +02:00
Compare commits
102 Commits
pr/pap-817
...
pap-979-co
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c54b985d9f | ||
|
|
a3537a86e3 | ||
|
|
5d538d4792 | ||
|
|
6a72faf83b | ||
|
|
1fd40920db | ||
|
|
caef115b95 | ||
|
|
17e5322e28 | ||
|
|
582f4ceaf4 | ||
|
|
1583a2d65a | ||
|
|
9a70a4edaa | ||
|
|
0ac01a04e5 | ||
|
|
11ff24cd22 | ||
|
|
a5d47166e2 | ||
|
|
af5b980362 | ||
|
|
2e563ccd50 | ||
|
|
2c406d3b8c | ||
|
|
49c7fb7fbd | ||
|
|
995f5b0b66 | ||
|
|
b34fa3b273 | ||
|
|
9ddf960312 | ||
|
|
a8894799e4 | ||
|
|
76a692c260 | ||
|
|
5913706329 | ||
|
|
b944293eda | ||
|
|
3c1ebed539 | ||
|
|
ab0d04ff7a | ||
|
|
6073ac3145 | ||
|
|
3b329467eb | ||
|
|
aa5b2be907 | ||
|
|
dcb66eeae7 | ||
|
|
874fe5ec7d | ||
|
|
c916626cef | ||
|
|
555f026c24 | ||
|
|
e91da556ee | ||
|
|
ab82e3f022 | ||
|
|
c74cda1851 | ||
|
|
fcf3ba6974 | ||
|
|
ed62d58cb2 | ||
|
|
dd8c1ca3b2 | ||
|
|
5ee4cd98e8 | ||
|
|
a6ca3a9418 | ||
|
|
0fd75aa579 | ||
|
|
eaa765118f | ||
|
|
ed73547fb6 | ||
|
|
692105e202 | ||
|
|
01b550d61a | ||
|
|
c6364149b1 | ||
|
|
844b6dfd70 | ||
|
|
0a32e3838a | ||
|
|
e186449f94 | ||
|
|
4bb42005ea | ||
|
|
66aa65f8f7 | ||
|
|
15f6079c6b | ||
|
|
9e9eec9af6 | ||
|
|
1a4ed8c953 | ||
|
|
bd60ea4909 | ||
|
|
6ebfc0ff3d | ||
|
|
083d7c9ac4 | ||
|
|
80766e589c | ||
|
|
c5c6c62bd7 | ||
|
|
1549799c1e | ||
|
|
af1b08fdf4 | ||
|
|
72bc4ab403 | ||
|
|
4c6b9c190b | ||
|
|
f6ac6e47c4 | ||
|
|
623ab1c3ea | ||
|
|
eeec52ad74 | ||
|
|
db3883d2e7 | ||
|
|
9637351880 | ||
|
|
cbca599625 | ||
|
|
b1d12d2f37 | ||
|
|
0a952dc93d | ||
|
|
ff8b839f42 | ||
|
|
fea892c8b3 | ||
|
|
1696ff0c3f | ||
|
|
4eecd23ea3 | ||
|
|
4da83296a9 | ||
|
|
0ce4134ce1 | ||
|
|
03f44d0089 | ||
|
|
d38d5e1a7b | ||
|
|
add6ca5648 | ||
|
|
04a07080af | ||
|
|
8bebc9599a | ||
|
|
6250d536a0 | ||
|
|
de5985bb75 | ||
|
|
331e1f0d06 | ||
|
|
58c511af9a | ||
|
|
06b85d62b2 | ||
|
|
3447e2087a | ||
|
|
44fbf83106 | ||
|
|
eb73fc747a | ||
|
|
c4838cca6e | ||
|
|
67841a0c6d | ||
|
|
5561a9c17f | ||
|
|
a9dcea023b | ||
|
|
14ffbe30a0 | ||
|
|
98a5e287ef | ||
|
|
2735ef1f4a | ||
|
|
53f0988006 | ||
|
|
730a67bb20 | ||
|
|
dd1d9bed80 | ||
|
|
47449152ac |
40
.github/workflows/pr.yml
vendored
40
.github/workflows/pr.yml
vendored
@@ -40,6 +40,46 @@ jobs:
|
||||
with:
|
||||
node-version: 24
|
||||
|
||||
- name: Validate Dockerfile deps stage
|
||||
run: |
|
||||
missing=0
|
||||
|
||||
# Extract only the deps stage from the Dockerfile
|
||||
deps_stage="$(awk '/^FROM .* AS deps$/{found=1; next} found && /^FROM /{exit} found{print}' Dockerfile)"
|
||||
|
||||
if [ -z "$deps_stage" ]; then
|
||||
echo "::error::Could not extract deps stage from Dockerfile (expected 'FROM ... AS deps')"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Derive workspace search roots from pnpm-workspace.yaml (exclude dev-only packages)
|
||||
search_roots="$(grep '^ *- ' pnpm-workspace.yaml | sed 's/^ *- //' | sed 's/\*$//' | grep -v 'examples' | grep -v 'create-paperclip-plugin' | tr '\n' ' ')"
|
||||
|
||||
if [ -z "$search_roots" ]; then
|
||||
echo "::error::Could not derive workspace roots from pnpm-workspace.yaml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check all workspace package.json files are copied in the deps stage
|
||||
for pkg in $(find $search_roots -maxdepth 2 -name package.json -not -path '*/examples/*' -not -path '*/create-paperclip-plugin/*' -not -path '*/node_modules/*' 2>/dev/null | sort -u); do
|
||||
dir="$(dirname "$pkg")"
|
||||
if ! echo "$deps_stage" | grep -q "^COPY ${dir}/package.json"; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY ${pkg} ${dir}/"
|
||||
missing=1
|
||||
fi
|
||||
done
|
||||
|
||||
# Check patches directory is copied if it exists
|
||||
if [ -d patches ] && ! echo "$deps_stage" | grep -q '^COPY patches/'; then
|
||||
echo "::error::Dockerfile deps stage missing: COPY patches/ patches/"
|
||||
missing=1
|
||||
fi
|
||||
|
||||
if [ "$missing" -eq 1 ]; then
|
||||
echo "Dockerfile deps stage is out of sync. Update it to include the missing files."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Validate dependency resolution when manifests change
|
||||
run: |
|
||||
changed="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.event.pull_request.head.sha }}")"
|
||||
|
||||
@@ -26,6 +26,9 @@ Before making changes, read in this order:
|
||||
- `ui/`: React + Vite board UI
|
||||
- `packages/db/`: Drizzle schema, migrations, DB clients
|
||||
- `packages/shared/`: shared types, constants, validators, API path constants
|
||||
- `packages/adapters/`: agent adapter implementations (Claude, Codex, Cursor, etc.)
|
||||
- `packages/adapter-utils/`: shared adapter utilities
|
||||
- `packages/plugins/`: plugin system packages
|
||||
- `doc/`: operational and product docs
|
||||
|
||||
## 4. Dev Setup (Auto DB)
|
||||
|
||||
@@ -21,6 +21,7 @@ COPY packages/adapters/openclaw-gateway/package.json packages/adapters/openclaw-
|
||||
COPY packages/adapters/opencode-local/package.json packages/adapters/opencode-local/
|
||||
COPY packages/adapters/pi-local/package.json packages/adapters/pi-local/
|
||||
COPY packages/plugins/sdk/package.json packages/plugins/sdk/
|
||||
COPY patches/ patches/
|
||||
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
|
||||
25
README.md
25
README.md
@@ -234,16 +234,27 @@ See [doc/DEVELOPING.md](doc/DEVELOPING.md) for the full development guide.
|
||||
|
||||
## Roadmap
|
||||
|
||||
- ⚪ Get OpenClaw onboarding easier
|
||||
- ⚪ Get cloud agents working e.g. Cursor / e2b agents
|
||||
- ⚪ ClipMart - buy and sell entire agent companies
|
||||
- ⚪ Easy agent configurations / easier to understand
|
||||
- ⚪ Better support for harness engineering
|
||||
- 🟢 Plugin system (e.g. if you want to add a knowledgebase, custom tracing, queues, etc)
|
||||
- ⚪ Better docs
|
||||
- ✅ Plugin system (e.g. add a knowledge base, custom tracing, queues, etc)
|
||||
- ✅ Get OpenClaw / claw-style agent employees
|
||||
- ✅ companies.sh - import and export entire organizations
|
||||
- ✅ Easy AGENTS.md configurations
|
||||
- ✅ Skills Manager
|
||||
- ✅ Scheduled Routines
|
||||
- ✅ Better Budgeting
|
||||
- ⚪ Artifacts & Deployments
|
||||
- ⚪ CEO Chat
|
||||
- ⚪ MAXIMIZER MODE
|
||||
- ⚪ Multiple Human Users
|
||||
- ⚪ Cloud / Sandbox agents (e.g. Cursor / e2b agents)
|
||||
- ⚪ Cloud deployments
|
||||
- ⚪ Desktop App
|
||||
|
||||
<br/>
|
||||
|
||||
## Community & Plugins
|
||||
|
||||
Find Plugins and more at [awesome-paperclip](https://github.com/gsxdsm/awesome-paperclip)
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome contributions. See the [contributing guide](CONTRIBUTING.md) for details.
|
||||
|
||||
@@ -6,33 +6,15 @@ import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { promisify } from "node:util";
|
||||
import { afterAll, beforeAll, describe, expect, it } from "vitest";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { createStoredZipArchive } from "./helpers/zip.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
type ServerProcess = ReturnType<typeof spawn>;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
@@ -53,30 +35,13 @@ async function getAvailablePort(): Promise<number> {
|
||||
});
|
||||
}
|
||||
|
||||
async function startTempDatabase() {
|
||||
const dataDir = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-db-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
const { applyPendingMigrations, ensurePostgresDatabase } = await import("@paperclipai/db");
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
return { connectionString, dataDir, instance };
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres company import/export e2e tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
function writeTestConfig(configPath: string, tempRoot: string, port: number, connectionString: string) {
|
||||
@@ -265,26 +230,23 @@ async function waitForServer(
|
||||
);
|
||||
}
|
||||
|
||||
describe("paperclipai company import/export e2e", () => {
|
||||
describeEmbeddedPostgres("paperclipai company import/export e2e", () => {
|
||||
let tempRoot = "";
|
||||
let configPath = "";
|
||||
let exportDir = "";
|
||||
let apiBase = "";
|
||||
let serverProcess: ServerProcess | null = null;
|
||||
let dbDataDir = "";
|
||||
let dbInstance: EmbeddedPostgresInstance | null = null;
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
|
||||
beforeAll(async () => {
|
||||
tempRoot = mkdtempSync(path.join(os.tmpdir(), "paperclip-company-cli-e2e-"));
|
||||
configPath = path.join(tempRoot, "config", "config.json");
|
||||
exportDir = path.join(tempRoot, "exported-company");
|
||||
|
||||
const db = await startTempDatabase();
|
||||
dbDataDir = db.dataDir;
|
||||
dbInstance = db.instance;
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-company-cli-db-");
|
||||
|
||||
const port = await getAvailablePort();
|
||||
writeTestConfig(configPath, tempRoot, port, db.connectionString);
|
||||
writeTestConfig(configPath, tempRoot, port, tempDb.connectionString);
|
||||
apiBase = `http://127.0.0.1:${port}`;
|
||||
|
||||
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../..");
|
||||
@@ -294,7 +256,7 @@ describe("paperclipai company import/export e2e", () => {
|
||||
["paperclipai", "run", "--config", configPath],
|
||||
{
|
||||
cwd: repoRoot,
|
||||
env: createServerEnv(configPath, port, db.connectionString),
|
||||
env: createServerEnv(configPath, port, tempDb.connectionString),
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
},
|
||||
);
|
||||
@@ -311,10 +273,7 @@ describe("paperclipai company import/export e2e", () => {
|
||||
|
||||
afterAll(async () => {
|
||||
await stopServerProcess(serverProcess);
|
||||
await dbInstance?.stop();
|
||||
if (dbDataDir) {
|
||||
rmSync(dbDataDir, { recursive: true, force: true });
|
||||
}
|
||||
await tempDb?.cleanup();
|
||||
if (tempRoot) {
|
||||
rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
6
cli/src/__tests__/helpers/embedded-postgres.ts
Normal file
6
cli/src/__tests__/helpers/embedded-postgres.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestSupport,
|
||||
} from "@paperclipai/db";
|
||||
@@ -1,5 +1,5 @@
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { ApiRequestError, PaperclipApiClient } from "../client/http.js";
|
||||
import { ApiConnectionError, ApiRequestError, PaperclipApiClient } from "../client/http.js";
|
||||
|
||||
describe("PaperclipApiClient", () => {
|
||||
afterEach(() => {
|
||||
@@ -59,6 +59,29 @@ describe("PaperclipApiClient", () => {
|
||||
} satisfies Partial<ApiRequestError>);
|
||||
});
|
||||
|
||||
it("throws ApiConnectionError with recovery guidance when fetch fails", async () => {
|
||||
const fetchMock = vi.fn().mockRejectedValue(new TypeError("fetch failed"));
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
|
||||
const client = new PaperclipApiClient({ apiBase: "http://localhost:3100" });
|
||||
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toBeInstanceOf(ApiConnectionError);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toMatchObject({
|
||||
url: "http://localhost:3100/api/companies/import/preview",
|
||||
method: "POST",
|
||||
causeMessage: "fetch failed",
|
||||
} satisfies Partial<ApiConnectionError>);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/Could not reach the Paperclip API\./,
|
||||
);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/curl http:\/\/localhost:3100\/api\/health/,
|
||||
);
|
||||
await expect(client.post("/api/companies/import/preview", {})).rejects.toThrow(
|
||||
/pnpm dev|pnpm paperclipai run/,
|
||||
);
|
||||
});
|
||||
|
||||
it("retries once after interactive auth recovery", async () => {
|
||||
const fetchMock = vi
|
||||
.fn()
|
||||
|
||||
@@ -344,6 +344,87 @@ describe("worktree helpers", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("avoids ports already claimed by sibling worktree instance configs", async () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-claimed-ports-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
const homeDir = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const siblingInstanceRoot = path.join(homeDir, "instances", "existing-worktree");
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
try {
|
||||
fs.mkdirSync(repoRoot, { recursive: true });
|
||||
fs.mkdirSync(siblingInstanceRoot, { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(siblingInstanceRoot, "config.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildSourceConfig(),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(siblingInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(siblingInstanceRoot, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(siblingInstanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "authenticated",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: ["localhost"],
|
||||
serveUi: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(siblingInstanceRoot, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(siblingInstanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
);
|
||||
|
||||
process.chdir(repoRoot);
|
||||
await worktreeInitCommand({
|
||||
seed: false,
|
||||
fromConfig: path.join(tempRoot, "missing", "config.json"),
|
||||
home: homeDir,
|
||||
});
|
||||
|
||||
const config = JSON.parse(fs.readFileSync(path.join(repoRoot, ".paperclip", "config.json"), "utf8"));
|
||||
expect(config.server.port).toBe(3102);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(54330);
|
||||
expect(config.database.embeddedPostgresPort).not.toBe(config.server.port);
|
||||
expect(config.database.embeddedPostgresPort).toBeGreaterThan(54330);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
fs.rmSync(tempRoot, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("defaults the seed source config to the current repo-local Paperclip config", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-worktree-source-config-"));
|
||||
const repoRoot = path.join(tempRoot, "repo");
|
||||
|
||||
@@ -13,6 +13,26 @@ export class ApiRequestError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
export class ApiConnectionError extends Error {
|
||||
url: string;
|
||||
method: string;
|
||||
causeMessage?: string;
|
||||
|
||||
constructor(input: {
|
||||
apiBase: string;
|
||||
path: string;
|
||||
method: string;
|
||||
cause?: unknown;
|
||||
}) {
|
||||
const url = buildUrl(input.apiBase, input.path);
|
||||
const causeMessage = formatConnectionCause(input.cause);
|
||||
super(buildConnectionErrorMessage({ apiBase: input.apiBase, url, method: input.method, causeMessage }));
|
||||
this.url = url;
|
||||
this.method = input.method;
|
||||
this.causeMessage = causeMessage;
|
||||
}
|
||||
}
|
||||
|
||||
interface RequestOptions {
|
||||
ignoreNotFound?: boolean;
|
||||
}
|
||||
@@ -76,6 +96,7 @@ export class PaperclipApiClient {
|
||||
hasRetriedAuth = false,
|
||||
): Promise<T | null> {
|
||||
const url = buildUrl(this.apiBase, path);
|
||||
const method = String(init.method ?? "GET").toUpperCase();
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
accept: "application/json",
|
||||
@@ -94,10 +115,20 @@ export class PaperclipApiClient {
|
||||
headers["x-paperclip-run-id"] = this.runId;
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
let response: Response;
|
||||
try {
|
||||
response = await fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new ApiConnectionError({
|
||||
apiBase: this.apiBase,
|
||||
path,
|
||||
method,
|
||||
cause: error,
|
||||
});
|
||||
}
|
||||
|
||||
if (opts?.ignoreNotFound && response.status === 404) {
|
||||
return null;
|
||||
@@ -108,7 +139,7 @@ export class PaperclipApiClient {
|
||||
if (!hasRetriedAuth && this.recoverAuth) {
|
||||
const recoveredToken = await this.recoverAuth({
|
||||
path,
|
||||
method: String(init.method ?? "GET").toUpperCase(),
|
||||
method,
|
||||
error: apiError,
|
||||
});
|
||||
if (recoveredToken) {
|
||||
@@ -166,6 +197,50 @@ async function toApiError(response: Response): Promise<ApiRequestError> {
|
||||
return new ApiRequestError(response.status, `Request failed with status ${response.status}`, undefined, parsed);
|
||||
}
|
||||
|
||||
function buildConnectionErrorMessage(input: {
|
||||
apiBase: string;
|
||||
url: string;
|
||||
method: string;
|
||||
causeMessage?: string;
|
||||
}): string {
|
||||
const healthUrl = buildHealthCheckUrl(input.url);
|
||||
const lines = [
|
||||
"Could not reach the Paperclip API.",
|
||||
"",
|
||||
`Request: ${input.method} ${input.url}`,
|
||||
];
|
||||
if (input.causeMessage) {
|
||||
lines.push(`Cause: ${input.causeMessage}`);
|
||||
}
|
||||
lines.push(
|
||||
"",
|
||||
"This usually means the Paperclip server is not running, the configured URL is wrong, or the request is being blocked before it reaches Paperclip.",
|
||||
"",
|
||||
"Try:",
|
||||
"- Start Paperclip with `pnpm dev` or `pnpm paperclipai run`.",
|
||||
`- Verify the server is reachable with \`curl ${healthUrl}\`.`,
|
||||
`- If Paperclip is running elsewhere, pass \`--api-base ${input.apiBase.replace(/\/+$/, "")}\` or set \`PAPERCLIP_API_URL\`.`,
|
||||
);
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
function buildHealthCheckUrl(requestUrl: string): string {
|
||||
const url = new URL(requestUrl);
|
||||
url.pathname = `${url.pathname.replace(/\/+$/, "").replace(/\/api(?:\/.*)?$/, "")}/api/health`;
|
||||
url.search = "";
|
||||
url.hash = "";
|
||||
return url.toString();
|
||||
}
|
||||
|
||||
function formatConnectionCause(error: unknown): string | undefined {
|
||||
if (!error) return undefined;
|
||||
if (error instanceof Error) {
|
||||
return error.message.trim() || error.name;
|
||||
}
|
||||
const message = String(error).trim();
|
||||
return message || undefined;
|
||||
}
|
||||
|
||||
function toStringRecord(headers: HeadersInit | undefined): Record<string, string> {
|
||||
if (!headers) return {};
|
||||
if (Array.isArray(headers)) {
|
||||
|
||||
@@ -41,6 +41,8 @@ import {
|
||||
projects,
|
||||
runDatabaseBackup,
|
||||
runDatabaseRestore,
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
formatEmbeddedPostgresError,
|
||||
} from "@paperclipai/db";
|
||||
import type { Command } from "commander";
|
||||
import { ensureAgentJwtSecret, loadPaperclipEnvFile, mergePaperclipEnvEntries, readPaperclipEnvEntries, resolvePaperclipEnvFile } from "../config/env.js";
|
||||
@@ -465,6 +467,62 @@ async function findAvailablePort(preferredPort: number, reserved = new Set<numbe
|
||||
return port;
|
||||
}
|
||||
|
||||
function resolveRepoManagedWorktreesRoot(cwd: string): string | null {
|
||||
const normalized = path.resolve(cwd);
|
||||
const marker = `${path.sep}.paperclip${path.sep}worktrees${path.sep}`;
|
||||
const index = normalized.indexOf(marker);
|
||||
if (index === -1) return null;
|
||||
const repoRoot = normalized.slice(0, index);
|
||||
return path.resolve(repoRoot, ".paperclip", "worktrees");
|
||||
}
|
||||
|
||||
function collectClaimedWorktreePorts(homeDir: string, currentInstanceId: string, cwd: string): {
|
||||
serverPorts: Set<number>;
|
||||
databasePorts: Set<number>;
|
||||
} {
|
||||
const serverPorts = new Set<number>();
|
||||
const databasePorts = new Set<number>();
|
||||
const configPaths = new Set<string>();
|
||||
const instancesDir = path.resolve(homeDir, "instances");
|
||||
if (existsSync(instancesDir)) {
|
||||
for (const entry of readdirSync(instancesDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory() || entry.name === currentInstanceId) continue;
|
||||
|
||||
const configPath = path.resolve(instancesDir, entry.name, "config.json");
|
||||
if (existsSync(configPath)) {
|
||||
configPaths.add(configPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const repoManagedWorktreesRoot = resolveRepoManagedWorktreesRoot(cwd);
|
||||
if (repoManagedWorktreesRoot && existsSync(repoManagedWorktreesRoot)) {
|
||||
for (const entry of readdirSync(repoManagedWorktreesRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
const configPath = path.resolve(repoManagedWorktreesRoot, entry.name, ".paperclip", "config.json");
|
||||
if (existsSync(configPath)) {
|
||||
configPaths.add(configPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const configPath of configPaths) {
|
||||
try {
|
||||
const config = readConfig(configPath);
|
||||
if (config?.server.port) {
|
||||
serverPorts.add(config.server.port);
|
||||
}
|
||||
if (config?.database.mode === "embedded-postgres") {
|
||||
databasePorts.add(config.database.embeddedPostgresPort);
|
||||
}
|
||||
} catch {
|
||||
// Ignore malformed sibling configs.
|
||||
}
|
||||
}
|
||||
|
||||
return { serverPorts, databasePorts };
|
||||
}
|
||||
|
||||
function detectGitBranchName(cwd: string): string | null {
|
||||
try {
|
||||
const value = execFileSync("git", ["branch", "--show-current"], {
|
||||
@@ -750,6 +808,7 @@ async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): P
|
||||
}
|
||||
|
||||
const port = await findAvailablePort(preferredPort);
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
@@ -757,17 +816,31 @@ async function ensureEmbeddedPostgres(dataDir: string, preferredPort: number): P
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
await instance.initialise();
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
rmSync(postmasterPidFile, { force: true });
|
||||
}
|
||||
await instance.start();
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
port,
|
||||
@@ -886,10 +959,14 @@ async function runWorktreeInit(opts: WorktreeInitOptions): Promise<void> {
|
||||
rmSync(paths.instanceRoot, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
const claimedPorts = collectClaimedWorktreePorts(paths.homeDir, paths.instanceId, paths.cwd);
|
||||
const preferredServerPort = opts.serverPort ?? ((sourceConfig?.server.port ?? 3100) + 1);
|
||||
const serverPort = await findAvailablePort(preferredServerPort);
|
||||
const serverPort = await findAvailablePort(preferredServerPort, claimedPorts.serverPorts);
|
||||
const preferredDbPort = opts.dbPort ?? ((sourceConfig?.database.embeddedPostgresPort ?? 54329) + 1);
|
||||
const databasePort = await findAvailablePort(preferredDbPort, new Set([serverPort]));
|
||||
const databasePort = await findAvailablePort(
|
||||
preferredDbPort,
|
||||
new Set([...claimedPorts.databasePorts, serverPort]),
|
||||
);
|
||||
const targetConfig = buildWorktreeConfig({
|
||||
sourceConfig,
|
||||
paths,
|
||||
|
||||
@@ -206,6 +206,17 @@ paperclipai worktree init --from-data-dir ~/.paperclip
|
||||
paperclipai worktree init --force
|
||||
```
|
||||
|
||||
Repair an already-created repo-managed worktree and reseed its isolated instance from the main default install:
|
||||
|
||||
```sh
|
||||
cd ~/.paperclip/worktrees/PAP-884-ai-commits-component
|
||||
pnpm paperclipai worktree init --force --seed-mode minimal \
|
||||
--name PAP-884-ai-commits-component \
|
||||
--from-config ~/.paperclip/instances/default/config.json
|
||||
```
|
||||
|
||||
That rewrites the worktree-local `.paperclip/config.json` + `.paperclip/.env`, recreates the isolated instance under `~/.paperclip-worktrees/instances/<worktree-id>/`, and preserves the git worktree contents themselves.
|
||||
|
||||
**`pnpm paperclipai worktree:make <name> [options]`** — Create `~/NAME` as a git worktree, then initialize an isolated Paperclip instance inside it. This combines `git worktree add` with `worktree init` in a single step.
|
||||
|
||||
| Option | Description |
|
||||
|
||||
@@ -51,10 +51,9 @@ Public packages are discovered from:
|
||||
|
||||
- `packages/`
|
||||
- `server/`
|
||||
- `ui/`
|
||||
- `cli/`
|
||||
|
||||
`ui/` is ignored because it is private.
|
||||
|
||||
The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs), which:
|
||||
|
||||
- finds all public packages
|
||||
@@ -65,6 +64,18 @@ The version rewrite step now uses [`scripts/release-package-map.mjs`](../scripts
|
||||
|
||||
Those rewrites are temporary. The working tree is restored after publish or dry-run.
|
||||
|
||||
## `@paperclipai/ui` packaging
|
||||
|
||||
The UI package publishes prebuilt static assets, not the source workspace.
|
||||
|
||||
The `ui` package uses [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs) during `prepack` to swap in a lean publish manifest that:
|
||||
|
||||
- keeps the release-managed `name` and `version`
|
||||
- publishes only `dist/`
|
||||
- omits the source-only dependency graph from downstream installs
|
||||
|
||||
After packing or publishing, `postpack` restores the development manifest automatically.
|
||||
|
||||
## Version formats
|
||||
|
||||
Paperclip uses calendar versions:
|
||||
@@ -135,6 +146,7 @@ This is the fastest way to restore the default install path if a stable release
|
||||
|
||||
- [`scripts/build-npm.sh`](../scripts/build-npm.sh)
|
||||
- [`scripts/generate-npm-package-json.mjs`](../scripts/generate-npm-package-json.mjs)
|
||||
- [`scripts/generate-ui-package-json.mjs`](../scripts/generate-ui-package-json.mjs)
|
||||
- [`scripts/release-package-map.mjs`](../scripts/release-package-map.mjs)
|
||||
- [`cli/esbuild.config.mjs`](../cli/esbuild.config.mjs)
|
||||
- [`doc/RELEASING.md`](RELEASING.md)
|
||||
|
||||
@@ -35,6 +35,7 @@ At minimum that includes:
|
||||
|
||||
- `paperclipai`
|
||||
- `@paperclipai/server`
|
||||
- `@paperclipai/ui`
|
||||
- public packages under `packages/`
|
||||
|
||||
### 2.1. In npm, open each package settings page
|
||||
|
||||
33
doc/SPEC.md
33
doc/SPEC.md
@@ -186,17 +186,21 @@ The heartbeat is a protocol, not a runtime. Paperclip defines how to initiate an
|
||||
|
||||
### Execution Adapters
|
||||
|
||||
Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Initial adapters:
|
||||
Agent configuration includes an **adapter** that defines how Paperclip invokes the agent. Built-in adapters include:
|
||||
|
||||
| Adapter | Mechanism | Example |
|
||||
| -------------------- | ----------------------- | --------------------------------------------- |
|
||||
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
|
||||
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
|
||||
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
|
||||
| `gemini_local` | Gemini CLI process | Local Gemini CLI with sandbox and approval |
|
||||
| `hermes_local` | Hermes agent process | Local Hermes agent |
|
||||
| Adapter | Mechanism | Example |
|
||||
| ---------------- | -------------------------- | -------------------------------------------------- |
|
||||
| `process` | Execute a child process | `python run_agent.py --agent-id {id}` |
|
||||
| `http` | Send an HTTP request | `POST https://openclaw.example.com/hook/{id}` |
|
||||
| `claude_local` | Local Claude Code process | Claude Code heartbeat worker |
|
||||
| `codex_local` | Local Codex process | Codex CLI heartbeat worker |
|
||||
| `opencode_local` | Local OpenCode process | OpenCode heartbeat worker |
|
||||
| `pi_local` | Local Pi process | Pi CLI heartbeat worker |
|
||||
| `cursor` | Cursor API/CLI bridge | Cursor-integrated heartbeat worker |
|
||||
| `openclaw_gateway` | OpenClaw gateway API | Managed OpenClaw agent via gateway |
|
||||
| `hermes_local` | Local Hermes process | Hermes agent heartbeat worker |
|
||||
|
||||
The `process` and `http` adapters ship as defaults. Additional adapters have been added for specific agent runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
|
||||
The `process` and `http` adapters ship as generic defaults. Additional built-in adapters cover common local coding runtimes (see list above), and new adapter types can be registered via the plugin system (see Plugin / Extension Architecture).
|
||||
|
||||
### Adapter Interface
|
||||
|
||||
@@ -376,7 +380,7 @@ Flow:
|
||||
| Layer | Technology |
|
||||
| -------- | ------------------------------------------------------------ |
|
||||
| Frontend | React + Vite |
|
||||
| Backend | TypeScript + Hono (REST API, not tRPC — need non-TS clients) |
|
||||
| Backend | TypeScript + Express (REST API, not tRPC — need non-TS clients) |
|
||||
| Database | PostgreSQL (see [doc/DATABASE.md](./doc/DATABASE.md) for details — PGlite embedded for dev, Docker or hosted Supabase for production) |
|
||||
| Auth | [Better Auth](https://www.better-auth.com/) |
|
||||
|
||||
@@ -406,7 +410,7 @@ No separate "agent API" vs. "board API." Same endpoints, different authorization
|
||||
|
||||
### Work Artifacts
|
||||
|
||||
Paperclip does **not** manage work artifacts (code repos, file systems, deployments, documents). That's entirely the agent's domain. Paperclip tracks tasks and costs. Where and how work gets done is outside scope.
|
||||
Paperclip manages task-linked work artifacts: issue documents (rich-text plans, specs, notes attached to issues) and file attachments. Agents read and write these through the API as part of normal task execution. Full delivery infrastructure (code repos, deployments, production runtime) remains the agent's domain — Paperclip orchestrates the work, not the build pipeline.
|
||||
|
||||
### Open Questions
|
||||
|
||||
@@ -476,15 +480,14 @@ Each is a distinct page/route:
|
||||
- [ ] **Default agent** — basic Claude Code/Codex loop with Paperclip skill
|
||||
- [ ] **Default CEO** — strategic planning, delegation, board communication
|
||||
- [ ] **Paperclip skill (SKILL.md)** — teaches agents to interact with the API
|
||||
- [ ] **REST API** — full API for agent interaction (Hono)
|
||||
- [ ] **REST API** — full API for agent interaction (Express)
|
||||
- [ ] **Web UI** — React/Vite: org chart, task board, dashboard, cost views
|
||||
- [ ] **Agent auth** — connection string generation with URL + key + instructions
|
||||
- [ ] **One-command dev setup** — embedded PGlite, everything local
|
||||
- [ ] **Multiple Adapter types** (HTTP Adapter, OpenClaw Adapter)
|
||||
- [ ] **Multiple Adapter types** (HTTP, OpenClaw gateway, and local coding adapters)
|
||||
|
||||
### Not V1
|
||||
|
||||
- Template export/import
|
||||
- Knowledge base - a future plugin
|
||||
- Advanced governance models (hiring budgets, multi-member boards)
|
||||
- Revenue/expense tracking beyond token costs - a future plugin
|
||||
@@ -509,7 +512,7 @@ Things Paperclip explicitly does **not** do:
|
||||
- **Not a SaaS** — single-tenant, self-hosted
|
||||
- **Not opinionated about Agent implementation** — any language, any framework, any runtime
|
||||
- **Not automatically self-healing** — surfaces problems, doesn't silently fix them
|
||||
- **Does not manage work artifacts** — no repo management, no deployment, no file systems
|
||||
- **Does not manage delivery infrastructure** — no repo management, no deployment, no file systems (but does manage task-linked documents and attachments)
|
||||
- **Does not auto-reassign work** — stale tasks are surfaced, not silently redistributed
|
||||
- **Does not track external revenue/expenses** — that's a future plugin. Token/LLM cost budgeting is core.
|
||||
|
||||
|
||||
@@ -20,9 +20,12 @@ When a heartbeat fires, Paperclip:
|
||||
|---------|----------|-------------|
|
||||
| [Claude Local](/adapters/claude-local) | `claude_local` | Runs Claude Code CLI locally |
|
||||
| [Codex Local](/adapters/codex-local) | `codex_local` | Runs OpenAI Codex CLI locally |
|
||||
| [Gemini Local](/adapters/gemini-local) | `gemini_local` | Runs Gemini CLI locally |
|
||||
| [Gemini Local](/adapters/gemini-local) | `gemini_local` | Runs Gemini CLI locally (experimental — adapter package exists, not yet in stable type enum) |
|
||||
| OpenCode Local | `opencode_local` | Runs OpenCode CLI locally (multi-provider `provider/model`) |
|
||||
| OpenClaw | `openclaw` | Sends wake payloads to an OpenClaw webhook |
|
||||
| Hermes Local | `hermes_local` | Runs Hermes CLI locally |
|
||||
| Cursor | `cursor` | Runs Cursor in background mode |
|
||||
| Pi Local | `pi_local` | Runs an embedded Pi agent locally |
|
||||
| OpenClaw Gateway | `openclaw_gateway` | Connects to an OpenClaw gateway endpoint |
|
||||
| [Process](/adapters/process) | `process` | Executes arbitrary shell commands |
|
||||
| [HTTP](/adapters/http) | `http` | Sends webhooks to external agents |
|
||||
|
||||
@@ -55,7 +58,7 @@ Three registries consume these modules:
|
||||
|
||||
## Choosing an Adapter
|
||||
|
||||
- **Need a coding agent?** Use `claude_local`, `codex_local`, `gemini_local`, or `opencode_local`
|
||||
- **Need a coding agent?** Use `claude_local`, `codex_local`, `opencode_local`, or `hermes_local`
|
||||
- **Need to run a script or command?** Use `process`
|
||||
- **Need to call an external service?** Use `http`
|
||||
- **Need something custom?** [Create your own adapter](/adapters/creating-an-adapter)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Agent Runtime Guide
|
||||
|
||||
Status: User-facing guide
|
||||
Last updated: 2026-02-17
|
||||
Status: User-facing guide
|
||||
Last updated: 2026-03-26
|
||||
Audience: Operators setting up and running agents in Paperclip
|
||||
|
||||
## 1. What this system does
|
||||
@@ -32,14 +32,19 @@ If an agent is already running, new wakeups are merged (coalesced) instead of la
|
||||
|
||||
## 3.1 Adapter choice
|
||||
|
||||
Common choices:
|
||||
Built-in adapters:
|
||||
|
||||
- `claude_local`: runs your local `claude` CLI
|
||||
- `codex_local`: runs your local `codex` CLI
|
||||
- `opencode_local`: runs your local `opencode` CLI
|
||||
- `hermes_local`: runs your local `hermes` CLI
|
||||
- `cursor`: runs Cursor in background mode
|
||||
- `pi_local`: runs an embedded Pi agent locally
|
||||
- `openclaw_gateway`: connects to an OpenClaw gateway endpoint
|
||||
- `process`: generic shell command adapter
|
||||
- `http`: calls an external HTTP endpoint
|
||||
|
||||
For `claude_local` and `codex_local`, Paperclip assumes the CLI is already installed and authenticated on the host machine.
|
||||
For local CLI adapters (`claude_local`, `codex_local`, `opencode_local`, `hermes_local`), Paperclip assumes the CLI is already installed and authenticated on the host machine.
|
||||
|
||||
## 3.2 Runtime behavior
|
||||
|
||||
@@ -69,6 +74,8 @@ You can set:
|
||||
|
||||
Templates support variables like `{{agent.id}}`, `{{agent.name}}`, and run context values.
|
||||
|
||||
> **Note:** `bootstrapPromptTemplate` is deprecated and should not be used for new agents. Existing configs that use it will continue to work but should be migrated to the managed instructions bundle system.
|
||||
|
||||
## 4. Session resume behavior
|
||||
|
||||
Paperclip stores session IDs for resumable adapters.
|
||||
@@ -133,7 +140,7 @@ If the connection drops, the UI reconnects automatically.
|
||||
|
||||
If runs fail repeatedly:
|
||||
|
||||
1. Check adapter command availability (`claude`/`codex` installed and logged in).
|
||||
1. Check adapter command availability (e.g. `claude`/`codex`/`opencode`/`hermes` installed and logged in).
|
||||
2. Verify `cwd` exists and is accessible.
|
||||
3. Inspect run error + stderr excerpt, then full log.
|
||||
4. Confirm timeout is not too low.
|
||||
@@ -166,9 +173,9 @@ Start with least privilege where possible, and avoid exposing secrets in broad r
|
||||
|
||||
## 10. Minimal setup checklist
|
||||
|
||||
1. Choose adapter (`claude_local` or `codex_local`).
|
||||
2. Set `cwd` to the target workspace.
|
||||
3. Add bootstrap + normal prompt templates.
|
||||
1. Choose adapter (e.g. `claude_local`, `codex_local`, `opencode_local`, `hermes_local`, `cursor`, or `openclaw_gateway`).
|
||||
2. Set `cwd` to the target workspace (for local adapters).
|
||||
3. Optionally add a prompt template (`promptTemplate`) or use the managed instructions bundle.
|
||||
4. Configure heartbeat policy (timer and/or assignment wakeups).
|
||||
5. Trigger a manual wakeup.
|
||||
6. Confirm run succeeds and session/token usage is recorded.
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
"guides/board-operator/managing-agents",
|
||||
"guides/board-operator/org-structure",
|
||||
"guides/board-operator/managing-tasks",
|
||||
"guides/board-operator/delegation",
|
||||
"guides/board-operator/approvals",
|
||||
"guides/board-operator/costs-and-budgets",
|
||||
"guides/board-operator/activity-log",
|
||||
|
||||
122
docs/guides/board-operator/delegation.md
Normal file
122
docs/guides/board-operator/delegation.md
Normal file
@@ -0,0 +1,122 @@
|
||||
---
|
||||
title: How Delegation Works
|
||||
summary: How the CEO breaks down goals into tasks and assigns them to agents
|
||||
---
|
||||
|
||||
Delegation is one of Paperclip's most powerful features. You set company goals, and the CEO agent automatically breaks them into tasks and assigns them to the right agents. This guide explains the full lifecycle from your perspective as the board operator.
|
||||
|
||||
## The Delegation Lifecycle
|
||||
|
||||
When you create a company goal, the CEO doesn't just acknowledge it — it builds a plan and mobilizes the team:
|
||||
|
||||
```
|
||||
You set a company goal
|
||||
→ CEO wakes up on heartbeat
|
||||
→ CEO proposes a strategy (creates an approval for you)
|
||||
→ You approve the strategy
|
||||
→ CEO breaks goals into tasks and assigns them to reports
|
||||
→ Reports wake up (heartbeat triggered by assignment)
|
||||
→ Reports execute work and update task status
|
||||
→ CEO monitors progress, unblocks, and escalates
|
||||
→ You see results in the dashboard and activity log
|
||||
```
|
||||
|
||||
Each step is traceable. Every task links back to the goal through a parent hierarchy, so you can always see why work is happening.
|
||||
|
||||
## What You Need to Do
|
||||
|
||||
Your role is strategic oversight, not task management. Here's what the delegation model expects from you:
|
||||
|
||||
1. **Set clear company goals.** The CEO works from these. Specific, measurable goals produce better delegation. "Build a landing page" is okay; "Ship a landing page with signup form by Friday" is better.
|
||||
|
||||
2. **Approve the CEO's strategy.** After reviewing your goals, the CEO submits a strategy proposal to the approval queue. Review it, then approve, reject, or request revisions.
|
||||
|
||||
3. **Approve hire requests.** When the CEO needs more capacity (e.g., a frontend engineer to build the landing page), it submits a hire request. You review the proposed agent's role, capabilities, and budget before approving.
|
||||
|
||||
4. **Monitor progress.** Use the dashboard and activity log to track how work is flowing. Check task status, agent activity, and completion rates.
|
||||
|
||||
5. **Intervene only when things stall.** If progress stops, check these in order:
|
||||
- Is an approval pending in your queue?
|
||||
- Is an agent paused or in an error state?
|
||||
- Is the CEO's budget exhausted (above 80%, it focuses on critical tasks only)?
|
||||
|
||||
## What the CEO Does Automatically
|
||||
|
||||
You do **not** need to tell the CEO to engage specific agents. After you approve its strategy, the CEO:
|
||||
|
||||
- **Breaks goals into concrete tasks** with clear descriptions, priorities, and acceptance criteria
|
||||
- **Assigns tasks to the right agent** based on role and capabilities (e.g., engineering tasks go to the CTO or engineers, marketing tasks go to the CMO)
|
||||
- **Creates subtasks** when work needs to be decomposed further
|
||||
- **Hires new agents** when the team lacks capacity for a goal (subject to your approval)
|
||||
- **Monitors progress** on each heartbeat, checking task status and unblocking reports
|
||||
- **Escalates to you** when it encounters something it can't resolve — budget issues, blocked approvals, or strategic ambiguity
|
||||
|
||||
## Common Delegation Patterns
|
||||
|
||||
### Flat Hierarchy (Small Teams)
|
||||
|
||||
For small companies with 3-5 agents, the CEO delegates directly to each report:
|
||||
|
||||
```
|
||||
CEO
|
||||
├── CTO (engineering tasks)
|
||||
├── CMO (marketing tasks)
|
||||
└── Designer (design tasks)
|
||||
```
|
||||
|
||||
The CEO assigns tasks directly. Each agent works independently and reports status back.
|
||||
|
||||
### Three-Level Hierarchy (Larger Teams)
|
||||
|
||||
For larger organizations, managers delegate further down the chain:
|
||||
|
||||
```
|
||||
CEO
|
||||
├── CTO
|
||||
│ ├── Backend Engineer
|
||||
│ └── Frontend Engineer
|
||||
└── CMO
|
||||
└── Content Writer
|
||||
```
|
||||
|
||||
The CEO assigns high-level tasks to the CTO and CMO. They break those into subtasks and assign them to their own reports. You only interact with the CEO — the rest happens automatically.
|
||||
|
||||
### Hire-on-Demand
|
||||
|
||||
The CEO can start as the only agent and hire as work requires:
|
||||
|
||||
1. You set a goal that needs engineering work
|
||||
2. The CEO proposes a strategy that includes hiring a CTO
|
||||
3. You approve the hire
|
||||
4. The CEO assigns engineering tasks to the new CTO
|
||||
5. As scope grows, the CTO may request to hire engineers
|
||||
|
||||
This pattern lets you start small and scale the team based on actual work, not upfront planning.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Why isn't the CEO delegating?"
|
||||
|
||||
If you've set a goal but nothing is happening, check these common causes:
|
||||
|
||||
| Check | What to look for |
|
||||
|-------|-----------------|
|
||||
| **Approval queue** | The CEO may have submitted a strategy or hire request that's waiting for your approval. This is the most common reason. |
|
||||
| **Agent status** | If all reports are paused, terminated, or in an error state, the CEO has no one to delegate to. Check the Agents page. |
|
||||
| **Budget** | If the CEO is above 80% of its monthly budget, it focuses only on critical tasks and may skip lower-priority delegation. |
|
||||
| **Goals** | If no company goals are set, the CEO has nothing to work from. Create a goal first. |
|
||||
| **Heartbeat** | Is the CEO's heartbeat enabled and running? Check the agent detail page for recent heartbeat history. |
|
||||
| **Agent instructions** | The CEO's delegation behavior is driven by its `AGENTS.md` instructions file. Open the CEO agent's detail page and verify that its instructions path is set and that the file includes delegation directives (subtask creation, hiring, assignment). If AGENTS.md is missing or doesn't mention delegation, the CEO won't know to break down goals and assign work. |
|
||||
|
||||
### "Do I have to tell the CEO to engage engineering and marketing?"
|
||||
|
||||
**No.** The CEO will delegate automatically after you approve its strategy. It knows the org chart and assigns tasks based on each agent's role and capabilities. You set the goal and approve the plan — the CEO handles task breakdown and assignment.
|
||||
|
||||
### "A task seems stuck"
|
||||
|
||||
If a specific task isn't progressing:
|
||||
|
||||
1. Check the task's comment thread — the assigned agent may have posted a blocker
|
||||
2. Check if the task is in `blocked` status — read the blocker comment to understand why
|
||||
3. Check the assigned agent's status — it may be paused or over budget
|
||||
4. If the agent is stuck, you can reassign the task or add a comment with guidance
|
||||
@@ -29,7 +29,7 @@ Create agents from the Agents page. Each agent requires:
|
||||
|
||||
Common adapter choices:
|
||||
- `claude_local` / `codex_local` / `opencode_local` for local coding agents
|
||||
- `openclaw` / `http` for webhook-based external agents
|
||||
- `openclaw_gateway` / `http` for webhook-based external agents
|
||||
- `process` for generic local command execution
|
||||
|
||||
For `opencode_local`, configure an explicit `adapterConfig.model` (`provider/model`).
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: Core Concepts
|
||||
summary: Companies, agents, issues, heartbeats, and governance
|
||||
summary: Companies, agents, issues, delegation, heartbeats, and governance
|
||||
---
|
||||
|
||||
Paperclip organizes autonomous AI work around five key concepts.
|
||||
Paperclip organizes autonomous AI work around six key concepts.
|
||||
|
||||
## Company
|
||||
|
||||
@@ -50,6 +50,17 @@ Terminal states: `done`, `cancelled`.
|
||||
|
||||
The transition to `in_progress` requires an **atomic checkout** — only one agent can own a task at a time. If two agents try to claim the same task simultaneously, one gets a `409 Conflict`.
|
||||
|
||||
## Delegation
|
||||
|
||||
The CEO is the primary delegator. When you set company goals, the CEO:
|
||||
|
||||
1. Creates a strategy and submits it for your approval
|
||||
2. Breaks approved goals into tasks
|
||||
3. Assigns tasks to agents based on their role and capabilities
|
||||
4. Hires new agents when needed (subject to your approval)
|
||||
|
||||
You don't need to manually assign every task — set the goals and let the CEO organize the work. You approve key decisions (strategy, hiring) and monitor progress. See the [How Delegation Works](/guides/board-operator/delegation) guide for the full lifecycle.
|
||||
|
||||
## Heartbeats
|
||||
|
||||
Agents don't run continuously. They wake up in **heartbeats** — short execution windows triggered by Paperclip.
|
||||
|
||||
@@ -32,7 +32,8 @@
|
||||
"test:e2e:headed": "npx playwright test --config tests/e2e/playwright.config.ts --headed",
|
||||
"evals:smoke": "cd evals/promptfoo && npx promptfoo@0.103.3 eval",
|
||||
"test:release-smoke": "npx playwright test --config tests/release-smoke/playwright.config.ts",
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed"
|
||||
"test:release-smoke:headed": "npx playwright test --config tests/release-smoke/playwright.config.ts --headed",
|
||||
"metrics:paperclip-commits": "tsx scripts/paperclip-commit-metrics.ts"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.58.2",
|
||||
|
||||
@@ -287,6 +287,12 @@ export interface ServerAdapterModule {
|
||||
* without knowing provider-specific credential paths or API shapes.
|
||||
*/
|
||||
getQuotaWindows?: () => Promise<ProviderQuotaResult>;
|
||||
/**
|
||||
* Optional: detect the currently configured model from local config files.
|
||||
* Returns the detected model/provider and the config source, or null if
|
||||
* the adapter does not support detection or no config is found.
|
||||
*/
|
||||
detectModel?: () => Promise<{ model: string; provider: string; source: string } | null>;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@@ -352,7 +352,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const combinedPath = path.join(skillsDir, "agent-instructions.md");
|
||||
await fs.writeFile(combinedPath, instructionsContent + pathDirective, "utf-8");
|
||||
effectiveInstructionsFilePath = combinedPath;
|
||||
await onLog("stderr", `[paperclip] Loaded agent instructions file: ${instructionsFilePath}\n`);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
|
||||
@@ -42,7 +42,7 @@ Notes:
|
||||
- Prompts are piped via stdin (Codex receives "-" prompt argument).
|
||||
- If instructionsFilePath is configured, Paperclip prepends that file's contents to the stdin prompt on every run.
|
||||
- Codex exec automatically applies repo-scoped AGENTS.md instructions from the active workspace. Paperclip cannot suppress that discovery in exec mode, so repo AGENTS.md files may still apply even when you only configured an explicit instructionsFilePath.
|
||||
- Paperclip injects desired local skills into the active workspace's ".agents/skills" directory at execution time so Codex can discover "$paperclip" and related skills without coupling them to the user's login home.
|
||||
- Paperclip injects desired local skills into the effective CODEX_HOME/skills/ directory at execution time so Codex can discover "$paperclip" and related skills without polluting the project working directory. In managed-home mode (the default) this is ~/.paperclip/instances/<id>/companies/<companyId>/codex-home/skills/; when CODEX_HOME is explicitly overridden in adapter config, that override is used instead.
|
||||
- Unless explicitly overridden in adapter config, Paperclip runs Codex with a per-company managed CODEX_HOME under the active Paperclip instance and seeds auth/config from the shared Codex home (the CODEX_HOME env var, when set, or ~/.codex).
|
||||
- Some model/tool combinations reject certain effort levels (for example minimal with web search enabled).
|
||||
- When Paperclip realizes a workspace/runtime for a run, it injects PAPERCLIP_WORKSPACE_* and PAPERCLIP_RUNTIME_* env vars for agent-side tooling.
|
||||
|
||||
@@ -21,7 +21,7 @@ import {
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { parseCodexJsonl, isCodexUnknownSessionError } from "./parse.js";
|
||||
import { pathExists, prepareManagedCodexHome, resolveManagedCodexHomeDir } from "./codex-home.js";
|
||||
import { pathExists, prepareManagedCodexHome, resolveManagedCodexHomeDir, resolveSharedCodexHomeDir } from "./codex-home.js";
|
||||
import { resolveCodexDesiredSkillNames } from "./skills.js";
|
||||
|
||||
const __moduleDir = path.dirname(fileURLToPath(import.meta.url));
|
||||
@@ -135,8 +135,8 @@ async function pruneBrokenUnavailablePaperclipSkillSymlinks(
|
||||
}
|
||||
}
|
||||
|
||||
function resolveCodexWorkspaceSkillsDir(cwd: string): string {
|
||||
return path.join(cwd, ".agents", "skills");
|
||||
function resolveCodexSkillsDir(codexHome: string): string {
|
||||
return path.join(codexHome, "skills");
|
||||
}
|
||||
|
||||
type EnsureCodexSkillsInjectedOptions = {
|
||||
@@ -157,7 +157,7 @@ export async function ensureCodexSkillsInjected(
|
||||
const skillsEntries = allSkillsEntries.filter((entry) => desiredSet.has(entry.key));
|
||||
if (skillsEntries.length === 0) return;
|
||||
|
||||
const skillsHome = options.skillsHome ?? resolveCodexWorkspaceSkillsDir(process.cwd());
|
||||
const skillsHome = options.skillsHome ?? resolveCodexSkillsDir(resolveSharedCodexHomeDir());
|
||||
await fs.mkdir(skillsHome, { recursive: true });
|
||||
const linkSkill = options.linkSkill;
|
||||
for (const entry of skillsEntries) {
|
||||
@@ -273,11 +273,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const defaultCodexHome = resolveManagedCodexHomeDir(process.env, agent.companyId);
|
||||
const effectiveCodexHome = configuredCodexHome ?? preparedManagedCodexHome ?? defaultCodexHome;
|
||||
await fs.mkdir(effectiveCodexHome, { recursive: true });
|
||||
const codexWorkspaceSkillsDir = resolveCodexWorkspaceSkillsDir(cwd);
|
||||
// Inject skills into the same CODEX_HOME that Codex will actually run with
|
||||
// (managed home in the default case, or an explicit override from adapter config).
|
||||
const codexSkillsDir = resolveCodexSkillsDir(effectiveCodexHome);
|
||||
await ensureCodexSkillsInjected(
|
||||
onLog,
|
||||
{
|
||||
skillsHome: codexWorkspaceSkillsDir,
|
||||
skillsHome: codexSkillsDir,
|
||||
skillsEntries: codexSkillEntries,
|
||||
desiredSkillNames,
|
||||
},
|
||||
@@ -415,10 +417,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
`The above agent instructions were loaded from ${instructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
instructionsChars = instructionsPrefix.length;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Loaded agent instructions file: ${instructionsFilePath}\n`,
|
||||
);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
|
||||
@@ -107,8 +107,8 @@ function parsePlanAndEmailFromToken(idToken: string | null, accessToken: string
|
||||
return { email: null, planType: null };
|
||||
}
|
||||
|
||||
export async function readCodexAuthInfo(): Promise<CodexAuthInfo | null> {
|
||||
const authPath = path.join(codexHomeDir(), "auth.json");
|
||||
export async function readCodexAuthInfo(codexHome?: string): Promise<CodexAuthInfo | null> {
|
||||
const authPath = path.join(codexHome ?? codexHomeDir(), "auth.json");
|
||||
let raw: string;
|
||||
try {
|
||||
raw = await fs.readFile(authPath, "utf8");
|
||||
|
||||
@@ -31,7 +31,7 @@ async function buildCodexSkillSnapshot(
|
||||
sourcePath: entry.source,
|
||||
targetPath: null,
|
||||
detail: desiredSet.has(entry.key)
|
||||
? "Will be linked into the workspace .agents/skills directory on the next run."
|
||||
? "Will be linked into the effective CODEX_HOME/skills/ directory on the next run."
|
||||
: null,
|
||||
required: Boolean(entry.required),
|
||||
requiredReason: entry.requiredReason ?? null,
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import path from "node:path";
|
||||
import { parseCodexJsonl } from "./parse.js";
|
||||
import { codexHomeDir, readCodexAuthInfo } from "./quota.js";
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
if (checks.some((check) => check.level === "error")) return "fail";
|
||||
@@ -108,12 +109,23 @@ export async function testEnvironment(
|
||||
detail: `Detected in ${source}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "codex_openai_api_key_missing",
|
||||
level: "warn",
|
||||
message: "OPENAI_API_KEY is not set. Codex runs may fail until authentication is configured.",
|
||||
hint: "Set OPENAI_API_KEY in adapter env, shell environment, or Codex auth configuration.",
|
||||
});
|
||||
const codexHome = isNonEmpty(env.CODEX_HOME) ? env.CODEX_HOME : undefined;
|
||||
const codexAuth = await readCodexAuthInfo(codexHome).catch(() => null);
|
||||
if (codexAuth) {
|
||||
checks.push({
|
||||
code: "codex_native_auth_present",
|
||||
level: "info",
|
||||
message: "Codex is authenticated via its own auth configuration.",
|
||||
detail: codexAuth.email ? `Logged in as ${codexAuth.email}.` : `Credentials found in ${path.join(codexHome ?? codexHomeDir(), "auth.json")}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "codex_openai_api_key_missing",
|
||||
level: "warn",
|
||||
message: "OPENAI_API_KEY is not set. Codex runs may fail until authentication is configured.",
|
||||
hint: "Set OPENAI_API_KEY in adapter env, shell environment, or run `codex auth` to log in.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const canRunProbe =
|
||||
|
||||
@@ -307,10 +307,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
`The above agent instructions were loaded from ${instructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
instructionsChars = instructionsPrefix.length;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Loaded agent instructions file: ${instructionsFilePath}\n`,
|
||||
);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
|
||||
@@ -12,6 +12,8 @@ import {
|
||||
ensurePathInEnv,
|
||||
runChildProcess,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "../index.js";
|
||||
import { parseCursorJsonl } from "./parse.js";
|
||||
@@ -49,6 +51,41 @@ function summarizeProbeDetail(stdout: string, stderr: string, parsedError: strin
|
||||
return clean.length > max ? `${clean.slice(0, max - 1)}…` : clean;
|
||||
}
|
||||
|
||||
export interface CursorAuthInfo {
|
||||
email: string | null;
|
||||
displayName: string | null;
|
||||
userId: number | null;
|
||||
}
|
||||
|
||||
export function cursorConfigPath(cursorHome?: string): string {
|
||||
return path.join(cursorHome ?? path.join(os.homedir(), ".cursor"), "cli-config.json");
|
||||
}
|
||||
|
||||
export async function readCursorAuthInfo(cursorHome?: string): Promise<CursorAuthInfo | null> {
|
||||
let raw: string;
|
||||
try {
|
||||
raw = await fs.readFile(cursorConfigPath(cursorHome), "utf8");
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (typeof parsed !== "object" || parsed === null) return null;
|
||||
const obj = parsed as Record<string, unknown>;
|
||||
const authInfo = obj.authInfo;
|
||||
if (typeof authInfo !== "object" || authInfo === null) return null;
|
||||
const info = authInfo as Record<string, unknown>;
|
||||
const email = typeof info.email === "string" && info.email.trim().length > 0 ? info.email.trim() : null;
|
||||
const displayName = typeof info.displayName === "string" && info.displayName.trim().length > 0 ? info.displayName.trim() : null;
|
||||
const userId = typeof info.userId === "number" ? info.userId : null;
|
||||
if (!email && !displayName && userId == null) return null;
|
||||
return { email, displayName, userId };
|
||||
}
|
||||
|
||||
const CURSOR_AUTH_REQUIRED_RE =
|
||||
/(?:authentication\s+required|not\s+authenticated|not\s+logged\s+in|unauthorized|invalid(?:\s+or\s+missing)?\s+api(?:[_\s-]?key)?|cursor[_\s-]?api[_\s-]?key|run\s+'?agent\s+login'?\s+first|api(?:[_\s-]?key)?(?:\s+is)?\s+required)/i;
|
||||
|
||||
@@ -109,12 +146,25 @@ export async function testEnvironment(
|
||||
detail: `Detected in ${source}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "cursor_api_key_missing",
|
||||
level: "warn",
|
||||
message: "CURSOR_API_KEY is not set. Cursor runs may fail until authentication is configured.",
|
||||
hint: "Set CURSOR_API_KEY in adapter env or run `agent login`.",
|
||||
});
|
||||
const cursorHome = isNonEmpty(env.CURSOR_HOME) ? env.CURSOR_HOME : undefined;
|
||||
const cursorAuth = await readCursorAuthInfo(cursorHome).catch(() => null);
|
||||
if (cursorAuth) {
|
||||
checks.push({
|
||||
code: "cursor_native_auth_present",
|
||||
level: "info",
|
||||
message: "Cursor is authenticated via `agent login`.",
|
||||
detail: cursorAuth.email
|
||||
? `Logged in as ${cursorAuth.email}.`
|
||||
: `Credentials found in ${cursorConfigPath(cursorHome)}.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "cursor_api_key_missing",
|
||||
level: "warn",
|
||||
message: "CURSOR_API_KEY is not set. Cursor runs may fail until authentication is configured.",
|
||||
hint: "Set CURSOR_API_KEY in adapter env or run `agent login`.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const canRunProbe =
|
||||
|
||||
@@ -253,10 +253,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${instructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Loaded agent instructions file: ${instructionsFilePath}\n`,
|
||||
);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
|
||||
@@ -22,6 +22,7 @@ Core fields:
|
||||
- instructionsFilePath (string, optional): absolute path to a markdown instructions file prepended to the run prompt
|
||||
- model (string, required): OpenCode model id in provider/model format (for example anthropic/claude-sonnet-4-5)
|
||||
- variant (string, optional): provider-specific model variant (for example minimal|low|medium|high|max)
|
||||
- dangerouslySkipPermissions (boolean, optional): inject a runtime OpenCode config that allows \`external_directory\` access without interactive prompts; defaults to true for unattended Paperclip runs
|
||||
- promptTemplate (string, optional): run prompt template
|
||||
- command (string, optional): defaults to "opencode"
|
||||
- extraArgs (string[], optional): additional CLI args
|
||||
@@ -37,4 +38,10 @@ Notes:
|
||||
- Paperclip requires an explicit \`model\` value for \`opencode_local\` agents.
|
||||
- Runs are executed with: opencode run --format json ...
|
||||
- Sessions are resumed with --session when stored session cwd matches current cwd.
|
||||
- The adapter sets OPENCODE_DISABLE_PROJECT_CONFIG=true to prevent OpenCode from \
|
||||
writing an opencode.json config file into the project working directory. Model \
|
||||
selection is passed via the --model CLI flag instead.
|
||||
- When \`dangerouslySkipPermissions\` is enabled, Paperclip injects a temporary \
|
||||
runtime config with \`permission.external_directory=allow\` so headless runs do \
|
||||
not stall on approval prompts.
|
||||
`;
|
||||
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
import { isOpenCodeUnknownSessionError, parseOpenCodeJsonl } from "./parse.js";
|
||||
import { ensureOpenCodeModelConfiguredAndAvailable } from "./models.js";
|
||||
import { removeMaintainerOnlySkillSymlinks } from "@paperclipai/adapter-utils/server-utils";
|
||||
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";
|
||||
|
||||
const __moduleDir = path.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
@@ -169,238 +170,247 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
for (const [key, value] of Object.entries(envConfig)) {
|
||||
if (typeof value === "string") env[key] = value;
|
||||
}
|
||||
// Prevent OpenCode from writing an opencode.json config file into the
|
||||
// project working directory (which would pollute the git repo). Model
|
||||
// selection is already handled via the --model CLI flag. Set after the
|
||||
// envConfig loop so user overrides cannot disable this guard.
|
||||
env.OPENCODE_DISABLE_PROJECT_CONFIG = "true";
|
||||
if (!hasExplicitApiKey && authToken) {
|
||||
env.PAPERCLIP_API_KEY = authToken;
|
||||
}
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
const preparedRuntimeConfig = await prepareOpenCodeRuntimeConfig({ env, config });
|
||||
try {
|
||||
const runtimeEnv = Object.fromEntries(
|
||||
Object.entries(ensurePathInEnv({ ...process.env, ...preparedRuntimeConfig.env })).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
}
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
const resolvedInstructionsFilePath = instructionsFilePath
|
||||
? path.resolve(cwd, instructionsFilePath)
|
||||
: "";
|
||||
const instructionsDir = resolvedInstructionsFilePath ? `${path.dirname(resolvedInstructionsFilePath)}/` : "";
|
||||
let instructionsPrefix = "";
|
||||
if (resolvedInstructionsFilePath) {
|
||||
try {
|
||||
const instructionsContents = await fs.readFile(resolvedInstructionsFilePath, "utf8");
|
||||
instructionsPrefix =
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Loaded agent instructions file: ${resolvedInstructionsFilePath}\n`,
|
||||
);
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Warning: could not read agent instructions file "${resolvedInstructionsFilePath}": ${reason}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const commandNotes = (() => {
|
||||
if (!resolvedInstructionsFilePath) return [] as string[];
|
||||
if (instructionsPrefix.length > 0) {
|
||||
return [
|
||||
`Loaded agent instructions from ${resolvedInstructionsFilePath}`,
|
||||
`Prepended instructions + path directive to stdin prompt (relative references from ${instructionsDir}).`,
|
||||
];
|
||||
}
|
||||
return [
|
||||
`Configured instructionsFilePath ${resolvedInstructionsFilePath}, but file could not be read; continuing without injected instructions.`,
|
||||
];
|
||||
})();
|
||||
|
||||
const bootstrapPromptTemplate = asString(config.bootstrapPromptTemplate, "");
|
||||
const templateData = {
|
||||
agentId: agent.id,
|
||||
companyId: agent.companyId,
|
||||
runId,
|
||||
company: { id: agent.companyId },
|
||||
agent,
|
||||
run: { id: runId, source: "on_demand" },
|
||||
context,
|
||||
};
|
||||
const renderedPrompt = renderTemplate(promptTemplate, templateData);
|
||||
const renderedBootstrapPrompt =
|
||||
!sessionId && bootstrapPromptTemplate.trim().length > 0
|
||||
? renderTemplate(bootstrapPromptTemplate, templateData).trim()
|
||||
: "";
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
const prompt = joinPromptSections([
|
||||
instructionsPrefix,
|
||||
renderedBootstrapPrompt,
|
||||
sessionHandoffNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
const promptMetrics = {
|
||||
promptChars: prompt.length,
|
||||
instructionsChars: instructionsPrefix.length,
|
||||
bootstrapPromptChars: renderedBootstrapPrompt.length,
|
||||
sessionHandoffChars: sessionHandoffNote.length,
|
||||
heartbeatPromptChars: renderedPrompt.length,
|
||||
};
|
||||
|
||||
const buildArgs = (resumeSessionId: string | null) => {
|
||||
const args = ["run", "--format", "json"];
|
||||
if (resumeSessionId) args.push("--session", resumeSessionId);
|
||||
if (model) args.push("--model", model);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
return args;
|
||||
};
|
||||
|
||||
const runAttempt = async (resumeSessionId: string | null) => {
|
||||
const args = buildArgs(resumeSessionId);
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "opencode_local",
|
||||
command,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: [...args, `<stdin prompt ${prompt.length} chars>`],
|
||||
env: redactEnvForLogs(env),
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
stdin: prompt,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog,
|
||||
});
|
||||
return {
|
||||
proc,
|
||||
rawStderr: proc.stderr,
|
||||
parsed: parseOpenCodeJsonl(proc.stdout),
|
||||
};
|
||||
};
|
||||
|
||||
const toResult = (
|
||||
attempt: {
|
||||
proc: { exitCode: number | null; signal: string | null; timedOut: boolean; stdout: string; stderr: string };
|
||||
rawStderr: string;
|
||||
parsed: ReturnType<typeof parseOpenCodeJsonl>;
|
||||
},
|
||||
clearSessionOnMissingSession = false,
|
||||
): AdapterExecutionResult => {
|
||||
if (attempt.proc.timedOut) {
|
||||
return {
|
||||
exitCode: attempt.proc.exitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: true,
|
||||
errorMessage: `Timed out after ${timeoutSec}s`,
|
||||
clearSession: clearSessionOnMissingSession,
|
||||
};
|
||||
const timeoutSec = asNumber(config.timeoutSec, 0);
|
||||
const graceSec = asNumber(config.graceSec, 20);
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
|
||||
const runtimeSessionParams = parseObject(runtime.sessionParams);
|
||||
const runtimeSessionId = asString(runtimeSessionParams.sessionId, runtime.sessionId ?? "");
|
||||
const runtimeSessionCwd = asString(runtimeSessionParams.cwd, "");
|
||||
const canResumeSession =
|
||||
runtimeSessionId.length > 0 &&
|
||||
(runtimeSessionCwd.length === 0 || path.resolve(runtimeSessionCwd) === path.resolve(cwd));
|
||||
const sessionId = canResumeSession ? runtimeSessionId : null;
|
||||
if (runtimeSessionId && !canResumeSession) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${runtimeSessionId}" was saved for cwd "${runtimeSessionCwd}" and will not be resumed in "${cwd}".\n`,
|
||||
);
|
||||
}
|
||||
|
||||
const resolvedSessionId =
|
||||
attempt.parsed.sessionId ??
|
||||
(clearSessionOnMissingSession ? null : runtimeSessionId ?? runtime.sessionId ?? null);
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
const instructionsFilePath = asString(config.instructionsFilePath, "").trim();
|
||||
const resolvedInstructionsFilePath = instructionsFilePath
|
||||
? path.resolve(cwd, instructionsFilePath)
|
||||
: "";
|
||||
const instructionsDir = resolvedInstructionsFilePath ? `${path.dirname(resolvedInstructionsFilePath)}/` : "";
|
||||
let instructionsPrefix = "";
|
||||
if (resolvedInstructionsFilePath) {
|
||||
try {
|
||||
const instructionsContents = await fs.readFile(resolvedInstructionsFilePath, "utf8");
|
||||
instructionsPrefix =
|
||||
`${instructionsContents}\n\n` +
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsDir}.\n\n`;
|
||||
} catch (err) {
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Warning: could not read agent instructions file "${resolvedInstructionsFilePath}": ${reason}\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
const stderrLine = firstNonEmptyLine(attempt.proc.stderr);
|
||||
const rawExitCode = attempt.proc.exitCode;
|
||||
const synthesizedExitCode = parsedError && (rawExitCode ?? 0) === 0 ? 1 : rawExitCode;
|
||||
const fallbackErrorMessage =
|
||||
parsedError ||
|
||||
stderrLine ||
|
||||
`OpenCode exited with code ${synthesizedExitCode ?? -1}`;
|
||||
const modelId = model || null;
|
||||
const commandNotes = (() => {
|
||||
const notes = [...preparedRuntimeConfig.notes];
|
||||
if (!resolvedInstructionsFilePath) return notes;
|
||||
if (instructionsPrefix.length > 0) {
|
||||
notes.push(`Loaded agent instructions from ${resolvedInstructionsFilePath}`);
|
||||
notes.push(
|
||||
`Prepended instructions + path directive to stdin prompt (relative references from ${instructionsDir}).`,
|
||||
);
|
||||
return notes;
|
||||
}
|
||||
notes.push(
|
||||
`Configured instructionsFilePath ${resolvedInstructionsFilePath}, but file could not be read; continuing without injected instructions.`,
|
||||
);
|
||||
return notes;
|
||||
})();
|
||||
|
||||
return {
|
||||
exitCode: synthesizedExitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage: (synthesizedExitCode ?? 0) === 0 ? null : fallbackErrorMessage,
|
||||
usage: {
|
||||
inputTokens: attempt.parsed.usage.inputTokens,
|
||||
outputTokens: attempt.parsed.usage.outputTokens,
|
||||
cachedInputTokens: attempt.parsed.usage.cachedInputTokens,
|
||||
},
|
||||
sessionId: resolvedSessionId,
|
||||
sessionParams: resolvedSessionParams,
|
||||
sessionDisplayId: resolvedSessionId,
|
||||
provider: parseModelProvider(modelId),
|
||||
biller: resolveOpenCodeBiller(runtimeEnv, parseModelProvider(modelId)),
|
||||
model: modelId,
|
||||
billingType: "unknown",
|
||||
costUsd: attempt.parsed.costUsd,
|
||||
resultJson: {
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
},
|
||||
summary: attempt.parsed.summary,
|
||||
clearSession: Boolean(clearSessionOnMissingSession && !attempt.parsed.sessionId),
|
||||
const bootstrapPromptTemplate = asString(config.bootstrapPromptTemplate, "");
|
||||
const templateData = {
|
||||
agentId: agent.id,
|
||||
companyId: agent.companyId,
|
||||
runId,
|
||||
company: { id: agent.companyId },
|
||||
agent,
|
||||
run: { id: runId, source: "on_demand" },
|
||||
context,
|
||||
};
|
||||
const renderedPrompt = renderTemplate(promptTemplate, templateData);
|
||||
const renderedBootstrapPrompt =
|
||||
!sessionId && bootstrapPromptTemplate.trim().length > 0
|
||||
? renderTemplate(bootstrapPromptTemplate, templateData).trim()
|
||||
: "";
|
||||
const sessionHandoffNote = asString(context.paperclipSessionHandoffMarkdown, "").trim();
|
||||
const prompt = joinPromptSections([
|
||||
instructionsPrefix,
|
||||
renderedBootstrapPrompt,
|
||||
sessionHandoffNote,
|
||||
renderedPrompt,
|
||||
]);
|
||||
const promptMetrics = {
|
||||
promptChars: prompt.length,
|
||||
instructionsChars: instructionsPrefix.length,
|
||||
bootstrapPromptChars: renderedBootstrapPrompt.length,
|
||||
sessionHandoffChars: sessionHandoffNote.length,
|
||||
heartbeatPromptChars: renderedPrompt.length,
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
const buildArgs = (resumeSessionId: string | null) => {
|
||||
const args = ["run", "--format", "json"];
|
||||
if (resumeSessionId) args.push("--session", resumeSessionId);
|
||||
if (model) args.push("--model", model);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
return args;
|
||||
};
|
||||
|
||||
const runAttempt = async (resumeSessionId: string | null) => {
|
||||
const args = buildArgs(resumeSessionId);
|
||||
if (onMeta) {
|
||||
await onMeta({
|
||||
adapterType: "opencode_local",
|
||||
command,
|
||||
cwd,
|
||||
commandNotes,
|
||||
commandArgs: [...args, `<stdin prompt ${prompt.length} chars>`],
|
||||
env: redactEnvForLogs(preparedRuntimeConfig.env),
|
||||
prompt,
|
||||
promptMetrics,
|
||||
context,
|
||||
});
|
||||
}
|
||||
|
||||
const proc = await runChildProcess(runId, command, args, {
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
stdin: prompt,
|
||||
timeoutSec,
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog,
|
||||
});
|
||||
return {
|
||||
proc,
|
||||
rawStderr: proc.stderr,
|
||||
parsed: parseOpenCodeJsonl(proc.stdout),
|
||||
};
|
||||
};
|
||||
|
||||
const toResult = (
|
||||
attempt: {
|
||||
proc: { exitCode: number | null; signal: string | null; timedOut: boolean; stdout: string; stderr: string };
|
||||
rawStderr: string;
|
||||
parsed: ReturnType<typeof parseOpenCodeJsonl>;
|
||||
},
|
||||
clearSessionOnMissingSession = false,
|
||||
): AdapterExecutionResult => {
|
||||
if (attempt.proc.timedOut) {
|
||||
return {
|
||||
exitCode: attempt.proc.exitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: true,
|
||||
errorMessage: `Timed out after ${timeoutSec}s`,
|
||||
clearSession: clearSessionOnMissingSession,
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedSessionId =
|
||||
attempt.parsed.sessionId ??
|
||||
(clearSessionOnMissingSession ? null : runtimeSessionId ?? runtime.sessionId ?? null);
|
||||
const resolvedSessionParams = resolvedSessionId
|
||||
? ({
|
||||
sessionId: resolvedSessionId,
|
||||
cwd,
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
...(workspaceRepoUrl ? { repoUrl: workspaceRepoUrl } : {}),
|
||||
...(workspaceRepoRef ? { repoRef: workspaceRepoRef } : {}),
|
||||
} as Record<string, unknown>)
|
||||
: null;
|
||||
|
||||
const parsedError = typeof attempt.parsed.errorMessage === "string" ? attempt.parsed.errorMessage.trim() : "";
|
||||
const stderrLine = firstNonEmptyLine(attempt.proc.stderr);
|
||||
const rawExitCode = attempt.proc.exitCode;
|
||||
const synthesizedExitCode = parsedError && (rawExitCode ?? 0) === 0 ? 1 : rawExitCode;
|
||||
const fallbackErrorMessage =
|
||||
parsedError ||
|
||||
stderrLine ||
|
||||
`OpenCode exited with code ${synthesizedExitCode ?? -1}`;
|
||||
const modelId = model || null;
|
||||
|
||||
return {
|
||||
exitCode: synthesizedExitCode,
|
||||
signal: attempt.proc.signal,
|
||||
timedOut: false,
|
||||
errorMessage: (synthesizedExitCode ?? 0) === 0 ? null : fallbackErrorMessage,
|
||||
usage: {
|
||||
inputTokens: attempt.parsed.usage.inputTokens,
|
||||
outputTokens: attempt.parsed.usage.outputTokens,
|
||||
cachedInputTokens: attempt.parsed.usage.cachedInputTokens,
|
||||
},
|
||||
sessionId: resolvedSessionId,
|
||||
sessionParams: resolvedSessionParams,
|
||||
sessionDisplayId: resolvedSessionId,
|
||||
provider: parseModelProvider(modelId),
|
||||
biller: resolveOpenCodeBiller(runtimeEnv, parseModelProvider(modelId)),
|
||||
model: modelId,
|
||||
billingType: "unknown",
|
||||
costUsd: attempt.parsed.costUsd,
|
||||
resultJson: {
|
||||
stdout: attempt.proc.stdout,
|
||||
stderr: attempt.proc.stderr,
|
||||
},
|
||||
summary: attempt.parsed.summary,
|
||||
clearSession: Boolean(clearSessionOnMissingSession && !attempt.parsed.sessionId),
|
||||
};
|
||||
};
|
||||
|
||||
const initial = await runAttempt(sessionId);
|
||||
const initialFailed =
|
||||
!initial.proc.timedOut && ((initial.proc.exitCode ?? 0) !== 0 || Boolean(initial.parsed.errorMessage));
|
||||
if (
|
||||
sessionId &&
|
||||
initialFailed &&
|
||||
isOpenCodeUnknownSessionError(initial.proc.stdout, initial.rawStderr)
|
||||
) {
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] OpenCode session "${sessionId}" is unavailable; retrying with a fresh session.\n`,
|
||||
);
|
||||
const retry = await runAttempt(null);
|
||||
return toResult(retry, true);
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
} finally {
|
||||
await preparedRuntimeConfig.cleanup();
|
||||
}
|
||||
|
||||
return toResult(initial);
|
||||
}
|
||||
|
||||
@@ -120,7 +120,8 @@ export async function discoverOpenCodeModels(input: {
|
||||
// /etc/passwd entry (e.g. `docker run --user 1234` with a minimal
|
||||
// image). Fall back to process.env.HOME.
|
||||
}
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env, ...(resolvedHome ? { HOME: resolvedHome } : {}) }));
|
||||
// Prevent OpenCode from writing an opencode.json into the working directory.
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env, ...(resolvedHome ? { HOME: resolvedHome } : {}), OPENCODE_DISABLE_PROJECT_CONFIG: "true" }));
|
||||
|
||||
const result = await runChildProcess(
|
||||
`opencode-models-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
|
||||
@@ -0,0 +1,79 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";
|
||||
|
||||
const cleanupPaths = new Set<string>();
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
[...cleanupPaths].map(async (filepath) => {
|
||||
await fs.rm(filepath, { recursive: true, force: true });
|
||||
cleanupPaths.delete(filepath);
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
async function makeConfigHome(initialConfig?: Record<string, unknown>) {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-test-"));
|
||||
cleanupPaths.add(root);
|
||||
const configDir = path.join(root, "opencode");
|
||||
await fs.mkdir(configDir, { recursive: true });
|
||||
if (initialConfig) {
|
||||
await fs.writeFile(
|
||||
path.join(configDir, "opencode.json"),
|
||||
`${JSON.stringify(initialConfig, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
}
|
||||
return root;
|
||||
}
|
||||
|
||||
describe("prepareOpenCodeRuntimeConfig", () => {
|
||||
it("injects an external_directory allow rule by default", async () => {
|
||||
const configHome = await makeConfigHome({
|
||||
permission: {
|
||||
read: "allow",
|
||||
},
|
||||
theme: "system",
|
||||
});
|
||||
|
||||
const prepared = await prepareOpenCodeRuntimeConfig({
|
||||
env: { XDG_CONFIG_HOME: configHome },
|
||||
config: {},
|
||||
});
|
||||
cleanupPaths.add(prepared.env.XDG_CONFIG_HOME);
|
||||
|
||||
expect(prepared.env.XDG_CONFIG_HOME).not.toBe(configHome);
|
||||
const runtimeConfig = JSON.parse(
|
||||
await fs.readFile(
|
||||
path.join(prepared.env.XDG_CONFIG_HOME, "opencode", "opencode.json"),
|
||||
"utf8",
|
||||
),
|
||||
) as Record<string, unknown>;
|
||||
expect(runtimeConfig).toMatchObject({
|
||||
theme: "system",
|
||||
permission: {
|
||||
read: "allow",
|
||||
external_directory: "allow",
|
||||
},
|
||||
});
|
||||
|
||||
await prepared.cleanup();
|
||||
cleanupPaths.delete(prepared.env.XDG_CONFIG_HOME);
|
||||
await expect(fs.access(prepared.env.XDG_CONFIG_HOME)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("respects explicit opt-out", async () => {
|
||||
const configHome = await makeConfigHome();
|
||||
const prepared = await prepareOpenCodeRuntimeConfig({
|
||||
env: { XDG_CONFIG_HOME: configHome },
|
||||
config: { dangerouslySkipPermissions: false },
|
||||
});
|
||||
|
||||
expect(prepared.env).toEqual({ XDG_CONFIG_HOME: configHome });
|
||||
expect(prepared.notes).toEqual([]);
|
||||
await prepared.cleanup();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { asBoolean } from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
type PreparedOpenCodeRuntimeConfig = {
|
||||
env: Record<string, string>;
|
||||
notes: string[];
|
||||
cleanup: () => Promise<void>;
|
||||
};
|
||||
|
||||
function resolveXdgConfigHome(env: Record<string, string>): string {
|
||||
return (
|
||||
(typeof env.XDG_CONFIG_HOME === "string" && env.XDG_CONFIG_HOME.trim()) ||
|
||||
(typeof process.env.XDG_CONFIG_HOME === "string" && process.env.XDG_CONFIG_HOME.trim()) ||
|
||||
path.join(os.homedir(), ".config")
|
||||
);
|
||||
}
|
||||
|
||||
function isPlainObject(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === "object" && value !== null && !Array.isArray(value);
|
||||
}
|
||||
|
||||
async function readJsonObject(filepath: string): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const raw = await fs.readFile(filepath, "utf8");
|
||||
const parsed = JSON.parse(raw);
|
||||
return isPlainObject(parsed) ? parsed : {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
export async function prepareOpenCodeRuntimeConfig(input: {
|
||||
env: Record<string, string>;
|
||||
config: Record<string, unknown>;
|
||||
}): Promise<PreparedOpenCodeRuntimeConfig> {
|
||||
const skipPermissions = asBoolean(input.config.dangerouslySkipPermissions, true);
|
||||
if (!skipPermissions) {
|
||||
return {
|
||||
env: input.env,
|
||||
notes: [],
|
||||
cleanup: async () => {},
|
||||
};
|
||||
}
|
||||
|
||||
const sourceConfigDir = path.join(resolveXdgConfigHome(input.env), "opencode");
|
||||
const runtimeConfigHome = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-opencode-config-"));
|
||||
const runtimeConfigDir = path.join(runtimeConfigHome, "opencode");
|
||||
const runtimeConfigPath = path.join(runtimeConfigDir, "opencode.json");
|
||||
|
||||
await fs.mkdir(runtimeConfigDir, { recursive: true });
|
||||
try {
|
||||
await fs.cp(sourceConfigDir, runtimeConfigDir, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
errorOnExist: false,
|
||||
dereference: false,
|
||||
});
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException | null)?.code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const existingConfig = await readJsonObject(runtimeConfigPath);
|
||||
const existingPermission = isPlainObject(existingConfig.permission)
|
||||
? existingConfig.permission
|
||||
: {};
|
||||
const nextConfig = {
|
||||
...existingConfig,
|
||||
permission: {
|
||||
...existingPermission,
|
||||
external_directory: "allow",
|
||||
},
|
||||
};
|
||||
await fs.writeFile(runtimeConfigPath, `${JSON.stringify(nextConfig, null, 2)}\n`, "utf8");
|
||||
|
||||
return {
|
||||
env: {
|
||||
...input.env,
|
||||
XDG_CONFIG_HOME: runtimeConfigHome,
|
||||
},
|
||||
notes: [
|
||||
"Injected runtime OpenCode config with permission.external_directory=allow to avoid headless approval prompts.",
|
||||
],
|
||||
cleanup: async () => {
|
||||
await fs.rm(runtimeConfigHome, { recursive: true, force: true });
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import type {
|
||||
AdapterEnvironmentTestResult,
|
||||
} from "@paperclipai/adapter-utils";
|
||||
import {
|
||||
asBoolean,
|
||||
asString,
|
||||
asStringArray,
|
||||
parseObject,
|
||||
@@ -14,6 +15,7 @@ import {
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
import { discoverOpenCodeModels, ensureOpenCodeModelConfiguredAndAvailable } from "./models.js";
|
||||
import { parseOpenCodeJsonl } from "./parse.js";
|
||||
import { prepareOpenCodeRuntimeConfig } from "./runtime-config.js";
|
||||
|
||||
function summarizeStatus(checks: AdapterEnvironmentCheck[]): AdapterEnvironmentTestResult["status"] {
|
||||
if (checks.some((check) => check.level === "error")) return "fail";
|
||||
@@ -90,224 +92,238 @@ export async function testEnvironment(
|
||||
});
|
||||
}
|
||||
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...env }));
|
||||
|
||||
const cwdInvalid = checks.some((check) => check.code === "opencode_cwd_invalid");
|
||||
if (cwdInvalid) {
|
||||
// Prevent OpenCode from writing an opencode.json into the working directory.
|
||||
env.OPENCODE_DISABLE_PROJECT_CONFIG = "true";
|
||||
const preparedRuntimeConfig = await prepareOpenCodeRuntimeConfig({ env, config });
|
||||
if (asBoolean(config.dangerouslySkipPermissions, true)) {
|
||||
checks.push({
|
||||
code: "opencode_command_skipped",
|
||||
level: "warn",
|
||||
message: "Skipped command check because working directory validation failed.",
|
||||
detail: command,
|
||||
code: "opencode_headless_permissions_enabled",
|
||||
level: "info",
|
||||
message: "Headless OpenCode external-directory permissions are auto-approved for unattended runs.",
|
||||
});
|
||||
} else {
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
}
|
||||
try {
|
||||
const runtimeEnv = normalizeEnv(ensurePathInEnv({ ...process.env, ...preparedRuntimeConfig.env }));
|
||||
|
||||
const cwdInvalid = checks.some((check) => check.code === "opencode_cwd_invalid");
|
||||
if (cwdInvalid) {
|
||||
checks.push({
|
||||
code: "opencode_command_resolvable",
|
||||
level: "info",
|
||||
message: `Command is executable: ${command}`,
|
||||
});
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_command_unresolvable",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Command is not executable",
|
||||
code: "opencode_command_skipped",
|
||||
level: "warn",
|
||||
message: "Skipped command check because working directory validation failed.",
|
||||
detail: command,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const canRunProbe =
|
||||
checks.every((check) => check.code !== "opencode_cwd_invalid" && check.code !== "opencode_command_unresolvable");
|
||||
|
||||
let modelValidationPassed = false;
|
||||
const configuredModel = asString(config.model, "").trim();
|
||||
|
||||
if (canRunProbe && configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
} else {
|
||||
try {
|
||||
await ensureCommandResolvable(command, cwd, runtimeEnv);
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
code: "opencode_command_resolvable",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
message: `Command is executable: ${command}`,
|
||||
});
|
||||
} else {
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_models_empty",
|
||||
code: "opencode_command_unresolvable",
|
||||
level: "error",
|
||||
message: "OpenCode returned no models.",
|
||||
hint: "Run `opencode models` and verify provider authentication.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "error",
|
||||
message: errMsg || "OpenCode model discovery failed.",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
message: err instanceof Error ? err.message : "Command is not executable",
|
||||
detail: command,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else if (canRunProbe && !configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
|
||||
const canRunProbe =
|
||||
checks.every((check) => check.code !== "opencode_cwd_invalid" && check.code !== "opencode_command_unresolvable");
|
||||
|
||||
let modelValidationPassed = false;
|
||||
const configuredModel = asString(config.model, "").trim();
|
||||
|
||||
if (canRunProbe && configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_empty",
|
||||
level: "error",
|
||||
message: "OpenCode returned no models.",
|
||||
hint: "Run `opencode models` and verify provider authentication.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "error",
|
||||
message: errMsg || "OpenCode model discovery failed.",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "warn",
|
||||
message: errMsg || "OpenCode model discovery failed (best-effort, no model configured).",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
} else if (canRunProbe && !configuredModel) {
|
||||
try {
|
||||
const discovered = await discoverOpenCodeModels({ command, cwd, env: runtimeEnv });
|
||||
if (discovered.length > 0) {
|
||||
checks.push({
|
||||
code: "opencode_models_discovered",
|
||||
level: "info",
|
||||
message: `Discovered ${discovered.length} model(s) from OpenCode providers.`,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
const errMsg = err instanceof Error ? err.message : String(err);
|
||||
if (/ProviderModelNotFoundError/i.test(errMsg)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
detail: errMsg,
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_models_discovery_failed",
|
||||
level: "warn",
|
||||
message: errMsg || "OpenCode model discovery failed (best-effort, no model configured).",
|
||||
hint: "Run `opencode models` manually to verify provider auth and config.",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const modelUnavailable = checks.some((check) => check.code === "opencode_hello_probe_model_unavailable");
|
||||
if (!configuredModel && !modelUnavailable) {
|
||||
// No model configured – skip model requirement if no model-related checks exist
|
||||
} else if (configuredModel && canRunProbe) {
|
||||
try {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model: configuredModel,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
});
|
||||
checks.push({
|
||||
code: "opencode_model_configured",
|
||||
level: "info",
|
||||
message: `Configured model: ${configuredModel}`,
|
||||
});
|
||||
modelValidationPassed = true;
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_model_invalid",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Configured model is unavailable.",
|
||||
hint: "Run `opencode models` and choose a currently available provider/model ID.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (canRunProbe && modelValidationPassed) {
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const variant = asString(config.variant, "").trim();
|
||||
const probeModel = configuredModel;
|
||||
|
||||
const args = ["run", "--format", "json"];
|
||||
args.push("--model", probeModel);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
try {
|
||||
const probe = await runChildProcess(
|
||||
`opencode-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
const modelUnavailable = checks.some((check) => check.code === "opencode_hello_probe_model_unavailable");
|
||||
if (!configuredModel && !modelUnavailable) {
|
||||
// No model configured – skip model requirement if no model-related checks exist
|
||||
} else if (configuredModel && canRunProbe) {
|
||||
try {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
model: configuredModel,
|
||||
command,
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec: 60,
|
||||
graceSec: 5,
|
||||
stdin: "Respond with hello.",
|
||||
onLog: async () => {},
|
||||
},
|
||||
);
|
||||
});
|
||||
checks.push({
|
||||
code: "opencode_model_configured",
|
||||
level: "info",
|
||||
message: `Configured model: ${configuredModel}`,
|
||||
});
|
||||
modelValidationPassed = true;
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_model_invalid",
|
||||
level: "error",
|
||||
message: err instanceof Error ? err.message : "Configured model is unavailable.",
|
||||
hint: "Run `opencode models` and choose a currently available provider/model ID.",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const parsed = parseOpenCodeJsonl(probe.stdout);
|
||||
const detail = summarizeProbeDetail(probe.stdout, probe.stderr, parsed.errorMessage);
|
||||
const authEvidence = `${parsed.errorMessage ?? ""}\n${probe.stdout}\n${probe.stderr}`.trim();
|
||||
if (canRunProbe && modelValidationPassed) {
|
||||
const extraArgs = (() => {
|
||||
const fromExtraArgs = asStringArray(config.extraArgs);
|
||||
if (fromExtraArgs.length > 0) return fromExtraArgs;
|
||||
return asStringArray(config.args);
|
||||
})();
|
||||
const variant = asString(config.variant, "").trim();
|
||||
const probeModel = configuredModel;
|
||||
|
||||
if (probe.timedOut) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_timed_out",
|
||||
level: "warn",
|
||||
message: "OpenCode hello probe timed out.",
|
||||
hint: "Retry the probe. If this persists, run OpenCode manually in this working directory.",
|
||||
});
|
||||
} else if ((probe.exitCode ?? 1) === 0 && !parsed.errorMessage) {
|
||||
const summary = parsed.summary.trim();
|
||||
const hasHello = /\bhello\b/i.test(summary);
|
||||
checks.push({
|
||||
code: hasHello ? "opencode_hello_probe_passed" : "opencode_hello_probe_unexpected_output",
|
||||
level: hasHello ? "info" : "warn",
|
||||
message: hasHello
|
||||
? "OpenCode hello probe succeeded."
|
||||
: "OpenCode probe ran but did not return `hello` as expected.",
|
||||
...(summary ? { detail: summary.replace(/\s+/g, " ").trim().slice(0, 240) } : {}),
|
||||
...(hasHello
|
||||
? {}
|
||||
: {
|
||||
hint: "Run `opencode run --format json` manually and prompt `Respond with hello` to inspect output.",
|
||||
}),
|
||||
});
|
||||
} else if (/ProviderModelNotFoundError/i.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else if (OPENCODE_AUTH_REQUIRED_RE.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_auth_required",
|
||||
level: "warn",
|
||||
message: "OpenCode is installed, but provider authentication is not ready.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode auth login` or set provider credentials, then retry the probe.",
|
||||
});
|
||||
} else {
|
||||
const args = ["run", "--format", "json"];
|
||||
args.push("--model", probeModel);
|
||||
if (variant) args.push("--variant", variant);
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
try {
|
||||
const probe = await runChildProcess(
|
||||
`opencode-envtest-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
command,
|
||||
args,
|
||||
{
|
||||
cwd,
|
||||
env: runtimeEnv,
|
||||
timeoutSec: 60,
|
||||
graceSec: 5,
|
||||
stdin: "Respond with hello.",
|
||||
onLog: async () => {},
|
||||
},
|
||||
);
|
||||
|
||||
const parsed = parseOpenCodeJsonl(probe.stdout);
|
||||
const detail = summarizeProbeDetail(probe.stdout, probe.stderr, parsed.errorMessage);
|
||||
const authEvidence = `${parsed.errorMessage ?? ""}\n${probe.stdout}\n${probe.stderr}`.trim();
|
||||
|
||||
if (probe.timedOut) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_timed_out",
|
||||
level: "warn",
|
||||
message: "OpenCode hello probe timed out.",
|
||||
hint: "Retry the probe. If this persists, run OpenCode manually in this working directory.",
|
||||
});
|
||||
} else if ((probe.exitCode ?? 1) === 0 && !parsed.errorMessage) {
|
||||
const summary = parsed.summary.trim();
|
||||
const hasHello = /\bhello\b/i.test(summary);
|
||||
checks.push({
|
||||
code: hasHello ? "opencode_hello_probe_passed" : "opencode_hello_probe_unexpected_output",
|
||||
level: hasHello ? "info" : "warn",
|
||||
message: hasHello
|
||||
? "OpenCode hello probe succeeded."
|
||||
: "OpenCode probe ran but did not return `hello` as expected.",
|
||||
...(summary ? { detail: summary.replace(/\s+/g, " ").trim().slice(0, 240) } : {}),
|
||||
...(hasHello
|
||||
? {}
|
||||
: {
|
||||
hint: "Run `opencode run --format json` manually and prompt `Respond with hello` to inspect output.",
|
||||
}),
|
||||
});
|
||||
} else if (/ProviderModelNotFoundError/i.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_model_unavailable",
|
||||
level: "warn",
|
||||
message: "The configured model was not found by the provider.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode models` and choose an available provider/model ID.",
|
||||
});
|
||||
} else if (OPENCODE_AUTH_REQUIRED_RE.test(authEvidence)) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_auth_required",
|
||||
level: "warn",
|
||||
message: "OpenCode is installed, but provider authentication is not ready.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode auth login` or set provider credentials, then retry the probe.",
|
||||
});
|
||||
} else {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
...(detail ? { detail } : {}),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
...(detail ? { detail } : {}),
|
||||
detail: err instanceof Error ? err.message : String(err),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
checks.push({
|
||||
code: "opencode_hello_probe_failed",
|
||||
level: "error",
|
||||
message: "OpenCode hello probe failed.",
|
||||
detail: err instanceof Error ? err.message : String(err),
|
||||
hint: "Run `opencode run --format json` manually in this working directory to debug.",
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await preparedRuntimeConfig.cleanup();
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -58,6 +58,7 @@ export function buildOpenCodeLocalConfig(v: CreateConfigValues): Record<string,
|
||||
if (v.bootstrapPrompt) ac.bootstrapPromptTemplate = v.bootstrapPrompt;
|
||||
if (v.model) ac.model = v.model;
|
||||
if (v.thinkingEffort) ac.variant = v.thinkingEffort;
|
||||
ac.dangerouslySkipPermissions = v.dangerouslySkipPermissions;
|
||||
// OpenCode sessions can run until the CLI exits naturally; keep timeout disabled (0)
|
||||
// and rely on graceSec for termination handling when a timeout is configured elsewhere.
|
||||
ac.timeoutSec = 0;
|
||||
|
||||
@@ -266,10 +266,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
`The above agent instructions were loaded from ${resolvedInstructionsFilePath}. ` +
|
||||
`Resolve any relative file references from ${instructionsFileDir}.\n\n` +
|
||||
`You are agent {{agent.id}} ({{agent.name}}). Continue your Paperclip work.`;
|
||||
await onLog(
|
||||
"stdout",
|
||||
`[paperclip] Loaded agent instructions file: ${resolvedInstructionsFilePath}\n`,
|
||||
);
|
||||
} catch (err) {
|
||||
instructionsReadFailed = true;
|
||||
const reason = err instanceof Error ? err.message : String(err);
|
||||
@@ -330,8 +326,9 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
const buildArgs = (sessionFile: string): string[] => {
|
||||
const args: string[] = [];
|
||||
|
||||
// Use RPC mode for proper lifecycle management (waits for agent completion)
|
||||
args.push("--mode", "rpc");
|
||||
// Use JSON mode for structured output with print mode (non-interactive)
|
||||
args.push("--mode", "json");
|
||||
args.push("-p"); // Non-interactive mode: process prompt and exit
|
||||
|
||||
// Use --append-system-prompt to extend Pi's default system prompt
|
||||
args.push("--append-system-prompt", renderedSystemPromptExtension);
|
||||
@@ -347,19 +344,13 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
args.push("--skill", PI_AGENT_SKILLS_DIR);
|
||||
|
||||
if (extraArgs.length > 0) args.push(...extraArgs);
|
||||
|
||||
// Add the user prompt as the last argument
|
||||
args.push(userPrompt);
|
||||
|
||||
return args;
|
||||
};
|
||||
|
||||
const buildRpcStdin = (): string => {
|
||||
// Send the prompt as an RPC command
|
||||
const promptCommand = {
|
||||
type: "prompt",
|
||||
message: userPrompt,
|
||||
};
|
||||
return JSON.stringify(promptCommand) + "\n";
|
||||
};
|
||||
|
||||
const runAttempt = async (sessionFile: string) => {
|
||||
const args = buildArgs(sessionFile);
|
||||
if (onMeta) {
|
||||
@@ -406,7 +397,6 @@ export async function execute(ctx: AdapterExecutionContext): Promise<AdapterExec
|
||||
graceSec,
|
||||
onSpawn,
|
||||
onLog: bufferedOnLog,
|
||||
stdin: buildRpcStdin(),
|
||||
});
|
||||
|
||||
// Flush any remaining buffer content
|
||||
|
||||
@@ -131,7 +131,9 @@ export async function discoverPiModels(input: {
|
||||
throw new Error(detail ? `\`pi --list-models\` failed: ${detail}` : "`pi --list-models` failed.");
|
||||
}
|
||||
|
||||
return sortModels(dedupeModels(parseModelsOutput(result.stdout)));
|
||||
// Pi outputs model list to stderr, but fall back to stdout for older versions
|
||||
const output = result.stderr || result.stdout;
|
||||
return sortModels(dedupeModels(parseModelsOutput(output)));
|
||||
}
|
||||
|
||||
function normalizeEnv(input: unknown): Record<string, string> {
|
||||
|
||||
@@ -17,19 +17,39 @@ function asString(value: unknown, fallback = ""): string {
|
||||
return typeof value === "string" ? value : fallback;
|
||||
}
|
||||
|
||||
function extractTextContent(content: string | Array<{ type: string; text?: string }>): string {
|
||||
if (typeof content === "string") return content;
|
||||
if (!Array.isArray(content)) return "";
|
||||
return content
|
||||
.filter((c) => c.type === "text" && c.text)
|
||||
.map((c) => c.text!)
|
||||
.join("");
|
||||
function extractTextContent(content: string | Array<{ type: string; text?: string; thinking?: string }>): { text: string; thinking: string } {
|
||||
if (typeof content === "string") return { text: content, thinking: "" };
|
||||
if (!Array.isArray(content)) return { text: "", thinking: "" };
|
||||
|
||||
let text = "";
|
||||
let thinking = "";
|
||||
|
||||
for (const c of content) {
|
||||
if (c.type === "text" && c.text) {
|
||||
text += c.text;
|
||||
}
|
||||
if (c.type === "thinking" && c.thinking) {
|
||||
thinking += c.thinking;
|
||||
}
|
||||
}
|
||||
|
||||
return { text, thinking };
|
||||
}
|
||||
|
||||
// Track pending tool calls for proper toolUseId matching
|
||||
let pendingToolCalls = new Map<string, { toolName: string; args: unknown }>();
|
||||
|
||||
export function resetParserState(): void {
|
||||
pendingToolCalls.clear();
|
||||
}
|
||||
|
||||
export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const parsed = asRecord(safeJsonParse(line));
|
||||
if (!parsed) {
|
||||
return [{ kind: "stdout", ts, text: line }];
|
||||
// Non-JSON line, treat as raw stdout
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed) return [];
|
||||
return [{ kind: "stdout", ts, text: trimmed }];
|
||||
}
|
||||
|
||||
const type = asString(parsed.type);
|
||||
@@ -41,16 +61,64 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
|
||||
// Agent lifecycle
|
||||
if (type === "agent_start") {
|
||||
return [{ kind: "system", ts, text: "Pi agent started" }];
|
||||
return [{ kind: "system", ts, text: "🚀 Pi agent started" }];
|
||||
}
|
||||
|
||||
if (type === "agent_end") {
|
||||
return [{ kind: "system", ts, text: "Pi agent finished" }];
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
// Extract final message from messages array if available
|
||||
const messages = parsed.messages as Array<Record<string, unknown>> | undefined;
|
||||
if (messages && messages.length > 0) {
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
if (lastMessage?.role === "assistant") {
|
||||
const content = lastMessage.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
|
||||
// Extract usage
|
||||
const usage = asRecord(lastMessage.usage);
|
||||
if (usage) {
|
||||
const inputTokens = (usage.inputTokens ?? usage.input ?? 0) as number;
|
||||
const outputTokens = (usage.outputTokens ?? usage.output ?? 0) as number;
|
||||
const cachedTokens = (usage.cacheRead ?? usage.cachedInputTokens ?? 0) as number;
|
||||
const costRecord = asRecord(usage.cost);
|
||||
const costUsd = (costRecord?.total ?? usage.costUsd ?? 0) as number;
|
||||
|
||||
if (inputTokens > 0 || outputTokens > 0) {
|
||||
entries.push({
|
||||
kind: "result",
|
||||
ts,
|
||||
text: "Run completed",
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
cachedTokens,
|
||||
costUsd,
|
||||
subtype: "end",
|
||||
isError: false,
|
||||
errors: [],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (entries.length === 0) {
|
||||
entries.push({ kind: "system", ts, text: "✅ Pi agent finished" });
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Turn lifecycle
|
||||
if (type === "turn_start") {
|
||||
return [{ kind: "system", ts, text: "Turn started" }];
|
||||
return []; // Skip noisy lifecycle events
|
||||
}
|
||||
|
||||
if (type === "turn_end") {
|
||||
@@ -60,16 +128,21 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
if (message) {
|
||||
const content = message.content as string | Array<{ type: string; text?: string }>;
|
||||
const text = extractTextContent(content);
|
||||
const content = message.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
}
|
||||
|
||||
// Process tool results
|
||||
// Process tool results - match with pending tool calls
|
||||
if (toolResults) {
|
||||
for (const tr of toolResults) {
|
||||
const toolCallId = asString(tr.toolCallId, `tool-${Date.now()}`);
|
||||
const content = tr.content;
|
||||
const isError = tr.isError === true;
|
||||
|
||||
@@ -78,23 +151,31 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
if (typeof content === "string") {
|
||||
contentStr = content;
|
||||
} else if (Array.isArray(content)) {
|
||||
contentStr = extractTextContent(content as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(content as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(content);
|
||||
} else {
|
||||
contentStr = JSON.stringify(content);
|
||||
}
|
||||
|
||||
// Get tool name from pending calls if available
|
||||
const pendingCall = pendingToolCalls.get(toolCallId);
|
||||
const toolName = asString(tr.toolName, pendingCall?.toolName || "tool");
|
||||
|
||||
entries.push({
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: asString(tr.toolCallId, "unknown"),
|
||||
toolName: asString(tr.toolName),
|
||||
toolUseId: toolCallId,
|
||||
toolName,
|
||||
content: contentStr,
|
||||
isError,
|
||||
});
|
||||
|
||||
// Clean up pending call
|
||||
pendingToolCalls.delete(toolCallId);
|
||||
}
|
||||
}
|
||||
|
||||
return entries.length > 0 ? entries : [{ kind: "system", ts, text: "Turn ended" }];
|
||||
return entries;
|
||||
}
|
||||
|
||||
// Message streaming
|
||||
@@ -106,33 +187,81 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
const assistantEvent = asRecord(parsed.assistantMessageEvent);
|
||||
if (assistantEvent) {
|
||||
const msgType = asString(assistantEvent.type);
|
||||
|
||||
// Handle thinking deltas
|
||||
if (msgType === "thinking_delta") {
|
||||
const delta = asString(assistantEvent.delta);
|
||||
if (delta) {
|
||||
return [{ kind: "thinking", ts, text: delta, delta: true }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle text deltas
|
||||
if (msgType === "text_delta") {
|
||||
const delta = asString(assistantEvent.delta);
|
||||
if (delta) {
|
||||
return [{ kind: "assistant", ts, text: delta, delta: true }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle thinking end - emit full thinking block
|
||||
if (msgType === "thinking_end") {
|
||||
const content = asString(assistantEvent.content);
|
||||
if (content) {
|
||||
return [{ kind: "thinking", ts, text: content }];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle text end - emit full text block
|
||||
if (msgType === "text_end") {
|
||||
const content = asString(assistantEvent.content);
|
||||
if (content) {
|
||||
return [{ kind: "assistant", ts, text: content }];
|
||||
}
|
||||
}
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
if (type === "message_end") {
|
||||
const message = asRecord(parsed.message);
|
||||
if (message) {
|
||||
const content = message.content as string | Array<{ type: string; text?: string; thinking?: string }>;
|
||||
const { text, thinking } = extractTextContent(content);
|
||||
|
||||
const entries: TranscriptEntry[] = [];
|
||||
|
||||
// Emit final thinking block if present
|
||||
if (thinking) {
|
||||
entries.push({ kind: "thinking", ts, text: thinking });
|
||||
}
|
||||
|
||||
// Emit final text block if present
|
||||
if (text) {
|
||||
entries.push({ kind: "assistant", ts, text });
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
// Tool execution
|
||||
if (type === "tool_execution_start") {
|
||||
const toolName = asString(parsed.toolName);
|
||||
const toolCallId = asString(parsed.toolCallId, `tool-${Date.now()}`);
|
||||
const toolName = asString(parsed.toolName, "tool");
|
||||
const args = parsed.args;
|
||||
if (toolName) {
|
||||
return [{
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: toolName,
|
||||
input: args,
|
||||
}];
|
||||
}
|
||||
return [{ kind: "system", ts, text: `Tool started` }];
|
||||
|
||||
// Track this tool call for later matching
|
||||
pendingToolCalls.set(toolCallId, { toolName, args });
|
||||
|
||||
return [{
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: toolName,
|
||||
input: args,
|
||||
toolUseId: toolCallId,
|
||||
}];
|
||||
}
|
||||
|
||||
if (type === "tool_execution_update") {
|
||||
@@ -140,40 +269,43 @@ export function parsePiStdoutLine(line: string, ts: string): TranscriptEntry[] {
|
||||
}
|
||||
|
||||
if (type === "tool_execution_end") {
|
||||
const toolCallId = asString(parsed.toolCallId);
|
||||
const toolName = asString(parsed.toolName);
|
||||
const toolCallId = asString(parsed.toolCallId, `tool-${Date.now()}`);
|
||||
const toolName = asString(parsed.toolName, "tool");
|
||||
const result = parsed.result;
|
||||
const isError = parsed.isError === true;
|
||||
|
||||
// Extract text from Pi's content array format
|
||||
// Can be: {"content": [{"type": "text", "text": "..."}]} or [{"type": "text", "text": "..."}]
|
||||
let contentStr: string;
|
||||
if (typeof result === "string") {
|
||||
contentStr = result;
|
||||
} else if (Array.isArray(result)) {
|
||||
// Direct array format: result is [{"type": "text", "text": "..."}]
|
||||
contentStr = extractTextContent(result as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(result as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(result);
|
||||
} else if (result && typeof result === "object") {
|
||||
const resultObj = result as Record<string, unknown>;
|
||||
if (Array.isArray(resultObj.content)) {
|
||||
// Wrapped format: result is {"content": [{"type": "text", "text": "..."}]}
|
||||
contentStr = extractTextContent(resultObj.content as Array<{ type: string; text?: string }>);
|
||||
const extracted = extractTextContent(resultObj.content as Array<{ type: string; text?: string }>);
|
||||
contentStr = extracted.text || JSON.stringify(result);
|
||||
} else {
|
||||
contentStr = JSON.stringify(result);
|
||||
}
|
||||
} else {
|
||||
contentStr = JSON.stringify(result);
|
||||
contentStr = String(result);
|
||||
}
|
||||
|
||||
// Clean up pending call
|
||||
pendingToolCalls.delete(toolCallId);
|
||||
|
||||
return [{
|
||||
kind: "tool_result",
|
||||
ts,
|
||||
toolUseId: toolCallId || "unknown",
|
||||
toolUseId: toolCallId,
|
||||
toolName,
|
||||
content: contentStr,
|
||||
isError,
|
||||
}];
|
||||
}
|
||||
|
||||
// Fallback for unknown event types
|
||||
return [{ kind: "stdout", ts, text: line }];
|
||||
}
|
||||
|
||||
@@ -1,83 +1,24 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import postgres from "postgres";
|
||||
import {
|
||||
applyPendingMigrations,
|
||||
ensurePostgresDatabase,
|
||||
inspectMigrations,
|
||||
} from "./client.js";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./test-embedded-postgres.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
const tempPaths: string[] = [];
|
||||
const runningInstances: EmbeddedPostgresInstance[] = [];
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
const cleanups: Array<() => Promise<void>> = [];
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
async function createTempDatabase(): Promise<string> {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-db-client-"));
|
||||
tempPaths.push(dataDir);
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
runningInstances.push(instance);
|
||||
|
||||
const adminUrl = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminUrl, "paperclip");
|
||||
return `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
const db = await startEmbeddedPostgresTestDatabase("paperclip-db-client-");
|
||||
cleanups.push(db.cleanup);
|
||||
return db.connectionString;
|
||||
}
|
||||
|
||||
async function migrationHash(migrationFile: string): Promise<string> {
|
||||
@@ -89,19 +30,19 @@ async function migrationHash(migrationFile: string): Promise<string> {
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
while (runningInstances.length > 0) {
|
||||
const instance = runningInstances.pop();
|
||||
if (!instance) continue;
|
||||
await instance.stop();
|
||||
}
|
||||
while (tempPaths.length > 0) {
|
||||
const tempPath = tempPaths.pop();
|
||||
if (!tempPath) continue;
|
||||
fs.rmSync(tempPath, { recursive: true, force: true });
|
||||
while (cleanups.length > 0) {
|
||||
const cleanup = cleanups.pop();
|
||||
await cleanup?.();
|
||||
}
|
||||
});
|
||||
|
||||
describe("applyPendingMigrations", () => {
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres migration tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
describeEmbeddedPostgres("applyPendingMigrations", () => {
|
||||
it(
|
||||
"applies an inserted earlier migration without replaying later legacy migrations",
|
||||
async () => {
|
||||
|
||||
28
packages/db/src/embedded-postgres-error.test.ts
Normal file
28
packages/db/src/embedded-postgres-error.test.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { createEmbeddedPostgresLogBuffer, formatEmbeddedPostgresError } from "./embedded-postgres-error.js";
|
||||
|
||||
describe("formatEmbeddedPostgresError", () => {
|
||||
it("adds a shared-memory hint when initdb logs expose the real cause", () => {
|
||||
const error = formatEmbeddedPostgresError("Postgres init script exited with code 1.", {
|
||||
fallbackMessage: "Failed to initialize embedded PostgreSQL cluster",
|
||||
recentLogs: [
|
||||
"running bootstrap script ...",
|
||||
"FATAL: could not create shared memory segment: Cannot allocate memory",
|
||||
"DETAIL: Failed system call was shmget(key=123, size=56, 03600).",
|
||||
],
|
||||
});
|
||||
|
||||
expect(error.message).toContain("could not allocate shared memory");
|
||||
expect(error.message).toContain("kern.sysv.shm");
|
||||
expect(error.message).toContain("could not create shared memory segment");
|
||||
});
|
||||
|
||||
it("keeps only recent non-empty log lines in the collector", () => {
|
||||
const buffer = createEmbeddedPostgresLogBuffer(2);
|
||||
buffer.append("line one\n\n");
|
||||
buffer.append("line two");
|
||||
buffer.append("line three");
|
||||
|
||||
expect(buffer.getRecentLogs()).toEqual(["line two", "line three"]);
|
||||
});
|
||||
});
|
||||
89
packages/db/src/embedded-postgres-error.ts
Normal file
89
packages/db/src/embedded-postgres-error.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
const DEFAULT_RECENT_LOG_LIMIT = 40;
|
||||
const RECENT_LOG_SUMMARY_LINES = 8;
|
||||
|
||||
function toError(error: unknown, fallbackMessage: string): Error {
|
||||
if (error instanceof Error) return error;
|
||||
if (error === undefined) return new Error(fallbackMessage);
|
||||
if (typeof error === "string") return new Error(`${fallbackMessage}: ${error}`);
|
||||
|
||||
try {
|
||||
return new Error(`${fallbackMessage}: ${JSON.stringify(error)}`);
|
||||
} catch {
|
||||
return new Error(`${fallbackMessage}: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeRecentLogs(recentLogs: string[]): string | null {
|
||||
if (recentLogs.length === 0) return null;
|
||||
return recentLogs
|
||||
.slice(-RECENT_LOG_SUMMARY_LINES)
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0)
|
||||
.join(" | ");
|
||||
}
|
||||
|
||||
function detectEmbeddedPostgresHint(recentLogs: string[]): string | null {
|
||||
const haystack = recentLogs.join("\n").toLowerCase();
|
||||
if (!haystack.includes("could not create shared memory segment")) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
"Embedded PostgreSQL bootstrap could not allocate shared memory. " +
|
||||
"On macOS, this usually means the host's kern.sysv.shm* limits are too low for another local PostgreSQL cluster. " +
|
||||
"Stop other local PostgreSQL servers or raise the shared-memory sysctls, then retry."
|
||||
);
|
||||
}
|
||||
|
||||
export function createEmbeddedPostgresLogBuffer(limit = DEFAULT_RECENT_LOG_LIMIT): {
|
||||
append(message: unknown): void;
|
||||
getRecentLogs(): string[];
|
||||
} {
|
||||
const recentLogs: string[] = [];
|
||||
|
||||
return {
|
||||
append(message: unknown) {
|
||||
const text =
|
||||
typeof message === "string"
|
||||
? message
|
||||
: message instanceof Error
|
||||
? message.message
|
||||
: String(message ?? "");
|
||||
|
||||
for (const rawLine of text.split(/\r?\n/)) {
|
||||
const line = rawLine.trim();
|
||||
if (!line) continue;
|
||||
recentLogs.push(line);
|
||||
if (recentLogs.length > limit) {
|
||||
recentLogs.splice(0, recentLogs.length - limit);
|
||||
}
|
||||
}
|
||||
},
|
||||
getRecentLogs() {
|
||||
return [...recentLogs];
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function formatEmbeddedPostgresError(
|
||||
error: unknown,
|
||||
input: {
|
||||
fallbackMessage: string;
|
||||
recentLogs?: string[];
|
||||
},
|
||||
): Error {
|
||||
const baseError = toError(error, input.fallbackMessage);
|
||||
const recentLogs = input.recentLogs ?? [];
|
||||
const parts = [baseError.message];
|
||||
const hint = detectEmbeddedPostgresHint(recentLogs);
|
||||
const recentSummary = summarizeRecentLogs(recentLogs);
|
||||
|
||||
if (hint) {
|
||||
parts.push(hint);
|
||||
}
|
||||
if (recentSummary) {
|
||||
parts.push(`Recent embedded Postgres logs: ${recentSummary}`);
|
||||
}
|
||||
|
||||
return new Error(parts.join(" "));
|
||||
}
|
||||
@@ -11,6 +11,12 @@ export {
|
||||
type MigrationBootstrapResult,
|
||||
type Db,
|
||||
} from "./client.js";
|
||||
export {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestSupport,
|
||||
} from "./test-embedded-postgres.js";
|
||||
export {
|
||||
runDatabaseBackup,
|
||||
runDatabaseRestore,
|
||||
@@ -19,4 +25,8 @@ export {
|
||||
type RunDatabaseBackupResult,
|
||||
type RunDatabaseRestoreOptions,
|
||||
} from "./backup-lib.js";
|
||||
export {
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
formatEmbeddedPostgresError,
|
||||
} from "./embedded-postgres-error.js";
|
||||
export * from "./schema/index.js";
|
||||
|
||||
@@ -2,6 +2,7 @@ import { existsSync, readFileSync, rmSync } from "node:fs";
|
||||
import { createServer } from "node:net";
|
||||
import path from "node:path";
|
||||
import { ensurePostgresDatabase, getPostgresDataDirectory } from "./client.js";
|
||||
import { createEmbeddedPostgresLogBuffer, formatEmbeddedPostgresError } from "./embedded-postgres-error.js";
|
||||
import { resolveDatabaseTarget } from "./runtime-config.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
@@ -27,18 +28,6 @@ export type MigrationConnection = {
|
||||
stop: () => Promise<void>;
|
||||
};
|
||||
|
||||
function toError(error: unknown, fallbackMessage: string): Error {
|
||||
if (error instanceof Error) return error;
|
||||
if (error === undefined) return new Error(fallbackMessage);
|
||||
if (typeof error === "string") return new Error(`${fallbackMessage}: ${error}`);
|
||||
|
||||
try {
|
||||
return new Error(`${fallbackMessage}: ${JSON.stringify(error)}`);
|
||||
} catch {
|
||||
return new Error(`${fallbackMessage}: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
function readRunningPostmasterPid(postmasterPidFile: string): number | null {
|
||||
if (!existsSync(postmasterPidFile)) return null;
|
||||
try {
|
||||
@@ -109,6 +98,7 @@ async function ensureEmbeddedPostgresConnection(
|
||||
const runningPid = readRunningPostmasterPid(postmasterPidFile);
|
||||
const runningPort = readPidFilePort(postmasterPidFile);
|
||||
const preferredAdminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${preferredPort}/postgres`;
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer();
|
||||
|
||||
if (!runningPid && existsSync(pgVersionFile)) {
|
||||
try {
|
||||
@@ -151,18 +141,19 @@ async function ensureEmbeddedPostgresConnection(
|
||||
port: selectedPort,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
onLog: logBuffer.append,
|
||||
onError: logBuffer.append,
|
||||
});
|
||||
|
||||
if (!existsSync(path.resolve(dataDir, "PG_VERSION"))) {
|
||||
try {
|
||||
await instance.initialise();
|
||||
} catch (error) {
|
||||
throw toError(
|
||||
error,
|
||||
`Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${selectedPort}`,
|
||||
);
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage:
|
||||
`Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${selectedPort}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
}
|
||||
if (existsSync(postmasterPidFile)) {
|
||||
@@ -171,7 +162,10 @@ async function ensureEmbeddedPostgresConnection(
|
||||
try {
|
||||
await instance.start();
|
||||
} catch (error) {
|
||||
throw toError(error, `Failed to start embedded PostgreSQL on port ${selectedPort}`);
|
||||
throw formatEmbeddedPostgresError(error, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${selectedPort}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${selectedPort}/postgres`;
|
||||
|
||||
17
packages/db/src/migrations/0045_workable_shockwave.sql
Normal file
17
packages/db/src/migrations/0045_workable_shockwave.sql
Normal file
@@ -0,0 +1,17 @@
|
||||
CREATE TABLE "issue_inbox_archives" (
|
||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||
"company_id" uuid NOT NULL,
|
||||
"issue_id" uuid NOT NULL,
|
||||
"user_id" text NOT NULL,
|
||||
"archived_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
|
||||
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
|
||||
);
|
||||
--> statement-breakpoint
|
||||
DROP INDEX "board_api_keys_key_hash_idx";--> statement-breakpoint
|
||||
ALTER TABLE "issue_inbox_archives" ADD CONSTRAINT "issue_inbox_archives_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "issue_inbox_archives" ADD CONSTRAINT "issue_inbox_archives_issue_id_issues_id_fk" FOREIGN KEY ("issue_id") REFERENCES "public"."issues"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "issue_inbox_archives_company_issue_idx" ON "issue_inbox_archives" USING btree ("company_id","issue_id");--> statement-breakpoint
|
||||
CREATE INDEX "issue_inbox_archives_company_user_idx" ON "issue_inbox_archives" USING btree ("company_id","user_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "issue_inbox_archives_company_issue_user_idx" ON "issue_inbox_archives" USING btree ("company_id","issue_id","user_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "board_api_keys_key_hash_idx" ON "board_api_keys" USING btree ("key_hash");
|
||||
11857
packages/db/src/migrations/meta/0045_snapshot.json
Normal file
11857
packages/db/src/migrations/meta/0045_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -316,6 +316,13 @@
|
||||
"when": 1774269579794,
|
||||
"tag": "0044_illegal_toad",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 45,
|
||||
"version": "7",
|
||||
"when": 1774530504348,
|
||||
"tag": "0045_workable_shockwave",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -31,6 +31,7 @@ export { labels } from "./labels.js";
|
||||
export { issueLabels } from "./issue_labels.js";
|
||||
export { issueApprovals } from "./issue_approvals.js";
|
||||
export { issueComments } from "./issue_comments.js";
|
||||
export { issueInboxArchives } from "./issue_inbox_archives.js";
|
||||
export { issueReadStates } from "./issue_read_states.js";
|
||||
export { assets } from "./assets.js";
|
||||
export { issueAttachments } from "./issue_attachments.js";
|
||||
|
||||
25
packages/db/src/schema/issue_inbox_archives.ts
Normal file
25
packages/db/src/schema/issue_inbox_archives.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { pgTable, uuid, text, timestamp, index, uniqueIndex } from "drizzle-orm/pg-core";
|
||||
import { companies } from "./companies.js";
|
||||
import { issues } from "./issues.js";
|
||||
|
||||
export const issueInboxArchives = pgTable(
|
||||
"issue_inbox_archives",
|
||||
{
|
||||
id: uuid("id").primaryKey().defaultRandom(),
|
||||
companyId: uuid("company_id").notNull().references(() => companies.id),
|
||||
issueId: uuid("issue_id").notNull().references(() => issues.id),
|
||||
userId: text("user_id").notNull(),
|
||||
archivedAt: timestamp("archived_at", { withTimezone: true }).notNull().defaultNow(),
|
||||
createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
|
||||
updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
|
||||
},
|
||||
(table) => ({
|
||||
companyIssueIdx: index("issue_inbox_archives_company_issue_idx").on(table.companyId, table.issueId),
|
||||
companyUserIdx: index("issue_inbox_archives_company_user_idx").on(table.companyId, table.userId),
|
||||
companyIssueUserUnique: uniqueIndex("issue_inbox_archives_company_issue_user_idx").on(
|
||||
table.companyId,
|
||||
table.issueId,
|
||||
table.userId,
|
||||
),
|
||||
}),
|
||||
);
|
||||
144
packages/db/src/test-embedded-postgres.ts
Normal file
144
packages/db/src/test-embedded-postgres.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { applyPendingMigrations, ensurePostgresDatabase } from "./client.js";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
export type EmbeddedPostgresTestSupport = {
|
||||
supported: boolean;
|
||||
reason?: string;
|
||||
};
|
||||
|
||||
export type EmbeddedPostgresTestDatabase = {
|
||||
connectionString: string;
|
||||
cleanup(): Promise<void>;
|
||||
};
|
||||
|
||||
let embeddedPostgresSupportPromise: Promise<EmbeddedPostgresTestSupport> | null = null;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function formatEmbeddedPostgresError(error: unknown): string {
|
||||
if (error instanceof Error && error.message.length > 0) return error.message;
|
||||
if (typeof error === "string" && error.length > 0) return error;
|
||||
return "embedded Postgres startup failed";
|
||||
}
|
||||
|
||||
async function probeEmbeddedPostgresSupport(): Promise<EmbeddedPostgresTestSupport> {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-embedded-postgres-probe-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
|
||||
try {
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
return { supported: true };
|
||||
} catch (error) {
|
||||
return {
|
||||
supported: false,
|
||||
reason: formatEmbeddedPostgresError(error),
|
||||
};
|
||||
} finally {
|
||||
await instance.stop().catch(() => {});
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
export async function getEmbeddedPostgresTestSupport(): Promise<EmbeddedPostgresTestSupport> {
|
||||
if (!embeddedPostgresSupportPromise) {
|
||||
embeddedPostgresSupportPromise = probeEmbeddedPostgresSupport();
|
||||
}
|
||||
return await embeddedPostgresSupportPromise;
|
||||
}
|
||||
|
||||
export async function startEmbeddedPostgresTestDatabase(
|
||||
tempDirPrefix: string,
|
||||
): Promise<EmbeddedPostgresTestDatabase> {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), tempDirPrefix));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
|
||||
try {
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
|
||||
return {
|
||||
connectionString,
|
||||
cleanup: async () => {
|
||||
await instance.stop().catch(() => {});
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
await instance.stop().catch(() => {});
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
throw new Error(
|
||||
`Failed to start embedded PostgreSQL test database: ${formatEmbeddedPostgresError(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
34
pnpm-lock.yaml
generated
34
pnpm-lock.yaml
generated
@@ -4,6 +4,11 @@ settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
patchedDependencies:
|
||||
embedded-postgres@18.1.0-beta.16:
|
||||
hash: 55uhvnotpqyiy37rn3pqpukhei
|
||||
path: patches/embedded-postgres@18.1.0-beta.16.patch
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
@@ -73,7 +78,7 @@ importers:
|
||||
version: 0.38.4(@electric-sql/pglite@0.3.15)(@types/react@19.2.14)(kysely@0.28.11)(pg@8.18.0)(postgres@3.4.8)(react@19.2.4)
|
||||
embedded-postgres:
|
||||
specifier: ^18.1.0-beta.16
|
||||
version: 18.1.0-beta.16
|
||||
version: 18.1.0-beta.16(patch_hash=55uhvnotpqyiy37rn3pqpukhei)
|
||||
picocolors:
|
||||
specifier: ^1.1.1
|
||||
version: 1.1.1
|
||||
@@ -225,7 +230,7 @@ importers:
|
||||
version: 0.38.4(@electric-sql/pglite@0.3.15)(@types/react@19.2.14)(kysely@0.28.11)(pg@8.18.0)(postgres@3.4.8)(react@19.2.4)
|
||||
embedded-postgres:
|
||||
specifier: ^18.1.0-beta.16
|
||||
version: 18.1.0-beta.16
|
||||
version: 18.1.0-beta.16(patch_hash=55uhvnotpqyiy37rn3pqpukhei)
|
||||
postgres:
|
||||
specifier: ^3.4.5
|
||||
version: 3.4.8
|
||||
@@ -494,13 +499,13 @@ importers:
|
||||
version: 0.38.4(@electric-sql/pglite@0.3.15)(@types/react@19.2.14)(kysely@0.28.11)(pg@8.18.0)(postgres@3.4.8)(react@19.2.4)
|
||||
embedded-postgres:
|
||||
specifier: ^18.1.0-beta.16
|
||||
version: 18.1.0-beta.16
|
||||
version: 18.1.0-beta.16(patch_hash=55uhvnotpqyiy37rn3pqpukhei)
|
||||
express:
|
||||
specifier: ^5.1.0
|
||||
version: 5.2.1
|
||||
hermes-paperclip-adapter:
|
||||
specifier: 0.1.1
|
||||
version: 0.1.1
|
||||
specifier: ^0.2.0
|
||||
version: 0.2.0
|
||||
jsdom:
|
||||
specifier: ^28.1.0
|
||||
version: 28.1.0(@noble/hashes@2.0.1)
|
||||
@@ -634,6 +639,9 @@ importers:
|
||||
cmdk:
|
||||
specifier: ^1.1.1
|
||||
version: 1.1.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
hermes-paperclip-adapter:
|
||||
specifier: ^0.2.0
|
||||
version: 0.2.0
|
||||
lexical:
|
||||
specifier: 0.35.0
|
||||
version: 0.35.0
|
||||
@@ -2035,8 +2043,8 @@ packages:
|
||||
'@open-draft/deferred-promise@2.2.0':
|
||||
resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==}
|
||||
|
||||
'@paperclipai/adapter-utils@0.3.1':
|
||||
resolution: {integrity: sha512-W66k+hJkQE8ma0asM/Sd90AC8HHy/BLG/sd0aOC+rDWw+gOasQyUkTnDoPv1zhQuTyKEEvLFV6ByOOKqEiAz/A==}
|
||||
'@paperclipai/adapter-utils@2026.325.0':
|
||||
resolution: {integrity: sha512-YDVSAgjkeJ0PvxXDJVN9MZDX7oYRzidLtGHmGgRGd6gSk/bF2ygAKvND4FI1YxDc/cRLQjqAFCpCYaC/9wqIEA==}
|
||||
|
||||
'@paralleldrive/cuid2@2.3.1':
|
||||
resolution: {integrity: sha512-XO7cAxhnTZl0Yggq6jOgjiOHhbgcO4NqFqwSmQpjK3b6TEE6Uj/jfSk6wzYyemh3+I0sHirKSetjQwn5cZktFw==}
|
||||
@@ -4463,8 +4471,8 @@ packages:
|
||||
help-me@5.0.0:
|
||||
resolution: {integrity: sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==}
|
||||
|
||||
hermes-paperclip-adapter@0.1.1:
|
||||
resolution: {integrity: sha512-kbdX349VxExSkVL8n4RwTpP9fUBf2yWpsTsJp02X12A9NynRJatlpYqt0vEkFyE/X7qEXqdJvpBm9tlvUHahsA==}
|
||||
hermes-paperclip-adapter@0.2.0:
|
||||
resolution: {integrity: sha512-6CP5vxfvY4jY9XJK5zu4ZUL9aB7HHNtEMk6q7m1Pu9Gzoby1Vx5VNmVqte3NUO+1cvVK9Arj1f67xLagWkbo5Q==}
|
||||
engines: {node: '>=20.0.0'}
|
||||
|
||||
html-encoding-sniffer@6.0.0:
|
||||
@@ -7735,7 +7743,7 @@ snapshots:
|
||||
|
||||
'@open-draft/deferred-promise@2.2.0': {}
|
||||
|
||||
'@paperclipai/adapter-utils@0.3.1': {}
|
||||
'@paperclipai/adapter-utils@2026.325.0': {}
|
||||
|
||||
'@paralleldrive/cuid2@2.3.1':
|
||||
dependencies:
|
||||
@@ -9973,7 +9981,7 @@ snapshots:
|
||||
|
||||
electron-to-chromium@1.5.286: {}
|
||||
|
||||
embedded-postgres@18.1.0-beta.16:
|
||||
embedded-postgres@18.1.0-beta.16(patch_hash=55uhvnotpqyiy37rn3pqpukhei):
|
||||
dependencies:
|
||||
async-exit-hook: 2.0.1
|
||||
pg: 8.18.0
|
||||
@@ -10332,9 +10340,9 @@ snapshots:
|
||||
|
||||
help-me@5.0.0: {}
|
||||
|
||||
hermes-paperclip-adapter@0.1.1:
|
||||
hermes-paperclip-adapter@0.2.0:
|
||||
dependencies:
|
||||
'@paperclipai/adapter-utils': 0.3.1
|
||||
'@paperclipai/adapter-utils': 2026.325.0
|
||||
picocolors: 1.1.1
|
||||
|
||||
html-encoding-sniffer@6.0.0(@noble/hashes@2.0.1):
|
||||
|
||||
77
releases/v2026.325.0.md
Normal file
77
releases/v2026.325.0.md
Normal file
@@ -0,0 +1,77 @@
|
||||
# v2026.325.0
|
||||
|
||||
> Released: 2026-03-25
|
||||
|
||||
## Highlights
|
||||
|
||||
- **Company import/export** — Full company portability with a file-browser UX for importing and exporting agent companies. Includes rich frontmatter preview, nested file picker, merge-history support, GitHub shorthand refs, and CLI `company import`/`company export` commands. Imported companies open automatically after import, and heartbeat timers are disabled for imported agents by default. ([#840](https://github.com/paperclipai/paperclip/pull/840), [#1631](https://github.com/paperclipai/paperclip/pull/1631), [#1632](https://github.com/paperclipai/paperclip/pull/1632), [#1655](https://github.com/paperclipai/paperclip/pull/1655))
|
||||
- **Company skills library** — New company-scoped skills system with a skills UI, agent skill sync across all local adapters (Claude, Codex, Pi, Gemini), pinned GitHub skills with update checks, and built-in skill support. ([#1346](https://github.com/paperclipai/paperclip/pull/1346))
|
||||
- **Routines and recurring tasks** — Full routines engine with triggers, routine runs, coalescing, and recurring task portability. Includes API documentation and routine export support. ([#1351](https://github.com/paperclipai/paperclip/pull/1351), [#1622](https://github.com/paperclipai/paperclip/pull/1622), @aronprins)
|
||||
|
||||
## Improvements
|
||||
|
||||
- **Inline join requests in inbox** — Join requests now render inline in the inbox alongside approvals and other work items.
|
||||
- **Onboarding seeding** — New projects and issues are seeded with goal context during onboarding for a better first-run experience.
|
||||
- **Agent instructions recovery** — Managed agent instructions are recovered from disk on startup; instructions are preserved across adapter switches.
|
||||
- **Heartbeats settings page** — Shows all agents regardless of interval config; added a "Disable All" button for quick bulk control.
|
||||
- **Agent history via participation** — Agent issue history now uses participation records instead of direct assignment lookups.
|
||||
- **Alphabetical agent sorting** — Agents are sorted alphabetically by name across all views.
|
||||
- **Company org chart assets** — Improved generated org chart visuals for companies.
|
||||
- **Improved CLI API connection errors** — Better error messages when the CLI cannot reach the Paperclip API.
|
||||
- **Markdown mention links** — Custom URL schemes are now allowed in Lexical LinkNode, enabling mention pills with proper linking behavior. Atomic deletion of mention pills works correctly.
|
||||
- **Issue workspace reuse** — Workspaces are correctly reused after isolation runs.
|
||||
- **Failed-run session resume** — Explicit failed-run sessions can now be resumed via honor flag.
|
||||
- **Docker image CI** — Added Docker image build and deploy workflow. ([#542](https://github.com/paperclipai/paperclip/pull/542), @albttx)
|
||||
- **Project filter on issues** — Issues list can now be filtered by project. ([#552](https://github.com/paperclipai/paperclip/pull/552), @mvanhorn)
|
||||
- **Inline comment image attachments** — Uploaded images are now embedded inline in comments. ([#551](https://github.com/paperclipai/paperclip/pull/551), @mvanhorn)
|
||||
- **AGENTS.md fallback** — Claude-local adapter gracefully falls back when AGENTS.md is missing. ([#550](https://github.com/paperclipai/paperclip/pull/550), @mvanhorn)
|
||||
- **Company-creator skill** — New skill for scaffolding agent company packages from scratch.
|
||||
- **Reports page rename** — Reports section renamed for clarity. ([#1380](https://github.com/paperclipai/paperclip/pull/1380), @DanielSousa)
|
||||
- **Eval framework bootstrap** — Promptfoo-based evaluation framework with YAML test cases for systematic agent behavior testing. ([#832](https://github.com/paperclipai/paperclip/pull/832), @mvanhorn)
|
||||
- **Board CLI authentication** — Browser-based auth flow for the CLI so board users can authenticate without manually copying API keys. ([#1635](https://github.com/paperclipai/paperclip/pull/1635))
|
||||
|
||||
## Fixes
|
||||
|
||||
- **Embedded Postgres initdb in Docker slim** — Fixed initdb failure in slim containers by adding proper initdbFlags types. ([#737](https://github.com/paperclipai/paperclip/pull/737), @alaa-alghazouli)
|
||||
- **OpenClaw gateway crash** — Fixed unhandled rejection when challengePromise fails. ([#743](https://github.com/paperclipai/paperclip/pull/743), @Sigmabrogz)
|
||||
- **Agent mention pill alignment** — Fixed vertical misalignment between agent mention pills and project mention pills.
|
||||
- **Task assignment grants** — Preserved task assignment grants for agents that have already joined.
|
||||
- **Instructions tab state** — Fixed tab state not updating correctly when switching between agents.
|
||||
- **Imported agent bundle frontmatter** — Fixed frontmatter leakage in imported agent bundles.
|
||||
- **Login form 1Password detection** — Fixed login form not being detected by password managers; Enter key now submits correctly. ([#1014](https://github.com/paperclipai/paperclip/pull/1014))
|
||||
- **Pill contrast (WCAG)** — Improved mention pill contrast using WCAG contrast ratios on composited backgrounds.
|
||||
- **Documents horizontal scroll** — Prevented documents row from causing horizontal scroll on mobile.
|
||||
- **Toggle switch sizing** — Fixed oversized toggle switches on mobile; added missing `data-slot` attributes.
|
||||
- **Agent instructions tab responsive** — Made agent instructions tab responsive on mobile.
|
||||
- **Monospace font sizing** — Adjusted inline code font size and added dark mode background.
|
||||
- **Priority icon removal** — Removed priority icon from issue rows for a cleaner list view.
|
||||
- **Same-page issue toasts** — Suppressed redundant toasts when navigating to an issue already on screen.
|
||||
- **Noisy adapter log** — Removed noisy "Loaded agent instructions file" log message from all adapters.
|
||||
- **Pi local adapter** — Fixed Pi adapter missing from `isLocal` check. ([#1382](https://github.com/paperclipai/paperclip/pull/1382), @lucas-stellet)
|
||||
- **CLI auth migration idempotency** — Made migration 0044 idempotent to avoid failures on re-run.
|
||||
- **Dev restart tracking** — `.paperclip` and test-only paths are now ignored in dev restart detection.
|
||||
- **Duplicate CLI auth flag** — Fixed duplicate `--company` flag on `auth login`.
|
||||
- **Gemini local execution** — Fixed Gemini local adapter execution and diagnostics.
|
||||
- **Sidebar ordering** — Preserved sidebar ordering during company portability operations.
|
||||
- **Company skill deduplication** — Fixed duplicate skill inventory refreshes.
|
||||
- **Worktree merge-history migrations** — Fixed migration handling in worktree contexts. ([#1385](https://github.com/paperclipai/paperclip/pull/1385))
|
||||
|
||||
## Upgrade Guide
|
||||
|
||||
Seven new database migrations (`0038`–`0044`) will run automatically on startup:
|
||||
|
||||
- **Migration 0038** adds process tracking columns to heartbeat runs (PID, started-at, retry tracking).
|
||||
- **Migration 0039** adds the routines engine tables (routines, triggers, routine runs).
|
||||
- **Migrations 0040–0042** extend company skills, recurring tasks, and portability metadata.
|
||||
- **Migration 0043** adds the Codex managed-home and agent instructions recovery columns.
|
||||
- **Migration 0044** adds board API keys and CLI auth challenge tables for browser-based CLI auth.
|
||||
|
||||
All migrations are additive (new tables and columns) — no existing data is modified. Standard `paperclipai` startup will apply them automatically.
|
||||
|
||||
If you use the company import/export feature, note that imported companies have heartbeat timers disabled by default. Re-enable them manually from the Heartbeats settings page after verifying adapter configuration.
|
||||
|
||||
## Contributors
|
||||
|
||||
Thank you to everyone who contributed to this release!
|
||||
|
||||
@alaa-alghazouli, @albttx, @AOrobator, @aronprins, @cryppadotta, @DanielSousa, @lucas-stellet, @mvanhorn, @richardanaya, @Sigmabrogz
|
||||
31
scripts/generate-ui-package-json.mjs
Normal file
31
scripts/generate-ui-package-json.mjs
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { readFileSync, writeFileSync } from "node:fs";
|
||||
import { dirname, join, resolve } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const repoRoot = resolve(__dirname, "..");
|
||||
const uiDir = join(repoRoot, "ui");
|
||||
const packageJsonPath = join(uiDir, "package.json");
|
||||
|
||||
const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
|
||||
|
||||
const publishPackageJson = {
|
||||
name: packageJson.name,
|
||||
version: packageJson.version,
|
||||
description: packageJson.description,
|
||||
license: packageJson.license,
|
||||
homepage: packageJson.homepage,
|
||||
bugs: packageJson.bugs,
|
||||
repository: packageJson.repository,
|
||||
type: packageJson.type,
|
||||
files: ["dist"],
|
||||
publishConfig: {
|
||||
access: "public",
|
||||
},
|
||||
};
|
||||
|
||||
writeFileSync(packageJsonPath, `${JSON.stringify(publishPackageJson, null, 2)}\n`);
|
||||
|
||||
console.log(" ✓ Generated publishable UI package.json");
|
||||
883
scripts/paperclip-commit-metrics.ts
Normal file
883
scripts/paperclip-commit-metrics.ts
Normal file
@@ -0,0 +1,883 @@
|
||||
#!/usr/bin/env npx tsx
|
||||
|
||||
import { execFile } from "node:child_process";
|
||||
import { promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
const DEFAULT_QUERY = "\"Co-Authored-By: Paperclip <noreply@paperclip.ing>\"";
|
||||
const DEFAULT_CACHE_FILE = path.resolve("data/paperclip-commit-metrics-cache.json");
|
||||
const DEFAULT_SEARCH_START = "2008-01-01T00:00:00Z";
|
||||
const SEARCH_WINDOW_LIMIT = 900;
|
||||
const MIN_WINDOW_MS = 60_000;
|
||||
const DEFAULT_STATS_FETCH_LIMIT = 250;
|
||||
const DEFAULT_STATS_CONCURRENCY = 4;
|
||||
const DEFAULT_SEARCH_FIELD = "committer-date";
|
||||
const PAPERCLIP_EMAIL = "noreply@paperclip.ing";
|
||||
const PAPERCLIP_NAME = "paperclip";
|
||||
|
||||
/** Parsed command-line configuration for a single run. */
interface CliOptions {
  cacheFile: string;
  end: Date;
  excludeOwners: string[];
  exportFormat: "csv" | "json";
  includePrivate: boolean;
  json: boolean;
  output: string | null;
  query: string;
  refreshSearch: boolean;
  refreshStats: boolean;
  searchField: "author-date" | "committer-date";
  start: Date;
  statsConcurrency: number;
  statsFetchLimit: number;
  skipStats: boolean;
}

/** One item of GitHub's commit-search response (only the fields used here). */
interface SearchCommitItem {
  author: {
    login?: string;
  } | null;
  commit: {
    author: {
      date: string;
      email: string | null;
      name: string | null;
    } | null;
    message: string;
  };
  html_url: string;
  repository: {
    full_name: string;
    html_url: string;
  };
  sha: string;
}

/** Line-change counters for a single commit. */
interface CommitStats {
  additions: number;
  deletions: number;
  total: number;
}

/** Commit metadata persisted in the cache file. */
interface CachedCommit {
  authorEmail: string | null;
  authorLogin: string | null;
  authorName: string | null;
  committedAt: string | null;
  contributors: ContributorRecord[];
  htmlUrl: string;
  repositoryFullName: string;
  repositoryUrl: string;
  sha: string;
}

/** CommitStats plus the timestamp at which they were retrieved. */
interface CachedCommitStats extends CommitStats {
  fetchedAt: string;
}

/** A deduplicated commit author/co-author (Paperclip itself excluded). */
interface ContributorRecord {
  displayName: string;
  email: string | null;
  // Stable dedupe key: "login:…", "email:…", or "name:…" in that preference order.
  key: string;
  login: string | null;
}

/** A fully-enumerated search window and the SHAs it produced. */
interface WindowCacheEntry {
  completedAt: string;
  key: string;
  shas: string[];
  totalCount: number;
}

/** On-disk cache layout (version 1). */
interface CacheFile {
  commits: Record<string, CachedCommit>;
  // Invalidates the cache when query/search-field/visibility change.
  queryKey: string;
  searchField: CliOptions["searchField"];
  stats: Record<string, CachedCommitStats>;
  updatedAt: string | null;
  version: number;
  windows: Record<string, WindowCacheEntry>;
}

/** Shape of GitHub's /search/commits response (fields used here). */
interface SearchResponse {
  incomplete_results: boolean;
  items: SearchCommitItem[];
  total_count: number;
}

/** Result of enumerating one search window. */
interface SearchWindowResult {
  shas: Set<string>;
  totalCount: number;
}

/** Aggregated report printed or exported at the end of a run. */
interface Summary {
  cacheFile: string;
  contributors: {
    count: number;
    sample: ContributorRecord[];
  };
  detectedQuery: string;
  lineStats: {
    additions: number;
    complete: boolean;
    coveredCommits: number;
    deletions: number;
    missingCommits: number;
    totalChanges: number;
  };
  range: {
    end: string;
    searchField: CliOptions["searchField"];
    start: string;
  };
  filters: {
    excludedOwners: string[];
  };
  repos: {
    count: number;
    sample: string[];
  };
  statsFetch: {
    fetchedThisRun: number;
    skipped: boolean;
  };
  totals: {
    commits: number;
  };
}
|
||||
|
||||
async function main() {
|
||||
const options = parseArgs(process.argv.slice(2));
|
||||
const cache = await loadCache(options.cacheFile, options);
|
||||
const client = new GitHubClient(await resolveGitHubToken());
|
||||
|
||||
const { shas } = await searchWindow(client, cache, options, options.start, options.end);
|
||||
const sortedShas = [...shas].sort();
|
||||
|
||||
let fetchedThisRun = 0;
|
||||
if (!options.skipStats) {
|
||||
fetchedThisRun = await enrichCommitStats(client, cache, options, sortedShas);
|
||||
}
|
||||
|
||||
cache.updatedAt = new Date().toISOString();
|
||||
await saveCache(options.cacheFile, cache);
|
||||
|
||||
const filteredShas = sortFilteredShas(cache, filterShas(cache, sortedShas, options));
|
||||
const summary = buildSummary(cache, options, filteredShas, fetchedThisRun);
|
||||
|
||||
if (options.output) {
|
||||
await writeExport(options.output, options.exportFormat, cache, filteredShas, summary);
|
||||
}
|
||||
|
||||
if (options.json) {
|
||||
console.log(JSON.stringify(summary, null, 2));
|
||||
return;
|
||||
}
|
||||
|
||||
printSummary(summary);
|
||||
}
|
||||
|
||||
/**
 * Parse CLI flags into a CliOptions record.
 * Unknown flags, missing values, and invalid dates/integers throw;
 * --help prints usage and exits the process.
 */
function parseArgs(argv: string[]): CliOptions {
  // Start from defaults; each recognized flag overwrites one field.
  const options: CliOptions = {
    cacheFile: DEFAULT_CACHE_FILE,
    end: new Date(),
    excludeOwners: [],
    exportFormat: "csv",
    includePrivate: false,
    json: false,
    output: null,
    query: DEFAULT_QUERY,
    refreshSearch: false,
    refreshStats: false,
    searchField: DEFAULT_SEARCH_FIELD,
    start: new Date(DEFAULT_SEARCH_START),
    statsConcurrency: DEFAULT_STATS_CONCURRENCY,
    statsFetchLimit: DEFAULT_STATS_FETCH_LIMIT,
    skipStats: false,
  };

  for (let index = 0; index < argv.length; index += 1) {
    const arg = argv[index];
    switch (arg) {
      // `++index` consumes the flag's value argument in the same pass.
      case "--cache-file":
        options.cacheFile = requireValue(argv, ++index, arg);
        break;
      case "--end":
        options.end = parseDateArg(requireValue(argv, ++index, arg), arg);
        break;
      case "--exclude-owner":
        // Repeatable; owners are compared case-insensitively later.
        options.excludeOwners.push(requireValue(argv, ++index, arg).toLowerCase());
        break;
      case "--export-format": {
        const value = requireValue(argv, ++index, arg);
        if (value !== "csv" && value !== "json") {
          throw new Error(`Invalid --export-format value: ${value}`);
        }
        options.exportFormat = value;
        break;
      }
      case "--include-private":
        options.includePrivate = true;
        break;
      case "--json":
        options.json = true;
        break;
      case "--output":
        options.output = requireValue(argv, ++index, arg);
        break;
      case "--query":
        options.query = requireValue(argv, ++index, arg);
        break;
      case "--refresh-search":
        options.refreshSearch = true;
        break;
      case "--refresh-stats":
        options.refreshStats = true;
        break;
      case "--search-field": {
        const value = requireValue(argv, ++index, arg);
        if (value !== "author-date" && value !== "committer-date") {
          throw new Error(`Invalid --search-field value: ${value}`);
        }
        options.searchField = value;
        break;
      }
      case "--skip-stats":
        options.skipStats = true;
        break;
      case "--start":
        options.start = parseDateArg(requireValue(argv, ++index, arg), arg);
        break;
      case "--stats-concurrency":
        options.statsConcurrency = parsePositiveInt(requireValue(argv, ++index, arg), arg);
        break;
      case "--stats-fetch-limit":
        options.statsFetchLimit = parseNonNegativeInt(requireValue(argv, ++index, arg), arg);
        break;
      case "--help":
        printHelp();
        process.exit(0);
        break;
      default:
        throw new Error(`Unknown argument: ${arg}`);
    }
  }

  // Final range validation after all flags have been applied.
  if (Number.isNaN(options.start.getTime()) || Number.isNaN(options.end.getTime())) {
    throw new Error("Invalid start or end date");
  }
  if (options.start >= options.end) {
    throw new Error("--start must be earlier than --end");
  }

  return options;
}
|
||||
|
||||
function requireValue(argv: string[], index: number, flag: string): string {
|
||||
const value = argv[index];
|
||||
if (!value) {
|
||||
throw new Error(`Missing value for ${flag}`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function parseDateArg(value: string, flag: string): Date {
|
||||
const parsed = new Date(value);
|
||||
if (Number.isNaN(parsed.getTime())) {
|
||||
throw new Error(`Invalid date for ${flag}: ${value}`);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function parsePositiveInt(value: string, flag: string): number {
|
||||
const parsed = Number.parseInt(value, 10);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
throw new Error(`Invalid positive integer for ${flag}: ${value}`);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function parseNonNegativeInt(value: string, flag: string): number {
|
||||
const parsed = Number.parseInt(value, 10);
|
||||
if (!Number.isFinite(parsed) || parsed < 0) {
|
||||
throw new Error(`Invalid non-negative integer for ${flag}: ${value}`);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
/** Print CLI usage and per-flag documentation to stdout. */
function printHelp() {
  console.log(`Usage: tsx scripts/paperclip-commit-metrics.ts [options]

Options:
  --start <date>            ISO date/time lower bound (default: ${DEFAULT_SEARCH_START})
  --end <date>              ISO date/time upper bound (default: now)
  --query <search>          Commit search string (default: ${DEFAULT_QUERY})
  --search-field <field>    author-date | committer-date (default: ${DEFAULT_SEARCH_FIELD})
  --include-private         Include repos visible to the current token
  --exclude-owner <owner>   Exclude repositories owned by this GitHub owner/org (repeatable)
  --cache-file <path>       Cache path (default: ${DEFAULT_CACHE_FILE})
  --skip-stats              Skip additions/deletions enrichment
  --stats-fetch-limit <n>   Max uncached commit stats to fetch this run (default: ${DEFAULT_STATS_FETCH_LIMIT})
  --stats-concurrency <n>   Parallel commit stat requests (default: ${DEFAULT_STATS_CONCURRENCY})
  --output <path>           Write the full filtered result set to a file
  --export-format <format>  csv | json for --output exports (default: csv)
  --refresh-search          Ignore cached search windows
  --refresh-stats           Re-fetch cached commit stats
  --json                    Print JSON summary
  --help                    Show this help
`);
}
|
||||
|
||||
async function resolveGitHubToken(): Promise<string> {
|
||||
const envToken = process.env.GITHUB_TOKEN ?? process.env.GH_TOKEN;
|
||||
if (envToken) {
|
||||
return envToken;
|
||||
}
|
||||
|
||||
const { stdout } = await execFileAsync("gh", ["auth", "token"]);
|
||||
const token = stdout.trim();
|
||||
if (!token) {
|
||||
throw new Error("Unable to resolve a GitHub token. Set GITHUB_TOKEN/GH_TOKEN or run `gh auth login`.");
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
async function loadCache(cacheFile: string, options: CliOptions): Promise<CacheFile> {
|
||||
try {
|
||||
const raw = await fs.readFile(cacheFile, "utf8");
|
||||
const parsed = JSON.parse(raw) as CacheFile;
|
||||
if (parsed.version !== 1 || parsed.queryKey !== buildQueryKey(options) || parsed.searchField !== options.searchField) {
|
||||
return createEmptyCache(options);
|
||||
}
|
||||
return parsed;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return createEmptyCache(options);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function createEmptyCache(options: CliOptions): CacheFile {
|
||||
return {
|
||||
commits: {},
|
||||
queryKey: buildQueryKey(options),
|
||||
searchField: options.searchField,
|
||||
stats: {},
|
||||
updatedAt: null,
|
||||
version: 1,
|
||||
windows: {},
|
||||
};
|
||||
}
|
||||
|
||||
function buildQueryKey(options: CliOptions): string {
|
||||
const visibility = options.includePrivate ? "all" : "public";
|
||||
return JSON.stringify({
|
||||
query: options.query,
|
||||
searchField: options.searchField,
|
||||
visibility,
|
||||
});
|
||||
}
|
||||
|
||||
async function saveCache(cacheFile: string, cache: CacheFile): Promise<void> {
|
||||
await fs.mkdir(path.dirname(cacheFile), { recursive: true });
|
||||
await fs.writeFile(cacheFile, JSON.stringify(cache, null, 2), "utf8");
|
||||
}
|
||||
|
||||
/**
 * Collect the SHAs of every matching commit inside [start, end].
 *
 * Results are served from the window cache unless --refresh-search is set.
 * Windows whose total exceeds SEARCH_WINDOW_LIMIT are bisected recursively
 * (down to MIN_WINDOW_MS) and the halves merged; smaller windows are paged
 * through directly at 100 items per page. Every enumerated window is
 * written back into `cache.windows` keyed by its ISO range.
 */
async function searchWindow(
  client: GitHubClient,
  cache: CacheFile,
  options: CliOptions,
  start: Date,
  end: Date,
): Promise<SearchWindowResult> {
  const windowKey = makeWindowKey(start, end);
  if (!options.refreshSearch) {
    const cached = cache.windows[windowKey];
    if (cached) {
      return { shas: new Set(cached.shas), totalCount: cached.totalCount };
    }
  }

  const firstPage = await searchPage(client, options, start, end, 1, 100);
  if (firstPage.incomplete_results) {
    throw new Error(`GitHub returned incomplete search results for window ${windowKey}`);
  }

  if (firstPage.total_count > SEARCH_WINDOW_LIMIT) {
    const durationMs = end.getTime() - start.getTime();
    if (durationMs <= MIN_WINDOW_MS) {
      throw new Error(
        `Search window ${windowKey} still has ${firstPage.total_count} results after splitting to ${durationMs}ms.`,
      );
    }

    // Bisect: the right half starts 1ms after the midpoint so the halves
    // do not overlap on the boundary instant.
    const midpoint = new Date(start.getTime() + Math.floor(durationMs / 2));
    const left = await searchWindow(client, cache, options, start, midpoint);
    const right = await searchWindow(client, cache, options, new Date(midpoint.getTime() + 1), end);
    const shas = new Set([...left.shas, ...right.shas]);

    // For merged windows, totalCount is the deduplicated union size.
    cache.windows[windowKey] = {
      completedAt: new Date().toISOString(),
      key: windowKey,
      shas: [...shas],
      totalCount: shas.size,
    };

    return { shas, totalCount: shas.size };
  }

  // Window fits: page through every result at 100 per page.
  const pageCount = Math.ceil(firstPage.total_count / 100);
  const shas = new Set<string>();
  ingestSearchItems(cache, firstPage.items, shas);

  for (let page = 2; page <= pageCount; page += 1) {
    const response = await searchPage(client, options, start, end, page, 100);
    if (response.incomplete_results) {
      throw new Error(`GitHub returned incomplete search results for window ${windowKey} on page ${page}`);
    }
    ingestSearchItems(cache, response.items, shas);
  }

  cache.windows[windowKey] = {
    completedAt: new Date().toISOString(),
    key: windowKey,
    shas: [...shas],
    totalCount: firstPage.total_count,
  };

  return { shas, totalCount: firstPage.total_count };
}
|
||||
|
||||
async function searchPage(
|
||||
client: GitHubClient,
|
||||
options: CliOptions,
|
||||
start: Date,
|
||||
end: Date,
|
||||
page: number,
|
||||
perPage: number,
|
||||
): Promise<SearchResponse> {
|
||||
const searchQuery = buildSearchQuery(options, start, end);
|
||||
const params = new URLSearchParams({
|
||||
page: String(page),
|
||||
per_page: String(perPage),
|
||||
q: searchQuery,
|
||||
});
|
||||
|
||||
return client.getJson<SearchResponse>(`/search/commits?${params.toString()}`);
|
||||
}
|
||||
|
||||
function buildSearchQuery(options: CliOptions, start: Date, end: Date): string {
|
||||
const qualifiers = [`${options.searchField}:${formatQueryDate(start)}..${formatQueryDate(end)}`];
|
||||
if (!options.includePrivate) {
|
||||
qualifiers.push("is:public");
|
||||
}
|
||||
return `${options.query} ${qualifiers.join(" ")}`.trim();
|
||||
}
|
||||
|
||||
function filterShas(cache: CacheFile, shas: string[], options: CliOptions): string[] {
|
||||
if (options.excludeOwners.length === 0) {
|
||||
return shas;
|
||||
}
|
||||
|
||||
const excludedOwners = new Set(options.excludeOwners);
|
||||
return shas.filter((sha) => {
|
||||
const commit = cache.commits[sha];
|
||||
if (!commit) {
|
||||
return false;
|
||||
}
|
||||
return !excludedOwners.has(getRepoOwner(commit.repositoryFullName));
|
||||
});
|
||||
}
|
||||
|
||||
function sortFilteredShas(cache: CacheFile, shas: string[]): string[] {
|
||||
return [...shas].sort((leftSha, rightSha) => {
|
||||
const left = cache.commits[leftSha];
|
||||
const right = cache.commits[rightSha];
|
||||
const leftTime = left?.committedAt ? Date.parse(left.committedAt) : 0;
|
||||
const rightTime = right?.committedAt ? Date.parse(right.committedAt) : 0;
|
||||
if (rightTime !== leftTime) {
|
||||
return rightTime - leftTime;
|
||||
}
|
||||
|
||||
const repoCompare = (left?.repositoryFullName ?? "").localeCompare(right?.repositoryFullName ?? "");
|
||||
if (repoCompare !== 0) {
|
||||
return repoCompare;
|
||||
}
|
||||
return leftSha.localeCompare(rightSha);
|
||||
});
|
||||
}
|
||||
|
||||
function formatQueryDate(value: Date): string {
|
||||
return new Date(Math.floor(value.getTime() / 1000) * 1000).toISOString().replace(".000Z", "Z");
|
||||
}
|
||||
|
||||
function ingestSearchItems(cache: CacheFile, items: SearchCommitItem[], shas: Set<string>) {
|
||||
for (const item of items) {
|
||||
shas.add(item.sha);
|
||||
cache.commits[item.sha] = {
|
||||
authorEmail: item.commit.author?.email ?? null,
|
||||
authorLogin: item.author?.login ?? null,
|
||||
authorName: item.commit.author?.name ?? null,
|
||||
committedAt: item.commit.author?.date ?? null,
|
||||
contributors: extractContributors(item),
|
||||
htmlUrl: item.html_url,
|
||||
repositoryFullName: item.repository.full_name,
|
||||
repositoryUrl: item.repository.html_url,
|
||||
sha: item.sha,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function extractContributors(item: SearchCommitItem): ContributorRecord[] {
|
||||
const contributors = new Map<string, ContributorRecord>();
|
||||
|
||||
const primaryAuthor = normalizeContributor({
|
||||
email: item.commit.author?.email ?? null,
|
||||
login: item.author?.login ?? null,
|
||||
name: item.commit.author?.name ?? null,
|
||||
});
|
||||
if (primaryAuthor) {
|
||||
contributors.set(primaryAuthor.key, primaryAuthor);
|
||||
}
|
||||
|
||||
const coAuthorPattern = /^co-authored-by:\s*(.+?)\s*<([^>]+)>\s*$/gim;
|
||||
for (const match of item.commit.message.matchAll(coAuthorPattern)) {
|
||||
const contributor = normalizeContributor({
|
||||
email: match[2] ?? null,
|
||||
login: null,
|
||||
name: match[1] ?? null,
|
||||
});
|
||||
if (contributor) {
|
||||
contributors.set(contributor.key, contributor);
|
||||
}
|
||||
}
|
||||
|
||||
return [...contributors.values()];
|
||||
}
|
||||
|
||||
function normalizeContributor(input: {
|
||||
email: string | null;
|
||||
login: string | null;
|
||||
name: string | null;
|
||||
}): ContributorRecord | null {
|
||||
const email = normalizeOptional(input.email);
|
||||
const login = normalizeOptional(input.login);
|
||||
const displayName = normalizeOptional(input.name) ?? login ?? email;
|
||||
|
||||
if (!displayName && !email && !login) {
|
||||
return null;
|
||||
}
|
||||
if ((email && email === PAPERCLIP_EMAIL) || (displayName && displayName.toLowerCase() === PAPERCLIP_NAME)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const key = login ? `login:${login}` : email ? `email:${email}` : `name:${displayName!.toLowerCase()}`;
|
||||
return {
|
||||
displayName: displayName ?? email ?? login ?? "unknown",
|
||||
email,
|
||||
key,
|
||||
login,
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeOptional(value: string | null | undefined): string | null {
|
||||
const trimmed = value?.trim();
|
||||
return trimmed ? trimmed : null;
|
||||
}
|
||||
|
||||
function getRepoOwner(repositoryFullName: string): string {
|
||||
return repositoryFullName.split("/", 1)[0]?.toLowerCase() ?? "";
|
||||
}
|
||||
|
||||
/**
 * Fetch additions/deletions for commits missing cached stats (or for all
 * of them under --refresh-stats), bounded by --stats-fetch-limit and run
 * with --stats-concurrency parallel workers.
 *
 * @returns how many commits had stats fetched during this run.
 */
async function enrichCommitStats(
  client: GitHubClient,
  cache: CacheFile,
  options: CliOptions,
  shas: string[],
): Promise<number> {
  const pending = shas.filter((sha) => options.refreshStats || !cache.stats[sha]).slice(0, options.statsFetchLimit);
  let nextIndex = 0;
  let fetched = 0;

  // Worker pool: each worker claims the next pending index. The claim
  // (read + increment) has no await between its two steps, so it is
  // race-free on the single-threaded event loop.
  const workers = Array.from({ length: Math.min(options.statsConcurrency, pending.length) }, async () => {
    while (true) {
      const currentIndex = nextIndex;
      nextIndex += 1;
      const sha = pending[currentIndex];
      if (!sha) {
        // Past the end of the queue: this worker is done.
        return;
      }
      const commit = cache.commits[sha];
      if (!commit) {
        // No cached metadata to resolve the repository; skip this sha.
        continue;
      }
      const stats = await fetchCommitStats(client, commit.repositoryFullName, sha);
      cache.stats[sha] = {
        ...stats,
        fetchedAt: new Date().toISOString(),
      };
      fetched += 1;
    }
  });

  await Promise.all(workers);
  return fetched;
}
|
||||
|
||||
async function fetchCommitStats(client: GitHubClient, repositoryFullName: string, sha: string): Promise<CommitStats> {
|
||||
const response = await client.getJson<{ stats?: CommitStats }>(
|
||||
`/repos/${repositoryFullName}/commits/${sha}`,
|
||||
);
|
||||
return {
|
||||
additions: response.stats?.additions ?? 0,
|
||||
deletions: response.stats?.deletions ?? 0,
|
||||
total: response.stats?.total ?? 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Aggregate the filtered commit set into the printable/exportable Summary:
 * distinct repos, deduplicated contributors, line-stat totals (with
 * coverage accounting for commits whose stats are not yet fetched), and
 * the run's range/filter metadata.
 */
function buildSummary(cache: CacheFile, options: CliOptions, shas: string[], fetchedThisRun: number): Summary {
  const repoNames = new Set<string>();
  const contributors = new Map<string, ContributorRecord>();
  let additions = 0;
  let deletions = 0;
  let coveredCommits = 0;

  for (const sha of shas) {
    const commit = cache.commits[sha];
    if (!commit) {
      continue;
    }
    repoNames.add(commit.repositoryFullName);
    for (const contributor of commit.contributors) {
      contributors.set(contributor.key, contributor);
    }

    // Only commits with hydrated stats count toward the line totals.
    const stats = cache.stats[sha];
    if (stats) {
      additions += stats.additions;
      deletions += stats.deletions;
      coveredCommits += 1;
    }
  }

  // Samples are sorted for stable output and capped at 10 entries.
  const contributorSample = [...contributors.values()]
    .sort((left, right) => left.displayName.localeCompare(right.displayName))
    .slice(0, 10);
  const repoSample = [...repoNames].sort((left, right) => left.localeCompare(right)).slice(0, 10);

  return {
    cacheFile: options.cacheFile,
    contributors: {
      count: contributors.size,
      sample: contributorSample,
    },
    detectedQuery: buildSearchQuery(options, options.start, options.end),
    lineStats: {
      additions,
      complete: coveredCommits === shas.length,
      coveredCommits,
      deletions,
      missingCommits: shas.length - coveredCommits,
      totalChanges: additions + deletions,
    },
    range: {
      end: options.end.toISOString(),
      searchField: options.searchField,
      start: options.start.toISOString(),
    },
    filters: {
      excludedOwners: [...options.excludeOwners].sort(),
    },
    repos: {
      count: repoNames.size,
      sample: repoSample,
    },
    statsFetch: {
      fetchedThisRun,
      skipped: options.skipStats,
    },
    totals: {
      commits: shas.length,
    },
  };
}
|
||||
|
||||
/** Render the summary as human-readable lines on stdout. */
function printSummary(summary: Summary) {
  console.log("Paperclip commit metrics");
  console.log(`Query: ${summary.detectedQuery}`);
  console.log(`Range: ${summary.range.start} -> ${summary.range.end} (${summary.range.searchField})`);
  // Optional sections are omitted entirely when empty.
  if (summary.filters.excludedOwners.length > 0) {
    console.log(`Excluded owners: ${summary.filters.excludedOwners.join(", ")}`);
  }
  console.log(`Commits: ${summary.totals.commits}`);
  console.log(`Distinct repos: ${summary.repos.count}`);
  console.log(`Distinct contributors: ${summary.contributors.count}`);
  console.log(
    `Line stats: +${summary.lineStats.additions} / -${summary.lineStats.deletions} / ${summary.lineStats.totalChanges} total`,
  );
  console.log(
    `Line stat coverage: ${summary.lineStats.coveredCommits}/${summary.totals.commits}` +
      (summary.lineStats.complete ? " (complete)" : " (partial; rerun to hydrate more commits)"),
  );
  console.log(`Stats fetched this run: ${summary.statsFetch.fetchedThisRun}${summary.statsFetch.skipped ? " (skipped)" : ""}`);
  console.log(`Cache: ${summary.cacheFile}`);

  if (summary.repos.sample.length > 0) {
    console.log(`Sample repos: ${summary.repos.sample.join(", ")}`);
  }
  if (summary.contributors.sample.length > 0) {
    console.log(
      `Sample contributors: ${summary.contributors.sample
        .map((contributor) => contributor.login ?? contributor.displayName)
        .join(", ")}`,
    );
  }
}
|
||||
|
||||
/**
 * Write the filtered commit set to `outputPath` as JSON (summary + rows)
 * or CSV (header + one row per commit), creating parent directories.
 */
async function writeExport(
  outputPath: string,
  format: CliOptions["exportFormat"],
  cache: CacheFile,
  shas: string[],
  summary: Summary,
): Promise<void> {
  await fs.mkdir(path.dirname(outputPath), { recursive: true });
  if (format === "json") {
    const report = {
      summary,
      commits: shas.map((sha) => buildExportRow(cache, sha)),
    };
    await fs.writeFile(outputPath, JSON.stringify(report, null, 2), "utf8");
    return;
  }

  // CSV path: the header order here must match the row array below.
  const header = [
    "committedAt",
    "repository",
    "repositoryUrl",
    "sha",
    "commitUrl",
    "authorLogin",
    "authorName",
    "authorEmail",
    "contributors",
    "additions",
    "deletions",
    "totalChanges",
  ];
  const rows = [header.join(",")];
  for (const sha of shas) {
    const row = buildExportRow(cache, sha);
    rows.push(
      [
        row.committedAt,
        row.repository,
        row.repositoryUrl,
        row.sha,
        row.commitUrl,
        row.authorLogin,
        row.authorName,
        row.authorEmail,
        row.contributors,
        String(row.additions),
        String(row.deletions),
        String(row.totalChanges),
      ]
        .map(escapeCsv)
        .join(","),
    );
  }
  await fs.writeFile(outputPath, `${rows.join("\n")}\n`, "utf8");
}
|
||||
|
||||
function buildExportRow(cache: CacheFile, sha: string) {
|
||||
const commit = cache.commits[sha];
|
||||
if (!commit) {
|
||||
throw new Error(`Missing cached commit for sha ${sha}`);
|
||||
}
|
||||
const stats = cache.stats[sha];
|
||||
return {
|
||||
additions: stats?.additions ?? 0,
|
||||
authorEmail: commit.authorEmail ?? "",
|
||||
authorLogin: commit.authorLogin ?? "",
|
||||
authorName: commit.authorName ?? "",
|
||||
commitUrl: commit.htmlUrl,
|
||||
committedAt: commit.committedAt ?? "",
|
||||
contributors: commit.contributors.map((contributor) => contributor.login ?? contributor.displayName).join(" | "),
|
||||
deletions: stats?.deletions ?? 0,
|
||||
repository: commit.repositoryFullName,
|
||||
repositoryUrl: commit.repositoryUrl,
|
||||
sha: commit.sha,
|
||||
totalChanges: stats?.total ?? 0,
|
||||
};
|
||||
}
|
||||
|
||||
function escapeCsv(value: string): string {
|
||||
if (value.includes(",") || value.includes("\"") || value.includes("\n")) {
|
||||
return `"${value.replaceAll("\"", "\"\"")}"`;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function makeWindowKey(start: Date, end: Date): string {
|
||||
return `${start.toISOString()}..${end.toISOString()}`;
|
||||
}
|
||||
|
||||
class GitHubClient {
|
||||
private readonly apiBase = "https://api.github.com";
|
||||
private readonly token: string;
|
||||
|
||||
constructor(token: string) {
|
||||
this.token = token;
|
||||
}
|
||||
|
||||
async getJson<T>(pathname: string): Promise<T> {
|
||||
while (true) {
|
||||
const response = await fetch(`${this.apiBase}${pathname}`, {
|
||||
headers: {
|
||||
Accept: "application/vnd.github+json",
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
"User-Agent": "paperclip-commit-metrics",
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
},
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
return (await response.json()) as T;
|
||||
}
|
||||
|
||||
const retryAfter = response.headers.get("retry-after");
|
||||
if ((response.status === 403 || response.status === 429) && retryAfter) {
|
||||
const waitMs = Math.max(Number.parseInt(retryAfter, 10) * 1000, 1_000);
|
||||
console.error(`GitHub secondary rate limit hit for ${pathname}; waiting ${Math.ceil(waitMs / 1000)}s...`);
|
||||
await sleep(waitMs);
|
||||
continue;
|
||||
}
|
||||
|
||||
const remaining = response.headers.get("x-ratelimit-remaining");
|
||||
const resetAt = response.headers.get("x-ratelimit-reset");
|
||||
if ((response.status === 403 || response.status === 429) && remaining === "0" && resetAt) {
|
||||
const waitMs = Math.max(Number.parseInt(resetAt, 10) * 1000 - Date.now() + 1_000, 1_000);
|
||||
console.error(`GitHub rate limit hit for ${pathname}; waiting ${Math.ceil(waitMs / 1000)}s...`);
|
||||
await sleep(waitMs);
|
||||
continue;
|
||||
}
|
||||
|
||||
const body = await response.text();
|
||||
throw new Error(`GitHub API request failed (${response.status}) for ${pathname}: ${body}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error instanceof Error ? error.message : String(error));
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -3,6 +3,12 @@ set -euo pipefail
|
||||
|
||||
# Required workspace locations injected by Paperclip's worktree hooks.
base_cwd="${PAPERCLIP_WORKSPACE_BASE_CWD:?PAPERCLIP_WORKSPACE_BASE_CWD is required}"
worktree_cwd="${PAPERCLIP_WORKSPACE_CWD:?PAPERCLIP_WORKSPACE_CWD is required}"
# Global Paperclip home and instance identifier (both defaulted).
paperclip_home="${PAPERCLIP_HOME:-$HOME/.paperclip}"
paperclip_instance_id="${PAPERCLIP_INSTANCE_ID:-default}"
# Per-worktree Paperclip state lives inside the worktree itself.
paperclip_dir="$worktree_cwd/.paperclip"
worktree_config_path="$paperclip_dir/config.json"
worktree_env_path="$paperclip_dir/.env"
# Worktree display name: the branch if known, else the directory basename.
worktree_name="${PAPERCLIP_WORKSPACE_BRANCH:-$(basename "$worktree_cwd")}"
|
||||
|
||||
if [[ ! -d "$base_cwd" ]]; then
|
||||
echo "Base workspace does not exist: $base_cwd" >&2
|
||||
@@ -14,6 +20,286 @@ if [[ ! -d "$worktree_cwd" ]]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Pick the config to seed from: an explicit PAPERCLIP_CONFIG wins, then the
# base workspace's .paperclip/config.json (regular file or symlink), then
# the per-instance config under $paperclip_home.
source_config_path="${PAPERCLIP_CONFIG:-}"
if [[ -z "$source_config_path" && ( -e "$base_cwd/.paperclip/config.json" || -L "$base_cwd/.paperclip/config.json" ) ]]; then
  source_config_path="$base_cwd/.paperclip/config.json"
fi
if [[ -z "$source_config_path" ]]; then
  source_config_path="$paperclip_home/instances/$paperclip_instance_id/config.json"
fi
# The .env file is expected to sit next to whichever config was chosen.
source_env_path="$(dirname "$source_config_path")/.env"

mkdir -p "$paperclip_dir"
|
||||
|
||||
# Initialize the worktree via the paperclipai CLI when one is available:
# first the repo-local pnpm binary, then a global install. Returns non-zero
# when neither works so the caller can fall back to writing config manually.
run_isolated_worktree_init() {
  if command -v pnpm >/dev/null 2>&1 && pnpm paperclipai --help >/dev/null 2>&1; then
    pnpm paperclipai worktree init --force --seed-mode minimal --name "$worktree_name" --from-config "$source_config_path"
    return 0
  fi

  if command -v paperclipai >/dev/null 2>&1; then
    paperclipai worktree init --force --seed-mode minimal --name "$worktree_name" --from-config "$source_config_path"
    return 0
  fi

  return 1
}
|
||||
|
||||
write_fallback_worktree_config() {
|
||||
WORKTREE_NAME="$worktree_name" \
|
||||
BASE_CWD="$base_cwd" \
|
||||
WORKTREE_CWD="$worktree_cwd" \
|
||||
PAPERCLIP_DIR="$paperclip_dir" \
|
||||
SOURCE_CONFIG_PATH="$source_config_path" \
|
||||
SOURCE_ENV_PATH="$source_env_path" \
|
||||
PAPERCLIP_WORKTREES_DIR="${PAPERCLIP_WORKTREES_DIR:-}" \
|
||||
node <<'EOF'
|
||||
const fs = require("node:fs");
|
||||
const os = require("node:os");
|
||||
const path = require("node:path");
|
||||
const net = require("node:net");
|
||||
|
||||
// Expand a leading "~" or "~/" to the current user's home directory;
// every other value (including falsy ones) passes through unchanged.
function expandHomePrefix(value) {
  if (!value) return value;
  if (value === "~") {
    return os.homedir();
  }
  return value.startsWith("~/") ? path.resolve(os.homedir(), value.slice(2)) : value;
}
|
||||
|
||||
// Return the trimmed string when it has visible content; otherwise null
// (non-strings included).
function nonEmpty(value) {
  if (typeof value !== "string") return null;
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : null;
}
|
||||
|
||||
// Normalize an arbitrary instance id into lowercase [a-z0-9_-] with single
// dashes and no leading/trailing dash/underscore, falling back to
// "worktree" when nothing survives.
function sanitizeInstanceId(value) {
  let id = String(value ?? "").trim().toLowerCase();
  id = id.replace(/[^a-z0-9_-]+/g, "-");
  id = id.replace(/-+/g, "-");
  id = id.replace(/^[-_]+|[-_]+$/g, "");
  return id === "" ? "worktree" : id;
}
|
||||
|
||||
// Parse dotenv-style file contents into a plain key/value object.
// Supports optional `export ` prefixes, blank lines, and full-line `#`
// comments. Quoted values (single or double) are unwrapped verbatim;
// unquoted values have a trailing ` # comment` stripped.
// NOTE(review): stripping `\s+#.*$` from unquoted values will also truncate
// legitimate values containing " #" — confirm no such values are expected.
function parseEnvFile(contents) {
  const entries = {};
  for (const rawLine of contents.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line || line.startsWith("#")) continue;
    // KEY=VALUE with an optional `export ` prefix; anything else is ignored.
    const match = rawLine.match(/^\s*(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$/);
    if (!match) continue;
    const [, key, rawValue] = match;
    const value = rawValue.trim();
    if (!value) {
      entries[key] = "";
      continue;
    }
    // Matching surrounding quotes are removed; the inner text is kept as-is.
    if (
      (value.startsWith("\"") && value.endsWith("\"")) ||
      (value.startsWith("'") && value.endsWith("'"))
    ) {
      entries[key] = value.slice(1, -1);
      continue;
    }
    entries[key] = value.replace(/\s+#.*$/, "").trim();
  }
  return entries;
}
|
||||
|
||||
async function findAvailablePort(preferredPort, reserved = new Set()) {
|
||||
const startPort = Number.isFinite(preferredPort) && preferredPort > 0 ? Math.trunc(preferredPort) : 0;
|
||||
if (startPort > 0) {
|
||||
for (let port = startPort; port < startPort + 100; port += 1) {
|
||||
if (reserved.has(port)) continue;
|
||||
const available = await new Promise((resolve) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.once("error", () => resolve(false));
|
||||
server.listen(port, "127.0.0.1", () => {
|
||||
server.close(() => resolve(true));
|
||||
});
|
||||
});
|
||||
if (available) return port;
|
||||
}
|
||||
}
|
||||
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.once("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate a port.")));
|
||||
return;
|
||||
}
|
||||
const port = address.port;
|
||||
server.close(() => resolve(port));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function isLoopbackHost(hostname) {
|
||||
const value = hostname.trim().toLowerCase();
|
||||
return value === "127.0.0.1" || value === "localhost" || value === "::1";
|
||||
}
|
||||
|
||||
function rewriteLocalUrlPort(rawUrl, port) {
|
||||
if (!rawUrl) return undefined;
|
||||
try {
|
||||
const parsed = new URL(rawUrl);
|
||||
if (!isLoopbackHost(parsed.hostname)) return rawUrl;
|
||||
parsed.port = String(port);
|
||||
return parsed.toString();
|
||||
} catch {
|
||||
return rawUrl;
|
||||
}
|
||||
}
|
||||
|
||||
function resolveRuntimeLikePath(value, configPath) {
|
||||
const expanded = expandHomePrefix(value);
|
||||
if (path.isAbsolute(expanded)) return expanded;
|
||||
return path.resolve(path.dirname(configPath), expanded);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const worktreeName = process.env.WORKTREE_NAME;
|
||||
const paperclipDir = process.env.PAPERCLIP_DIR;
|
||||
const sourceConfigPath = process.env.SOURCE_CONFIG_PATH;
|
||||
const sourceEnvPath = process.env.SOURCE_ENV_PATH;
|
||||
const worktreeHome = path.resolve(expandHomePrefix(nonEmpty(process.env.PAPERCLIP_WORKTREES_DIR) ?? "~/.paperclip-worktrees"));
|
||||
const instanceId = sanitizeInstanceId(worktreeName);
|
||||
const instanceRoot = path.resolve(worktreeHome, "instances", instanceId);
|
||||
const configPath = path.resolve(paperclipDir, "config.json");
|
||||
const envPath = path.resolve(paperclipDir, ".env");
|
||||
|
||||
let sourceConfig = null;
|
||||
if (sourceConfigPath && fs.existsSync(sourceConfigPath)) {
|
||||
sourceConfig = JSON.parse(fs.readFileSync(sourceConfigPath, "utf8"));
|
||||
}
|
||||
|
||||
const sourceEnvEntries =
|
||||
sourceEnvPath && fs.existsSync(sourceEnvPath)
|
||||
? parseEnvFile(fs.readFileSync(sourceEnvPath, "utf8"))
|
||||
: {};
|
||||
|
||||
const preferredServerPort = Number(sourceConfig?.server?.port ?? 3101) + 1;
|
||||
const serverPort = await findAvailablePort(preferredServerPort);
|
||||
const preferredDbPort = Number(sourceConfig?.database?.embeddedPostgresPort ?? 54329) + 1;
|
||||
const databasePort = await findAvailablePort(preferredDbPort, new Set([serverPort]));
|
||||
|
||||
fs.rmSync(configPath, { force: true });
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.mkdirSync(instanceRoot, { recursive: true });
|
||||
|
||||
const authPublicBaseUrl = rewriteLocalUrlPort(sourceConfig?.auth?.publicBaseUrl, serverPort);
|
||||
const targetConfig = {
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: new Date().toISOString(),
|
||||
source: "configure",
|
||||
},
|
||||
...(sourceConfig?.llm ? { llm: sourceConfig.llm } : {}),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.resolve(instanceRoot, "db"),
|
||||
embeddedPostgresPort: databasePort,
|
||||
backup: {
|
||||
enabled: sourceConfig?.database?.backup?.enabled ?? true,
|
||||
intervalMinutes: sourceConfig?.database?.backup?.intervalMinutes ?? 60,
|
||||
retentionDays: sourceConfig?.database?.backup?.retentionDays ?? 30,
|
||||
dir: path.resolve(instanceRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: sourceConfig?.logging?.mode ?? "file",
|
||||
logDir: path.resolve(instanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: sourceConfig?.server?.deploymentMode ?? "local_trusted",
|
||||
exposure: sourceConfig?.server?.exposure ?? "private",
|
||||
host: sourceConfig?.server?.host ?? "127.0.0.1",
|
||||
port: serverPort,
|
||||
allowedHostnames: sourceConfig?.server?.allowedHostnames ?? [],
|
||||
serveUi: sourceConfig?.server?.serveUi ?? true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: sourceConfig?.auth?.baseUrlMode ?? "auto",
|
||||
...(authPublicBaseUrl ? { publicBaseUrl: authPublicBaseUrl } : {}),
|
||||
disableSignUp: sourceConfig?.auth?.disableSignUp ?? false,
|
||||
},
|
||||
storage: {
|
||||
provider: sourceConfig?.storage?.provider ?? "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.resolve(instanceRoot, "data", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: sourceConfig?.storage?.s3?.bucket ?? "paperclip",
|
||||
region: sourceConfig?.storage?.s3?.region ?? "us-east-1",
|
||||
endpoint: sourceConfig?.storage?.s3?.endpoint,
|
||||
prefix: sourceConfig?.storage?.s3?.prefix ?? "",
|
||||
forcePathStyle: sourceConfig?.storage?.s3?.forcePathStyle ?? false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: sourceConfig?.secrets?.provider ?? "local_encrypted",
|
||||
strictMode: sourceConfig?.secrets?.strictMode ?? false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.resolve(instanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
fs.writeFileSync(configPath, `${JSON.stringify(targetConfig, null, 2)}\n`, { mode: 0o600 });
|
||||
|
||||
const inlineMasterKey = nonEmpty(sourceEnvEntries.PAPERCLIP_SECRETS_MASTER_KEY);
|
||||
if (inlineMasterKey) {
|
||||
fs.mkdirSync(path.resolve(instanceRoot, "secrets"), { recursive: true });
|
||||
fs.writeFileSync(targetConfig.secrets.localEncrypted.keyFilePath, inlineMasterKey, {
|
||||
encoding: "utf8",
|
||||
mode: 0o600,
|
||||
});
|
||||
} else {
|
||||
const sourceKeyFilePath = nonEmpty(sourceEnvEntries.PAPERCLIP_SECRETS_MASTER_KEY_FILE)
|
||||
? resolveRuntimeLikePath(sourceEnvEntries.PAPERCLIP_SECRETS_MASTER_KEY_FILE, sourceConfigPath)
|
||||
: nonEmpty(sourceConfig?.secrets?.localEncrypted?.keyFilePath)
|
||||
? resolveRuntimeLikePath(sourceConfig.secrets.localEncrypted.keyFilePath, sourceConfigPath)
|
||||
: null;
|
||||
|
||||
if (sourceKeyFilePath && fs.existsSync(sourceKeyFilePath)) {
|
||||
fs.mkdirSync(path.resolve(instanceRoot, "secrets"), { recursive: true });
|
||||
fs.copyFileSync(sourceKeyFilePath, targetConfig.secrets.localEncrypted.keyFilePath);
|
||||
fs.chmodSync(targetConfig.secrets.localEncrypted.keyFilePath, 0o600);
|
||||
}
|
||||
}
|
||||
|
||||
const envLines = [
|
||||
"PAPERCLIP_HOME=" + JSON.stringify(worktreeHome),
|
||||
"PAPERCLIP_INSTANCE_ID=" + JSON.stringify(instanceId),
|
||||
"PAPERCLIP_CONFIG=" + JSON.stringify(configPath),
|
||||
"PAPERCLIP_CONTEXT=" + JSON.stringify(path.resolve(worktreeHome, "context.json")),
|
||||
"PAPERCLIP_IN_WORKTREE=true",
|
||||
"PAPERCLIP_WORKTREE_NAME=" + JSON.stringify(worktreeName),
|
||||
];
|
||||
|
||||
const agentJwtSecret = nonEmpty(sourceEnvEntries.PAPERCLIP_AGENT_JWT_SECRET);
|
||||
if (agentJwtSecret) {
|
||||
envLines.push("PAPERCLIP_AGENT_JWT_SECRET=" + JSON.stringify(agentJwtSecret));
|
||||
}
|
||||
|
||||
fs.writeFileSync(envPath, `${envLines.join("\n")}\n`, { mode: 0o600 });
|
||||
}
|
||||
|
||||
main().catch((error) => {
|
||||
console.error(error instanceof Error ? error.message : String(error));
|
||||
process.exit(1);
|
||||
});
|
||||
EOF
|
||||
}
|
||||
|
||||
if ! run_isolated_worktree_init; then
|
||||
echo "paperclipai CLI not available in this workspace; writing isolated fallback config without DB seeding." >&2
|
||||
write_fallback_worktree_config
|
||||
fi
|
||||
|
||||
while IFS= read -r relative_path; do
|
||||
[[ -n "$relative_path" ]] || continue
|
||||
source_path="$base_cwd/$relative_path"
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
],
|
||||
"scripts": {
|
||||
"dev": "tsx src/index.ts",
|
||||
"dev:watch": "cross-env PAPERCLIP_MIGRATION_PROMPT=never PAPERCLIP_MIGRATION_AUTO_APPLY=true tsx watch --ignore ../ui/node_modules --ignore ../ui/.vite --ignore ../ui/dist src/index.ts",
|
||||
"dev:watch": "cross-env PAPERCLIP_MIGRATION_PROMPT=never PAPERCLIP_MIGRATION_AUTO_APPLY=true tsx ./scripts/dev-watch.ts",
|
||||
"prepare:ui-dist": "bash ../scripts/prepare-server-ui-dist.sh",
|
||||
"build": "tsc && mkdir -p dist/onboarding-assets && cp -R src/onboarding-assets/. dist/onboarding-assets/",
|
||||
"prepack": "pnpm run prepare:ui-dist",
|
||||
@@ -65,7 +65,7 @@
|
||||
"drizzle-orm": "^0.38.4",
|
||||
"embedded-postgres": "^18.1.0-beta.16",
|
||||
"express": "^5.1.0",
|
||||
"hermes-paperclip-adapter": "0.1.1",
|
||||
"hermes-paperclip-adapter": "^0.2.0",
|
||||
"jsdom": "^28.1.0",
|
||||
"multer": "^2.0.2",
|
||||
"open": "^11.0.0",
|
||||
|
||||
33
server/scripts/dev-watch.ts
Normal file
33
server/scripts/dev-watch.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import { createRequire } from "node:module";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { resolveServerDevWatchIgnorePaths } from "../src/dev-watch-ignore.ts";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const tsxCliPath = require.resolve("tsx/cli");
|
||||
const serverRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
|
||||
const ignoreArgs = resolveServerDevWatchIgnorePaths(serverRoot).flatMap((ignorePath) => ["--exclude", ignorePath]);
|
||||
|
||||
const child = spawn(
|
||||
process.execPath,
|
||||
[tsxCliPath, "watch", ...ignoreArgs, "src/index.ts"],
|
||||
{
|
||||
cwd: serverRoot,
|
||||
env: process.env,
|
||||
stdio: "inherit",
|
||||
},
|
||||
);
|
||||
|
||||
child.on("exit", (code, signal) => {
|
||||
if (signal) {
|
||||
process.kill(process.pid, signal);
|
||||
return;
|
||||
}
|
||||
process.exit(code ?? 0);
|
||||
});
|
||||
|
||||
child.on("error", (error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
@@ -7,6 +7,12 @@ import { testEnvironment } from "@paperclipai/adapter-codex-local/server";
|
||||
const itWindows = process.platform === "win32" ? it : it.skip;
|
||||
|
||||
describe("codex_local environment diagnostics", () => {
|
||||
beforeEach(() => {
|
||||
vi.stubEnv("OPENAI_API_KEY", "");
|
||||
});
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
it("creates a missing working directory when cwd is absolute", async () => {
|
||||
const cwd = path.join(
|
||||
os.tmpdir(),
|
||||
@@ -32,6 +38,67 @@ describe("codex_local environment diagnostics", () => {
|
||||
await fs.rm(path.dirname(cwd), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("emits codex_native_auth_present when ~/.codex/auth.json exists and OPENAI_API_KEY is unset", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-codex-auth-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const codexHome = path.join(root, ".codex");
|
||||
const cwd = path.join(root, "workspace");
|
||||
|
||||
try {
|
||||
await fs.mkdir(codexHome, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(codexHome, "auth.json"),
|
||||
JSON.stringify({ accessToken: "fake-token", accountId: "acct-1" }),
|
||||
);
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "codex_local",
|
||||
config: {
|
||||
command: process.execPath,
|
||||
cwd,
|
||||
env: { CODEX_HOME: codexHome },
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.checks.some((check) => check.code === "codex_native_auth_present")).toBe(true);
|
||||
expect(result.checks.some((check) => check.code === "codex_openai_api_key_missing")).toBe(false);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("emits codex_openai_api_key_missing when neither env var nor native auth exists", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-codex-noauth-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const codexHome = path.join(root, ".codex");
|
||||
const cwd = path.join(root, "workspace");
|
||||
|
||||
try {
|
||||
await fs.mkdir(codexHome, { recursive: true });
|
||||
// No auth.json written
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "codex_local",
|
||||
config: {
|
||||
command: process.execPath,
|
||||
cwd,
|
||||
env: { CODEX_HOME: codexHome },
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.checks.some((check) => check.code === "codex_openai_api_key_missing")).toBe(true);
|
||||
expect(result.checks.some((check) => check.code === "codex_native_auth_present")).toBe(false);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
itWindows("runs the hello probe when Codex is available via a Windows .cmd wrapper", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
|
||||
@@ -210,7 +210,7 @@ describe("codex execute", () => {
|
||||
"company-1",
|
||||
"codex-home",
|
||||
);
|
||||
const workspaceSkill = path.join(workspace, ".agents", "skills", "paperclip");
|
||||
const homeSkill = path.join(isolatedCodexHome, "skills", "paperclip");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(sharedCodexHome, { recursive: true });
|
||||
await fs.writeFile(path.join(sharedCodexHome, "auth.json"), '{"token":"shared"}\n', "utf8");
|
||||
@@ -284,7 +284,7 @@ describe("codex execute", () => {
|
||||
expect(await fs.realpath(isolatedAuth)).toBe(await fs.realpath(path.join(sharedCodexHome, "auth.json")));
|
||||
expect((await fs.lstat(isolatedConfig)).isFile()).toBe(true);
|
||||
expect(await fs.readFile(isolatedConfig, "utf8")).toBe('model = "codex-mini-latest"\n');
|
||||
expect((await fs.lstat(workspaceSkill)).isSymbolicLink()).toBe(true);
|
||||
expect((await fs.lstat(homeSkill)).isSymbolicLink()).toBe(true);
|
||||
expect(logs).toContainEqual(
|
||||
expect.objectContaining({
|
||||
stream: "stdout",
|
||||
@@ -371,7 +371,7 @@ describe("codex execute", () => {
|
||||
|
||||
const capture = JSON.parse(await fs.readFile(capturePath, "utf8")) as CapturePayload;
|
||||
expect(capture.codexHome).toBe(explicitCodexHome);
|
||||
expect((await fs.lstat(path.join(workspace, ".agents", "skills", "paperclip"))).isSymbolicLink()).toBe(true);
|
||||
expect((await fs.lstat(path.join(explicitCodexHome, "skills", "paperclip"))).isSymbolicLink()).toBe(true);
|
||||
await expect(fs.lstat(path.join(paperclipHome, "instances", "worktree-1", "codex-home"))).rejects.toThrow();
|
||||
} finally {
|
||||
if (previousHome === undefined) delete process.env.HOME;
|
||||
|
||||
@@ -43,7 +43,7 @@ describe("codex local skill sync", () => {
|
||||
expect(before.desiredSkills).toContain(paperclipKey);
|
||||
expect(before.entries.find((entry) => entry.key === paperclipKey)?.required).toBe(true);
|
||||
expect(before.entries.find((entry) => entry.key === paperclipKey)?.state).toBe("configured");
|
||||
expect(before.entries.find((entry) => entry.key === paperclipKey)?.detail).toContain(".agents/skills");
|
||||
expect(before.entries.find((entry) => entry.key === paperclipKey)?.detail).toContain("CODEX_HOME/skills/");
|
||||
});
|
||||
|
||||
it("does not persist Paperclip skills into CODEX_HOME during sync", async () => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
@@ -28,6 +28,13 @@ console.log(JSON.stringify({
|
||||
}
|
||||
|
||||
describe("cursor environment diagnostics", () => {
|
||||
beforeEach(() => {
|
||||
vi.stubEnv("CURSOR_API_KEY", "");
|
||||
});
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it("creates a missing working directory when cwd is absolute", async () => {
|
||||
const cwd = path.join(
|
||||
os.tmpdir(),
|
||||
@@ -116,4 +123,73 @@ describe("cursor environment diagnostics", () => {
|
||||
expect(args).not.toContain("--trust");
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("emits cursor_native_auth_present when cli-config.json has authInfo and CURSOR_API_KEY is unset", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-cursor-auth-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const cursorHome = path.join(root, ".cursor");
|
||||
const cwd = path.join(root, "workspace");
|
||||
|
||||
try {
|
||||
await fs.mkdir(cursorHome, { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(cursorHome, "cli-config.json"),
|
||||
JSON.stringify({
|
||||
authInfo: {
|
||||
email: "test@example.com",
|
||||
displayName: "Test User",
|
||||
userId: 12345,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "cursor",
|
||||
config: {
|
||||
command: process.execPath,
|
||||
cwd,
|
||||
env: { CURSOR_HOME: cursorHome },
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.checks.some((check) => check.code === "cursor_native_auth_present")).toBe(true);
|
||||
expect(result.checks.some((check) => check.code === "cursor_api_key_missing")).toBe(false);
|
||||
const authCheck = result.checks.find((check) => check.code === "cursor_native_auth_present");
|
||||
expect(authCheck?.detail).toContain("test@example.com");
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("emits cursor_api_key_missing when neither env var nor native auth exists", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-cursor-noauth-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const cursorHome = path.join(root, ".cursor");
|
||||
const cwd = path.join(root, "workspace");
|
||||
|
||||
try {
|
||||
await fs.mkdir(cursorHome, { recursive: true });
|
||||
// No cli-config.json written
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "cursor",
|
||||
config: {
|
||||
command: process.execPath,
|
||||
cwd,
|
||||
env: { CURSOR_HOME: cursorHome },
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.checks.some((check) => check.code === "cursor_api_key_missing")).toBe(true);
|
||||
expect(result.checks.some((check) => check.code === "cursor_native_auth_present")).toBe(false);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
42
server/src/__tests__/dev-watch-ignore.test.ts
Normal file
42
server/src/__tests__/dev-watch-ignore.test.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveServerDevWatchIgnorePaths } from "../dev-watch-ignore.js";
|
||||
|
||||
describe("resolveServerDevWatchIgnorePaths", () => {
|
||||
it("includes both the worktree UI paths and their real shared targets", () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-dev-watch-"));
|
||||
const sharedUiRoot = path.join(tempRoot, "shared-ui");
|
||||
const worktreeRoot = path.join(tempRoot, "repo", ".paperclip", "worktrees", "PAP-884");
|
||||
const serverRoot = path.join(worktreeRoot, "server");
|
||||
const worktreeUiRoot = path.join(worktreeRoot, "ui");
|
||||
|
||||
fs.mkdirSync(path.join(sharedUiRoot, "node_modules"), { recursive: true });
|
||||
fs.mkdirSync(path.join(sharedUiRoot, ".vite"), { recursive: true });
|
||||
fs.mkdirSync(path.join(sharedUiRoot, "dist"), { recursive: true });
|
||||
fs.mkdirSync(serverRoot, { recursive: true });
|
||||
fs.mkdirSync(worktreeUiRoot, { recursive: true });
|
||||
|
||||
fs.symlinkSync(path.join(sharedUiRoot, "node_modules"), path.join(worktreeUiRoot, "node_modules"));
|
||||
fs.symlinkSync(path.join(sharedUiRoot, ".vite"), path.join(worktreeUiRoot, ".vite"));
|
||||
fs.symlinkSync(path.join(sharedUiRoot, "dist"), path.join(worktreeUiRoot, "dist"));
|
||||
|
||||
const ignorePaths = resolveServerDevWatchIgnorePaths(serverRoot);
|
||||
|
||||
expect(ignorePaths).toContain(path.join(worktreeUiRoot, "node_modules"));
|
||||
expect(ignorePaths).toContain(`${path.join(worktreeUiRoot, "node_modules").replaceAll(path.sep, "/")}/**`);
|
||||
expect(ignorePaths).toContain(fs.realpathSync(path.join(sharedUiRoot, "node_modules")));
|
||||
expect(ignorePaths).toContain(`${fs.realpathSync(path.join(sharedUiRoot, "node_modules")).replaceAll(path.sep, "/")}/**`);
|
||||
expect(ignorePaths).toContain(path.join(worktreeUiRoot, "node_modules", ".vite-temp"));
|
||||
expect(ignorePaths).toContain(
|
||||
`${path.join(worktreeUiRoot, "node_modules", ".vite-temp").replaceAll(path.sep, "/")}/**`,
|
||||
);
|
||||
expect(ignorePaths).toContain(path.join(worktreeUiRoot, ".vite"));
|
||||
expect(ignorePaths).toContain(fs.realpathSync(path.join(sharedUiRoot, ".vite")));
|
||||
expect(ignorePaths).toContain(path.join(worktreeUiRoot, "dist"));
|
||||
expect(ignorePaths).toContain(fs.realpathSync(path.join(sharedUiRoot, "dist")));
|
||||
expect(ignorePaths).toContain("**/{node_modules,bower_components,vendor}/**");
|
||||
expect(ignorePaths).toContain("**/.vite-temp/**");
|
||||
});
|
||||
});
|
||||
@@ -1,89 +1,29 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { spawn, type ChildProcess } from "node:child_process";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
|
||||
import {
|
||||
applyPendingMigrations,
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
agents,
|
||||
agentWakeupRequests,
|
||||
companies,
|
||||
createDb,
|
||||
heartbeatRunEvents,
|
||||
heartbeatRuns,
|
||||
issues,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { runningProcesses } from "../adapters/index.ts";
|
||||
import { heartbeatService } from "../services/heartbeat.ts";
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function startTempDatabase() {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-heartbeat-recovery-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
return { connectionString, instance, dataDir };
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres heartbeat recovery tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
function spawnAliveProcess() {
|
||||
@@ -92,17 +32,14 @@ function spawnAliveProcess() {
|
||||
});
|
||||
}
|
||||
|
||||
describe("heartbeat orphaned process recovery", () => {
|
||||
describeEmbeddedPostgres("heartbeat orphaned process recovery", () => {
|
||||
let db!: ReturnType<typeof createDb>;
|
||||
let instance: EmbeddedPostgresInstance | null = null;
|
||||
let dataDir = "";
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
const childProcesses = new Set<ChildProcess>();
|
||||
|
||||
beforeAll(async () => {
|
||||
const started = await startTempDatabase();
|
||||
db = createDb(started.connectionString);
|
||||
instance = started.instance;
|
||||
dataDir = started.dataDir;
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-heartbeat-recovery-");
|
||||
db = createDb(tempDb.connectionString);
|
||||
}, 20_000);
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -125,10 +62,7 @@ describe("heartbeat orphaned process recovery", () => {
|
||||
}
|
||||
childProcesses.clear();
|
||||
runningProcesses.clear();
|
||||
await instance?.stop();
|
||||
if (dataDir) {
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
}
|
||||
await tempDb?.cleanup();
|
||||
});
|
||||
|
||||
async function seedRunFixture(input?: {
|
||||
|
||||
6
server/src/__tests__/helpers/embedded-postgres.ts
Normal file
6
server/src/__tests__/helpers/embedded-postgres.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestDatabase,
|
||||
type EmbeddedPostgresTestSupport,
|
||||
} from "@paperclipai/db";
|
||||
57
server/src/__tests__/invite-join-grants.test.ts
Normal file
57
server/src/__tests__/invite-join-grants.test.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { agentJoinGrantsFromDefaults } from "../routes/access.js";
|
||||
|
||||
describe("agentJoinGrantsFromDefaults", () => {
|
||||
it("adds tasks:assign when invite defaults do not specify agent grants", () => {
|
||||
expect(agentJoinGrantsFromDefaults(null)).toEqual([
|
||||
{
|
||||
permissionKey: "tasks:assign",
|
||||
scope: null,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("preserves invite agent grants and appends tasks:assign", () => {
|
||||
expect(
|
||||
agentJoinGrantsFromDefaults({
|
||||
agent: {
|
||||
grants: [
|
||||
{
|
||||
permissionKey: "agents:create",
|
||||
scope: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
).toEqual([
|
||||
{
|
||||
permissionKey: "agents:create",
|
||||
scope: null,
|
||||
},
|
||||
{
|
||||
permissionKey: "tasks:assign",
|
||||
scope: null,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("does not duplicate tasks:assign when invite defaults already include it", () => {
|
||||
expect(
|
||||
agentJoinGrantsFromDefaults({
|
||||
agent: {
|
||||
grants: [
|
||||
{
|
||||
permissionKey: "tasks:assign",
|
||||
scope: { projectId: "project-1" },
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
).toEqual([
|
||||
{
|
||||
permissionKey: "tasks:assign",
|
||||
scope: { projectId: "project-1" },
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -20,16 +20,29 @@ describe("issue goal fallback", () => {
|
||||
resolveIssueGoalId({
|
||||
projectId: null,
|
||||
goalId: "goal-2",
|
||||
projectGoalId: "goal-3",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-2");
|
||||
});
|
||||
|
||||
it("does not force a company goal when the issue belongs to a project", () => {
|
||||
it("inherits the project goal when creating a project-linked issue", () => {
|
||||
expect(
|
||||
resolveIssueGoalId({
|
||||
projectId: "project-1",
|
||||
goalId: null,
|
||||
projectGoalId: "goal-2",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-2");
|
||||
});
|
||||
|
||||
it("does not force a company goal when the project has no goal", () => {
|
||||
expect(
|
||||
resolveIssueGoalId({
|
||||
projectId: "project-1",
|
||||
goalId: null,
|
||||
projectGoalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBeNull();
|
||||
@@ -40,20 +53,47 @@ describe("issue goal fallback", () => {
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: null,
|
||||
currentGoalId: null,
|
||||
currentProjectGoalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-1");
|
||||
});
|
||||
|
||||
it("clears the fallback when a project is added later", () => {
|
||||
it("switches from the company fallback to the project goal when a project is added later", () => {
|
||||
expect(
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: null,
|
||||
currentGoalId: "goal-1",
|
||||
currentProjectGoalId: null,
|
||||
projectId: "project-1",
|
||||
goalId: null,
|
||||
projectGoalId: "goal-2",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBeNull();
|
||||
).toBe("goal-2");
|
||||
});
|
||||
|
||||
it("backfills the project goal for legacy project-linked issues on update", () => {
|
||||
expect(
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: "project-1",
|
||||
currentGoalId: null,
|
||||
currentProjectGoalId: "goal-2",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-2");
|
||||
});
|
||||
|
||||
it("preserves an explicit goal across project fallback changes", () => {
|
||||
expect(
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: "project-1",
|
||||
currentGoalId: "goal-explicit",
|
||||
currentProjectGoalId: "goal-2",
|
||||
projectId: "project-2",
|
||||
projectGoalId: "goal-3",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-explicit");
|
||||
});
|
||||
});
|
||||
|
||||
187
server/src/__tests__/issues-goal-context-routes.test.ts
Normal file
187
server/src/__tests__/issues-goal-context-routes.test.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { issueRoutes } from "../routes/issues.js";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
|
||||
const mockIssueService = vi.hoisted(() => ({
|
||||
getById: vi.fn(),
|
||||
getAncestors: vi.fn(),
|
||||
findMentionedProjectIds: vi.fn(),
|
||||
getCommentCursor: vi.fn(),
|
||||
getComment: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockProjectService = vi.hoisted(() => ({
|
||||
getById: vi.fn(),
|
||||
listByIds: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockGoalService = vi.hoisted(() => ({
|
||||
getById: vi.fn(),
|
||||
getDefaultCompanyGoal: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("../services/index.js", () => ({
|
||||
accessService: () => ({
|
||||
canUser: vi.fn(),
|
||||
hasPermission: vi.fn(),
|
||||
}),
|
||||
agentService: () => ({
|
||||
getById: vi.fn(),
|
||||
}),
|
||||
documentService: () => ({
|
||||
getIssueDocumentPayload: vi.fn(async () => ({})),
|
||||
}),
|
||||
executionWorkspaceService: () => ({
|
||||
getById: vi.fn(),
|
||||
}),
|
||||
goalService: () => mockGoalService,
|
||||
heartbeatService: () => ({
|
||||
wakeup: vi.fn(async () => undefined),
|
||||
reportRunActivity: vi.fn(async () => undefined),
|
||||
}),
|
||||
issueApprovalService: () => ({}),
|
||||
issueService: () => mockIssueService,
|
||||
logActivity: vi.fn(async () => undefined),
|
||||
projectService: () => mockProjectService,
|
||||
routineService: () => ({
|
||||
syncRunStatusForIssue: vi.fn(async () => undefined),
|
||||
}),
|
||||
workProductService: () => ({
|
||||
listForIssue: vi.fn(async () => []),
|
||||
}),
|
||||
}));
|
||||
|
||||
function createApp() {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
(req as any).actor = {
|
||||
type: "board",
|
||||
userId: "local-board",
|
||||
companyIds: ["company-1"],
|
||||
source: "local_implicit",
|
||||
isInstanceAdmin: false,
|
||||
};
|
||||
next();
|
||||
});
|
||||
app.use("/api", issueRoutes({} as any, {} as any));
|
||||
app.use(errorHandler);
|
||||
return app;
|
||||
}
|
||||
|
||||
const legacyProjectLinkedIssue = {
|
||||
id: "11111111-1111-4111-8111-111111111111",
|
||||
companyId: "company-1",
|
||||
identifier: "PAP-581",
|
||||
title: "Legacy onboarding task",
|
||||
description: "Seed the first CEO task",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
projectId: "22222222-2222-4222-8222-222222222222",
|
||||
goalId: null,
|
||||
parentId: null,
|
||||
assigneeAgentId: "33333333-3333-4333-8333-333333333333",
|
||||
assigneeUserId: null,
|
||||
updatedAt: new Date("2026-03-24T12:00:00Z"),
|
||||
executionWorkspaceId: null,
|
||||
labels: [],
|
||||
labelIds: [],
|
||||
};
|
||||
|
||||
const projectGoal = {
|
||||
id: "44444444-4444-4444-8444-444444444444",
|
||||
companyId: "company-1",
|
||||
title: "Launch the company",
|
||||
description: null,
|
||||
level: "company",
|
||||
status: "active",
|
||||
parentId: null,
|
||||
ownerAgentId: null,
|
||||
createdAt: new Date("2026-03-20T00:00:00Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00Z"),
|
||||
};
|
||||
|
||||
describe("issue goal context routes", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockIssueService.getById.mockResolvedValue(legacyProjectLinkedIssue);
|
||||
mockIssueService.getAncestors.mockResolvedValue([]);
|
||||
mockIssueService.findMentionedProjectIds.mockResolvedValue([]);
|
||||
mockIssueService.getCommentCursor.mockResolvedValue({
|
||||
totalComments: 0,
|
||||
latestCommentId: null,
|
||||
latestCommentAt: null,
|
||||
});
|
||||
mockIssueService.getComment.mockResolvedValue(null);
|
||||
mockProjectService.getById.mockResolvedValue({
|
||||
id: legacyProjectLinkedIssue.projectId,
|
||||
companyId: "company-1",
|
||||
urlKey: "onboarding",
|
||||
goalId: projectGoal.id,
|
||||
goalIds: [projectGoal.id],
|
||||
goals: [{ id: projectGoal.id, title: projectGoal.title }],
|
||||
name: "Onboarding",
|
||||
description: null,
|
||||
status: "in_progress",
|
||||
leadAgentId: null,
|
||||
targetDate: null,
|
||||
color: null,
|
||||
pauseReason: null,
|
||||
pausedAt: null,
|
||||
executionWorkspacePolicy: null,
|
||||
codebase: {
|
||||
workspaceId: null,
|
||||
repoUrl: null,
|
||||
repoRef: null,
|
||||
defaultRef: null,
|
||||
repoName: null,
|
||||
localFolder: null,
|
||||
managedFolder: "/tmp/company-1/project-1",
|
||||
effectiveLocalFolder: "/tmp/company-1/project-1",
|
||||
origin: "managed_checkout",
|
||||
},
|
||||
workspaces: [],
|
||||
primaryWorkspace: null,
|
||||
archivedAt: null,
|
||||
createdAt: new Date("2026-03-20T00:00:00Z"),
|
||||
updatedAt: new Date("2026-03-20T00:00:00Z"),
|
||||
});
|
||||
mockProjectService.listByIds.mockResolvedValue([]);
|
||||
mockGoalService.getById.mockImplementation(async (id: string) =>
|
||||
id === projectGoal.id ? projectGoal : null,
|
||||
);
|
||||
mockGoalService.getDefaultCompanyGoal.mockResolvedValue(null);
|
||||
});
|
||||
|
||||
it("surfaces the project goal from GET /issues/:id when the issue has no direct goal", async () => {
|
||||
const res = await request(createApp()).get("/api/issues/11111111-1111-4111-8111-111111111111");
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.body.goalId).toBe(projectGoal.id);
|
||||
expect(res.body.goal).toEqual(
|
||||
expect.objectContaining({
|
||||
id: projectGoal.id,
|
||||
title: projectGoal.title,
|
||||
}),
|
||||
);
|
||||
expect(mockGoalService.getDefaultCompanyGoal).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("surfaces the project goal from GET /issues/:id/heartbeat-context", async () => {
|
||||
const res = await request(createApp()).get(
|
||||
"/api/issues/11111111-1111-4111-8111-111111111111/heartbeat-context",
|
||||
);
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.body.issue.goalId).toBe(projectGoal.id);
|
||||
expect(res.body.goal).toEqual(
|
||||
expect.objectContaining({
|
||||
id: projectGoal.id,
|
||||
title: projectGoal.title,
|
||||
}),
|
||||
);
|
||||
expect(mockGoalService.getDefaultCompanyGoal).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,103 +1,43 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
|
||||
import {
|
||||
activityLog,
|
||||
agents,
|
||||
applyPendingMigrations,
|
||||
companies,
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
issueComments,
|
||||
issueInboxArchives,
|
||||
issues,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { issueService } from "../services/issues.ts";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres issue service tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function startTempDatabase() {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-issues-service-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
return { connectionString, dataDir, instance };
|
||||
}
|
||||
|
||||
describe("issueService.list participantAgentId", () => {
|
||||
describeEmbeddedPostgres("issueService.list participantAgentId", () => {
|
||||
let db!: ReturnType<typeof createDb>;
|
||||
let svc!: ReturnType<typeof issueService>;
|
||||
let instance: EmbeddedPostgresInstance | null = null;
|
||||
let dataDir = "";
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
|
||||
beforeAll(async () => {
|
||||
const started = await startTempDatabase();
|
||||
db = createDb(started.connectionString);
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-issues-service-");
|
||||
db = createDb(tempDb.connectionString);
|
||||
svc = issueService(db);
|
||||
instance = started.instance;
|
||||
dataDir = started.dataDir;
|
||||
}, 20_000);
|
||||
|
||||
afterEach(async () => {
|
||||
await db.delete(issueComments);
|
||||
await db.delete(issueInboxArchives);
|
||||
await db.delete(activityLog);
|
||||
await db.delete(issues);
|
||||
await db.delete(agents);
|
||||
@@ -105,10 +45,7 @@ describe("issueService.list participantAgentId", () => {
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await instance?.stop();
|
||||
if (dataDir) {
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
}
|
||||
await tempDb?.cleanup();
|
||||
});
|
||||
|
||||
it("returns issues an agent participated in across the supported signals", async () => {
|
||||
@@ -281,4 +218,99 @@ describe("issueService.list participantAgentId", () => {
|
||||
|
||||
expect(result.map((issue) => issue.id)).toEqual([matchedIssueId]);
|
||||
});
|
||||
|
||||
it("hides archived inbox issues until new external activity arrives", async () => {
|
||||
const companyId = randomUUID();
|
||||
const userId = "user-1";
|
||||
const otherUserId = "user-2";
|
||||
|
||||
await db.insert(companies).values({
|
||||
id: companyId,
|
||||
name: "Paperclip",
|
||||
issuePrefix: `T${companyId.replace(/-/g, "").slice(0, 6).toUpperCase()}`,
|
||||
requireBoardApprovalForNewAgents: false,
|
||||
});
|
||||
|
||||
const visibleIssueId = randomUUID();
|
||||
const archivedIssueId = randomUUID();
|
||||
const resurfacedIssueId = randomUUID();
|
||||
|
||||
await db.insert(issues).values([
|
||||
{
|
||||
id: visibleIssueId,
|
||||
companyId,
|
||||
title: "Visible issue",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
createdByUserId: userId,
|
||||
createdAt: new Date("2026-03-26T10:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-26T10:00:00.000Z"),
|
||||
},
|
||||
{
|
||||
id: archivedIssueId,
|
||||
companyId,
|
||||
title: "Archived issue",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
createdByUserId: userId,
|
||||
createdAt: new Date("2026-03-26T11:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-26T11:00:00.000Z"),
|
||||
},
|
||||
{
|
||||
id: resurfacedIssueId,
|
||||
companyId,
|
||||
title: "Resurfaced issue",
|
||||
status: "todo",
|
||||
priority: "medium",
|
||||
createdByUserId: userId,
|
||||
createdAt: new Date("2026-03-26T12:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-26T12:00:00.000Z"),
|
||||
},
|
||||
]);
|
||||
|
||||
await svc.archiveInbox(
|
||||
companyId,
|
||||
archivedIssueId,
|
||||
userId,
|
||||
new Date("2026-03-26T12:30:00.000Z"),
|
||||
);
|
||||
await svc.archiveInbox(
|
||||
companyId,
|
||||
resurfacedIssueId,
|
||||
userId,
|
||||
new Date("2026-03-26T13:00:00.000Z"),
|
||||
);
|
||||
|
||||
await db.insert(issueComments).values({
|
||||
companyId,
|
||||
issueId: resurfacedIssueId,
|
||||
authorUserId: otherUserId,
|
||||
body: "This should bring the issue back into Mine.",
|
||||
createdAt: new Date("2026-03-26T13:30:00.000Z"),
|
||||
updatedAt: new Date("2026-03-26T13:30:00.000Z"),
|
||||
});
|
||||
|
||||
const archivedFiltered = await svc.list(companyId, {
|
||||
touchedByUserId: userId,
|
||||
inboxArchivedByUserId: userId,
|
||||
});
|
||||
|
||||
expect(archivedFiltered.map((issue) => issue.id)).toEqual([
|
||||
resurfacedIssueId,
|
||||
visibleIssueId,
|
||||
]);
|
||||
|
||||
await svc.unarchiveInbox(companyId, archivedIssueId, userId);
|
||||
|
||||
const afterUnarchive = await svc.list(companyId, {
|
||||
touchedByUserId: userId,
|
||||
inboxArchivedByUserId: userId,
|
||||
});
|
||||
|
||||
expect(new Set(afterUnarchive.map((issue) => issue.id))).toEqual(new Set([
|
||||
visibleIssueId,
|
||||
archivedIssueId,
|
||||
resurfacedIssueId,
|
||||
]));
|
||||
});
|
||||
});
|
||||
|
||||
41
server/src/__tests__/normalize-agent-mention-token.test.ts
Normal file
41
server/src/__tests__/normalize-agent-mention-token.test.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { normalizeAgentMentionToken } from "../services/issues.ts";
|
||||
|
||||
describe("normalizeAgentMentionToken", () => {
|
||||
it("decodes hex numeric entities such as space ( )", () => {
|
||||
expect(normalizeAgentMentionToken("Baba ")).toBe("Baba");
|
||||
});
|
||||
|
||||
it("decodes decimal numeric entities", () => {
|
||||
expect(normalizeAgentMentionToken("Baba ")).toBe("Baba");
|
||||
});
|
||||
|
||||
it("decodes common named whitespace entities", () => {
|
||||
expect(normalizeAgentMentionToken("Baba ")).toBe("Baba");
|
||||
});
|
||||
|
||||
// Mid-token entity (review asked for this shape); we decode &→&, not strip to "Baba" (that broke M&M).
|
||||
it("decodes a named entity in the middle of the token", () => {
|
||||
expect(normalizeAgentMentionToken("Ba&ba")).toBe("Ba&ba");
|
||||
});
|
||||
|
||||
it("decodes & so agent names with ampersands still match", () => {
|
||||
expect(normalizeAgentMentionToken("M&M")).toBe("M&M");
|
||||
});
|
||||
|
||||
it("decodes additional named entities used in rich text (e.g. ©)", () => {
|
||||
expect(normalizeAgentMentionToken("Agent©Name")).toBe("Agent©Name");
|
||||
});
|
||||
|
||||
it("leaves unknown semicolon-terminated named references unchanged", () => {
|
||||
expect(normalizeAgentMentionToken("Baba¬arealentity;")).toBe("Baba¬arealentity;");
|
||||
});
|
||||
|
||||
it("returns plain names unchanged", () => {
|
||||
expect(normalizeAgentMentionToken("Baba")).toBe("Baba");
|
||||
});
|
||||
|
||||
it("trims after decoding entities", () => {
|
||||
expect(normalizeAgentMentionToken("Baba  ")).toBe("Baba");
|
||||
});
|
||||
});
|
||||
@@ -1,8 +1,4 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { eq } from "drizzle-orm";
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
@@ -11,11 +7,9 @@ import {
|
||||
activityLog,
|
||||
agentWakeupRequests,
|
||||
agents,
|
||||
applyPendingMigrations,
|
||||
companies,
|
||||
companyMemberships,
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
heartbeatRunEvents,
|
||||
heartbeatRuns,
|
||||
instanceSettings,
|
||||
@@ -26,6 +20,10 @@ import {
|
||||
routines,
|
||||
routineTriggers,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
import { accessService } from "../services/access.js";
|
||||
|
||||
@@ -78,82 +76,22 @@ vi.mock("../services/index.js", async () => {
|
||||
};
|
||||
});
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres routine route tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function startTempDatabase() {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-routines-e2e-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
return { connectionString, dataDir, instance };
|
||||
}
|
||||
|
||||
describe("routine routes end-to-end", () => {
|
||||
describeEmbeddedPostgres("routine routes end-to-end", () => {
|
||||
let db!: ReturnType<typeof createDb>;
|
||||
let instance: EmbeddedPostgresInstance | null = null;
|
||||
let dataDir = "";
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
|
||||
beforeAll(async () => {
|
||||
const started = await startTempDatabase();
|
||||
db = createDb(started.connectionString);
|
||||
instance = started.instance;
|
||||
dataDir = started.dataDir;
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-routines-e2e-");
|
||||
db = createDb(tempDb.connectionString);
|
||||
}, 20_000);
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -174,10 +112,7 @@ describe("routine routes end-to-end", () => {
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await instance?.stop();
|
||||
if (dataDir) {
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
}
|
||||
await tempDb?.cleanup();
|
||||
});
|
||||
|
||||
async function createApp(actor: Record<string, unknown>) {
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import { createHmac, randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import net from "node:net";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
|
||||
import {
|
||||
activityLog,
|
||||
agents,
|
||||
applyPendingMigrations,
|
||||
companies,
|
||||
companySecrets,
|
||||
companySecretVersions,
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
heartbeatRuns,
|
||||
issues,
|
||||
projects,
|
||||
@@ -21,85 +15,29 @@ import {
|
||||
routines,
|
||||
routineTriggers,
|
||||
} from "@paperclipai/db";
|
||||
import {
|
||||
getEmbeddedPostgresTestSupport,
|
||||
startEmbeddedPostgresTestDatabase,
|
||||
} from "./helpers/embedded-postgres.js";
|
||||
import { issueService } from "../services/issues.ts";
|
||||
import { routineService } from "../services/routines.ts";
|
||||
|
||||
type EmbeddedPostgresInstance = {
|
||||
initialise(): Promise<void>;
|
||||
start(): Promise<void>;
|
||||
stop(): Promise<void>;
|
||||
};
|
||||
const embeddedPostgresSupport = await getEmbeddedPostgresTestSupport();
|
||||
const describeEmbeddedPostgres = embeddedPostgresSupport.supported ? describe : describe.skip;
|
||||
|
||||
type EmbeddedPostgresCtor = new (opts: {
|
||||
databaseDir: string;
|
||||
user: string;
|
||||
password: string;
|
||||
port: number;
|
||||
persistent: boolean;
|
||||
initdbFlags?: string[];
|
||||
onLog?: (message: unknown) => void;
|
||||
onError?: (message: unknown) => void;
|
||||
}) => EmbeddedPostgresInstance;
|
||||
|
||||
async function getEmbeddedPostgresCtor(): Promise<EmbeddedPostgresCtor> {
|
||||
const mod = await import("embedded-postgres");
|
||||
return mod.default as EmbeddedPostgresCtor;
|
||||
if (!embeddedPostgresSupport.supported) {
|
||||
console.warn(
|
||||
`Skipping embedded Postgres routines service tests on this host: ${embeddedPostgresSupport.reason ?? "unsupported environment"}`,
|
||||
);
|
||||
}
|
||||
|
||||
async function getAvailablePort(): Promise<number> {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const server = net.createServer();
|
||||
server.unref();
|
||||
server.on("error", reject);
|
||||
server.listen(0, "127.0.0.1", () => {
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
server.close(() => reject(new Error("Failed to allocate test port")));
|
||||
return;
|
||||
}
|
||||
const { port } = address;
|
||||
server.close((error) => {
|
||||
if (error) reject(error);
|
||||
else resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function startTempDatabase() {
|
||||
const dataDir = fs.mkdtempSync(path.join(os.tmpdir(), "paperclip-routines-service-"));
|
||||
const port = await getAvailablePort();
|
||||
const EmbeddedPostgres = await getEmbeddedPostgresCtor();
|
||||
const instance = new EmbeddedPostgres({
|
||||
databaseDir: dataDir,
|
||||
user: "paperclip",
|
||||
password: "paperclip",
|
||||
port,
|
||||
persistent: true,
|
||||
initdbFlags: ["--encoding=UTF8", "--locale=C", "--lc-messages=C"],
|
||||
onLog: () => {},
|
||||
onError: () => {},
|
||||
});
|
||||
await instance.initialise();
|
||||
await instance.start();
|
||||
|
||||
const adminConnectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/postgres`;
|
||||
await ensurePostgresDatabase(adminConnectionString, "paperclip");
|
||||
const connectionString = `postgres://paperclip:paperclip@127.0.0.1:${port}/paperclip`;
|
||||
await applyPendingMigrations(connectionString);
|
||||
return { connectionString, dataDir, instance };
|
||||
}
|
||||
|
||||
describe("routine service live-execution coalescing", () => {
|
||||
describeEmbeddedPostgres("routine service live-execution coalescing", () => {
|
||||
let db!: ReturnType<typeof createDb>;
|
||||
let instance: EmbeddedPostgresInstance | null = null;
|
||||
let dataDir = "";
|
||||
let tempDb: Awaited<ReturnType<typeof startEmbeddedPostgresTestDatabase>> | null = null;
|
||||
|
||||
beforeAll(async () => {
|
||||
const started = await startTempDatabase();
|
||||
db = createDb(started.connectionString);
|
||||
instance = started.instance;
|
||||
dataDir = started.dataDir;
|
||||
tempDb = await startEmbeddedPostgresTestDatabase("paperclip-routines-service-");
|
||||
db = createDb(tempDb.connectionString);
|
||||
}, 20_000);
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -117,10 +55,7 @@ describe("routine service live-execution coalescing", () => {
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await instance?.stop();
|
||||
if (dataDir) {
|
||||
fs.rmSync(dataDir, { recursive: true, force: true });
|
||||
}
|
||||
await tempDb?.cleanup();
|
||||
});
|
||||
|
||||
async function seedFixture(opts?: {
|
||||
|
||||
@@ -2,6 +2,7 @@ import { execFile } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
@@ -13,6 +14,7 @@ import {
|
||||
stopRuntimeServicesForExecutionWorkspace,
|
||||
type RealizedExecutionWorkspace,
|
||||
} from "../services/workspace-runtime.ts";
|
||||
import { resolvePaperclipConfigPath } from "../paths.ts";
|
||||
import type { WorkspaceOperation } from "@paperclipai/shared";
|
||||
import type { WorkspaceOperationRecorder } from "../services/workspace-operations.ts";
|
||||
|
||||
@@ -124,6 +126,7 @@ afterEach(async () => {
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
delete process.env.PAPERCLIP_HOME;
|
||||
delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
delete process.env.PAPERCLIP_WORKTREES_DIR;
|
||||
delete process.env.DATABASE_URL;
|
||||
});
|
||||
|
||||
@@ -282,6 +285,156 @@ describe("realizeExecutionWorkspace", () => {
|
||||
await expect(fs.readFile(path.join(reused.cwd, ".paperclip-provision-created"), "utf8")).resolves.toBe("false\n");
|
||||
});
|
||||
|
||||
it("writes an isolated repo-local Paperclip config and worktree branding when provisioning", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
const previousCwd = process.cwd();
|
||||
const paperclipHome = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-home-"));
|
||||
const isolatedWorktreeHome = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktrees-"));
|
||||
const instanceId = "worktree-base";
|
||||
const sharedConfigDir = path.join(paperclipHome, "instances", instanceId);
|
||||
const sharedConfigPath = path.join(sharedConfigDir, "config.json");
|
||||
const sharedEnvPath = path.join(sharedConfigDir, ".env");
|
||||
|
||||
process.env.PAPERCLIP_HOME = paperclipHome;
|
||||
process.env.PAPERCLIP_INSTANCE_ID = instanceId;
|
||||
process.env.PAPERCLIP_WORKTREES_DIR = isolatedWorktreeHome;
|
||||
|
||||
await fs.mkdir(sharedConfigDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
sharedConfigPath,
|
||||
JSON.stringify(
|
||||
{
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: "2026-03-26T00:00:00.000Z",
|
||||
source: "doctor",
|
||||
},
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(sharedConfigDir, "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(sharedConfigDir, "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(sharedConfigDir, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "auto",
|
||||
disableSignUp: false,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(sharedConfigDir, "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(sharedConfigDir, "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
"utf8",
|
||||
);
|
||||
await fs.writeFile(sharedEnvPath, 'DATABASE_URL="postgres://worktree:test@db.example.com:6543/paperclip"\n', "utf8");
|
||||
|
||||
await fs.mkdir(path.join(repoRoot, "scripts"), { recursive: true });
|
||||
await fs.copyFile(
|
||||
fileURLToPath(new URL("../../../scripts/provision-worktree.sh", import.meta.url)),
|
||||
path.join(repoRoot, "scripts", "provision-worktree.sh"),
|
||||
);
|
||||
await runGit(repoRoot, ["add", "scripts/provision-worktree.sh"]);
|
||||
await runGit(repoRoot, ["commit", "-m", "Add worktree provision script"]);
|
||||
|
||||
try {
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
provisionCommand: "bash ./scripts/provision-worktree.sh",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-885",
|
||||
title: "Show worktree banner",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
const configPath = path.join(workspace.cwd, ".paperclip", "config.json");
|
||||
const envPath = path.join(workspace.cwd, ".paperclip", ".env");
|
||||
const envContents = await fs.readFile(envPath, "utf8");
|
||||
const configContents = JSON.parse(await fs.readFile(configPath, "utf8"));
|
||||
const configStats = await fs.lstat(configPath);
|
||||
const expectedInstanceId = "pap-885-show-worktree-banner";
|
||||
const expectedInstanceRoot = path.join(
|
||||
isolatedWorktreeHome,
|
||||
"instances",
|
||||
expectedInstanceId,
|
||||
);
|
||||
|
||||
expect(configStats.isSymbolicLink()).toBe(false);
|
||||
expect(configContents.database.embeddedPostgresDataDir).toBe(path.join(expectedInstanceRoot, "db"));
|
||||
expect(configContents.database.embeddedPostgresDataDir).not.toBe(path.join(sharedConfigDir, "db"));
|
||||
expect(configContents.server.port).not.toBe(3100);
|
||||
expect(configContents.secrets.localEncrypted.keyFilePath).toBe(
|
||||
path.join(expectedInstanceRoot, "secrets", "master.key"),
|
||||
);
|
||||
expect(envContents).not.toContain("DATABASE_URL=");
|
||||
expect(envContents).toContain(`PAPERCLIP_HOME=${JSON.stringify(isolatedWorktreeHome)}`);
|
||||
expect(envContents).toContain(`PAPERCLIP_INSTANCE_ID=${JSON.stringify(expectedInstanceId)}`);
|
||||
expect(envContents).toContain(`PAPERCLIP_CONFIG=${JSON.stringify(configPath)}`);
|
||||
expect(envContents).toContain("PAPERCLIP_IN_WORKTREE=true");
|
||||
expect(envContents).toContain(
|
||||
`PAPERCLIP_WORKTREE_NAME=${JSON.stringify("PAP-885-show-worktree-banner")}`,
|
||||
);
|
||||
|
||||
process.chdir(workspace.cwd);
|
||||
expect(resolvePaperclipConfigPath()).toBe(configPath);
|
||||
} finally {
|
||||
process.chdir(previousCwd);
|
||||
}
|
||||
});
|
||||
|
||||
it("records worktree setup and provision operations when a recorder is provided", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
const { recorder, operations } = createWorkspaceOperationRecorderDouble();
|
||||
|
||||
426
server/src/__tests__/worktree-config.test.ts
Normal file
426
server/src/__tests__/worktree-config.test.ts
Normal file
@@ -0,0 +1,426 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
applyRuntimePortSelectionToConfig,
|
||||
maybePersistWorktreeRuntimePorts,
|
||||
maybeRepairLegacyWorktreeConfigAndEnvFiles,
|
||||
} from "../worktree-config.js";
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const ORIGINAL_CWD = process.cwd();
|
||||
|
||||
afterEach(() => {
|
||||
process.chdir(ORIGINAL_CWD);
|
||||
|
||||
for (const key of Object.keys(process.env)) {
|
||||
if (!(key in ORIGINAL_ENV)) {
|
||||
delete process.env[key];
|
||||
}
|
||||
}
|
||||
for (const [key, value] of Object.entries(ORIGINAL_ENV)) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
});
|
||||
|
||||
function buildLegacyConfig(sharedRoot: string) {
|
||||
return {
|
||||
$meta: {
|
||||
version: 1,
|
||||
updatedAt: "2026-03-26T00:00:00.000Z",
|
||||
source: "configure",
|
||||
},
|
||||
database: {
|
||||
mode: "embedded-postgres" as const,
|
||||
embeddedPostgresDataDir: path.join(sharedRoot, "db"),
|
||||
embeddedPostgresPort: 54329,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(sharedRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file" as const,
|
||||
logDir: path.join(sharedRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted" as const,
|
||||
exposure: "private" as const,
|
||||
host: "127.0.0.1",
|
||||
port: 3100,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
auth: {
|
||||
baseUrlMode: "explicit" as const,
|
||||
publicBaseUrl: "http://127.0.0.1:3100",
|
||||
disableSignUp: false,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk" as const,
|
||||
localDisk: {
|
||||
baseDir: path.join(sharedRoot, "data", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted" as const,
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(sharedRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe("worktree config repair", () => {
|
||||
it("repairs legacy repo-local worktree config and env files into an isolated instance", async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-repair-"));
|
||||
const worktreeRoot = path.join(tempRoot, "PAP-884-ai-commits-component");
|
||||
const paperclipDir = path.join(worktreeRoot, ".paperclip");
|
||||
const configPath = path.join(paperclipDir, "config.json");
|
||||
const envPath = path.join(paperclipDir, ".env");
|
||||
const sharedRoot = path.join(tempRoot, ".paperclip", "instances", "default");
|
||||
const isolatedHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
|
||||
await fs.mkdir(paperclipDir, { recursive: true });
|
||||
await fs.writeFile(configPath, JSON.stringify(buildLegacyConfig(sharedRoot), null, 2) + "\n", "utf8");
|
||||
await fs.writeFile(
|
||||
envPath,
|
||||
[
|
||||
"# Paperclip environment variables",
|
||||
"PAPERCLIP_IN_WORKTREE=true",
|
||||
"PAPERCLIP_WORKTREE_NAME=PAP-884-ai-commits-component",
|
||||
"PAPERCLIP_AGENT_JWT_SECRET=shared-secret",
|
||||
"",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
process.chdir(worktreeRoot);
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.PAPERCLIP_WORKTREE_NAME = "PAP-884-ai-commits-component";
|
||||
process.env.PAPERCLIP_WORKTREES_DIR = isolatedHome;
|
||||
delete process.env.PAPERCLIP_HOME;
|
||||
delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
delete process.env.PAPERCLIP_CONFIG;
|
||||
delete process.env.PAPERCLIP_CONTEXT;
|
||||
|
||||
const result = maybeRepairLegacyWorktreeConfigAndEnvFiles();
|
||||
|
||||
expect(result).toEqual({
|
||||
repairedConfig: true,
|
||||
repairedEnv: true,
|
||||
});
|
||||
|
||||
const repairedConfig = JSON.parse(await fs.readFile(configPath, "utf8"));
|
||||
const repairedEnv = await fs.readFile(envPath, "utf8");
|
||||
const instanceRoot = path.join(isolatedHome, "instances", "pap-884-ai-commits-component");
|
||||
|
||||
expect(repairedConfig.database.embeddedPostgresDataDir).toBe(path.join(instanceRoot, "db"));
|
||||
expect(repairedConfig.database.backup.dir).toBe(path.join(instanceRoot, "data", "backups"));
|
||||
expect(repairedConfig.logging.logDir).toBe(path.join(instanceRoot, "logs"));
|
||||
expect(repairedConfig.storage.localDisk.baseDir).toBe(path.join(instanceRoot, "data", "storage"));
|
||||
expect(repairedConfig.secrets.localEncrypted.keyFilePath).toBe(path.join(instanceRoot, "secrets", "master.key"));
|
||||
expect(repairedEnv).toContain(`PAPERCLIP_HOME=${JSON.stringify(isolatedHome)}`);
|
||||
expect(repairedEnv).toContain('PAPERCLIP_INSTANCE_ID="pap-884-ai-commits-component"');
|
||||
expect(repairedEnv).toContain(`PAPERCLIP_CONFIG=${JSON.stringify(await fs.realpath(configPath))}`);
|
||||
expect(repairedEnv).toContain(`PAPERCLIP_CONTEXT=${JSON.stringify(path.join(isolatedHome, "context.json"))}`);
|
||||
expect(repairedEnv).toContain('PAPERCLIP_AGENT_JWT_SECRET="shared-secret"');
|
||||
expect(process.env.PAPERCLIP_HOME).toBe(isolatedHome);
|
||||
expect(process.env.PAPERCLIP_INSTANCE_ID).toBe("pap-884-ai-commits-component");
|
||||
});
|
||||
|
||||
it("avoids sibling worktree ports when repairing legacy configs", async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-repair-ports-"));
|
||||
const worktreeRoot = path.join(tempRoot, "PAP-880-thumbs-capture-for-evals-feature");
|
||||
const paperclipDir = path.join(worktreeRoot, ".paperclip");
|
||||
const configPath = path.join(paperclipDir, "config.json");
|
||||
const envPath = path.join(paperclipDir, ".env");
|
||||
const sharedRoot = path.join(tempRoot, ".paperclip", "instances", "default");
|
||||
const isolatedHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const siblingInstanceRoot = path.join(isolatedHome, "instances", "pap-878-create-a-mine-tab-in-inbox");
|
||||
|
||||
await fs.mkdir(paperclipDir, { recursive: true });
|
||||
await fs.mkdir(siblingInstanceRoot, { recursive: true });
|
||||
await fs.writeFile(configPath, JSON.stringify(buildLegacyConfig(sharedRoot), null, 2) + "\n", "utf8");
|
||||
await fs.writeFile(
|
||||
envPath,
|
||||
[
|
||||
"# Paperclip environment variables",
|
||||
"PAPERCLIP_IN_WORKTREE=true",
|
||||
"PAPERCLIP_WORKTREE_NAME=PAP-880-thumbs-capture-for-evals-feature",
|
||||
"",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
await fs.writeFile(
|
||||
path.join(siblingInstanceRoot, "config.json"),
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildLegacyConfig(siblingInstanceRoot),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(siblingInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(siblingInstanceRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
"utf8",
|
||||
);
|
||||
|
||||
process.chdir(worktreeRoot);
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.PAPERCLIP_WORKTREE_NAME = "PAP-880-thumbs-capture-for-evals-feature";
|
||||
process.env.PAPERCLIP_WORKTREES_DIR = isolatedHome;
|
||||
|
||||
const result = maybeRepairLegacyWorktreeConfigAndEnvFiles();
|
||||
const repairedConfig = JSON.parse(await fs.readFile(configPath, "utf8"));
|
||||
|
||||
expect(result.repairedConfig).toBe(true);
|
||||
expect(repairedConfig.server.port).toBe(3102);
|
||||
expect(repairedConfig.database.embeddedPostgresPort).toBe(54331);
|
||||
});
|
||||
|
||||
it("rebalances duplicate ports for already isolated worktree configs", async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-rebalance-"));
|
||||
const isolatedHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const repoWorktreesRoot = path.join(tempRoot, "repo", ".paperclip", "worktrees");
|
||||
const siblingWorktreeRoot = path.join(repoWorktreesRoot, "PAP-878-create-a-mine-tab-in-inbox");
|
||||
const siblingInstanceRoot = path.join(isolatedHome, "instances", "pap-878-create-a-mine-tab-in-inbox");
|
||||
const currentWorktreeRoot = path.join(repoWorktreesRoot, "PAP-884-ai-commits-component");
|
||||
const paperclipDir = path.join(currentWorktreeRoot, ".paperclip");
|
||||
const configPath = path.join(paperclipDir, "config.json");
|
||||
const envPath = path.join(paperclipDir, ".env");
|
||||
const currentInstanceRoot = path.join(isolatedHome, "instances", "pap-884-ai-commits-component");
|
||||
const siblingConfigPath = path.join(siblingWorktreeRoot, ".paperclip", "config.json");
|
||||
|
||||
await fs.mkdir(paperclipDir, { recursive: true });
|
||||
await fs.mkdir(path.dirname(siblingConfigPath), { recursive: true });
|
||||
await fs.writeFile(
|
||||
configPath,
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildLegacyConfig(currentInstanceRoot),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(currentInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(currentInstanceRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(currentInstanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(currentInstanceRoot, "data", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(currentInstanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
"utf8",
|
||||
);
|
||||
await fs.writeFile(
|
||||
envPath,
|
||||
[
|
||||
"# Paperclip environment variables",
|
||||
"PAPERCLIP_IN_WORKTREE=true",
|
||||
"PAPERCLIP_WORKTREE_NAME=PAP-884-ai-commits-component",
|
||||
"",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
await fs.writeFile(
|
||||
siblingConfigPath,
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildLegacyConfig(siblingInstanceRoot),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(siblingInstanceRoot, "db"),
|
||||
embeddedPostgresPort: 54330,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(siblingInstanceRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
"utf8",
|
||||
);
|
||||
|
||||
process.chdir(currentWorktreeRoot);
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.PAPERCLIP_WORKTREE_NAME = "PAP-884-ai-commits-component";
|
||||
process.env.PAPERCLIP_WORKTREES_DIR = isolatedHome;
|
||||
|
||||
const result = maybeRepairLegacyWorktreeConfigAndEnvFiles();
|
||||
const repairedConfig = JSON.parse(await fs.readFile(configPath, "utf8"));
|
||||
|
||||
expect(result.repairedConfig).toBe(true);
|
||||
expect(repairedConfig.server.port).toBe(3102);
|
||||
expect(repairedConfig.database.embeddedPostgresPort).toBe(54331);
|
||||
});
|
||||
|
||||
it("persists runtime-selected worktree ports back into config", async () => {
|
||||
const tempRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-ports-"));
|
||||
const worktreeRoot = path.join(tempRoot, "PAP-878-create-a-mine-tab-in-inbox");
|
||||
const paperclipDir = path.join(worktreeRoot, ".paperclip");
|
||||
const configPath = path.join(paperclipDir, "config.json");
|
||||
const isolatedHome = path.join(tempRoot, ".paperclip-worktrees");
|
||||
const instanceRoot = path.join(isolatedHome, "instances", "pap-878-create-a-mine-tab-in-inbox");
|
||||
|
||||
await fs.mkdir(paperclipDir, { recursive: true });
|
||||
await fs.writeFile(
|
||||
configPath,
|
||||
JSON.stringify(
|
||||
{
|
||||
...buildLegacyConfig(instanceRoot),
|
||||
database: {
|
||||
mode: "embedded-postgres",
|
||||
embeddedPostgresDataDir: path.join(instanceRoot, "db"),
|
||||
embeddedPostgresPort: 54331,
|
||||
backup: {
|
||||
enabled: true,
|
||||
intervalMinutes: 60,
|
||||
retentionDays: 30,
|
||||
dir: path.join(instanceRoot, "data", "backups"),
|
||||
},
|
||||
},
|
||||
logging: {
|
||||
mode: "file",
|
||||
logDir: path.join(instanceRoot, "logs"),
|
||||
},
|
||||
server: {
|
||||
deploymentMode: "local_trusted",
|
||||
exposure: "private",
|
||||
host: "127.0.0.1",
|
||||
port: 3101,
|
||||
allowedHostnames: [],
|
||||
serveUi: true,
|
||||
},
|
||||
storage: {
|
||||
provider: "local_disk",
|
||||
localDisk: {
|
||||
baseDir: path.join(instanceRoot, "data", "storage"),
|
||||
},
|
||||
s3: {
|
||||
bucket: "paperclip",
|
||||
region: "us-east-1",
|
||||
prefix: "",
|
||||
forcePathStyle: false,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
provider: "local_encrypted",
|
||||
strictMode: false,
|
||||
localEncrypted: {
|
||||
keyFilePath: path.join(instanceRoot, "secrets", "master.key"),
|
||||
},
|
||||
},
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
"utf8",
|
||||
);
|
||||
|
||||
process.chdir(worktreeRoot);
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.PAPERCLIP_WORKTREE_NAME = "PAP-878-create-a-mine-tab-in-inbox";
|
||||
process.env.PAPERCLIP_HOME = isolatedHome;
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "pap-878-create-a-mine-tab-in-inbox";
|
||||
process.env.PAPERCLIP_CONFIG = configPath;
|
||||
|
||||
maybePersistWorktreeRuntimePorts({
|
||||
serverPort: 3103,
|
||||
databasePort: 54335,
|
||||
});
|
||||
|
||||
const writtenConfig = JSON.parse(await fs.readFile(configPath, "utf8"));
|
||||
|
||||
expect(writtenConfig.server.port).toBe(3103);
|
||||
expect(writtenConfig.database.embeddedPostgresPort).toBe(54335);
|
||||
expect(writtenConfig.auth.publicBaseUrl).toBe("http://127.0.0.1:3103/");
|
||||
});
|
||||
|
||||
it("can update the in-memory config without rewriting env-driven ports", () => {
|
||||
const { config, changed } = applyRuntimePortSelectionToConfig(buildLegacyConfig("/tmp/shared"), {
|
||||
serverPort: 3104,
|
||||
databasePort: 54340,
|
||||
allowServerPortWrite: false,
|
||||
allowDatabasePortWrite: true,
|
||||
});
|
||||
|
||||
expect(changed).toBe(true);
|
||||
expect(config.server.port).toBe(3100);
|
||||
expect(config.database.embeddedPostgresPort).toBe(54340);
|
||||
expect(config.auth.publicBaseUrl).toBe("http://127.0.0.1:3104/");
|
||||
});
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
export { getServerAdapter, listAdapterModels, listServerAdapters, findServerAdapter } from "./registry.js";
|
||||
export { getServerAdapter, listAdapterModels, listServerAdapters, findServerAdapter, detectAdapterModel } from "./registry.js";
|
||||
export type {
|
||||
ServerAdapterModule,
|
||||
AdapterExecutionContext,
|
||||
|
||||
@@ -70,6 +70,9 @@ import {
|
||||
execute as hermesExecute,
|
||||
testEnvironment as hermesTestEnvironment,
|
||||
sessionCodec as hermesSessionCodec,
|
||||
listSkills as hermesListSkills,
|
||||
syncSkills as hermesSyncSkills,
|
||||
detectModel as detectModelFromHermes,
|
||||
} from "hermes-paperclip-adapter/server";
|
||||
import {
|
||||
agentConfigurationDoc as hermesAgentConfigurationDoc,
|
||||
@@ -176,9 +179,12 @@ const hermesLocalAdapter: ServerAdapterModule = {
|
||||
execute: hermesExecute,
|
||||
testEnvironment: hermesTestEnvironment,
|
||||
sessionCodec: hermesSessionCodec,
|
||||
listSkills: hermesListSkills,
|
||||
syncSkills: hermesSyncSkills,
|
||||
models: hermesModels,
|
||||
supportsLocalAgentJwt: true,
|
||||
agentConfigurationDoc: hermesAgentConfigurationDoc,
|
||||
detectModel: () => detectModelFromHermes(),
|
||||
};
|
||||
|
||||
const adaptersByType = new Map<string, ServerAdapterModule>(
|
||||
@@ -219,6 +225,15 @@ export function listServerAdapters(): ServerAdapterModule[] {
|
||||
return Array.from(adaptersByType.values());
|
||||
}
|
||||
|
||||
export async function detectAdapterModel(
|
||||
type: string,
|
||||
): Promise<{ model: string; provider: string; source: string } | null> {
|
||||
const adapter = adaptersByType.get(type);
|
||||
if (!adapter?.detectModel) return null;
|
||||
const detected = await adapter.detectModel();
|
||||
return detected ? { model: detected.model, provider: detected.provider, source: detected.source } : null;
|
||||
}
|
||||
|
||||
export function findServerAdapter(type: string): ServerAdapterModule | null {
|
||||
return adaptersByType.get(type) ?? null;
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { existsSync, realpathSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import { config as loadDotenv } from "dotenv";
|
||||
import { resolvePaperclipEnvPath } from "./paths.js";
|
||||
import { maybeRepairLegacyWorktreeConfigAndEnvFiles } from "./worktree-config.js";
|
||||
import {
|
||||
AUTH_BASE_URL_MODES,
|
||||
DEPLOYMENT_EXPOSURES,
|
||||
@@ -36,6 +37,8 @@ if (!isSameFile && existsSync(CWD_ENV_PATH)) {
|
||||
loadDotenv({ path: CWD_ENV_PATH, override: false, quiet: true });
|
||||
}
|
||||
|
||||
maybeRepairLegacyWorktreeConfigAndEnvFiles();
|
||||
|
||||
type DatabaseMode = "embedded-postgres" | "postgres";
|
||||
|
||||
export interface Config {
|
||||
|
||||
36
server/src/dev-watch-ignore.ts
Normal file
36
server/src/dev-watch-ignore.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
function toGlobstarPath(candidate: string): string {
|
||||
return `${candidate.replaceAll(path.sep, "/")}/**`;
|
||||
}
|
||||
|
||||
function addIgnorePath(target: Set<string>, candidate: string): void {
|
||||
target.add(candidate);
|
||||
target.add(toGlobstarPath(candidate));
|
||||
try {
|
||||
const realPath = fs.realpathSync(candidate);
|
||||
target.add(realPath);
|
||||
target.add(toGlobstarPath(realPath));
|
||||
} catch {
|
||||
// Ignore paths that do not exist in the current checkout.
|
||||
}
|
||||
}
|
||||
|
||||
export function resolveServerDevWatchIgnorePaths(serverRoot: string): string[] {
|
||||
const ignorePaths = new Set<string>([
|
||||
"**/{node_modules,bower_components,vendor}/**",
|
||||
"**/.vite-temp/**",
|
||||
]);
|
||||
|
||||
for (const relativePath of [
|
||||
"../ui/node_modules",
|
||||
"../ui/node_modules/.vite-temp",
|
||||
"../ui/.vite",
|
||||
"../ui/dist",
|
||||
]) {
|
||||
addIgnorePath(ignorePaths, path.resolve(serverRoot, relativePath));
|
||||
}
|
||||
|
||||
return [...ignorePaths];
|
||||
}
|
||||
@@ -10,9 +10,11 @@ import { and, eq } from "drizzle-orm";
|
||||
import {
|
||||
createDb,
|
||||
ensurePostgresDatabase,
|
||||
formatEmbeddedPostgresError,
|
||||
getPostgresDataDirectory,
|
||||
inspectMigrations,
|
||||
applyPendingMigrations,
|
||||
createEmbeddedPostgresLogBuffer,
|
||||
reconcilePendingMigrationHistory,
|
||||
formatDatabaseBackupResult,
|
||||
runDatabaseBackup,
|
||||
@@ -30,6 +32,7 @@ import { heartbeatService, reconcilePersistedRuntimeServicesOnStartup, routineSe
|
||||
import { createStorageServiceFromConfig } from "./storage/index.js";
|
||||
import { printStartupBanner } from "./startup-banner.js";
|
||||
import { getBoardClaimWarningUrl, initializeBoardClaimChallenge } from "./board-claim.js";
|
||||
import { maybePersistWorktreeRuntimePorts } from "./worktree-config.js";
|
||||
|
||||
type BetterAuthSessionUser = {
|
||||
id: string;
|
||||
@@ -69,7 +72,7 @@ export interface StartedServer {
|
||||
}
|
||||
|
||||
export async function startServer(): Promise<StartedServer> {
|
||||
const config = loadConfig();
|
||||
let config = loadConfig();
|
||||
if (process.env.PAPERCLIP_SECRETS_PROVIDER === undefined) {
|
||||
process.env.PAPERCLIP_SECRETS_PROVIDER = config.secretsProvider;
|
||||
}
|
||||
@@ -94,8 +97,8 @@ export async function startServer(): Promise<StartedServer> {
|
||||
}
|
||||
|
||||
async function promptApplyMigrations(migrations: string[]): Promise<boolean> {
|
||||
if (process.env.PAPERCLIP_MIGRATION_PROMPT === "never") return false;
|
||||
if (process.env.PAPERCLIP_MIGRATION_AUTO_APPLY === "true") return true;
|
||||
if (process.env.PAPERCLIP_MIGRATION_PROMPT === "never") return false;
|
||||
if (!stdin.isTTY || !stdout.isTTY) return true;
|
||||
|
||||
const prompt = createInterface({ input: stdin, output: stdout });
|
||||
@@ -167,6 +170,18 @@ export async function startServer(): Promise<StartedServer> {
|
||||
const normalized = host.trim().toLowerCase();
|
||||
return normalized === "127.0.0.1" || normalized === "localhost" || normalized === "::1";
|
||||
}
|
||||
|
||||
function rewriteLocalUrlPort(rawUrl: string | undefined, port: number): string | undefined {
|
||||
if (!rawUrl) return undefined;
|
||||
try {
|
||||
const parsed = new URL(rawUrl);
|
||||
if (!isLoopbackHost(parsed.hostname)) return rawUrl;
|
||||
parsed.port = String(port);
|
||||
return parsed.toString();
|
||||
} catch {
|
||||
return rawUrl;
|
||||
}
|
||||
}
|
||||
|
||||
const LOCAL_BOARD_USER_ID = "local-board";
|
||||
const LOCAL_BOARD_USER_EMAIL = "local@paperclip.local";
|
||||
@@ -233,6 +248,7 @@ export async function startServer(): Promise<StartedServer> {
|
||||
let embeddedPostgresStartedByThisProcess = false;
|
||||
let migrationSummary: MigrationSummary = "skipped";
|
||||
let activeDatabaseConnectionString: string;
|
||||
let resolvedEmbeddedPostgresPort: number | null = null;
|
||||
let startupDbInfo:
|
||||
| { mode: "external-postgres"; connectionString: string }
|
||||
| { mode: "embedded-postgres"; dataDir: string; port: number };
|
||||
@@ -258,29 +274,31 @@ export async function startServer(): Promise<StartedServer> {
|
||||
const dataDir = resolve(config.embeddedPostgresDataDir);
|
||||
const configuredPort = config.embeddedPostgresPort;
|
||||
let port = configuredPort;
|
||||
const embeddedPostgresLogBuffer: string[] = [];
|
||||
const EMBEDDED_POSTGRES_LOG_BUFFER_LIMIT = 120;
|
||||
const logBuffer = createEmbeddedPostgresLogBuffer(120);
|
||||
const verboseEmbeddedPostgresLogs = process.env.PAPERCLIP_EMBEDDED_POSTGRES_VERBOSE === "true";
|
||||
const appendEmbeddedPostgresLog = (message: unknown) => {
|
||||
const text = typeof message === "string" ? message : message instanceof Error ? message.message : String(message ?? "");
|
||||
for (const lineRaw of text.split(/\r?\n/)) {
|
||||
logBuffer.append(message);
|
||||
if (!verboseEmbeddedPostgresLogs) {
|
||||
return;
|
||||
}
|
||||
const lines = typeof message === "string"
|
||||
? message.split(/\r?\n/)
|
||||
: message instanceof Error
|
||||
? [message.message]
|
||||
: [String(message ?? "")];
|
||||
for (const lineRaw of lines) {
|
||||
const line = lineRaw.trim();
|
||||
if (!line) continue;
|
||||
embeddedPostgresLogBuffer.push(line);
|
||||
if (embeddedPostgresLogBuffer.length > EMBEDDED_POSTGRES_LOG_BUFFER_LIMIT) {
|
||||
embeddedPostgresLogBuffer.splice(0, embeddedPostgresLogBuffer.length - EMBEDDED_POSTGRES_LOG_BUFFER_LIMIT);
|
||||
}
|
||||
if (verboseEmbeddedPostgresLogs) {
|
||||
logger.info({ embeddedPostgresLog: line }, "embedded-postgres");
|
||||
}
|
||||
logger.info({ embeddedPostgresLog: line }, "embedded-postgres");
|
||||
}
|
||||
};
|
||||
const logEmbeddedPostgresFailure = (phase: "initialise" | "start", err: unknown) => {
|
||||
if (embeddedPostgresLogBuffer.length > 0) {
|
||||
const recentLogs = logBuffer.getRecentLogs();
|
||||
if (recentLogs.length > 0) {
|
||||
logger.error(
|
||||
{
|
||||
phase,
|
||||
recentLogs: embeddedPostgresLogBuffer,
|
||||
recentLogs,
|
||||
err,
|
||||
},
|
||||
"Embedded PostgreSQL failed; showing buffered startup logs",
|
||||
@@ -357,7 +375,10 @@ export async function startServer(): Promise<StartedServer> {
|
||||
await embeddedPostgres.initialise();
|
||||
} catch (err) {
|
||||
logEmbeddedPostgresFailure("initialise", err);
|
||||
throw err;
|
||||
throw formatEmbeddedPostgresError(err, {
|
||||
fallbackMessage: `Failed to initialize embedded PostgreSQL cluster in ${dataDir} on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
logger.info(`Embedded PostgreSQL cluster already exists (${clusterVersionFile}); skipping init`);
|
||||
@@ -371,7 +392,10 @@ export async function startServer(): Promise<StartedServer> {
|
||||
await embeddedPostgres.start();
|
||||
} catch (err) {
|
||||
logEmbeddedPostgresFailure("start", err);
|
||||
throw err;
|
||||
throw formatEmbeddedPostgresError(err, {
|
||||
fallbackMessage: `Failed to start embedded PostgreSQL on port ${port}`,
|
||||
recentLogs: logBuffer.getRecentLogs(),
|
||||
});
|
||||
}
|
||||
embeddedPostgresStartedByThisProcess = true;
|
||||
}
|
||||
@@ -395,6 +419,7 @@ export async function startServer(): Promise<StartedServer> {
|
||||
db = createDb(embeddedConnectionString);
|
||||
logger.info("Embedded PostgreSQL ready");
|
||||
activeDatabaseConnectionString = embeddedConnectionString;
|
||||
resolvedEmbeddedPostgresPort = port;
|
||||
startupDbInfo = { mode: "embedded-postgres", dataDir, port };
|
||||
}
|
||||
|
||||
@@ -476,6 +501,19 @@ export async function startServer(): Promise<StartedServer> {
|
||||
}
|
||||
|
||||
const listenPort = await detectPort(config.port);
|
||||
if (listenPort !== config.port) {
|
||||
config.port = listenPort;
|
||||
}
|
||||
if (resolvedEmbeddedPostgresPort !== null && resolvedEmbeddedPostgresPort !== config.embeddedPostgresPort) {
|
||||
config.embeddedPostgresPort = resolvedEmbeddedPostgresPort;
|
||||
}
|
||||
if (config.authBaseUrlMode === "explicit" && config.authPublicBaseUrl) {
|
||||
config.authPublicBaseUrl = rewriteLocalUrlPort(config.authPublicBaseUrl, listenPort);
|
||||
}
|
||||
maybePersistWorktreeRuntimePorts({
|
||||
serverPort: listenPort,
|
||||
databasePort: resolvedEmbeddedPostgresPort,
|
||||
});
|
||||
const uiMode = config.uiDevMiddleware ? "vite-dev" : config.serveUi ? "static" : "none";
|
||||
const storageService = createStorageServiceFromConfig(config);
|
||||
const app = await createApp(db as any, {
|
||||
|
||||
@@ -1,9 +1,39 @@
|
||||
You are the CEO.
|
||||
You are the CEO. Your job is to lead the company, not to do individual contributor work. You own strategy, prioritization, and cross-functional coordination.
|
||||
|
||||
Your home directory is $AGENT_HOME. Everything personal to you -- life, memory, knowledge -- lives there. Other agents may have their own folders and you may update them when necessary.
|
||||
|
||||
Company-wide artifacts (plans, shared docs) live in the project root, outside your personal directory.
|
||||
|
||||
## Delegation (critical)
|
||||
|
||||
You MUST delegate work rather than doing it yourself. When a task is assigned to you:
|
||||
|
||||
1. **Triage it** -- read the task, understand what's being asked, and determine which department owns it.
|
||||
2. **Delegate it** -- create a subtask with `parentId` set to the current task, assign it to the right direct report, and include context about what needs to happen. Use these routing rules:
|
||||
- **Code, bugs, features, infra, devtools, technical tasks** → CTO
|
||||
- **Marketing, content, social media, growth, devrel** → CMO
|
||||
- **UX, design, user research, design-system** → UXDesigner
|
||||
- **Cross-functional or unclear** → break into separate subtasks for each department, or assign to the CTO if it's primarily technical with a design component
|
||||
- If the right report doesn't exist yet, use the `paperclip-create-agent` skill to hire one before delegating.
|
||||
3. **Do NOT write code, implement features, or fix bugs yourself.** Your reports exist for this. Even if a task seems small or quick, delegate it.
|
||||
4. **Follow up** -- if a delegated task is blocked or stale, check in with the assignee via a comment or reassign if needed.
|
||||
|
||||
## What you DO personally
|
||||
|
||||
- Set priorities and make product decisions
|
||||
- Resolve cross-team conflicts or ambiguity
|
||||
- Communicate with the board (human users)
|
||||
- Approve or reject proposals from your reports
|
||||
- Hire new agents when the team needs capacity
|
||||
- Unblock your direct reports when they escalate to you
|
||||
|
||||
## Keeping work moving
|
||||
|
||||
- Don't let tasks sit idle. If you delegate something, check that it's progressing.
|
||||
- If a report is blocked, help unblock them -- escalate to the board if needed.
|
||||
- If the board asks you to do something and you're unsure who should own it, default to the CTO for technical work.
|
||||
- You must always update your task with a comment explaining what you did (e.g., who you delegated to and why).
|
||||
|
||||
## Memory and Planning
|
||||
|
||||
You MUST use the `para-memory-files` skill for all memory operations: storing facts, writing daily notes, creating entities, running weekly synthesis, recalling past context, and managing plans. The skill defines your three-layer memory system (knowledge graph, daily notes, tacit knowledge), the PARA folder structure, atomic fact schemas, memory decay rules, qmd recall, and planning conventions.
|
||||
|
||||
@@ -1411,6 +1411,25 @@ function grantsFromDefaults(
|
||||
return result;
|
||||
}
|
||||
|
||||
export function agentJoinGrantsFromDefaults(
|
||||
defaultsPayload: Record<string, unknown> | null | undefined
|
||||
): Array<{
|
||||
permissionKey: (typeof PERMISSION_KEYS)[number];
|
||||
scope: Record<string, unknown> | null;
|
||||
}> {
|
||||
const grants = grantsFromDefaults(defaultsPayload, "agent");
|
||||
if (grants.some((grant) => grant.permissionKey === "tasks:assign")) {
|
||||
return grants;
|
||||
}
|
||||
return [
|
||||
...grants,
|
||||
{
|
||||
permissionKey: "tasks:assign",
|
||||
scope: null
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
type JoinRequestManagerCandidate = {
|
||||
id: string;
|
||||
role: string;
|
||||
@@ -2618,17 +2637,8 @@ export function accessRoutes(
|
||||
"member",
|
||||
"active"
|
||||
);
|
||||
await access.setPrincipalPermission(
|
||||
companyId,
|
||||
"agent",
|
||||
created.id,
|
||||
"tasks:assign",
|
||||
true,
|
||||
req.actor.userId ?? null
|
||||
);
|
||||
const grants = grantsFromDefaults(
|
||||
invite.defaultsPayload as Record<string, unknown> | null,
|
||||
"agent"
|
||||
const grants = agentJoinGrantsFromDefaults(
|
||||
invite.defaultsPayload as Record<string, unknown> | null
|
||||
);
|
||||
await access.setPrincipalGrants(
|
||||
companyId,
|
||||
|
||||
@@ -44,7 +44,7 @@ import {
|
||||
} from "../services/index.js";
|
||||
import { conflict, forbidden, notFound, unprocessable } from "../errors.js";
|
||||
import { assertBoard, assertCompanyAccess, assertInstanceAdmin, getActorInfo } from "./authz.js";
|
||||
import { findServerAdapter, listAdapterModels } from "../adapters/index.js";
|
||||
import { findServerAdapter, listAdapterModels, detectAdapterModel } from "../adapters/index.js";
|
||||
import { redactEventPayload } from "../redaction.js";
|
||||
import { redactCurrentUserValue } from "../log-redaction.js";
|
||||
import { renderOrgChartSvg, renderOrgChartPng, type OrgNode, type OrgChartStyle, ORG_CHART_STYLES } from "./org-chart-svg.js";
|
||||
@@ -671,6 +671,15 @@ export function agentRoutes(db: Db) {
|
||||
res.json(models);
|
||||
});
|
||||
|
||||
router.get("/companies/:companyId/adapters/:type/detect-model", async (req, res) => {
|
||||
const companyId = req.params.companyId as string;
|
||||
assertCompanyAccess(req, companyId);
|
||||
const type = req.params.type as string;
|
||||
|
||||
const detected = await detectAdapterModel(type);
|
||||
res.json(detected);
|
||||
});
|
||||
|
||||
router.post(
|
||||
"/companies/:companyId/adapters/:type/test-environment",
|
||||
validate(testAdapterEnvironmentSchema),
|
||||
|
||||
@@ -171,6 +171,33 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
return rawId;
|
||||
}
|
||||
|
||||
async function resolveIssueProjectAndGoal(issue: {
|
||||
companyId: string;
|
||||
projectId: string | null;
|
||||
goalId: string | null;
|
||||
}) {
|
||||
const projectPromise = issue.projectId ? projectsSvc.getById(issue.projectId) : Promise.resolve(null);
|
||||
const directGoalPromise = issue.goalId ? goalsSvc.getById(issue.goalId) : Promise.resolve(null);
|
||||
const [project, directGoal] = await Promise.all([projectPromise, directGoalPromise]);
|
||||
|
||||
if (directGoal) {
|
||||
return { project, goal: directGoal };
|
||||
}
|
||||
|
||||
const projectGoalId = project?.goalId ?? project?.goalIds[0] ?? null;
|
||||
if (projectGoalId) {
|
||||
const projectGoal = await goalsSvc.getById(projectGoalId);
|
||||
return { project, goal: projectGoal };
|
||||
}
|
||||
|
||||
if (!issue.projectId) {
|
||||
const defaultGoal = await goalsSvc.getDefaultCompanyGoal(issue.companyId);
|
||||
return { project, goal: defaultGoal };
|
||||
}
|
||||
|
||||
return { project, goal: null };
|
||||
}
|
||||
|
||||
// Resolve issue identifiers (e.g. "PAP-39") to UUIDs for all /issues/:id routes
|
||||
router.param("id", async (req, res, next, rawId) => {
|
||||
try {
|
||||
@@ -203,6 +230,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
assertCompanyAccess(req, companyId);
|
||||
const assigneeUserFilterRaw = req.query.assigneeUserId as string | undefined;
|
||||
const touchedByUserFilterRaw = req.query.touchedByUserId as string | undefined;
|
||||
const inboxArchivedByUserFilterRaw = req.query.inboxArchivedByUserId as string | undefined;
|
||||
const unreadForUserFilterRaw = req.query.unreadForUserId as string | undefined;
|
||||
const assigneeUserId =
|
||||
assigneeUserFilterRaw === "me" && req.actor.type === "board"
|
||||
@@ -212,6 +240,10 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
touchedByUserFilterRaw === "me" && req.actor.type === "board"
|
||||
? req.actor.userId
|
||||
: touchedByUserFilterRaw;
|
||||
const inboxArchivedByUserId =
|
||||
inboxArchivedByUserFilterRaw === "me" && req.actor.type === "board"
|
||||
? req.actor.userId
|
||||
: inboxArchivedByUserFilterRaw;
|
||||
const unreadForUserId =
|
||||
unreadForUserFilterRaw === "me" && req.actor.type === "board"
|
||||
? req.actor.userId
|
||||
@@ -225,6 +257,10 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
res.status(403).json({ error: "touchedByUserId=me requires board authentication" });
|
||||
return;
|
||||
}
|
||||
if (inboxArchivedByUserFilterRaw === "me" && (!inboxArchivedByUserId || req.actor.type !== "board")) {
|
||||
res.status(403).json({ error: "inboxArchivedByUserId=me requires board authentication" });
|
||||
return;
|
||||
}
|
||||
if (unreadForUserFilterRaw === "me" && (!unreadForUserId || req.actor.type !== "board")) {
|
||||
res.status(403).json({ error: "unreadForUserId=me requires board authentication" });
|
||||
return;
|
||||
@@ -236,6 +272,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
participantAgentId: req.query.participantAgentId as string | undefined,
|
||||
assigneeUserId,
|
||||
touchedByUserId,
|
||||
inboxArchivedByUserId,
|
||||
unreadForUserId,
|
||||
projectId: req.query.projectId as string | undefined,
|
||||
parentId: req.query.parentId as string | undefined,
|
||||
@@ -311,14 +348,9 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const [ancestors, project, goal, mentionedProjectIds, documentPayload] = await Promise.all([
|
||||
const [{ project, goal }, ancestors, mentionedProjectIds, documentPayload] = await Promise.all([
|
||||
resolveIssueProjectAndGoal(issue),
|
||||
svc.getAncestors(issue.id),
|
||||
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
|
||||
issue.goalId
|
||||
? goalsSvc.getById(issue.goalId)
|
||||
: !issue.projectId
|
||||
? goalsSvc.getDefaultCompanyGoal(issue.companyId)
|
||||
: null,
|
||||
svc.findMentionedProjectIds(issue.id),
|
||||
documentsSvc.getIssueDocumentPayload(issue),
|
||||
]);
|
||||
@@ -356,14 +388,9 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
? req.query.wakeCommentId.trim()
|
||||
: null;
|
||||
|
||||
const [ancestors, project, goal, commentCursor, wakeComment] = await Promise.all([
|
||||
const [{ project, goal }, ancestors, commentCursor, wakeComment] = await Promise.all([
|
||||
resolveIssueProjectAndGoal(issue),
|
||||
svc.getAncestors(issue.id),
|
||||
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
|
||||
issue.goalId
|
||||
? goalsSvc.getById(issue.goalId)
|
||||
: !issue.projectId
|
||||
? goalsSvc.getDefaultCompanyGoal(issue.companyId)
|
||||
: null,
|
||||
svc.getCommentCursor(issue.id),
|
||||
wakeCommentId ? svc.getComment(wakeCommentId) : null,
|
||||
]);
|
||||
@@ -686,6 +713,70 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
res.json(readState);
|
||||
});
|
||||
|
||||
router.post("/issues/:id/inbox-archive", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
if (req.actor.type !== "board") {
|
||||
res.status(403).json({ error: "Board authentication required" });
|
||||
return;
|
||||
}
|
||||
if (!req.actor.userId) {
|
||||
res.status(403).json({ error: "Board user context required" });
|
||||
return;
|
||||
}
|
||||
const archiveState = await svc.archiveInbox(issue.companyId, issue.id, req.actor.userId, new Date());
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.inbox_archived",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: { userId: req.actor.userId, archivedAt: archiveState.archivedAt },
|
||||
});
|
||||
res.json(archiveState);
|
||||
});
|
||||
|
||||
router.delete("/issues/:id/inbox-archive", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
if (req.actor.type !== "board") {
|
||||
res.status(403).json({ error: "Board authentication required" });
|
||||
return;
|
||||
}
|
||||
if (!req.actor.userId) {
|
||||
res.status(403).json({ error: "Board user context required" });
|
||||
return;
|
||||
}
|
||||
const removed = await svc.unarchiveInbox(issue.companyId, issue.id, req.actor.userId);
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.inbox_unarchived",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: { userId: req.actor.userId },
|
||||
});
|
||||
res.json(removed ?? { ok: true });
|
||||
});
|
||||
|
||||
router.get("/issues/:id/approvals", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
|
||||
@@ -9,6 +9,7 @@ const ROOT_KEY = "instructionsRootPath";
|
||||
const ENTRY_KEY = "instructionsEntryFile";
|
||||
const FILE_KEY = "instructionsFilePath";
|
||||
const PROMPT_KEY = "promptTemplate";
|
||||
/** @deprecated Use the managed instructions bundle system instead. */
|
||||
const BOOTSTRAP_PROMPT_KEY = "bootstrapPromptTemplate";
|
||||
const LEGACY_PROMPT_TEMPLATE_PATH = "promptTemplate.legacy.md";
|
||||
const IGNORED_INSTRUCTIONS_FILE_NAMES = new Set([".DS_Store", "Thumbs.db", "Desktop.ini"]);
|
||||
|
||||
@@ -1475,7 +1475,7 @@ function normalizePortableConfig(
|
||||
key === "instructionsRootPath" ||
|
||||
key === "instructionsEntryFile" ||
|
||||
key === "promptTemplate" ||
|
||||
key === "bootstrapPromptTemplate" ||
|
||||
key === "bootstrapPromptTemplate" || // deprecated — kept for backward compat
|
||||
key === "paperclipSkillSync"
|
||||
) continue;
|
||||
if (key === "env") continue;
|
||||
@@ -3895,7 +3895,7 @@ export function companyPortabilityService(db: Db, storage?: StorageService) {
|
||||
desiredSkills,
|
||||
);
|
||||
delete adapterConfigWithSkills.promptTemplate;
|
||||
delete adapterConfigWithSkills.bootstrapPromptTemplate;
|
||||
delete adapterConfigWithSkills.bootstrapPromptTemplate; // deprecated
|
||||
delete adapterConfigWithSkills.instructionsFilePath;
|
||||
delete adapterConfigWithSkills.instructionsBundleMode;
|
||||
delete adapterConfigWithSkills.instructionsRootPath;
|
||||
|
||||
@@ -3,28 +3,54 @@ type MaybeId = string | null | undefined;
|
||||
export function resolveIssueGoalId(input: {
|
||||
projectId: MaybeId;
|
||||
goalId: MaybeId;
|
||||
projectGoalId?: MaybeId;
|
||||
defaultGoalId: MaybeId;
|
||||
}): string | null {
|
||||
if (!input.projectId && !input.goalId) {
|
||||
return input.defaultGoalId ?? null;
|
||||
}
|
||||
return input.goalId ?? null;
|
||||
if (input.goalId) return input.goalId;
|
||||
if (input.projectId) return input.projectGoalId ?? null;
|
||||
return input.defaultGoalId ?? null;
|
||||
}
|
||||
|
||||
export function resolveNextIssueGoalId(input: {
|
||||
currentProjectId: MaybeId;
|
||||
currentGoalId: MaybeId;
|
||||
currentProjectGoalId?: MaybeId;
|
||||
projectId?: MaybeId;
|
||||
goalId?: MaybeId;
|
||||
projectGoalId?: MaybeId;
|
||||
defaultGoalId: MaybeId;
|
||||
}): string | null {
|
||||
const projectId =
|
||||
input.projectId !== undefined ? input.projectId : input.currentProjectId;
|
||||
const goalId =
|
||||
input.goalId !== undefined ? input.goalId : input.currentGoalId;
|
||||
const projectGoalId =
|
||||
input.projectGoalId !== undefined
|
||||
? input.projectGoalId
|
||||
: projectId
|
||||
? input.currentProjectGoalId
|
||||
: null;
|
||||
|
||||
if (!projectId && !goalId) {
|
||||
const resolveFallbackGoalId = (targetProjectId: MaybeId, targetProjectGoalId: MaybeId) => {
|
||||
if (targetProjectId) return targetProjectGoalId ?? null;
|
||||
return input.defaultGoalId ?? null;
|
||||
};
|
||||
|
||||
if (input.goalId !== undefined) {
|
||||
return input.goalId ?? resolveFallbackGoalId(projectId, projectGoalId);
|
||||
}
|
||||
return goalId ?? null;
|
||||
|
||||
const currentFallbackGoalId = resolveFallbackGoalId(
|
||||
input.currentProjectId,
|
||||
input.currentProjectGoalId,
|
||||
);
|
||||
const nextFallbackGoalId = resolveFallbackGoalId(projectId, projectGoalId);
|
||||
|
||||
if (!input.currentGoalId) {
|
||||
return nextFallbackGoalId;
|
||||
}
|
||||
|
||||
if (input.currentGoalId === currentFallbackGoalId) {
|
||||
return nextFallbackGoalId;
|
||||
}
|
||||
|
||||
return input.currentGoalId;
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
heartbeatRuns,
|
||||
executionWorkspaces,
|
||||
issueAttachments,
|
||||
issueInboxArchives,
|
||||
issueLabels,
|
||||
issueComments,
|
||||
issueDocuments,
|
||||
@@ -66,6 +67,7 @@ export interface IssueFilters {
|
||||
participantAgentId?: string;
|
||||
assigneeUserId?: string;
|
||||
touchedByUserId?: string;
|
||||
inboxArchivedByUserId?: string;
|
||||
unreadForUserId?: string;
|
||||
projectId?: string;
|
||||
parentId?: string;
|
||||
@@ -101,6 +103,7 @@ type IssueUserContextInput = {
|
||||
createdAt: Date | string;
|
||||
updatedAt: Date | string;
|
||||
};
|
||||
type ProjectGoalReader = Pick<Db, "select">;
|
||||
|
||||
function sameRunLock(checkoutRunId: string | null, actorRunId: string | null) {
|
||||
if (actorRunId) return checkoutRunId === actorRunId;
|
||||
@@ -113,6 +116,20 @@ function escapeLikePattern(value: string): string {
|
||||
return value.replace(/[\\%_]/g, "\\$&");
|
||||
}
|
||||
|
||||
async function getProjectDefaultGoalId(
|
||||
db: ProjectGoalReader,
|
||||
companyId: string,
|
||||
projectId: string | null | undefined,
|
||||
) {
|
||||
if (!projectId) return null;
|
||||
const row = await db
|
||||
.select({ goalId: projects.goalId })
|
||||
.from(projects)
|
||||
.where(and(eq(projects.id, projectId), eq(projects.companyId, companyId)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row?.goalId ?? null;
|
||||
}
|
||||
|
||||
function touchedByUserCondition(companyId: string, userId: string) {
|
||||
return sql<boolean>`
|
||||
(
|
||||
@@ -197,6 +214,36 @@ function myLastTouchAtExpr(companyId: string, userId: string) {
|
||||
`;
|
||||
}
|
||||
|
||||
function lastExternalCommentAtExpr(companyId: string, userId: string) {
|
||||
return sql<Date | null>`
|
||||
(
|
||||
SELECT MAX(${issueComments.createdAt})
|
||||
FROM ${issueComments}
|
||||
WHERE ${issueComments.issueId} = ${issues.id}
|
||||
AND ${issueComments.companyId} = ${companyId}
|
||||
AND (
|
||||
${issueComments.authorUserId} IS NULL
|
||||
OR ${issueComments.authorUserId} <> ${userId}
|
||||
)
|
||||
)
|
||||
`;
|
||||
}
|
||||
|
||||
function issueLastActivityAtExpr(companyId: string, userId: string) {
|
||||
const lastExternalCommentAt = lastExternalCommentAtExpr(companyId, userId);
|
||||
const myLastTouchAt = myLastTouchAtExpr(companyId, userId);
|
||||
return sql<Date>`
|
||||
COALESCE(
|
||||
${lastExternalCommentAt},
|
||||
CASE
|
||||
WHEN ${issues.updatedAt} > COALESCE(${myLastTouchAt}, to_timestamp(0))
|
||||
THEN ${issues.updatedAt}
|
||||
ELSE to_timestamp(0)
|
||||
END
|
||||
)
|
||||
`;
|
||||
}
|
||||
|
||||
function unreadForUserCondition(companyId: string, userId: string) {
|
||||
const touchedCondition = touchedByUserCondition(companyId, userId);
|
||||
const myLastTouchAt = myLastTouchAtExpr(companyId, userId);
|
||||
@@ -218,6 +265,55 @@ function unreadForUserCondition(companyId: string, userId: string) {
|
||||
`;
|
||||
}
|
||||
|
||||
function inboxVisibleForUserCondition(companyId: string, userId: string) {
|
||||
const issueLastActivityAt = issueLastActivityAtExpr(companyId, userId);
|
||||
return sql<boolean>`
|
||||
NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM ${issueInboxArchives}
|
||||
WHERE ${issueInboxArchives.issueId} = ${issues.id}
|
||||
AND ${issueInboxArchives.companyId} = ${companyId}
|
||||
AND ${issueInboxArchives.userId} = ${userId}
|
||||
AND ${issueInboxArchives.archivedAt} >= ${issueLastActivityAt}
|
||||
)
|
||||
`;
|
||||
}
|
||||
|
||||
/** Named entities commonly emitted in saved issue bodies; unknown `&name;` sequences are left unchanged. */
|
||||
const WELL_KNOWN_NAMED_HTML_ENTITIES: Readonly<Record<string, string>> = {
|
||||
amp: "&",
|
||||
apos: "'",
|
||||
copy: "\u00A9",
|
||||
gt: ">",
|
||||
lt: "<",
|
||||
nbsp: "\u00A0",
|
||||
quot: '"',
|
||||
ensp: "\u2002",
|
||||
emsp: "\u2003",
|
||||
thinsp: "\u2009",
|
||||
};
|
||||
|
||||
function decodeNumericHtmlEntity(digits: string, radix: 16 | 10): string | null {
|
||||
const n = Number.parseInt(digits, radix);
|
||||
if (Number.isNaN(n) || n < 0 || n > 0x10ffff) return null;
|
||||
try {
|
||||
return String.fromCodePoint(n);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** Decodes HTML character references in a raw @mention capture so UI-encoded bodies match agent names. */
|
||||
export function normalizeAgentMentionToken(raw: string): string {
|
||||
let s = raw.replace(/&#x([0-9a-fA-F]+);/gi, (full, hex: string) => decodeNumericHtmlEntity(hex, 16) ?? full);
|
||||
s = s.replace(/&#([0-9]+);/g, (full, dec: string) => decodeNumericHtmlEntity(dec, 10) ?? full);
|
||||
s = s.replace(/&([a-z][a-z0-9]*);/gi, (full, name: string) => {
|
||||
const decoded = WELL_KNOWN_NAMED_HTML_ENTITIES[name.toLowerCase()];
|
||||
return decoded !== undefined ? decoded : full;
|
||||
});
|
||||
return s.trim();
|
||||
}
|
||||
|
||||
export function deriveIssueUserContext(
|
||||
issue: IssueUserContextInput,
|
||||
userId: string,
|
||||
@@ -506,8 +602,9 @@ export function issueService(db: Db) {
|
||||
list: async (companyId: string, filters?: IssueFilters) => {
|
||||
const conditions = [eq(issues.companyId, companyId)];
|
||||
const touchedByUserId = filters?.touchedByUserId?.trim() || undefined;
|
||||
const inboxArchivedByUserId = filters?.inboxArchivedByUserId?.trim() || undefined;
|
||||
const unreadForUserId = filters?.unreadForUserId?.trim() || undefined;
|
||||
const contextUserId = unreadForUserId ?? touchedByUserId;
|
||||
const contextUserId = unreadForUserId ?? touchedByUserId ?? inboxArchivedByUserId;
|
||||
const rawSearch = filters?.q?.trim() ?? "";
|
||||
const hasSearch = rawSearch.length > 0;
|
||||
const escapedSearch = hasSearch ? escapeLikePattern(rawSearch) : "";
|
||||
@@ -543,6 +640,9 @@ export function issueService(db: Db) {
|
||||
if (touchedByUserId) {
|
||||
conditions.push(touchedByUserCondition(companyId, touchedByUserId));
|
||||
}
|
||||
if (inboxArchivedByUserId) {
|
||||
conditions.push(inboxVisibleForUserCondition(companyId, inboxArchivedByUserId));
|
||||
}
|
||||
if (unreadForUserId) {
|
||||
conditions.push(unreadForUserCondition(companyId, unreadForUserId));
|
||||
}
|
||||
@@ -691,6 +791,42 @@ export function issueService(db: Db) {
|
||||
return row;
|
||||
},
|
||||
|
||||
archiveInbox: async (companyId: string, issueId: string, userId: string, archivedAt: Date = new Date()) => {
|
||||
const now = new Date();
|
||||
const [row] = await db
|
||||
.insert(issueInboxArchives)
|
||||
.values({
|
||||
companyId,
|
||||
issueId,
|
||||
userId,
|
||||
archivedAt,
|
||||
updatedAt: now,
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [issueInboxArchives.companyId, issueInboxArchives.issueId, issueInboxArchives.userId],
|
||||
set: {
|
||||
archivedAt,
|
||||
updatedAt: now,
|
||||
},
|
||||
})
|
||||
.returning();
|
||||
return row;
|
||||
},
|
||||
|
||||
unarchiveInbox: async (companyId: string, issueId: string, userId: string) => {
|
||||
const [row] = await db
|
||||
.delete(issueInboxArchives)
|
||||
.where(
|
||||
and(
|
||||
eq(issueInboxArchives.companyId, companyId),
|
||||
eq(issueInboxArchives.issueId, issueId),
|
||||
eq(issueInboxArchives.userId, userId),
|
||||
),
|
||||
)
|
||||
.returning();
|
||||
return row ?? null;
|
||||
},
|
||||
|
||||
getById: async (id: string) => {
|
||||
const row = await db
|
||||
.select()
|
||||
@@ -744,6 +880,7 @@ export function issueService(db: Db) {
|
||||
}
|
||||
return db.transaction(async (tx) => {
|
||||
const defaultCompanyGoal = await getDefaultCompanyGoal(tx, companyId);
|
||||
const projectGoalId = await getProjectDefaultGoalId(tx, companyId, issueData.projectId);
|
||||
let executionWorkspaceSettings =
|
||||
(issueData.executionWorkspaceSettings as Record<string, unknown> | null | undefined) ?? null;
|
||||
if (executionWorkspaceSettings == null && issueData.projectId) {
|
||||
@@ -795,6 +932,7 @@ export function issueService(db: Db) {
|
||||
goalId: resolveIssueGoalId({
|
||||
projectId: issueData.projectId,
|
||||
goalId: issueData.goalId,
|
||||
projectGoalId,
|
||||
defaultGoalId: defaultCompanyGoal?.id ?? null,
|
||||
}),
|
||||
...(projectWorkspaceId ? { projectWorkspaceId } : {}),
|
||||
@@ -895,11 +1033,21 @@ export function issueService(db: Db) {
|
||||
|
||||
return db.transaction(async (tx) => {
|
||||
const defaultCompanyGoal = await getDefaultCompanyGoal(tx, existing.companyId);
|
||||
const [currentProjectGoalId, nextProjectGoalId] = await Promise.all([
|
||||
getProjectDefaultGoalId(tx, existing.companyId, existing.projectId),
|
||||
getProjectDefaultGoalId(
|
||||
tx,
|
||||
existing.companyId,
|
||||
issueData.projectId !== undefined ? issueData.projectId : existing.projectId,
|
||||
),
|
||||
]);
|
||||
patch.goalId = resolveNextIssueGoalId({
|
||||
currentProjectId: existing.projectId,
|
||||
currentGoalId: existing.goalId,
|
||||
currentProjectGoalId,
|
||||
projectId: issueData.projectId,
|
||||
goalId: issueData.goalId,
|
||||
projectGoalId: nextProjectGoalId,
|
||||
defaultGoalId: defaultCompanyGoal?.id ?? null,
|
||||
});
|
||||
const updated = await tx
|
||||
@@ -1490,11 +1638,13 @@ export function issueService(db: Db) {
|
||||
const re = /\B@([^\s@,!?.]+)/g;
|
||||
const tokens = new Set<string>();
|
||||
let m: RegExpExecArray | null;
|
||||
while ((m = re.exec(body)) !== null) tokens.add(m[1].toLowerCase());
|
||||
while ((m = re.exec(body)) !== null) {
|
||||
const normalized = normalizeAgentMentionToken(m[1]);
|
||||
if (normalized) tokens.add(normalized.toLowerCase());
|
||||
}
|
||||
|
||||
const explicitAgentMentionIds = extractAgentMentionIds(body);
|
||||
if (tokens.size === 0 && explicitAgentMentionIds.length === 0) return [];
|
||||
|
||||
const rows = await db.select({ id: agents.id, name: agents.name })
|
||||
.from(agents).where(eq(agents.companyId, companyId));
|
||||
const resolved = new Set<string>(explicitAgentMentionIds);
|
||||
|
||||
467
server/src/worktree-config.ts
Normal file
467
server/src/worktree-config.ts
Normal file
@@ -0,0 +1,467 @@
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { PaperclipConfig } from "@paperclipai/shared";
|
||||
import { resolvePaperclipConfigPath, resolvePaperclipEnvPath } from "./paths.js";
|
||||
|
||||
function nonEmpty(value: string | null | undefined): string | null {
|
||||
return typeof value === "string" && value.trim().length > 0 ? value.trim() : null;
|
||||
}
|
||||
|
||||
function expandHomePrefix(value: string): string {
|
||||
if (value === "~") return os.homedir();
|
||||
if (value.startsWith("~/")) return path.resolve(os.homedir(), value.slice(2));
|
||||
return value;
|
||||
}
|
||||
|
||||
function resolveHomeAwarePath(value: string): string {
|
||||
return path.resolve(expandHomePrefix(value));
|
||||
}
|
||||
|
||||
function sanitizeWorktreeInstanceId(rawValue: string): string {
|
||||
const trimmed = rawValue.trim().toLowerCase();
|
||||
const normalized = trimmed
|
||||
.replace(/[^a-z0-9_-]+/g, "-")
|
||||
.replace(/-+/g, "-")
|
||||
.replace(/^[-_]+|[-_]+$/g, "");
|
||||
return normalized || "worktree";
|
||||
}
|
||||
|
||||
function isLoopbackHost(hostname: string): boolean {
|
||||
const value = hostname.trim().toLowerCase();
|
||||
return value === "127.0.0.1" || value === "localhost" || value === "::1";
|
||||
}
|
||||
|
||||
function rewriteLocalUrlPort(rawUrl: string | undefined, port: number): string | undefined {
|
||||
if (!rawUrl) return undefined;
|
||||
try {
|
||||
const parsed = new URL(rawUrl);
|
||||
if (!isLoopbackHost(parsed.hostname)) return rawUrl;
|
||||
parsed.port = String(port);
|
||||
return parsed.toString();
|
||||
} catch {
|
||||
return rawUrl;
|
||||
}
|
||||
}
|
||||
|
||||
function parseEnvFile(contents: string): Record<string, string> {
|
||||
const entries: Record<string, string> = {};
|
||||
|
||||
for (const rawLine of contents.split(/\r?\n/)) {
|
||||
const line = rawLine.trim();
|
||||
if (!line || line.startsWith("#")) continue;
|
||||
|
||||
const match = rawLine.match(/^\s*(?:export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$/);
|
||||
if (!match) continue;
|
||||
|
||||
const [, key, rawValue] = match;
|
||||
const value = rawValue.trim();
|
||||
if (!value) {
|
||||
entries[key] = "";
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
(value.startsWith("\"") && value.endsWith("\"")) ||
|
||||
(value.startsWith("'") && value.endsWith("'"))
|
||||
) {
|
||||
entries[key] = value.slice(1, -1);
|
||||
continue;
|
||||
}
|
||||
|
||||
entries[key] = value.replace(/\s+#.*$/, "").trim();
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
function readEnvEntries(envPath: string): Record<string, string> {
|
||||
if (!fs.existsSync(envPath)) return {};
|
||||
return parseEnvFile(fs.readFileSync(envPath, "utf8"));
|
||||
}
|
||||
|
||||
function formatEnvEntries(entries: Record<string, string>): string {
|
||||
return [
|
||||
"# Paperclip environment variables",
|
||||
"# Generated by Paperclip worktree repair",
|
||||
...Object.entries(entries).map(([key, value]) => `${key}=${JSON.stringify(value)}`),
|
||||
"",
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
function isPathInside(candidatePath: string, rootPath: string): boolean {
|
||||
const candidate = path.resolve(candidatePath);
|
||||
const root = path.resolve(rootPath);
|
||||
return candidate === root || candidate.startsWith(`${root}${path.sep}`);
|
||||
}
|
||||
|
||||
type WorktreeRuntimeContext = {
|
||||
configPath: string;
|
||||
envPath: string;
|
||||
worktreeName: string;
|
||||
instanceId: string;
|
||||
homeDir: string;
|
||||
instanceRoot: string;
|
||||
contextPath: string;
|
||||
embeddedPostgresDataDir: string;
|
||||
backupDir: string;
|
||||
logDir: string;
|
||||
storageDir: string;
|
||||
secretsKeyFilePath: string;
|
||||
};
|
||||
|
||||
function resolveWorktreeRuntimeContext(
|
||||
env: NodeJS.ProcessEnv,
|
||||
overrideConfigPath?: string,
|
||||
): WorktreeRuntimeContext | null {
|
||||
if (env.PAPERCLIP_IN_WORKTREE !== "true") return null;
|
||||
|
||||
const configPath = resolvePaperclipConfigPath(overrideConfigPath);
|
||||
const envPath = resolvePaperclipEnvPath(configPath);
|
||||
const worktreeRoot = path.resolve(path.dirname(configPath), "..");
|
||||
const worktreeName = nonEmpty(env.PAPERCLIP_WORKTREE_NAME) ?? path.basename(worktreeRoot);
|
||||
const instanceId = nonEmpty(env.PAPERCLIP_INSTANCE_ID) ?? sanitizeWorktreeInstanceId(worktreeName);
|
||||
const homeDir = resolveHomeAwarePath(
|
||||
nonEmpty(env.PAPERCLIP_HOME) ??
|
||||
nonEmpty(env.PAPERCLIP_WORKTREES_DIR) ??
|
||||
"~/.paperclip-worktrees",
|
||||
);
|
||||
const instanceRoot = path.resolve(homeDir, "instances", instanceId);
|
||||
|
||||
return {
|
||||
configPath,
|
||||
envPath,
|
||||
worktreeName,
|
||||
instanceId,
|
||||
homeDir,
|
||||
instanceRoot,
|
||||
contextPath: path.resolve(homeDir, "context.json"),
|
||||
embeddedPostgresDataDir: path.resolve(instanceRoot, "db"),
|
||||
backupDir: path.resolve(instanceRoot, "data", "backups"),
|
||||
logDir: path.resolve(instanceRoot, "logs"),
|
||||
storageDir: path.resolve(instanceRoot, "data", "storage"),
|
||||
secretsKeyFilePath: path.resolve(instanceRoot, "secrets", "master.key"),
|
||||
};
|
||||
}
|
||||
|
||||
function writeConfigFile(configPath: string, config: PaperclipConfig): void {
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n", { mode: 0o600 });
|
||||
}
|
||||
|
||||
function resolveRepoManagedWorktreesRoot(worktreeRoot: string): string | null {
|
||||
const normalized = path.resolve(worktreeRoot);
|
||||
const marker = `${path.sep}.paperclip${path.sep}worktrees${path.sep}`;
|
||||
const index = normalized.indexOf(marker);
|
||||
if (index === -1) return null;
|
||||
const repoRoot = normalized.slice(0, index);
|
||||
return path.resolve(repoRoot, ".paperclip", "worktrees");
|
||||
}
|
||||
|
||||
function collectSiblingWorktreePorts(context: WorktreeRuntimeContext): {
|
||||
serverPorts: Set<number>;
|
||||
databasePorts: Set<number>;
|
||||
} {
|
||||
const serverPorts = new Set<number>();
|
||||
const databasePorts = new Set<number>();
|
||||
const siblingConfigPaths = new Set<string>();
|
||||
const instancesDir = path.resolve(context.homeDir, "instances");
|
||||
if (fs.existsSync(instancesDir)) {
|
||||
for (const entry of fs.readdirSync(instancesDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory() || entry.name === context.instanceId) continue;
|
||||
|
||||
const siblingConfigPath = path.resolve(instancesDir, entry.name, "config.json");
|
||||
if (fs.existsSync(siblingConfigPath)) {
|
||||
siblingConfigPaths.add(siblingConfigPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const repoManagedWorktreesRoot = resolveRepoManagedWorktreesRoot(path.dirname(context.configPath));
|
||||
if (repoManagedWorktreesRoot && fs.existsSync(repoManagedWorktreesRoot)) {
|
||||
for (const entry of fs.readdirSync(repoManagedWorktreesRoot, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
|
||||
const siblingConfigPath = path.resolve(repoManagedWorktreesRoot, entry.name, ".paperclip", "config.json");
|
||||
if (path.resolve(siblingConfigPath) === path.resolve(context.configPath)) continue;
|
||||
if (fs.existsSync(siblingConfigPath)) {
|
||||
siblingConfigPaths.add(siblingConfigPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const siblingConfigPath of siblingConfigPaths) {
|
||||
try {
|
||||
const siblingConfig = JSON.parse(fs.readFileSync(siblingConfigPath, "utf8")) as PaperclipConfig;
|
||||
if (Number.isInteger(siblingConfig.server.port) && siblingConfig.server.port > 0) {
|
||||
serverPorts.add(siblingConfig.server.port);
|
||||
}
|
||||
if (
|
||||
siblingConfig.database.mode === "embedded-postgres" &&
|
||||
Number.isInteger(siblingConfig.database.embeddedPostgresPort) &&
|
||||
siblingConfig.database.embeddedPostgresPort > 0
|
||||
) {
|
||||
databasePorts.add(siblingConfig.database.embeddedPostgresPort);
|
||||
}
|
||||
} catch {
|
||||
// Ignore sibling configs that are missing or malformed.
|
||||
}
|
||||
}
|
||||
|
||||
return { serverPorts, databasePorts };
|
||||
}
|
||||
|
||||
function findNextUnclaimedPort(preferredPort: number, claimedPorts: Set<number>): number {
|
||||
let port = Math.max(1, Math.trunc(preferredPort));
|
||||
while (claimedPorts.has(port)) {
|
||||
port += 1;
|
||||
}
|
||||
return port;
|
||||
}
|
||||
|
||||
function buildIsolatedWorktreeConfig(
|
||||
config: PaperclipConfig,
|
||||
context: WorktreeRuntimeContext,
|
||||
portOverrides?: {
|
||||
serverPort?: number;
|
||||
databasePort?: number;
|
||||
},
|
||||
): PaperclipConfig {
|
||||
const serverPort = portOverrides?.serverPort ?? config.server.port;
|
||||
const databasePort =
|
||||
config.database.mode === "embedded-postgres"
|
||||
? portOverrides?.databasePort ?? config.database.embeddedPostgresPort
|
||||
: undefined;
|
||||
const nextConfig: PaperclipConfig = {
|
||||
...config,
|
||||
database: {
|
||||
...config.database,
|
||||
...(config.database.mode === "embedded-postgres"
|
||||
? {
|
||||
embeddedPostgresDataDir: context.embeddedPostgresDataDir,
|
||||
embeddedPostgresPort: databasePort ?? config.database.embeddedPostgresPort,
|
||||
backup: {
|
||||
...config.database.backup,
|
||||
dir: context.backupDir,
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
server: {
|
||||
...config.server,
|
||||
port: serverPort,
|
||||
},
|
||||
logging: {
|
||||
...config.logging,
|
||||
logDir: context.logDir,
|
||||
},
|
||||
storage: {
|
||||
...config.storage,
|
||||
localDisk: {
|
||||
...config.storage.localDisk,
|
||||
baseDir: context.storageDir,
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
...config.secrets,
|
||||
localEncrypted: {
|
||||
...config.secrets.localEncrypted,
|
||||
keyFilePath: context.secretsKeyFilePath,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if (config.auth.baseUrlMode === "explicit" && config.auth.publicBaseUrl) {
|
||||
nextConfig.auth = {
|
||||
...config.auth,
|
||||
publicBaseUrl: rewriteLocalUrlPort(config.auth.publicBaseUrl, serverPort),
|
||||
};
|
||||
}
|
||||
|
||||
return nextConfig;
|
||||
}
|
||||
|
||||
function needsWorktreeConfigRepair(
|
||||
config: PaperclipConfig,
|
||||
context: WorktreeRuntimeContext,
|
||||
): boolean {
|
||||
if (config.database.mode === "embedded-postgres") {
|
||||
if (!isPathInside(config.database.embeddedPostgresDataDir, context.instanceRoot)) {
|
||||
return true;
|
||||
}
|
||||
if (!isPathInside(config.database.backup.dir, context.instanceRoot)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!isPathInside(config.logging.logDir, context.instanceRoot)) {
|
||||
return true;
|
||||
}
|
||||
if (!isPathInside(config.storage.localDisk.baseDir, context.instanceRoot)) {
|
||||
return true;
|
||||
}
|
||||
if (!isPathInside(config.secrets.localEncrypted.keyFilePath, context.instanceRoot)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function applyRuntimePortSelectionToConfig(
|
||||
config: PaperclipConfig,
|
||||
input: {
|
||||
serverPort: number;
|
||||
databasePort?: number | null;
|
||||
allowServerPortWrite?: boolean;
|
||||
allowDatabasePortWrite?: boolean;
|
||||
},
|
||||
): { config: PaperclipConfig; changed: boolean } {
|
||||
let changed = false;
|
||||
let nextConfig = config;
|
||||
|
||||
if (input.allowServerPortWrite !== false && config.server.port !== input.serverPort) {
|
||||
nextConfig = {
|
||||
...nextConfig,
|
||||
server: {
|
||||
...nextConfig.server,
|
||||
port: input.serverPort,
|
||||
},
|
||||
};
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (
|
||||
input.allowDatabasePortWrite !== false &&
|
||||
nextConfig.database.mode === "embedded-postgres" &&
|
||||
typeof input.databasePort === "number" &&
|
||||
nextConfig.database.embeddedPostgresPort !== input.databasePort
|
||||
) {
|
||||
nextConfig = {
|
||||
...nextConfig,
|
||||
database: {
|
||||
...nextConfig.database,
|
||||
embeddedPostgresPort: input.databasePort,
|
||||
},
|
||||
};
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (nextConfig.auth.baseUrlMode === "explicit" && nextConfig.auth.publicBaseUrl) {
|
||||
const rewritten = rewriteLocalUrlPort(nextConfig.auth.publicBaseUrl, input.serverPort);
|
||||
if (rewritten && rewritten !== nextConfig.auth.publicBaseUrl) {
|
||||
nextConfig = {
|
||||
...nextConfig,
|
||||
auth: {
|
||||
...nextConfig.auth,
|
||||
publicBaseUrl: rewritten,
|
||||
},
|
||||
};
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
|
||||
return { config: nextConfig, changed };
|
||||
}
|
||||
|
||||
/**
 * Repairs a worktree's config and .env files left behind by older layouts:
 * re-points writable paths into this worktree's instance root, resolves
 * server/database port collisions with sibling worktrees, and rewrites the
 * env file with the canonical PAPERCLIP_* variables.
 *
 * Side effects: mutates `process.env`, and may rewrite both the config file
 * and the env file on disk.
 *
 * @returns Flags indicating which of the two files were actually rewritten.
 */
export function maybeRepairLegacyWorktreeConfigAndEnvFiles(): {
  repairedConfig: boolean;
  repairedEnv: boolean;
} {
  const context = resolveWorktreeRuntimeContext(process.env);
  if (!context) {
    // Not running inside a recognized worktree — nothing to repair.
    return { repairedConfig: false, repairedEnv: false };
  }

  // Make the resolved worktree identity authoritative for this process,
  // regardless of what (possibly stale) values the environment carried.
  process.env.PAPERCLIP_HOME = context.homeDir;
  process.env.PAPERCLIP_INSTANCE_ID = context.instanceId;
  process.env.PAPERCLIP_CONFIG = context.configPath;
  process.env.PAPERCLIP_CONTEXT = context.contextPath;
  process.env.PAPERCLIP_WORKTREE_NAME = context.worktreeName;

  let repairedConfig = false;
  if (fs.existsSync(context.configPath)) {
    try {
      const parsed = JSON.parse(fs.readFileSync(context.configPath, "utf8")) as PaperclipConfig;
      const siblingPorts = collectSiblingWorktreePorts(context);
      // A collision exists when another worktree already claims our server
      // port, or (for embedded Postgres) our database port.
      const hasSiblingPortCollision =
        siblingPorts.serverPorts.has(parsed.server.port) ||
        (parsed.database.mode === "embedded-postgres" &&
          siblingPorts.databasePorts.has(parsed.database.embeddedPostgresPort));

      if (needsWorktreeConfigRepair(parsed, context) || hasSiblingPortCollision) {
        // 3100 appears to be the primary instance's default server port, so
        // worktrees start probing from 3101 — TODO confirm against defaults.
        const selectedServerPort = findNextUnclaimedPort(
          parsed.server.port === 3100 ? 3101 : parsed.server.port,
          siblingPorts.serverPorts,
        );
        // Same pattern for the embedded-Postgres default (presumably 54329);
        // the freshly selected server port is also excluded so the two never
        // end up equal.
        const selectedDatabasePort =
          parsed.database.mode === "embedded-postgres"
            ? findNextUnclaimedPort(
                parsed.database.embeddedPostgresPort === 54329
                  ? 54330
                  : parsed.database.embeddedPostgresPort,
                new Set([...siblingPorts.databasePorts, selectedServerPort]),
              )
            : undefined;

        writeConfigFile(
          context.configPath,
          buildIsolatedWorktreeConfig(parsed, context, {
            serverPort: selectedServerPort,
            databasePort: selectedDatabasePort,
          }),
        );
        repairedConfig = true;
      }
    } catch {
      // Leave invalid configs to the normal startup validation path.
    }
  }

  // Merge the canonical worktree variables over whatever the env file holds;
  // unrelated user-added entries are preserved.
  const existingEnvEntries = readEnvEntries(context.envPath);
  const desiredEnvEntries: Record<string, string> = {
    ...existingEnvEntries,
    PAPERCLIP_HOME: context.homeDir,
    PAPERCLIP_INSTANCE_ID: context.instanceId,
    PAPERCLIP_CONFIG: context.configPath,
    PAPERCLIP_CONTEXT: context.contextPath,
    PAPERCLIP_IN_WORKTREE: "true",
    PAPERCLIP_WORKTREE_NAME: context.worktreeName,
  };

  // Only rewrite when at least one entry actually differs from disk.
  const repairedEnv = Object.entries(desiredEnvEntries).some(
    ([key, value]) => existingEnvEntries[key] !== value,
  );

  if (repairedEnv) {
    fs.mkdirSync(path.dirname(context.envPath), { recursive: true });
    // 0o600: env file may hold secrets, so keep it owner-only.
    fs.writeFileSync(context.envPath, formatEnvEntries(desiredEnvEntries), { mode: 0o600 });
  }

  return { repairedConfig, repairedEnv };
}
|
||||
|
||||
export function maybePersistWorktreeRuntimePorts(input: {
|
||||
serverPort: number;
|
||||
databasePort?: number | null;
|
||||
}): void {
|
||||
const context = resolveWorktreeRuntimeContext(process.env);
|
||||
if (!context || !fs.existsSync(context.configPath)) return;
|
||||
|
||||
let fileConfig: PaperclipConfig;
|
||||
try {
|
||||
fileConfig = JSON.parse(fs.readFileSync(context.configPath, "utf8")) as PaperclipConfig;
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
|
||||
const { config, changed } = applyRuntimePortSelectionToConfig(fileConfig, {
|
||||
serverPort: input.serverPort,
|
||||
databasePort: input.databasePort,
|
||||
allowServerPortWrite: !nonEmpty(process.env.PORT),
|
||||
allowDatabasePortWrite: !nonEmpty(process.env.DATABASE_URL),
|
||||
});
|
||||
|
||||
if (changed) {
|
||||
writeConfigFile(context.configPath, config);
|
||||
}
|
||||
}
|
||||
11
ui/README.md
Normal file
11
ui/README.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# @paperclipai/ui
|
||||
|
||||
Published static assets for the Paperclip board UI.
|
||||
|
||||
## What gets published
|
||||
|
||||
The npm package contains the production build under `dist/`. It does not ship the UI source tree or workspace-only dependencies.
|
||||
|
||||
## Typical use
|
||||
|
||||
Install the package, then serve or copy the built files from `node_modules/@paperclipai/ui/dist`.
|
||||
@@ -1,13 +1,29 @@
|
||||
{
|
||||
"name": "@paperclipai/ui",
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"version": "0.3.1",
|
||||
"description": "Prebuilt Paperclip board UI assets.",
|
||||
"license": "MIT",
|
||||
"homepage": "https://github.com/paperclipai/paperclip",
|
||||
"bugs": {
|
||||
"url": "https://github.com/paperclipai/paperclip/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/paperclipai/paperclip",
|
||||
"directory": "ui"
|
||||
},
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"preview": "vite preview",
|
||||
"typecheck": "tsc -b"
|
||||
"typecheck": "tsc -b",
|
||||
"clean": "rm -rf dist tsconfig.tsbuildinfo",
|
||||
"prepack": "rm -f package.dev.json && cp package.json package.dev.json && node ../scripts/generate-ui-package-json.mjs",
|
||||
"postpack": "if [ -f package.dev.json ]; then mv package.dev.json package.json; fi"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"dependencies": {
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
@@ -25,6 +41,7 @@
|
||||
"@paperclipai/adapter-pi-local": "workspace:*",
|
||||
"@paperclipai/adapter-utils": "workspace:*",
|
||||
"@paperclipai/shared": "workspace:*",
|
||||
"hermes-paperclip-adapter": "^0.2.0",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@tailwindcss/typography": "^0.5.19",
|
||||
"@tanstack/react-query": "^5.90.21",
|
||||
|
||||
@@ -165,10 +165,11 @@ function boardRoutes() {
|
||||
<Route path="costs" element={<Costs />} />
|
||||
<Route path="activity" element={<Activity />} />
|
||||
<Route path="inbox" element={<InboxRootRedirect />} />
|
||||
<Route path="inbox/mine" element={<Inbox />} />
|
||||
<Route path="inbox/recent" element={<Inbox />} />
|
||||
<Route path="inbox/unread" element={<Inbox />} />
|
||||
<Route path="inbox/all" element={<Inbox />} />
|
||||
<Route path="inbox/new" element={<Navigate to="/inbox/recent" replace />} />
|
||||
<Route path="inbox/new" element={<Navigate to="/inbox/mine" replace />} />
|
||||
<Route path="design-guide" element={<DesignGuide />} />
|
||||
<Route path="tests/ux/runs" element={<RunTranscriptUxLab />} />
|
||||
<Route path=":pluginRoutePath" element={<PluginPage />} />
|
||||
|
||||
49
ui/src/adapters/hermes-local/config-fields.tsx
Normal file
49
ui/src/adapters/hermes-local/config-fields.tsx
Normal file
@@ -0,0 +1,49 @@
|
||||
import type { AdapterConfigFieldsProps } from "../types";
|
||||
import {
|
||||
Field,
|
||||
DraftInput,
|
||||
} from "../../components/agent-config-primitives";
|
||||
import { ChoosePathButton } from "../../components/PathInstructionsModal";
|
||||
|
||||
const inputClass =
|
||||
"w-full rounded-md border border-border px-2.5 py-1.5 bg-transparent outline-none text-sm font-mono placeholder:text-muted-foreground/40";
|
||||
const instructionsFileHint =
|
||||
"Absolute path to a markdown file (e.g. AGENTS.md) that defines this agent's behavior. Injected into the system prompt at runtime.";
|
||||
|
||||
/**
 * Adapter-specific config fields for the hermes_local adapter.
 * Renders only the "Agent instructions file" picker; returns nothing when
 * the caller hides that field.
 */
export function HermesLocalConfigFields({
  isCreate,
  values,
  set,
  config,
  eff,
  mark,
  hideInstructionsFile,
}: AdapterConfigFieldsProps) {
  if (hideInstructionsFile) return null;

  // Create mode reads/writes the draft values object directly; edit mode
  // goes through the effective-value (`eff`) and pending-change (`mark`)
  // helpers instead.
  const currentPath = isCreate
    ? values!.instructionsFilePath ?? ""
    : eff(
        "adapterConfig",
        "instructionsFilePath",
        String(config.instructionsFilePath ?? ""),
      );
  const commitPath = (v: string) =>
    isCreate
      ? set!({ instructionsFilePath: v })
      : mark("adapterConfig", "instructionsFilePath", v || undefined);

  return (
    <Field label="Agent instructions file" hint={instructionsFileHint}>
      <div className="flex items-center gap-2">
        <DraftInput
          value={currentPath}
          onCommit={commitPath}
          immediate
          className={inputClass}
          placeholder="/absolute/path/to/AGENTS.md"
        />
        <ChoosePathButton />
      </div>
    </Field>
  );
}
|
||||
12
ui/src/adapters/hermes-local/index.ts
Normal file
12
ui/src/adapters/hermes-local/index.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import type { UIAdapterModule } from "../types";
|
||||
import { parseHermesStdoutLine } from "hermes-paperclip-adapter/ui";
|
||||
import { HermesLocalConfigFields } from "./config-fields";
|
||||
import { buildHermesConfig } from "hermes-paperclip-adapter/ui";
|
||||
|
||||
// UI adapter registration for the hermes_local adapter type: wires the
// Hermes stdout parser, the config-field component, and the adapter-config
// builder into the shared UIAdapterModule shape consumed by the registry.
export const hermesLocalUIAdapter: UIAdapterModule = {
  type: "hermes_local",
  label: "Hermes Agent",
  parseStdoutLine: parseHermesStdoutLine,
  ConfigFields: HermesLocalConfigFields,
  buildAdapterConfig: buildHermesConfig,
};
|
||||
@@ -1,7 +1,9 @@
|
||||
import type { AdapterConfigFieldsProps } from "../types";
|
||||
import {
|
||||
Field,
|
||||
ToggleField,
|
||||
DraftInput,
|
||||
help,
|
||||
} from "../../components/agent-config-primitives";
|
||||
import { ChoosePathButton } from "../../components/PathInstructionsModal";
|
||||
|
||||
@@ -19,31 +21,52 @@ export function OpenCodeLocalConfigFields({
|
||||
mark,
|
||||
hideInstructionsFile,
|
||||
}: AdapterConfigFieldsProps) {
|
||||
if (hideInstructionsFile) return null;
|
||||
return (
|
||||
<Field label="Agent instructions file" hint={instructionsFileHint}>
|
||||
<div className="flex items-center gap-2">
|
||||
<DraftInput
|
||||
value={
|
||||
isCreate
|
||||
? values!.instructionsFilePath ?? ""
|
||||
: eff(
|
||||
"adapterConfig",
|
||||
"instructionsFilePath",
|
||||
String(config.instructionsFilePath ?? ""),
|
||||
)
|
||||
}
|
||||
onCommit={(v) =>
|
||||
isCreate
|
||||
? set!({ instructionsFilePath: v })
|
||||
: mark("adapterConfig", "instructionsFilePath", v || undefined)
|
||||
}
|
||||
immediate
|
||||
className={inputClass}
|
||||
placeholder="/absolute/path/to/AGENTS.md"
|
||||
/>
|
||||
<ChoosePathButton />
|
||||
</div>
|
||||
</Field>
|
||||
<>
|
||||
{!hideInstructionsFile && (
|
||||
<Field label="Agent instructions file" hint={instructionsFileHint}>
|
||||
<div className="flex items-center gap-2">
|
||||
<DraftInput
|
||||
value={
|
||||
isCreate
|
||||
? values!.instructionsFilePath ?? ""
|
||||
: eff(
|
||||
"adapterConfig",
|
||||
"instructionsFilePath",
|
||||
String(config.instructionsFilePath ?? ""),
|
||||
)
|
||||
}
|
||||
onCommit={(v) =>
|
||||
isCreate
|
||||
? set!({ instructionsFilePath: v })
|
||||
: mark("adapterConfig", "instructionsFilePath", v || undefined)
|
||||
}
|
||||
immediate
|
||||
className={inputClass}
|
||||
placeholder="/absolute/path/to/AGENTS.md"
|
||||
/>
|
||||
<ChoosePathButton />
|
||||
</div>
|
||||
</Field>
|
||||
)}
|
||||
<ToggleField
|
||||
label="Skip permissions"
|
||||
hint={help.dangerouslySkipPermissions}
|
||||
checked={
|
||||
isCreate
|
||||
? values!.dangerouslySkipPermissions
|
||||
: eff(
|
||||
"adapterConfig",
|
||||
"dangerouslySkipPermissions",
|
||||
config.dangerouslySkipPermissions !== false,
|
||||
)
|
||||
}
|
||||
onChange={(v) =>
|
||||
isCreate
|
||||
? set!({ dangerouslySkipPermissions: v })
|
||||
: mark("adapterConfig", "dangerouslySkipPermissions", v)
|
||||
}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { claudeLocalUIAdapter } from "./claude-local";
|
||||
import { codexLocalUIAdapter } from "./codex-local";
|
||||
import { cursorLocalUIAdapter } from "./cursor";
|
||||
import { geminiLocalUIAdapter } from "./gemini-local";
|
||||
import { hermesLocalUIAdapter } from "./hermes-local";
|
||||
import { openCodeLocalUIAdapter } from "./opencode-local";
|
||||
import { piLocalUIAdapter } from "./pi-local";
|
||||
import { openClawGatewayUIAdapter } from "./openclaw-gateway";
|
||||
@@ -13,6 +14,7 @@ const uiAdapters: UIAdapterModule[] = [
|
||||
claudeLocalUIAdapter,
|
||||
codexLocalUIAdapter,
|
||||
geminiLocalUIAdapter,
|
||||
hermesLocalUIAdapter,
|
||||
openCodeLocalUIAdapter,
|
||||
piLocalUIAdapter,
|
||||
cursorLocalUIAdapter,
|
||||
|
||||
@@ -27,6 +27,12 @@ export interface AdapterModel {
|
||||
label: string;
|
||||
}
|
||||
|
||||
export interface DetectedAdapterModel {
|
||||
model: string;
|
||||
provider: string;
|
||||
source: string;
|
||||
}
|
||||
|
||||
export interface ClaudeLoginResult {
|
||||
exitCode: number | null;
|
||||
signal: string | null;
|
||||
@@ -159,6 +165,10 @@ export const agentsApi = {
|
||||
api.get<AdapterModel[]>(
|
||||
`/companies/${encodeURIComponent(companyId)}/adapters/${encodeURIComponent(type)}/models`,
|
||||
),
|
||||
detectModel: (companyId: string, type: string) =>
|
||||
api.get<DetectedAdapterModel | null>(
|
||||
`/companies/${encodeURIComponent(companyId)}/adapters/${encodeURIComponent(type)}/detect-model`,
|
||||
),
|
||||
testEnvironment: (
|
||||
companyId: string,
|
||||
type: string,
|
||||
|
||||
@@ -21,6 +21,7 @@ export const issuesApi = {
|
||||
participantAgentId?: string;
|
||||
assigneeUserId?: string;
|
||||
touchedByUserId?: string;
|
||||
inboxArchivedByUserId?: string;
|
||||
unreadForUserId?: string;
|
||||
labelId?: string;
|
||||
originKind?: string;
|
||||
@@ -36,6 +37,7 @@ export const issuesApi = {
|
||||
if (filters?.participantAgentId) params.set("participantAgentId", filters.participantAgentId);
|
||||
if (filters?.assigneeUserId) params.set("assigneeUserId", filters.assigneeUserId);
|
||||
if (filters?.touchedByUserId) params.set("touchedByUserId", filters.touchedByUserId);
|
||||
if (filters?.inboxArchivedByUserId) params.set("inboxArchivedByUserId", filters.inboxArchivedByUserId);
|
||||
if (filters?.unreadForUserId) params.set("unreadForUserId", filters.unreadForUserId);
|
||||
if (filters?.labelId) params.set("labelId", filters.labelId);
|
||||
if (filters?.originKind) params.set("originKind", filters.originKind);
|
||||
@@ -51,6 +53,10 @@ export const issuesApi = {
|
||||
deleteLabel: (id: string) => api.delete<IssueLabel>(`/labels/${id}`),
|
||||
get: (id: string) => api.get<Issue>(`/issues/${id}`),
|
||||
markRead: (id: string) => api.post<{ id: string; lastReadAt: Date }>(`/issues/${id}/read`, {}),
|
||||
archiveFromInbox: (id: string) =>
|
||||
api.post<{ id: string; archivedAt: Date }>(`/issues/${id}/inbox-archive`, {}),
|
||||
unarchiveFromInbox: (id: string) =>
|
||||
api.delete<{ id: string; archivedAt: Date } | { ok: true }>(`/issues/${id}/inbox-archive`),
|
||||
create: (companyId: string, data: Record<string, unknown>) =>
|
||||
api.post<Issue>(`/companies/${companyId}/issues`, data),
|
||||
update: (id: string, data: Record<string, unknown>) => api.patch<Issue>(`/issues/${id}`, data),
|
||||
|
||||
@@ -248,9 +248,26 @@ export function AgentConfigForm(props: AgentConfigFormProps) {
|
||||
}
|
||||
if (overlay.adapterType !== undefined) {
|
||||
patch.adapterType = overlay.adapterType;
|
||||
// When adapter type changes, send only the new config — don't merge
|
||||
// with old config since old adapter fields are meaningless for the new type
|
||||
patch.adapterConfig = overlay.adapterConfig;
|
||||
// When adapter type changes, replace adapter-specific fields but preserve
|
||||
// adapter-agnostic fields (env, promptTemplate, etc.) that are shared
|
||||
// across all adapter types.
|
||||
const existing = (agent.adapterConfig ?? {}) as Record<string, unknown>;
|
||||
const adapterAgnosticKeys = [
|
||||
"env",
|
||||
"promptTemplate",
|
||||
"instructionsFilePath",
|
||||
"cwd",
|
||||
"timeoutSec",
|
||||
"graceSec",
|
||||
"bootstrapPromptTemplate",
|
||||
];
|
||||
const preserved: Record<string, unknown> = {};
|
||||
for (const key of adapterAgnosticKeys) {
|
||||
if (key in existing) {
|
||||
preserved[key] = existing[key];
|
||||
}
|
||||
}
|
||||
patch.adapterConfig = { ...preserved, ...overlay.adapterConfig };
|
||||
} else if (Object.keys(overlay.adapterConfig).length > 0) {
|
||||
const existing = (agent.adapterConfig ?? {}) as Record<string, unknown>;
|
||||
patch.adapterConfig = { ...existing, ...overlay.adapterConfig };
|
||||
@@ -296,9 +313,11 @@ export function AgentConfigForm(props: AgentConfigFormProps) {
|
||||
adapterType === "claude_local" ||
|
||||
adapterType === "codex_local" ||
|
||||
adapterType === "gemini_local" ||
|
||||
adapterType === "hermes_local" ||
|
||||
adapterType === "opencode_local" ||
|
||||
adapterType === "pi_local" ||
|
||||
adapterType === "cursor";
|
||||
const isHermesLocal = adapterType === "hermes_local";
|
||||
const showLegacyWorkingDirectoryField =
|
||||
isLocal && shouldShowLegacyWorkingDirectoryField({ isCreate, adapterConfig: config });
|
||||
const uiAdapter = useMemo(() => getUIAdapter(adapterType), [adapterType]);
|
||||
@@ -315,6 +334,22 @@ export function AgentConfigForm(props: AgentConfigFormProps) {
|
||||
enabled: Boolean(selectedCompanyId),
|
||||
});
|
||||
const models = fetchedModels ?? externalModels ?? [];
|
||||
const {
|
||||
data: detectedModelData,
|
||||
refetch: refetchDetectedModel,
|
||||
} = useQuery({
|
||||
queryKey: selectedCompanyId
|
||||
? queryKeys.agents.detectModel(selectedCompanyId, adapterType)
|
||||
: ["agents", "none", "detect-model", adapterType],
|
||||
queryFn: () => {
|
||||
if (!selectedCompanyId) {
|
||||
throw new Error("Select a company to detect the Hermes model");
|
||||
}
|
||||
return agentsApi.detectModel(selectedCompanyId, adapterType);
|
||||
},
|
||||
enabled: Boolean(selectedCompanyId && isHermesLocal),
|
||||
});
|
||||
const detectedModel = detectedModelData?.model ?? null;
|
||||
|
||||
const { data: companyAgents = [] } = useQuery({
|
||||
queryKey: selectedCompanyId ? queryKeys.agents.list(selectedCompanyId) : ["agents", "none", "list"],
|
||||
@@ -688,6 +723,8 @@ export function AgentConfigForm(props: AgentConfigFormProps) {
|
||||
? "codex"
|
||||
: adapterType === "gemini_local"
|
||||
? "gemini"
|
||||
: adapterType === "hermes_local"
|
||||
? "hermes"
|
||||
: adapterType === "pi_local"
|
||||
? "pi"
|
||||
: adapterType === "cursor"
|
||||
@@ -709,9 +746,18 @@ export function AgentConfigForm(props: AgentConfigFormProps) {
|
||||
}
|
||||
open={modelOpen}
|
||||
onOpenChange={setModelOpen}
|
||||
allowDefault={adapterType !== "opencode_local"}
|
||||
required={adapterType === "opencode_local"}
|
||||
allowDefault={adapterType !== "opencode_local" && adapterType !== "hermes_local"}
|
||||
required={adapterType === "opencode_local" || adapterType === "hermes_local"}
|
||||
groupByProvider={adapterType === "opencode_local"}
|
||||
creatable={adapterType === "hermes_local"}
|
||||
detectedModel={adapterType === "hermes_local" ? detectedModel : null}
|
||||
onDetectModel={adapterType === "hermes_local"
|
||||
? async () => {
|
||||
const result = await refetchDetectedModel();
|
||||
return result.data?.model ?? null;
|
||||
}
|
||||
: undefined}
|
||||
detectModelLabel={adapterType === "hermes_local" ? "Detect from Hermes config" : undefined}
|
||||
/>
|
||||
{fetchedModelsError && (
|
||||
<p className="text-xs text-destructive">
|
||||
@@ -976,7 +1022,7 @@ function AdapterEnvironmentResult({ result }: { result: AdapterEnvironmentTestRe
|
||||
|
||||
/* ---- Internal sub-components ---- */
|
||||
|
||||
const ENABLED_ADAPTER_TYPES = new Set(["claude_local", "codex_local", "gemini_local", "opencode_local", "pi_local", "cursor"]);
|
||||
const ENABLED_ADAPTER_TYPES = new Set(["claude_local", "codex_local", "gemini_local", "opencode_local", "pi_local", "cursor", "hermes_local"]);
|
||||
|
||||
/** Display list includes all real adapter types plus UI-only coming-soon entries. */
|
||||
const ADAPTER_DISPLAY_LIST: { value: string; label: string; comingSoon: boolean }[] = [
|
||||
@@ -1293,6 +1339,10 @@ function ModelDropdown({
|
||||
allowDefault,
|
||||
required,
|
||||
groupByProvider,
|
||||
creatable,
|
||||
detectedModel,
|
||||
onDetectModel,
|
||||
detectModelLabel,
|
||||
}: {
|
||||
models: AdapterModel[];
|
||||
value: string;
|
||||
@@ -1302,9 +1352,20 @@ function ModelDropdown({
|
||||
allowDefault: boolean;
|
||||
required: boolean;
|
||||
groupByProvider: boolean;
|
||||
creatable?: boolean;
|
||||
detectedModel?: string | null;
|
||||
onDetectModel?: () => Promise<string | null>;
|
||||
detectModelLabel?: string;
|
||||
}) {
|
||||
const [modelSearch, setModelSearch] = useState("");
|
||||
const [detectingModel, setDetectingModel] = useState(false);
|
||||
const selected = models.find((m) => m.id === value);
|
||||
const manualModel = modelSearch.trim();
|
||||
const canCreateManualModel = Boolean(
|
||||
creatable &&
|
||||
manualModel &&
|
||||
!models.some((m) => m.id.toLowerCase() === manualModel.toLowerCase()),
|
||||
);
|
||||
const filteredModels = useMemo(() => {
|
||||
return models.filter((m) => {
|
||||
if (!modelSearch.trim()) return true;
|
||||
@@ -1341,6 +1402,21 @@ function ModelDropdown({
|
||||
}));
|
||||
}, [filteredModels, groupByProvider]);
|
||||
|
||||
async function handleDetectModel() {
|
||||
if (!onDetectModel) return;
|
||||
setDetectingModel(true);
|
||||
try {
|
||||
const nextModel = await onDetectModel();
|
||||
if (nextModel) {
|
||||
onChange(nextModel);
|
||||
onOpenChange(false);
|
||||
setModelSearch("");
|
||||
}
|
||||
} finally {
|
||||
setDetectingModel(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Field label="Model" hint={help.model}>
|
||||
<Popover
|
||||
@@ -1351,7 +1427,7 @@ function ModelDropdown({
|
||||
}}
|
||||
>
|
||||
<PopoverTrigger asChild>
|
||||
<button className="inline-flex items-center gap-1.5 rounded-md border border-border px-2.5 py-1.5 text-sm hover:bg-accent/50 transition-colors w-full justify-between">
|
||||
<button type="button" className="inline-flex items-center gap-1.5 rounded-md border border-border px-2.5 py-1.5 text-sm hover:bg-accent/50 transition-colors w-full justify-between">
|
||||
<span className={cn(!value && "text-muted-foreground")}>
|
||||
{selected
|
||||
? selected.label
|
||||
@@ -1361,16 +1437,84 @@ function ModelDropdown({
|
||||
</button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-[var(--radix-popover-trigger-width)] p-1" align="start">
|
||||
<input
|
||||
className="w-full px-2 py-1.5 text-xs bg-transparent outline-none border-b border-border mb-1 placeholder:text-muted-foreground/50"
|
||||
placeholder="Search models..."
|
||||
value={modelSearch}
|
||||
onChange={(e) => setModelSearch(e.target.value)}
|
||||
autoFocus
|
||||
/>
|
||||
<div className="relative mb-1">
|
||||
<input
|
||||
className="w-full px-2 py-1.5 pr-6 text-xs bg-transparent outline-none border-b border-border placeholder:text-muted-foreground/50"
|
||||
placeholder={creatable ? "Search models... (type to create)" : "Search models..."}
|
||||
value={modelSearch}
|
||||
onChange={(e) => setModelSearch(e.target.value)}
|
||||
autoFocus
|
||||
/>
|
||||
{modelSearch && (
|
||||
<button
|
||||
type="button"
|
||||
className="absolute right-1.5 top-1/2 -translate-y-1/2 text-muted-foreground hover:text-foreground"
|
||||
onClick={() => setModelSearch("")}
|
||||
>
|
||||
<svg aria-hidden="true" focusable="false" className="h-3 w-3" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||
<line x1="18" y1="6" x2="6" y2="18" />
|
||||
<line x1="6" y1="6" x2="18" y2="18" />
|
||||
</svg>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
{onDetectModel && !detectedModel && !modelSearch.trim() && (
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center gap-1.5 w-full px-2 py-1.5 text-xs rounded hover:bg-accent/50 text-muted-foreground"
|
||||
onClick={() => {
|
||||
void handleDetectModel();
|
||||
}}
|
||||
disabled={detectingModel}
|
||||
>
|
||||
<svg aria-hidden="true" focusable="false" className="h-3 w-3" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||
<path d="M21 12a9 9 0 0 0-9-9 9.75 9.75 0 0 0-6.74 2.74L3 8" />
|
||||
<path d="M3 3v5h5" />
|
||||
</svg>
|
||||
{detectingModel ? "Detecting..." : (detectModelLabel ?? "Detect from config")}
|
||||
</button>
|
||||
)}
|
||||
{value && !models.some((m) => m.id === value) && (
|
||||
<button
|
||||
type="button"
|
||||
className={cn(
|
||||
"flex items-center w-full px-2 py-1.5 text-sm rounded bg-accent/50",
|
||||
)}
|
||||
onClick={() => {
|
||||
onOpenChange(false);
|
||||
}}
|
||||
>
|
||||
<span className="block w-full text-left truncate font-mono text-xs" title={value}>
|
||||
{value}
|
||||
</span>
|
||||
<span className="shrink-0 ml-auto text-[9px] font-medium px-1.5 py-0.5 rounded-full bg-green-500/15 text-green-400 border border-green-500/20">
|
||||
current
|
||||
</span>
|
||||
</button>
|
||||
)}
|
||||
{detectedModel && detectedModel !== value && (
|
||||
<button
|
||||
type="button"
|
||||
className={cn(
|
||||
"flex items-center w-full px-2 py-1.5 text-sm rounded hover:bg-accent/50",
|
||||
)}
|
||||
onClick={() => {
|
||||
onChange(detectedModel);
|
||||
onOpenChange(false);
|
||||
}}
|
||||
>
|
||||
<span className="block w-full text-left truncate font-mono text-xs" title={detectedModel}>
|
||||
{detectedModel}
|
||||
</span>
|
||||
<span className="shrink-0 ml-auto text-[9px] font-medium px-1.5 py-0.5 rounded-full bg-blue-500/15 text-blue-400 border border-blue-500/20">
|
||||
detected
|
||||
</span>
|
||||
</button>
|
||||
)}
|
||||
<div className="max-h-[240px] overflow-y-auto">
|
||||
{allowDefault && (
|
||||
<button
|
||||
type="button"
|
||||
className={cn(
|
||||
"flex items-center gap-2 w-full px-2 py-1.5 text-sm rounded hover:bg-accent/50",
|
||||
!value && "bg-accent",
|
||||
@@ -1383,6 +1527,20 @@ function ModelDropdown({
|
||||
Default
|
||||
</button>
|
||||
)}
|
||||
{canCreateManualModel && (
|
||||
<button
|
||||
type="button"
|
||||
className="flex items-center justify-between gap-2 w-full px-2 py-1.5 text-sm rounded hover:bg-accent/50"
|
||||
onClick={() => {
|
||||
onChange(manualModel);
|
||||
onOpenChange(false);
|
||||
setModelSearch("");
|
||||
}}
|
||||
>
|
||||
<span>Use manual model</span>
|
||||
<span className="text-xs font-mono text-muted-foreground">{manualModel}</span>
|
||||
</button>
|
||||
)}
|
||||
{groupedModels.map((group) => (
|
||||
<div key={group.provider} className="mb-1 last:mb-0">
|
||||
{groupByProvider && (
|
||||
@@ -1392,6 +1550,7 @@ function ModelDropdown({
|
||||
)}
|
||||
{group.entries.map((m) => (
|
||||
<button
|
||||
type="button"
|
||||
key={m.id}
|
||||
className={cn(
|
||||
"flex items-center w-full px-2 py-1.5 text-sm rounded hover:bg-accent/50",
|
||||
@@ -1409,8 +1568,14 @@ function ModelDropdown({
|
||||
))}
|
||||
</div>
|
||||
))}
|
||||
{filteredModels.length === 0 && (
|
||||
<p className="px-2 py-1.5 text-xs text-muted-foreground">No models found.</p>
|
||||
{filteredModels.length === 0 && !canCreateManualModel && (
|
||||
<div className="px-2 py-2 space-y-2">
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{onDetectModel
|
||||
? "No Hermes model detected yet. Configure Hermes or enter a provider/model manually."
|
||||
: "No models found."}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</PopoverContent>
|
||||
|
||||
43
ui/src/components/HermesIcon.tsx
Normal file
43
ui/src/components/HermesIcon.tsx
Normal file
@@ -0,0 +1,43 @@
|
||||
import { cn } from "../lib/utils";
|
||||
|
||||
interface HermesIconProps {
|
||||
className?: string;
|
||||
}
|
||||
|
||||
/**
 * Hermes caduceus icon — winged staff with two intertwined serpents.
 * Replaces the generic Zap icon for the hermes_local adapter type.
 *
 * Renders a 24x24 stroke-based SVG that inherits `currentColor`, so it
 * follows the surrounding text color like the other adapter icons.
 *
 * @param className - Optional extra classes merged via `cn` (e.g. sizing).
 */
export function HermesIcon({ className }: HermesIconProps) {
  return (
    <svg
      viewBox="0 0 24 24"
      fill="none"
      stroke="currentColor"
      strokeWidth="1.5"
      strokeLinecap="round"
      strokeLinejoin="round"
      className={cn(className)}
    >
      {/* Central staff */}
      <line x1="12" y1="6" x2="12" y2="23" />
      {/* Left serpent curves */}
      <path d="M12 8 C10 9 9.5 11 10.5 13 C11.5 15 10 17 12 18" />
      {/* Right serpent curves */}
      <path d="M12 8 C14 9 14.5 11 13.5 13 C12.5 15 14 17 12 18" />
      {/* Snake heads facing outward */}
      <circle cx="10" cy="8" r="0.8" fill="currentColor" stroke="none" />
      <circle cx="14" cy="8" r="0.8" fill="currentColor" stroke="none" />
      {/* Wings at top of staff */}
      <path d="M12 6 L8 3 L6 5 L9 6" strokeWidth="1.2" />
      <path d="M12 6 L16 3 L18 5 L15 6" strokeWidth="1.2" />
      {/* Wing feather details */}
      <line x1="7.5" y1="4" x2="7" y2="5.2" strokeWidth="1" />
      <line x1="16.5" y1="4" x2="17" y2="5.2" strokeWidth="1" />
      {/* Staff sphere at top */}
      <circle cx="12" cy="6.5" r="1.2" />
    </svg>
  );
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user