Merge pull request #2052 from thedotmack/thedotmack/worktree-remap

feat(worktree): scope per worktree, cwd backfill, and merged-worktree adoption
This commit is contained in:
Alex Newman
2026-04-17 20:10:54 -07:00
committed by GitHub
55 changed files with 2003 additions and 1667 deletions

View File

@@ -1,136 +0,0 @@
<claude-mem-context>
# Recent Activity
### Oct 25, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #2374 | 2:55 PM | ✅ | Marketplace metadata version synchronized to 4.2.11 | ~157 |
### Oct 27, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #2757 | 1:23 AM | 🟣 | Released v4.3.3 with Configurable Session Display and First-Time Setup UX | ~391 |
### Nov 4, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #3706 | 9:47 PM | ✅ | Marketplace Plugin Version Synchronized to 5.0.2 | ~162 |
| #3655 | 3:43 PM | ✅ | Version bumped to 5.0.1 across project | ~354 |
### Nov 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4068 | 10:58 PM | ✅ | Committed v5.1.0 release with comprehensive release notes | ~486 |
| #4066 | 10:57 PM | ✅ | Updated marketplace.json version to 5.1.0 | ~192 |
| #3739 | 2:24 PM | ✅ | Updated version to 5.0.3 across project manifests | ~322 |
### Nov 6, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4099 | 1:13 PM | 🟣 | Theme Toggle for Light/Dark Mode | ~253 |
| #4096 | " | ✅ | Marketplace Metadata Version Sync | ~179 |
| #4092 | 1:12 PM | 🔵 | Marketplace Configuration for Claude-Mem Plugin | ~194 |
| #4078 | 12:50 PM | 🔴 | Fixed PM2 ENOENT error on Windows systems | ~286 |
| #4075 | 12:49 PM | ✅ | Marketplace plugin version synchronized to 5.1.1 | ~189 |
### Nov 7, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4612 | 6:33 PM | ✅ | Version Bumped to 5.2.0 Across All Package Metadata | ~359 |
| #4598 | 6:31 PM | ✅ | PR #69 Merged: cleanup/worker Branch Integration | ~469 |
| #4298 | 11:54 AM | 🔴 | Fixed PostToolUse Hook Schema Compliance | ~310 |
| #4295 | 11:53 AM | ✅ | Synchronized Plugin Marketplace Version to 5.1.4 | ~188 |
### Nov 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #5150 | 7:37 PM | 🟣 | Troubleshooting Skill Added to Claude-Mem Plugin | ~427 |
| #5133 | 7:29 PM | ✅ | Version 5.2.3 Released with Build Process | ~487 |
### Nov 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #5941 | 7:14 PM | ✅ | Marketplace Version Updated to 5.4.0 | ~157 |
### Nov 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6341 | 1:49 PM | ✅ | Version Bumped to 5.4.1 | ~239 |
### Nov 11, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6602 | 1:51 PM | ✅ | Version 5.4.5 Released to GitHub | ~279 |
| #6601 | " | ✅ | Version Patch Bump 5.4.4 to 5.4.5 | ~233 |
### Nov 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #8212 | 3:06 PM | 🔵 | Version Consistency Verification Across Multiple Configuration Files | ~238 |
### Nov 25, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #14882 | 1:32 PM | 🔵 | Marketplace Configuration Defines Plugin Version and Source Directory | ~366 |
### Nov 30, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #18064 | 10:52 PM | ✅ | Bumped version to 6.3.7 in marketplace.json | ~179 |
| #18060 | 10:51 PM | 🔵 | Read marketplace.json plugin manifest | ~190 |
### Dec 1, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #18428 | 3:33 PM | 🔵 | Version Conflict in Marketplace Configuration | ~191 |
### Dec 4, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20049 | 3:23 PM | ✅ | Updated marketplace.json version to 6.5.2 | ~203 |
### Dec 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22559 | 1:08 AM | ✅ | Version 7.0.3 committed to repository | ~261 |
| #22551 | 1:07 AM | ✅ | Marketplace metadata updated to version 7.0.3 | ~179 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23440 | 2:25 PM | ✅ | Marketplace Configuration Updated to 7.0.8 | ~188 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26799 | 11:39 PM | ✅ | Marketplace Manifest Version Updated to 7.2.3 | ~248 |
| #26796 | " | ✅ | Version Bumped to 7.2.3 in marketplace.json | ~259 |
| #26792 | 11:38 PM | 🔵 | Current Version Confirmed as 7.2.2 Across All Configuration Files | ~291 |
### Dec 16, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28306 | 10:08 PM | 🔵 | Marketplace Configuration Also Shows Version 7.3.3 | ~220 |
| #27555 | 4:48 PM | ✅ | Version bump committed to main branch | ~242 |
| #27553 | " | ✅ | Version consistency verified across all configuration files | ~195 |
| #27551 | 4:47 PM | ✅ | Marketplace.json version updated to 7.3.1 | ~207 |
</claude-mem-context>

View File

@@ -1,29 +0,0 @@
# Project-Level Skills
This directory contains skills **for developing and maintaining the claude-mem project itself**, not skills that are released as part of the plugin.
## Distinction
**Project Skills** (`.claude/skills/`):
- Used by developers working on claude-mem
- Not included in the plugin distribution
- Project-specific workflows (version bumps, release management, etc.)
- Not synced to `~/.claude/plugins/marketplaces/thedotmack/`
**Plugin Skills** (`plugin/skills/`):
- Released as part of the claude-mem plugin
- Available to all users who install the plugin
- General-purpose memory search functionality
- Synced to user installations via `npm run sync-marketplace`
## Skills in This Directory
### version-bump
Manages semantic versioning for the claude-mem project itself. Handles updating all three version files (package.json, marketplace.json, plugin.json), creating git tags, and GitHub releases.
**Usage**: Only for claude-mem maintainers releasing new versions.
## Adding New Skills
**For claude-mem development** → Add to `.claude/skills/`
**For end users** → Add to `plugin/skills/` (gets distributed with plugin)

570
.plan/worktree-adoption.md Normal file
View File

@@ -0,0 +1,570 @@
# Merged-Worktree Adoption
**Goal**: When a worktree's branch is merged into its parent, the worktree's observations become part of the parent project's observation list — without data movement, destructive schema changes, or lost provenance.
**Approach**: Add a nullable `merged_into_project` column to observations and session_summaries, extend query predicates with `OR merged_into_project = :parent`, propagate the same metadata to Chroma embeddings for semantic-search consistency, detect merges via git (authoritative), run adoption automatically on worker startup, and offer a CLI escape hatch for squash-merges.
**Key design decisions**:
- `observations.project` is **immutable provenance** — never overwritten.
- Merged-status is a **virtual pointer**, not a data move.
- **Chroma metadata stays in lockstep with SQLite** (full consistent sync, not lazy SQL expansion). Single source of truth per row.
- Detection is **git-authoritative** (`git worktree list --porcelain` + `git branch --merged`), with a manual CLI override for squash-merges.
---
## Phase 0 — Documentation Discovery (COMPLETE)
Findings consolidated from three parallel discovery subagents. The following are the ONLY APIs/patterns to copy from. Do not invent alternatives.
### Allowed APIs (copy from these locations)
| Need | File | Lines | What to copy |
|---|---|---|---|
| Migration idempotency via marker file | `src/services/infrastructure/ProcessManager.ts` | 680–830 | `runOneTimeCwdRemap` structure, marker file pattern `.cwd-remap-applied-v1` |
| Worker startup wiring | `src/services/worker-service.ts` | 363–365 | Call site inside `initializeBackground()`, invoked before `dbManager.initialize()` |
| `ALTER TABLE ADD COLUMN` idempotency | `src/services/sqlite/migrations/runner.ts` | 131–141 | `PRAGMA table_info(<table>)` guard before `ALTER TABLE ... ADD COLUMN` |
| Column addition example | `src/services/sqlite/migrations/runner.ts` | 495 | `db.run('ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0')` |
| Observations schema | `src/services/sqlite/migrations/runner.ts` | 82–96 | Existing columns + indices (do not duplicate) |
| `schema_versions` marker table | `src/services/sqlite/migrations/runner.ts` | 51–58 | `INSERT OR IGNORE INTO schema_versions ...` — used only for numbered migrations |
| Logger | `src/utils/logger.ts` | 1–8 | Components: `SYSTEM`, `DB`, `CHROMA_SYNC`. Use `logger.info/warn/error('SYSTEM', ...)` |
| Worktree detection | `src/utils/worktree.ts` | 1–84 | `detectWorktree(cwd): WorktreeInfo { isWorktree, worktreeName, parentRepoPath, parentProjectName }` |
| Project-name derivation | `src/utils/project-name.ts` | 73–119 | `getProjectContext(cwd): ProjectContext { primary, parent, isWorktree, allProjects }` |
| Multi-project read (WHERE to extend) | `src/services/context/ObservationCompiler.ts` | 111–160 | `queryObservationsMulti` → `WHERE o.project IN (${projectPlaceholders})` |
| Same, for summaries | `src/services/context/ObservationCompiler.ts` | 168–196 | Parallel summary-fetching query with `ss.project IN (...)` |
| Context injection endpoint | `src/services/worker/http/routes/SearchRoutes.ts` | 211–253 | `handleContextInject` wires `projects` comma-separated query param into `generateContext` |
| Context entry point | `src/services/context/ContextBuilder.ts` | 126–183 | `generateContext()` picks `queryObservationsMulti` when `projects.length > 1` |
| Chroma metadata attach (observations) | `src/services/sync/ChromaSync.ts` | 132–140 | `baseMetadata` object — includes `project`, `sqlite_id`, etc. This is where `merged_into_project` is added. |
| Chroma collection architecture | `src/services/sync/ChromaSync.ts` | 806 (comment) | **Single shared collection `cm__claude-mem`**, scoped by metadata. Do NOT create a per-merged collection. |
| Chroma filter build (read side) | `src/services/sync/SearchManager.ts` | 174–177 | `whereFilter = { project: options.project }` — extended with `$or` in Phase 3 |
| Chroma update API | `src/services/sync/ChromaSync.ts` (grep) | — | `chroma_update_documents` via MCP — used by existing sync flows |
| CLI entrypoint switch | `src/npx-cli/index.ts` | 28–169 | Plain `switch (command)`, dynamic `import()` of `./commands/<name>.ts`. No commander/cac. |
| Admin-script template | `scripts/cwd-remap.ts` | 1–186 | Bun shebang, argv parsing, `--apply` gate, dry-run default |
| UI observation card | `src/ui/viewer/components/ObservationCard.tsx` | 58 | `<span className="card-project">{observation.project}</span>` — where the merged badge is added |
### Anti-patterns (do NOT do these)
- Do NOT overwrite `observations.project` or `session_summaries.project`. These are immutable provenance.
- Do NOT create a new Chroma collection for merged observations. Deployment uses a single shared `cm__claude-mem` collection.
- Do NOT introduce a `gh` CLI dependency. Codebase has no `gh` usage outside `.github/workflows/`. Use `git` subprocesses only.
- Do NOT use SQLite's unsupported `ALTER TABLE ... ADD COLUMN IF NOT EXISTS` syntax. Use the `PRAGMA table_info` guard instead.
- Do NOT use a CLI framework (commander, cac, yargs). The codebase uses hand-rolled `switch (command)` + `process.argv.slice(2)`.
- Do NOT mutate `ProjectContext.allProjects` to inject merged children. The reverse lookup lives in the SQL/Chroma query predicates, not in `ProjectContext`.
- Do NOT run the lazy "SQL-expand projects then filter Chroma" approach. We want Chroma metadata to be the authoritative filter for semantic search.
---
## Phase 1 — Schema migration
**What to implement**: One nullable column + one index on each of `observations` and `session_summaries`. Idempotent via `PRAGMA table_info` guard.
### Files touched
- `src/services/sqlite/migrations/runner.ts`
### Implementation
Add a new method `ensureMergedIntoProjectColumns()` on `MigrationRunner`, modeled on the pattern at lines 131–141:
```typescript
private ensureMergedIntoProjectColumns(): void {
const obsCols = this.db
.query('PRAGMA table_info(observations)')
.all() as TableColumnInfo[];
if (!obsCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE observations ADD COLUMN merged_into_project TEXT');
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_observations_merged_into ON observations(merged_into_project)'
);
}
const sumCols = this.db
.query('PRAGMA table_info(session_summaries)')
.all() as TableColumnInfo[];
if (!sumCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE session_summaries ADD COLUMN merged_into_project TEXT');
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_summaries_merged_into ON session_summaries(merged_into_project)'
);
}
}
```
Call from `runAllMigrations()` — append immediately after the last existing `ensure*` method so it runs on every worker startup. The `PRAGMA table_info` check is O(1) and makes re-runs cheap.
### Verification
- Start the worker. Migration logs show no error.
- `sqlite3 ~/.claude-mem/claude-mem.db ".schema observations"` shows `merged_into_project TEXT`.
- Same for `session_summaries`.
- Restart worker → no ALTER TABLE error (guard worked).
- `sqlite3 ~/.claude-mem/claude-mem.db ".indices observations"` lists `idx_observations_merged_into`.
### Anti-pattern guards
- Do NOT use `ALTER TABLE ... ADD COLUMN IF NOT EXISTS` — SQLite does not support it.
- Do NOT bump `schema_versions` for this migration. That table is for numbered migration history; the column-existence check is self-idempotent.
---
## Phase 2 — Adoption engine (SQLite + Chroma consistent)
**What to implement**: A single function that, given a parent repo path, detects all merged-worktree branches and stamps `merged_into_project` on both SQLite rows AND Chroma metadata in the same logical operation. Reused by worker startup (Phase 4) and CLI (Phase 5).
### Files touched
- `src/services/infrastructure/WorktreeAdoption.ts` (new)
- `src/services/sync/ChromaSync.ts` — add `updateMergedIntoProject(sqliteIds: number[], mergedIntoProject: string): Promise<void>`
### Public API
```typescript
export interface AdoptionResult {
repoPath: string;
parentProject: string;
scannedWorktrees: number;
mergedBranches: string[]; // branches classified as merged
adoptedObservations: number; // SQLite rows stamped
adoptedSummaries: number;
chromaUpdates: number; // Chroma docs patched
chromaFailed: number;
dryRun: boolean;
errors: Array<{ worktree: string; error: string }>;
}
export async function adoptMergedWorktrees(opts: {
repoPath?: string; // defaults to process.cwd()
dataDirectory?: string; // defaults to DATA_DIR
dryRun?: boolean;
onlyBranch?: string; // manual override for squash-merge case
}): Promise<AdoptionResult>;
```
### Implementation outline
Mirror `runOneTimeCwdRemap` in `ProcessManager.ts:680–830` for DB lifecycle (open, transaction, finally-close). Add Chroma sync step after SQL commit.
1. **Resolve main repo path**
- `const mainRepo = execSync('git rev-parse --git-common-dir', { cwd: opts.repoPath ?? process.cwd() })` — strip `/.git` suffix to get the working tree root.
- This pattern is used in `scripts/cwd-remap.ts:48–51`. Copy that handling verbatim.
2. **Resolve parent project name**
- `const parentProject = getProjectContext(mainRepo).primary` — imported from `src/utils/project-name.ts`.
3. **Enumerate worktrees**
- `git -C <mainRepo> worktree list --porcelain` → parse `worktree <path>`, `branch refs/heads/<name>` lines.
- Filter out the main worktree entry (its path equals `mainRepo`).
4. **Classify as merged**
- If `opts.onlyBranch` provided: include only that branch (squash-merge escape hatch).
- Else: `git -C <mainRepo> branch --merged HEAD --format='%(refname:short)'` → intersect with worktree branch list.
5. **Resolve worktree project names**
- For each merged worktree path, `const worktreeProject = getProjectContext(worktreePath).primary` → yields the composite `parent/worktree` name.
6. **SQL transaction** (model on `ProcessManager.ts:745–760, 808`)
- Open DB via `new Database(dbPath)` (manage own handle — must close before `dbManager.initialize()` runs).
- For each merged worktree:
- `SELECT id FROM observations WHERE project = ? AND merged_into_project IS NULL` → collect sqlite IDs to later push to Chroma.
- `UPDATE observations SET merged_into_project = ? WHERE project = ? AND merged_into_project IS NULL`.
- Same for `session_summaries`.
- Commit transaction.
- If `dryRun`, roll back instead.
7. **Chroma metadata sync** (full consistent — NOT lazy)
- For the set of sqlite IDs just stamped, call `ChromaSync.updateMergedIntoProject(sqliteIds, parentProject)`.
- `ChromaSync.updateMergedIntoProject` implementation:
```typescript
async updateMergedIntoProject(sqliteIds: number[], mergedIntoProject: string): Promise<void> {
if (sqliteIds.length === 0) return;
// Batch: look up Chroma doc IDs via metadata filter on sqlite_id, then patch.
const where = { sqlite_id: { $in: sqliteIds } };
const existing = await chromaMcp.callTool('chroma_get_documents', {
collection_name: this.collectionName,
where,
include: ['metadatas']
});
const docIds: string[] = existing.ids ?? [];
const metadatas: Record<string, unknown>[] = (existing.metadatas ?? []).map(m => ({
...m,
merged_into_project: mergedIntoProject
}));
if (docIds.length === 0) return;
await chromaMcp.callTool('chroma_update_documents', {
collection_name: this.collectionName,
ids: docIds,
metadatas
});
}
```
- On Chroma error: log via `logger.error('CHROMA_SYNC', ...)`, increment `chromaFailed`, but do NOT roll back SQL. SQL is source of truth; a subsequent run will retry the Chroma patch (idempotent — metadata set to same value is a no-op).
8. **Logging**
- `logger.info('SYSTEM', 'Worktree adoption applied', { parentProject, adoptedObservations, adoptedSummaries, chromaUpdates, chromaFailed, mergedBranches })`.
- On per-worktree error: `logger.warn('SYSTEM', 'Worktree adoption skipped branch', { worktree, error })` — collect in `errors[]`, continue.
9. **Re-adoption safety net**
- Because Chroma updates can fail independently, add a secondary SQL-side reconciliation: on each adoption run, also find `observations WHERE merged_into_project IS NOT NULL` whose Chroma metadata lacks the field. Run the same `updateMergedIntoProject` on that delta.
- Keep this bounded: only reconcile rows adopted in the last N days (e.g. 30) to avoid full-table scans.
### Verification
- Dry-run against a repo with one known-merged worktree: result shows correct `adoptedObservations`, DB unchanged, no Chroma writes.
- Real run: `SELECT COUNT(*) FROM observations WHERE merged_into_project IS NOT NULL` matches `adoptedObservations`.
- Chroma: `chroma_get_documents` with `where: { merged_into_project: 'claude-mem' }` returns the same row count.
- Re-run: `adoptedObservations = 0`, `chromaUpdates = 0` (both idempotent).
- Simulate Chroma outage (stop chroma): adoption logs `CHROMA_SYNC` error, `chromaFailed > 0`, SQL still stamps. Next run with Chroma back up reconciles the delta.
### Anti-pattern guards
- Do NOT rollback SQL on Chroma failure. SQL is authoritative; Chroma is a derived index.
- Do NOT call Chroma per-row. Batch by sqlite_id set to minimize round-trips.
- Do NOT adopt branches not in `git branch --merged HEAD` unless `onlyBranch` override is explicit.
- Do NOT touch observations whose `project` is not a composite worktree name. The worktree-name match is the safety gate.
- Do NOT skip the `merged_into_project IS NULL` clause on UPDATE — this is what makes the run idempotent.
---
## Phase 3 — Query plumbing (SQLite + Chroma $or)
**What to implement**: Extend the two multi-project read queries in `ObservationCompiler.ts` and the Chroma filter in `SearchManager.ts` to treat `merged_into_project` as a second match axis. Direct Chroma `$or` filter — no SQL-side expansion dance.
### Files touched
- `src/services/context/ObservationCompiler.ts`
- `src/services/sync/SearchManager.ts`
### 3a. SQLite WHERE-clause extension
`src/services/context/ObservationCompiler.ts:111–160` (`queryObservationsMulti`): change
```sql
WHERE o.project IN (${projectPlaceholders})
```
to
```sql
WHERE (o.project IN (${projectPlaceholders})
OR o.merged_into_project IN (${projectPlaceholders}))
```
Double-bind the `projects` array:
```typescript
.all(
...projects, // for o.project IN (...)
...projects, // for o.merged_into_project IN (...)
...typeArray,
...conceptArray,
...(platformSource ? [platformSource] : []),
config.totalObservationCount
)
```
`src/services/context/ObservationCompiler.ts:168–196` (summary variant): apply the same extension, using `ss.merged_into_project`.
### 3b. Chroma filter extension
`src/services/sync/SearchManager.ts:174–177`:
```typescript
if (options.project) {
const projectFilter = {
$or: [
{ project: options.project },
{ merged_into_project: options.project }
]
};
whereFilter = whereFilter
? { $and: [whereFilter, projectFilter] }
: projectFilter;
}
```
When `options.project` is an array (if that path exists — grep first), build a flat `$or` over both fields × all requested projects.
### 3c. New-observation Chroma metadata
`src/services/sync/ChromaSync.ts:132–140` — extend `baseMetadata`:
```typescript
const baseMetadata: Record<string, string | number | null> = {
sqlite_id: obs.id,
doc_type: 'observation',
memory_session_id: obs.memory_session_id,
project: obs.project,
merged_into_project: obs.merged_into_project ?? null, // NEW
created_at_epoch: obs.created_at_epoch,
type: obs.type || 'discovery',
title: obs.title || 'Untitled'
};
```
This makes every new observation Chroma-compatible with the Phase 3b filter from the first sync. For existing rows, Phase 2's adoption engine patches metadata retroactively.
**Check Chroma metadata type constraints**: Chroma rejects `null` in metadata — confirm via a quick test. If `null` is rejected, OMIT the field when unset (use `if (obs.merged_into_project) baseMetadata.merged_into_project = obs.merged_into_project;`).
### 3d. ContextBuilder compatibility check
`src/services/context/ContextBuilder.ts:126–183` — no change needed. `projects = input?.projects ?? context.allProjects` stays as-is; the extended WHERE clause in Phase 3a does all the work.
### Verification
- Before adoption: context-inject API for `claude-mem` returns N observations.
- After adoption of `claude-mem/dar-es-salaam`: API returns N + M (M = count of dar-es-salaam's own observations).
- Semantic search via Chroma (`/search` endpoint or MCP) with `project=claude-mem` returns dar-es-salaam-origin rows too.
- Worktree-local queries (`projects=[claude-mem, claude-mem/dar-es-salaam]`) still return `[parent + own]` unchanged.
- SQL EXPLAIN on the extended WHERE shows it uses `idx_observations_project` OR `idx_observations_merged_into` (both indices hit).
### Anti-pattern guards
- Do NOT lose the `o.project` filter — it's still required (merged-row predicate is additive, not a replacement).
- Do NOT forget to double-bind `projects` in the prepared statement — placeholder count must match argument count.
- Do NOT add a subquery or JOIN for merged discovery. A flat `OR` + index is faster.
- Do NOT write `null` into Chroma metadata if Chroma rejects it. Use the "omit if unset" pattern.
---
## Phase 4 — Automatic trigger on worker startup
**What to implement**: Call `adoptMergedWorktrees()` during worker startup, immediately after `runOneTimeCwdRemap()`. **Not** marker-gated — it runs every worker startup because git state evolves and the engine is idempotent.
### Files touched
- `src/services/worker-service.ts`
### Implementation
Import alongside existing `ProcessManager` imports at lines 41–53:
```typescript
import { adoptMergedWorktrees } from './infrastructure/WorktreeAdoption.js';
```
Insert immediately after the existing `runOneTimeCwdRemap()` call at lines 363–365:
```typescript
runOneTimeCwdRemap();
try {
const result = await adoptMergedWorktrees({});
if (result.adoptedObservations > 0 || result.chromaUpdates > 0) {
logger.info('SYSTEM', 'Merged worktrees adopted on startup', result);
}
if (result.errors.length > 0) {
logger.warn('SYSTEM', 'Worktree adoption had per-branch errors', { errors: result.errors });
}
} catch (err) {
logger.error('SYSTEM', 'Worktree adoption failed (non-fatal)', {}, err as Error);
}
```
**DB lifecycle note**: `adoptMergedWorktrees` must manage its own DB handle (open + close) before `dbManager.initialize()` runs at line 380. Mirror `runOneTimeCwdRemap`'s finally-block pattern.
### Verification
- Restart worker. Log shows "Merged worktrees adopted on startup" only on first run after a new merge lands.
- Subsequent restarts log nothing (idempotent).
- Simulate adoption exception (e.g., rename git temporarily): log shows error, worker startup continues successfully.
- Build-and-sync restart picks up new merges without manual intervention.
### Anti-pattern guards
- Do NOT block worker startup on adoption failure. Wrap in try/catch; swallow + log.
- Do NOT run adoption after `dbManager.initialize()`. The engine manages its own DB handle; two handles at once risk lock contention.
- Do NOT await Chroma sync before returning SQL success. Internally, yes; but don't make worker startup hang on Chroma I/O — cap with a reasonable timeout inside the engine.
---
## Phase 5 — CLI escape hatch
**What to implement**: `claude-mem adopt [--branch <name>] [--dry-run]` — covers squash-merge where `git branch --merged` returns nothing, and provides a manual override for any adoption run.
### Files touched
- `src/npx-cli/commands/adopt.ts` (new)
- `src/npx-cli/index.ts` (add `case 'adopt'`)
- `scripts/adopt-worktrees.ts` (new, optional — admin script for bulk ops)
### 5a. Command module
`src/npx-cli/commands/adopt.ts` — follow shape of sibling commands (dynamic-imported by the switch):
```typescript
import pc from 'picocolors';
import { adoptMergedWorktrees } from '../../services/infrastructure/WorktreeAdoption.js';
export interface AdoptCommandOptions {
dryRun?: boolean;
onlyBranch?: string;
}
export async function runAdoptCommand(opts: AdoptCommandOptions): Promise<void> {
const result = await adoptMergedWorktrees({
dryRun: opts.dryRun,
onlyBranch: opts.onlyBranch
});
console.log(pc.bold(`\nWorktree adoption ${result.dryRun ? pc.yellow('(dry-run)') : pc.green('(applied)')}`));
console.log(` Parent project: ${result.parentProject}`);
console.log(` Worktrees scanned: ${result.scannedWorktrees}`);
console.log(` Merged branches: ${result.mergedBranches.join(', ') || '(none)'}`);
console.log(` Observations adopted: ${result.adoptedObservations}`);
console.log(` Summaries adopted: ${result.adoptedSummaries}`);
console.log(` Chroma docs updated: ${result.chromaUpdates}`);
if (result.chromaFailed > 0) {
console.log(pc.yellow(` Chroma sync failures: ${result.chromaFailed} (will retry on next run)`));
}
for (const err of result.errors) {
console.log(pc.red(` ! ${err.worktree}: ${err.error}`));
}
}
```
### 5b. CLI switch
`src/npx-cli/index.ts` — add between existing cases, following the pattern at lines 28–169:
```typescript
case 'adopt': {
const dryRun = args.includes('--dry-run');
const branchIndex = args.indexOf('--branch');
const onlyBranch = branchIndex !== -1 ? args[branchIndex + 1] : undefined;
const { runAdoptCommand } = await import('./commands/adopt.js');
await runAdoptCommand({ dryRun, onlyBranch });
break;
}
```
### 5c. Admin script (optional)
`scripts/adopt-worktrees.ts` — Bun shebang script for users without the plugin installed. Model on `scripts/cwd-remap.ts:1–186`. Default: dry-run. Pass `--apply` to commit.
### Verification
- `npx claude-mem adopt --dry-run` in a repo with merged worktrees prints what WOULD be adopted without writing.
- `npx claude-mem adopt` writes + prints counts.
- `npx claude-mem adopt --branch feature/foo` forces adoption of that branch even if `git branch --merged` doesn't include it (squash case).
- `bun scripts/adopt-worktrees.ts --apply` equivalent to the CLI.
- Help text / unknown command still reports the existing error (CLI pattern preserved).
### Anti-pattern guards
- Do NOT require running from the worktree. Detection always resolves up to the common-dir, regardless of cwd.
- Do NOT default to `--apply`. Dry-run first matches `scripts/cwd-remap.ts` ergonomics.
- Do NOT introduce `commander`, `yargs`, `cac`. Stay with the existing hand-rolled parser.
---
## Phase 6 — UI surfacing
**What to implement**: When the viewer shows an observation in a parent-project context that originated in a merged worktree, display a "merged from <worktree>" badge so provenance is visible. Keep the original `project` field rendered too.
### Files touched
- `src/ui/viewer/components/ObservationCard.tsx`
- Type definition for `Observation` — wherever `.project` is declared, add `merged_into_project?: string | null`.
- Observation serializer on the worker → UI path (grep for `doc_type: 'observation'` or `serializeObservation` to find it).
- CSS file for ObservationCard styles.
### Implementation
Locate the current label render at `src/ui/viewer/components/ObservationCard.tsx:58`:
```tsx
<span className="card-project">{observation.project}</span>
```
Extend to:
```tsx
<span className="card-project">{observation.project}</span>
{observation.merged_into_project && (
<span className="card-merged-badge" title={`Merged into ${observation.merged_into_project}`}>
merged → {observation.merged_into_project}
</span>
)}
```
Add CSS for `.card-merged-badge` — subtle secondary chip style (muted color, smaller font). Match existing `.card-source` / `.card-project` aesthetics.
### Verification
- After adoption, open viewer at `http://localhost:37777`, select the parent project. Merged observations show both their origin worktree name AND the "merged →" badge.
- Worktree view (if still addressable) shows no badge (badge only renders when `merged_into_project` is set; a worktree viewing its own observations would not see it, since in that view `merged_into_project` is the PARENT name, not the current project).
- Hover tooltip shows full target project name.
### Anti-pattern guards
- Do NOT hide merged observations in the parent view. The goal is visibility.
- Do NOT replace `project` display with `merged_into_project`. Both are meaningful: `project` = origin, `merged_into_project` = current home.
- Do NOT require a UI setting toggle to show the badge. Default on.
---
## Phase 7 — Verification pass
### Unit tests
- `adoptMergedWorktrees({ dryRun: true })` against a fixture repo with `[merged, unmerged, squash-merged]` worktrees → classification matches expectation.
- `ChromaSync.updateMergedIntoProject` on an empty `sqliteIds` array → no-op, no Chroma call.
- Extended `queryObservationsMulti` with a mixed set of `project` and `merged_into_project` matches → returns union, sorted by `created_at_epoch DESC`.
### Integration tests
- Start worker → create synthetic observations under `claude-mem/test-wt` → simulate branch merge (`git merge`) → restart worker → context-inject API for `claude-mem` returns test-wt observations.
- Same flow with a squash-merge → auto-adoption misses → run `claude-mem adopt --branch test-wt` → API now returns them.
- Re-run `claude-mem adopt` twice: second run reports `adoptedObservations: 0, chromaUpdates: 0`.
### Anti-pattern grep checks
Run before landing:
```bash
# No one renamed the project field
rg "UPDATE observations SET project" src/
# (Expected: zero hits other than the existing CWD remap)
# Adoption only touches via IS NULL guard
rg "merged_into_project" src/ -C2
# (Expected: all UPDATE sites include "IS NULL" predicate)
# CLI registered
rg "case 'adopt'" src/npx-cli/index.ts
# (Expected: one hit)
# Chroma metadata extension present
rg "merged_into_project" src/services/sync/ChromaSync.ts
# (Expected: hits in baseMetadata and updateMergedIntoProject)
# No gh CLI introduced
rg "\\bgh\\s+(pr|issue|api)" src/ scripts/
# (Expected: zero hits outside .github/workflows/)
```
### Documentation cross-check
- ObservationCompiler WHERE clause matches the shape used by the shipped worktree-reads-parent feature — both clauses symmetric, visible in a single read of the file.
- Chroma metadata field name `merged_into_project` matches SQLite column name exactly (no `mergedIntoProject`, `merged_project`, etc.).
- CLI `--branch` flag accepts the same format as worktree composite names.
---
## Summary
| Phase | Files touched | New LOC (approx.) |
|---|---|---|
| 1. Schema | `src/services/sqlite/migrations/runner.ts` | ~25 |
| 2. Adoption engine | `src/services/infrastructure/WorktreeAdoption.ts` (new), `src/services/sync/ChromaSync.ts` (new method) | ~200 |
| 3. Query plumbing | `src/services/context/ObservationCompiler.ts`, `src/services/sync/SearchManager.ts`, `src/services/sync/ChromaSync.ts` | ~40 |
| 4. Auto-trigger | `src/services/worker-service.ts` | ~15 |
| 5. CLI | `src/npx-cli/commands/adopt.ts` (new), `src/npx-cli/index.ts`, `scripts/adopt-worktrees.ts` (new) | ~100 |
| 6. UI | `src/ui/viewer/components/ObservationCard.tsx`, Observation type, serializer, CSS | ~20 |
| 7. Tests + verification | scattered | — |
| **Total** | | **~400 LOC** |
**Reversibility**: `UPDATE observations SET merged_into_project = NULL` + a Chroma `update_documents` call with the field omitted restores pre-adoption state completely. Nothing is destroyed.
**Architecture fit**: Mirrors the just-shipped CWD remap migration (`runOneTimeCwdRemap`) for structure, lifecycle, and logging conventions. Chroma metadata sync matches the existing per-observation attach pattern.
**Blast radius**: Zero risk to existing data (no writes to `project` field). Chroma additions are metadata-only (embeddings untouched). Query extensions are additive OR clauses — existing queries still return what they did.

View File

@@ -1,83 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Nov 6, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4241 | 11:19 PM | 🟣 | Object-Oriented Architecture Design Document Created | ~662 |
| #4240 | 11:11 PM | 🟣 | Worker Service Rewrite Blueprint Created | ~541 |
| #4239 | 11:07 PM | 🟣 | Comprehensive Worker Service Performance Analysis Document Created | ~541 |
| #4238 | 10:59 PM | 🔵 | Overhead Analysis Document Checked | ~203 |
### Nov 7, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4609 | 6:33 PM | ✅ | PR #69 Successfully Merged to Main Branch | ~516 |
| #4600 | 6:31 PM | 🟣 | Added Worker Service Documentation Suite | ~441 |
| #4597 | " | 🔄 | Worker Service Refactored to Object-Oriented Architecture | ~473 |
### Nov 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #5539 | 10:20 PM | 🔵 | Harsh critical audit of context-hook reveals systematic anti-patterns | ~3154 |
| #5497 | 9:29 PM | 🔵 | Harsh critical audit of context-hook reveals systematic anti-patterns | ~2815 |
| #5495 | 9:28 PM | 🔵 | Context Hook Audit Reveals Project Anti-Patterns | ~660 |
| #5476 | 9:17 PM | 🔵 | Critical Code Audit Identified 14 Anti-Patterns in Context Hook | ~887 |
| #5391 | 8:45 PM | 🔵 | Critical Code Quality Audit of Context Hook Implementation | ~720 |
| #5150 | 7:37 PM | 🟣 | Troubleshooting Skill Added to Claude-Mem Plugin | ~427 |
### Nov 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6161 | 11:55 PM | 🔵 | YC W26 Application Research and Preparation Completed for Claude-Mem | ~1628 |
| #6155 | 11:47 PM | ✅ | Comprehensive Y Combinator Winter 2026 Application Notes Created | ~1045 |
| #5979 | 7:58 PM | 🔵 | Smart Contextualization Feature Architecture | ~560 |
| #5971 | 7:49 PM | 🔵 | Hooks Reference Documentation Structure | ~448 |
| #5929 | 7:08 PM | ✅ | Documentation Updates for v5.4.0 Skill-Based Search Migration | ~604 |
| #5927 | " | ✅ | Updated Configuration Documentation for Skill-Based Search | ~497 |
| #5920 | 7:05 PM | ✅ | Renamed Architecture Documentation File Reference | ~271 |
### Nov 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #11515 | 8:22 PM | 🔵 | Smart Contextualization Architecture Retrieved with Command Hook Pattern Details | ~502 |
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22294 | 9:43 PM | 🔵 | Documentation Site Structure Located | ~359 |
### Dec 12, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24430 | 8:27 PM | ✅ | Removed Final Platform Check Reference from Linux Section | ~320 |
| #24429 | " | ✅ | Final Platform Check Reference Removal from Linux Section | ~274 |
| #24428 | " | ✅ | Corrected Second Line Number Reference for Migration Marker Logic | ~267 |
| #24427 | 8:26 PM | ✅ | Updated Line Number Reference for PM2 Cleanup Implementation | ~260 |
| #24426 | " | ✅ | Removed Platform Check from Manual Marker Deletion Scenario | ~338 |
| #24425 | " | ✅ | Removed Platform Check from Fresh Install Scenario Flow | ~314 |
| #24424 | 8:25 PM | ✅ | Renumbered Manual Marker Deletion Scenario | ~285 |
| #24423 | " | ✅ | Renumbered Fresh Install Scenario | ~243 |
| #24422 | " | ✅ | Removed Obsolete Windows Platform Detection Scenario | ~311 |
| #24421 | " | ✅ | Removed Platform Check from macOS Migration Documentation | ~294 |
| #24420 | 8:24 PM | ✅ | Platform Check Removed from Migration Documentation | ~288 |
| #24417 | 8:16 PM | ✅ | Code Reference Example Updated to Reflect Actual Cross-Platform Implementation | ~366 |
| #24416 | " | ✅ | Architecture Decision Documentation Updated to Reflect Cross-Platform PM2 Cleanup Rationale | ~442 |
| #24415 | 8:15 PM | ✅ | Migration Marker Lifecycle Documentation Updated for Unified Cross-Platform Behavior | ~463 |
| #24414 | " | ✅ | Platform Comparison Table Updated to Reflect Unified Cross-Platform Migration | ~351 |
| #24413 | " | ✅ | Windows Platform-Specific Documentation Completely Rewritten for Unified Migration | ~428 |
| #24412 | " | ✅ | User Experience Timeline Updated for Cross-Platform PM2 Cleanup | ~291 |
| #24411 | 8:14 PM | ✅ | Migration Marker Lifecycle Documentation Updated for All Platforms | ~277 |
| #24410 | " | ✅ | Marker File Platform Behavior Documentation Updated for Unified Migration | ~282 |
| #24409 | " | ✅ | Migration Steps Documentation Updated for Cross-Platform PM2 Cleanup | ~278 |
| #24408 | 8:13 PM | ✅ | PM2 Migration Documentation Updated to Remove Windows Platform Check | ~280 |
</claude-mem-context>

View File

@@ -1,88 +0,0 @@
# Claude-Mem Public Documentation
## What This Folder Is
This `docs/public/` folder contains the **Mintlify documentation site** - the official user-facing documentation for claude-mem. It's a structured documentation platform with a specific file format and organization.
## Folder Structure
```
docs/
├── public/ ← You are here (Mintlify MDX files)
│ ├── *.mdx - User-facing documentation pages
│ ├── docs.json - Mintlify configuration and navigation
│ ├── architecture/ - Technical architecture docs
│ ├── usage/ - User guides and workflows
│ └── *.webp, *.gif - Assets (logos, screenshots)
└── context/ ← Internal documentation (DO NOT put here)
└── *.md - Planning docs, audits, references
```
## File Requirements
### Mintlify Documentation Files (.mdx)
All official documentation files must be:
- Written in `.mdx` format (Markdown with JSX support)
- Listed in `docs.json` navigation structure
- Follow Mintlify's schema and conventions
The documentation is organized into these sections:
- **Get Started**: Introduction, installation, usage guides
- **Best Practices**: Context engineering, progressive disclosure
- **Configuration & Development**: Settings, dev workflow, troubleshooting
- **Architecture**: System design, components, technical details
### Configuration File
`docs.json` defines:
- Site metadata (name, description, theme)
- Navigation structure
- Branding (logos, colors)
- Footer links and social media
## What Does NOT Belong Here
**Planning documents, design docs, and reference materials go in `/docs/context/` instead:**
Files that belong in `/docs/context/` (NOT here):
- Planning documents (`*-plan.md`, `*-outline.md`)
- Implementation analysis (`*-audit.md`, `*-code-reference.md`)
- Error tracking (`typescript-errors.md`)
- Internal design documents
- PR review responses
- Reference materials (like `agent-sdk-ref.md`)
- Work-in-progress documentation
## How to Add Official Documentation
1. Create a new `.mdx` file in the appropriate subdirectory
2. Add the file path to `docs.json` navigation
3. Use Mintlify's frontmatter and components
4. Follow the existing documentation style
5. Test locally: `npx mintlify dev`
## Development Workflow
**For contributors working on claude-mem:**
- Read `/CLAUDE.md` in the project root for development instructions
- Place planning/design docs in `/docs/context/`
- Only add user-facing documentation to `/docs/public/`
- Test documentation locally with Mintlify CLI before committing
## Testing Documentation
```bash
# Validate docs structure
npx mintlify validate
# Check for broken links
npx mintlify broken-links
# Run local dev server
npx mintlify dev
```
## Summary
**Simple Rule**:
- `/docs/public/` = Official user documentation (Mintlify .mdx files) ← YOU ARE HERE
- `/docs/context/` = Internal docs, plans, references, audits

View File

@@ -1,21 +0,0 @@
<claude-mem-context>
# Recent Activity
### Nov 6, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4091 | 1:12 PM | 🔵 | Claude Plugin Configuration Structure | ~170 |
### Nov 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #5739 | 4:43 PM | 🔵 | Plugin Metadata Configuration | ~199 |
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22284 | 9:41 PM | 🔵 | Claude Plugin Metadata Configuration | ~183 |
</claude-mem-context>

View File

@@ -1,9 +0,0 @@
<claude-mem-context>
# Recent Activity
### Jan 10, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #39050 | 3:44 PM | 🔵 | Plugin commands directory is empty | ~255 |
</claude-mem-context>

View File

@@ -1,35 +0,0 @@
<claude-mem-context>
# Recent Activity
### Oct 25, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #2518 | 11:47 PM | 🔴 | Removed Invalid 'matcher' Field from SessionStart Hook | ~228 |
| #2517 | " | 🔵 | Project hooks.json Template Also Empty | ~222 |
| #2501 | 11:11 PM | 🔵 | Context Hook Fails Due to Missing @anthropic-ai/sdk Dependency | ~245 |
### Oct 27, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #2718 | 12:00 AM | 🔴 | Removed incorrect failOnError configuration from hook | ~165 |
### Nov 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #11518 | 8:22 PM | 🔵 | Smart Contextualization Switched from Skill to HTTP API | ~498 |
### Dec 24, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32309 | 3:09 PM | 🔵 | Claude-mem hooks system configuration structure | ~435 |
### Jan 9, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #38802 | 5:11 PM | 🔵 | Claude-Mem Hook Configuration Architecture | ~450 |
</claude-mem-context>

View File

@@ -1,125 +0,0 @@
Never read built source files in this directory. These are compiled outputs — read the source files in `src/` instead.
<claude-mem-context>
# Recent Activity
### Dec 4, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20052 | 3:23 PM | ✅ | Built and deployed version 6.5.2 to marketplace | ~321 |
### Dec 7, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #21251 | 6:06 PM | 🔵 | Context Hook Plugin Architecture and Worker Communication | ~405 |
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22092 | 6:40 PM | 🔵 | Queue Depth Check Not Found in Minified Code | ~217 |
| #22091 | " | 🔵 | Save Hook Script Structure Revealed | ~472 |
| #22085 | 6:34 PM | 🔵 | Examined pre-tool-use-hook.js implementation showing timing-only logic | ~330 |
### Dec 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22557 | 1:08 AM | ✅ | Build completed for version 7.0.3 | ~342 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23444 | 2:25 PM | 🟣 | Build Pipeline Execution Successful | ~293 |
### Dec 11, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24057 | 2:56 PM | ✅ | Hook Scripts Shebang Verification | ~294 |
| #24056 | 2:55 PM | ✅ | Worker CLI Shebang Verification | ~258 |
| #24055 | " | ✅ | Build Successful with Bun Runtime Shebangs | ~355 |
### Dec 12, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24636 | 10:46 PM | 🔵 | Duplicate Smart Install Scripts in Project Structure | ~288 |
| #24635 | " | 🔵 | Claude-Mem Smart Install Script Architecture | ~371 |
| #24359 | 7:00 PM | 🟣 | Phase 1 Critical Code Fixes Completed via Agent Task | ~441 |
| #24358 | 6:59 PM | ✅ | Completed Phase 1 Code Fixes for better-sqlite3 Migration | ~385 |
| #24357 | " | ✅ | Removed createRequire Import from smart-install.js | ~284 |
| #24356 | " | ✅ | Removed Native Module Verification from main() Function | ~384 |
| #24355 | " | ✅ | Removed better-sqlite3 Error Detection from runNpmInstall() | ~324 |
| #24354 | 6:58 PM | ✅ | Removed getWindowsErrorHelp() Function from smart-install.js | ~356 |
| #24353 | " | ✅ | Removed verifyNativeModules() Function from smart-install.js | ~340 |
| #24352 | " | ✅ | Removed better-sqlite3 Existence Check from needsInstall() | ~266 |
| #24351 | " | ✅ | Removed BETTER_SQLITE3_PATH Constant from smart-install.js | ~226 |
| #24344 | 6:56 PM | 🔵 | smart-install.js Contains Obsolete better-sqlite3 Dependencies | ~380 |
### Dec 13, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #25286 | 8:41 PM | 🔵 | New Hook Fails with Node.js Path Error | ~298 |
| #25285 | " | 🔵 | Context Hook Runs Successfully with Node.js | ~306 |
| #25283 | " | 🔵 | Bun Wrapper Analysis: Fallback Detection System | ~416 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26800 | 11:39 PM | ✅ | Version 7.2.3 Build Complete With Worker Restart Fix | ~394 |
| #26791 | 11:38 PM | ✅ | Phase 3 Complete: Project Built Successfully With Worker Restart Fix | ~446 |
| #26720 | 11:23 PM | 🔵 | Smart Install Handles Dependencies But No Worker Coordination | ~468 |
| #26719 | " | 🔵 | Worker CLI Provides Start/Stop/Restart Commands With Health Check Validation | ~490 |
| #26718 | " | 🔵 | Worker CLI Restart Implementation Details | ~452 |
| #26717 | 11:22 PM | 🔵 | Context Hook Worker Startup Logic Handles Initial Start But Not Post-Update Restart | ~485 |
| #26716 | " | 🔵 | Context Hook Worker Startup Logic Revealed | ~538 |
| #26715 | " | 🔵 | Smart Install Script Handles Dependency Installation Without Worker Restart | ~430 |
| #26052 | 7:13 PM | 🔵 | Examined Minified Context Hook Source Code | ~285 |
| #25686 | 4:22 PM | 🔵 | SessionRoutes tracks missing last_user_message errors at two different locations | ~456 |
| #25685 | " | 🔵 | Progress summary generation system uses Claude to create XML-formatted session checkpoints | ~461 |
### Dec 16, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27554 | 4:48 PM | ✅ | Project built successfully with version 7.3.1 | ~306 |
### Dec 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28924 | 7:29 PM | 🔵 | Plugin MCP Server Uses Bun Runtime | ~283 |
### Dec 26, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32983 | 11:04 PM | 🟣 | Complete build and deployment pipeline executed | ~260 |
### Jan 4, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36873 | 1:55 AM | 🔵 | Smart-Install Script Analyzed for Homebrew Path Implementation | ~466 |
### Jan 7, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #38169 | 7:21 PM | 🔵 | SessionStart Hook Output Pattern Investigation Complete | ~464 |
| #38168 | " | 🔵 | Smart-Install Script Outputs All Status Messages to stderr via console.error | ~438 |
| #38167 | 7:20 PM | 🔵 | Context-Hook Uses stdin Event Handlers for Non-TTY JSON Output Mode | ~396 |
| #38166 | " | 🔵 | User-Message-Hook Executes at Top Level with Await and Exit Code 1 | ~423 |
| #38165 | " | 🔵 | Context-Hook Has Minimal Console Output in Compiled Code | ~333 |
| #38164 | " | 🔵 | Worker-Service Script is Large 1575-Line Multi-Purpose Service Manager | ~352 |
| #38163 | 7:19 PM | 🔵 | Worker-Service Script Uses console.log and console.error for Output | ~385 |
| #38162 | " | 🔵 | Smart-Install Script Auto-Installs Bun and UV Dependencies | ~495 |
| #38161 | " | 🔵 | User-Message-Hook Outputs to stderr and Exits with Code 1 | ~211 |
| #38160 | 7:18 PM | 🔵 | Context-Hook Returns JSON with hookSpecificOutput Structure | ~470 |
</claude-mem-context>

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,60 +0,0 @@
<claude-mem-context>
# Recent Activity
### Nov 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #3910 | 8:28 PM | ✅ | Refined stats counter visual design | ~343 |
| #3909 | " | 🟣 | Added clarifying descriptions to settings UI | ~335 |
| #3812 | 6:08 PM | 🟣 | Enhanced card typography and centered content layout | ~358 |
### Nov 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #5133 | 7:29 PM | ✅ | Version 5.2.3 Released with Build Process | ~487 |
| #4916 | 1:49 PM | ⚖️ | Claude Mem Pro Premium Offering Implementation Plan Finalized | ~946 |
| #4902 | 1:35 PM | 🟣 | Claude Mem Pro Premium Project Initialization | ~679 |
| #4901 | 1:31 PM | ⚖️ | Premium claude-mem Project Architecture and Planning | ~797 |
### Dec 1, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #18480 | 3:39 PM | ✅ | Successfully Rebuilt Plugin After Merge Conflict Resolution | ~294 |
### Dec 4, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20052 | 3:23 PM | ✅ | Built and deployed version 6.5.2 to marketplace | ~321 |
### Dec 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22557 | 1:08 AM | ✅ | Build completed for version 7.0.3 | ~342 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23444 | 2:25 PM | 🟣 | Build Pipeline Execution Successful | ~293 |
### Dec 16, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27554 | 4:48 PM | ✅ | Project built successfully with version 7.3.1 | ~306 |
### Dec 26, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32983 | 11:04 PM | 🟣 | Complete build and deployment pipeline executed | ~260 |
| #32965 | 10:53 PM | 🔵 | Found plugin/ui/viewer.html - potential styling source | ~201 |
| #32966 | " | 🔵 | viewer.html contains modal CSS including modal-header and modal-body | ~218 |
| #32967 | " | 🔵 | ContextSettingsModal.tsx uses CSS classes defined in viewer.html | ~218 |
| #32968 | " | 🔵 | Need to add CSS for footer to viewer.html | ~223 |
</claude-mem-context>

File diff suppressed because one or more lines are too long

View File

@@ -1130,6 +1130,19 @@
color: var(--color-text-muted);
}
/* Small muted badge marking a card whose observations were adopted into a parent project */
.card-merged-badge {
  background: var(--color-type-badge-bg);
  border: 1px solid var(--color-border-primary);
  border-radius: 3px;
  color: var(--color-text-muted);
  font-size: 9px;
  font-weight: 500;
  letter-spacing: 0.02em;
  opacity: 0.85;
  padding: 1px 6px;
}
.summary-card {
border-color: var(--color-border-summary);
background: var(--color-bg-summary);

View File

@@ -1,22 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 19, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30153 | 8:24 PM | 🔵 | Context Builder Creates Formatted Email Investigation Context | ~384 |
| #30152 | " | 🔵 | Ragtime Current Implementation: Manual Context Injection Via buildContextForEmail | ~357 |
### Dec 20, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30437 | 4:23 PM | 🔵 | Ragtime processes emails through Claude Agent SDK with claude-mem plugin | ~397 |
| #30436 | 4:22 PM | 🔵 | Ragtime displays worker URL on localhost:37777 | ~219 |
| #30340 | 3:42 PM | 🔄 | Relocated simple ragtime.ts to ragtime folder | ~219 |
| #30339 | 3:41 PM | ✅ | Deleted overengineered ragtime.ts script | ~201 |
| #30336 | 3:40 PM | 🔵 | Ragtime Email Corpus Processor Architecture | ~495 |
| #30335 | " | 🔵 | Ragtime Uses Separate Noncommercial License | ~259 |
| #30252 | 3:17 PM | 🟣 | Multi-Format Email Corpus Loader | ~436 |
</claude-mem-context>

View File

@@ -1 +0,0 @@
Never read built source files in this directory. These are compiled outputs — read the source files in `src/` instead.

View File

@@ -1,137 +0,0 @@
# Error Handling Anti-Pattern Rules
This folder contains `detect-error-handling-antipatterns.ts` - run it before committing any error handling changes.
## The Try-Catch Problem That Cost 10 Hours
A single overly-broad try-catch block wasted 10 hours of debugging time by silently swallowing errors.
**This pattern is BANNED.**
## BEFORE You Write Any Try-Catch
**RUN THIS TEST FIRST:**
```bash
bun run scripts/anti-pattern-test/detect-error-handling-antipatterns.ts
```
**You MUST answer these 5 questions to the user BEFORE writing try-catch:**
1. **What SPECIFIC error am I catching?** (Name the error type: `FileNotFoundError`, `NetworkTimeout`, `ValidationError`)
2. **Show documentation proving this error can occur** (Link to docs or show me the source code)
3. **Why can't this error be prevented?** (If it can be prevented, prevent it instead)
4. **What will the catch block DO?** (Must include logging + either rethrow OR explicit fallback)
5. **Why shouldn't this error propagate?** (Justify swallowing it rather than letting caller handle)
**If you cannot answer ALL 5 questions with specifics, DO NOT write the try-catch.**
## FORBIDDEN PATTERNS (Zero Tolerance)
### CRITICAL - Never Allowed
```typescript
// FORBIDDEN: Empty catch
try {
doSomething();
} catch {}
// FORBIDDEN: Catch without logging
try {
doSomething();
} catch (error) {
return null; // Silent failure!
}
// FORBIDDEN: Large try blocks (>10 lines)
try {
// 50 lines of code
// Multiple operations
// Different failure modes
} catch (error) {
logger.error('Something failed'); // Which thing?!
}
// FORBIDDEN: Promise empty catch
promise.catch(() => {}); // Error disappears into void
// FORBIDDEN: Try-catch to fix TypeScript errors
try {
// @ts-ignore
const value = response.propertyThatDoesntExist;
} catch {}
```
### ALLOWED Patterns
```typescript
// GOOD: Specific, logged, explicit handling
try {
await fetch(url);
} catch (error) {
if (error instanceof NetworkError) {
logger.warn('SYNC', 'Network request failed, will retry', { url }, error);
return null; // Explicit: null means "fetch failed"
}
throw error; // Unexpected errors propagate
}
// GOOD: Minimal scope, clear recovery
try {
JSON.parse(data);
} catch (error) {
logger.error('CONFIG', 'Corrupt settings file, using defaults', {}, error);
return DEFAULT_SETTINGS;
}
// GOOD: Fire-and-forget with logging
backgroundTask()
.catch(error => logger.warn('BACKGROUND', 'Task failed', {}, error));
// GOOD: Ignored anti-pattern for genuine hot paths only
try {
checkIfProcessAlive(pid);
} catch (error) {
// [ANTI-PATTERN IGNORED]: Tight loop checking 100s of PIDs during cleanup
return false;
}
```
## Ignoring Anti-Patterns (Rare)
**Only for genuine hot paths** where logging would cause performance problems:
```typescript
// [ANTI-PATTERN IGNORED]: Reason why logging is impossible
```
**Rules:**
- **Hot paths only** - code in tight loops called 1000s of times
- If you can add logging, ADD LOGGING - don't ignore
- Valid examples:
- "Tight loop checking process exit status during cleanup"
- "Health check polling every 100ms"
- Invalid examples:
- "Expected JSON parse failures" - Just add logger.debug
- "Common fallback path" - Just add logger.debug
## The Meta-Rule
**UNCERTAINTY TRIGGERS RESEARCH, NOT TRY-CATCH**
When you're unsure if a property exists or a method signature is correct:
1. **READ** the source code or documentation
2. **VERIFY** with the Read tool
3. **USE** TypeScript types to catch errors at compile time
4. **WRITE** code you KNOW is correct
Never use try-catch to paper over uncertainty. That wastes hours of debugging time later.
## Critical Path Protection
These files are **NEVER** allowed to have catch-and-continue:
- `SDKAgent.ts` - Errors must propagate, not hide
- `GeminiAgent.ts` - Must fail loud, not silent
- `OpenRouterAgent.ts` - Must fail loud, not silent
- `SessionStore.ts` - Database errors must propagate
- `worker-service.ts` - Core service errors must be visible
On critical paths, prefer **NO TRY-CATCH** and let errors propagate naturally.

185
scripts/cwd-remap.ts Normal file
View File

@@ -0,0 +1,185 @@
#!/usr/bin/env bun
/**
* cwd-remap — Rewrite sdk_sessions.project (+ observations.project,
* session_summaries.project) using the cwd captured per-message in
* pending_messages.cwd as the single source of truth.
*
* For each distinct cwd:
* - git -C <cwd> rev-parse --git-dir AND --git-common-dir
* If they differ → worktree. parent = basename(dirname(common-dir)),
* project = parent/<basename(cwd)>.
* Else → project = basename(cwd).
* - If the directory doesn't exist, or git errors, skip that cwd.
*
* Usage:
* bun scripts/cwd-remap.ts # dry-run (default)
* bun scripts/cwd-remap.ts --apply # write updates in a single transaction
*/
import { Database } from 'bun:sqlite';
import { homedir } from 'os';
import { join, basename, dirname } from 'path';
import { existsSync, copyFileSync } from 'fs';
import { spawnSync } from 'child_process';
// Location of the claude-mem SQLite database under the user's home directory.
const DB_PATH = join(homedir(), '.claude-mem', 'claude-mem.db');
// Without --apply the script is a pure dry run: it reports but never writes.
const APPLY = process.argv.includes('--apply');
// Outcome of classifying one cwd via git (see classify()):
//   main     — regular checkout; project is the repo leaf name
//   worktree — linked worktree; project is "<parent>/<leaf>"
//   skip     — unusable cwd (missing directory, not a git repo, git error)
type Classification =
  | { kind: 'main'; project: string }
  | { kind: 'worktree'; project: string; parent: string }
  | { kind: 'skip'; reason: string };
/**
 * Run a git command in `cwd` (via `git -C <cwd> …`) and return its trimmed
 * stdout, or null on any failure.
 *
 * "Not a git repository" failures are expected while classifying arbitrary
 * cwds and are intentionally not logged; every other failure — including a
 * spawn failure when the git binary itself is missing — is reported to stderr
 * so skipped cwds are diagnosable.
 */
function git(cwd: string, args: string[]): string | null {
  const r = spawnSync('git', ['-C', cwd, ...args], { encoding: 'utf8' });
  // spawnSync itself failed (e.g. git not installed): status is null and
  // stderr is empty, so without this branch the failure would be silent.
  if (r.error) {
    console.error(`git ${args.join(' ')} could not be spawned in ${cwd}: ${r.error.message}`);
    return null;
  }
  if (r.status !== 0) {
    const stderr = (r.stderr ?? '').trim();
    if (stderr && !/not a git repository/i.test(stderr)) {
      console.error(`git ${args.join(' ')} failed in ${cwd}: ${stderr}`);
    }
    return null;
  }
  return r.stdout.trim();
}
/**
 * Determine the project name for a working directory.
 *
 * A cwd inside a regular checkout maps to the repo's leaf name; a cwd inside
 * a linked worktree maps to `<parent-repo>/<worktree-leaf>`. Directories that
 * are missing, not git repos, or that git cannot describe are skipped.
 */
function classify(cwd: string): Classification {
  if (!existsSync(cwd)) return { kind: 'skip', reason: 'cwd-missing' };

  const absoluteGitDir = git(cwd, ['rev-parse', '--absolute-git-dir']);
  if (absoluteGitDir === null) return { kind: 'skip', reason: 'not-a-git-repo' };

  const sharedGitDir = git(cwd, ['rev-parse', '--path-format=absolute', '--git-common-dir']);
  if (sharedGitDir === null) return { kind: 'skip', reason: 'no-common-dir' };

  // Resolve the worktree root rather than trusting cwd — a session may have
  // been running in a subdirectory.
  const worktreeRoot = git(cwd, ['rev-parse', '--show-toplevel']);
  if (worktreeRoot === null) return { kind: 'skip', reason: 'no-toplevel' };

  const leaf = basename(worktreeRoot);

  // Identical git-dir and common-dir means this is the main checkout.
  if (absoluteGitDir === sharedGitDir) return { kind: 'main', project: leaf };

  // Linked worktree: the common dir is <parent-repo>/.git for a normal repo
  // (dirname strips the trailing /.git) or <parent>.git for a bare repo
  // (strip the .git suffix instead).
  const parentRepoPath = sharedGitDir.endsWith('/.git')
    ? dirname(sharedGitDir)
    : sharedGitDir.replace(/\.git$/, '');
  const parent = basename(parentRepoPath);

  return { kind: 'worktree', parent, project: `${parent}/${leaf}` };
}
/**
 * Entry point.
 *
 * 1. Classify every distinct cwd recorded in pending_messages via git.
 * 2. Pick each session's target project from its EARLIEST recorded cwd.
 * 3. Print a dry-run report; with --apply, back up the DB first and rewrite
 *    sdk_sessions / observations / session_summaries in one transaction.
 */
function main() {
  if (!existsSync(DB_PATH)) {
    console.error(`DB not found at ${DB_PATH}`);
    process.exit(1);
  }
  if (APPLY) {
    // Full-file backup before any writes so the run is trivially reversible.
    const backup = `${DB_PATH}.bak-cwd-remap-${Date.now()}`;
    copyFileSync(DB_PATH, backup);
    console.log(`Backup created: ${backup}`);
  }
  const db = new Database(DB_PATH);
  const cwdRows = db.prepare(`
    SELECT cwd, COUNT(*) AS messages
    FROM pending_messages
    WHERE cwd IS NOT NULL AND cwd != ''
    GROUP BY cwd
  `).all() as Array<{ cwd: string; messages: number }>;
  console.log(`Classifying ${cwdRows.length} distinct cwds via git...`);
  const byCwd = new Map<string, Classification>();
  const counts = { main: 0, worktree: 0, skip: 0 };
  for (const { cwd } of cwdRows) {
    const c = classify(cwd);
    byCwd.set(cwd, c);
    counts[c.kind]++;
  }
  console.log(`  main=${counts.main} worktree=${counts.worktree} skip=${counts.skip}`);
  // Surface skipped cwds so the user can see what was left untouched.
  const skipped = [...byCwd.entries()].filter(([, c]) => c.kind === 'skip') as Array<[string, Extract<Classification, { kind: 'skip' }>]>;
  if (skipped.length) {
    console.log('\nSkipped cwds:');
    for (const [cwd, c] of skipped) console.log(`  [${c.reason}] ${cwd}`);
  }
  // Per-session target: use the EARLIEST pending_messages.cwd for each session.
  // (Dominant-cwd is wrong: claude-mem's own hooks run from nested dirs like
  // `.context/claude-mem/` and dominate the count, misattributing the session.)
  const sessionRows = db.prepare(`
    SELECT s.id AS session_id, s.memory_session_id, s.content_session_id, s.project AS old_project, p.cwd
    FROM sdk_sessions s
    JOIN pending_messages p ON p.content_session_id = s.content_session_id
    WHERE p.cwd IS NOT NULL AND p.cwd != ''
      AND p.id = (
        SELECT MIN(p2.id) FROM pending_messages p2
        WHERE p2.content_session_id = s.content_session_id
          AND p2.cwd IS NOT NULL AND p2.cwd != ''
      )
  `).all() as Array<{ session_id: number; memory_session_id: string | null; content_session_id: string; old_project: string; cwd: string }>;
  type Target = { sessionId: number; memorySessionId: string | null; contentSessionId: string; oldProject: string; newProject: string; cwd: string };
  const perSession = new Map<number, Target>();
  for (const r of sessionRows) {
    const c = byCwd.get(r.cwd);
    if (!c || c.kind === 'skip') continue;
    perSession.set(r.session_id, {
      sessionId: r.session_id,
      memorySessionId: r.memory_session_id,
      contentSessionId: r.content_session_id,
      oldProject: r.old_project,
      newProject: c.project,
      cwd: r.cwd,
    });
  }
  const targets = [...perSession.values()].filter(t => t.oldProject !== t.newProject);
  console.log(`\nSessions linked to a classified cwd: ${perSession.size}`);
  console.log(`Sessions whose project would change: ${targets.length}`);
  // Aggregate old → new remappings for the report. FIX: the key previously
  // concatenated both names with no separator ("oldnew"), making the "Top
  // mappings" table unreadable and ambiguous when names share prefixes.
  const summary = new Map<string, number>();
  for (const t of targets) {
    const key = `${t.oldProject} → ${t.newProject}`;
    summary.set(key, (summary.get(key) ?? 0) + 1);
  }
  const rows = [...summary.entries()]
    .map(([mapping, n]) => ({ mapping, sessions: n }))
    .sort((a, b) => b.sessions - a.sessions);
  console.log('\nTop mappings:');
  console.table(rows.slice(0, 30));
  if (rows.length > 30) console.log(`  …and ${rows.length - 30} more mappings`);
  if (!APPLY) {
    console.log('\nDry-run only. Re-run with --apply to perform UPDATEs.');
    db.close();
    return;
  }
  const updSession = db.prepare('UPDATE sdk_sessions SET project = ? WHERE id = ?');
  const updObs = db.prepare('UPDATE observations SET project = ? WHERE memory_session_id = ?');
  const updSum = db.prepare('UPDATE session_summaries SET project = ? WHERE memory_session_id = ?');
  let sessionN = 0, obsN = 0, sumN = 0;
  // All writes in a single transaction: either every session remaps or none does.
  const tx = db.transaction(() => {
    for (const t of targets) {
      sessionN += updSession.run(t.newProject, t.sessionId).changes;
      if (t.memorySessionId) {
        obsN += updObs.run(t.newProject, t.memorySessionId).changes;
        sumN += updSum.run(t.newProject, t.memorySessionId).changes;
      }
    }
  });
  tx();
  console.log(`\nApplied. sessions=${sessionN} observations=${obsN} session_summaries=${sumN}`);
  db.close();
}
main();

View File

@@ -1,3 +0,0 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -1,34 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23825 | 11:12 PM | ✅ | Worker Port Set to 38888 for Migration Phase | ~283 |
| #23824 | " | 🔵 | Worker Port Sourced from getWorkerPort() Utility | ~247 |
| #23816 | 10:52 PM | 🟣 | Worker CLI Command Interface Created | ~325 |
### Dec 11, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24060 | 2:58 PM | 🔴 | Worker CLI Start Command Exit Behavior Fixed | ~232 |
### Dec 12, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24359 | 7:00 PM | 🟣 | Phase 1 Critical Code Fixes Completed via Agent Task | ~441 |
| #24358 | 6:59 PM | ✅ | Completed Phase 1 Code Fixes for better-sqlite3 Migration | ~385 |
| #24348 | 6:57 PM | 🔴 | Added Defensive Break Statement to worker-cli.ts Restart Case | ~269 |
| #24345 | " | 🔵 | worker-cli.ts Missing Break Statement in Switch Case | ~318 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26766 | 11:30 PM | ⚖️ | Root Cause Identified: Missing Post-Install Worker Restart Trigger in Plugin Update Flow | ~604 |
| #26722 | 11:23 PM | 🔵 | Worker CLI TypeScript Source Shows Simple ProcessManager Delegation | ~394 |
| #26721 | " | 🔵 | Worker CLI Source Code Shows Simple Restart Logic Without Delays | ~425 |
</claude-mem-context>

View File

@@ -1,3 +0,0 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -1,3 +0,0 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -101,6 +101,44 @@ export function runStatusCommand(): void {
spawnBunWorkerCommand('status');
}
/**
 * Stamp merged-worktree provenance on observations/summaries and keep Chroma
 * metadata in lockstep. Delegates to the worker-service.cjs `adopt` subcommand
 * so adoption runs in Bun (needed for bun:sqlite) while preserving the user's
 * working directory — that's what the engine uses to locate the parent repo.
 *
 * Exits with the child's exit code; exits 1 if the child cannot be spawned
 * or is terminated by a signal.
 */
export function runAdoptCommand(extraArgs: string[] = []): void {
  ensureInstalledOrExit();
  const bunPath = resolveBunOrExit();
  const workerScript = workerServiceScriptPath();
  if (!existsSync(workerScript)) {
    console.error(pc.red(`Worker script not found at: ${workerScript}`));
    console.error('The installation may be corrupted. Try: npx claude-mem install');
    process.exit(1);
  }
  // Pass user's cwd explicitly via --cwd because we override cwd on spawn to
  // marketplaceDirectory() (required for the worker's own file resolution).
  const userCwd = process.cwd();
  const args = [workerScript, 'adopt', '--cwd', userCwd, ...extraArgs];
  const child = spawn(bunPath, args, {
    stdio: 'inherit',
    cwd: marketplaceDirectory(),
    env: process.env,
  });
  child.on('error', (error) => {
    console.error(pc.red(`Failed to start Bun: ${error.message}`));
    process.exit(1);
  });
  child.on('close', (exitCode, signal) => {
    // FIX: `close` reports a null exit code when the child was killed by a
    // signal; mapping that to 0 would falsely report success to the shell.
    if (exitCode === null) {
      console.error(pc.red(`Adopt worker terminated by signal ${signal ?? 'unknown'}`));
      process.exit(1);
    }
    process.exit(exitCode);
  });
}
/**
* Search the worker API at `GET /api/search?query=<query>`.
*/

View File

@@ -52,6 +52,7 @@ ${pc.bold('Runtime Commands')} (requires Bun, delegates to installed plugin):
${pc.cyan('npx claude-mem restart')} Restart worker service
${pc.cyan('npx claude-mem status')} Show worker status
${pc.cyan('npx claude-mem search <query>')} Search observations
${pc.cyan('npx claude-mem adopt [--dry-run] [--branch <name>]')} Stamp merged worktrees into parent project
${pc.cyan('npx claude-mem transcript watch')} Start transcript watcher
${pc.bold('IDE Identifiers')}:
@@ -145,6 +146,13 @@ async function main(): Promise<void> {
break;
}
// -- Adopt merged worktrees -------------------------------------------
case 'adopt': {
const { runAdoptCommand } = await import('./commands/runtime.js');
runAdoptCommand(args.slice(1));
break;
}
// -- Transcript --------------------------------------------------------
case 'transcript': {
const subCommand = args[1]?.toLowerCase();

View File

@@ -1,61 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23832 | 11:15 PM | 🔵 | Current worker-service.ts Lacks Admin Endpoints | ~393 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26740 | 11:26 PM | 🔵 | Worker Service Refactored to Orchestrator with Background Initialization | ~421 |
| #26739 | 11:25 PM | 🔵 | Worker Service Architecture Uses Domain Services and Background Initialization | ~438 |
| #26255 | 8:31 PM | 🔵 | Context Generator Timeline Rendering Logic Details File Grouping Implementation | ~397 |
| #26251 | 8:30 PM | 🔵 | Worker Service Orchestrates Domain Services and Route Handlers | ~292 |
| #26246 | 8:29 PM | 🔵 | Context Generator Implements Rich Date-Grouped Timeline Format | ~468 |
### Dec 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28548 | 4:49 PM | 🔵 | Worker service cleanup method uses Unix-specific process management | ~323 |
| #28446 | 4:23 PM | 🔵 | Worker Service Refactored to Orchestrator Pattern | ~529 |
### Dec 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29340 | 3:11 PM | ✅ | Constructor Initialization Comment Updated | ~267 |
| #29339 | " | ✅ | Class Member Comment Updated in WorkerService | ~267 |
| #29338 | " | ✅ | Service Import Comment Updated | ~222 |
| #29337 | 3:10 PM | ✅ | Terminology Update in Worker Service Documentation | ~268 |
| #29239 | 12:11 AM | 🔵 | Worker Service Refactored as Domain-Driven Orchestrator | ~477 |
### Dec 20, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30808 | 6:05 PM | 🔴 | Fixed worker readiness check to fail on initialization errors | ~315 |
| #30800 | 6:03 PM | 🔵 | Dual Error Logging in Background Initialization | ~367 |
| #30799 | " | 🔵 | Background Initialization Invocation Pattern | ~365 |
| #30797 | " | 🔵 | Background Initialization Sequence and Error Handler Confirmed | ~450 |
| #30795 | 6:02 PM | 🔵 | Readiness Endpoint Returns 503 During Initialization | ~397 |
| #30793 | " | 🔵 | Dual Initialization State Tracking Pattern | ~388 |
| #30791 | " | 🔵 | Worker Service Constructor Defers SearchRoutes Initialization | ~387 |
| #30790 | " | 🔵 | Initialization Promise Resolver Pattern Located | ~321 |
| #30788 | " | 🔵 | Worker Service Initialization Resolves Promise Despite Errors | ~388 |
### Jan 1, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #35654 | 11:29 PM | ✅ | Added APPROVED OVERRIDE annotation for instruction loading HTTP route error handler | ~339 |
| #35651 | 11:28 PM | ✅ | Added APPROVED OVERRIDE annotation for shutdown error handler with process.exit | ~354 |
| #35649 | " | ✅ | Added APPROVED OVERRIDE annotation for readiness check retry loop error handling | ~374 |
| #35647 | " | ✅ | Added APPROVED OVERRIDE annotation for port availability probe error handling | ~327 |
| #35646 | " | ✅ | Added APPROVED OVERRIDE annotation for Cursor context file update error handling | ~342 |
| #35643 | 11:27 PM | ✅ | Added APPROVED OVERRIDE annotation for PID file cleanup error handling | ~320 |
</claude-mem-context>

View File

@@ -130,11 +130,14 @@ export async function generateContext(
const config = loadContextConfig();
const cwd = input?.cwd ?? process.cwd();
const context = getProjectContext(cwd);
const project = context.primary;
const platformSource = input?.platform_source;
// Use provided projects array (for worktree support) or fall back to all known projects
const projects = input?.projects ?? context.allProjects;
// Single source of truth: explicit projects override cwd-derived context.
// `project` (used for header + single-project query) is always the last entry
// of `projects` so the empty-state header and the query target stay in sync
// when a caller passes `projects` without a matching cwd (e.g. worker route).
const projects = input?.projects?.length ? input.projects : context.allProjects;
const project = projects[projects.length - 1] ?? context.primary;
// Full mode: fetch all observations but keep normal rendering (level 1 summaries)
if (input?.full) {

View File

@@ -52,7 +52,7 @@ export function queryObservations(
o.created_at_epoch
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
WHERE o.project = ?
WHERE (o.project = ? OR o.merged_into_project = ?)
AND type IN (${typePlaceholders})
AND EXISTS (
SELECT 1 FROM json_each(o.concepts)
@@ -62,6 +62,7 @@ export function queryObservations(
ORDER BY o.created_at_epoch DESC
LIMIT ?
`).all(
project,
project,
...typeArray,
...conceptArray,
@@ -93,12 +94,12 @@ export function querySummaries(
ss.created_at_epoch
FROM session_summaries ss
LEFT JOIN sdk_sessions s ON ss.memory_session_id = s.memory_session_id
WHERE ss.project = ?
WHERE (ss.project = ? OR ss.merged_into_project = ?)
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY ss.created_at_epoch DESC
LIMIT ?
`).all(
...[project, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD]
...[project, project, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD]
) as SessionSummary[];
}
@@ -141,7 +142,8 @@ export function queryObservationsMulti(
o.project
FROM observations o
LEFT JOIN sdk_sessions s ON o.memory_session_id = s.memory_session_id
WHERE o.project IN (${projectPlaceholders})
WHERE (o.project IN (${projectPlaceholders})
OR o.merged_into_project IN (${projectPlaceholders}))
AND type IN (${typePlaceholders})
AND EXISTS (
SELECT 1 FROM json_each(o.concepts)
@@ -151,6 +153,7 @@ export function queryObservationsMulti(
ORDER BY o.created_at_epoch DESC
LIMIT ?
`).all(
...projects,
...projects,
...typeArray,
...conceptArray,
@@ -189,11 +192,12 @@ export function querySummariesMulti(
ss.project
FROM session_summaries ss
LEFT JOIN sdk_sessions s ON ss.memory_session_id = s.memory_session_id
WHERE ss.project IN (${projectPlaceholders})
WHERE (ss.project IN (${projectPlaceholders})
OR ss.merged_into_project IN (${projectPlaceholders}))
${platformSource ? "AND COALESCE(s.platform_source, 'claude') = ?" : ''}
ORDER BY ss.created_at_epoch DESC
LIMIT ?
`).all(...projects, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
`).all(...projects, ...projects, ...(platformSource ? [platformSource] : []), config.sessionCount + SUMMARY_LOOKAHEAD) as SessionSummary[];
}
/**

View File

@@ -1,12 +0,0 @@
<claude-mem-context>
# Recent Activity
<!-- This section is auto-generated by claude-mem. Edit content outside the tags. -->
### Jan 25, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #41877 | 12:09 PM | ⚖️ | Deploy Existing Consumer Preview Without Creating New Packages | ~361 |
| #41873 | 12:03 PM | 🔵 | Claude-mem mode configuration system types documented | ~504 |
</claude-mem-context>

View File

@@ -1,10 +0,0 @@
<claude-mem-context>
# Recent Activity
### Jan 4, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36864 | 1:52 AM | 🔵 | ProcessManager Module Imports Reviewed | ~245 |
| #36860 | 1:50 AM | 🔵 | ProcessManager Source Code Reviewed for WMIC Implementation | ~608 |
</claude-mem-context>

View File

@@ -10,8 +10,8 @@
import path from 'path';
import { homedir } from 'os';
import { existsSync, writeFileSync, readFileSync, unlinkSync, mkdirSync, rmSync, statSync, utimesSync } from 'fs';
import { exec, execSync, spawn } from 'child_process';
import { existsSync, writeFileSync, readFileSync, unlinkSync, mkdirSync, rmSync, statSync, utimesSync, copyFileSync } from 'fs';
import { exec, execSync, spawn, spawnSync } from 'child_process';
import { promisify } from 'util';
import { logger } from '../../utils/logger.js';
import { HOOK_TIMEOUTS } from '../../shared/hook-constants.js';
@@ -677,6 +677,161 @@ export function runOneTimeChromaMigration(dataDirectory?: string): void {
logger.info('SYSTEM', 'Chroma migration marker written', { markerPath });
}
// Marker file name — its presence in the data dir means the one-time remap
// has already completed.
const CWD_REMAP_MARKER_FILENAME = '.cwd-remap-applied-v1';

/** Outcome of classifying a recorded cwd for the project remap. */
type CwdClassification =
  | { kind: 'main'; project: string }
  | { kind: 'worktree'; project: string }
  | { kind: 'skip' };

/**
 * Run a git query inside `cwd` and return its trimmed stdout, or null when
 * git exits non-zero, times out, or cannot be spawned at all.
 */
function gitQuery(cwd: string, args: string[]): string | null {
  const result = spawnSync('git', ['-C', cwd, ...args], {
    encoding: 'utf8',
    timeout: 5000
  });
  // spawnSync reports a null status when the process failed to start or was
  // killed by the timeout — both fall into the null branch here.
  return result.status === 0 ? (result.stdout ?? '').trim() : null;
}
/**
 * Classify a recorded cwd as the main checkout, a linked worktree, or
 * something to skip (path missing, or not inside a git repo).
 * Main checkout → bare leaf name; worktree → "parent/leaf" composite.
 */
function classifyCwdForRemap(cwd: string): CwdClassification {
  if (!existsSync(cwd)) return { kind: 'skip' };

  const gitDir = gitQuery(cwd, ['rev-parse', '--absolute-git-dir']);
  const commonDir = gitDir
    ? gitQuery(cwd, ['rev-parse', '--path-format=absolute', '--git-common-dir'])
    : null;
  const toplevel = commonDir ? gitQuery(cwd, ['rev-parse', '--show-toplevel']) : null;
  if (!gitDir || !commonDir || !toplevel) return { kind: 'skip' };

  const leaf = path.basename(toplevel);
  // Main checkout: its own git-dir IS the common dir shared by all worktrees.
  if (gitDir === commonDir) {
    return { kind: 'main', project: leaf };
  }
  // Linked worktree: derive the parent repo root from the common dir.
  // Normal repos report "<repo>/.git"; bare repos just drop a trailing ".git".
  // NOTE(review): the '/.git' suffix check assumes forward-slash output from
  // git — confirm behavior on Windows if that platform is supported here.
  const parentRoot = commonDir.endsWith('/.git')
    ? path.dirname(commonDir)
    : commonDir.replace(/\.git$/, '');
  return { kind: 'worktree', project: `${path.basename(parentRoot)}/${leaf}` };
}
/**
 * One-time remap of sdk_sessions.project (+ observations.project,
 * session_summaries.project) using the cwd captured in pending_messages.cwd
 * as the source of truth. Required because pre-worktree builds stored bare
 * project names that collide across parent/worktree checkouts.
 *
 * Backs up the DB before writes. Idempotent via marker file. Skips silently
 * if the DB or pending_messages table doesn't exist yet (fresh install).
 *
 * Ordering is deliberate: readonly probe → backup → writable handle →
 * single transaction → marker write. The marker is only written on success,
 * so a failed run is retried on the next startup (see catch below).
 *
 * @param dataDirectory - Override for DATA_DIR (used in tests)
 */
export function runOneTimeCwdRemap(dataDirectory?: string): void {
  const effectiveDataDir = dataDirectory ?? DATA_DIR;
  const markerPath = path.join(effectiveDataDir, CWD_REMAP_MARKER_FILENAME);
  const dbPath = path.join(effectiveDataDir, 'claude-mem.db');
  // Marker present → the remap already completed on a previous startup.
  if (existsSync(markerPath)) {
    logger.debug('SYSTEM', 'cwd-remap marker exists, skipping');
    return;
  }
  // Fresh install: no DB means nothing to remap — stamp the marker so the
  // scan never runs again.
  if (!existsSync(dbPath)) {
    mkdirSync(effectiveDataDir, { recursive: true });
    writeFileSync(markerPath, new Date().toISOString());
    logger.debug('SYSTEM', 'No DB present, cwd-remap marker written without work', { dbPath });
    return;
  }
  logger.warn('SYSTEM', 'Running one-time cwd-based project remap', { dbPath });
  let db: import('bun:sqlite').Database | null = null;
  try {
    const { Database } = require('bun:sqlite') as typeof import('bun:sqlite');
    // Readonly probe first: pending_messages may not exist on older schemas,
    // and the probe handle is closed before the writable one opens.
    const probe = new Database(dbPath, { readonly: true });
    const hasPending = probe.prepare(
      "SELECT name FROM sqlite_master WHERE type='table' AND name='pending_messages'"
    ).get() as { name: string } | undefined;
    probe.close();
    if (!hasPending) {
      mkdirSync(effectiveDataDir, { recursive: true });
      writeFileSync(markerPath, new Date().toISOString());
      logger.info('SYSTEM', 'pending_messages table not present, cwd-remap skipped');
      return;
    }
    // Safety net: copy the whole DB aside before any writes happen.
    const backup = `${dbPath}.bak-cwd-remap-${Date.now()}`;
    copyFileSync(dbPath, backup);
    logger.info('SYSTEM', 'DB backed up before cwd-remap', { backup });
    db = new Database(dbPath);
    // Distinct cwds recorded by hooks; each is classified exactly once
    // (classification shells out to git, so deduping matters).
    const cwdRows = db.prepare(`
      SELECT cwd FROM pending_messages
      WHERE cwd IS NOT NULL AND cwd != ''
      GROUP BY cwd
    `).all() as Array<{ cwd: string }>;
    const byCwd = new Map<string, CwdClassification>();
    for (const { cwd } of cwdRows) byCwd.set(cwd, classifyCwdForRemap(cwd));
    // One row per session, taking the EARLIEST pending message that carries a
    // cwd as that session's authoritative working directory.
    const sessionRows = db.prepare(`
      SELECT s.id AS session_id, s.memory_session_id, s.project AS old_project, p.cwd
      FROM sdk_sessions s
      JOIN pending_messages p ON p.content_session_id = s.content_session_id
      WHERE p.cwd IS NOT NULL AND p.cwd != ''
      AND p.id = (
        SELECT MIN(p2.id) FROM pending_messages p2
        WHERE p2.content_session_id = s.content_session_id
        AND p2.cwd IS NOT NULL AND p2.cwd != ''
      )
    `).all() as Array<{ session_id: number; memory_session_id: string | null; old_project: string; cwd: string }>;
    type Target = { sessionId: number; memorySessionId: string | null; newProject: string };
    const targets: Target[] = [];
    for (const r of sessionRows) {
      const c = byCwd.get(r.cwd);
      // Skip cwds that could not be classified, and sessions whose project
      // is already correct.
      if (!c || c.kind === 'skip') continue;
      if (r.old_project === c.project) continue;
      targets.push({ sessionId: r.session_id, memorySessionId: r.memory_session_id, newProject: c.project });
    }
    if (targets.length === 0) {
      logger.info('SYSTEM', 'cwd-remap: no sessions need updating');
    } else {
      const updSession = db.prepare('UPDATE sdk_sessions SET project = ? WHERE id = ?');
      // Observations/summaries are keyed by memory_session_id, not the
      // numeric session id.
      const updObs = db.prepare('UPDATE observations SET project = ? WHERE memory_session_id = ?');
      const updSum = db.prepare('UPDATE session_summaries SET project = ? WHERE memory_session_id = ?');
      let sessionN = 0, obsN = 0, sumN = 0;
      // Single transaction: either every targeted session (and its dependent
      // rows) is remapped, or nothing changes.
      const tx = db.transaction(() => {
        for (const t of targets) {
          sessionN += updSession.run(t.newProject, t.sessionId).changes;
          if (t.memorySessionId) {
            obsN += updObs.run(t.newProject, t.memorySessionId).changes;
            sumN += updSum.run(t.newProject, t.memorySessionId).changes;
          }
        }
      });
      tx();
      logger.info('SYSTEM', 'cwd-remap applied', { sessions: sessionN, observations: obsN, summaries: sumN, backup });
    }
    mkdirSync(effectiveDataDir, { recursive: true });
    writeFileSync(markerPath, new Date().toISOString());
    logger.info('SYSTEM', 'cwd-remap marker written', { markerPath });
  } catch (err) {
    // Marker intentionally NOT written on failure — the next startup retries.
    logger.error('SYSTEM', 'cwd-remap failed, marker not written (will retry on next startup)', {}, err as Error);
  } finally {
    db?.close();
  }
}
/**
* Spawn a detached daemon process
* Returns the child PID or undefined if spawn failed

View File

@@ -0,0 +1,414 @@
/**
* WorktreeAdoption - Stamp observations from merged worktrees into their parent project.
*
* Given a parent repo path, this engine:
* 1. Uses git to enumerate worktrees of the parent repo.
* 2. Classifies each worktree's branch as "merged" (in `git branch --merged HEAD`)
* or manually overridden via `onlyBranch` (for squash-merge detection).
* 3. Stamps `merged_into_project` on `observations` and `session_summaries` rows
* whose `project` matches the composite `parent/worktree` name.
* 4. Propagates the same metadata to Chroma so semantic search includes the
* adopted rows under the parent project.
*
* `project` is never overwritten — it remains immutable provenance. The
* `merged_into_project` column is a virtual pointer that query layers OR into
* their WHERE predicates.
*
* DB lifecycle mirrors `runOneTimeCwdRemap` in ProcessManager.ts: we manage our
* own Database handle (open -> transaction -> close in finally) so this engine
* can be called on worker startup before `dbManager.initialize()` without
* contending on the shared handle.
*/
import path from 'path';
import { homedir } from 'os';
import { existsSync } from 'fs';
import { spawnSync } from 'child_process';
import { logger } from '../../utils/logger.js';
import { getProjectContext } from '../../utils/project-name.js';
import { ChromaSync } from '../sync/ChromaSync.js';
// Default location of the claude-mem data directory (DB lives here).
const DEFAULT_DATA_DIR = path.join(homedir(), '.claude-mem');

/** Per-parent-repo outcome of one adoption pass. */
export interface AdoptionResult {
  // Resolved main working-tree root, or the seed cwd if resolution failed.
  repoPath: string;
  // Project name derived from repoPath ('' when repoPath didn't resolve).
  parentProject: string;
  // Child worktrees examined (the main checkout itself is excluded).
  scannedWorktrees: number;
  // Branch names whose worktrees were targeted this pass.
  mergedBranches: string[];
  // Observations newly stamped this pass (SQL .changes, not re-patches).
  adoptedObservations: number;
  // Session summaries newly stamped this pass.
  adoptedSummaries: number;
  // Chroma metadata writes performed this pass (may exceed adoptedObservations).
  chromaUpdates: number;
  // Chroma writes attempted but failed (SQL was already committed).
  chromaFailed: number;
  // True → the SQL transaction was rolled back; counts are a preview.
  dryRun: boolean;
  // Per-worktree failures collected without aborting the pass.
  errors: Array<{ worktree: string; error: string }>;
}

/** One record parsed from `git worktree list --porcelain`. */
interface WorktreeEntry {
  // Absolute worktree path.
  path: string;
  // Short branch name, or null for a detached-HEAD worktree.
  branch: string | null;
}

// Upper bound for each git subprocess, in milliseconds.
const GIT_TIMEOUT_MS = 5000;
/** Sentinel error thrown inside the adoption transaction to roll back a dry run. */
class DryRunRollback extends Error {
  // Named so catch sites can distinguish it via instanceof checks.
  name = 'DryRunRollback';
  constructor() {
    super('dry-run rollback');
  }
}
/**
 * Execute a git command in `cwd`; returns trimmed stdout on success, or null
 * on a non-zero exit, timeout, or spawn failure.
 */
function gitCapture(cwd: string, args: string[]): string | null {
  const proc = spawnSync('git', ['-C', cwd, ...args], {
    encoding: 'utf8',
    timeout: GIT_TIMEOUT_MS
  });
  // A failed spawn or timeout leaves status non-zero/null — both map to null.
  return proc.status === 0 ? (proc.stdout ?? '').trim() : null;
}
/**
 * Resolve the main working-tree root for an arbitrary cwd inside a repo or
 * worktree. Mirrors the handling in `scripts/cwd-remap.ts:48-51`.
 * Returns null when `cwd` is not inside a git repo or the resolved root no
 * longer exists on disk.
 */
function resolveMainRepoPath(cwd: string): string | null {
  const commonDir = gitCapture(cwd, [
    'rev-parse',
    '--path-format=absolute',
    '--git-common-dir'
  ]);
  if (commonDir === null) return null;
  // Normal repos report "<repo>/.git"; bare repos just end in ".git".
  // NOTE(review): the '/.git' suffix test assumes forward-slash git output —
  // confirm on Windows if that platform matters here.
  const root = commonDir.endsWith('/.git')
    ? path.dirname(commonDir)
    : commonDir.replace(/\.git$/, '');
  return existsSync(root) ? root : null;
}
/**
 * Parse `git worktree list --porcelain` output into path/branch pairs.
 * Detached-HEAD worktrees yield branch = null.
 */
function listWorktrees(mainRepo: string): WorktreeEntry[] {
  const raw = gitCapture(mainRepo, ['worktree', 'list', '--porcelain']);
  if (!raw) return [];

  const entries: WorktreeEntry[] = [];
  let pending: Partial<WorktreeEntry> = {};
  const flush = (): void => {
    if (pending.path) {
      entries.push({ path: pending.path, branch: pending.branch ?? null });
    }
    pending = {};
  };

  for (const line of raw.split('\n')) {
    if (line.startsWith('worktree ')) {
      // A new record starts; emit whatever was accumulated so far.
      flush();
      pending = { path: line.slice('worktree '.length).trim(), branch: null };
    } else if (line.startsWith('branch ')) {
      // Value is "refs/heads/<name>" — keep only the short name.
      const ref = line.slice('branch '.length).trim();
      pending.branch = ref.startsWith('refs/heads/')
        ? ref.slice('refs/heads/'.length)
        : ref;
    } else if (line === '') {
      // A blank line terminates each porcelain record.
      flush();
    }
  }
  flush();
  return entries;
}
/** Short names of every branch already merged into the main repo's HEAD. */
function listMergedBranches(mainRepo: string): Set<string> {
  const raw = gitCapture(mainRepo, [
    'branch',
    '--merged',
    'HEAD',
    '--format=%(refname:short)'
  ]);
  const names = raw
    ? raw.split('\n').map(line => line.trim()).filter(line => line.length > 0)
    : [];
  return new Set(names);
}
/**
 * Stamp `merged_into_project` on observations and session_summaries for every
 * worktree of `opts.repoPath` whose branch has been merged into the parent's HEAD.
 *
 * SQL writes are idempotent: an UPDATE only touches rows where
 * `merged_into_project IS NULL`. `result.adoptedObservations` / `adoptedSummaries`
 * reflect the actual SQL changes on each run.
 *
 * Chroma patches are self-healing: the Chroma id set is built from ALL
 * observations whose `project` matches a merged worktree (both unadopted rows
 * AND rows previously stamped to this parent), and `updateMergedIntoProject`
 * is idempotent, so a transient Chroma failure on an earlier run is retried
 * automatically on the next adoption pass. `result.chromaUpdates` therefore
 * counts the total Chroma writes performed this pass (which may exceed
 * `adoptedObservations` when retries happen).
 *
 * @param opts.repoPath - Seed cwd, resolved to the main working tree (defaults to process.cwd())
 * @param opts.dataDirectory - Override for the data dir containing claude-mem.db (tests)
 * @param opts.dryRun - Compute counts inside the transaction, then roll it back
 * @param opts.onlyBranch - Treat exactly this branch as merged (squash-merge escape hatch)
 * @returns Counters and per-worktree errors; per-branch failures never throw
 */
export async function adoptMergedWorktrees(opts: {
  repoPath?: string;
  dataDirectory?: string;
  dryRun?: boolean;
  onlyBranch?: string;
} = {}): Promise<AdoptionResult> {
  const dataDirectory = opts.dataDirectory ?? DEFAULT_DATA_DIR;
  const dryRun = opts.dryRun ?? false;
  const startCwd = opts.repoPath ?? process.cwd();
  const mainRepo = resolveMainRepoPath(startCwd);
  const parentProject = mainRepo ? getProjectContext(mainRepo).primary : '';
  // Pre-populated result so every early return below yields a complete,
  // zeroed record for the caller.
  const result: AdoptionResult = {
    repoPath: mainRepo ?? startCwd,
    parentProject,
    scannedWorktrees: 0,
    mergedBranches: [],
    adoptedObservations: 0,
    adoptedSummaries: 0,
    chromaUpdates: 0,
    chromaFailed: 0,
    dryRun,
    errors: []
  };
  if (!mainRepo) {
    logger.debug('SYSTEM', 'Worktree adoption skipped (not a git repo)', { startCwd });
    return result;
  }
  const dbPath = path.join(dataDirectory, 'claude-mem.db');
  if (!existsSync(dbPath)) {
    logger.debug('SYSTEM', 'Worktree adoption skipped (no DB yet)', { dbPath });
    return result;
  }
  // Only child worktrees are candidates — the main checkout never adopts itself.
  const allWorktrees = listWorktrees(mainRepo);
  const childWorktrees = allWorktrees.filter(w => w.path !== mainRepo);
  result.scannedWorktrees = childWorktrees.length;
  if (childWorktrees.length === 0) {
    return result;
  }
  // Target selection: an explicit onlyBranch bypasses merge detection
  // (squash merges don't show up in `git branch --merged`).
  let targets: WorktreeEntry[];
  if (opts.onlyBranch) {
    targets = childWorktrees.filter(w => w.branch === opts.onlyBranch);
  } else {
    const merged = listMergedBranches(mainRepo);
    targets = childWorktrees.filter(w => w.branch !== null && merged.has(w.branch));
  }
  result.mergedBranches = targets
    .map(t => t.branch)
    .filter((b): b is string => b !== null);
  if (targets.length === 0) {
    return result;
  }
  // Ids collected for the Chroma patch after the SQL transaction commits.
  const adoptedSqliteIds: number[] = [];
  let db: import('bun:sqlite').Database | null = null;
  try {
    const { Database } = require('bun:sqlite') as typeof import('bun:sqlite');
    db = new Database(dbPath);
    // Schema guard: adoption may be invoked on worker startup before
    // DatabaseManager runs migrations. If the `merged_into_project` column
    // isn't present yet, prepared statements below will fail with
    // "no such column", silently skipping adoption until the next restart.
    // Return early so the next boot (post-migration) picks this up.
    interface ColumnInfo { name: string }
    const obsColumns = db
      .prepare('PRAGMA table_info(observations)')
      .all() as ColumnInfo[];
    const sumColumns = db
      .prepare('PRAGMA table_info(session_summaries)')
      .all() as ColumnInfo[];
    const obsHasColumn = obsColumns.some(c => c.name === 'merged_into_project');
    const sumHasColumn = sumColumns.some(c => c.name === 'merged_into_project');
    if (!obsHasColumn || !sumHasColumn) {
      logger.debug(
        'SYSTEM',
        'Worktree adoption skipped (merged_into_project column missing; will run after migration)',
        { obsHasColumn, sumHasColumn }
      );
      return result;
    }
    // Select ALL observations for the worktree project (both unadopted rows
    // AND rows already stamped to this parent), not just unadopted ones. This
    // ensures a transient Chroma failure on a prior run gets retried the next
    // time adoption executes: SQL may already be stamped, but we re-include
    // those ids in the Chroma patch set (updateMergedIntoProject is idempotent
    // — it replays the same metadata write).
    const selectObsForPatch = db.prepare(
      `SELECT id FROM observations
       WHERE project = ?
       AND (merged_into_project IS NULL OR merged_into_project = ?)`
    );
    const updateObs = db.prepare(
      'UPDATE observations SET merged_into_project = ? WHERE project = ? AND merged_into_project IS NULL'
    );
    const updateSum = db.prepare(
      'UPDATE session_summaries SET merged_into_project = ? WHERE project = ? AND merged_into_project IS NULL'
    );
    const tx = db.transaction(() => {
      for (const wt of targets) {
        try {
          const worktreeProject = getProjectContext(wt.path).primary;
          const rows = selectObsForPatch.all(
            worktreeProject,
            parentProject
          ) as Array<{ id: number }>;
          for (const r of rows) adoptedSqliteIds.push(r.id);
          // updateObs/updateSum only touch WHERE merged_into_project IS NULL,
          // so .changes reflects only newly-adopted rows (not the re-patched ones).
          const obsChanges = updateObs.run(parentProject, worktreeProject).changes;
          const sumChanges = updateSum.run(parentProject, worktreeProject).changes;
          result.adoptedObservations += obsChanges;
          result.adoptedSummaries += sumChanges;
        } catch (err) {
          // Per-worktree failure: record and continue with the other targets.
          const message = err instanceof Error ? err.message : String(err);
          logger.warn('SYSTEM', 'Worktree adoption skipped branch', {
            worktree: wt.path,
            branch: wt.branch,
            error: message
          });
          result.errors.push({ worktree: wt.path, error: message });
        }
      }
      if (dryRun) {
        // Throw a dedicated error to force rollback. Caught below by instanceof check.
        throw new DryRunRollback();
      }
    });
    try {
      tx();
    } catch (err) {
      if (err instanceof DryRunRollback) {
        // Rolled back as intended for dry-run — counts are still useful.
      } else {
        throw err;
      }
    }
  } finally {
    db?.close();
  }
  // Chroma patch runs AFTER the SQL commit, outside the transaction; a
  // failure here leaves SQL committed and is retried on the next pass.
  if (!dryRun && adoptedSqliteIds.length > 0) {
    const chromaSync = new ChromaSync('claude-mem');
    try {
      await chromaSync.updateMergedIntoProject(adoptedSqliteIds, parentProject);
      result.chromaUpdates = adoptedSqliteIds.length;
    } catch (err) {
      logger.error(
        'CHROMA_SYNC',
        'Worktree adoption Chroma patch failed (SQL already committed)',
        { parentProject, sqliteIdCount: adoptedSqliteIds.length },
        err as Error
      );
      result.chromaFailed = adoptedSqliteIds.length;
    } finally {
      await chromaSync.close();
    }
  }
  // Log only when something actually happened (keeps startup logs quiet).
  if (
    result.adoptedObservations > 0 ||
    result.adoptedSummaries > 0 ||
    result.chromaUpdates > 0 ||
    result.errors.length > 0
  ) {
    logger.info('SYSTEM', 'Worktree adoption applied', {
      parentProject,
      dryRun,
      scannedWorktrees: result.scannedWorktrees,
      mergedBranches: result.mergedBranches,
      adoptedObservations: result.adoptedObservations,
      adoptedSummaries: result.adoptedSummaries,
      chromaUpdates: result.chromaUpdates,
      chromaFailed: result.chromaFailed,
      errors: result.errors.length
    });
  }
  return result;
}
/**
 * Run adoption once per distinct parent repo referenced by recorded cwds.
 *
 * Worker startup adoption cannot use `process.cwd()` as a seed — the daemon is
 * spawned with cwd=marketplace-plugin-dir, which isn't a git repo. Instead,
 * candidate parent repos are derived from `pending_messages.cwd` (the user's
 * actual working directories), deduped via `resolveMainRepoPath`, and adoption
 * runs against each one. A failure on an individual repo is logged but does
 * not short-circuit the others.
 *
 * Safe to call before `dbManager.initialize()`: a short-lived readonly handle
 * enumerates the cwds here, and `adoptMergedWorktrees` opens its own writable
 * handle per repo.
 */
export async function adoptMergedWorktreesForAllKnownRepos(opts: {
  dataDirectory?: string;
  dryRun?: boolean;
} = {}): Promise<AdoptionResult[]> {
  const dataDirectory = opts.dataDirectory ?? DEFAULT_DATA_DIR;
  const dbPath = path.join(dataDirectory, 'claude-mem.db');
  const results: AdoptionResult[] = [];
  if (!existsSync(dbPath)) {
    logger.debug('SYSTEM', 'Worktree adoption skipped (no DB yet)', { dbPath });
    return results;
  }

  const parentRepos = new Set<string>();
  let handle: import('bun:sqlite').Database | null = null;
  try {
    const { Database } = require('bun:sqlite') as typeof import('bun:sqlite');
    handle = new Database(dbPath, { readonly: true });
    const hasPending = handle.prepare(
      "SELECT name FROM sqlite_master WHERE type='table' AND name='pending_messages'"
    ).get() as { name: string } | undefined;
    if (!hasPending) {
      logger.debug('SYSTEM', 'Worktree adoption skipped (pending_messages table missing)');
      return results;
    }
    // Distinct recorded cwds → deduped main-repo roots.
    const rows = handle.prepare(`
      SELECT cwd FROM pending_messages
      WHERE cwd IS NOT NULL AND cwd != ''
      GROUP BY cwd
    `).all() as Array<{ cwd: string }>;
    for (const row of rows) {
      const repo = resolveMainRepoPath(row.cwd);
      if (repo !== null) parentRepos.add(repo);
    }
  } finally {
    // Close the readonly handle before adoption opens writable ones.
    handle?.close();
  }

  if (parentRepos.size === 0) {
    logger.debug('SYSTEM', 'Worktree adoption found no known parent repos');
    return results;
  }

  for (const repoPath of parentRepos) {
    try {
      results.push(
        await adoptMergedWorktrees({
          repoPath,
          dataDirectory,
          dryRun: opts.dryRun
        })
      );
    } catch (err) {
      logger.warn(
        'SYSTEM',
        'Worktree adoption failed for parent repo (continuing)',
        { repoPath, error: err instanceof Error ? err.message : String(err) }
      );
    }
  }
  return results;
}

View File

@@ -1,93 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22310 | 9:46 PM | 🟣 | Complete Hook Lifecycle Documentation Generated | ~603 |
| #22305 | 9:45 PM | 🔵 | Session Summary Storage and Status Lifecycle | ~472 |
| #22304 | " | 🔵 | Session Creation Idempotency and Observation Storage | ~481 |
| #22303 | " | 🔵 | SessionStore CRUD Operations for Hook Integration | ~392 |
| #22300 | 9:44 PM | 🔵 | SessionStore Database Management and Schema Migrations | ~455 |
| #22299 | " | 🔵 | Database Schema and Entity Types | ~460 |
| #21976 | 5:24 PM | 🟣 | storeObservation Saves tool_use_id to Database | ~298 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23808 | 10:42 PM | 🔵 | migrations.ts Already Migrated to bun:sqlite | ~312 |
| #23807 | " | 🔵 | SessionSearch.ts Already Migrated to bun:sqlite | ~321 |
| #23805 | " | 🔵 | Database.ts Already Migrated to bun:sqlite | ~290 |
| #23784 | 9:59 PM | ✅ | SessionStore.ts db.pragma() Converted to db.query().all() Pattern | ~198 |
| #23783 | 9:58 PM | ✅ | SessionStore.ts Migration004 Multi-Statement db.exec() Converted to db.run() | ~220 |
| #23782 | " | ✅ | SessionStore.ts initializeSchema() db.exec() Converted to db.run() | ~197 |
| #23781 | " | ✅ | SessionStore.ts Constructor PRAGMA Calls Converted to db.run() | ~215 |
| #23780 | " | ✅ | SessionStore.ts Type Annotation Updated | ~183 |
| #23779 | " | ✅ | SessionStore.ts Import Updated to bun:sqlite | ~237 |
| #23778 | 9:57 PM | ✅ | Database.ts Import Updated to bun:sqlite | ~177 |
| #23777 | " | 🔵 | SessionStore.ts Current Implementation - better-sqlite3 Import and API Usage | ~415 |
| #23776 | " | 🔵 | migrations.ts Current Implementation - better-sqlite3 Import | ~285 |
| #23775 | " | 🔵 | Database.ts Current Implementation - better-sqlite3 Import | ~286 |
| #23774 | " | 🔵 | SessionSearch.ts Current Implementation - better-sqlite3 Import | ~309 |
| #23671 | 8:36 PM | 🔵 | getUserPromptsByIds Method Implementation with Filtering and Ordering | ~326 |
| #23670 | " | 🔵 | getUserPromptsByIds Method Location in SessionStore | ~145 |
| #23635 | 8:10 PM | 🔴 | Fixed SessionStore.ts Concepts Filter SQL Parameter Bug | ~297 |
| #23634 | " | 🔵 | SessionStore.ts Concepts Filter Bug Confirmed at Line 849 | ~356 |
| #23522 | 5:27 PM | 🔵 | Complete TypeScript Type Definitions for Database Entities | ~433 |
| #23521 | " | 🔵 | Database Schema Structure with 7 Migration Versions | ~461 |
### Dec 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29868 | 8:19 PM | 🔵 | SessionStore Architecture Review for Mode Metadata Addition | ~350 |
| #29243 | 12:13 AM | 🔵 | Observations Table Schema Migration: Text Field Made Nullable | ~496 |
| #29241 | 12:12 AM | 🔵 | Migration001: Core Schema for Sessions, Memories, Overviews, Diagnostics, Transcripts | ~555 |
| #29238 | 12:11 AM | 🔵 | Observation Type Schema Evolution: Five to Six Types | ~331 |
| #29237 | " | 🔵 | SQLite SessionStore with Schema Migrations and WAL Mode | ~520 |
### Dec 21, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #31622 | 8:26 PM | 🔄 | Completed SessionStore logging standardization | ~270 |
| #31621 | " | 🔄 | Standardized error logging for boundary timestamps query | ~253 |
| #31620 | " | 🔄 | Standardized error logging in getTimelineAroundObservation | ~252 |
| #31619 | " | 🔄 | Replaced console.log with logger.debug in SessionStore | ~263 |
### Dec 27, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33213 | 9:04 PM | 🔵 | SessionStore Implements KISS Session ID Threading via INSERT OR IGNORE Pattern | ~673 |
### Dec 28, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33548 | 10:59 PM | ✅ | Reverted memory_session_id NULL Initialization to contentSessionId Placeholder | ~421 |
| #33546 | 10:57 PM | 🔴 | Fixed createSDKSession to Initialize memory_session_id as NULL | ~406 |
| #33545 | " | 🔵 | createSDKSession Sets memory_session_id Equal to content_session_id Initially | ~378 |
| #33544 | " | 🔵 | SessionStore Migration 17 Already Renamed Session ID Columns | ~451 |
### Jan 2, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36028 | 9:20 PM | 🔄 | Try-Catch Block Removed from Database Migration | ~291 |
### Jan 3, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36653 | 11:03 PM | 🔵 | storeObservation Method Signature Shows Parameter Named memorySessionId | ~474 |
| #36652 | " | 🔵 | createSDKSession Implementation Confirms NULL Initialization With Security Rationale | ~488 |
| #36650 | 11:02 PM | 🔵 | Phase 1 Analysis Reveals Implementation-Test Mismatch on NULL vs Placeholder Initialization | ~687 |
| #36649 | " | 🔵 | SessionStore Implementation Reveals NULL-Based Memory Session ID Initialization Pattern | ~770 |
| #36175 | 6:52 PM | ✅ | MigrationRunner Re-exported from Migrations.ts | ~405 |
| #36172 | " | 🔵 | Migrations.ts Contains Legacy Migration System | ~650 |
| #36163 | 6:48 PM | 🔵 | SessionStore Method Inventory and Extraction Boundaries | ~692 |
| #36162 | 6:47 PM | 🔵 | SessionStore Architecture and Migration History | ~593 |
</claude-mem-context>

View File

@@ -1,5 +1,5 @@
import { Database } from 'bun:sqlite';
import { DATA_DIR, DB_PATH, ensureDir } from '../../shared/paths.js';
import { DATA_DIR, DB_PATH, ensureDir, OBSERVER_SESSIONS_PROJECT } from '../../shared/paths.js';
import { logger } from '../../utils/logger.js';
import {
TableColumnInfo,
@@ -65,6 +65,7 @@ export class SessionStore {
this.addSessionCustomTitleColumn();
this.addSessionPlatformSourceColumn();
this.addObservationModelColumns();
this.ensureMergedIntoProjectColumns();
}
/**
@@ -944,6 +945,36 @@ export class SessionStore {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(26, new Date().toISOString());
}
/**
* Ensure merged_into_project columns + indices exist on observations and session_summaries.
*
* Self-idempotent via PRAGMA table_info guard — does NOT bump schema_versions.
* Mirrors MigrationRunner.ensureMergedIntoProjectColumns so bundled artifacts
* that embed SessionStore (e.g. context-generator.cjs) stay schema-consistent
* with the standalone migration path.
*/
private ensureMergedIntoProjectColumns(): void {
const obsCols = this.db
.query('PRAGMA table_info(observations)')
.all() as TableColumnInfo[];
if (!obsCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE observations ADD COLUMN merged_into_project TEXT');
}
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_observations_merged_into ON observations(merged_into_project)'
);
const sumCols = this.db
.query('PRAGMA table_info(session_summaries)')
.all() as TableColumnInfo[];
if (!sumCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE session_summaries ADD COLUMN merged_into_project TEXT');
}
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_summaries_merged_into ON session_summaries(merged_into_project)'
);
}
/**
* Update the memory session ID for a session
* Called by SDKAgent when it captures the session ID from the first SDK message
@@ -1192,8 +1223,9 @@ export class SessionStore {
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
AND project != ?
`;
const params: unknown[] = [];
const params: unknown[] = [OBSERVER_SESSIONS_PROJECT];
if (normalizedPlatformSource) {
query += ' AND COALESCE(platform_source, ?) = ?';
@@ -1218,9 +1250,10 @@ export class SessionStore {
MAX(started_at_epoch) as latest_epoch
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
AND project != ?
GROUP BY COALESCE(platform_source, '${DEFAULT_PLATFORM_SOURCE}'), project
ORDER BY latest_epoch DESC
`).all() as Array<{ platform_source: string; project: string; latest_epoch: number }>;
`).all(OBSERVER_SESSIONS_PROJECT) as Array<{ platform_source: string; project: string; latest_epoch: number }>;
const projects: string[] = [];
const seenProjects = new Set<string>();

View File

@@ -37,6 +37,7 @@ export class MigrationRunner {
this.addSessionCustomTitleColumn();
this.createObservationFeedbackTable();
this.addSessionPlatformSourceColumn();
this.ensureMergedIntoProjectColumns();
}
/**
@@ -922,4 +923,33 @@ export class MigrationRunner {
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(25, new Date().toISOString());
}
/**
* Ensure merged_into_project columns + indices exist on observations and session_summaries.
*
* Self-idempotent via PRAGMA table_info guard — does NOT bump schema_versions.
* Supports merged-worktree adoption: a nullable pointer that lets a worktree's rows
* be surfaced under the parent project's observation list without data movement.
*/
private ensureMergedIntoProjectColumns(): void {
const obsCols = this.db
.query('PRAGMA table_info(observations)')
.all() as TableColumnInfo[];
if (!obsCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE observations ADD COLUMN merged_into_project TEXT');
}
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_observations_merged_into ON observations(merged_into_project)'
);
const sumCols = this.db
.query('PRAGMA table_info(session_summaries)')
.all() as TableColumnInfo[];
if (!sumCols.some(c => c.name === 'merged_into_project')) {
this.db.run('ALTER TABLE session_summaries ADD COLUMN merged_into_project TEXT');
}
this.db.run(
'CREATE INDEX IF NOT EXISTS idx_summaries_merged_into ON session_summaries(merged_into_project)'
);
}
}

View File

@@ -6,7 +6,7 @@
import { createHash } from 'crypto';
import { Database } from 'bun:sqlite';
import { logger } from '../../../utils/logger.js';
import { getCurrentProjectName } from '../../../shared/paths.js';
import { getProjectContext } from '../../../utils/project-name.js';
import type { ObservationInput, StoreObservationResult } from './types.js';
/** Deduplication window: observations with the same content hash within this window are skipped */
@@ -62,7 +62,7 @@ export function storeObservation(
const timestampIso = new Date(timestampEpoch).toISOString();
// Guard against empty project string (race condition where project isn't set yet)
const resolvedProject = project || getCurrentProjectName();
const resolvedProject = project || getProjectContext(process.cwd()).primary;
// Content-hash deduplication
const contentHash = computeObservationContentHash(memorySessionId, observation.title, observation.narrative);

View File

@@ -8,6 +8,7 @@
import type { Database } from 'bun:sqlite';
import type { ObservationRecord, SessionSummaryRecord, UserPromptRecord } from '../../../types/database.js';
import { logger } from '../../../utils/logger.js';
import { OBSERVER_SESSIONS_PROJECT } from '../../../shared/paths.js';
/**
* Timeline result containing observations, sessions, and prompts within a time window
@@ -210,9 +211,10 @@ export function getAllProjects(db: Database): string[] {
SELECT DISTINCT project
FROM sdk_sessions
WHERE project IS NOT NULL AND project != ''
AND project != ?
ORDER BY project ASC
`);
const rows = stmt.all() as Array<{ project: string }>;
const rows = stmt.all(OBSERVER_SESSIONS_PROJECT) as Array<{ project: string }>;
return rows.map(row => row.project);
}

View File

@@ -28,6 +28,7 @@ interface StoredObservation {
id: number;
memory_session_id: string;
project: string;
merged_into_project: string | null;
text: string | null;
type: string;
title: string | null;
@@ -47,6 +48,7 @@ interface StoredSummary {
id: number;
memory_session_id: string;
project: string;
merged_into_project: string | null;
request: string | null;
investigated: string | null;
learned: string | null;
@@ -129,11 +131,12 @@ export class ChromaSync {
const files_read = parseFileList(obs.files_read);
const files_modified = parseFileList(obs.files_modified);
const baseMetadata: Record<string, string | number> = {
const baseMetadata: Record<string, string | number | null> = {
sqlite_id: obs.id,
doc_type: 'observation',
memory_session_id: obs.memory_session_id,
project: obs.project,
merged_into_project: obs.merged_into_project ?? null,
created_at_epoch: obs.created_at_epoch,
type: obs.type || 'discovery',
title: obs.title || 'Untitled'
@@ -190,11 +193,12 @@ export class ChromaSync {
private formatSummaryDocs(summary: StoredSummary): ChromaDocument[] {
const documents: ChromaDocument[] = [];
const baseMetadata: Record<string, string | number> = {
const baseMetadata: Record<string, string | number | null> = {
sqlite_id: summary.id,
doc_type: 'session_summary',
memory_session_id: summary.memory_session_id,
project: summary.project,
merged_into_project: summary.merged_into_project ?? null,
created_at_epoch: summary.created_at_epoch,
prompt_number: summary.prompt_number || 0
};
@@ -346,6 +350,7 @@ export class ChromaSync {
id: observationId,
memory_session_id: memorySessionId,
project: project,
merged_into_project: null,
text: null, // Legacy field, not used
type: obs.type,
title: obs.title,
@@ -390,6 +395,7 @@ export class ChromaSync {
id: summaryId,
memory_session_id: memorySessionId,
project: project,
merged_into_project: null,
request: summary.request,
investigated: summary.investigated,
learned: summary.learned,
@@ -830,6 +836,72 @@ export class ChromaSync {
}
}
/**
* Stamp `merged_into_project` on every Chroma document whose metadata
* `sqlite_id` is in the provided set. Used by the worktree adoption engine
* to keep Chroma's metadata in lockstep with SQLite after a parent branch
* absorbs a worktree branch via merge.
*
* Batched: fetches docs by `sqlite_id IN sqliteIds`, rewrites metadata with
* the new field, and calls `chroma_update_documents` once per page of up to
* BATCH_SIZE ids. Idempotent — re-running with the same value is a no-op
* because the write doesn't depend on the prior value.
*/
async updateMergedIntoProject(
  sqliteIds: number[],
  mergedIntoProject: string
): Promise<void> {
  if (sqliteIds.length === 0) return;

  await this.ensureCollectionExists();
  const mcp = ChromaMcpManager.getInstance();
  let patchedCount = 0;

  // Walk the id set one bounded page at a time so no single Chroma call
  // carries an unbounded payload.
  for (let offset = 0; offset < sqliteIds.length; offset += this.BATCH_SIZE) {
    const page = sqliteIds.slice(offset, offset + this.BATCH_SIZE);

    const fetched = await mcp.callTool('chroma_get_documents', {
      collection_name: this.collectionName,
      where: { sqlite_id: { $in: page } },
      include: ['metadatas']
    }) as { ids?: string[]; metadatas?: Array<Record<string, any> | null> };

    const matchedIds: string[] = fetched?.ids ?? [];
    if (matchedIds.length === 0) continue;

    const rewritten = (fetched?.metadatas ?? []).map(original => {
      // Overlay the new pointer onto the prior metadata, then drop
      // null/undefined/'' entries to match the sanitization other callTool
      // sites apply (chroma-mcp rejects null values in metadata).
      const entries = Object.entries({
        ...(original ?? {}),
        merged_into_project: mergedIntoProject
      }).filter(([, value]) => value !== null && value !== undefined && value !== '');
      return Object.fromEntries(entries);
    });

    await mcp.callTool('chroma_update_documents', {
      collection_name: this.collectionName,
      ids: matchedIds,
      metadatas: rewritten
    });
    patchedCount += matchedIds.length;
  }

  logger.info('CHROMA_SYNC', 'merged_into_project metadata patched', {
    collection: this.collectionName,
    mergedIntoProject,
    sqliteIdCount: sqliteIds.length,
    chromaDocsPatched: patchedCount
  });
}
/**
* Close the ChromaSync instance
* ChromaMcpManager is a singleton and manages its own lifecycle

View File

@@ -45,6 +45,7 @@ import {
getPlatformTimeout,
aggressiveStartupCleanup,
runOneTimeChromaMigration,
runOneTimeCwdRemap,
cleanStalePidFile,
isProcessAlive,
spawnDaemon,
@@ -58,6 +59,7 @@ import {
httpShutdown
} from './infrastructure/HealthMonitor.js';
import { performGracefulShutdown } from './infrastructure/GracefulShutdown.js';
import { adoptMergedWorktrees, adoptMergedWorktreesForAllKnownRepos } from './infrastructure/WorktreeAdoption.js';
// Server imports
import { Server } from './server/Server.js';
@@ -359,6 +361,34 @@ export class WorkerService {
runOneTimeChromaMigration();
}
// One-time remap of pre-worktree project names using pending_messages.cwd.
// Must run before dbManager.initialize() so we don't hold the DB open.
runOneTimeCwdRemap();
// Stamp merged worktrees so their observations surface under the parent
// project. Runs every startup (not marker-gated) because git state evolves
// and the engine is fully idempotent. Must also precede dbManager.initialize().
//
// The worker daemon is spawned with cwd=marketplace-plugin-dir (not a git
// repo), so we can't seed adoption with process.cwd(). Instead, discover
// parent repos from recorded pending_messages.cwd values.
try {
const adoptions = await adoptMergedWorktreesForAllKnownRepos({});
for (const adoption of adoptions) {
if (adoption.adoptedObservations > 0 || adoption.adoptedSummaries > 0 || adoption.chromaUpdates > 0) {
logger.info('SYSTEM', 'Merged worktrees adopted on startup', adoption);
}
if (adoption.errors.length > 0) {
logger.warn('SYSTEM', 'Worktree adoption had per-branch errors', {
repoPath: adoption.repoPath,
errors: adoption.errors
});
}
}
} catch (err) {
logger.error('SYSTEM', 'Worktree adoption failed (non-fatal)', {}, err as Error);
}
// Initialize ChromaMcpManager only if Chroma is enabled
const chromaEnabled = settings.CLAUDE_MEM_CHROMA_ENABLED !== 'false';
if (chromaEnabled) {
@@ -1187,6 +1217,45 @@ async function main() {
break;
}
case 'adopt': {
const dryRun = process.argv.includes('--dry-run');
const branchIndex = process.argv.indexOf('--branch');
const branchValue = branchIndex !== -1 ? process.argv[branchIndex + 1] : undefined;
if (branchIndex !== -1 && (!branchValue || branchValue.startsWith('--'))) {
console.error('Usage: adopt [--dry-run] [--branch <branch>] [--cwd <path>]');
process.exit(1);
}
const onlyBranch = branchValue;
// Honor an explicit --cwd override so the NPX CLI can pass through the
// user's working directory (the spawn sets cwd to the marketplace dir).
const cwdIndex = process.argv.indexOf('--cwd');
const cwdValue = cwdIndex !== -1 ? process.argv[cwdIndex + 1] : undefined;
if (cwdIndex !== -1 && (!cwdValue || cwdValue.startsWith('--'))) {
console.error('Usage: adopt [--dry-run] [--branch <branch>] [--cwd <path>]');
process.exit(1);
}
const repoPath = cwdValue ?? process.cwd();
const result = await adoptMergedWorktrees({ repoPath, dryRun, onlyBranch });
const tag = result.dryRun ? '(dry-run)' : '(applied)';
console.log(`\nWorktree adoption ${tag}`);
console.log(` Parent project: ${result.parentProject || '(unknown)'}`);
console.log(` Repo: ${result.repoPath}`);
console.log(` Worktrees scanned: ${result.scannedWorktrees}`);
console.log(` Merged branches: ${result.mergedBranches.join(', ') || '(none)'}`);
console.log(` Observations adopted: ${result.adoptedObservations}`);
console.log(` Summaries adopted: ${result.adoptedSummaries}`);
console.log(` Chroma docs updated: ${result.chromaUpdates}`);
if (result.chromaFailed > 0) {
console.log(` Chroma sync failures: ${result.chromaFailed} (will retry on next run)`);
}
for (const err of result.errors) {
console.log(` ! ${err.worktree}: ${err.error}`);
}
process.exit(0);
}
case '--daemon':
default: {
// GUARD 1: Refuse to start if another worker is already alive (PID check).

View File

@@ -124,6 +124,7 @@ export interface Observation {
id: number;
memory_session_id: string; // Renamed from sdk_session_id
project: string;
merged_into_project: string | null;
platform_source: string;
type: string;
title: string;

View File

@@ -1,123 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23673 | 8:36 PM | ✅ | Add Project Filter Parameter to Session and Prompt Hydration in Search | ~306 |
| #23596 | 5:54 PM | ⚖️ | Import/Export Bug Fix Priority and Scope | ~415 |
| #23595 | 5:53 PM | 🔴 | SearchManager Returns Wrong Format for Empty Results | ~320 |
| #23594 | " | 🔵 | SearchManager Search Method Control Flow | ~313 |
| #23591 | 5:51 PM | 🔵 | SearchManager JSON Response Structure | ~231 |
| #23590 | " | 🔵 | Import/Export Feature Status Review | ~490 |
| #23583 | 5:50 PM | 🔵 | SearchManager Hybrid Search Architecture | ~495 |
### Dec 13, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #25191 | 8:04 PM | 🔵 | ChromaSync Instantiated in DatabaseManager Constructor | ~315 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26263 | 8:32 PM | 🔵 | SearchManager Timeline Methods Use Rich Formatting, Search Method Uses Flat Tables | ~464 |
| #26243 | 8:29 PM | 🔵 | FormattingService Provides Basic Table Format Without Dates or File Grouping | ~390 |
| #26240 | " | 🔵 | SearchManager Formats Results as Tables, Timeline Uses Rich Date-Grouped Format | ~416 |
| #26108 | 7:43 PM | ✅ | changes() Method Format Logic Removed | ~401 |
| #26107 | " | ✅ | changes() Method Format Parameter Removed | ~317 |
| #26106 | 7:42 PM | ✅ | decisions() Method Format Logic Removed | ~405 |
| #26105 | " | ✅ | decisions() Method Format Parameter Removed | ~310 |
| #26104 | " | ✅ | Main search() Method Format Handling Removed | ~430 |
| #26103 | 7:41 PM | ✅ | FormattingService.ts Rewritten to Table Format | ~457 |
| #26102 | " | 🔵 | SearchManager.ts Format Parameter Removal Status | ~478 |
### Dec 15, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27043 | 6:04 PM | 🔵 | Subagent confirms no version switcher UI exists, only orphaned backend infrastructure | ~539 |
| #27041 | 6:03 PM | 🔵 | Branch switching code isolated to two backend files, no frontend UI components | ~473 |
| #27037 | 6:02 PM | 🔵 | Branch switching functionality exists in SettingsRoutes with UI switcher removal intent | ~463 |
### Dec 16, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #27727 | 5:45 PM | 🔵 | SearchManager returns raw data arrays when format=json is specified | ~349 |
### Dec 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28473 | 4:25 PM | 🔵 | PaginationHelper LIMIT+1 Trick and Project Path Sanitization | ~499 |
| #28458 | 4:24 PM | 🔵 | SDK Agent Observer-Only Event-Driven Query Loop | ~513 |
| #28455 | " | 🔵 | Event-Driven Session Manager with Zero-Latency Queuing | ~566 |
### Dec 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29240 | 12:12 AM | 🔵 | SDK Agent Event-Driven Query Loop with Tool Restrictions | ~507 |
### Dec 20, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #31100 | 8:01 PM | 🔵 | Summary and Memory Message Generation in SDK Agent | ~324 |
### Dec 25, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32616 | 8:43 PM | 🔵 | Comprehensive analysis of "enable billing" setting and its impact on rate limiting | ~533 |
| #32599 | 8:40 PM | 🔄 | Added validation and explicit default for Gemini model configuration | ~393 |
| #32598 | " | 🔵 | Gemini configuration loaded from settings or environment variables | ~363 |
| #32591 | 8:38 PM | 🔴 | Removed Unsupported Gemini Model from Agent | ~282 |
| #32583 | " | 🔵 | Gemini Agent Implementation Details | ~434 |
| #32543 | 7:29 PM | 🔄 | Rate limiting applied conditionally based on billing status | ~164 |
| #32542 | " | 🔄 | Query Gemini now accepts billing status | ~163 |
| #32541 | " | 🔄 | Gemini config now includes billing status | ~182 |
| #32540 | " | 🔄 | Rate limiting logic refactored for Gemini billing | ~164 |
### Dec 26, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32949 | 10:55 PM | 🔵 | Complete settings persistence flow for Xiaomi MIMO v2 Flash model | ~320 |
| #32948 | 10:53 PM | 🔵 | OpenRouterAgent uses CLAUDE_MEM_OPENROUTER_MODEL setting with Xiaomi as default | ~183 |
### Dec 27, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33215 | 9:06 PM | 🔵 | SessionManager Implements Event-Driven Lifecycle with Database-First Persistence and Auto-Initialization | ~853 |
| #33214 | " | 🔵 | SDKAgent Implements Event-Driven Query Loop with Init/Continuation Prompt Selection | ~769 |
### Dec 28, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #33551 | 11:00 PM | 🔵 | GeminiAgent Does Not Implement Resume Functionality | ~307 |
| #33550 | " | 🔵 | OpenRouterAgent Does Not Implement Resume Functionality | ~294 |
| #33549 | 10:59 PM | 🔴 | SDKAgent Now Checks memorySessionId Differs From contentSessionId Before Resume | ~419 |
| #33547 | " | 🔵 | All Agents Call storeObservation with contentSessionId Instead of memorySessionId | ~407 |
| #33543 | 10:56 PM | 🔵 | SDKAgent Already Implements Memory Session ID Capture and Resume Logic | ~467 |
| #33542 | " | 🔵 | SessionManager Already Uses Renamed Session ID Fields | ~390 |
### Dec 30, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #34504 | 2:31 PM | 🔵 | SDKAgent V2 Message Handling and Processing Flow Detailed | ~583 |
| #34459 | 2:23 PM | 🔵 | Complete SDKAgent V2 Architecture with Comprehensive Message Processing | ~619 |
| #34453 | 2:21 PM | 🔵 | Memory Agent Configured as Observer-Only | ~379 |
### Jan 4, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36853 | 1:49 AM | 🔵 | GeminiAgent Implementation Reviewed for Model Support | ~555 |
</claude-mem-context>

View File

@@ -9,6 +9,7 @@
import { DatabaseManager } from './DatabaseManager.js';
import { logger } from '../../utils/logger.js';
import { OBSERVER_SESSIONS_PROJECT } from '../../shared/paths.js';
import type { PaginatedResult, Observation, Summary, UserPrompt } from '../worker-types.js';
export class PaginationHelper {
@@ -24,15 +25,17 @@ export class PaginationHelper {
* Uses first occurrence of project name from left (project root)
*/
private stripProjectPath(filePath: string, projectName: string): string {
  // Composite names ("parent/worktree") never show up in on-disk paths for
  // standard git worktrees — only the checkout basename does — so anchor the
  // search on the final path segment of the project name.
  const segments = projectName.split('/');
  const basename = segments[segments.length - 1];
  const anchor = `/${basename}/`;
  const anchorAt = filePath.indexOf(anchor);

  // First occurrence from the left is the project root; strip through it.
  // If the anchor never appears, hand back the path untouched.
  return anchorAt === -1 ? filePath : filePath.slice(anchorAt + anchor.length);
}
@@ -78,6 +81,7 @@ export class PaginationHelper {
o.id,
o.memory_session_id,
o.project,
o.merged_into_project,
COALESCE(s.platform_source, 'claude') as platform_source,
o.type,
o.title,
@@ -98,8 +102,14 @@ export class PaginationHelper {
const conditions: string[] = [];
if (project) {
conditions.push('o.project = ?');
params.push(project);
// Include adopted merged-worktree rows so the parent project's view
// surfaces observations that originated under its merged children.
conditions.push('(o.project = ? OR o.merged_into_project = ?)');
params.push(project, project);
} else {
// Hide internal observer-session rows from the unfiltered UI list.
conditions.push('o.project != ?');
params.push(OBSERVER_SESSIONS_PROJECT);
}
if (platformSource) {
conditions.push(`COALESCE(s.platform_source, 'claude') = ?`);
@@ -154,8 +164,13 @@ export class PaginationHelper {
const conditions: string[] = [];
if (project) {
conditions.push('ss.project = ?');
params.push(project);
// Include adopted merged-worktree summaries so the parent project's view
// surfaces rows that originated under its merged children.
conditions.push('(ss.project = ? OR ss.merged_into_project = ?)');
params.push(project, project);
} else {
// Hide internal observer-session rows from the unfiltered UI list.
conditions.push("ss.project != 'observer-sessions'");
}
if (platformSource) {
@@ -207,6 +222,9 @@ export class PaginationHelper {
if (project) {
conditions.push('s.project = ?');
params.push(project);
} else {
// Hide internal observer-session rows from the unfiltered UI list.
conditions.push("s.project != 'observer-sessions'");
}
if (platformSource) {

View File

@@ -13,7 +13,6 @@
* - TimelineBuilder: Timeline construction
*/
import { basename } from 'path';
import { SessionSearch } from '../sqlite/SessionSearch.js';
import { SessionStore } from '../sqlite/SessionStore.js';
import { ChromaSync } from '../sync/ChromaSync.js';
@@ -22,6 +21,7 @@ import { TimelineService } from './TimelineService.js';
import type { TimelineItem } from './TimelineService.js';
import type { ObservationSearchResult, SessionSummarySearchResult, UserPromptSearchResult } from '../sqlite/types.js';
import { logger } from '../../utils/logger.js';
import { getProjectContext } from '../../utils/project-name.js';
import { formatDate, formatTime, formatDateTime, extractFirstFile, groupByDate, estimateTokens } from '../../shared/timeline-formatting.js';
import { ModeManager } from '../domain/ModeManager.js';
@@ -170,8 +170,16 @@ export class SearchManager {
// Include project in the Chroma where clause to scope vector search.
// Without this, larger projects dominate the top-N results and smaller
// projects get crowded out before the post-hoc SQLite filter.
// Match both native-provenance rows (project) and adopted merged-worktree
// rows (merged_into_project) so a parent-project query surfaces its
// merged children's observations too.
if (options.project) {
const projectFilter = { project: options.project };
const projectFilter = {
$or: [
{ project: options.project },
{ merged_into_project: options.project }
]
};
whereFilter = whereFilter
? { $and: [whereFilter, projectFilter] }
: projectFilter;
@@ -1319,7 +1327,7 @@ export class SearchManager {
* Tool handler: get_recent_context
*/
async getRecentContext(args: any): Promise<any> {
const project = args.project || basename(process.cwd());
const project = args.project || getProjectContext(process.cwd()).primary;
const limit = args.limit || 3;
const sessions = this.sessionStore.getRecentSessionsWithStatus(project, limit);

View File

@@ -1,113 +0,0 @@
<claude-mem-context>
# Recent Activity
### Nov 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6295 | 1:18 PM | 🔵 | Path Configuration Structure for claude-mem | ~305 |
### Dec 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20730 | 9:06 PM | 🔵 | Path Configuration Module with ESM/CJS Compatibility | ~578 |
| #20718 | 9:00 PM | 🔵 | Worker Service Auto-Start and Health Check System | ~448 |
| #20410 | 7:21 PM | 🔵 | Path utilities provide cross-runtime directory management with Claude integration support | ~478 |
| #20409 | 7:20 PM | 🔵 | Worker utilities provide automatic PM2 startup with health checking and port configuration | ~479 |
### Dec 9, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23141 | 6:42 PM | 🔵 | Located getSettingsPath Function in paths.ts | ~261 |
| #23134 | 6:41 PM | ✅ | Set CLAUDE_MEM_SKIP_TOOLS Default Value in SettingsDefaultsManager | ~261 |
| #23133 | " | ✅ | Added CLAUDE_MEM_SKIP_TOOLS to SettingsDefaults Interface | ~231 |
| #23131 | 6:40 PM | 🔵 | SettingsDefaultsManager Structure and Configuration Schema | ~363 |
| #22858 | 2:28 PM | 🔄 | Removed Brittle save.md Validation from paths.ts | ~305 |
| #22852 | 2:26 PM | 🔵 | Located save.md Validation Logic in paths.ts | ~255 |
| #22805 | 2:01 PM | 🔵 | Early Settings Silent Failure Point Identified | ~363 |
| #22803 | " | 🔵 | Worker Utilities Current Implementation Review | ~390 |
| #22518 | 12:59 AM | 🔵 | Worker Utils StartWorker Implementation Uses Plugin Root for PM2 | ~311 |
### Dec 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #23831 | 11:15 PM | 🔵 | Current hook-error-handler.ts References PM2 | ~277 |
| #23830 | " | 🔵 | Current worker-utils.ts Implementation Uses PM2 | ~431 |
| #23812 | 10:49 PM | 🔵 | Current Worker Startup Uses PM2 and PowerShell; Phase 2 Will Replace | ~428 |
| #23811 | " | 🔵 | Existing Paths Configuration for Phase 2 Reference | ~297 |
### Dec 12, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #24405 | 8:12 PM | 🔵 | PM2 Legacy Cleanup Migration in Worker Startup | ~303 |
| #24400 | 8:10 PM | 🔵 | Retrieved PM2 Cleanup Implementation Details from Memory | ~355 |
| #24362 | 7:00 PM | 🟣 | Implemented PM2 Cleanup One-Time Marker in worker-utils.ts | ~376 |
| #24361 | " | ✅ | Added File System Imports to worker-utils.ts for PM2 Marker | ~263 |
| #24360 | " | 🔵 | worker-utils.ts Contains PM2 Cleanup Logic Without One-Time Marker | ~390 |
### Dec 13, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #25088 | 7:18 PM | 🟣 | Added CLAUDE_MEM_EMBEDDING_FUNCTION to Settings Interface | ~269 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #26790 | 11:38 PM | 🔴 | Fixed Undefined Port Variable in Error Logger | ~340 |
| #26789 | " | 🔴 | Fixed Undefined Port Variable in Error Logging | ~316 |
| #26788 | " | 🔵 | Worker Utils Already Imports Required Dependencies for Implementation | ~283 |
| #26787 | " | 🟣 | Phase 2 Complete: Pre-Restart Delay Added to Version Mismatch Handler | ~436 |
| #26786 | " | 🟣 | Phase 2 Complete: Pre-Restart Delay Added to ensureWorkerVersionMatches Function | ~420 |
| #26785 | 11:37 PM | 🟣 | Phase 1 Complete: PRE_RESTART_SETTLE_DELAY Constant Added to Hook Timeouts | ~351 |
| #26784 | " | 🟣 | Phase 1 Complete: PRE_RESTART_SETTLE_DELAY Constant Added to HOOK_TIMEOUTS | ~370 |
| #26783 | " | 🔵 | Hook Constants File Defines Timeout Values and Platform Multiplier | ~452 |
| #26782 | " | 🔵 | hook-constants.ts Defines Timeout Constants With Windows Platform Multiplier | ~418 |
| #26766 | 11:30 PM | ⚖️ | Root Cause Identified: Missing Post-Install Worker Restart Trigger in Plugin Update Flow | ~604 |
| #26765 | " | 🔵 | Explore Agent Confirms Root Cause: No Proactive Worker Restart After Plugin Updates | ~613 |
| #26732 | 11:25 PM | 🔵 | Worker Utils Implements Version Mismatch Detection and Auto-Restart | ~516 |
| #26731 | 11:24 PM | 🔵 | ensureWorkerRunning Implementation Shows 2.5 Second Startup Wait With Version Check | ~522 |
| #25695 | 4:27 PM | 🟣 | Added comprehensive error logging to transcript parser for debugging message extraction failures | ~473 |
| #25693 | 4:24 PM | 🔵 | Transcript parser extracts messages from JSONL file by scanning backwards for role-specific entries | ~491 |
### Dec 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #28464 | 4:25 PM | 🔵 | Platform-Adjusted Hook Timeout Configuration | ~468 |
| #28461 | " | 🔵 | Dual ESM/CJS Path Resolution System | ~479 |
| #28452 | 4:23 PM | 🔵 | Worker Version Matching and Auto-Restart System | ~510 |
### Dec 18, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #29797 | 7:09 PM | 🔵 | Settings System Uses CLAUDE_MEM_MODE for Mode Selection | ~353 |
| #29234 | 12:10 AM | 🔵 | Centralized Settings Management with Environment Defaults | ~394 |
### Dec 20, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #31086 | 7:59 PM | 🔵 | Transcript Parser Extracts Messages from JSONL Hook Files | ~327 |
| #30939 | 6:57 PM | 🔵 | Worker Utils File Examined for Error Handling Inconsistency | ~393 |
| #30855 | 6:22 PM | 🔵 | Transcript Parser Content Format Handling Examined | ~406 |
### Dec 25, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32616 | 8:43 PM | 🔵 | Comprehensive analysis of "enable billing" setting and its impact on rate limiting | ~533 |
| #32538 | 7:28 PM | ✅ | Set default Gemini billing to disabled | ~164 |
### Jan 7, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #38175 | 7:26 PM | 🔵 | Complete Claude-Mem Hook Output Architecture Documented | ~530 |
</claude-mem-context>

View File

@@ -75,6 +75,10 @@ export const VECTOR_DB_DIR = join(DATA_DIR, 'vector-db');
// Sessions here won't appear in user's `claude --resume` for their actual projects
export const OBSERVER_SESSIONS_DIR = join(DATA_DIR, 'observer-sessions');
// Project name assigned to observer sessions (basename of OBSERVER_SESSIONS_DIR).
// UI queries filter this out so internal worker sessions don't pollute project lists.
export const OBSERVER_SESSIONS_PROJECT = basename(OBSERVER_SESSIONS_DIR);
// Claude integration paths
export const CLAUDE_SETTINGS_PATH = join(CLAUDE_CONFIG_DIR, 'settings.json');
export const CLAUDE_COMMANDS_DIR = join(CLAUDE_CONFIG_DIR, 'commands');

View File

@@ -1130,6 +1130,19 @@
color: var(--color-text-muted);
}
/* Merged-into-parent provenance badge */
/* Rendered on observation cards when merged_into_project is set, marking a
   row adopted by a parent project after a worktree merge. Reuses the type
   badge's background/border tokens; muted text + reduced opacity keep it
   visually subordinate to the primary type and project badges. */
.card-merged-badge {
  padding: 1px 6px;
  border-radius: 3px;
  font-size: 9px;
  font-weight: 500;
  letter-spacing: 0.02em;
  color: var(--color-text-muted);
  background: var(--color-type-badge-bg);
  border: 1px solid var(--color-border-primary);
  opacity: 0.85;
}
.summary-card {
border-color: var(--color-border-summary);
background: var(--color-bg-summary);

View File

@@ -56,6 +56,11 @@ export function ObservationCard({ observation }: ObservationCardProps) {
{observation.platform_source || 'claude'}
</span>
<span className="card-project">{observation.project}</span>
{observation.merged_into_project && (
<span className="card-merged-badge" title={`Merged into ${observation.merged_into_project}`}>
merged {observation.merged_into_project}
</span>
)}
</div>
<div className="view-mode-toggles">
{hasFactsContent && (

View File

@@ -1,9 +0,0 @@
<claude-mem-context>
# Recent Activity
### Dec 26, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #32982 | 11:04 PM | 🔵 | Read default settings configuration file | ~233 |
</claude-mem-context>

View File

@@ -2,6 +2,7 @@ export interface Observation {
id: number;
memory_session_id: string;
project: string;
merged_into_project?: string | null;
platform_source: string;
type: string;
title: string | null;

View File

@@ -1,58 +0,0 @@
<claude-mem-context>
# Recent Activity
### Nov 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #4035 | 10:24 PM | 🔵 | logger.ts file exists but is empty | ~220 |
### Nov 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6521 | 5:43 PM | 🔵 | Code Review: Enhanced HTTP Logging and Double Entries Bug Fix | ~482 |
### Nov 17, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #10019 | 12:14 AM | 🔵 | TranscriptParser Utility: JSONL Parsing with Type-Safe Entry Filtering | ~569 |
### Nov 23, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #14626 | 6:25 PM | 🔵 | Stop Hook Summary Not in Transcript Validator Schema | ~359 |
### Nov 28, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #17238 | 11:34 PM | 🔵 | Existing TranscriptParser TypeScript implementation handles nested message structure | ~493 |
### Dec 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20407 | 7:20 PM | 🔵 | Tag stripping utilities implement dual-tag privacy system with ReDoS protection | ~415 |
### Dec 8, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #22310 | 9:46 PM | 🟣 | Complete Hook Lifecycle Documentation Generated | ~603 |
| #22306 | 9:45 PM | 🔵 | Dual-Tag Privacy System with ReDoS Protection | ~461 |
### Dec 14, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #25691 | 4:24 PM | 🔵 | happy_path_error__with_fallback utility logs errors to silent.log and returns fallback values | ~460 |
### Dec 20, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #30883 | 6:38 PM | 🔵 | Tag-Stripping DRY Violation Analysis | ~152 |
</claude-mem-context>

View File

@@ -58,21 +58,26 @@ export function getProjectName(cwd: string | null | undefined): string {
* Project context with worktree awareness
*/
export interface ProjectContext {
/** Canonical project name for writes/queries (parent repo in worktrees) */
/** Canonical project name for writes/queries; `parent/worktree` when in a worktree */
primary: string;
/** Parent project name if in a worktree, null otherwise */
parent: string | null;
/** True if currently in a worktree */
isWorktree: boolean;
/** All projects to query: [primary] for main repo, [parentRepo, worktreeName] for worktree */
/** Projects to query for reads. In a worktree: `[parent, composite]` so
* main-repo context flows into every worktree while sibling worktrees stay
* isolated. In the main repo: `[primary]`. Writes always use `.primary`. */
allProjects: string[];
}
/**
* Get project context with worktree detection.
*
* When in a worktree, returns both the worktree project name and parent project name
* for unified timeline queries.
* Each worktree is its own bucket. When in a worktree, `primary` is the
* composite `parent/worktree` (e.g. `claude-mem/dar-es-salaam`) so worktrees
* are uniquely identified and grouped under their parent project without
* mixing observations across them. In the main repo, `primary` is just the
* project basename.
*
* @param cwd - Current working directory (absolute path)
* @returns ProjectContext with worktree info
@@ -88,14 +93,12 @@ export function getProjectContext(cwd: string | null | undefined): ProjectContex
const worktreeInfo = detectWorktree(expandedCwd);
if (worktreeInfo.isWorktree && worktreeInfo.parentProjectName) {
// In a worktree: use parent project name as primary so observations
// are stored under the same project as the main repo (#1081, #1500, #1819)
const allProjects = Array.from(new Set([worktreeInfo.parentProjectName, cwdProjectName]));
const composite = `${worktreeInfo.parentProjectName}/${cwdProjectName}`;
return {
primary: worktreeInfo.parentProjectName,
primary: composite,
parent: worktreeInfo.parentProjectName,
isWorktree: true,
allProjects
allProjects: [worktreeInfo.parentProjectName, composite]
};
}

View File

@@ -1,58 +0,0 @@
<claude-mem-context>
# Recent Activity
### Nov 10, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #6358 | 3:14 PM | 🔵 | SDK Agent Spatial Awareness Implementation | ~309 |
### Nov 21, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #13289 | 2:20 PM | 🟣 | Comprehensive Test Suite for Transcript Transformation | ~320 |
### Nov 23, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #14617 | 6:15 PM | 🟣 | Test Suite Successfully Passing - All 8 Tests Green | ~498 |
| #14615 | 6:14 PM | 🟣 | YAGNI-Focused Test Suite for Transcript Transformation | ~457 |
### Dec 5, 2025
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #20732 | 9:07 PM | 🔵 | Smart Install Version Marker Tests for Upgrade Detection | ~452 |
| #20399 | 7:17 PM | 🔵 | Smart install tests validate version tracking with backward compatibility | ~311 |
| #20392 | 7:15 PM | 🔵 | Memory tag stripping tests validate dual-tag system for JSON context filtering | ~404 |
| #20391 | " | 🔵 | User prompt tag stripping tests validate privacy controls for memory exclusion | ~182 |
### Jan 3, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36663 | 11:06 PM | ✅ | Third Validation Test Updated: Resume Safety Check Now Uses NULL Comparison | ~417 |
| #36662 | " | ✅ | Second Validation Test Updated: Post-Capture Check Now Uses NULL Comparison | ~418 |
| #36661 | 11:05 PM | ✅ | First Validation Test Updated: Placeholder Detection Now Checks for NULL | ~482 |
| #36660 | " | ✅ | Updated Session ID Usage Validation Test Header to Reflect NULL-Based Architecture | ~588 |
| #36659 | " | ✅ | Sixth Test Fix: Updated Multi-Observation Test to Use Memory Session ID | ~486 |
| #36658 | " | ✅ | Fifth Test Fix: Updated storeSummary Tests to Use Actual Memory Session ID After Capture | ~555 |
| #36657 | 11:04 PM | ✅ | Fourth Test Fix: Updated storeObservation Tests to Use Actual Memory Session ID After Capture | ~547 |
| #36656 | " | ✅ | Third Test Fix: Updated getSessionById Test to Expect NULL for Uncaptured Memory Session ID | ~436 |
| #36655 | " | ✅ | Second Test Fix: Updated updateMemorySessionId Test to Expect NULL Before Update | ~395 |
| #36654 | " | ✅ | First Test Fix: Updated Memory Session ID Initialization Test to Expect NULL | ~426 |
| #36650 | 11:02 PM | 🔵 | Phase 1 Analysis Reveals Implementation-Test Mismatch on NULL vs Placeholder Initialization | ~687 |
| #36648 | " | 🔵 | Session ID Refactor Test Suite Documents Database Migration 17 and Dual ID System | ~651 |
| #36647 | 11:01 PM | 🔵 | SessionStore Test Suite Validates Prompt Counting and Timestamp Override Features | ~506 |
| #36646 | " | 🔵 | Session ID Architecture Revealed Through Test File Analysis | ~611 |
### Jan 4, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36858 | 1:50 AM | 🟣 | Phase 1 Implementation Completed via Subagent | ~499 |
| #36854 | 1:49 AM | 🟣 | gemini-3-flash Model Tests Added to GeminiAgent Test Suite | ~470 |
| #36851 | " | 🔵 | GeminiAgent Test Structure Analyzed | ~565 |
</claude-mem-context>

View File

@@ -1,13 +0,0 @@
<claude-mem-context>
# Recent Activity
### Jan 4, 2026
| ID | Time | T | Title | Read |
|----|------|---|-------|------|
| #36870 | 1:54 AM | 🟣 | Phase 2 Implementation Completed via Subagent | ~572 |
| #36866 | 1:53 AM | 🔄 | WMIC Test Refactored to Use Direct Logic Testing | ~533 |
| #36865 | 1:52 AM | ✅ | WMIC Test File Updated with Improved Mock Implementation | ~370 |
| #36863 | 1:51 AM | 🟣 | WMIC Parsing Test File Created | ~581 |
| #36861 | " | 🔵 | Existing ProcessManager Test File Structure Analyzed | ~516 |
</claude-mem-context>

View File

@@ -1,3 +0,0 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -97,7 +97,7 @@ describe('getProjectContext', () => {
expect(ctx.parent).toBeNull();
});
describe('worktree regression (#1081, #1500, #1819)', () => {
describe('worktree isolation', () => {
let tmp: string;
let mainRepo: string;
let worktreeCheckout: string;
@@ -125,21 +125,18 @@ describe('getProjectContext', () => {
rmSync(tmp, { recursive: true, force: true });
});
it('uses parent project name as primary when in a worktree', () => {
it('uses parent/worktree composite as primary when in a worktree', () => {
const ctx = getProjectContext(worktreeCheckout);
expect(ctx.isWorktree).toBe(true);
expect(ctx.primary).toBe('main-repo');
expect(ctx.primary).toBe('main-repo/my-worktree');
expect(ctx.parent).toBe('main-repo');
expect(ctx.allProjects).toEqual(['main-repo', 'my-worktree']);
expect(ctx.allProjects).toEqual(['main-repo', 'main-repo/my-worktree']);
});
it('write-path call sites resolve to parent project in worktrees', () => {
// Mirrors the pattern used by session-init.ts and SessionRoutes.ts:
// const project = getProjectContext(cwd).primary;
// This must resolve to the parent repo, not the worktree name,
// so observations are stored under the correct project.
it('write-path call sites resolve to composite name in worktrees', () => {
const project = getProjectContext(worktreeCheckout).primary;
expect(project).toBe('main-repo');
expect(project).toBe('main-repo/my-worktree');
expect(project).not.toBe('main-repo');
expect(project).not.toBe('my-worktree');
});
});