mirror of
https://github.com/thedotmack/claude-mem
synced 2026-04-25 17:15:04 +02:00
Systematic cleanup of every error handling anti-pattern detected by the automated scanner. 289 issues fixed via code changes, 12 approved with specific technical justifications. Changes across 90 files: - GENERIC_CATCH (141): Added instanceof Error type discrimination - LARGE_TRY_BLOCK (82): Extracted helper methods to narrow try scope to ≤10 lines - NO_LOGGING_IN_CATCH (65): Added logger/console calls for error visibility - CATCH_AND_CONTINUE_CRITICAL_PATH (10): Added throw/return or approved overrides - ERROR_STRING_MATCHING (2): Approved with rationale (no typed error classes) - ERROR_MESSAGE_GUESSING (1): Replaced chained .includes() with documented pattern array - PROMISE_CATCH_NO_LOGGING (1): Added logging to .catch() handler Also fixes a detector bug where nested try/catch inside a catch block corrupted brace-depth tracking, causing false positives. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1019 lines
43 KiB
TypeScript
1019 lines
43 KiB
TypeScript
import { Database } from 'bun:sqlite';
|
|
import { logger } from '../../../utils/logger.js';
|
|
import {
|
|
TableColumnInfo,
|
|
IndexInfo,
|
|
TableNameRow,
|
|
SchemaVersion
|
|
} from '../../../types/database.js';
|
|
import { DEFAULT_PLATFORM_SOURCE } from '../../../shared/platform-source.js';
|
|
|
|
/**
|
|
* MigrationRunner handles all database schema migrations
|
|
* Extracted from SessionStore to separate concerns
|
|
*/
|
|
export class MigrationRunner {
|
|
// The injected bun:sqlite handle; every migration below runs against this database.
constructor(private db: Database) {}
|
|
|
|
/**
 * Run all migrations in order
 * This is the only public method - all migrations are internal
 *
 * NOTE(review): call order is load-bearing — later migrations assume schema
 * produced by earlier ones (e.g. migration 7 copies prompt_number, which
 * migration 6 adds). Each migration is individually idempotent, so running
 * this repeatedly on the same database is safe.
 */
runAllMigrations(): void {
  this.initializeSchema();                          // v4:  core tables + indexes
  this.ensureWorkerPortColumn();                    // v5:  sdk_sessions.worker_port
  this.ensurePromptTrackingColumns();               // v6:  prompt_counter / prompt_number
  this.removeSessionSummariesUniqueConstraint();    // v7:  drop UNIQUE on summaries FK
  this.addObservationHierarchicalFields();          // v8:  title/subtitle/facts/...
  this.makeObservationsTextNullable();              // v9:  text becomes nullable
  this.createUserPromptsTable();                    // v10: user_prompts (+FTS5 if available)
  this.ensureDiscoveryTokensColumn();               // v11: discovery_tokens columns
  this.createPendingMessagesTable();                // v16: persistent work queue
  this.renameSessionIdColumns();                    // v17: semantic column renames
  this.repairSessionIdColumnRename();               // v19: deprecated no-op (kept for compat)
  this.addFailedAtEpochColumn();                    // v20: pending_messages.failed_at_epoch
  this.addOnUpdateCascadeToForeignKeys();           // v21: FK ON UPDATE CASCADE rebuild
  this.addObservationContentHashColumn();
  this.addSessionCustomTitleColumn();
  this.createObservationFeedbackTable();
  this.addSessionPlatformSourceColumn();
  this.ensureMergedIntoProjectColumns();
  this.addObservationSubagentColumns();
}
|
|
|
|
/**
 * Initialize database schema (migration004)
 *
 * ALWAYS creates core tables using CREATE TABLE IF NOT EXISTS — safe to run
 * regardless of schema_versions state. This fixes issue #979 where the old
 * DatabaseManager migration system (versions 1-7) shared the schema_versions
 * table, causing maxApplied > 0 and skipping core table creation entirely.
 */
private initializeSchema(): void {
  // Create schema_versions table if it doesn't exist.
  // This must exist before any other migration can record itself.
  this.db.run(`
    CREATE TABLE IF NOT EXISTS schema_versions (
      id INTEGER PRIMARY KEY,
      version INTEGER UNIQUE NOT NULL,
      applied_at TEXT NOT NULL
    )
  `);

  // Always create core tables — IF NOT EXISTS makes this idempotent.
  // sdk_sessions is the parent table; observations and session_summaries
  // reference sdk_sessions(memory_session_id) via FK with cascading behavior.
  this.db.run(`
    CREATE TABLE IF NOT EXISTS sdk_sessions (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      content_session_id TEXT UNIQUE NOT NULL,
      memory_session_id TEXT UNIQUE,
      project TEXT NOT NULL,
      platform_source TEXT NOT NULL DEFAULT 'claude',
      user_prompt TEXT,
      started_at TEXT NOT NULL,
      started_at_epoch INTEGER NOT NULL,
      completed_at TEXT,
      completed_at_epoch INTEGER,
      status TEXT CHECK(status IN ('active', 'completed', 'failed')) NOT NULL DEFAULT 'active'
    );

    CREATE INDEX IF NOT EXISTS idx_sdk_sessions_claude_id ON sdk_sessions(content_session_id);
    CREATE INDEX IF NOT EXISTS idx_sdk_sessions_sdk_id ON sdk_sessions(memory_session_id);
    CREATE INDEX IF NOT EXISTS idx_sdk_sessions_project ON sdk_sessions(project);
    CREATE INDEX IF NOT EXISTS idx_sdk_sessions_status ON sdk_sessions(status);
    CREATE INDEX IF NOT EXISTS idx_sdk_sessions_started ON sdk_sessions(started_at_epoch DESC);

    CREATE TABLE IF NOT EXISTS observations (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      memory_session_id TEXT NOT NULL,
      project TEXT NOT NULL,
      text TEXT NOT NULL,
      type TEXT NOT NULL,
      created_at TEXT NOT NULL,
      created_at_epoch INTEGER NOT NULL,
      FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_observations_sdk_session ON observations(memory_session_id);
    CREATE INDEX IF NOT EXISTS idx_observations_project ON observations(project);
    CREATE INDEX IF NOT EXISTS idx_observations_type ON observations(type);
    CREATE INDEX IF NOT EXISTS idx_observations_created ON observations(created_at_epoch DESC);

    CREATE TABLE IF NOT EXISTS session_summaries (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      memory_session_id TEXT UNIQUE NOT NULL,
      project TEXT NOT NULL,
      request TEXT,
      investigated TEXT,
      learned TEXT,
      completed TEXT,
      next_steps TEXT,
      files_read TEXT,
      files_edited TEXT,
      notes TEXT,
      created_at TEXT NOT NULL,
      created_at_epoch INTEGER NOT NULL,
      FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_session_summaries_sdk_session ON session_summaries(memory_session_id);
    CREATE INDEX IF NOT EXISTS idx_session_summaries_project ON session_summaries(project);
    CREATE INDEX IF NOT EXISTS idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
  `);

  // Record migration004 as applied (OR IGNORE handles re-runs safely)
  this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(4, new Date().toISOString());
}
|
|
|
|
/**
|
|
* Ensure worker_port column exists (migration 5)
|
|
*
|
|
* NOTE: Version 5 conflicts with old DatabaseManager migration005 (which drops orphaned tables).
|
|
* We check actual column state rather than relying solely on version tracking.
|
|
*/
|
|
private ensureWorkerPortColumn(): void {
|
|
// Check actual column existence — don't rely on version tracking alone (issue #979)
|
|
const tableInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
|
|
const hasWorkerPort = tableInfo.some(col => col.name === 'worker_port');
|
|
|
|
if (!hasWorkerPort) {
|
|
this.db.run('ALTER TABLE sdk_sessions ADD COLUMN worker_port INTEGER');
|
|
logger.debug('DB', 'Added worker_port column to sdk_sessions table');
|
|
}
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(5, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Ensure prompt tracking columns exist (migration 6)
|
|
*
|
|
* NOTE: Version 6 conflicts with old DatabaseManager migration006 (which creates FTS5 tables).
|
|
* We check actual column state rather than relying solely on version tracking.
|
|
*/
|
|
private ensurePromptTrackingColumns(): void {
|
|
// Check actual column existence — don't rely on version tracking alone (issue #979)
|
|
// Check sdk_sessions for prompt_counter
|
|
const sessionsInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
|
|
const hasPromptCounter = sessionsInfo.some(col => col.name === 'prompt_counter');
|
|
|
|
if (!hasPromptCounter) {
|
|
this.db.run('ALTER TABLE sdk_sessions ADD COLUMN prompt_counter INTEGER DEFAULT 0');
|
|
logger.debug('DB', 'Added prompt_counter column to sdk_sessions table');
|
|
}
|
|
|
|
// Check observations for prompt_number
|
|
const observationsInfo = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const obsHasPromptNumber = observationsInfo.some(col => col.name === 'prompt_number');
|
|
|
|
if (!obsHasPromptNumber) {
|
|
this.db.run('ALTER TABLE observations ADD COLUMN prompt_number INTEGER');
|
|
logger.debug('DB', 'Added prompt_number column to observations table');
|
|
}
|
|
|
|
// Check session_summaries for prompt_number
|
|
const summariesInfo = this.db.query('PRAGMA table_info(session_summaries)').all() as TableColumnInfo[];
|
|
const sumHasPromptNumber = summariesInfo.some(col => col.name === 'prompt_number');
|
|
|
|
if (!sumHasPromptNumber) {
|
|
this.db.run('ALTER TABLE session_summaries ADD COLUMN prompt_number INTEGER');
|
|
logger.debug('DB', 'Added prompt_number column to session_summaries table');
|
|
}
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(6, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Remove UNIQUE constraint from session_summaries.memory_session_id (migration 7)
|
|
*
|
|
* NOTE: Version 7 conflicts with old DatabaseManager migration007 (which adds discovery_tokens).
|
|
* We check actual constraint state rather than relying solely on version tracking.
|
|
*/
|
|
private removeSessionSummariesUniqueConstraint(): void {
|
|
// Check actual constraint state — don't rely on version tracking alone (issue #979)
|
|
const summariesIndexes = this.db.query('PRAGMA index_list(session_summaries)').all() as IndexInfo[];
|
|
const hasUniqueConstraint = summariesIndexes.some(idx => idx.unique === 1 && idx.origin !== 'pk');
|
|
|
|
if (!hasUniqueConstraint) {
|
|
// Already migrated (no constraint exists)
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(7, new Date().toISOString());
|
|
return;
|
|
}
|
|
|
|
logger.debug('DB', 'Removing UNIQUE constraint from session_summaries.memory_session_id');
|
|
|
|
// Begin transaction
|
|
this.db.run('BEGIN TRANSACTION');
|
|
|
|
// Clean up leftover temp table from a previously-crashed run
|
|
this.db.run('DROP TABLE IF EXISTS session_summaries_new');
|
|
|
|
// Create new table without UNIQUE constraint
|
|
this.db.run(`
|
|
CREATE TABLE session_summaries_new (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
memory_session_id TEXT NOT NULL,
|
|
project TEXT NOT NULL,
|
|
request TEXT,
|
|
investigated TEXT,
|
|
learned TEXT,
|
|
completed TEXT,
|
|
next_steps TEXT,
|
|
files_read TEXT,
|
|
files_edited TEXT,
|
|
notes TEXT,
|
|
prompt_number INTEGER,
|
|
created_at TEXT NOT NULL,
|
|
created_at_epoch INTEGER NOT NULL,
|
|
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
|
|
)
|
|
`);
|
|
|
|
// Copy data from old table
|
|
this.db.run(`
|
|
INSERT INTO session_summaries_new
|
|
SELECT id, memory_session_id, project, request, investigated, learned,
|
|
completed, next_steps, files_read, files_edited, notes,
|
|
prompt_number, created_at, created_at_epoch
|
|
FROM session_summaries
|
|
`);
|
|
|
|
// Drop old table
|
|
this.db.run('DROP TABLE session_summaries');
|
|
|
|
// Rename new table
|
|
this.db.run('ALTER TABLE session_summaries_new RENAME TO session_summaries');
|
|
|
|
// Recreate indexes
|
|
this.db.run(`
|
|
CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(memory_session_id);
|
|
CREATE INDEX idx_session_summaries_project ON session_summaries(project);
|
|
CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
|
|
`);
|
|
|
|
// Commit transaction
|
|
this.db.run('COMMIT');
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(7, new Date().toISOString());
|
|
|
|
logger.debug('DB', 'Successfully removed UNIQUE constraint from session_summaries.memory_session_id');
|
|
}
|
|
|
|
/**
|
|
* Add hierarchical fields to observations table (migration 8)
|
|
*/
|
|
private addObservationHierarchicalFields(): void {
|
|
// Check if migration already applied
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(8) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
// Check if new fields already exist
|
|
const tableInfo = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const hasTitle = tableInfo.some(col => col.name === 'title');
|
|
|
|
if (hasTitle) {
|
|
// Already migrated
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(8, new Date().toISOString());
|
|
return;
|
|
}
|
|
|
|
logger.debug('DB', 'Adding hierarchical fields to observations table');
|
|
|
|
// Add new columns
|
|
this.db.run(`
|
|
ALTER TABLE observations ADD COLUMN title TEXT;
|
|
ALTER TABLE observations ADD COLUMN subtitle TEXT;
|
|
ALTER TABLE observations ADD COLUMN facts TEXT;
|
|
ALTER TABLE observations ADD COLUMN narrative TEXT;
|
|
ALTER TABLE observations ADD COLUMN concepts TEXT;
|
|
ALTER TABLE observations ADD COLUMN files_read TEXT;
|
|
ALTER TABLE observations ADD COLUMN files_modified TEXT;
|
|
`);
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(8, new Date().toISOString());
|
|
|
|
logger.debug('DB', 'Successfully added hierarchical fields to observations table');
|
|
}
|
|
|
|
/**
|
|
* Make observations.text nullable (migration 9)
|
|
* The text field is deprecated in favor of structured fields (title, subtitle, narrative, etc.)
|
|
*/
|
|
private makeObservationsTextNullable(): void {
|
|
// Check if migration already applied
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(9) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
// Check if text column is already nullable
|
|
const tableInfo = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const textColumn = tableInfo.find(col => col.name === 'text');
|
|
|
|
if (!textColumn || textColumn.notnull === 0) {
|
|
// Already migrated or text column doesn't exist
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(9, new Date().toISOString());
|
|
return;
|
|
}
|
|
|
|
logger.debug('DB', 'Making observations.text nullable');
|
|
|
|
// Begin transaction
|
|
this.db.run('BEGIN TRANSACTION');
|
|
|
|
// Clean up leftover temp table from a previously-crashed run
|
|
this.db.run('DROP TABLE IF EXISTS observations_new');
|
|
|
|
// Create new table with text as nullable
|
|
this.db.run(`
|
|
CREATE TABLE observations_new (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
memory_session_id TEXT NOT NULL,
|
|
project TEXT NOT NULL,
|
|
text TEXT,
|
|
type TEXT NOT NULL,
|
|
title TEXT,
|
|
subtitle TEXT,
|
|
facts TEXT,
|
|
narrative TEXT,
|
|
concepts TEXT,
|
|
files_read TEXT,
|
|
files_modified TEXT,
|
|
prompt_number INTEGER,
|
|
created_at TEXT NOT NULL,
|
|
created_at_epoch INTEGER NOT NULL,
|
|
FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE
|
|
)
|
|
`);
|
|
|
|
// Copy data from old table (all existing columns)
|
|
this.db.run(`
|
|
INSERT INTO observations_new
|
|
SELECT id, memory_session_id, project, text, type, title, subtitle, facts,
|
|
narrative, concepts, files_read, files_modified, prompt_number,
|
|
created_at, created_at_epoch
|
|
FROM observations
|
|
`);
|
|
|
|
// Drop old table
|
|
this.db.run('DROP TABLE observations');
|
|
|
|
// Rename new table
|
|
this.db.run('ALTER TABLE observations_new RENAME TO observations');
|
|
|
|
// Recreate indexes
|
|
this.db.run(`
|
|
CREATE INDEX idx_observations_sdk_session ON observations(memory_session_id);
|
|
CREATE INDEX idx_observations_project ON observations(project);
|
|
CREATE INDEX idx_observations_type ON observations(type);
|
|
CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
|
|
`);
|
|
|
|
// Commit transaction
|
|
this.db.run('COMMIT');
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(9, new Date().toISOString());
|
|
|
|
logger.debug('DB', 'Successfully made observations.text nullable');
|
|
}
|
|
|
|
/**
|
|
* Create user_prompts table with FTS5 support (migration 10)
|
|
*/
|
|
private createUserPromptsTable(): void {
|
|
// Check if migration already applied
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(10) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
// Check if table already exists
|
|
const tableInfo = this.db.query('PRAGMA table_info(user_prompts)').all() as TableColumnInfo[];
|
|
if (tableInfo.length > 0) {
|
|
// Already migrated
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(10, new Date().toISOString());
|
|
return;
|
|
}
|
|
|
|
logger.debug('DB', 'Creating user_prompts table with FTS5 support');
|
|
|
|
// Begin transaction
|
|
this.db.run('BEGIN TRANSACTION');
|
|
|
|
// Create main table (using content_session_id since memory_session_id is set asynchronously by worker)
|
|
this.db.run(`
|
|
CREATE TABLE user_prompts (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
content_session_id TEXT NOT NULL,
|
|
prompt_number INTEGER NOT NULL,
|
|
prompt_text TEXT NOT NULL,
|
|
created_at TEXT NOT NULL,
|
|
created_at_epoch INTEGER NOT NULL,
|
|
FOREIGN KEY(content_session_id) REFERENCES sdk_sessions(content_session_id) ON DELETE CASCADE
|
|
);
|
|
|
|
CREATE INDEX idx_user_prompts_claude_session ON user_prompts(content_session_id);
|
|
CREATE INDEX idx_user_prompts_created ON user_prompts(created_at_epoch DESC);
|
|
CREATE INDEX idx_user_prompts_prompt_number ON user_prompts(prompt_number);
|
|
CREATE INDEX idx_user_prompts_lookup ON user_prompts(content_session_id, prompt_number);
|
|
`);
|
|
|
|
// Create FTS5 virtual table — skip if FTS5 is unavailable (e.g., Bun on Windows #791).
|
|
// The user_prompts table itself is still created; only FTS indexing is skipped.
|
|
try {
|
|
this.createUserPromptsFTS();
|
|
} catch (ftsError) {
|
|
logger.warn('DB', 'FTS5 not available — user_prompts_fts skipped (search uses ChromaDB)', {}, ftsError instanceof Error ? ftsError : undefined);
|
|
}
|
|
|
|
// Commit transaction
|
|
this.db.run('COMMIT');
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(10, new Date().toISOString());
|
|
|
|
logger.debug('DB', 'Successfully created user_prompts table');
|
|
}
|
|
|
|
/**
 * Create FTS5 virtual table and sync triggers for user_prompts.
 * Extracted from createUserPromptsTable to keep try block small.
 *
 * Uses FTS5 "external content" mode (content='user_prompts'), so the FTS
 * table stores only the index; the triggers keep it in sync with the base
 * table. Throws if the SQLite build lacks FTS5 — the caller catches that.
 */
private createUserPromptsFTS(): void {
  this.db.run(`
    CREATE VIRTUAL TABLE user_prompts_fts USING fts5(
      prompt_text,
      content='user_prompts',
      content_rowid='id'
    );
  `);

  // External-content FTS5 deletions use the special
  // INSERT INTO fts(fts, rowid, ...) VALUES('delete', ...) form;
  // UPDATE is modeled as delete-then-insert.
  this.db.run(`
    CREATE TRIGGER user_prompts_ai AFTER INSERT ON user_prompts BEGIN
      INSERT INTO user_prompts_fts(rowid, prompt_text)
      VALUES (new.id, new.prompt_text);
    END;

    CREATE TRIGGER user_prompts_ad AFTER DELETE ON user_prompts BEGIN
      INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
      VALUES('delete', old.id, old.prompt_text);
    END;

    CREATE TRIGGER user_prompts_au AFTER UPDATE ON user_prompts BEGIN
      INSERT INTO user_prompts_fts(user_prompts_fts, rowid, prompt_text)
      VALUES('delete', old.id, old.prompt_text);
      INSERT INTO user_prompts_fts(rowid, prompt_text)
      VALUES (new.id, new.prompt_text);
    END;
  `);
}
|
|
|
|
/**
|
|
* Ensure discovery_tokens column exists (migration 11)
|
|
* CRITICAL: This migration was incorrectly using version 7 (which was already taken by removeSessionSummariesUniqueConstraint)
|
|
* The duplicate version number may have caused migration tracking issues in some databases
|
|
*/
|
|
private ensureDiscoveryTokensColumn(): void {
|
|
// Check if migration already applied to avoid unnecessary re-runs
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(11) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
// Check if discovery_tokens column exists in observations table
|
|
const observationsInfo = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const obsHasDiscoveryTokens = observationsInfo.some(col => col.name === 'discovery_tokens');
|
|
|
|
if (!obsHasDiscoveryTokens) {
|
|
this.db.run('ALTER TABLE observations ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
|
|
logger.debug('DB', 'Added discovery_tokens column to observations table');
|
|
}
|
|
|
|
// Check if discovery_tokens column exists in session_summaries table
|
|
const summariesInfo = this.db.query('PRAGMA table_info(session_summaries)').all() as TableColumnInfo[];
|
|
const sumHasDiscoveryTokens = summariesInfo.some(col => col.name === 'discovery_tokens');
|
|
|
|
if (!sumHasDiscoveryTokens) {
|
|
this.db.run('ALTER TABLE session_summaries ADD COLUMN discovery_tokens INTEGER DEFAULT 0');
|
|
logger.debug('DB', 'Added discovery_tokens column to session_summaries table');
|
|
}
|
|
|
|
// Record migration only after successful column verification/addition
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(11, new Date().toISOString());
|
|
}
|
|
|
|
/**
 * Create pending_messages table for persistent work queue (migration 16)
 * Messages are persisted before processing and deleted after success.
 * Enables recovery from SDK hangs and worker crashes.
 */
private createPendingMessagesTable(): void {
  // Check if migration already applied
  const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(16) as SchemaVersion | undefined;
  if (applied) return;

  // Check if table already exists (e.g. version row lost but table present)
  const tables = this.db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='pending_messages'").all() as TableNameRow[];
  if (tables.length > 0) {
    this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(16, new Date().toISOString());
    return;
  }

  logger.debug('DB', 'Creating pending_messages table');

  // Queue rows move through status pending -> processing -> processed/failed;
  // retry_count and the *_epoch timestamps support crash recovery.
  this.db.run(`
    CREATE TABLE pending_messages (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      session_db_id INTEGER NOT NULL,
      content_session_id TEXT NOT NULL,
      message_type TEXT NOT NULL CHECK(message_type IN ('observation', 'summarize')),
      tool_name TEXT,
      tool_input TEXT,
      tool_response TEXT,
      cwd TEXT,
      last_user_message TEXT,
      last_assistant_message TEXT,
      prompt_number INTEGER,
      status TEXT NOT NULL DEFAULT 'pending' CHECK(status IN ('pending', 'processing', 'processed', 'failed')),
      retry_count INTEGER NOT NULL DEFAULT 0,
      created_at_epoch INTEGER NOT NULL,
      started_processing_at_epoch INTEGER,
      completed_at_epoch INTEGER,
      FOREIGN KEY (session_db_id) REFERENCES sdk_sessions(id) ON DELETE CASCADE
    )
  `);

  this.db.run('CREATE INDEX IF NOT EXISTS idx_pending_messages_session ON pending_messages(session_db_id)');
  this.db.run('CREATE INDEX IF NOT EXISTS idx_pending_messages_status ON pending_messages(status)');
  this.db.run('CREATE INDEX IF NOT EXISTS idx_pending_messages_claude_session ON pending_messages(content_session_id)');

  this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(16, new Date().toISOString());

  logger.debug('DB', 'pending_messages table created successfully');
}
|
|
|
|
/**
|
|
* Rename session ID columns for semantic clarity (migration 17)
|
|
* - claude_session_id -> content_session_id (user's observed session)
|
|
* - sdk_session_id -> memory_session_id (memory agent's session for resume)
|
|
*
|
|
* IDEMPOTENT: Checks each table individually before renaming.
|
|
* This handles databases in any intermediate state (partial migration, fresh install, etc.)
|
|
*/
|
|
private renameSessionIdColumns(): void {
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(17) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
logger.debug('DB', 'Checking session ID columns for semantic clarity rename');
|
|
|
|
let renamesPerformed = 0;
|
|
|
|
// Helper to safely rename a column if it exists
|
|
const safeRenameColumn = (table: string, oldCol: string, newCol: string): boolean => {
|
|
const tableInfo = this.db.query(`PRAGMA table_info(${table})`).all() as TableColumnInfo[];
|
|
const hasOldCol = tableInfo.some(col => col.name === oldCol);
|
|
const hasNewCol = tableInfo.some(col => col.name === newCol);
|
|
|
|
if (hasNewCol) {
|
|
// Already renamed, nothing to do
|
|
return false;
|
|
}
|
|
|
|
if (hasOldCol) {
|
|
// SQLite 3.25+ supports ALTER TABLE RENAME COLUMN
|
|
this.db.run(`ALTER TABLE ${table} RENAME COLUMN ${oldCol} TO ${newCol}`);
|
|
logger.debug('DB', `Renamed ${table}.${oldCol} to ${newCol}`);
|
|
return true;
|
|
}
|
|
|
|
// Neither column exists - table might not exist or has different schema
|
|
logger.warn('DB', `Column ${oldCol} not found in ${table}, skipping rename`);
|
|
return false;
|
|
};
|
|
|
|
// Rename in sdk_sessions table
|
|
if (safeRenameColumn('sdk_sessions', 'claude_session_id', 'content_session_id')) renamesPerformed++;
|
|
if (safeRenameColumn('sdk_sessions', 'sdk_session_id', 'memory_session_id')) renamesPerformed++;
|
|
|
|
// Rename in pending_messages table
|
|
if (safeRenameColumn('pending_messages', 'claude_session_id', 'content_session_id')) renamesPerformed++;
|
|
|
|
// Rename in observations table
|
|
if (safeRenameColumn('observations', 'sdk_session_id', 'memory_session_id')) renamesPerformed++;
|
|
|
|
// Rename in session_summaries table
|
|
if (safeRenameColumn('session_summaries', 'sdk_session_id', 'memory_session_id')) renamesPerformed++;
|
|
|
|
// Rename in user_prompts table
|
|
if (safeRenameColumn('user_prompts', 'claude_session_id', 'content_session_id')) renamesPerformed++;
|
|
|
|
// Record migration
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(17, new Date().toISOString());
|
|
|
|
if (renamesPerformed > 0) {
|
|
logger.debug('DB', `Successfully renamed ${renamesPerformed} session ID columns`);
|
|
} else {
|
|
logger.debug('DB', 'No session ID column renames needed (already up to date)');
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Repair session ID column renames (migration 19)
|
|
* DEPRECATED: Migration 17 is now fully idempotent and handles all cases.
|
|
* This migration is kept for backwards compatibility but does nothing.
|
|
*/
|
|
private repairSessionIdColumnRename(): void {
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(19) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
// Migration 17 now handles all column rename cases idempotently.
|
|
// Just record this migration as applied.
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(19, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Add failed_at_epoch column to pending_messages (migration 20)
|
|
* Used by markSessionMessagesFailed() for error recovery tracking
|
|
*/
|
|
private addFailedAtEpochColumn(): void {
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(20) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
const tableInfo = this.db.query('PRAGMA table_info(pending_messages)').all() as TableColumnInfo[];
|
|
const hasColumn = tableInfo.some(col => col.name === 'failed_at_epoch');
|
|
|
|
if (!hasColumn) {
|
|
this.db.run('ALTER TABLE pending_messages ADD COLUMN failed_at_epoch INTEGER');
|
|
logger.debug('DB', 'Added failed_at_epoch column to pending_messages table');
|
|
}
|
|
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(20, new Date().toISOString());
|
|
}
|
|
|
|
/**
 * Add ON UPDATE CASCADE to FK constraints on observations and session_summaries (migration 21)
 *
 * Both tables have FK(memory_session_id) -> sdk_sessions(memory_session_id) with ON DELETE CASCADE
 * but missing ON UPDATE CASCADE. This causes FK constraint violations when code updates
 * sdk_sessions.memory_session_id while child rows still reference the old value.
 *
 * SQLite doesn't support ALTER TABLE for FK changes, so we recreate both tables.
 */
private addOnUpdateCascadeToForeignKeys(): void {
  const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(21) as SchemaVersion | undefined;
  if (applied) return;

  logger.debug('DB', 'Adding ON UPDATE CASCADE to FK constraints on observations and session_summaries');

  // PRAGMA foreign_keys must be set outside a transaction.
  // FK enforcement is disabled during the rebuild so dropping/renaming the
  // tables does not trip constraint checks, then re-enabled on every path.
  this.db.run('PRAGMA foreign_keys = OFF');
  this.db.run('BEGIN TRANSACTION');

  try {
    // Rebuild both child tables with the corrected FK clause.
    this.recreateObservationsWithUpdateCascade();
    this.recreateSessionSummariesWithUpdateCascade();

    // Record the version inside the transaction so a crash can't mark an
    // unapplied migration as done.
    this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(21, new Date().toISOString());
    this.db.run('COMMIT');
    this.db.run('PRAGMA foreign_keys = ON');

    logger.debug('DB', 'Successfully added ON UPDATE CASCADE to FK constraints');
  } catch (error) {
    // Undo the partial rebuild and restore FK enforcement before rethrowing.
    this.db.run('ROLLBACK');
    this.db.run('PRAGMA foreign_keys = ON');
    if (error instanceof Error) {
      throw error;
    }
    throw new Error(`Migration 21 failed: ${String(error)}`);
  }
}
|
|
|
|
/**
 * Recreate observations table with ON UPDATE CASCADE FK constraint.
 * Called within a transaction by addOnUpdateCascadeToForeignKeys.
 *
 * Standard SQLite table rebuild: drop dependent triggers, create replacement
 * table, copy all rows, swap tables, recreate indexes, and (when the FTS
 * shadow table exists) recreate the FTS sync triggers.
 */
private recreateObservationsWithUpdateCascade(): void {
  // Drop FTS triggers first (they reference the observations table)
  this.db.run('DROP TRIGGER IF EXISTS observations_ai');
  this.db.run('DROP TRIGGER IF EXISTS observations_ad');
  this.db.run('DROP TRIGGER IF EXISTS observations_au');

  // Clean up leftover temp table from a previously-crashed run
  this.db.run('DROP TABLE IF EXISTS observations_new');

  // Same schema as the current observations table, but with
  // ON UPDATE CASCADE added to the FK clause.
  this.db.run(`
    CREATE TABLE observations_new (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      memory_session_id TEXT NOT NULL,
      project TEXT NOT NULL,
      text TEXT,
      type TEXT NOT NULL,
      title TEXT,
      subtitle TEXT,
      facts TEXT,
      narrative TEXT,
      concepts TEXT,
      files_read TEXT,
      files_modified TEXT,
      prompt_number INTEGER,
      discovery_tokens INTEGER DEFAULT 0,
      created_at TEXT NOT NULL,
      created_at_epoch INTEGER NOT NULL,
      FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
    )
  `);

  // Copy every row; the explicit column list must match the CREATE above.
  this.db.run(`
    INSERT INTO observations_new
    SELECT id, memory_session_id, project, text, type, title, subtitle, facts,
           narrative, concepts, files_read, files_modified, prompt_number,
           discovery_tokens, created_at, created_at_epoch
    FROM observations
  `);

  this.db.run('DROP TABLE observations');
  this.db.run('ALTER TABLE observations_new RENAME TO observations');

  // Recreate the standard lookup indexes lost with the dropped table.
  this.db.run(`
    CREATE INDEX idx_observations_sdk_session ON observations(memory_session_id);
    CREATE INDEX idx_observations_project ON observations(project);
    CREATE INDEX idx_observations_type ON observations(type);
    CREATE INDEX idx_observations_created ON observations(created_at_epoch DESC);
  `);

  // Recreate FTS triggers only if observations_fts exists
  // (FTS5 may be unavailable on some builds — see createUserPromptsTable).
  const hasFTS = (this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='observations_fts'").all() as { name: string }[]).length > 0;
  if (hasFTS) {
    // External-content FTS5 sync triggers: delete is expressed via the
    // special INSERT ... VALUES('delete', ...) form; update = delete + insert.
    this.db.run(`
      CREATE TRIGGER IF NOT EXISTS observations_ai AFTER INSERT ON observations BEGIN
        INSERT INTO observations_fts(rowid, title, subtitle, narrative, text, facts, concepts)
        VALUES (new.id, new.title, new.subtitle, new.narrative, new.text, new.facts, new.concepts);
      END;

      CREATE TRIGGER IF NOT EXISTS observations_ad AFTER DELETE ON observations BEGIN
        INSERT INTO observations_fts(observations_fts, rowid, title, subtitle, narrative, text, facts, concepts)
        VALUES('delete', old.id, old.title, old.subtitle, old.narrative, old.text, old.facts, old.concepts);
      END;

      CREATE TRIGGER IF NOT EXISTS observations_au AFTER UPDATE ON observations BEGIN
        INSERT INTO observations_fts(observations_fts, rowid, title, subtitle, narrative, text, facts, concepts)
        VALUES('delete', old.id, old.title, old.subtitle, old.narrative, old.text, old.facts, old.concepts);
        INSERT INTO observations_fts(rowid, title, subtitle, narrative, text, facts, concepts)
        VALUES (new.id, new.title, new.subtitle, new.narrative, new.text, new.facts, new.concepts);
      END;
    `);
  }
}
|
|
|
|
  /**
   * Recreate session_summaries table with ON UPDATE CASCADE FK constraint.
   * Called within a transaction by addOnUpdateCascadeToForeignKeys.
   *
   * Same recreate-table procedure as recreateObservationsWithUpdateCascade:
   * SQLite cannot modify an existing FK, so a new table with the wanted
   * constraint is built, rows are copied, the old table is dropped, and the
   * new one is renamed into place before indexes and FTS triggers return.
   * Statement order matters throughout — do not reorder.
   *
   * NOTE(review): the copy uses an explicit column list, so columns added by
   * later migrations would be silently dropped if this ever re-ran after
   * them. Confirm migration ordering before reusing this pattern.
   */
  private recreateSessionSummariesWithUpdateCascade(): void {
    // Clean up leftover temp table from a previously-crashed run
    this.db.run('DROP TABLE IF EXISTS session_summaries_new');

    // New table is identical except the FK gains ON UPDATE CASCADE
    this.db.run(`
      CREATE TABLE session_summaries_new (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        memory_session_id TEXT NOT NULL,
        project TEXT NOT NULL,
        request TEXT,
        investigated TEXT,
        learned TEXT,
        completed TEXT,
        next_steps TEXT,
        files_read TEXT,
        files_edited TEXT,
        notes TEXT,
        prompt_number INTEGER,
        discovery_tokens INTEGER DEFAULT 0,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        FOREIGN KEY(memory_session_id) REFERENCES sdk_sessions(memory_session_id) ON DELETE CASCADE ON UPDATE CASCADE
      )
    `);

    // Copy rows, preserving ids so FTS rowid references stay valid
    this.db.run(`
      INSERT INTO session_summaries_new
      SELECT id, memory_session_id, project, request, investigated, learned,
             completed, next_steps, files_read, files_edited, notes,
             prompt_number, discovery_tokens, created_at, created_at_epoch
      FROM session_summaries
    `);

    // Drop session_summaries FTS triggers before dropping the table
    this.db.run('DROP TRIGGER IF EXISTS session_summaries_ai');
    this.db.run('DROP TRIGGER IF EXISTS session_summaries_ad');
    this.db.run('DROP TRIGGER IF EXISTS session_summaries_au');

    this.db.run('DROP TABLE session_summaries');
    this.db.run('ALTER TABLE session_summaries_new RENAME TO session_summaries');

    // Indexes were dropped along with the old table; recreate them fresh
    this.db.run(`
      CREATE INDEX idx_session_summaries_sdk_session ON session_summaries(memory_session_id);
      CREATE INDEX idx_session_summaries_project ON session_summaries(project);
      CREATE INDEX idx_session_summaries_created ON session_summaries(created_at_epoch DESC);
    `);

    // Recreate session_summaries FTS triggers if FTS table exists
    const hasSummariesFTS = (this.db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='session_summaries_fts'").all() as { name: string }[]).length > 0;
    if (hasSummariesFTS) {
      this.db.run(`
        CREATE TRIGGER IF NOT EXISTS session_summaries_ai AFTER INSERT ON session_summaries BEGIN
          INSERT INTO session_summaries_fts(rowid, request, investigated, learned, completed, next_steps, notes)
          VALUES (new.id, new.request, new.investigated, new.learned, new.completed, new.next_steps, new.notes);
        END;

        CREATE TRIGGER IF NOT EXISTS session_summaries_ad AFTER DELETE ON session_summaries BEGIN
          INSERT INTO session_summaries_fts(session_summaries_fts, rowid, request, investigated, learned, completed, next_steps, notes)
          VALUES('delete', old.id, old.request, old.investigated, old.learned, old.completed, old.next_steps, old.notes);
        END;

        CREATE TRIGGER IF NOT EXISTS session_summaries_au AFTER UPDATE ON session_summaries BEGIN
          INSERT INTO session_summaries_fts(session_summaries_fts, rowid, request, investigated, learned, completed, next_steps, notes)
          VALUES('delete', old.id, old.request, old.investigated, old.learned, old.completed, old.next_steps, old.notes);
          INSERT INTO session_summaries_fts(rowid, request, investigated, learned, completed, next_steps, notes)
          VALUES (new.id, new.request, new.investigated, new.learned, new.completed, new.next_steps, new.notes);
        END;
      `);
    }
  }
|
|
|
|
/**
|
|
* Add content_hash column to observations for deduplication (migration 22)
|
|
* Prevents duplicate observations from being stored when the same content is processed multiple times.
|
|
* Backfills existing rows with unique random hashes so they don't block new inserts.
|
|
*/
|
|
private addObservationContentHashColumn(): void {
|
|
// Check actual schema first — cross-machine DB sync can leave schema_versions
|
|
// claiming this migration ran while the column is actually missing (e.g. migration 21
|
|
// recreated the table without content_hash on the synced machine).
|
|
const tableInfo = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const hasColumn = tableInfo.some(col => col.name === 'content_hash');
|
|
|
|
if (hasColumn) {
|
|
// Column exists — just ensure version record is present
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(22, new Date().toISOString());
|
|
return;
|
|
}
|
|
|
|
this.db.run('ALTER TABLE observations ADD COLUMN content_hash TEXT');
|
|
// Backfill existing rows with unique random hashes
|
|
this.db.run("UPDATE observations SET content_hash = substr(hex(randomblob(8)), 1, 16) WHERE content_hash IS NULL");
|
|
// Index for fast dedup lookups
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_content_hash ON observations(content_hash, created_at_epoch)');
|
|
logger.debug('DB', 'Added content_hash column to observations table with backfill and index');
|
|
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(22, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Add custom_title column to sdk_sessions for agent attribution (migration 23)
|
|
* Allows callers (e.g. Maestro agents) to label sessions with a human-readable name.
|
|
*/
|
|
private addSessionCustomTitleColumn(): void {
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(23) as SchemaVersion | undefined;
|
|
if (applied) return;
|
|
|
|
const tableInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
|
|
const hasColumn = tableInfo.some(col => col.name === 'custom_title');
|
|
|
|
if (!hasColumn) {
|
|
this.db.run('ALTER TABLE sdk_sessions ADD COLUMN custom_title TEXT');
|
|
logger.debug('DB', 'Added custom_title column to sdk_sessions table');
|
|
}
|
|
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(23, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Create observation_feedback table for tracking observation usage signals.
|
|
* Foundation for tier routing optimization and future Thompson Sampling.
|
|
* Schema version 24.
|
|
*/
|
|
private createObservationFeedbackTable(): void {
|
|
const applied = this.db.query('SELECT 1 FROM schema_versions WHERE version = 24').get();
|
|
if (applied) return;
|
|
|
|
this.db.run(`
|
|
CREATE TABLE IF NOT EXISTS observation_feedback (
|
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
observation_id INTEGER NOT NULL,
|
|
signal_type TEXT NOT NULL,
|
|
session_db_id INTEGER,
|
|
created_at_epoch INTEGER NOT NULL,
|
|
metadata TEXT,
|
|
FOREIGN KEY (observation_id) REFERENCES observations(id) ON DELETE CASCADE
|
|
)
|
|
`);
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_feedback_observation ON observation_feedback(observation_id)');
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_feedback_signal ON observation_feedback(signal_type)');
|
|
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(24, new Date().toISOString());
|
|
logger.debug('DB', 'Created observation_feedback table for usage tracking');
|
|
}
|
|
|
|
/**
|
|
* Add platform_source column to sdk_sessions for Claude/Codex isolation (migration 25)
|
|
*/
|
|
private addSessionPlatformSourceColumn(): void {
|
|
const tableInfo = this.db.query('PRAGMA table_info(sdk_sessions)').all() as TableColumnInfo[];
|
|
const hasColumn = tableInfo.some(col => col.name === 'platform_source');
|
|
const indexInfo = this.db.query('PRAGMA index_list(sdk_sessions)').all() as IndexInfo[];
|
|
const hasIndex = indexInfo.some(index => index.name === 'idx_sdk_sessions_platform_source');
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(25) as SchemaVersion | undefined;
|
|
|
|
if (applied && hasColumn && hasIndex) return;
|
|
|
|
if (!hasColumn) {
|
|
this.db.run(`ALTER TABLE sdk_sessions ADD COLUMN platform_source TEXT NOT NULL DEFAULT '${DEFAULT_PLATFORM_SOURCE}'`);
|
|
logger.debug('DB', 'Added platform_source column to sdk_sessions table');
|
|
}
|
|
|
|
this.db.run(`
|
|
UPDATE sdk_sessions
|
|
SET platform_source = '${DEFAULT_PLATFORM_SOURCE}'
|
|
WHERE platform_source IS NULL OR platform_source = ''
|
|
`);
|
|
|
|
if (!hasIndex) {
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_sdk_sessions_platform_source ON sdk_sessions(platform_source)');
|
|
}
|
|
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(25, new Date().toISOString());
|
|
}
|
|
|
|
/**
|
|
* Ensure merged_into_project columns + indices exist on observations and session_summaries.
|
|
*
|
|
* Self-idempotent via PRAGMA table_info guard — does NOT bump schema_versions.
|
|
* Supports merged-worktree adoption: a nullable pointer that lets a worktree's rows
|
|
* be surfaced under the parent project's observation list without data movement.
|
|
*/
|
|
private ensureMergedIntoProjectColumns(): void {
|
|
const obsCols = this.db
|
|
.query('PRAGMA table_info(observations)')
|
|
.all() as TableColumnInfo[];
|
|
if (!obsCols.some(c => c.name === 'merged_into_project')) {
|
|
this.db.run('ALTER TABLE observations ADD COLUMN merged_into_project TEXT');
|
|
}
|
|
this.db.run(
|
|
'CREATE INDEX IF NOT EXISTS idx_observations_merged_into ON observations(merged_into_project)'
|
|
);
|
|
|
|
const sumCols = this.db
|
|
.query('PRAGMA table_info(session_summaries)')
|
|
.all() as TableColumnInfo[];
|
|
if (!sumCols.some(c => c.name === 'merged_into_project')) {
|
|
this.db.run('ALTER TABLE session_summaries ADD COLUMN merged_into_project TEXT');
|
|
}
|
|
this.db.run(
|
|
'CREATE INDEX IF NOT EXISTS idx_summaries_merged_into ON session_summaries(merged_into_project)'
|
|
);
|
|
}
|
|
|
|
/**
|
|
* Add agent_type and agent_id columns to observations and pending_messages (migration 27).
|
|
*
|
|
* Labels observation rows with the originating Claude Code subagent identity so
|
|
* downstream queries can distinguish main-session work from subagent work.
|
|
* Main-session rows keep NULL for both columns.
|
|
*
|
|
* Also threads the same columns through pending_messages so the label survives
|
|
* between enqueue (hook) and SDK-agent processing (which re-inserts into observations).
|
|
*/
|
|
private addObservationSubagentColumns(): void {
|
|
const applied = this.db.prepare('SELECT version FROM schema_versions WHERE version = ?').get(27) as SchemaVersion | undefined;
|
|
|
|
const obsCols = this.db.query('PRAGMA table_info(observations)').all() as TableColumnInfo[];
|
|
const obsHasAgentType = obsCols.some(c => c.name === 'agent_type');
|
|
const obsHasAgentId = obsCols.some(c => c.name === 'agent_id');
|
|
|
|
if (!obsHasAgentType) {
|
|
this.db.run('ALTER TABLE observations ADD COLUMN agent_type TEXT');
|
|
logger.debug('DB', 'Added agent_type column to observations table');
|
|
}
|
|
if (!obsHasAgentId) {
|
|
this.db.run('ALTER TABLE observations ADD COLUMN agent_id TEXT');
|
|
logger.debug('DB', 'Added agent_id column to observations table');
|
|
}
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_type ON observations(agent_type)');
|
|
this.db.run('CREATE INDEX IF NOT EXISTS idx_observations_agent_id ON observations(agent_id)');
|
|
|
|
const pendingCols = this.db.query('PRAGMA table_info(pending_messages)').all() as TableColumnInfo[];
|
|
if (pendingCols.length > 0) {
|
|
const pendingHasAgentType = pendingCols.some(c => c.name === 'agent_type');
|
|
const pendingHasAgentId = pendingCols.some(c => c.name === 'agent_id');
|
|
if (!pendingHasAgentType) {
|
|
this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_type TEXT');
|
|
logger.debug('DB', 'Added agent_type column to pending_messages table');
|
|
}
|
|
if (!pendingHasAgentId) {
|
|
this.db.run('ALTER TABLE pending_messages ADD COLUMN agent_id TEXT');
|
|
logger.debug('DB', 'Added agent_id column to pending_messages table');
|
|
}
|
|
}
|
|
|
|
if (!applied) {
|
|
this.db.prepare('INSERT OR IGNORE INTO schema_versions (version, applied_at) VALUES (?, ?)').run(27, new Date().toISOString());
|
|
}
|
|
}
|
|
}
|