diff --git a/.gitignore b/.gitignore index 156b92c7..b89f1dc8 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,9 @@ commands.html # Local test installs .claude/ +# Cursor IDE — local agents/skills bundle (never commit) +.cursor/ + # Build artifacts (committed to npm, not git) hooks/dist/ diff --git a/CHANGELOG.md b/CHANGELOG.md index a45e9324..052e44c1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,14 @@ Format follows [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - **`@gsd-build/sdk` — Phase 1 typed query foundation** — Registry-based `gsd-sdk query` command, classified errors (`GSDQueryError`), and unit-tested handlers under `sdk/src/query/` (state, roadmap, phase lifecycle, init, config, validation, and related domains). Implements incremental SDK-first migration scope approved in #2083; builds on validated work from #2007 / `feat/sdk-foundation` without migrating workflows or removing `gsd-tools.cjs` in this phase. - **Flow diagram directive for phase researcher** — `gsd-phase-researcher` now enforces data-flow architecture diagrams instead of file-listing diagrams. Language-agnostic directive added to agent prompt and research template. (#2139) +### Fixed + +- **SDK query layer (PR review hardening)** — `commit-to-subrepo` uses realpath-aware path containment and sanitized commit messages; `state.planned-phase` uses the STATE.md lockfile; `verifyKeyLinks` mitigates ReDoS on frontmatter patterns; frontmatter handlers resolve paths under the real project root; phase directory names reject `..` and separators; `gsd-sdk` restores strict CLI parsing by stripping `--pick` before `parseArgs`; `QueryRegistry.commands()` for enumeration; `todoComplete` uses static error imports. 
+ +### Changed + +- **SDK query follow-up (tests, docs, registry)** — Expanded `QUERY_MUTATION_COMMANDS` for event emission; stale lock cleanup uses PID liveness (`process.kill(pid, 0)`) when a lock file exists; `searchJsonEntries` is depth-bounded (`MAX_JSON_SEARCH_DEPTH`); removed unnecessary `readdirSync`/`Dirent` casts across query handlers; added `sdk/src/query/QUERY-HANDLERS.md` (error vs `{ data.error }`, mutations, locks, intel limits); unit tests for intel, profile, uat, skills, summary, websearch, workstream, registry vs `QUERY_MUTATION_COMMANDS`, and frontmatter extract/splice round-trip. + ## [1.35.0] - 2026-04-10 ### Added diff --git a/sdk/src/cli.test.ts b/sdk/src/cli.test.ts index c0cc70a6..b7c9deda 100644 --- a/sdk/src/cli.test.ts +++ b/sdk/src/cli.test.ts @@ -100,10 +100,20 @@ describe('parseCliArgs', () => { expect(result.maxBudget).toBe(15); }); - it('ignores unknown options (non-strict for --pick support)', () => { - // strict: false allows --pick and other query-specific flags - const result = parseCliArgs(['--unknown-flag']); - expect(result.command).toBeUndefined(); + it('rejects unknown options (strict parser)', () => { + expect(() => parseCliArgs(['--unknown-flag'])).toThrow(); + }); + + it('rejects unknown flags on run command', () => { + expect(() => parseCliArgs(['run', 'hello', '--not-a-real-option'])).toThrow(); + }); + + it('parses query with --pick stripped before strict parse', () => { + const result = parseCliArgs([ + 'query', 'state.load', '--pick', 'data', '--project-dir', 'C:\\tmp\\proj', + ]); + expect(result.command).toBe('query'); + expect(result.projectDir).toBe('C:\\tmp\\proj'); }); // ─── Init command parsing ────────────────────────────────────────────── diff --git a/sdk/src/cli.ts b/sdk/src/cli.ts index bc242fde..63baf5ab 100644 --- a/sdk/src/cli.ts +++ b/sdk/src/cli.ts @@ -36,13 +36,27 @@ export interface ParsedCliArgs { version: boolean; } +/** + * Strip `--pick ` from argv before parseArgs so the global parser stays 
strict. + * Query dispatch removes --pick separately in main(); this only affects CLI parsing. + */ +function argvForCliParse(argv: string[]): string[] { + if (argv[0] !== 'query') return argv; + const copy = [...argv]; + const pickIdx = copy.indexOf('--pick'); + if (pickIdx !== -1 && pickIdx + 1 < copy.length) { + copy.splice(pickIdx, 2); + } + return copy; +} + /** * Parse CLI arguments into a structured object. * Exported for testing — the main() function uses this internally. */ export function parseCliArgs(argv: string[]): ParsedCliArgs { const { values, positionals } = parseArgs({ - args: argv, + args: argvForCliParse(argv), options: { 'project-dir': { type: 'string', default: process.cwd() }, 'ws-port': { type: 'string' }, @@ -54,7 +68,7 @@ export function parseCliArgs(argv: string[]): ParsedCliArgs { version: { type: 'boolean', short: 'v', default: false }, }, allowPositionals: true, - strict: false, + strict: true, }); const command = positionals[0] as string | undefined; diff --git a/sdk/src/query/QUERY-HANDLERS.md b/sdk/src/query/QUERY-HANDLERS.md new file mode 100644 index 00000000..0f2b71bd --- /dev/null +++ b/sdk/src/query/QUERY-HANDLERS.md @@ -0,0 +1,26 @@ +# Query handler conventions (`sdk/src/query/`) + +This document records contracts for the typed query layer consumed by `gsd-sdk query` and programmatic `createRegistry()` callers. + +## Error handling + +- **Validation and programmer errors**: Handlers throw `GSDError` with an `ErrorClassification` (e.g. missing required args, invalid phase). The CLI maps these to exit codes via `exitCodeFor()`. +- **Expected domain failures**: Handlers return `{ data: { error: string, ... } }` for cases that are not exceptional in normal use (file not found, intel disabled, todo missing, etc.). Callers must check `data.error` when present. 
+- Do not mix both styles for the same failure mode in new code: prefer **throw** for "caller must fix input"; prefer **`data.error`** for "operation could not complete in this project state." + +## Mutation commands and events + +- `QUERY_MUTATION_COMMANDS` in `index.ts` lists every command name (including space-delimited aliases) that performs durable writes. It drives optional `GSDEventStream` wrapping so mutations emit structured events. +- Init composition handlers (`init.*`) are **not** included: they return JSON for workflows; agents perform filesystem work. + +## Session correlation (`sessionId`) + +- Mutation events include `sessionId: ''` until a future phase threads session identifiers through the query dispatch path. Consumers should not rely on `sessionId` for correlation today. + +## Lockfiles (`state-mutation.ts`) + +- `STATE.md` (and ROADMAP) locks use a sibling `.lock` file with the holder's PID. Stale locks are cleared when the PID no longer exists (`process.kill(pid, 0)` fails) or when the lock file is older than the existing time-based threshold. + +## Intel JSON search + +- `searchJsonEntries` in `intel.ts` caps recursion depth (`MAX_JSON_SEARCH_DEPTH`) to avoid stack overflow on pathological nested JSON. 
diff --git a/sdk/src/query/commit.ts b/sdk/src/query/commit.ts index 762c7a70..145b7bf4 100644 --- a/sdk/src/query/commit.ts +++ b/sdk/src/query/commit.ts @@ -18,9 +18,9 @@ */ import { readFile } from 'node:fs/promises'; -import { join } from 'node:path'; import { spawnSync } from 'node:child_process'; -import { planningPaths } from './helpers.js'; +import { GSDError } from '../errors.js'; +import { planningPaths, resolvePathUnderProject } from './helpers.js'; import type { QueryHandler } from './utils.js'; // ─── execGit ────────────────────────────────────────────────────────────── @@ -227,11 +227,20 @@ export const commitToSubrepo: QueryHandler = async (args, projectDir) => { return { data: { committed: false, reason: 'commit message required' } }; } + const sanitized = sanitizeCommitMessage(message); + if (!sanitized && message) { + return { data: { committed: false, reason: 'commit message empty after sanitization' } }; + } + try { for (const file of files) { - const resolved = join(projectDir, file); - if (!resolved.startsWith(projectDir)) { - return { data: { committed: false, reason: `file path escapes project: ${file}` } }; + try { + await resolvePathUnderProject(projectDir, file); + } catch (err) { + if (err instanceof GSDError) { + return { data: { committed: false, reason: `${err.message}: ${file}` } }; + } + throw err; } } @@ -239,7 +248,7 @@ export const commitToSubrepo: QueryHandler = async (args, projectDir) => { spawnSync('git', ['-C', projectDir, 'add', ...fileArgs], { stdio: 'pipe' }); const commitResult = spawnSync( - 'git', ['-C', projectDir, 'commit', '-m', message], + 'git', ['-C', projectDir, 'commit', '-m', sanitized], { stdio: 'pipe', encoding: 'utf-8' }, ); if (commitResult.status !== 0) { @@ -251,7 +260,7 @@ export const commitToSubrepo: QueryHandler = async (args, projectDir) => { { encoding: 'utf-8' }, ); const hash = hashResult.stdout.trim(); - return { data: { committed: true, hash, message } }; + return { data: { committed: true, 
hash, message: sanitized } }; } catch (err) { return { data: { committed: false, reason: String(err) } }; } diff --git a/sdk/src/query/frontmatter-mutation.test.ts b/sdk/src/query/frontmatter-mutation.test.ts index 51116faa..3bcb0be9 100644 --- a/sdk/src/query/frontmatter-mutation.test.ts +++ b/sdk/src/query/frontmatter-mutation.test.ts @@ -232,3 +232,28 @@ describe('frontmatterValidate', () => { expect(FRONTMATTER_SCHEMAS).toHaveProperty('verification'); }); }); + +// ─── Round-trip (extract → reconstruct → splice) ─────────────────────────── + +describe('frontmatter round-trip', () => { + it('preserves scalar and list fields through extract + splice', () => { + const original = `--- +phase: "01" +plan: "02" +type: execute +wave: 1 +depends_on: [] +tags: [a, b] +--- +# Title +`; + const fm = extractFrontmatter(original) as Record; + const spliced = spliceFrontmatter('# Title\n', fm); + expect(spliced.startsWith('---\n')).toBe(true); + const round = extractFrontmatter(spliced) as Record; + expect(String(round.phase)).toBe('01'); + // YAML may round-trip wave as number or string depending on parser output + expect(Number(round.wave)).toBe(1); + expect(Array.isArray(round.tags)).toBe(true); + }); +}); diff --git a/sdk/src/query/frontmatter-mutation.ts b/sdk/src/query/frontmatter-mutation.ts index 073cb598..c42a62e6 100644 --- a/sdk/src/query/frontmatter-mutation.ts +++ b/sdk/src/query/frontmatter-mutation.ts @@ -18,10 +18,9 @@ */ import { readFile, writeFile } from 'node:fs/promises'; -import { join, isAbsolute } from 'node:path'; import { GSDError, ErrorClassification } from '../errors.js'; import { extractFrontmatter } from './frontmatter.js'; -import { normalizeMd } from './helpers.js'; +import { normalizeMd, resolvePathUnderProject } from './helpers.js'; import type { QueryHandler } from './utils.js'; // ─── FRONTMATTER_SCHEMAS ────────────────────────────────────────────────── @@ -178,7 +177,15 @@ export const frontmatterSet: QueryHandler = async (args, 
projectDir) => { throw new GSDError('file path contains null bytes', ErrorClassification.Validation); } - const fullPath = isAbsolute(filePath) ? filePath : join(projectDir, filePath); + let fullPath: string; + try { + fullPath = await resolvePathUnderProject(projectDir, filePath); + } catch (err) { + if (err instanceof GSDError) { + return { data: { error: err.message, path: filePath } }; + } + throw err; + } let content: string; try { @@ -220,7 +227,15 @@ export const frontmatterMerge: QueryHandler = async (args, projectDir) => { throw new GSDError('file path contains null bytes', ErrorClassification.Validation); } - const fullPath = isAbsolute(filePath) ? filePath : join(projectDir, filePath); + let fullPath: string; + try { + fullPath = await resolvePathUnderProject(projectDir, filePath); + } catch (err) { + if (err instanceof GSDError) { + return { data: { error: err.message, path: filePath } }; + } + throw err; + } let content: string; try { @@ -285,7 +300,15 @@ export const frontmatterValidate: QueryHandler = async (args, projectDir) => { ); } - const fullPath = isAbsolute(filePath) ? 
filePath : join(projectDir, filePath); + let fullPath: string; + try { + fullPath = await resolvePathUnderProject(projectDir, filePath); + } catch (err) { + if (err instanceof GSDError) { + return { data: { error: err.message, path: filePath } }; + } + throw err; + } let content: string; try { diff --git a/sdk/src/query/frontmatter.ts b/sdk/src/query/frontmatter.ts index 5ed9915c..a9dbc3fa 100644 --- a/sdk/src/query/frontmatter.ts +++ b/sdk/src/query/frontmatter.ts @@ -17,10 +17,9 @@ */ import { readFile } from 'node:fs/promises'; -import { join, isAbsolute } from 'node:path'; import { GSDError, ErrorClassification } from '../errors.js'; import type { QueryHandler } from './utils.js'; -import { escapeRegex } from './helpers.js'; +import { escapeRegex, resolvePathUnderProject } from './helpers.js'; // ─── splitInlineArray ─────────────────────────────────────────────────────── @@ -329,7 +328,15 @@ export const frontmatterGet: QueryHandler = async (args, projectDir) => { throw new GSDError('file path contains null bytes', ErrorClassification.Validation); } - const fullPath = isAbsolute(filePath) ? filePath : join(projectDir, filePath); + let fullPath: string; + try { + fullPath = await resolvePathUnderProject(projectDir, filePath); + } catch (err) { + if (err instanceof GSDError) { + return { data: { error: err.message, path: filePath } }; + } + throw err; + } let content: string; try { diff --git a/sdk/src/query/helpers.test.ts b/sdk/src/query/helpers.test.ts index 159cb84c..320971dc 100644 --- a/sdk/src/query/helpers.test.ts +++ b/sdk/src/query/helpers.test.ts @@ -2,7 +2,11 @@ * Unit tests for shared query helpers. 
*/ -import { describe, it, expect } from 'vitest'; +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, rm, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { GSDError } from '../errors.js'; import { escapeRegex, normalizePhaseName, @@ -13,6 +17,7 @@ import { stateExtractField, planningPaths, normalizeMd, + resolvePathUnderProject, } from './helpers.js'; // ─── escapeRegex ──────────────────────────────────────────────────────────── @@ -223,3 +228,27 @@ describe('normalizeMd', () => { expect(result).toBe(input); }); }); + +// ─── resolvePathUnderProject ──────────────────────────────────────────────── + +describe('resolvePathUnderProject', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-path-')); + await writeFile(join(tmpDir, 'safe.md'), 'x', 'utf-8'); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('resolves a relative file under the project root', async () => { + const p = await resolvePathUnderProject(tmpDir, 'safe.md'); + expect(p.endsWith('safe.md')).toBe(true); + }); + + it('rejects paths that escape the project root', async () => { + await expect(resolvePathUnderProject(tmpDir, '../../etc/passwd')).rejects.toThrow(GSDError); + }); +}); diff --git a/sdk/src/query/helpers.ts b/sdk/src/query/helpers.ts index 802ce2dc..4de9a848 100644 --- a/sdk/src/query/helpers.ts +++ b/sdk/src/query/helpers.ts @@ -17,7 +17,9 @@ * ``` */ -import { join } from 'node:path'; +import { join, relative, resolve, isAbsolute, normalize } from 'node:path'; +import { realpath } from 'node:fs/promises'; +import { GSDError, ErrorClassification } from '../errors.js'; // ─── Types ────────────────────────────────────────────────────────────────── @@ -322,3 +324,30 @@ export function planningPaths(projectDir: string): PlanningPaths { requirements: toPosixPath(join(base, 
'REQUIREMENTS.md')), }; } + +// ─── resolvePathUnderProject ─────────────────────────────────────────────── + +/** + * Resolve a user-supplied path against the project and ensure it cannot escape + * the real project root (prefix checks are insufficient; symlinks are handled + * via realpath). + * + * @param projectDir - Project root directory + * @param userPath - Relative or absolute path from user input + * @returns Canonical resolved path within the project + */ +export async function resolvePathUnderProject(projectDir: string, userPath: string): Promise { + const projectReal = await realpath(projectDir); + const candidate = isAbsolute(userPath) ? normalize(userPath) : resolve(projectReal, userPath); + let realCandidate: string; + try { + realCandidate = await realpath(candidate); + } catch { + realCandidate = candidate; + } + const rel = relative(projectReal, realCandidate); + if (rel.startsWith('..') || (isAbsolute(rel) && rel.length > 0)) { + throw new GSDError('path escapes project directory', ErrorClassification.Validation); + } + return realCandidate; +} diff --git a/sdk/src/query/index.ts b/sdk/src/query/index.ts index 94fdc47f..5ee81379 100644 --- a/sdk/src/query/index.ts +++ b/sdk/src/query/index.ts @@ -89,28 +89,46 @@ export { extractField } from './registry.js'; // ─── Mutation commands set ──────────────────────────────────────────────── /** - * Set of command names that represent mutation operations. - * Used to wire event emission after successful dispatch. + * Command names that perform durable writes (disk, git, or global profile store). + * Used to wire event emission after successful dispatch. Both dotted and + * space-delimited aliases must be listed when both exist. + * + * See QUERY-HANDLERS.md for semantics. Init composition handlers are omitted + * (they emit JSON for workflows; agents perform writes). 
*/ -const MUTATION_COMMANDS = new Set([ +export const QUERY_MUTATION_COMMANDS = new Set([ 'state.update', 'state.patch', 'state.begin-phase', 'state.advance-plan', 'state.record-metric', 'state.update-progress', 'state.add-decision', 'state.add-blocker', 'state.resolve-blocker', 'state.record-session', - 'frontmatter.set', 'frontmatter.merge', 'frontmatter.validate', + 'state.planned-phase', 'state planned-phase', + 'frontmatter.set', 'frontmatter.merge', 'frontmatter.validate', 'frontmatter validate', 'config-set', 'config-set-model-profile', 'config-new-project', 'config-ensure-section', - 'commit', 'check-commit', - 'template.fill', 'template.select', + 'commit', 'check-commit', 'commit-to-subrepo', + 'template.fill', 'template.select', 'template select', 'validate.health', 'validate health', 'phase.add', 'phase.insert', 'phase.remove', 'phase.complete', 'phase.scaffold', 'phases.clear', 'phases.archive', 'phase add', 'phase insert', 'phase remove', 'phase complete', 'phase scaffold', 'phases clear', 'phases archive', + 'roadmap.update-plan-progress', 'roadmap update-plan-progress', + 'requirements.mark-complete', 'requirements mark-complete', + 'todo.complete', 'todo complete', + 'milestone.complete', 'milestone complete', + 'workstream.create', 'workstream.set', 'workstream.complete', 'workstream.progress', + 'workstream create', 'workstream set', 'workstream complete', 'workstream progress', + 'docs-init', + 'learnings.copy', 'learnings copy', + 'intel.snapshot', 'intel.patch-meta', 'intel snapshot', 'intel patch-meta', + 'write-profile', 'generate-claude-profile', 'generate-dev-preferences', 'generate-claude-md', ]); // ─── Event builder ──────────────────────────────────────────────────────── /** * Build a mutation event based on the command prefix and result. + * + * `sessionId` is empty until a future phase wires session correlation into + * the query layer; see QUERY-HANDLERS.md. 
*/ function buildMutationEvent(cmd: string, args: string[], result: QueryResult): GSDEvent { const base = { @@ -118,14 +136,37 @@ function buildMutationEvent(cmd: string, args: string[], result: QueryResult): G sessionId: '', }; - if (cmd.startsWith('state.')) { + if (cmd.startsWith('template.') || cmd.startsWith('template ')) { + const data = result.data as Record | null; return { ...base, - type: GSDEventType.StateMutation, + type: GSDEventType.TemplateFill, + templateType: (data?.template as string) ?? args[0] ?? '', + path: (data?.path as string) ?? args[1] ?? '', + created: (data?.created as boolean) ?? false, + } as GSDTemplateFillEvent; + } + + if (cmd === 'commit' || cmd === 'check-commit' || cmd === 'commit-to-subrepo') { + const data = result.data as Record | null; + return { + ...base, + type: GSDEventType.GitCommit, + hash: (data?.hash as string) ?? null, + committed: (data?.committed as boolean) ?? false, + reason: (data?.reason as string) ?? '', + } as GSDGitCommitEvent; + } + + if (cmd.startsWith('frontmatter.') || cmd.startsWith('frontmatter ')) { + return { + ...base, + type: GSDEventType.FrontmatterMutation, command: cmd, - fields: args.slice(0, 2), + file: args[0] ?? '', + fields: args.slice(1), success: true, - } as GSDStateMutationEvent; + } as GSDFrontmatterMutationEvent; } if (cmd.startsWith('config-')) { @@ -138,26 +179,14 @@ function buildMutationEvent(cmd: string, args: string[], result: QueryResult): G } as GSDConfigMutationEvent; } - if (cmd.startsWith('frontmatter.')) { + if (cmd.startsWith('validate.') || cmd.startsWith('validate ')) { return { ...base, - type: GSDEventType.FrontmatterMutation, + type: GSDEventType.ConfigMutation, command: cmd, - file: args[0] ?? '', - fields: args.slice(1), + key: args[0] ?? 
'', success: true, - } as GSDFrontmatterMutationEvent; - } - - if (cmd === 'commit' || cmd === 'check-commit') { - const data = result.data as Record | null; - return { - ...base, - type: GSDEventType.GitCommit, - hash: (data?.hash as string) ?? null, - committed: (data?.committed as boolean) ?? false, - reason: (data?.reason as string) ?? '', - } as GSDGitCommitEvent; + } as GSDConfigMutationEvent; } if (cmd.startsWith('phase.') || cmd.startsWith('phase ') || cmd.startsWith('phases.') || cmd.startsWith('phases ')) { @@ -170,25 +199,24 @@ function buildMutationEvent(cmd: string, args: string[], result: QueryResult): G } as GSDStateMutationEvent; } - if (cmd.startsWith('validate.') || cmd.startsWith('validate ')) { + if (cmd.startsWith('state.') || cmd.startsWith('state ')) { return { ...base, - type: GSDEventType.ConfigMutation, + type: GSDEventType.StateMutation, command: cmd, - key: args[0] ?? '', + fields: args.slice(0, 2), success: true, - } as GSDConfigMutationEvent; + } as GSDStateMutationEvent; } - // template.fill / template.select - const data = result.data as Record | null; + // roadmap, requirements, todo, milestone, workstream, intel, profile, learnings, docs-init return { ...base, - type: GSDEventType.TemplateFill, - templateType: (data?.template as string) ?? args[0] ?? '', - path: (data?.path as string) ?? args[1] ?? '', - created: (data?.created as boolean) ?? 
false, - } as GSDTemplateFillEvent; + type: GSDEventType.StateMutation, + command: cmd, + fields: args.slice(0, 2), + success: true, + } as GSDStateMutationEvent; } // ─── Factory ─────────────────────────────────────────────────────────────── @@ -408,7 +436,7 @@ export function createRegistry(eventStream?: GSDEventStream): QueryRegistry { // Wire event emission for mutation commands if (eventStream) { - for (const cmd of MUTATION_COMMANDS) { + for (const cmd of QUERY_MUTATION_COMMANDS) { const original = registry.getHandler(cmd); if (original) { registry.register(cmd, async (args: string[], projectDir: string) => { diff --git a/sdk/src/query/init-complex.ts b/sdk/src/query/init-complex.ts index 1fbf0f18..3438e98c 100644 --- a/sdk/src/query/init-complex.ts +++ b/sdk/src/query/init-complex.ts @@ -18,7 +18,7 @@ * ``` */ -import { existsSync, readdirSync, statSync } from 'node:fs'; +import { existsSync, readdirSync, statSync, type Dirent } from 'node:fs'; import { readFile } from 'node:fs/promises'; import { join, relative } from 'node:path'; import { homedir } from 'node:os'; @@ -90,9 +90,9 @@ export const initNewProject: QueryHandler = async (_args, projectDir) => { function findCodeFiles(dir: string, depth: number): boolean { if (depth > 3) return false; - let entries: Array<{ isDirectory(): boolean; isFile(): boolean; name: string }>; + let entries: Dirent[]; try { - entries = readdirSync(dir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; isFile(): boolean; name: string }>; + entries = readdirSync(dir, { withFileTypes: true }); } catch { return false; } @@ -202,7 +202,7 @@ export const initProgress: QueryHandler = async (_args, projectDir) => { // Scan phase directories try { const entries = readdirSync(paths.phases, { withFileTypes: true }); - const dirs = (entries as unknown as Array<{ isDirectory(): boolean; name: string }>) + const dirs = entries .filter(e => e.isDirectory()) .map(e => e.name) .sort((a, b) => { @@ -339,7 +339,7 @@ 
export const initManager: QueryHandler = async (_args, projectDir) => { // Pre-compute directory listing once let phaseDirEntries: string[] = []; try { - phaseDirEntries = (readdirSync(paths.phases, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>) + phaseDirEntries = readdirSync(paths.phases, { withFileTypes: true }) .filter(e => e.isDirectory()) .map(e => e.name); } catch { /* intentionally empty */ } diff --git a/sdk/src/query/init.ts b/sdk/src/query/init.ts index 507e5a46..aac46c03 100644 --- a/sdk/src/query/init.ts +++ b/sdk/src/query/init.ts @@ -17,7 +17,7 @@ * ``` */ -import { existsSync, readdirSync, readFileSync, statSync } from 'node:fs'; +import { existsSync, readdirSync, readFileSync, statSync, type Dirent } from 'node:fs'; import { readFile, readdir } from 'node:fs/promises'; import { join, relative, basename } from 'node:path'; import { execSync } from 'node:child_process'; @@ -830,9 +830,9 @@ export const initListWorkspaces: QueryHandler = async (_args, _projectDir) => { const workspaces: Array> = []; if (existsSync(defaultBase)) { - let entries: Array<{ isDirectory(): boolean; name: string }> = []; + let entries: Dirent[] = []; try { - entries = readdirSync(defaultBase, { withFileTypes: true }) as unknown as typeof entries; + entries = readdirSync(defaultBase, { withFileTypes: true }); } catch { entries = []; } for (const entry of entries) { if (!entry.isDirectory()) continue; diff --git a/sdk/src/query/intel.test.ts b/sdk/src/query/intel.test.ts new file mode 100644 index 00000000..ad9d7f0d --- /dev/null +++ b/sdk/src/query/intel.test.ts @@ -0,0 +1,90 @@ +/** + * Tests for intel query handlers and JSON search helpers. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, writeFile, mkdir, rm, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { + searchJsonEntries, + MAX_JSON_SEARCH_DEPTH, + intelStatus, + intelSnapshot, +} from './intel.js'; + +describe('searchJsonEntries', () => { + it('finds matches in shallow objects', () => { + const data = { files: [{ name: 'AuthService' }, { name: 'Other' }] }; + const found = searchJsonEntries(data, 'auth'); + expect(found.length).toBeGreaterThan(0); + }); + + it('stops at max depth without throwing', () => { + let nested: Record = { leaf: 'findme' }; + for (let i = 0; i < MAX_JSON_SEARCH_DEPTH + 5; i++) { + nested = { inner: nested }; + } + const found = searchJsonEntries({ root: nested }, 'findme'); + expect(Array.isArray(found)).toBe(true); + }); +}); + +describe('intelStatus', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-intel-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + await writeFile(join(tmpDir, '.planning', 'config.json'), JSON.stringify({ model_profile: 'balanced' })); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns disabled when intel.enabled is not true', async () => { + const r = await intelStatus([], tmpDir); + const data = r.data as Record; + expect(data.disabled).toBe(true); + }); + + it('returns file map when intel is enabled', async () => { + await writeFile( + join(tmpDir, '.planning', 'config.json'), + JSON.stringify({ model_profile: 'balanced', intel: { enabled: true } }), + ); + const r = await intelStatus([], tmpDir); + const data = r.data as Record; + expect(data.disabled).not.toBe(true); + expect(data.files).toBeDefined(); + }); +}); + +describe('intelSnapshot', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 
'gsd-intel-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + await writeFile( + join(tmpDir, '.planning', 'config.json'), + JSON.stringify({ model_profile: 'balanced', intel: { enabled: true } }), + ); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('writes .last-refresh.json when intel is enabled', async () => { + await mkdir(join(tmpDir, '.planning', 'intel'), { recursive: true }); + await writeFile(join(tmpDir, '.planning', 'intel', 'stack.json'), JSON.stringify({ _meta: { updated_at: new Date().toISOString() } })); + const r = await intelSnapshot([], tmpDir); + const data = r.data as Record; + expect(data.saved).toBe(true); + const snap = await readFile(join(tmpDir, '.planning', 'intel', '.last-refresh.json'), 'utf-8'); + expect(JSON.parse(snap)).toHaveProperty('hashes'); + }); +}); diff --git a/sdk/src/query/intel.ts b/sdk/src/query/intel.ts index a347b629..3c895fdb 100644 --- a/sdk/src/query/intel.ts +++ b/sdk/src/query/intel.ts @@ -74,27 +74,32 @@ function hashFile(filePath: string): string | null { } } -function searchJsonEntries(data: unknown, term: string): unknown[] { +/** Max recursion depth when walking JSON for intel queries (avoids stack overflow). 
*/ +export const MAX_JSON_SEARCH_DEPTH = 48; + +export function searchJsonEntries(data: unknown, term: string, depth = 0): unknown[] { const lowerTerm = term.toLowerCase(); const results: unknown[] = []; + if (depth > MAX_JSON_SEARCH_DEPTH) return results; if (!data || typeof data !== 'object') return results; - function matchesInValue(value: unknown): boolean { + function matchesInValue(value: unknown, d: number): boolean { + if (d > MAX_JSON_SEARCH_DEPTH) return false; if (typeof value === 'string') return value.toLowerCase().includes(lowerTerm); - if (Array.isArray(value)) return value.some(v => matchesInValue(v)); - if (value && typeof value === 'object') return Object.values(value as object).some(v => matchesInValue(v)); + if (Array.isArray(value)) return value.some(v => matchesInValue(v, d + 1)); + if (value && typeof value === 'object') return Object.values(value as object).some(v => matchesInValue(v, d + 1)); return false; } if (Array.isArray(data)) { for (const entry of data) { - if (matchesInValue(entry)) results.push(entry); + if (matchesInValue(entry, depth + 1)) results.push(entry); } } else { for (const [, value] of Object.entries(data as object)) { if (Array.isArray(value)) { for (const entry of value) { - if (matchesInValue(entry)) results.push(entry); + if (matchesInValue(entry, depth + 1)) results.push(entry); } } } diff --git a/sdk/src/query/phase-lifecycle.ts b/sdk/src/query/phase-lifecycle.ts index fcb2584c..edf383cf 100644 --- a/sdk/src/query/phase-lifecycle.ts +++ b/sdk/src/query/phase-lifecycle.ts @@ -45,6 +45,19 @@ function assertNoNullBytes(value: string, label: string): void { } } +/** Reject `..` or path separators in phase directory names. 
*/ +function assertSafePhaseDirName(dirName: string, label = 'phase directory'): void { + if (/[/\\]|\.\./.test(dirName)) { + throw new GSDError(`${label} contains invalid path segments`, ErrorClassification.Validation); + } +} + +function assertSafeProjectCode(code: string): void { + if (code && /[/\\]|\.\./.test(code)) { + throw new GSDError('project_code contains invalid characters', ErrorClassification.Validation); + } +} + // ─── Slug generation (inline) ──────────────────────────────────────────── /** Generate kebab-case slug from description. Port of generateSlugInternal. */ @@ -150,6 +163,7 @@ export const phaseAdd: QueryHandler = async (args, projectDir) => { // Optional project code prefix (e.g., 'CK' -> 'CK-01-foundation') const projectCode = (config.project_code as string) || ''; + assertSafeProjectCode(projectCode); const prefix = projectCode ? `${projectCode}-` : ''; let newPhaseId: number | string = ''; @@ -164,6 +178,7 @@ export const phaseAdd: QueryHandler = async (args, projectDir) => { if (!newPhaseId) { throw new GSDError('--id required when phase_naming is "custom"', ErrorClassification.Validation); } + assertSafePhaseDirName(String(newPhaseId), 'custom phase id'); dirName = `${prefix}${newPhaseId}-${slug}`; } else { // Sequential mode: find highest integer phase number (in current milestone only) @@ -182,6 +197,8 @@ export const phaseAdd: QueryHandler = async (args, projectDir) => { dirName = `${prefix}${paddedNum}-${slug}`; } + assertSafePhaseDirName(dirName); + const dirPath = join(planningPaths(projectDir).phases, dirName); // Create directory with .gitkeep so git tracks empty folders @@ -293,8 +310,10 @@ export const phaseInsert: QueryHandler = async (args, projectDir) => { insertConfig = JSON.parse(await readFile(planningPaths(projectDir).config, 'utf-8')); } catch { /* use defaults */ } const projectCode = (insertConfig.project_code as string) || ''; + assertSafeProjectCode(projectCode); const pfx = projectCode ? 
`${projectCode}-` : ''; dirName = `${pfx}${decimalPhase}-${slug}`; + assertSafePhaseDirName(dirName); const dirPath = join(phasesDir, dirName); // Create directory with .gitkeep @@ -421,6 +440,7 @@ export const phaseScaffold: QueryHandler = async (args, projectDir) => { } const slug = generateSlugInternal(name); const dirNameNew = `${padded}-${slug}`; + assertSafePhaseDirName(dirNameNew, 'scaffold phase directory'); const phasesParent = planningPaths(projectDir).phases; await mkdir(phasesParent, { recursive: true }); const dirPath = join(phasesParent, dirNameNew); diff --git a/sdk/src/query/pipeline.ts b/sdk/src/query/pipeline.ts index 76a18a29..1fbcd791 100644 --- a/sdk/src/query/pipeline.ts +++ b/sdk/src/query/pipeline.ts @@ -55,11 +55,7 @@ export type PipelineStage = 'prepare' | 'execute' | 'finalize'; function collectFiles(dir: string, base: string): string[] { const results: string[] = []; if (!existsSync(dir)) return results; - const entries = readdirSync(dir, { withFileTypes: true }) as unknown as Array<{ - isDirectory(): boolean; - isFile(): boolean; - name: string; - }>; + const entries = readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { const fullPath = join(dir, entry.name); const relPath = relative(base, fullPath); @@ -159,8 +155,9 @@ export function wrapWithPipeline( // as event emission wiring in index.ts const commandsToWrap: string[] = []; - // We need to enumerate commands. QueryRegistry doesn't expose keys directly, - // so we wrap the register method temporarily to collect known commands, + // Enumerate mutation commands via the caller-provided set. QueryRegistry also + // exposes commands() for full command lists when needed by tooling. + // We wrap the register method temporarily to collect known commands, // then restore. Instead, we use the mutation commands set + a marker approach: // wrap mutation commands for dry-run, and wrap all via onPrepare/onFinalize. 
// diff --git a/sdk/src/query/profile.test.ts b/sdk/src/query/profile.test.ts new file mode 100644 index 00000000..9308efe1 --- /dev/null +++ b/sdk/src/query/profile.test.ts @@ -0,0 +1,54 @@ +/** + * Tests for profile / learnings query handlers (filesystem writes use temp dirs). + */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, writeFile, mkdir, rm, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { writeProfile, learningsCopy } from './profile.js'; + +describe('writeProfile', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-profile-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('writes USER-PROFILE.md from --input JSON', async () => { + const analysisPath = join(tmpDir, 'analysis.json'); + await writeFile(analysisPath, JSON.stringify({ communication_style: 'terse' }), 'utf-8'); + const result = await writeProfile(['--input', analysisPath], tmpDir); + const data = result.data as Record<string, unknown>; + expect(data.written).toBe(true); + const md = await readFile(join(tmpDir, '.planning', 'USER-PROFILE.md'), 'utf-8'); + expect(md).toContain('User Developer Profile'); + expect(md).toMatch(/Communication Style/i); + }); +}); + +describe('learningsCopy', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-learn-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns copied:false when LEARNINGS.md is missing', async () => { + const result = await learningsCopy([], tmpDir); + const data = result.data as Record<string, unknown>; + expect(data.copied).toBe(false); + expect(data.reason).toContain('LEARNINGS'); + }); +}); diff --git 
a/sdk/src/query/profile.ts b/sdk/src/query/profile.ts index 69f7299e..e8a51e12 100644 --- a/sdk/src/query/profile.ts +++ b/sdk/src/query/profile.ts @@ -212,7 +212,7 @@ export const scanSessions: QueryHandler = async (_args, _projectDir) => { let sessionCount = 0; try { - const projectDirs = readdirSync(SESSIONS_DIR, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const projectDirs = readdirSync(SESSIONS_DIR, { withFileTypes: true }); for (const pDir of projectDirs.filter(e => e.isDirectory())) { const pPath = join(SESSIONS_DIR, pDir.name); const sessions = readdirSync(pPath).filter(f => f.endsWith('.jsonl')); @@ -232,7 +232,7 @@ export const profileSample: QueryHandler = async (_args, _projectDir) => { let projectsSampled = 0; try { - const projectDirs = readdirSync(SESSIONS_DIR, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const projectDirs = readdirSync(SESSIONS_DIR, { withFileTypes: true }); for (const pDir of projectDirs.filter(e => e.isDirectory()).slice(0, 5)) { const pPath = join(SESSIONS_DIR, pDir.name); const sessions = readdirSync(pPath).filter(f => f.endsWith('.jsonl')).slice(0, 3); diff --git a/sdk/src/query/progress.ts b/sdk/src/query/progress.ts index e0915e26..e7d75221 100644 --- a/sdk/src/query/progress.ts +++ b/sdk/src/query/progress.ts @@ -17,6 +17,7 @@ import { readFile, readdir } from 'node:fs/promises'; import { existsSync, readdirSync, readFileSync, mkdirSync, writeFileSync, unlinkSync } from 'node:fs'; import { join, relative } from 'node:path'; +import { GSDError, ErrorClassification } from '../errors.js'; import { comparePhaseNum, normalizePhaseName, planningPaths, toPosixPath } from './helpers.js'; import { getMilestoneInfo, roadmapAnalyze } from './roadmap.js'; import type { QueryHandler } from './utils.js'; @@ -137,7 +138,7 @@ export const statsJson: QueryHandler = async (_args, projectDir) => { if (existsSync(paths.phases)) { try { - const entries = 
readdirSync(paths.phases, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(paths.phases, { withFileTypes: true }); for (const entry of entries) { if (!entry.isDirectory()) continue; phasesTotal++; @@ -242,10 +243,7 @@ export const listTodos: QueryHandler = async (args, projectDir) => { export const todoComplete: QueryHandler = async (args, projectDir) => { const filename = args[0]; if (!filename) { - throw new (await import('../errors.js')).GSDError( - 'filename required for todo complete', - (await import('../errors.js')).ErrorClassification.Validation, - ); + throw new GSDError('filename required for todo complete', ErrorClassification.Validation); } const pendingDir = join(projectDir, '.planning', 'todos', 'pending'); @@ -253,10 +251,7 @@ export const todoComplete: QueryHandler = async (args, projectDir) => { const sourcePath = join(pendingDir, filename); if (!existsSync(sourcePath)) { - throw new (await import('../errors.js')).GSDError( - `Todo not found: ${filename}`, - (await import('../errors.js')).ErrorClassification.Validation, - ); + throw new GSDError(`Todo not found: ${filename}`, ErrorClassification.Validation); } mkdirSync(completedDir, { recursive: true }); diff --git a/sdk/src/query/registry.test.ts b/sdk/src/query/registry.test.ts index 967ea919..38217567 100644 --- a/sdk/src/query/registry.test.ts +++ b/sdk/src/query/registry.test.ts @@ -4,7 +4,7 @@ import { describe, it, expect, vi } from 'vitest'; import { QueryRegistry, extractField } from './registry.js'; -import { createRegistry } from './index.js'; +import { createRegistry, QUERY_MUTATION_COMMANDS } from './index.js'; import type { QueryResult } from './utils.js'; @@ -87,6 +87,26 @@ describe('QueryRegistry', () => { await expect(registry.dispatch('unknown-cmd', ['arg1'], '/tmp/project')) .rejects.toThrow('Unknown command: "unknown-cmd"'); }); + + 
it('commands() returns all registered command names', () => { + const registry = new QueryRegistry(); + registry.register('alpha', async () => ({ data: 1 })); + registry.register('beta', async () => ({ data: 2 })); + expect(registry.commands().sort()).toEqual(['alpha', 'beta']); + }); +}); + +// ─── QUERY_MUTATION_COMMANDS vs registry ─────────────────────────────────── + +describe('QUERY_MUTATION_COMMANDS', () => { + it('has a registered handler for every mutation command name', () => { + const registry = createRegistry(); + const missing: string[] = []; + for (const cmd of QUERY_MUTATION_COMMANDS) { + if (!registry.has(cmd)) missing.push(cmd); + } + expect(missing).toEqual([]); + }); }); // ─── createRegistry ──────────────────────────────────────────────────────── diff --git a/sdk/src/query/registry.ts b/sdk/src/query/registry.ts index 60212ecd..8a89f7a3 100644 --- a/sdk/src/query/registry.ts +++ b/sdk/src/query/registry.ts @@ -86,6 +86,13 @@ export class QueryRegistry { return this.handlers.has(command); } + /** + * List all registered command names (for tooling, pipelines, and tests). + */ + commands(): string[] { + return Array.from(this.handlers.keys()); + } + /** * Get the handler for a command without dispatching. * diff --git a/sdk/src/query/skills.test.ts b/sdk/src/query/skills.test.ts new file mode 100644 index 00000000..af2428c0 --- /dev/null +++ b/sdk/src/query/skills.test.ts @@ -0,0 +1,30 @@ +/** + * Tests for agent skills query handler. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, mkdir, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { agentSkills } from './skills.js'; + +describe('agentSkills', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-skills-')); + await mkdir(join(tmpDir, '.cursor', 'skills', 'my-skill'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns deduped skill names from project skill dirs', async () => { + const r = await agentSkills(['gsd-executor'], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.skill_count).toBeGreaterThan(0); + expect((data.skills as string[]).length).toBeGreaterThan(0); + }); +}); diff --git a/sdk/src/query/skills.ts b/sdk/src/query/skills.ts index 4d8dbc86..7e3a4667 100644 --- a/sdk/src/query/skills.ts +++ b/sdk/src/query/skills.ts @@ -33,7 +33,7 @@ export const agentSkills: QueryHandler = async (args, projectDir) => { for (const dir of skillDirs) { if (!existsSync(dir)) continue; try { - const entries = readdirSync(dir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory()) skills.push(entry.name); } diff --git a/sdk/src/query/state-mutation.ts b/sdk/src/query/state-mutation.ts index e75e918b..564c3a69 100644 --- a/sdk/src/query/state-mutation.ts +++ b/sdk/src/query/state-mutation.ts @@ -112,11 +112,32 @@ function updateCurrentPositionFields(content: string, fields: Record<string, string>): string { +async function isLockProcessDead(lockPath: string): Promise<boolean | null> { + try { + const raw = await readFile(lockPath, 'utf-8'); + const pid = parseInt(raw.trim(), 10); + if (!Number.isFinite(pid) || pid <= 0) return true; + try { + process.kill(pid, 0); + return false; + } catch { + return true; + } + } catch { + return null; + } +} + /** * Acquire a 
lockfile for STATE.md operations. * * Uses O_CREAT|O_EXCL for atomic creation. Retries up to 10 times with - * 200ms + jitter delay. Cleans stale locks older than 10 seconds. + * 200ms + jitter delay. Cleans stale locks when the holder PID is dead, or when + * the lock file is older than 10 seconds (existing heuristic). * * @param statePath - Path to STATE.md * @returns Path to the lockfile @@ -136,6 +157,11 @@ export async function acquireStateLock(statePath: string): Promise { } catch (err: unknown) { if (err instanceof Error && (err as NodeJS.ErrnoException).code === 'EEXIST') { try { + const dead = await isLockProcessDead(lockPath); + if (dead === true) { + await unlink(lockPath); + continue; + } const s = await stat(lockPath); if (Date.now() - s.mtimeMs > 10000) { await unlink(lockPath); @@ -714,22 +740,20 @@ export const statePlannedPhase: QueryHandler = async (args, projectDir) => { const phaseArg = args.find((a, i) => args[i - 1] === '--phase') || args[0]; const nameArg = args.find((a, i) => args[i - 1] === '--name') || ''; const plansArg = args.find((a, i) => args[i - 1] === '--plans') || '0'; - const paths = planningPaths(projectDir); if (!phaseArg) { return { data: { updated: false, reason: '--phase argument required' } }; } try { - let content = await readFile(paths.state, 'utf-8'); const timestamp = new Date().toISOString(); const record = `\n**Planned Phase:** ${phaseArg} (${nameArg}) — ${plansArg} plans — ${timestamp}\n`; - if (/\*\*Planned Phase:\*\*/.test(content)) { - content = content.replace(/\*\*Planned Phase:\*\*[^\n]*\n/, record); - } else { - content += record; - } - await writeFile(paths.state, content, 'utf-8'); + await readModifyWriteStateMd(projectDir, (body) => { + if (/\*\*Planned Phase:\*\*/.test(body)) { + return body.replace(/\*\*Planned Phase:\*\*[^\n]*\n/, record); + } + return body + record; + }); return { data: { updated: true, phase: phaseArg, name: nameArg, plans: plansArg } }; } catch { return { data: { updated: false, 
reason: 'STATE.md not found or unreadable' } }; diff --git a/sdk/src/query/summary.test.ts b/sdk/src/query/summary.test.ts new file mode 100644 index 00000000..96bf5151 --- /dev/null +++ b/sdk/src/query/summary.test.ts @@ -0,0 +1,55 @@ +/** + * Tests for summary / history digest handlers. + */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, writeFile, mkdir, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { summaryExtract, historyDigest } from './summary.js'; + +describe('summaryExtract', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-sum-')); + await mkdir(join(tmpDir, '.planning', 'phases', '01-x'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('extracts headings from a summary file', async () => { + const rel = '.planning/phases/01-x/01-SUMMARY.md'; + await writeFile( + join(tmpDir, '.planning', 'phases', '01-x', '01-SUMMARY.md'), + '# Summary\n\n## What Was Done\n\nBuilt the thing.\n\n## Tests\n\nUnit tests pass.\n', + 'utf-8', + ); + const r = await summaryExtract([rel], tmpDir); + const data = r.data as Record<string, Record<string, string>>; + expect(data.sections.what_was_done).toContain('Built'); + }); +}); + +describe('historyDigest', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-hist-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns digest object for project without phases', async () => { + const r = await historyDigest([], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.phases).toBeDefined(); + expect(data.decisions).toBeDefined(); + }); +}); diff --git a/sdk/src/query/summary.ts b/sdk/src/query/summary.ts index a19e94b1..1abb2eb5 --- 
a/sdk/src/query/summary.ts +++ b/sdk/src/query/summary.ts @@ -62,7 +62,7 @@ export const historyDigest: QueryHandler = async (_args, projectDir) => { const milestonesDir = join(projectDir, '.planning', 'milestones'); if (existsSync(milestonesDir)) { try { - const milestoneEntries = readdirSync(milestonesDir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const milestoneEntries = readdirSync(milestonesDir, { withFileTypes: true }); const archivedPhaseDirs = milestoneEntries .filter(e => e.isDirectory() && /^v[\d.]+-phases$/.test(e.name)) .map(e => e.name) @@ -70,7 +70,7 @@ export const historyDigest: QueryHandler = async (_args, projectDir) => { for (const archiveName of archivedPhaseDirs) { const archivePath = join(milestonesDir, archiveName); try { - const dirs = readdirSync(archivePath, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const dirs = readdirSync(archivePath, { withFileTypes: true }); for (const d of dirs.filter(e => e.isDirectory()).sort((a, b) => a.name.localeCompare(b.name))) { allPhaseDirs.push({ name: d.name, fullPath: join(archivePath, d.name) }); } @@ -82,7 +82,7 @@ export const historyDigest: QueryHandler = async (_args, projectDir) => { // Current phases if (existsSync(paths.phases)) { try { - const currentDirs = readdirSync(paths.phases, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const currentDirs = readdirSync(paths.phases, { withFileTypes: true }); for (const d of currentDirs.filter(e => e.isDirectory()).sort((a, b) => a.name.localeCompare(b.name))) { allPhaseDirs.push({ name: d.name, fullPath: join(paths.phases, d.name) }); } diff --git a/sdk/src/query/uat.test.ts b/sdk/src/query/uat.test.ts new file mode 100644 index 00000000..fb8c0d27 --- /dev/null +++ b/sdk/src/query/uat.test.ts @@ -0,0 +1,73 @@ +/** + * Tests for UAT query handlers. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, writeFile, mkdir, rm } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { uatRenderCheckpoint, auditUat } from './uat.js'; + +const SAMPLE_UAT = `--- +status: draft +--- +# UAT + +## Current Test + +number: 1 +name: Login flow +expected: | + User can sign in + +## Other +`; + +describe('uatRenderCheckpoint', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-uat-')); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns error when --file is missing', async () => { + const r = await uatRenderCheckpoint([], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.error).toBeDefined(); + }); + + it('renders checkpoint for valid UAT file', async () => { + const f = join(tmpDir, '01-UAT.md'); + await writeFile(f, SAMPLE_UAT, 'utf-8'); + const r = await uatRenderCheckpoint(['--file', '01-UAT.md'], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.checkpoint).toBeDefined(); + expect(String(data.checkpoint)).toContain('CHECKPOINT'); + expect(data.test_number).toBe(1); + }); +}); + +describe('auditUat', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-uat-audit-')); + await mkdir(join(tmpDir, '.planning', 'phases', '01-x'), { recursive: true }); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns empty results when no UAT files', async () => { + const r = await auditUat([], tmpDir); + const data = r.data as Record<string, unknown>; + expect(Array.isArray(data.results)).toBe(true); + expect((data.summary as Record<string, unknown>).total_files).toBe(0); + }); +}); diff --git a/sdk/src/query/uat.ts b/sdk/src/query/uat.ts index d223e9f0..3ff86453 100644 --- a/sdk/src/query/uat.ts +++ b/sdk/src/query/uat.ts @@ -142,7 +142,7 @@ export 
const auditUat: QueryHandler = async (_args, projectDir) => { } const results: Record<string, unknown>[] = []; - const entries = readdirSync(paths.phases, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(paths.phases, { withFileTypes: true }); for (const entry of entries.filter(e => e.isDirectory())) { const phaseMatch = entry.name.match(/^(\d+[A-Z]?(?:\.\d+)*)/i); diff --git a/sdk/src/query/validate.test.ts b/sdk/src/query/validate.test.ts index 014d91f4..f611f6ff 100644 --- a/sdk/src/query/validate.test.ts +++ b/sdk/src/query/validate.test.ts @@ -10,7 +10,21 @@ import { join } from 'node:path'; import { tmpdir, homedir } from 'node:os'; import { GSDError } from '../errors.js'; -import { verifyKeyLinks, validateConsistency, validateHealth } from './validate.js'; +import { verifyKeyLinks, validateConsistency, validateHealth, regexForKeyLinkPattern } from './validate.js'; + +// ─── regexForKeyLinkPattern ──────────────────────────────────────────────── + +describe('regexForKeyLinkPattern', () => { + it('preserves normal regex patterns used in key_links', () => { + const re = regexForKeyLinkPattern('import.*foo.*from.*target'); + expect(re.test("import { foo } from './target.js';")).toBe(true); + }); + + it('falls back to literal match for nested-quantifier patterns', () => { + const re = regexForKeyLinkPattern('(a+)+'); + expect(re.source).toContain('\\'); + }); +}); // ─── verifyKeyLinks ──────────────────────────────────────────────────────── @@ -198,7 +212,7 @@ must_haves: expect(links[0].detail).toBe('Target referenced in source'); }); - it('returns Invalid regex pattern for bad regex', async () => { + it('falls back to literal match when regex syntax is invalid', async () => { await writeFile(join(tmpDir, 'source.ts'), 'const x = 1;'); await writeFile(join(tmpDir, 'target.ts'), 'const y = 2;'); @@ -227,7 +241,7 @@ must_haves: const data = result.data as Record<string, unknown>; const links = data.links as Array<Record<string, unknown>>; 
expect(links[0].verified).toBe(false); - expect((links[0].detail as string).startsWith('Invalid regex pattern')).toBe(true); + expect((links[0].detail as string)).toContain('not found'); }); it('returns error when no must_haves.key_links in plan', async () => { diff --git a/sdk/src/query/validate.ts b/sdk/src/query/validate.ts index 082639e7..a8e7fea2 100644 --- a/sdk/src/query/validate.ts +++ b/sdk/src/query/validate.ts @@ -16,13 +16,38 @@ import { readFile, readdir, writeFile } from 'node:fs/promises'; import { existsSync } from 'node:fs'; -import { join, isAbsolute, resolve } from 'node:path'; +import { join, resolve } from 'node:path'; import { homedir } from 'node:os'; import { GSDError, ErrorClassification } from '../errors.js'; import { extractFrontmatter, parseMustHavesBlock } from './frontmatter.js'; -import { escapeRegex, normalizePhaseName, planningPaths } from './helpers.js'; +import { escapeRegex, normalizePhaseName, planningPaths, resolvePathUnderProject } from './helpers.js'; import type { QueryHandler } from './utils.js'; +/** Max length for key_links regex patterns (ReDoS mitigation). */ +const MAX_KEY_LINK_PATTERN_LEN = 512; + +/** + * Build a RegExp for must_haves key_links pattern matching. + * Long or nested-quantifier patterns fall back to a literal match via escapeRegex. 
+ */ +export function regexForKeyLinkPattern(pattern: string): RegExp { + if (typeof pattern !== 'string' || pattern.length === 0) { + return /$^/; + } + if (pattern.length > MAX_KEY_LINK_PATTERN_LEN) { + return new RegExp(escapeRegex(pattern.slice(0, MAX_KEY_LINK_PATTERN_LEN))); + } + // Mitigate catastrophic backtracking on nested quantifier forms + if (/\([^)]*[\+\*][^)]*\)[\+\*]/.test(pattern)) { + return new RegExp(escapeRegex(pattern)); + } + try { + return new RegExp(pattern); + } catch { + return new RegExp(escapeRegex(pattern)); + } +} + // ─── verifyKeyLinks ─────────────────────────────────────────────────────── /** @@ -48,7 +73,15 @@ export const verifyKeyLinks: QueryHandler = async (args, projectDir) => { throw new GSDError('file path contains null bytes', ErrorClassification.Validation); } - const fullPath = isAbsolute(planFilePath) ? planFilePath : join(projectDir, planFilePath); + let fullPath: string; + try { + fullPath = await resolvePathUnderProject(projectDir, planFilePath); + } catch (err) { + if (err instanceof GSDError) { + return { data: { error: err.message, path: planFilePath } }; + } + throw err; + } let content: string; try { @@ -77,37 +110,33 @@ export const verifyKeyLinks: QueryHandler = async (args, projectDir) => { let sourceContent: string | null = null; try { - sourceContent = await readFile(join(projectDir, check.from), 'utf-8'); + const fromPath = await resolvePathUnderProject(projectDir, check.from); + sourceContent = await readFile(fromPath, 'utf-8'); } catch { - // Source file not found + // Source file not found or path invalid } if (!sourceContent) { check.detail = 'Source file not found'; } else if (linkObj.pattern) { - // T-12-05: Wrap new RegExp in try/catch - try { - const regex = new RegExp(linkObj.pattern as string); - if (regex.test(sourceContent)) { - check.verified = true; - check.detail = 'Pattern found in source'; - } else { - // Try target file - let targetContent: string | null = null; - try { - targetContent = 
await readFile(join(projectDir, check.to), 'utf-8'); - } catch { - // Target file not found - } - if (targetContent && regex.test(targetContent)) { - check.verified = true; - check.detail = 'Pattern found in target'; - } else { - check.detail = `Pattern "${linkObj.pattern}" not found in source or target`; - } + const regex = regexForKeyLinkPattern(linkObj.pattern as string); + if (regex.test(sourceContent)) { + check.verified = true; + check.detail = 'Pattern found in source'; + } else { + let targetContent: string | null = null; + try { + const toPath = await resolvePathUnderProject(projectDir, check.to); + targetContent = await readFile(toPath, 'utf-8'); + } catch { + // Target file not found + } + if (targetContent && regex.test(targetContent)) { + check.verified = true; + check.detail = 'Pattern found in target'; + } else { + check.detail = `Pattern "${linkObj.pattern}" not found in source or target`; } - } catch { - check.detail = `Invalid regex pattern: ${linkObj.pattern}`; } } else { // No pattern: check if target path is referenced in source content diff --git a/sdk/src/query/verify.ts b/sdk/src/query/verify.ts index 0fdddf96..1af5c5df 100644 --- a/sdk/src/query/verify.ts +++ b/sdk/src/query/verify.ts @@ -558,7 +558,7 @@ export const verifySchemaDrift: QueryHandler = async (args, projectDir) => { return { data: { valid: true, issues: [], checked: 0 } }; } - const entries = readdirSync(phasesDir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(phasesDir, { withFileTypes: true }); let checked = 0; for (const entry of entries) { diff --git a/sdk/src/query/websearch.test.ts b/sdk/src/query/websearch.test.ts new file mode 100644 index 00000000..a56c4b95 --- /dev/null +++ b/sdk/src/query/websearch.test.ts @@ -0,0 +1,31 @@ +/** + * Tests for websearch handler (no network when API key unset). 
+ */ + +import { describe, it, expect } from 'vitest'; +import { websearch } from './websearch.js'; + +describe('websearch', () => { + it('returns available:false when BRAVE_API_KEY is not set', async () => { + const prev = process.env.BRAVE_API_KEY; + delete process.env.BRAVE_API_KEY; + const r = await websearch(['test query'], '/tmp'); + const data = r.data as Record<string, unknown>; + expect(data.available).toBe(false); + if (prev !== undefined) process.env.BRAVE_API_KEY = prev; + }); + + it('returns error when query is missing and BRAVE_API_KEY is set', async () => { + const prev = process.env.BRAVE_API_KEY; + process.env.BRAVE_API_KEY = 'test-dummy-key'; + try { + const r = await websearch([], '/tmp'); + const data = r.data as Record<string, unknown>; + expect(data.available).toBe(false); + expect(data.error).toBe('Query required'); + } finally { + if (prev !== undefined) process.env.BRAVE_API_KEY = prev; + else delete process.env.BRAVE_API_KEY; + } + }); +}); diff --git a/sdk/src/query/workstream.test.ts b/sdk/src/query/workstream.test.ts new file mode 100644 index 00000000..063f777d --- /dev/null +++ b/sdk/src/query/workstream.test.ts @@ -0,0 +1,51 @@ +/** + * Tests for workstream query handlers. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'vitest'; +import { mkdtemp, mkdir, rm, writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +import { workstreamList, workstreamCreate } from './workstream.js'; + +describe('workstreamList', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-ws-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + await writeFile(join(tmpDir, '.planning', 'config.json'), JSON.stringify({ model_profile: 'balanced' })); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('returns flat mode when no workstreams directory', async () => { + const r = await workstreamList([], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.mode).toBe('flat'); + expect(Array.isArray(data.workstreams)).toBe(true); + }); +}); + +describe('workstreamCreate', () => { + let tmpDir: string; + + beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'gsd-ws2-')); + await mkdir(join(tmpDir, '.planning'), { recursive: true }); + await writeFile(join(tmpDir, '.planning', 'config.json'), JSON.stringify({ model_profile: 'balanced' })); + }); + + afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); + }); + + it('creates workstream directory tree', async () => { + const r = await workstreamCreate(['test-ws'], tmpDir); + const data = r.data as Record<string, unknown>; + expect(data.created).toBe(true); + }); +}); diff --git a/sdk/src/query/workstream.ts b/sdk/src/query/workstream.ts index 046d1ed8..f8b2bc10 100644 --- a/sdk/src/query/workstream.ts +++ b/sdk/src/query/workstream.ts @@ -71,7 +71,7 @@ export const workstreamList: QueryHandler = async (_args, projectDir) => { const dir = workstreamsDir(projectDir); if (!existsSync(dir)) return { data: { mode: 'flat', workstreams: [], message: 'No workstreams — operating in flat mode' } }; try { - const entries 
= readdirSync(dir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(dir, { withFileTypes: true }); const workstreams = entries.filter(e => e.isDirectory()).map(e => e.name); return { data: { mode: 'workstream', workstreams, count: workstreams.length } }; } catch { @@ -212,7 +212,7 @@ export const workstreamComplete: QueryHandler = async (args, projectDir) => { const filesMoved: string[] = []; try { - const entries = readdirSync(wsDir, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>; + const entries = readdirSync(wsDir, { withFileTypes: true }); for (const entry of entries) { renameSync(join(wsDir, entry.name), join(archivePath, entry.name)); filesMoved.push(entry.name); @@ -230,7 +230,7 @@ export const workstreamComplete: QueryHandler = async (args, projectDir) => { let remainingWs = 0; try { - remainingWs = (readdirSync(wsRoot, { withFileTypes: true }) as unknown as Array<{ isDirectory(): boolean; name: string }>) + remainingWs = readdirSync(wsRoot, { withFileTypes: true }) .filter(e => e.isDirectory()).length; if (remainingWs === 0) rmdirSync(wsRoot); } catch { /* best-effort */ }