Files
get-shit-done/tests/forensics.test.cjs
Rezolv d3a79917fa feat: Phase 2 caller migration — gsd-sdk query in workflows, agents, commands (#2179)
* feat: Phase 2 caller migration — gsd-sdk query in workflows (#2122)

Cherry-picked orchestration rewrites from feat/sdk-foundation (#2008, 4018fee) onto current main, resolving conflicts to keep upstream worktree guards and post-merge test gate. SDK stub registry omitted (out of Phase 2 scope per #2122).

Refs: #2122 #2008
Made-with: Cursor

* docs: add gsd-sdk query migration blurb

Made-with: Cursor

* docs(workflows): extend Phase 2 gsd-sdk query caller migration

- Swap node gsd-tools.cjs for gsd-sdk query in review, plan-phase, execute-plan,
  ship, extract_learnings, ai-integration-phase, eval-review, next, thread
- Document graphify CJS-only in gsd-planner; dual-path in CLI-TOOLS and ARCHITECTURE
- Update tests: workstreams gsd-sdk path, thread frontmatter.get, workspace init.*,
  CRLF-safe autonomous frontmatter parse
- CHANGELOG: Phase 2 caller migration scope

Made-with: Cursor

* docs(phase2): USER-GUIDE + remaining gsd-sdk query call sites

- USER-GUIDE: dual-path CLI section; state validate/sync use full CJS path
- Commands: debug (config-get+tdd), quick (security note), intel Task prompt
- Agent: gsd-debug-session-manager resolve-model via jq
- Workflows: milestone-summary, forensics, next, complete-milestone/verify-work
  (audit-open CJS notes), discuss-phase, progress, verify-phase, add/insert/remove
  phase, transition, manager, quick workflow; remove-phase commit without --files
- Test: quick-session-management accepts frontmatter.get
- CHANGELOG: Phase 2 follow-up bullet

Made-with: Cursor

* docs(phase2): align gsd-sdk query examples in commands and agents

- init.* query names; frontmatter.get uses positional field name
- state.* handlers use positional args; commit uses positional paths
- CJS-only notes for from-gsd2 and graphify; learnings.query wording
- CHANGELOG: Phase 2 orchestration doc pass

Made-with: Cursor

* docs(phase2): normalize gsd-sdk query commit to positional file paths

- Strip --files from commit examples in workflows, references, commands
- Keep commit-to-subrepo ... --files (separate handler)
- git-planning-commit.md: document positional args
- Tests: new-project commit line, state.record-session, gates CRLF, roadmap.analyze
- CHANGELOG [Unreleased]

Made-with: Cursor

* feat(sdk): gsd-sdk query parity with gsd-tools and PR 2179 registry fixes

- Route query via longest-prefix match and dotted single-token expansion; fall back
  to runGsdToolsQuery (same argv as node gsd-tools.cjs) for full CLI coverage.
- Parse gsd-sdk query permissively so gsd-tools flags (--json, --verify, etc.) are
  not rejected by strict parseArgs.
- resolveGsdToolsPath: honor GSD_TOOLS_PATH; prefer bundled get-shit-done copy
  over project .claude installs; export runGsdToolsQuery from the SDK.
- Fix gsd-tools audit-open (core.output; pass object for --json JSON).
- Register summary-extract as alias of summary.extract; fix audit-fix workflow to
  call audit-uat instead of invalid init.audit-uat (PR review).

Updates QUERY-HANDLERS.md and CHANGELOG [Unreleased].

Made-with: Cursor

* fix(sdk): Phase 2 scope — Trek-e review (#2179, #2122)

- Remove gsd-sdk query passthrough to gsd-tools.cjs; drop GSD_TOOLS_PATH
- Consolidate argv routing in resolveQueryArgv(); update USAGE and QUERY-HANDLERS
- Surface @file: read failures in GSDTools.parseOutput
- execute-plan: defer Task Commit Protocol to gsd-executor
- stale-colon-refs: skip .planning/ and root CLAUDE.md (gitignored overlays)
- CHANGELOG [Unreleased]: maintainer review and routing notes

Made-with: Cursor
2026-04-15 22:46:31 -04:00

244 lines
8.3 KiB
JavaScript

/**
* GSD Forensics Tests
*
* Validates the forensics command and workflow files exist,
* follow expected patterns, and cover all anomaly detection types.
*/
// Node's built-in test runner and strict assertions — no third-party test framework.
const { test, describe, beforeEach, afterEach } = require('node:test');
const assert = require('node:assert/strict');
const fs = require('fs');
const path = require('path');
const os = require('os');
// Root of the package, one level above this tests/ directory.
const repoRoot = path.resolve(__dirname, '..');
// The forensics command prompt file exercised by the first describe block.
const commandPath = path.join(repoRoot, 'commands', 'gsd', 'forensics.md');
// NOTE(review): this nests a second get-shit-done/ segment under repoRoot —
// presumably the bundled copy layout; confirm against the repo tree.
const workflowPath = path.join(repoRoot, 'get-shit-done', 'workflows', 'forensics.md');
describe('forensics command', () => {
  // Every assertion below inspects the raw markdown of the command file;
  // reads stay per-test so a missing file fails each test individually.
  const readCommand = () => fs.readFileSync(commandPath, 'utf-8');

  test('command file exists', () => {
    assert.ok(fs.existsSync(commandPath), 'commands/gsd/forensics.md should exist');
  });

  test('command has correct frontmatter', () => {
    const content = readCommand();
    assert.ok(content.includes('name: gsd:forensics'), 'should have correct command name');
    assert.ok(content.includes('type: prompt'), 'should have type: prompt');
    assert.ok(content.includes('argument-hint'), 'should have argument-hint');
  });

  test('command references workflow in execution_context', () => {
    assert.ok(
      readCommand().includes('workflows/forensics.md'),
      'should reference the forensics workflow'
    );
  });

  test('command has success_criteria section', () => {
    assert.ok(readCommand().includes('<success_criteria>'), 'should have success_criteria');
  });

  test('command has critical_rules section', () => {
    assert.ok(readCommand().includes('<critical_rules>'), 'should have critical_rules');
  });

  test('command enforces read-only investigation', () => {
    // Case-insensitive scan: either phrasing satisfies the read-only contract.
    const lowered = readCommand().toLowerCase();
    assert.ok(
      ['read-only', 'do not modify'].some((phrase) => lowered.includes(phrase)),
      'should enforce read-only investigation'
    );
  });

  test('command requires evidence-grounded findings', () => {
    const content = readCommand();
    assert.ok(
      ['Ground findings', 'cite specific'].some((phrase) => content.includes(phrase)),
      'should require evidence-grounded analysis'
    );
  });
});
describe('forensics workflow', () => {
  // Shared reader for the workflow markdown, invoked per test.
  const readWorkflow = () => fs.readFileSync(workflowPath, 'utf-8');

  test('workflow file exists', () => {
    assert.ok(fs.existsSync(workflowPath), 'workflows/forensics.md should exist');
  });

  test('workflow gathers evidence from all data sources', () => {
    const content = readWorkflow();
    const dataSources = [
      'git log',
      'git status',
      'STATE.md',
      'ROADMAP.md',
      'PLAN.md',
      'SUMMARY.md',
      'VERIFICATION.md',
      'SESSION_REPORT',
      'worktree',
    ];
    dataSources.forEach((source) => {
      assert.ok(content.includes(source), `workflow should reference data source: ${source}`);
    });
  });

  test('workflow detects all 6 anomaly types', () => {
    const content = readWorkflow();
    const anomalyTypes = [
      'Stuck Loop',
      'Missing Artifact',
      'Abandoned Work',
      'Crash',
      'Scope Drift',
      'Test Regression',
    ];
    anomalyTypes.forEach((anomaly) => {
      assert.ok(content.includes(anomaly), `workflow should detect anomaly: ${anomaly}`);
    });
  });

  test('workflow writes report to forensics directory', () => {
    assert.ok(
      readWorkflow().includes('.planning/forensics/report-'),
      'should write to .planning/forensics/'
    );
  });

  test('workflow includes redaction rules', () => {
    const content = readWorkflow();
    const mentionsRedaction = content.includes('Redaction') || content.includes('redact');
    assert.ok(mentionsRedaction, 'should include data redaction rules');
  });

  test('workflow offers interactive investigation', () => {
    const content = readWorkflow();
    const offersFollowUp = content.includes('dig deeper') || content.includes('Interactive');
    assert.ok(offersFollowUp, 'should offer interactive follow-up');
  });

  test('workflow offers GitHub issue creation', () => {
    assert.ok(
      readWorkflow().includes('gh issue create'),
      'should offer to create GitHub issue from findings'
    );
  });

  test('workflow updates STATE.md', () => {
    const content = readWorkflow();
    // Accept either the CJS CLI spelling or the gsd-sdk query handler name.
    const recordsSession =
      content.includes('state record-session') || content.includes('state.record-session');
    assert.ok(
      recordsSession,
      'should update STATE.md via state record-session (CJS or gsd-sdk query)'
    );
  });

  test('workflow has confidence levels for anomalies', () => {
    const content = readWorkflow();
    assert.ok(
      ['HIGH', 'MEDIUM', 'LOW'].every((level) => content.includes(level)),
      'anomalies should have confidence levels'
    );
  });
});
describe('forensics report structure', () => {
  test('report template has all required sections', () => {
    const content = fs.readFileSync(workflowPath, 'utf-8');
    const requiredSections = [
      'Evidence Summary',
      'Git Activity',
      'Planning State',
      'Artifact Completeness',
      'Anomalies Detected',
      'Root Cause Hypothesis',
      'Recommended Actions',
    ];
    for (const section of requiredSections) {
      assert.ok(content.includes(section), `report should include section: "${section}"`);
    }
  });

  test('report includes artifact completeness table', () => {
    const content = fs.readFileSync(workflowPath, 'utf-8');
    // One substring per artifact type the completeness table must cover.
    const artifactMarkers = ['PLAN', 'CONTEXT', 'RESEARCH', 'SUMMARY', 'VERIFICATION'];
    assert.ok(
      artifactMarkers.every((marker) => content.includes(marker)),
      'artifact table should check all 5 artifact types'
    );
  });
});
describe('forensics fixture-based tests', () => {
  let tmpDir;

  // Build a phase directory under the temp planning tree and seed it
  // with the given { filename: body } entries.
  const makePhase = (slug, files) => {
    const dir = path.join(tmpDir, '.planning', 'phases', slug);
    fs.mkdirSync(dir, { recursive: true });
    for (const [name, body] of Object.entries(files)) {
      fs.writeFileSync(path.join(dir, name), body);
    }
    return dir;
  };

  beforeEach(() => {
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-forensics-test-'));
  });

  afterEach(() => {
    if (tmpDir) fs.rmSync(tmpDir, { recursive: true, force: true });
  });

  test('detects missing artifacts in phase structure', () => {
    // Phase 1 carries the full artifact set; phase 2 deliberately lacks
    // SUMMARY and VERIFICATION so the anomaly is detectable.
    const phase1 = makePhase('01-setup', {
      '01-PLAN-A.md': 'plan',
      '01-SUMMARY.md': 'summary',
      '01-VERIFICATION.md': 'verification',
    });
    const phase2 = makePhase('02-core', {
      '02-PLAN-A.md': 'plan',
    });

    const hasArtifact = (dir, marker) =>
      fs.readdirSync(dir).some((entry) => entry.includes(marker));

    assert.ok(hasArtifact(phase1, 'SUMMARY'), 'phase 1 has SUMMARY');
    assert.ok(hasArtifact(phase1, 'VERIFICATION'), 'phase 1 has VERIFICATION');
    assert.ok(!hasArtifact(phase2, 'SUMMARY'), 'phase 2 missing SUMMARY (anomaly)');
    assert.ok(!hasArtifact(phase2, 'VERIFICATION'), 'phase 2 missing VERIFICATION (anomaly)');
  });

  test('forensics report directory can be created', () => {
    const forensicsDir = path.join(tmpDir, '.planning', 'forensics');
    fs.mkdirSync(forensicsDir, { recursive: true });
    const reportPath = path.join(forensicsDir, 'report-20260321-150000.md');
    fs.writeFileSync(reportPath, '# Forensic Report\n');

    assert.ok(fs.existsSync(reportPath), 'report file should be created');
    const written = fs.readFileSync(reportPath, 'utf-8');
    assert.ok(written.includes('Forensic Report'), 'report should have header');
  });

  test('handles project with no .planning directory', () => {
    // Fresh tmpdir starts with no planning tree at all.
    const planningExists = fs.existsSync(path.join(tmpDir, '.planning'));
    assert.strictEqual(planningExists, false, 'no .planning/ should exist');

    // Forensics should still work with git data
    const forensicsDir = path.join(tmpDir, '.planning', 'forensics');
    fs.mkdirSync(forensicsDir, { recursive: true });
    assert.ok(fs.existsSync(forensicsDir), 'forensics dir created on demand');
  });
});