mirror of
https://github.com/glittercowboy/get-shit-done
synced 2026-05-05 23:02:20 +02:00
Compare commits
7 Commits
release/1.
...
fix/2916-b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
006cdafe8f | ||
|
|
8051bc4fd8 | ||
|
|
444db1714b | ||
|
|
6dce1de4a7 | ||
|
|
abb2cb63f6 | ||
|
|
8cbdbdd2de | ||
|
|
951d5bf7c0 |
6
.githooks/pre-commit
Executable file
6
.githooks/pre-commit
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
if git diff --cached --name-only | grep -Eq "^sdk/src/query/command-manifest\.|^sdk/src/query/command-aliases\.generated\.ts$|^get-shit-done/bin/lib/command-aliases\.generated\.cjs$|^sdk/scripts/gen-command-aliases\.ts$"; then
|
||||
npm run check:alias-drift
|
||||
fi
|
||||
48
.githooks/pre-push
Executable file
48
.githooks/pre-push
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
zero_sha='0000000000000000000000000000000000000000'
|
||||
blocked_regex="${GSD_BLOCKED_AUTHOR_REGEX:-}"
|
||||
|
||||
# Local-only guard: no-op unless the developer opts in via env var, e.g.
|
||||
# export GSD_BLOCKED_AUTHOR_REGEX='@example-corp\.com$'
|
||||
if [[ -z "$blocked_regex" ]]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
violations=()
|
||||
|
||||
while read -r local_ref local_sha remote_ref remote_sha; do
|
||||
# branch/tag deletion
|
||||
if [[ "$local_sha" == "$zero_sha" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [[ "$remote_sha" == "$zero_sha" ]]; then
|
||||
# New remote ref: inspect commits not already on any remote
|
||||
commit_list=$(git rev-list "$local_sha" --not --remotes)
|
||||
else
|
||||
commit_list=$(git rev-list "$remote_sha..$local_sha")
|
||||
fi
|
||||
|
||||
while read -r commit; do
|
||||
[[ -z "$commit" ]] && continue
|
||||
author_email=$(git show -s --format='%ae' "$commit")
|
||||
lower_email=$(printf '%s' "$author_email" | tr '[:upper:]' '[:lower:]')
|
||||
if printf '%s' "$lower_email" | grep -Eq "$blocked_regex"; then
|
||||
violations+=("$commit <$author_email>")
|
||||
fi
|
||||
done <<< "$commit_list"
|
||||
done
|
||||
|
||||
if [[ ${#violations[@]} -gt 0 ]]; then
|
||||
{
|
||||
echo "Push blocked: commit author email matched local blocked regex ($blocked_regex)."
|
||||
echo "Rewrite author info before pushing these commits:"
|
||||
for v in "${violations[@]}"; do
|
||||
echo " - $v"
|
||||
done
|
||||
echo "Suggested fix: git rebase -i <base> --exec \"git commit --amend --no-edit --author='Your Name <non-enterprise@email>'\""
|
||||
} >&2
|
||||
exit 1
|
||||
fi
|
||||
12
.github/workflows/test.yml
vendored
12
.github/workflows/test.yml
vendored
@@ -88,6 +88,18 @@ jobs:
|
||||
- name: Build SDK dist (required by installer)
|
||||
run: npm run build:sdk
|
||||
|
||||
# Seam contract gate: keep manifest -> generated aliases -> registry/CJS adapters aligned.
|
||||
# Run once per workflow on the primary Linux node to avoid redundant matrix cost.
|
||||
- name: SDK seam coverage tests
|
||||
if: matrix.os == 'ubuntu-latest' && matrix.node-version == 24
|
||||
shell: bash
|
||||
run: cd sdk && npx vitest run src/query/command-seam-coverage.test.ts
|
||||
|
||||
- name: SDK generated alias artifact drift check
|
||||
if: matrix.os == 'ubuntu-latest' && matrix.node-version == 24
|
||||
shell: bash
|
||||
run: node sdk/scripts/check-command-aliases-fresh.mjs
|
||||
|
||||
- name: Run tests with coverage
|
||||
shell: bash
|
||||
run: npm run test:coverage
|
||||
|
||||
@@ -26,6 +26,7 @@ Format follows [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
||||
RC. (#2833)
|
||||
|
||||
### Changed — 1.40.0-rc.1
|
||||
- **Planning workspace seam extracted from `core.cjs` into `planning-workspace.cjs`** — path/workstream/lock behavior now lives in a dedicated module (`planningDir`, `planningPaths`, `planningRoot`, active-workstream routing, `withPlanningLock`). `core.cjs` keeps compatibility re-exports while call-sites migrate to direct imports, improving locality and reducing coupling. (#2900)
|
||||
- **Skill surface consolidated 86 → 59 `commands/gsd/*.md` entries** — four new
|
||||
grouped skills (`capture`, `phase`, `config`, `workspace`) replace clusters of
|
||||
micro-skills. Six existing parents absorb wrap-up and sub-operations as flags:
|
||||
@@ -38,6 +39,7 @@ Format follows [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
|
||||
posting a comment that points to the contribution guide. (#2872)
|
||||
|
||||
### Fixed — 1.40.0-rc.1
|
||||
- **`gap-analysis` now parses non-`REQ-` requirement IDs and ignores traceability table headers** — `parseRequirements()` no longer hard-codes the `REQ-` prefix and now accepts uppercase prefixed IDs such as `TST-01`, `BACK-07`, and `INSP-04`; markdown table header rows (for example `| REQ-ID | ... |`) are excluded so header tokens are not reported as phantom uncovered requirements. Added regression coverage for mixed-prefix REQUIREMENTS files with traceability tables. (#2897)
|
||||
- **Gemini slash commands namespaced as `/gsd:<cmd>` instead of `/gsd-<cmd>`** —
|
||||
Gemini CLI namespaces commands under `gsd:`, so `/gsd-plan-phase` was unexecutable.
|
||||
Body-text references in commands, agents, banners, and patch-reapply hints are now
|
||||
|
||||
@@ -345,6 +345,73 @@ node --test tests/core.test.cjs
|
||||
npm run test:coverage
|
||||
```
|
||||
|
||||
### Pre-PR Seam Checks (Manifest/Alias Routing)
|
||||
|
||||
If you touched any of the command-manifest or generated alias files, run:
|
||||
|
||||
```bash
|
||||
npm run check:alias-drift
|
||||
```
|
||||
|
||||
This verifies generated alias artifacts are in sync with manifest source-of-truth.
|
||||
|
||||
Optional local pre-commit hook entry (Git-native):
|
||||
|
||||
```bash
|
||||
# one-time setup
|
||||
mkdir -p .githooks
|
||||
cat > .githooks/pre-commit <<'EOF'
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
if git diff --cached --name-only | grep -Eq "^sdk/src/query/command-manifest\.|^sdk/src/query/command-aliases\.generated\.ts$|^get-shit-done/bin/lib/command-aliases\.generated\.cjs$|^sdk/scripts/gen-command-aliases\.ts$"; then
|
||||
npm run check:alias-drift
|
||||
fi
|
||||
EOF
|
||||
chmod +x .githooks/pre-commit
|
||||
git config core.hooksPath .githooks
|
||||
```
|
||||
|
||||
Optional local pre-push hook to block a private author-email pattern:
|
||||
|
||||
```bash
|
||||
# set locally in your shell profile (example)
|
||||
export GSD_BLOCKED_AUTHOR_REGEX='@example-corp\\.com$'
|
||||
|
||||
cat > .githooks/pre-push <<'EOF'
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
zero_sha='0000000000000000000000000000000000000000'
|
||||
blocked_regex="${GSD_BLOCKED_AUTHOR_REGEX:-}"
|
||||
[[ -z "$blocked_regex" ]] && exit 0
|
||||
violations=()
|
||||
|
||||
while read -r local_ref local_sha remote_ref remote_sha; do
|
||||
[[ "$local_sha" == "$zero_sha" ]] && continue
|
||||
if [[ "$remote_sha" == "$zero_sha" ]]; then
|
||||
commits=$(git rev-list "$local_sha" --not --remotes)
|
||||
else
|
||||
commits=$(git rev-list "$remote_sha..$local_sha")
|
||||
fi
|
||||
while read -r commit; do
|
||||
[[ -z "$commit" ]] && continue
|
||||
email=$(git show -s --format='%ae' "$commit" | tr '[:upper:]' '[:lower:]')
|
||||
if printf '%s' "$email" | grep -Eq "$blocked_regex"; then
|
||||
violations+=("$commit <$email>")
|
||||
fi
|
||||
done <<< "$commits"
|
||||
done
|
||||
|
||||
if [[ ${#violations[@]} -gt 0 ]]; then
|
||||
echo "Push blocked: commit author email matched local blocked regex ($blocked_regex)." >&2
|
||||
printf ' - %s\n' "${violations[@]}" >&2
|
||||
exit 1
|
||||
fi
|
||||
EOF
|
||||
chmod +x .githooks/pre-push
|
||||
```
|
||||
|
||||
### CI Test Quality Checks
|
||||
|
||||
The following checks run on every PR in addition to the test suite:
|
||||
|
||||
@@ -257,12 +257,13 @@ See [`docs/INVENTORY.md`](INVENTORY.md#hooks-11-shipped) for the authoritative 1
|
||||
|
||||
### CLI Tools (`get-shit-done/bin/`)
|
||||
|
||||
Node.js CLI utility (`gsd-tools.cjs`) with domain modules split across `get-shit-done/bin/lib/` (see [`docs/INVENTORY.md`](INVENTORY.md#cli-modules-24-shipped) for the authoritative roster):
|
||||
Node.js CLI utility (`gsd-tools.cjs`) with domain modules split across `get-shit-done/bin/lib/` (see [`docs/INVENTORY.md`](INVENTORY.md#cli-modules-33-shipped) for the authoritative roster):
|
||||
|
||||
|
||||
| Module | Responsibility |
|
||||
| ---------------------- | --------------------------------------------------------------------------------------------------- |
|
||||
| `core.cjs` | Error handling, output formatting, shared utilities |
|
||||
| `core.cjs` | Error handling, output formatting, shared utilities; compatibility re-exports for planning helpers |
|
||||
| `planning-workspace.cjs` | Planning seam (`planningDir`, `planningPaths`, active workstream routing, `.planning/.lock`) |
|
||||
| `state.cjs` | STATE.md parsing, updating, progression, metrics |
|
||||
| `phase.cjs` | Phase directory operations, decimal numbering, plan indexing |
|
||||
| `roadmap.cjs` | ROADMAP.md parsing, phase extraction, plan progress |
|
||||
|
||||
@@ -452,9 +452,10 @@ User-facing entry point: `/gsd-graphify` (see [Command Reference](COMMANDS.md#gs
|
||||
|
||||
| Module | File | Exports |
|
||||
|--------|------|---------|
|
||||
| Core | `lib/core.cjs` | `error()`, `output()`, `parseArgs()`, shared utilities |
|
||||
| Core | `lib/core.cjs` | `error()`, `output()`, `parseArgs()`, shared utilities, compatibility re-exports |
|
||||
| State | `lib/state.cjs` | All `state` subcommands, `state-snapshot` |
|
||||
| Phase | `lib/phase.cjs` | Phase CRUD, `find-phase`, `phase-plan-index`, `phases list` |
|
||||
| Planning Workspace | `lib/planning-workspace.cjs` | Planning seam: `planningDir`, `planningPaths`, active workstream routing, `.planning/.lock` |
|
||||
| Roadmap | `lib/roadmap.cjs` | Roadmap parsing, phase extraction, progress updates |
|
||||
| Config | `lib/config.cjs` | Config read/write, section initialization |
|
||||
| Verify | `lib/verify.cjs` | All verification and validation commands |
|
||||
|
||||
@@ -246,6 +246,7 @@
|
||||
"cli_modules": [
|
||||
"artifacts.cjs",
|
||||
"audit.cjs",
|
||||
"command-aliases.generated.cjs",
|
||||
"commands.cjs",
|
||||
"config-schema.cjs",
|
||||
"config.cjs",
|
||||
@@ -258,22 +259,30 @@
|
||||
"gap-checker.cjs",
|
||||
"graphify.cjs",
|
||||
"gsd2-import.cjs",
|
||||
"init-command-router.cjs",
|
||||
"init.cjs",
|
||||
"install-profiles.cjs",
|
||||
"intel.cjs",
|
||||
"learnings.cjs",
|
||||
"milestone.cjs",
|
||||
"model-profiles.cjs",
|
||||
"phase-command-router.cjs",
|
||||
"phase.cjs",
|
||||
"phases-command-router.cjs",
|
||||
"planning-workspace.cjs",
|
||||
"profile-output.cjs",
|
||||
"profile-pipeline.cjs",
|
||||
"roadmap-command-router.cjs",
|
||||
"roadmap.cjs",
|
||||
"schema-detect.cjs",
|
||||
"secrets.cjs",
|
||||
"security.cjs",
|
||||
"state-command-router.cjs",
|
||||
"state.cjs",
|
||||
"template.cjs",
|
||||
"uat.cjs",
|
||||
"validate-command-router.cjs",
|
||||
"verify-command-router.cjs",
|
||||
"verify.cjs",
|
||||
"workstream.cjs"
|
||||
],
|
||||
@@ -291,4 +300,4 @@
|
||||
"gsd-workflow-guard.js"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -348,7 +348,7 @@ The `gsd-planner` agent is decomposed into a core agent plus reference modules t
|
||||
|
||||
---
|
||||
|
||||
## CLI Modules (32 shipped)
|
||||
## CLI Modules (41 shipped)
|
||||
|
||||
Full listing: `get-shit-done/bin/lib/*.cjs`.
|
||||
|
||||
@@ -356,11 +356,12 @@ Full listing: `get-shit-done/bin/lib/*.cjs`.
|
||||
|--------|----------------|
|
||||
| `artifacts.cjs` | Canonical artifact registry — known `.planning/` root file names; used by `gsd-health` W019 lint |
|
||||
| `audit.cjs` | Audit dispatch, audit open sessions, audit storage helpers |
|
||||
| `command-aliases.generated.cjs` | Generated CJS alias/subcommand metadata for manifest-backed family routers |
|
||||
| `commands.cjs` | Misc CLI commands (slug, timestamp, todos, scaffolding, stats) |
|
||||
| `config-schema.cjs` | Single source of truth for `VALID_CONFIG_KEYS` and dynamic key patterns; imported by both the validator and the config-schema-docs parity test |
|
||||
| `config.cjs` | `config.json` read/write, section initialization; imports validator from `config-schema.cjs` |
|
||||
| `context-utilization.cjs` | Pure classifier for `gsd-health --context` — turns (tokensUsed, contextWindow) into a `{ percent, state }` triage result against the 60%/70% fracture-point thresholds (#2792) |
|
||||
| `core.cjs` | Error handling, output formatting, shared utilities, runtime fallbacks |
|
||||
| `core.cjs` | Error handling, output formatting, shared utilities, runtime fallbacks; compatibility re-exports for planning-workspace helpers |
|
||||
| `decisions.cjs` | Shared parser for CONTEXT.md `<decisions>` blocks (D-NN entries); used by `gap-checker.cjs` and intended for #2492 plan/verify decision gates |
|
||||
| `docs.cjs` | Docs-update workflow init, Markdown scanning, monorepo detection |
|
||||
| `drift.cjs` | Post-execute codebase structural drift detector (#2003): classifies file changes into new-dir/barrel/migration/route categories and round-trips `last_mapped_commit` frontmatter |
|
||||
@@ -368,22 +369,30 @@ Full listing: `get-shit-done/bin/lib/*.cjs`.
|
||||
| `gap-checker.cjs` | Post-planning gap analysis (#2493): unified REQUIREMENTS.md + CONTEXT.md decisions vs PLAN.md coverage report (`gsd-tools gap-analysis`) |
|
||||
| `graphify.cjs` | Knowledge-graph build/query/status/diff for `/gsd-graphify` |
|
||||
| `gsd2-import.cjs` | External-plan ingest for `/gsd-from-gsd2` |
|
||||
| `init-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools init` |
|
||||
| `init.cjs` | Compound context loading for each workflow type |
|
||||
| `install-profiles.cjs` | Install profile allowlist + skill staging for `--minimal` install (#2762); single source of truth for which `gsd-*` skills/agents land in runtime config dirs |
|
||||
| `intel.cjs` | Codebase intel store backing `/gsd-intel` and `gsd-intel-updater` |
|
||||
| `learnings.cjs` | Cross-phase learnings extraction for `/gsd-extract-learnings` |
|
||||
| `milestone.cjs` | Milestone archival, requirements marking |
|
||||
| `model-profiles.cjs` | Model profile resolution table (authoritative profile data) |
|
||||
| `phase-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools phase` |
|
||||
| `phase.cjs` | Phase directory operations, decimal numbering, plan indexing |
|
||||
| `phases-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools phases` |
|
||||
| `planning-workspace.cjs` | Planning path/workstream seam (`planningDir`, `planningPaths`, active-workstream routing, `.planning/.lock` orchestration) |
|
||||
| `profile-output.cjs` | Profile rendering, USER-PROFILE.md and dev-preferences.md generation |
|
||||
| `profile-pipeline.cjs` | User behavioral profiling data pipeline, session file scanning |
|
||||
| `roadmap-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools roadmap` |
|
||||
| `roadmap.cjs` | ROADMAP.md parsing, phase extraction, plan progress |
|
||||
| `schema-detect.cjs` | Schema-drift detection for ORM patterns (Prisma, Drizzle, etc.) |
|
||||
| `secrets.cjs` | Secret-config masking convention (`****<last-4>`) for integration keys managed by `/gsd-settings-integrations` — keeps plaintext out of `config-set` output |
|
||||
| `security.cjs` | Path traversal prevention, prompt injection detection, safe JSON/shell helpers |
|
||||
| `state-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools state` |
|
||||
| `state.cjs` | STATE.md parsing, updating, progression, metrics |
|
||||
| `template.cjs` | Template selection and filling with variable substitution |
|
||||
| `uat.cjs` | UAT file parsing, verification debt tracking, audit-uat support |
|
||||
| `validate-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools validate` |
|
||||
| `verify-command-router.cjs` | Thin CJS subcommand router adapter for `gsd-tools verify` |
|
||||
| `verify.cjs` | Plan structure, phase completeness, reference, commit validation |
|
||||
| `workstream.cjs` | Workstream CRUD, migration, session-scoped active pointer |
|
||||
|
||||
|
||||
@@ -172,7 +172,8 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const core = require('./lib/core.cjs');
|
||||
const { error, findProjectRoot, getActiveWorkstream } = core;
|
||||
const { error, findProjectRoot } = core;
|
||||
const { getActiveWorkstream } = require('./lib/planning-workspace.cjs');
|
||||
const state = require('./lib/state.cjs');
|
||||
const phase = require('./lib/phase.cjs');
|
||||
const roadmap = require('./lib/roadmap.cjs');
|
||||
@@ -189,6 +190,13 @@ const workstream = require('./lib/workstream.cjs');
|
||||
const docs = require('./lib/docs.cjs');
|
||||
const learnings = require('./lib/learnings.cjs');
|
||||
const gapChecker = require('./lib/gap-checker.cjs');
|
||||
const { routeStateCommand } = require('./lib/state-command-router.cjs');
|
||||
const { routeVerifyCommand } = require('./lib/verify-command-router.cjs');
|
||||
const { routeInitCommand } = require('./lib/init-command-router.cjs');
|
||||
const { routePhaseCommand } = require('./lib/phase-command-router.cjs');
|
||||
const { routePhasesCommand } = require('./lib/phases-command-router.cjs');
|
||||
const { routeValidateCommand } = require('./lib/validate-command-router.cjs');
|
||||
const { routeRoadmapCommand } = require('./lib/roadmap-command-router.cjs');
|
||||
|
||||
// ─── Arg parsing helpers ──────────────────────────────────────────────────────
|
||||
|
||||
@@ -429,73 +437,14 @@ function extractField(obj, fieldPath) {
|
||||
async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
switch (command) {
|
||||
case 'state': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'json') {
|
||||
state.cmdStateJson(cwd, raw);
|
||||
} else if (subcommand === 'update') {
|
||||
state.cmdStateUpdate(cwd, args[2], args[3]);
|
||||
} else if (subcommand === 'get') {
|
||||
state.cmdStateGet(cwd, args[2], raw);
|
||||
} else if (subcommand === 'patch') {
|
||||
const patches = {};
|
||||
for (let i = 2; i < args.length; i += 2) {
|
||||
const key = args[i].replace(/^--/, '');
|
||||
const value = args[i + 1];
|
||||
if (key && value !== undefined) {
|
||||
patches[key] = value;
|
||||
}
|
||||
}
|
||||
state.cmdStatePatch(cwd, patches, raw);
|
||||
} else if (subcommand === 'advance-plan') {
|
||||
state.cmdStateAdvancePlan(cwd, raw);
|
||||
} else if (subcommand === 'record-metric') {
|
||||
const { phase: p, plan, duration, tasks, files } = parseNamedArgs(args, ['phase', 'plan', 'duration', 'tasks', 'files']);
|
||||
state.cmdStateRecordMetric(cwd, { phase: p, plan, duration, tasks, files }, raw);
|
||||
} else if (subcommand === 'update-progress') {
|
||||
state.cmdStateUpdateProgress(cwd, raw);
|
||||
} else if (subcommand === 'add-decision') {
|
||||
const { phase: p, summary, 'summary-file': summary_file, rationale, 'rationale-file': rationale_file } = parseNamedArgs(args, ['phase', 'summary', 'summary-file', 'rationale', 'rationale-file']);
|
||||
state.cmdStateAddDecision(cwd, { phase: p, summary, summary_file, rationale: rationale || '', rationale_file }, raw);
|
||||
} else if (subcommand === 'add-blocker') {
|
||||
const { text, 'text-file': text_file } = parseNamedArgs(args, ['text', 'text-file']);
|
||||
state.cmdStateAddBlocker(cwd, { text, text_file }, raw);
|
||||
} else if (subcommand === 'resolve-blocker') {
|
||||
state.cmdStateResolveBlocker(cwd, parseNamedArgs(args, ['text']).text, raw);
|
||||
} else if (subcommand === 'record-session') {
|
||||
const { 'stopped-at': stopped_at, 'resume-file': resume_file } = parseNamedArgs(args, ['stopped-at', 'resume-file']);
|
||||
state.cmdStateRecordSession(cwd, { stopped_at, resume_file: resume_file || 'None' }, raw);
|
||||
} else if (subcommand === 'begin-phase') {
|
||||
const { phase: p, name, plans } = parseNamedArgs(args, ['phase', 'name', 'plans']);
|
||||
state.cmdStateBeginPhase(cwd, p, name, plans !== null ? parseInt(plans, 10) : null, raw);
|
||||
} else if (subcommand === 'signal-waiting') {
|
||||
const { type, question, options, phase: p } = parseNamedArgs(args, ['type', 'question', 'options', 'phase']);
|
||||
state.cmdSignalWaiting(cwd, type, question, options, p, raw);
|
||||
} else if (subcommand === 'signal-resume') {
|
||||
state.cmdSignalResume(cwd, raw);
|
||||
} else if (subcommand === 'planned-phase') {
|
||||
const { phase: p, name, plans } = parseNamedArgs(args, ['phase', 'name', 'plans']);
|
||||
state.cmdStatePlannedPhase(cwd, p, plans !== null ? parseInt(plans, 10) : null, raw);
|
||||
} else if (subcommand === 'validate') {
|
||||
state.cmdStateValidate(cwd, raw);
|
||||
} else if (subcommand === 'sync') {
|
||||
const { verify } = parseNamedArgs(args, [], ['verify']);
|
||||
state.cmdStateSync(cwd, { verify }, raw);
|
||||
} else if (subcommand === 'prune') {
|
||||
const { 'keep-recent': keepRecent, 'dry-run': dryRun } = parseNamedArgs(args, ['keep-recent'], ['dry-run']);
|
||||
state.cmdStatePrune(cwd, { keepRecent: keepRecent || '3', dryRun: !!dryRun }, raw);
|
||||
} else if (subcommand === 'complete-phase') {
|
||||
state.cmdStateCompletePhase(cwd, raw);
|
||||
} else if (subcommand === 'milestone-switch') {
|
||||
// Bug #2630: reset STATE.md frontmatter + Current Position for new milestone.
|
||||
// NB: the flag is `--milestone`, not `--version` — gsd-tools reserves
|
||||
// `--version` as a globally-invalid help flag (see NEVER_VALID_FLAGS above).
|
||||
const { milestone, name } = parseNamedArgs(args, ['milestone', 'name']);
|
||||
state.cmdStateMilestoneSwitch(cwd, milestone, name, raw);
|
||||
} else if (subcommand === undefined || subcommand === 'load') {
|
||||
state.cmdStateLoad(cwd, raw);
|
||||
} else {
|
||||
error(`Unknown state subcommand: "${subcommand}". Available: load, json, get, patch, update, advance-plan, record-metric, update-progress, add-decision, add-blocker, resolve-blocker, record-session, begin-phase, signal-waiting, signal-resume, planned-phase, validate, sync, prune, complete-phase, milestone-switch`);
|
||||
}
|
||||
routeStateCommand({
|
||||
state,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
parseNamedArgs,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -589,27 +538,13 @@ async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
}
|
||||
|
||||
case 'verify': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'plan-structure') {
|
||||
verify.cmdVerifyPlanStructure(cwd, args[2], raw);
|
||||
} else if (subcommand === 'phase-completeness') {
|
||||
verify.cmdVerifyPhaseCompleteness(cwd, args[2], raw);
|
||||
} else if (subcommand === 'references') {
|
||||
verify.cmdVerifyReferences(cwd, args[2], raw);
|
||||
} else if (subcommand === 'commits') {
|
||||
verify.cmdVerifyCommits(cwd, args.slice(2), raw);
|
||||
} else if (subcommand === 'artifacts') {
|
||||
verify.cmdVerifyArtifacts(cwd, args[2], raw);
|
||||
} else if (subcommand === 'key-links') {
|
||||
verify.cmdVerifyKeyLinks(cwd, args[2], raw);
|
||||
} else if (subcommand === 'schema-drift') {
|
||||
const skipFlag = args.includes('--skip');
|
||||
verify.cmdVerifySchemaDrift(cwd, args[2], skipFlag, raw);
|
||||
} else if (subcommand === 'codebase-drift') {
|
||||
verify.cmdVerifyCodebaseDrift(cwd, raw);
|
||||
} else {
|
||||
error('Unknown verify subcommand. Available: plan-structure, phase-completeness, references, commits, artifacts, key-links, schema-drift, codebase-drift');
|
||||
}
|
||||
routeVerifyCommand({
|
||||
verify,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -679,37 +614,25 @@ async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
}
|
||||
|
||||
case 'phases': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'list') {
|
||||
const typeIndex = args.indexOf('--type');
|
||||
const phaseIndex = args.indexOf('--phase');
|
||||
const options = {
|
||||
type: typeIndex !== -1 ? args[typeIndex + 1] : null,
|
||||
phase: phaseIndex !== -1 ? args[phaseIndex + 1] : null,
|
||||
includeArchived: args.includes('--include-archived'),
|
||||
};
|
||||
phase.cmdPhasesList(cwd, options, raw);
|
||||
} else if (subcommand === 'clear') {
|
||||
milestone.cmdPhasesClear(cwd, raw, args.slice(2));
|
||||
} else {
|
||||
error('Unknown phases subcommand. Available: list, clear');
|
||||
}
|
||||
routePhasesCommand({
|
||||
phase,
|
||||
milestone,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case 'roadmap': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'get-phase') {
|
||||
roadmap.cmdRoadmapGetPhase(cwd, args[2], raw);
|
||||
} else if (subcommand === 'analyze') {
|
||||
roadmap.cmdRoadmapAnalyze(cwd, raw);
|
||||
} else if (subcommand === 'update-plan-progress') {
|
||||
roadmap.cmdRoadmapUpdatePlanProgress(cwd, args[2], raw);
|
||||
} else if (subcommand === 'annotate-dependencies') {
|
||||
roadmap.cmdRoadmapAnnotateDependencies(cwd, args[2], raw);
|
||||
} else {
|
||||
error('Unknown roadmap subcommand. Available: get-phase, analyze, update-plan-progress, annotate-dependencies');
|
||||
}
|
||||
routeRoadmapCommand({
|
||||
roadmap,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -731,42 +654,13 @@ async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
}
|
||||
|
||||
case 'phase': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'next-decimal') {
|
||||
phase.cmdPhaseNextDecimal(cwd, args[2], raw);
|
||||
} else if (subcommand === 'add') {
|
||||
const idIdx = args.indexOf('--id');
|
||||
let customId = null;
|
||||
const descArgs = [];
|
||||
for (let i = 2; i < args.length; i++) {
|
||||
if (args[i] === '--id' && i + 1 < args.length) {
|
||||
customId = args[i + 1];
|
||||
i++; // skip value
|
||||
} else {
|
||||
descArgs.push(args[i]);
|
||||
}
|
||||
}
|
||||
phase.cmdPhaseAdd(cwd, descArgs.join(' '), raw, customId);
|
||||
} else if (subcommand === 'add-batch') {
|
||||
// Accepts JSON array of descriptions via --descriptions '[...]' or positional args
|
||||
const descFlagIdx = args.indexOf('--descriptions');
|
||||
let descriptions;
|
||||
if (descFlagIdx !== -1 && args[descFlagIdx + 1]) {
|
||||
try { descriptions = JSON.parse(args[descFlagIdx + 1]); } catch (e) { error('--descriptions must be a JSON array'); }
|
||||
} else {
|
||||
descriptions = args.slice(2).filter(a => a !== '--raw');
|
||||
}
|
||||
phase.cmdPhaseAddBatch(cwd, descriptions, raw);
|
||||
} else if (subcommand === 'insert') {
|
||||
phase.cmdPhaseInsert(cwd, args[2], args.slice(3).join(' '), raw);
|
||||
} else if (subcommand === 'remove') {
|
||||
const forceFlag = args.includes('--force');
|
||||
phase.cmdPhaseRemove(cwd, args[2], { force: forceFlag }, raw);
|
||||
} else if (subcommand === 'complete') {
|
||||
phase.cmdPhaseComplete(cwd, args[2], raw);
|
||||
} else {
|
||||
error('Unknown phase subcommand. Available: next-decimal, add, add-batch, insert, remove, complete');
|
||||
}
|
||||
routePhaseCommand({
|
||||
phase,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -783,58 +677,15 @@ async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
}
|
||||
|
||||
case 'validate': {
|
||||
const subcommand = args[1];
|
||||
if (subcommand === 'consistency') {
|
||||
verify.cmdValidateConsistency(cwd, raw);
|
||||
} else if (subcommand === 'health') {
|
||||
const repairFlag = args.includes('--repair');
|
||||
const backfillFlag = args.includes('--backfill');
|
||||
verify.cmdValidateHealth(cwd, { repair: repairFlag, backfill: backfillFlag }, raw);
|
||||
} else if (subcommand === 'agents') {
|
||||
verify.cmdValidateAgents(cwd, raw);
|
||||
} else if (subcommand === 'context') {
|
||||
// The model self-reports tokensUsed and contextWindow — the SDK has
|
||||
// no privileged access to either. Recommendation copy lives here
|
||||
// (the renderer), not in the classifier, so it can change without
|
||||
// re-validating the math layer.
|
||||
const opts = parseNamedArgs(args, ['tokens-used', 'context-window']);
|
||||
if (opts['tokens-used'] === null) {
|
||||
error('--tokens-used <integer> is required for `validate context`');
|
||||
break;
|
||||
}
|
||||
if (opts['context-window'] === null) {
|
||||
error('--context-window <integer> is required for `validate context`');
|
||||
break;
|
||||
}
|
||||
const { classifyContextUtilization, STATES } = require('./lib/context-utilization.cjs');
|
||||
const RECOMMENDATIONS = {
|
||||
[STATES.HEALTHY]: null,
|
||||
[STATES.WARNING]: 'Context is approaching the fracture zone — consider /gsd-thread to continue in a fresh window.',
|
||||
[STATES.CRITICAL]: 'Reasoning quality may degrade past 70% utilization (fracture point). Run /gsd-thread now to preserve output quality.',
|
||||
};
|
||||
let classified;
|
||||
try {
|
||||
classified = classifyContextUtilization(Number(opts['tokens-used']), Number(opts['context-window']));
|
||||
} catch (e) {
|
||||
// Translate the classifier's TypeError into a CLI-shaped error
|
||||
// message that names the offending flag.
|
||||
const flag = /tokensUsed/.test(e.message) ? '--tokens-used' : '--context-window';
|
||||
error(`${flag} must be a non-negative integer (window > 0), got the values supplied`);
|
||||
break;
|
||||
}
|
||||
const result = { ...classified, recommendation: RECOMMENDATIONS[classified.state] };
|
||||
if (args.includes('--json')) {
|
||||
core.output(result, raw);
|
||||
} else {
|
||||
const lines = [`Context utilization: ${result.percent}% (${result.state})`];
|
||||
if (result.recommendation) lines.push(result.recommendation);
|
||||
// Use core.output's rawValue path for the sync-flush guarantee
|
||||
// — process.stdout.write can be truncated on process exit.
|
||||
core.output(result, true, lines.join('\n'));
|
||||
}
|
||||
} else {
|
||||
error('Unknown validate subcommand. Available: consistency, health, agents, context');
|
||||
}
|
||||
routeValidateCommand({
|
||||
verify,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
parseNamedArgs,
|
||||
output: core.output,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -903,66 +754,14 @@ async function runCommand(command, args, cwd, raw, defaultValue) {
|
||||
}
|
||||
|
||||
case 'init': {
|
||||
const workflow = args[1];
|
||||
switch (workflow) {
|
||||
case 'execute-phase': {
|
||||
const { validate: epValidate, tdd: epTdd } = parseNamedArgs(args, [], ['validate', 'tdd']);
|
||||
init.cmdInitExecutePhase(cwd, args[2], raw, { validate: epValidate, tdd: epTdd });
|
||||
break;
|
||||
}
|
||||
case 'plan-phase': {
|
||||
const { validate: ppValidate, tdd: ppTdd } = parseNamedArgs(args, [], ['validate', 'tdd']);
|
||||
init.cmdInitPlanPhase(cwd, args[2], raw, { validate: ppValidate, tdd: ppTdd });
|
||||
break;
|
||||
}
|
||||
case 'new-project':
|
||||
init.cmdInitNewProject(cwd, raw);
|
||||
break;
|
||||
case 'new-milestone':
|
||||
init.cmdInitNewMilestone(cwd, raw);
|
||||
break;
|
||||
case 'quick':
|
||||
init.cmdInitQuick(cwd, args.slice(2).join(' '), raw);
|
||||
break;
|
||||
case 'ingest-docs':
|
||||
init.cmdInitIngestDocs(cwd, raw);
|
||||
break;
|
||||
case 'resume':
|
||||
init.cmdInitResume(cwd, raw);
|
||||
break;
|
||||
case 'verify-work':
|
||||
init.cmdInitVerifyWork(cwd, args[2], raw);
|
||||
break;
|
||||
case 'phase-op':
|
||||
init.cmdInitPhaseOp(cwd, args[2], raw);
|
||||
break;
|
||||
case 'todos':
|
||||
init.cmdInitTodos(cwd, args[2], raw);
|
||||
break;
|
||||
case 'milestone-op':
|
||||
init.cmdInitMilestoneOp(cwd, raw);
|
||||
break;
|
||||
case 'map-codebase':
|
||||
init.cmdInitMapCodebase(cwd, raw);
|
||||
break;
|
||||
case 'progress':
|
||||
init.cmdInitProgress(cwd, raw);
|
||||
break;
|
||||
case 'manager':
|
||||
init.cmdInitManager(cwd, raw);
|
||||
break;
|
||||
case 'new-workspace':
|
||||
init.cmdInitNewWorkspace(cwd, raw);
|
||||
break;
|
||||
case 'list-workspaces':
|
||||
init.cmdInitListWorkspaces(cwd, raw);
|
||||
break;
|
||||
case 'remove-workspace':
|
||||
init.cmdInitRemoveWorkspace(cwd, args[2], raw);
|
||||
break;
|
||||
default:
|
||||
error(`Unknown init workflow: ${workflow}\nAvailable: execute-phase, plan-phase, new-project, new-milestone, quick, ingest-docs, resume, verify-work, phase-op, todos, milestone-op, map-codebase, progress, manager, new-workspace, list-workspaces, remove-workspace`);
|
||||
}
|
||||
routeInitCommand({
|
||||
init,
|
||||
args,
|
||||
cwd,
|
||||
raw,
|
||||
parseNamedArgs,
|
||||
error,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
@@ -11,7 +11,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { planningDir, toPosixPath } = require('./core.cjs');
|
||||
const { toPosixPath } = require('./core.cjs');
|
||||
const { planningDir } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { requireSafePath, sanitizeForDisplay } = require('./security.cjs');
|
||||
|
||||
|
||||
118
get-shit-done/bin/lib/command-aliases.generated.cjs
Normal file
118
get-shit-done/bin/lib/command-aliases.generated.cjs
Normal file
@@ -0,0 +1,118 @@
|
||||
'use strict';

/**
 * GENERATED FILE — state.*, verify.*, init.*, phase.*, phases.*, validate.*, and roadmap.* alias/subcommand metadata for CJS routing.
 * Source: sdk/src/query/command-manifest.{state,verify,init,phase,phases,validate,roadmap}.ts
 */

// Do not edit by hand — regenerate (see sdk/scripts/gen-command-aliases.ts).
// Entry shape: { canonical, aliases, subcommand, mutation }.
// NOTE(review): `mutation` appears to mark write commands (update/add/remove
// entries are true, reads are false) — confirm against the manifest source.

const STATE_COMMAND_ALIASES = [
  { canonical: 'state.load', aliases: [], subcommand: 'load', mutation: false },
  { canonical: 'state.json', aliases: ['state json'], subcommand: 'json', mutation: false },
  { canonical: 'state.get', aliases: ['state get'], subcommand: 'get', mutation: false },
  { canonical: 'state.update', aliases: ['state update'], subcommand: 'update', mutation: true },
  { canonical: 'state.patch', aliases: ['state patch'], subcommand: 'patch', mutation: true },
  { canonical: 'state.begin-phase', aliases: ['state begin-phase'], subcommand: 'begin-phase', mutation: true },
  { canonical: 'state.advance-plan', aliases: ['state advance-plan'], subcommand: 'advance-plan', mutation: true },
  { canonical: 'state.record-metric', aliases: ['state record-metric'], subcommand: 'record-metric', mutation: true },
  { canonical: 'state.update-progress', aliases: ['state update-progress'], subcommand: 'update-progress', mutation: true },
  { canonical: 'state.add-decision', aliases: ['state add-decision'], subcommand: 'add-decision', mutation: true },
  { canonical: 'state.add-blocker', aliases: ['state add-blocker'], subcommand: 'add-blocker', mutation: true },
  { canonical: 'state.resolve-blocker', aliases: ['state resolve-blocker'], subcommand: 'resolve-blocker', mutation: true },
  { canonical: 'state.record-session', aliases: ['state record-session'], subcommand: 'record-session', mutation: true },
  { canonical: 'state.signal-waiting', aliases: ['state signal-waiting'], subcommand: 'signal-waiting', mutation: true },
  { canonical: 'state.signal-resume', aliases: ['state signal-resume'], subcommand: 'signal-resume', mutation: true },
  { canonical: 'state.planned-phase', aliases: ['state planned-phase'], subcommand: 'planned-phase', mutation: true },
  { canonical: 'state.validate', aliases: ['state validate'], subcommand: 'validate', mutation: false },
  { canonical: 'state.sync', aliases: ['state sync'], subcommand: 'sync', mutation: true },
  { canonical: 'state.prune', aliases: ['state prune'], subcommand: 'prune', mutation: true },
  { canonical: 'state.milestone-switch', aliases: ['state milestone-switch'], subcommand: 'milestone-switch', mutation: true },
  { canonical: 'state.add-roadmap-evolution', aliases: ['state add-roadmap-evolution'], subcommand: 'add-roadmap-evolution', mutation: true },
];

const VERIFY_COMMAND_ALIASES = [
  { canonical: 'verify.plan-structure', aliases: ['verify plan-structure'], subcommand: 'plan-structure', mutation: false },
  { canonical: 'verify.phase-completeness', aliases: ['verify phase-completeness'], subcommand: 'phase-completeness', mutation: false },
  { canonical: 'verify.references', aliases: ['verify references'], subcommand: 'references', mutation: false },
  { canonical: 'verify.commits', aliases: ['verify commits'], subcommand: 'commits', mutation: false },
  { canonical: 'verify.artifacts', aliases: ['verify artifacts'], subcommand: 'artifacts', mutation: false },
  { canonical: 'verify.key-links', aliases: ['verify key-links'], subcommand: 'key-links', mutation: false },
  { canonical: 'verify.schema-drift', aliases: ['verify schema-drift'], subcommand: 'schema-drift', mutation: false },
  { canonical: 'verify.codebase-drift', aliases: ['verify codebase-drift'], subcommand: 'codebase-drift', mutation: false },
];

const INIT_COMMAND_ALIASES = [
  { canonical: 'init.execute-phase', aliases: ['init execute-phase'], subcommand: 'execute-phase', mutation: false },
  { canonical: 'init.plan-phase', aliases: ['init plan-phase'], subcommand: 'plan-phase', mutation: false },
  { canonical: 'init.new-project', aliases: ['init new-project'], subcommand: 'new-project', mutation: false },
  { canonical: 'init.new-milestone', aliases: ['init new-milestone'], subcommand: 'new-milestone', mutation: false },
  { canonical: 'init.quick', aliases: ['init quick'], subcommand: 'quick', mutation: false },
  { canonical: 'init.ingest-docs', aliases: ['init ingest-docs'], subcommand: 'ingest-docs', mutation: false },
  { canonical: 'init.resume', aliases: ['init resume'], subcommand: 'resume', mutation: false },
  { canonical: 'init.verify-work', aliases: ['init verify-work'], subcommand: 'verify-work', mutation: false },
  { canonical: 'init.phase-op', aliases: ['init phase-op'], subcommand: 'phase-op', mutation: false },
  { canonical: 'init.todos', aliases: ['init todos'], subcommand: 'todos', mutation: false },
  { canonical: 'init.milestone-op', aliases: ['init milestone-op'], subcommand: 'milestone-op', mutation: false },
  { canonical: 'init.map-codebase', aliases: ['init map-codebase'], subcommand: 'map-codebase', mutation: false },
  { canonical: 'init.progress', aliases: ['init progress'], subcommand: 'progress', mutation: false },
  { canonical: 'init.manager', aliases: ['init manager'], subcommand: 'manager', mutation: false },
  { canonical: 'init.new-workspace', aliases: ['init new-workspace'], subcommand: 'new-workspace', mutation: false },
  { canonical: 'init.list-workspaces', aliases: ['init list-workspaces'], subcommand: 'list-workspaces', mutation: false },
  { canonical: 'init.remove-workspace', aliases: ['init remove-workspace'], subcommand: 'remove-workspace', mutation: false },
];

const PHASE_COMMAND_ALIASES = [
  { canonical: 'phase.list-plans', aliases: ['phase list-plans'], subcommand: 'list-plans', mutation: false },
  { canonical: 'phase.list-artifacts', aliases: ['phase list-artifacts'], subcommand: 'list-artifacts', mutation: false },
  { canonical: 'phase.next-decimal', aliases: ['phase next-decimal'], subcommand: 'next-decimal', mutation: false },
  { canonical: 'phase.add', aliases: ['phase add'], subcommand: 'add', mutation: true },
  { canonical: 'phase.add-batch', aliases: ['phase add-batch'], subcommand: 'add-batch', mutation: true },
  { canonical: 'phase.insert', aliases: ['phase insert'], subcommand: 'insert', mutation: true },
  { canonical: 'phase.remove', aliases: ['phase remove'], subcommand: 'remove', mutation: true },
  { canonical: 'phase.complete', aliases: ['phase complete'], subcommand: 'complete', mutation: true },
  { canonical: 'phase.scaffold', aliases: ['phase scaffold'], subcommand: 'scaffold', mutation: true },
];

const PHASES_COMMAND_ALIASES = [
  { canonical: 'phases.list', aliases: ['phases list'], subcommand: 'list', mutation: false },
  { canonical: 'phases.clear', aliases: ['phases clear'], subcommand: 'clear', mutation: true },
  { canonical: 'phases.archive', aliases: ['phases archive'], subcommand: 'archive', mutation: true },
];

const VALIDATE_COMMAND_ALIASES = [
  { canonical: 'validate.consistency', aliases: ['validate consistency'], subcommand: 'consistency', mutation: false },
  { canonical: 'validate.health', aliases: ['validate health'], subcommand: 'health', mutation: false },
  { canonical: 'validate.agents', aliases: ['validate agents'], subcommand: 'agents', mutation: false },
  { canonical: 'validate.context', aliases: ['validate context'], subcommand: 'context', mutation: false },
];

const ROADMAP_COMMAND_ALIASES = [
  { canonical: 'roadmap.analyze', aliases: ['roadmap analyze'], subcommand: 'analyze', mutation: false },
  { canonical: 'roadmap.get-phase', aliases: ['roadmap get-phase'], subcommand: 'get-phase', mutation: false },
  { canonical: 'roadmap.update-plan-progress', aliases: ['roadmap update-plan-progress'], subcommand: 'update-plan-progress', mutation: true },
  { canonical: 'roadmap.annotate-dependencies', aliases: ['roadmap annotate-dependencies'], subcommand: 'annotate-dependencies', mutation: true },
];

// Flat subcommand-name lists, preserving the table order above (routers use
// these for membership checks and "Available: …" error messages).
const STATE_SUBCOMMANDS = STATE_COMMAND_ALIASES.map((entry) => entry.subcommand);
const VERIFY_SUBCOMMANDS = VERIFY_COMMAND_ALIASES.map((entry) => entry.subcommand);
const INIT_SUBCOMMANDS = INIT_COMMAND_ALIASES.map((entry) => entry.subcommand);
const PHASE_SUBCOMMANDS = PHASE_COMMAND_ALIASES.map((entry) => entry.subcommand);
const PHASES_SUBCOMMANDS = PHASES_COMMAND_ALIASES.map((entry) => entry.subcommand);
const VALIDATE_SUBCOMMANDS = VALIDATE_COMMAND_ALIASES.map((entry) => entry.subcommand);
const ROADMAP_SUBCOMMANDS = ROADMAP_COMMAND_ALIASES.map((entry) => entry.subcommand);

module.exports = {
  STATE_COMMAND_ALIASES,
  VERIFY_COMMAND_ALIASES,
  INIT_COMMAND_ALIASES,
  PHASE_COMMAND_ALIASES,
  PHASES_COMMAND_ALIASES,
  VALIDATE_COMMAND_ALIASES,
  ROADMAP_COMMAND_ALIASES,
  STATE_SUBCOMMANDS,
  VERIFY_SUBCOMMANDS,
  INIT_SUBCOMMANDS,
  PHASE_SUBCOMMANDS,
  PHASES_SUBCOMMANDS,
  VALIDATE_SUBCOMMANDS,
  ROADMAP_SUBCOMMANDS,
};
|
||||
@@ -4,7 +4,8 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const { safeReadFile, loadConfig, isGitIgnored, execGit, normalizePhaseName, comparePhaseNum, getArchivedPhaseDirs, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, resolveModelInternal, stripShippedMilestones, extractCurrentMilestone, planningDir, planningPaths, toPosixPath, output, error, findPhaseInternal, extractOneLinerFromBody, getRoadmapPhaseInternal } = require('./core.cjs');
|
||||
const { safeReadFile, loadConfig, isGitIgnored, execGit, normalizePhaseName, comparePhaseNum, getArchivedPhaseDirs, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, resolveModelInternal, stripShippedMilestones, extractCurrentMilestone, toPosixPath, output, error, findPhaseInternal, extractOneLinerFromBody, getRoadmapPhaseInternal } = require('./core.cjs');
|
||||
const { planningDir, planningPaths } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
||||
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { output, error, planningDir, withPlanningLock, CONFIG_DEFAULTS, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { output, error, CONFIG_DEFAULTS, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { planningDir, withPlanningLock } = require('./planning-workspace.cjs');
|
||||
const {
|
||||
VALID_PROFILES,
|
||||
getAgentToModelMapForProfile,
|
||||
|
||||
@@ -5,37 +5,17 @@
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const { execSync, execFileSync, spawnSync } = require('child_process');
|
||||
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
||||
|
||||
// Environment variables that can identify the hosting session/terminal, in
// priority order: the first one that yields a sanitized token becomes the
// workstream session key (see getWorkstreamSessionKey).
const WORKSTREAM_SESSION_ENV_KEYS = [
  'GSD_SESSION_KEY',
  'CODEX_THREAD_ID',
  'CLAUDE_SESSION_ID',
  'CLAUDE_CODE_SSE_PORT',
  'OPENCODE_SESSION_ID',
  'GEMINI_SESSION_ID',
  'CURSOR_SESSION_ID',
  'WINDSURF_SESSION_ID',
  'TERM_SESSION_ID',
  'WT_SESSION',
  'TMUX_PANE',
  'ZELLIJ_SESSION_NAME',
];

// Memoized result of the one-shot `tty` subprocess probe
// (probeControllingTtyToken): the derived token (or null), plus a flag that
// guarantees the probe runs at most once per process.
let cachedControllingTtyToken = null;
let didProbeControllingTtyToken = false;
|
||||
|
||||
// Track all .planning/.lock files held by this process so they can be removed
// on exit. process.on('exit') fires even on process.exit(1), unlike try/finally
// which is skipped when error() calls process.exit(1) inside a locked region (#1916).
const _heldPlanningLocks = new Set();
process.on('exit', () => {
  for (const lockPath of _heldPlanningLocks) {
    // Best-effort: the lock may already have been released on the normal path.
    try { fs.unlinkSync(lockPath); } catch { /* already gone */ }
  }
});
|
||||
// Compatibility shim: new imports should use planning-workspace.cjs directly.
|
||||
const {
|
||||
planningDir,
|
||||
planningRoot,
|
||||
planningPaths,
|
||||
withPlanningLock,
|
||||
getActiveWorkstream,
|
||||
setActiveWorkstream,
|
||||
} = require('./planning-workspace.cjs');
|
||||
|
||||
// ─── Path helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -804,304 +784,7 @@ function pruneOrphanedWorktrees(repoRoot) {
|
||||
return pruned;
|
||||
}
|
||||
|
||||
/**
 * Acquire a file-based lock for .planning/ writes and run `fn` while holding it.
 *
 * Prevents concurrent worktrees from corrupting shared planning files. The
 * lock is a `.planning/.lock` file created with the `wx` flag (atomic create
 * that fails with EEXIST when the file exists). The lock is released in a
 * `finally`, and is also registered in _heldPlanningLocks so the process
 * 'exit' handler removes it even when error() calls process.exit(1) inside
 * the locked region (#1916).
 *
 * Stale-lock policy: a lock file older than STALE_LOCK_MS is deleted and the
 * acquisition retried. After lockTimeout elapses the lock is force-broken and
 * `fn` runs anyway — availability is preferred over strict mutual exclusion.
 *
 * @param {string} cwd - project root; the lock lives under planningDir(cwd)
 * @param {Function} fn - callback to run while the lock is held
 * @returns {*} whatever `fn` returns
 */
function withPlanningLock(cwd, fn) {
  const lockPath = path.join(planningDir(cwd), '.lock');
  const lockTimeout = 10000; // give up acquiring after 10 seconds
  const retryDelay = 100; // ms between acquisition attempts
  const STALE_LOCK_MS = 30000; // locks older than this are presumed abandoned
  const start = Date.now();

  // Ensure .planning/ exists so the lock file can be created.
  try { fs.mkdirSync(planningDir(cwd), { recursive: true }); } catch { /* ok */ }

  while (Date.now() - start < lockTimeout) {
    try {
      // Atomic create — fails with EEXIST if another process holds the lock.
      fs.writeFileSync(lockPath, JSON.stringify({
        pid: process.pid,
        cwd,
        acquired: new Date().toISOString(),
      }), { flag: 'wx' });

      // Register for exit-time cleanup so process.exit(1) inside a locked region
      // cannot leave a stale lock file (#1916).
      _heldPlanningLocks.add(lockPath);

      // Lock acquired — run the function.
      try {
        return fn();
      } finally {
        _heldPlanningLocks.delete(lockPath);
        try { fs.unlinkSync(lockPath); } catch { /* already released */ }
      }
    } catch (err) {
      if (err.code === 'EEXIST') {
        // Lock exists — break it if stale, otherwise wait and retry.
        try {
          const stat = fs.statSync(lockPath);
          if (Date.now() - stat.mtimeMs > STALE_LOCK_MS) {
            fs.unlinkSync(lockPath);
            continue; // retry immediately
          }
        } catch { continue; } // lock vanished between create and stat — retry

        // Synchronous cross-platform sleep with no shell dependency.
        // Fix: this previously hard-coded 100, silently duplicating (and
        // ignoring) the retryDelay constant declared above.
        Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, retryDelay);
        continue;
      }
      throw err;
    }
  }

  // Timeout — force acquire (stale lock recovery).
  try { fs.unlinkSync(lockPath); } catch { /* ok */ }
  return fn();
}
|
||||
|
||||
/**
 * Resolve the .planning directory path, project- and workstream-aware.
 *
 * Resolution order:
 *   1. A project name (explicit arg or GSD_PROJECT env var) routes to
 *      `.planning/{project}/` — supports multi-project workspaces where several
 *      independent projects share one `.planning/` root (e.g. an Obsidian
 *      vault or monorepo knowledge base used as a command center).
 *   2. A workstream name (explicit arg or GSD_WORKSTREAM env var) routes to
 *      `workstreams/{ws}/` under the base chosen above.
 *   3. Neither set: plain `.planning/`.
 *
 * Project and workstream compose: `.planning/{project}/workstreams/{ws}/`.
 *
 * @param {string} cwd - project root
 * @param {string} [ws] - explicit workstream name; if omitted, GSD_WORKSTREAM is consulted
 * @param {string} [project] - explicit project name; if omitted, GSD_PROJECT is consulted
 * @throws when a project/workstream segment contains path separators or ".."
 */
function planningDir(cwd, ws, project) {
  const proj = project === undefined ? (process.env.GSD_PROJECT || null) : project;
  const stream = ws === undefined ? (process.env.GSD_WORKSTREAM || null) : ws;

  // Separators or ".." in a segment would let the path escape .planning/.
  const badSegment = /[/\\]|\.\./;
  if (proj && badSegment.test(proj)) {
    throw new Error(`GSD_PROJECT contains invalid path characters: ${proj}`);
  }
  if (stream && badSegment.test(stream)) {
    throw new Error(`GSD_WORKSTREAM contains invalid path characters: ${stream}`);
  }

  const segments = [cwd, '.planning'];
  if (proj) segments.push(proj);
  if (stream) segments.push('workstreams', stream);
  return path.join(...segments);
}
|
||||
|
||||
/**
 * Root `.planning/` path for a repo, deliberately ignoring any project or
 * workstream routing. Use for resources shared across all sessions, such as
 * the legacy `active-workstream` pointer file.
 */
function planningRoot(cwd) {
  return path.join(cwd, '.planning');
}
|
||||
|
||||
/**
 * Common .planning file paths, project- and workstream-aware.
 *
 * Every path is rooted at planningDir(cwd, ws), so the GSD_PROJECT env var and
 * the active workstream are honored consistently with loadConfig(), which has
 * always read config.json through planningDir(cwd). Historically project and
 * config were resolved against the unrouted .planning/ root, which broke
 * `gsd-tools config-get` in multi-project layouts — the CRUD writers and the
 * reader pointed at different files.
 *
 * @param {string} cwd - project root
 * @param {string} [ws] - explicit workstream name (optional)
 * @returns {{planning: string, state: string, roadmap: string, project: string,
 *            config: string, phases: string, requirements: string}}
 */
function planningPaths(cwd, ws) {
  const base = planningDir(cwd, ws);
  const under = (name) => path.join(base, name);
  return {
    planning: base,
    state: under('STATE.md'),
    roadmap: under('ROADMAP.md'),
    project: under('PROJECT.md'),
    config: under('config.json'),
    phases: under('phases'),
    requirements: under('REQUIREMENTS.md'),
  };
}
|
||||
|
||||
// ─── Active Workstream Detection ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Normalize an arbitrary session identifier into a filesystem-safe token.
 * Runs of characters outside [a-zA-Z0-9._-] collapse to a single underscore,
 * leading/trailing underscores are stripped, and the result is capped at 160
 * characters. Returns null for null/undefined input or when nothing usable
 * remains after sanitization.
 */
function sanitizeWorkstreamSessionToken(value) {
  if (value == null) return null;
  const collapsed = String(value)
    .trim()
    .replace(/[^a-zA-Z0-9._-]+/g, '_');
  const trimmed = collapsed.replace(/^_+|_+$/g, '');
  if (!trimmed) return null;
  return trimmed.slice(0, 160);
}
|
||||
|
||||
// One-shot probe for the controlling terminal's identity via the `tty` CLI.
// Result is memoized in cachedControllingTtyToken; didProbeControllingTtyToken
// is set up-front so the subprocess is never spawned more than once, even when
// the probe fails. Returns `tty-<sanitized-path>` or null.
function probeControllingTtyToken() {
  if (didProbeControllingTtyToken) return cachedControllingTtyToken;
  didProbeControllingTtyToken = true;

  // `tty` reads stdin. When stdin is already non-interactive, spawning it only
  // adds avoidable failures on the routing hot path and cannot reveal a stable token.
  if (!(process.stdin && process.stdin.isTTY)) {
    return cachedControllingTtyToken;
  }

  try {
    // stdin is inherited so `tty` can see the real terminal; stderr discarded.
    const ttyPath = execFileSync('tty', [], {
      encoding: 'utf-8',
      stdio: ['inherit', 'pipe', 'ignore'],
    }).trim();
    if (ttyPath && ttyPath !== 'not a tty') {
      // e.g. "/dev/pts/3" -> "pts_3" -> "tty-pts_3"
      const token = sanitizeWorkstreamSessionToken(ttyPath.replace(/^\/dev\//, ''));
      if (token) cachedControllingTtyToken = `tty-${token}`;
    }
  } catch {}

  return cachedControllingTtyToken;
}
|
||||
|
||||
/**
 * Derive a terminal-identity token: prefer the TTY / SSH_TTY env vars, then
 * fall back to the (cached, one-shot) `tty` probe. Returns null when no
 * terminal identity can be established.
 */
function getControllingTtyToken() {
  const envCandidates = ['TTY', 'SSH_TTY'];
  for (const key of envCandidates) {
    const sanitized = sanitizeWorkstreamSessionToken(process.env[key]);
    if (!sanitized) continue;
    // Sanitizing "/dev/pts/3" yields "dev_pts_3"; drop the "dev_" prefix.
    return `tty-${sanitized.replace(/^dev_/, '')}`;
  }
  return probeControllingTtyToken();
}
|
||||
|
||||
/**
 * Resolve a deterministic session key for workstream-local routing.
 *
 * Resolution order:
 *   1. Known runtime/session env vars (`GSD_SESSION_KEY`, `CODEX_THREAD_ID`, …)
 *   2. Terminal identity via `TTY` / `SSH_TTY`
 *   3. One best-effort `tty` probe when stdin is interactive
 *   4. null — callers fall back to the legacy shared pointer file
 */
function getWorkstreamSessionKey() {
  for (const envKey of WORKSTREAM_SESSION_ENV_KEYS) {
    const token = sanitizeWorkstreamSessionToken(process.env[envKey]);
    if (token) {
      // Prefix with the env-var name (lowercased, non-alphanumerics folded to
      // hyphens) so keys from different runtimes never collide.
      const prefix = envKey.toLowerCase().replace(/[^a-z0-9]+/g, '-');
      return `${prefix}-${token}`;
    }
  }
  return getControllingTtyToken();
}
|
||||
|
||||
/**
 * Locate the tmpdir-backed pointer file for this session + repo, or null when
 * no stable session key exists.
 *
 * @param {string} cwd - project root
 * @returns {{sessionKey: string, dirPath: string, filePath: string}|null}
 */
function getSessionScopedWorkstreamFile(cwd) {
  const sessionKey = getWorkstreamSessionKey();
  if (!sessionKey) return null;

  // Hash the canonical .planning path so the same repo always maps to the same
  // tmpdir slot. realpathSync.native returns the case the OS recorded, whereas
  // path.resolve preserves the caller's casing — on case-insensitive NTFS the
  // two can differ, producing different hashes and different slots. Fall back
  // to path.resolve when the directory does not yet exist.
  const root = planningRoot(cwd);
  let canonical;
  try {
    canonical = fs.realpathSync.native(root);
  } catch {
    canonical = path.resolve(root);
  }

  const projectId = crypto
    .createHash('sha1')
    .update(canonical)
    .digest('hex')
    .slice(0, 16);
  const dirPath = path.join(os.tmpdir(), 'gsd-workstream-sessions', projectId);

  return {
    sessionKey,
    dirPath,
    filePath: path.join(dirPath, sessionKey),
  };
}
|
||||
|
||||
/**
 * Best-effort removal of a workstream pointer file, optionally pruning an
 * empty session directory. Never throws.
 */
function clearActiveWorkstreamPointer(filePath, cleanupDirPath) {
  try {
    fs.unlinkSync(filePath);
  } catch {
    // pointer already gone
  }

  if (!cleanupDirPath) return;

  // Session-scoped pointers for one repo share a tmp directory. Remove it only
  // when empty so clearing (or self-healing) one session never deletes a
  // sibling's pointer. The entry count is checked explicitly rather than
  // relying on rmdirSync throwing ENOTEMPTY — that error is not raised
  // reliably on Windows.
  try {
    if (fs.readdirSync(cleanupDirPath).length === 0) {
      fs.rmdirSync(cleanupDirPath);
    }
  } catch {
    // directory missing or unreadable — nothing to clean
  }
}
|
||||
|
||||
/**
 * Read a workstream pointer file, self-healing stale state: a malformed name
 * or a pointer to a deleted workstream is removed on read, so the session
 * resolves to null instead of silently carrying stale state forward.
 * Session-scoped callers may pass cleanupDirPath to also prune an empty
 * per-project tmp directory; shared `.planning/active-workstream` callers
 * pass none.
 */
function readActiveWorkstreamPointer(filePath, cwd, cleanupDirPath = null) {
  try {
    const name = fs.readFileSync(filePath, 'utf-8').trim();
    const wellFormed = name !== '' && /^[a-zA-Z0-9_-]+$/.test(name);
    const live = wellFormed &&
      fs.existsSync(path.join(planningRoot(cwd), 'workstreams', name));
    if (!live) {
      clearActiveWorkstreamPointer(filePath, cleanupDirPath);
      return null;
    }
    return name;
  } catch {
    return null; // no pointer file (or unreadable)
  }
}
|
||||
|
||||
/**
 * Active workstream name for this session, or null.
 *
 * A session-scoped tmpdir pointer wins whenever the runtime exposes a stable
 * session key; the legacy shared `.planning/active-workstream` file is only
 * consulted when no session key exists. The shared file is intentionally
 * ignored in the session-scoped case so concurrent sessions never clobber
 * each other's active workstream.
 */
function getActiveWorkstream(cwd) {
  const scoped = getSessionScopedWorkstreamFile(cwd);
  if (scoped) {
    return readActiveWorkstreamPointer(scoped.filePath, cwd, scoped.dirPath);
  }
  const legacyPath = path.join(planningRoot(cwd), 'active-workstream');
  return readActiveWorkstreamPointer(legacyPath, cwd);
}
|
||||
|
||||
/**
 * Persist the active workstream pointer, or clear it when `name` is falsy.
 *
 * When a stable session key is available this writes the tmpdir-backed,
 * session-scoped pointer; otherwise it falls back to the legacy shared
 * `.planning/active-workstream` file for backward compatibility.
 *
 * @throws when `name` contains characters outside [a-zA-Z0-9_-]
 */
function setActiveWorkstream(cwd, name) {
  const scoped = getSessionScopedWorkstreamFile(cwd);
  const pointerPath = scoped
    ? scoped.filePath
    : path.join(planningRoot(cwd), 'active-workstream');

  if (!name) {
    clearActiveWorkstreamPointer(pointerPath, scoped ? scoped.dirPath : null);
    return;
  }

  if (!/^[a-zA-Z0-9_-]+$/.test(name)) {
    throw new Error('Invalid workstream name: must be alphanumeric, hyphens, and underscores only');
  }

  if (scoped) fs.mkdirSync(scoped.dirPath, { recursive: true });
  fs.writeFileSync(pointerPath, `${name}\n`, 'utf-8');
}
|
||||
// ─── Planning workspace (pathing + active workstream + lock) moved to planning-workspace.cjs ───
|
||||
|
||||
// ─── Phase utilities ──────────────────────────────────────────────────────────
|
||||
|
||||
@@ -2155,6 +1838,7 @@ module.exports = {
|
||||
toPosixPath,
|
||||
extractOneLinerFromBody,
|
||||
resolveWorktreeRoot,
|
||||
// Deprecated re-exports — prefer direct import from planning-workspace.cjs
|
||||
withPlanningLock,
|
||||
findProjectRoot,
|
||||
detectSubRepos,
|
||||
|
||||
@@ -16,7 +16,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { planningPaths, planningDir, escapeRegex, output, error } = require('./core.cjs');
|
||||
const { escapeRegex, output, error } = require('./core.cjs');
|
||||
const { planningPaths, planningDir } = require('./planning-workspace.cjs');
|
||||
const { parseDecisions } = require('./decisions.cjs');
|
||||
|
||||
/**
|
||||
@@ -30,7 +31,10 @@ function parseRequirements(reqMd) {
|
||||
const out = [];
|
||||
const seen = new Set();
|
||||
|
||||
const checkboxRe = /^\s*-\s*\[[x ]\]\s*\*\*(REQ-[A-Za-z0-9_-]+)\*\*\s*(.*)$/gm;
|
||||
// Prefix-agnostic ID format: REQ-01, TST-01, BACK-07, INSP-04, etc.
|
||||
const ID_PATTERN = '[A-Z][A-Z0-9]*-[A-Za-z0-9_-]+';
|
||||
|
||||
const checkboxRe = new RegExp(`^\\s*-\\s*\\[[x ]\\]\\s*\\*\\*(${ID_PATTERN})\\*\\*\\s*(.*)$`, 'gm');
|
||||
let cm = checkboxRe.exec(reqMd);
|
||||
while (cm !== null) {
|
||||
const id = cm[1];
|
||||
@@ -41,15 +45,25 @@ function parseRequirements(reqMd) {
|
||||
cm = checkboxRe.exec(reqMd);
|
||||
}
|
||||
|
||||
const tableRe = /\|\s*(REQ-[A-Za-z0-9_-]+)\s*\|/g;
|
||||
let tm = tableRe.exec(reqMd);
|
||||
while (tm !== null) {
|
||||
const tableFirstCellRe = new RegExp(`^\\s*\\|\\s*(${ID_PATTERN})\\s*\\|`);
|
||||
const separatorRowRe = /^\s*\|[\s:|-]+\|\s*$/;
|
||||
const lines = reqMd.split(/\r?\n/);
|
||||
|
||||
for (let i = 0; i < lines.length; i += 1) {
|
||||
const line = lines[i];
|
||||
if (!line.includes('|')) continue;
|
||||
|
||||
// Skip markdown table separator rows and header rows immediately preceding them.
|
||||
if (separatorRowRe.test(line)) continue;
|
||||
if (i + 1 < lines.length && separatorRowRe.test(lines[i + 1])) continue;
|
||||
|
||||
const tm = tableFirstCellRe.exec(line);
|
||||
if (!tm) continue;
|
||||
const id = tm[1];
|
||||
if (!seen.has(id)) {
|
||||
seen.add(id);
|
||||
out.push({ id, text: '' });
|
||||
}
|
||||
tm = tableRe.exec(reqMd);
|
||||
}
|
||||
|
||||
return out;
|
||||
|
||||
70
get-shit-done/bin/lib/init-command-router.cjs
Normal file
70
get-shit-done/bin/lib/init-command-router.cjs
Normal file
@@ -0,0 +1,70 @@
|
||||
'use strict';
|
||||
|
||||
const { INIT_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
/**
 * Dispatch `gsd init <workflow> …` to the matching init command.
 *
 * @param {object} deps
 * @param {object} deps.init - init command module (cmdInit* functions)
 * @param {string[]} deps.args - raw CLI args; args[1] is the workflow name
 * @param {string} deps.cwd - project root
 * @param {*} deps.raw - raw-output flag passed through to every command
 * @param {Function} deps.parseNamedArgs - flag parser for --validate/--tdd
 * @param {Function} deps.error - error reporter for unknown workflows
 */
function routeInitCommand({ init, args, cwd, raw, parseNamedArgs, error }) {
  const workflow = args[1];

  // execute-phase and plan-phase share the same shape: a positional target
  // plus optional --validate / --tdd flags.
  const withFlags = (method) => () => {
    const { validate, tdd } = parseNamedArgs(args, [], ['validate', 'tdd']);
    init[method](cwd, args[2], raw, { validate, tdd });
  };

  const handlers = {
    'execute-phase': withFlags('cmdInitExecutePhase'),
    'plan-phase': withFlags('cmdInitPlanPhase'),
    'new-project': () => init.cmdInitNewProject(cwd, raw),
    'new-milestone': () => init.cmdInitNewMilestone(cwd, raw),
    quick: () => init.cmdInitQuick(cwd, args.slice(2).join(' '), raw),
    'ingest-docs': () => init.cmdInitIngestDocs(cwd, raw),
    resume: () => init.cmdInitResume(cwd, raw),
    'verify-work': () => init.cmdInitVerifyWork(cwd, args[2], raw),
    'phase-op': () => init.cmdInitPhaseOp(cwd, args[2], raw),
    todos: () => init.cmdInitTodos(cwd, args[2], raw),
    'milestone-op': () => init.cmdInitMilestoneOp(cwd, raw),
    'map-codebase': () => init.cmdInitMapCodebase(cwd, raw),
    progress: () => init.cmdInitProgress(cwd, raw),
    manager: () => init.cmdInitManager(cwd, raw),
    'new-workspace': () => init.cmdInitNewWorkspace(cwd, raw),
    'list-workspaces': () => init.cmdInitListWorkspaces(cwd, raw),
    'remove-workspace': () => init.cmdInitRemoveWorkspace(cwd, args[2], raw),
  };

  // Own-property check so inherited names (e.g. "constructor") still fall
  // through to the unknown-workflow error, matching the original switch.
  if (Object.prototype.hasOwnProperty.call(handlers, workflow)) {
    handlers[workflow]();
    return;
  }
  error(`Unknown init workflow: ${workflow}\nAvailable: ${INIT_SUBCOMMANDS.join(', ')}`);
}
|
||||
|
||||
module.exports = {
|
||||
routeInitCommand,
|
||||
};
|
||||
@@ -5,7 +5,8 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const { loadConfig, resolveModelInternal, findPhaseInternal, getRoadmapPhaseInternal, pathExistsInternal, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, normalizePhaseName, planningPaths, planningDir, planningRoot, toPosixPath, output, error, checkAgentsInstalled, phaseTokenMatches } = require('./core.cjs');
|
||||
const { loadConfig, resolveModelInternal, findPhaseInternal, getRoadmapPhaseInternal, pathExistsInternal, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, normalizePhaseName, toPosixPath, output, error, checkAgentsInstalled, phaseTokenMatches } = require('./core.cjs');
|
||||
const { planningPaths, planningDir, planningRoot } = require('./planning-workspace.cjs');
|
||||
|
||||
// Accept all bold/colon variants of the Requirements header (#2769):
|
||||
// **Requirements:** / **Requirements**: / **Requirements** : render the
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, getMilestonePhaseFilter, extractOneLinerFromBody, normalizeMd, planningPaths, output, error, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { escapeRegex, getMilestonePhaseFilter, extractOneLinerFromBody, normalizeMd, output, error, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { planningPaths } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { writeStateMd, stateReplaceFieldWithFallback } = require('./state.cjs');
|
||||
|
||||
|
||||
49
get-shit-done/bin/lib/phase-command-router.cjs
Normal file
49
get-shit-done/bin/lib/phase-command-router.cjs
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict';
|
||||
|
||||
const { PHASE_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
function routePhaseCommand({ phase, args, cwd, raw, error }) {
|
||||
const subcommand = args[1];
|
||||
|
||||
if (subcommand === 'next-decimal') {
|
||||
phase.cmdPhaseNextDecimal(cwd, args[2], raw);
|
||||
} else if (subcommand === 'add') {
|
||||
let customId = null;
|
||||
const descArgs = [];
|
||||
for (let i = 2; i < args.length; i++) {
|
||||
if (args[i] === '--id' && i + 1 < args.length) {
|
||||
customId = args[i + 1];
|
||||
i++;
|
||||
} else {
|
||||
descArgs.push(args[i]);
|
||||
}
|
||||
}
|
||||
phase.cmdPhaseAdd(cwd, descArgs.join(' '), raw, customId);
|
||||
} else if (subcommand === 'add-batch') {
|
||||
const descFlagIdx = args.indexOf('--descriptions');
|
||||
let descriptions;
|
||||
if (descFlagIdx !== -1 && args[descFlagIdx + 1]) {
|
||||
try {
|
||||
descriptions = JSON.parse(args[descFlagIdx + 1]);
|
||||
} catch {
|
||||
error('--descriptions must be a JSON array');
|
||||
}
|
||||
} else {
|
||||
descriptions = args.slice(2).filter(a => a !== '--raw');
|
||||
}
|
||||
phase.cmdPhaseAddBatch(cwd, descriptions, raw);
|
||||
} else if (subcommand === 'insert') {
|
||||
phase.cmdPhaseInsert(cwd, args[2], args.slice(3).join(' '), raw);
|
||||
} else if (subcommand === 'remove') {
|
||||
const forceFlag = args.includes('--force');
|
||||
phase.cmdPhaseRemove(cwd, args[2], { force: forceFlag }, raw);
|
||||
} else if (subcommand === 'complete') {
|
||||
phase.cmdPhaseComplete(cwd, args[2], raw);
|
||||
} else {
|
||||
error(`Unknown phase subcommand. Available: ${PHASE_SUBCOMMANDS.filter((s) => s !== 'list-plans' && s !== 'list-artifacts' && s !== 'scaffold').join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
routePhaseCommand,
|
||||
};
|
||||
@@ -4,10 +4,52 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, loadConfig, normalizePhaseName, comparePhaseNum, findPhaseInternal, getArchivedPhaseDirs, generateSlugInternal, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone, toPosixPath, planningDir, withPlanningLock, output, error, readSubdirectories, phaseTokenMatches, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { escapeRegex, loadConfig, normalizePhaseName, comparePhaseNum, findPhaseInternal, getArchivedPhaseDirs, generateSlugInternal, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone, toPosixPath, output, error, readSubdirectories, phaseTokenMatches, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { planningDir, withPlanningLock } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { writeStateMd, readModifyWriteStateMd, stateExtractField, stateReplaceField, stateReplaceFieldWithFallback, updatePerformanceMetricsSection } = require('./state.cjs');
|
||||
|
||||
// #2893 — strict canonical filter: `{padded_phase}-{NN}-PLAN.md` or `PLAN.md`.
|
||||
// Documented in agents/gsd-planner.md (write_phase_prompt step). The wider
|
||||
// "looks like a plan but isn't canonical" probe below is used to surface a
|
||||
// loud warning instead of silently returning zero plans.
|
||||
const isCanonicalPlanFile = (f) => f.endsWith('-PLAN.md') || f === 'PLAN.md';
|
||||
|
||||
// Any .md file with PLAN anywhere in the basename — the diagnostic net for
|
||||
// catching agent deviations like `01-PLAN-01-foundation.md` (#2893).
|
||||
// Excludes derivative files (`-PLAN-OUTLINE.md`, `*.pre-bounce.md`, etc.) that
|
||||
// the planner legitimately produces alongside canonical plans.
|
||||
const PLAN_OUTLINE_RE = /-PLAN-OUTLINE\.md$/i;
|
||||
const PLAN_PRE_BOUNCE_RE = /-PLAN.*\.pre-bounce\.md$/i;
|
||||
const looksLikePlanFile = (f) =>
|
||||
/\.md$/i.test(f)
|
||||
&& /PLAN/i.test(f)
|
||||
&& !PLAN_OUTLINE_RE.test(f)
|
||||
&& !PLAN_PRE_BOUNCE_RE.test(f);
|
||||
|
||||
/**
|
||||
* Detect plan-shaped files that the canonical filter would reject. Returns
|
||||
* a warning string when offenders exist, else null. Centralised so every
|
||||
* read site (phase-plan-index, phases list --type plans, find-phase) emits
|
||||
* the same message.
|
||||
*
|
||||
* @param {string[]} dirFiles — readdirSync output for one phase directory
|
||||
* @param {string[]} matchedFiles — what the canonical filter accepted
|
||||
* @returns {string|null}
|
||||
*/
|
||||
function describeNonCanonicalPlans(dirFiles, matchedFiles) {
|
||||
const matched = new Set(matchedFiles);
|
||||
const offenders = dirFiles.filter((f) => looksLikePlanFile(f) && !matched.has(f));
|
||||
if (offenders.length === 0) return null;
|
||||
return (
|
||||
`Found ${offenders.length} plan-shaped file(s) in this phase that don't match the canonical ` +
|
||||
`naming convention "{padded_phase}-{NN}-PLAN.md" (or bare "PLAN.md") and were skipped: ` +
|
||||
offenders.map((f) => `"${f}"`).join(', ') +
|
||||
`. Rename to the canonical form (e.g. "01-01-PLAN.md") so the executor can detect them. ` +
|
||||
`See agents/gsd-planner.md write_phase_prompt step for the full contract.`
|
||||
);
|
||||
}
|
||||
|
||||
function cmdPhasesList(cwd, options, raw) {
|
||||
const phasesDir = path.join(planningDir(cwd), 'phases');
|
||||
const { type, phase, includeArchived } = options;
|
||||
@@ -52,13 +94,18 @@ function cmdPhasesList(cwd, options, raw) {
|
||||
// If listing files of a specific type
|
||||
if (type) {
|
||||
const files = [];
|
||||
const warnings = [];
|
||||
for (const dir of dirs) {
|
||||
const dirPath = path.join(phasesDir, dir);
|
||||
const dirFiles = fs.readdirSync(dirPath);
|
||||
|
||||
let filtered;
|
||||
if (type === 'plans') {
|
||||
filtered = dirFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
|
||||
filtered = dirFiles.filter(isCanonicalPlanFile);
|
||||
// #2893 — surface plan-shaped files the canonical filter rejected
|
||||
// so callers (executor init, etc.) don't silently see zero plans.
|
||||
const w = describeNonCanonicalPlans(dirFiles, filtered);
|
||||
if (w) warnings.push(`${dir}: ${w}`);
|
||||
} else if (type === 'summaries') {
|
||||
filtered = dirFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
|
||||
} else {
|
||||
@@ -73,6 +120,7 @@ function cmdPhasesList(cwd, options, raw) {
|
||||
count: files.length,
|
||||
phase_dir: phase ? dirs[0].replace(/^\d+(?:\.\d+)*-?/, '') : null,
|
||||
};
|
||||
if (warnings.length) result.warning = warnings.join(' | ');
|
||||
output(result, raw, files.join('\n'));
|
||||
return;
|
||||
}
|
||||
@@ -176,8 +224,10 @@ function cmdFindPhase(cwd, phase, raw) {
|
||||
|
||||
const phaseDir = path.join(phasesDir, match);
|
||||
const phaseFiles = fs.readdirSync(phaseDir);
|
||||
const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort();
|
||||
const plans = phaseFiles.filter(isCanonicalPlanFile).sort();
|
||||
const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').sort();
|
||||
// #2893 — same diagnostic as phase-plan-index for consistency.
|
||||
const planNamingWarning = describeNonCanonicalPlans(phaseFiles, plans);
|
||||
|
||||
const result = {
|
||||
found: true,
|
||||
@@ -187,6 +237,7 @@ function cmdFindPhase(cwd, phase, raw) {
|
||||
plans,
|
||||
summaries,
|
||||
};
|
||||
if (planNamingWarning) result.warning = planNamingWarning;
|
||||
|
||||
output(result, raw, result.directory);
|
||||
} catch {
|
||||
@@ -229,8 +280,11 @@ function cmdPhasePlanIndex(cwd, phase, raw) {
|
||||
|
||||
// Get all files in phase directory
|
||||
const phaseFiles = fs.readdirSync(phaseDir);
|
||||
const planFiles = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort();
|
||||
const planFiles = phaseFiles.filter(isCanonicalPlanFile).sort();
|
||||
const summaryFiles = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
|
||||
// #2893 — surface plan-shaped files the canonical filter rejected so a
|
||||
// misnamed plan never silently produces plan_count: 0 at executor init.
|
||||
const planNamingWarning = describeNonCanonicalPlans(phaseFiles, planFiles);
|
||||
|
||||
// Build set of plan IDs with summaries
|
||||
const completedPlanIds = new Set(
|
||||
@@ -305,6 +359,7 @@ function cmdPhasePlanIndex(cwd, phase, raw) {
|
||||
incomplete,
|
||||
has_checkpoints: hasCheckpoints,
|
||||
};
|
||||
if (planNamingWarning) result.warning = planNamingWarning;
|
||||
|
||||
output(result, raw);
|
||||
}
|
||||
|
||||
36
get-shit-done/bin/lib/phases-command-router.cjs
Normal file
36
get-shit-done/bin/lib/phases-command-router.cjs
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict';
|
||||
|
||||
const { PHASES_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
/**
|
||||
* Manifest-backed phases subcommand router.
|
||||
* Keeps gsd-tools.cjs thin while preserving current CJS semantics:
|
||||
* - list
|
||||
* - clear
|
||||
*
|
||||
* Note: `archive` is currently SDK-only (`phases.archive` handler in SDK query
|
||||
* registry). CJS `gsd-tools phases` intentionally supports list/clear only.
|
||||
*/
|
||||
function routePhasesCommand({ phase, milestone, args, cwd, raw, error }) {
|
||||
const subcommand = args[1];
|
||||
|
||||
if (subcommand === 'list') {
|
||||
const typeIndex = args.indexOf('--type');
|
||||
const phaseIndex = args.indexOf('--phase');
|
||||
const options = {
|
||||
type: typeIndex !== -1 ? args[typeIndex + 1] : null,
|
||||
phase: phaseIndex !== -1 ? args[phaseIndex + 1] : null,
|
||||
includeArchived: args.includes('--include-archived'),
|
||||
};
|
||||
phase.cmdPhasesList(cwd, options, raw);
|
||||
} else if (subcommand === 'clear') {
|
||||
milestone.cmdPhasesClear(cwd, raw, args.slice(2));
|
||||
} else {
|
||||
const cjsSupported = PHASES_SUBCOMMANDS.filter((s) => s !== 'archive');
|
||||
error(`Unknown phases subcommand. Available: ${cjsSupported.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
routePhasesCommand,
|
||||
};
|
||||
371
get-shit-done/bin/lib/planning-workspace.cjs
Normal file
371
get-shit-done/bin/lib/planning-workspace.cjs
Normal file
@@ -0,0 +1,371 @@
|
||||
/**
|
||||
* Planning Workspace — .planning path resolution + active workstream routing.
|
||||
*
|
||||
* This module owns the planning workspace seam:
|
||||
* - planningDir/planningRoot/planningPaths
|
||||
* - active workstream pointer policy (session-scoped > shared)
|
||||
* - pointer storage adapters (session/shared/memory)
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const crypto = require('crypto');
|
||||
const { execFileSync } = require('child_process');
|
||||
|
||||
const WORKSTREAM_SESSION_ENV_KEYS = [
|
||||
'GSD_SESSION_KEY',
|
||||
'CODEX_THREAD_ID',
|
||||
'CLAUDE_SESSION_ID',
|
||||
'CLAUDE_CODE_SSE_PORT',
|
||||
'OPENCODE_SESSION_ID',
|
||||
'GEMINI_SESSION_ID',
|
||||
'CURSOR_SESSION_ID',
|
||||
'WINDSURF_SESSION_ID',
|
||||
'TERM_SESSION_ID',
|
||||
'WT_SESSION',
|
||||
'TMUX_PANE',
|
||||
'ZELLIJ_SESSION_NAME',
|
||||
];
|
||||
|
||||
let cachedControllingTtyToken = null;
|
||||
let didProbeControllingTtyToken = false;
|
||||
|
||||
// Track .planning/.lock files held by this process so they can be removed on exit.
|
||||
const _heldPlanningLocks = new Set();
|
||||
process.on('exit', () => {
|
||||
for (const lockPath of _heldPlanningLocks) {
|
||||
try { fs.unlinkSync(lockPath); } catch { /* already gone */ }
|
||||
}
|
||||
});
|
||||
|
||||
function planningDir(cwd, ws, project) {
|
||||
if (project === undefined) project = process.env.GSD_PROJECT || null;
|
||||
if (ws === undefined) ws = process.env.GSD_WORKSTREAM || null;
|
||||
|
||||
// Reject path separators and traversal components in project/workstream names
|
||||
const BAD_SEGMENT = /[/\\]|\.\./;
|
||||
if (project && BAD_SEGMENT.test(project)) {
|
||||
throw new Error(`GSD_PROJECT contains invalid path characters: ${project}`);
|
||||
}
|
||||
if (ws && BAD_SEGMENT.test(ws)) {
|
||||
throw new Error(`GSD_WORKSTREAM contains invalid path characters: ${ws}`);
|
||||
}
|
||||
|
||||
let base = path.join(cwd, '.planning');
|
||||
if (project) base = path.join(base, project);
|
||||
if (ws) base = path.join(base, 'workstreams', ws);
|
||||
return base;
|
||||
}
|
||||
|
||||
function planningRoot(cwd) {
|
||||
return path.join(cwd, '.planning');
|
||||
}
|
||||
|
||||
function planningPaths(cwd, ws) {
|
||||
const base = planningDir(cwd, ws);
|
||||
return {
|
||||
planning: base,
|
||||
state: path.join(base, 'STATE.md'),
|
||||
roadmap: path.join(base, 'ROADMAP.md'),
|
||||
project: path.join(base, 'PROJECT.md'),
|
||||
config: path.join(base, 'config.json'),
|
||||
phases: path.join(base, 'phases'),
|
||||
requirements: path.join(base, 'REQUIREMENTS.md'),
|
||||
};
|
||||
}
|
||||
|
||||
function sanitizeWorkstreamSessionToken(value) {
|
||||
if (value === null || value === undefined) return null;
|
||||
const token = String(value).trim().replace(/[^a-zA-Z0-9._-]+/g, '_').replace(/^_+|_+$/g, '');
|
||||
return token ? token.slice(0, 160) : null;
|
||||
}
|
||||
|
||||
function probeControllingTtyToken() {
|
||||
if (didProbeControllingTtyToken) return cachedControllingTtyToken;
|
||||
didProbeControllingTtyToken = true;
|
||||
|
||||
// `tty` reads stdin. When stdin is already non-interactive, spawning it only
|
||||
// adds avoidable failures on the routing hot path and cannot reveal a stable token.
|
||||
if (!(process.stdin && process.stdin.isTTY)) {
|
||||
return cachedControllingTtyToken;
|
||||
}
|
||||
|
||||
try {
|
||||
const ttyPath = execFileSync('tty', [], {
|
||||
encoding: 'utf-8',
|
||||
stdio: ['inherit', 'pipe', 'ignore'],
|
||||
}).trim();
|
||||
if (ttyPath && ttyPath !== 'not a tty') {
|
||||
const token = sanitizeWorkstreamSessionToken(ttyPath.replace(/^\/dev\//, ''));
|
||||
if (token) cachedControllingTtyToken = `tty-${token}`;
|
||||
}
|
||||
} catch {}
|
||||
|
||||
return cachedControllingTtyToken;
|
||||
}
|
||||
|
||||
function getControllingTtyToken() {
|
||||
for (const envKey of ['TTY', 'SSH_TTY']) {
|
||||
const token = sanitizeWorkstreamSessionToken(process.env[envKey]);
|
||||
if (token) return `tty-${token.replace(/^dev_/, '')}`;
|
||||
}
|
||||
|
||||
return probeControllingTtyToken();
|
||||
}
|
||||
|
||||
function getWorkstreamSessionKey() {
|
||||
for (const envKey of WORKSTREAM_SESSION_ENV_KEYS) {
|
||||
const raw = process.env[envKey];
|
||||
const token = sanitizeWorkstreamSessionToken(raw);
|
||||
if (token) return `${envKey.toLowerCase().replace(/[^a-z0-9]+/g, '-')}-${token}`;
|
||||
}
|
||||
|
||||
return getControllingTtyToken();
|
||||
}
|
||||
|
||||
function getSessionScopedWorkstreamFile(cwd, fixedSessionKey) {
|
||||
const sessionKey = fixedSessionKey || getWorkstreamSessionKey();
|
||||
if (!sessionKey) return null;
|
||||
|
||||
// Use realpathSync.native so the hash is derived from the canonical filesystem
|
||||
// path. On Windows, path.resolve returns whatever case the caller supplied,
|
||||
// while realpathSync.native returns the case the OS recorded — they differ on
|
||||
// case-insensitive NTFS, producing different hashes and different tmpdir slots.
|
||||
// Fall back to path.resolve when the directory does not yet exist.
|
||||
let planningAbs;
|
||||
try {
|
||||
planningAbs = fs.realpathSync.native(planningRoot(cwd));
|
||||
} catch {
|
||||
planningAbs = path.resolve(planningRoot(cwd));
|
||||
}
|
||||
const projectId = crypto
|
||||
.createHash('sha1')
|
||||
.update(planningAbs)
|
||||
.digest('hex')
|
||||
.slice(0, 16);
|
||||
|
||||
const dirPath = path.join(os.tmpdir(), 'gsd-workstream-sessions', projectId);
|
||||
return {
|
||||
sessionKey,
|
||||
dirPath,
|
||||
filePath: path.join(dirPath, sessionKey),
|
||||
};
|
||||
}
|
||||
|
||||
function createSharedPointerAdapter(cwd) {
|
||||
const filePath = path.join(planningRoot(cwd), 'active-workstream');
|
||||
return {
|
||||
read() {
|
||||
try {
|
||||
return fs.readFileSync(filePath, 'utf-8').trim() || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
write(name) {
|
||||
fs.writeFileSync(filePath, name + '\n', 'utf-8');
|
||||
},
|
||||
clear() {
|
||||
try { fs.unlinkSync(filePath); } catch {}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createSessionScopedPointerAdapter(cwd, fixedSessionKey) {
|
||||
const scoped = getSessionScopedWorkstreamFile(cwd, fixedSessionKey);
|
||||
if (!scoped) return null;
|
||||
|
||||
return {
|
||||
read() {
|
||||
try {
|
||||
return fs.readFileSync(scoped.filePath, 'utf-8').trim() || null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
write(name) {
|
||||
fs.mkdirSync(scoped.dirPath, { recursive: true });
|
||||
fs.writeFileSync(scoped.filePath, name + '\n', 'utf-8');
|
||||
},
|
||||
clear() {
|
||||
try { fs.unlinkSync(scoped.filePath); } catch {}
|
||||
try {
|
||||
const remaining = fs.readdirSync(scoped.dirPath);
|
||||
if (remaining.length === 0) {
|
||||
fs.rmdirSync(scoped.dirPath);
|
||||
}
|
||||
} catch {}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createMemoryPointerAdapter(initialName = null) {
|
||||
let value = initialName;
|
||||
return {
|
||||
read() {
|
||||
return value;
|
||||
},
|
||||
write(name) {
|
||||
value = name;
|
||||
},
|
||||
clear() {
|
||||
value = null;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function pickActiveWorkstreamAdapter(cwd, opts = {}) {
|
||||
if (opts.activeWorkstreamAdapter) {
|
||||
return opts.activeWorkstreamAdapter;
|
||||
}
|
||||
|
||||
const sessionKey = getWorkstreamSessionKey();
|
||||
if (sessionKey) {
|
||||
if (opts.activeWorkstreamAdapters && opts.activeWorkstreamAdapters.session) {
|
||||
return opts.activeWorkstreamAdapters.session;
|
||||
}
|
||||
return createSessionScopedPointerAdapter(cwd, sessionKey);
|
||||
}
|
||||
|
||||
if (opts.activeWorkstreamAdapters && opts.activeWorkstreamAdapters.shared) {
|
||||
return opts.activeWorkstreamAdapters.shared;
|
||||
}
|
||||
return createSharedPointerAdapter(cwd);
|
||||
}
|
||||
|
||||
function validateWorkstreamName(name) {
|
||||
return /^[a-zA-Z0-9_-]+$/.test(name);
|
||||
}
|
||||
|
||||
function withPlanningLock(cwd, fn) {
|
||||
const lockPath = path.join(planningDir(cwd), '.lock');
|
||||
const lockTimeout = 10000; // 10 seconds
|
||||
const start = Date.now();
|
||||
|
||||
// Ensure .planning/ exists
|
||||
try { fs.mkdirSync(planningDir(cwd), { recursive: true }); } catch { /* ok */ }
|
||||
|
||||
function runWithHeldLock() {
|
||||
// Atomic create — fails if file exists
|
||||
fs.writeFileSync(lockPath, JSON.stringify({
|
||||
pid: process.pid,
|
||||
cwd,
|
||||
acquired: new Date().toISOString(),
|
||||
}), { flag: 'wx' });
|
||||
|
||||
_heldPlanningLocks.add(lockPath);
|
||||
|
||||
// Lock acquired — run the function
|
||||
try {
|
||||
return fn();
|
||||
} finally {
|
||||
_heldPlanningLocks.delete(lockPath);
|
||||
try { fs.unlinkSync(lockPath); } catch { /* already released */ }
|
||||
}
|
||||
}
|
||||
|
||||
while (Date.now() - start < lockTimeout) {
|
||||
try {
|
||||
return runWithHeldLock();
|
||||
} catch (err) {
|
||||
if (err.code === 'EEXIST') {
|
||||
// Lock exists — check if stale (>30s old)
|
||||
try {
|
||||
const stat = fs.statSync(lockPath);
|
||||
if (Date.now() - stat.mtimeMs > 30000) {
|
||||
fs.unlinkSync(lockPath);
|
||||
continue; // retry
|
||||
}
|
||||
} catch { continue; }
|
||||
|
||||
// Wait and retry (cross-platform, no shell dependency)
|
||||
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 100);
|
||||
continue;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
// Timeout — stale-lock recovery, then re-acquire atomically before entering critical section.
|
||||
try { fs.unlinkSync(lockPath); } catch { /* ok */ }
|
||||
return runWithHeldLock();
|
||||
}
|
||||
|
||||
function createPlanningWorkspace(cwd, opts = {}) {
|
||||
return {
|
||||
paths: {
|
||||
dir(ws, project) {
|
||||
return planningDir(cwd, ws, project);
|
||||
},
|
||||
root() {
|
||||
return planningRoot(cwd);
|
||||
},
|
||||
all(ws) {
|
||||
return planningPaths(cwd, ws);
|
||||
},
|
||||
},
|
||||
activeWorkstream: {
|
||||
get() {
|
||||
const adapter = pickActiveWorkstreamAdapter(cwd, opts);
|
||||
if (!adapter) return null;
|
||||
|
||||
const name = adapter.read();
|
||||
if (!name || !validateWorkstreamName(name)) {
|
||||
adapter.clear();
|
||||
return null;
|
||||
}
|
||||
|
||||
const wsDir = path.join(planningRoot(cwd), 'workstreams', name);
|
||||
if (!fs.existsSync(wsDir)) {
|
||||
adapter.clear();
|
||||
return null;
|
||||
}
|
||||
|
||||
return name;
|
||||
},
|
||||
set(name) {
|
||||
const adapter = pickActiveWorkstreamAdapter(cwd, opts);
|
||||
if (!adapter) return;
|
||||
|
||||
if (!name) {
|
||||
adapter.clear();
|
||||
return;
|
||||
}
|
||||
if (!validateWorkstreamName(name)) {
|
||||
throw new Error('Invalid workstream name: must be alphanumeric, hyphens, and underscores only');
|
||||
}
|
||||
|
||||
const wsDir = path.join(planningRoot(cwd), 'workstreams', name);
|
||||
fs.mkdirSync(wsDir, { recursive: true });
|
||||
adapter.write(name);
|
||||
},
|
||||
clear() {
|
||||
const adapter = pickActiveWorkstreamAdapter(cwd, opts);
|
||||
if (!adapter) return;
|
||||
adapter.clear();
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function getActiveWorkstream(cwd) {
|
||||
return createPlanningWorkspace(cwd).activeWorkstream.get();
|
||||
}
|
||||
|
||||
function setActiveWorkstream(cwd, name) {
|
||||
createPlanningWorkspace(cwd).activeWorkstream.set(name);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createPlanningWorkspace,
|
||||
createSharedPointerAdapter,
|
||||
createSessionScopedPointerAdapter,
|
||||
createMemoryPointerAdapter,
|
||||
planningDir,
|
||||
planningRoot,
|
||||
planningPaths,
|
||||
withPlanningLock,
|
||||
getActiveWorkstream,
|
||||
setActiveWorkstream,
|
||||
};
|
||||
23
get-shit-done/bin/lib/roadmap-command-router.cjs
Normal file
23
get-shit-done/bin/lib/roadmap-command-router.cjs
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict';
|
||||
|
||||
const { ROADMAP_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
function routeRoadmapCommand({ roadmap, args, cwd, raw, error }) {
|
||||
const subcommand = args[1];
|
||||
|
||||
if (subcommand === 'get-phase') {
|
||||
roadmap.cmdRoadmapGetPhase(cwd, args[2], raw);
|
||||
} else if (subcommand === 'analyze') {
|
||||
roadmap.cmdRoadmapAnalyze(cwd, raw);
|
||||
} else if (subcommand === 'update-plan-progress') {
|
||||
roadmap.cmdRoadmapUpdatePlanProgress(cwd, args[2], raw);
|
||||
} else if (subcommand === 'annotate-dependencies') {
|
||||
roadmap.cmdRoadmapAnnotateDependencies(cwd, args[2], raw);
|
||||
} else {
|
||||
error(`Unknown roadmap subcommand. Available: ${ROADMAP_SUBCOMMANDS.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
routeRoadmapCommand,
|
||||
};
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, normalizePhaseName, planningPaths, withPlanningLock, output, error, findPhaseInternal, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone, phaseTokenMatches, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { escapeRegex, normalizePhaseName, output, error, findPhaseInternal, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone, phaseTokenMatches, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { planningPaths, withPlanningLock } = require('./planning-workspace.cjs');
|
||||
|
||||
/**
|
||||
* Coerce an arbitrary YAML scalar/object into a string for cross-cutting
|
||||
|
||||
90
get-shit-done/bin/lib/state-command-router.cjs
Normal file
90
get-shit-done/bin/lib/state-command-router.cjs
Normal file
@@ -0,0 +1,90 @@
|
||||
'use strict';
|
||||
|
||||
const { STATE_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
/**
|
||||
* Manifest-backed state subcommand router.
|
||||
* Keeps gsd-tools.cjs thin while preserving existing command semantics.
|
||||
*/
|
||||
function routeStateCommand({ state, args, cwd, raw, parseNamedArgs, error }) {
|
||||
const subcommand = args[1];
|
||||
|
||||
if (subcommand === 'json') {
|
||||
state.cmdStateJson(cwd, raw);
|
||||
} else if (subcommand === 'update') {
|
||||
state.cmdStateUpdate(cwd, args[2], args[3]);
|
||||
} else if (subcommand === 'get') {
|
||||
state.cmdStateGet(cwd, args[2], raw);
|
||||
} else if (subcommand === 'patch') {
|
||||
const patches = {};
|
||||
for (let i = 2; i < args.length; i += 2) {
|
||||
const key = args[i].replace(/^--/, '');
|
||||
const value = args[i + 1];
|
||||
if (key && value !== undefined) {
|
||||
patches[key] = value;
|
||||
}
|
||||
}
|
||||
state.cmdStatePatch(cwd, patches, raw);
|
||||
} else if (subcommand === 'advance-plan') {
|
||||
state.cmdStateAdvancePlan(cwd, raw);
|
||||
} else if (subcommand === 'record-metric') {
|
||||
const { phase: p, plan, duration, tasks, files } = parseNamedArgs(args, ['phase', 'plan', 'duration', 'tasks', 'files']);
|
||||
state.cmdStateRecordMetric(cwd, { phase: p, plan, duration, tasks, files }, raw);
|
||||
} else if (subcommand === 'update-progress') {
|
||||
state.cmdStateUpdateProgress(cwd, raw);
|
||||
} else if (subcommand === 'add-decision') {
|
||||
const { phase: p, summary, 'summary-file': summary_file, rationale, 'rationale-file': rationale_file } = parseNamedArgs(args, ['phase', 'summary', 'summary-file', 'rationale', 'rationale-file']);
|
||||
state.cmdStateAddDecision(cwd, { phase: p, summary, summary_file, rationale: rationale || '', rationale_file }, raw);
|
||||
} else if (subcommand === 'add-blocker') {
|
||||
const { text, 'text-file': text_file } = parseNamedArgs(args, ['text', 'text-file']);
|
||||
state.cmdStateAddBlocker(cwd, { text, text_file }, raw);
|
||||
} else if (subcommand === 'resolve-blocker') {
|
||||
state.cmdStateResolveBlocker(cwd, parseNamedArgs(args, ['text']).text, raw);
|
||||
} else if (subcommand === 'record-session') {
|
||||
const { 'stopped-at': stopped_at, 'resume-file': resume_file } = parseNamedArgs(args, ['stopped-at', 'resume-file']);
|
||||
state.cmdStateRecordSession(cwd, { stopped_at, resume_file: resume_file || 'None' }, raw);
|
||||
} else if (subcommand === 'begin-phase') {
|
||||
const { phase: p, name, plans } = parseNamedArgs(args, ['phase', 'name', 'plans']);
|
||||
const parsedPlans = plans == null ? null : Number.parseInt(plans, 10);
|
||||
if (plans != null && Number.isNaN(parsedPlans)) {
|
||||
return error('Invalid --plans value. Expected an integer.');
|
||||
}
|
||||
state.cmdStateBeginPhase(cwd, p, name, parsedPlans, raw);
|
||||
} else if (subcommand === 'signal-waiting') {
|
||||
const { type, question, options, phase: p } = parseNamedArgs(args, ['type', 'question', 'options', 'phase']);
|
||||
state.cmdSignalWaiting(cwd, type, question, options, p, raw);
|
||||
} else if (subcommand === 'signal-resume') {
|
||||
state.cmdSignalResume(cwd, raw);
|
||||
} else if (subcommand === 'planned-phase') {
|
||||
const { phase: p, plans } = parseNamedArgs(args, ['phase', 'name', 'plans']);
|
||||
const parsedPlans = plans == null ? null : Number.parseInt(plans, 10);
|
||||
if (plans != null && Number.isNaN(parsedPlans)) {
|
||||
return error('Invalid --plans value. Expected an integer.');
|
||||
}
|
||||
state.cmdStatePlannedPhase(cwd, p, parsedPlans, raw);
|
||||
} else if (subcommand === 'validate') {
|
||||
state.cmdStateValidate(cwd, raw);
|
||||
} else if (subcommand === 'sync') {
|
||||
const { verify } = parseNamedArgs(args, [], ['verify']);
|
||||
state.cmdStateSync(cwd, { verify }, raw);
|
||||
} else if (subcommand === 'prune') {
|
||||
const { 'keep-recent': keepRecent, 'dry-run': dryRun } = parseNamedArgs(args, ['keep-recent'], ['dry-run']);
|
||||
state.cmdStatePrune(cwd, { keepRecent: keepRecent || '3', dryRun: !!dryRun }, raw);
|
||||
} else if (subcommand === 'complete-phase') {
|
||||
state.cmdStateCompletePhase(cwd, raw);
|
||||
} else if (subcommand === 'milestone-switch') {
|
||||
const { milestone, name } = parseNamedArgs(args, ['milestone', 'name']);
|
||||
state.cmdStateMilestoneSwitch(cwd, milestone, name, raw);
|
||||
} else if (subcommand === 'add-roadmap-evolution') {
|
||||
error('state add-roadmap-evolution is SDK-only. Use: gsd-sdk query state.add-roadmap-evolution ...');
|
||||
} else if (subcommand === undefined || subcommand === 'load') {
|
||||
state.cmdStateLoad(cwd, raw);
|
||||
} else {
|
||||
const available = ['load', 'complete-phase', ...STATE_SUBCOMMANDS.filter((s) => s !== 'load')];
|
||||
error(`Unknown state subcommand: "${subcommand}". Available: ${available.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
routeStateCommand,
|
||||
};
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, loadConfig, getMilestoneInfo, getMilestonePhaseFilter, normalizeMd, planningDir, planningPaths, output, error, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { escapeRegex, loadConfig, getMilestoneInfo, getMilestonePhaseFilter, normalizeMd, output, error, atomicWriteFileSync } = require('./core.cjs');
|
||||
const { planningDir, planningPaths } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter, reconstructFrontmatter } = require('./frontmatter.cjs');
|
||||
|
||||
// Cache disk scan results from buildStateFrontmatter per cwd per process (#1967).
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { normalizePhaseName, findPhaseInternal, generateSlugInternal, normalizeMd, toPosixPath, planningDir, output, error } = require('./core.cjs');
|
||||
const { normalizePhaseName, findPhaseInternal, generateSlugInternal, normalizeMd, toPosixPath, output, error } = require('./core.cjs');
|
||||
const { planningDir } = require('./planning-workspace.cjs');
|
||||
const { reconstructFrontmatter } = require('./frontmatter.cjs');
|
||||
|
||||
function cmdTemplateSelect(cwd, planPath, raw) {
|
||||
|
||||
@@ -7,7 +7,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { output, error, getMilestonePhaseFilter, planningDir, toPosixPath } = require('./core.cjs');
|
||||
const { output, error, getMilestonePhaseFilter, toPosixPath } = require('./core.cjs');
|
||||
const { planningDir } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { requireSafePath, sanitizeForDisplay } = require('./security.cjs');
|
||||
|
||||
|
||||
55
get-shit-done/bin/lib/validate-command-router.cjs
Normal file
55
get-shit-done/bin/lib/validate-command-router.cjs
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict';
|
||||
|
||||
const { VALIDATE_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
/**
 * Route `validate <subcommand>` CLI invocations to the matching handler.
 *
 * @param {object} deps - Injected dependencies.
 * @param {object} deps.verify - Module exposing the cmdValidate* handlers.
 * @param {string[]} deps.args - Raw CLI args; args[1] is the subcommand.
 * @param {string} deps.cwd - Project working directory, passed through to handlers.
 * @param {boolean} deps.raw - Raw-output flag, passed through to handlers.
 * @param {Function} deps.parseNamedArgs - Parser for `--name value` options.
 * @param {Function} deps.output - Result emitter.
 * @param {Function} deps.error - Error emitter (reports and returns; does not throw).
 */
function routeValidateCommand({ verify, args, cwd, raw, parseNamedArgs, output, error }) {
  const subcommand = args[1];

  if (subcommand === 'consistency') {
    verify.cmdValidateConsistency(cwd, raw);
  } else if (subcommand === 'health') {
    const repairFlag = args.includes('--repair');
    const backfillFlag = args.includes('--backfill');
    verify.cmdValidateHealth(cwd, { repair: repairFlag, backfill: backfillFlag }, raw);
  } else if (subcommand === 'agents') {
    verify.cmdValidateAgents(cwd, raw);
  } else if (subcommand === 'context') {
    const opts = parseNamedArgs(args, ['tokens-used', 'context-window']);
    if (opts['tokens-used'] === null) {
      error('--tokens-used <integer> is required for `validate context`');
      return;
    }
    if (opts['context-window'] === null) {
      error('--context-window <integer> is required for `validate context`');
      return;
    }
    const { classifyContextUtilization, STATES } = require('./context-utilization.cjs');
    // Per-state user guidance; HEALTHY intentionally carries no recommendation.
    const RECOMMENDATIONS = {
      [STATES.HEALTHY]: null,
      [STATES.WARNING]: 'Context is approaching the fracture zone — consider /gsd-thread to continue in a fresh window.',
      [STATES.CRITICAL]: 'Reasoning quality may degrade past 70% utilization (fracture point). Run /gsd-thread now to preserve output quality.',
    };
    let classified;
    try {
      classified = classifyContextUtilization(Number(opts['tokens-used']), Number(opts['context-window']));
    } catch (e) {
      // classifyContextUtilization names the offending field in its message;
      // map that back to the CLI flag the user supplied.
      const flag = /tokensUsed/.test(e.message) ? '--tokens-used' : '--context-window';
      error(`${flag} must be a non-negative integer (window > 0), got the values supplied`);
      return;
    }
    const result = { ...classified, recommendation: RECOMMENDATIONS[classified.state] };
    if (args.includes('--json')) {
      output(result, raw);
    } else {
      const lines = [`Context utilization: ${result.percent}% (${result.state})`];
      if (result.recommendation) lines.push(result.recommendation);
      output(result, true, lines.join('\n'));
    }
  } else {
    // The generated alias module exports VALIDATE_SUBCOMMANDS as a Set
    // (see gen-command-aliases.ts), and Set has no .join — spread into an
    // array first. This also stays correct if the artifact ever becomes a
    // plain array.
    error(`Unknown validate subcommand. Available: ${[...VALIDATE_SUBCOMMANDS].join(', ')}`);
  }
}
|
||||
|
||||
module.exports = {
|
||||
routeValidateCommand,
|
||||
};
|
||||
34
get-shit-done/bin/lib/verify-command-router.cjs
Normal file
34
get-shit-done/bin/lib/verify-command-router.cjs
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict';
|
||||
|
||||
const { VERIFY_SUBCOMMANDS } = require('./command-aliases.generated.cjs');
|
||||
|
||||
/**
 * Route `verify <subcommand>` CLI invocations to the matching handler.
 *
 * @param {object} deps - Injected dependencies.
 * @param {object} deps.verify - Module exposing the cmdVerify* handlers.
 * @param {string[]} deps.args - Raw CLI args; args[1] is the subcommand,
 *   args[2]+ are positional/flag arguments forwarded to the handler.
 * @param {string} deps.cwd - Project working directory, passed through to handlers.
 * @param {boolean} deps.raw - Raw-output flag, passed through to handlers.
 * @param {Function} deps.error - Error emitter (reports and returns; does not throw).
 */
function routeVerifyCommand({ verify, args, cwd, raw, error }) {
  const subcommand = args[1];

  if (subcommand === 'plan-structure') {
    verify.cmdVerifyPlanStructure(cwd, args[2], raw);
  } else if (subcommand === 'phase-completeness') {
    verify.cmdVerifyPhaseCompleteness(cwd, args[2], raw);
  } else if (subcommand === 'references') {
    verify.cmdVerifyReferences(cwd, args[2], raw);
  } else if (subcommand === 'commits') {
    verify.cmdVerifyCommits(cwd, args.slice(2), raw);
  } else if (subcommand === 'artifacts') {
    verify.cmdVerifyArtifacts(cwd, args[2], raw);
  } else if (subcommand === 'key-links') {
    verify.cmdVerifyKeyLinks(cwd, args[2], raw);
  } else if (subcommand === 'schema-drift') {
    // schema-drift takes an optional positional phase plus a --skip flag;
    // the first non-flag token is treated as the phase.
    const rest = args.slice(2);
    const skipFlag = rest.includes('--skip');
    const phaseArg = rest.find((arg) => !arg.startsWith('-'));
    verify.cmdVerifySchemaDrift(cwd, phaseArg, skipFlag, raw);
  } else if (subcommand === 'codebase-drift') {
    verify.cmdVerifyCodebaseDrift(cwd, raw);
  } else {
    // The generated alias module exports VERIFY_SUBCOMMANDS as a Set
    // (see gen-command-aliases.ts), and Set has no .join — spread into an
    // array first. Also works unchanged if the artifact becomes an array.
    error(`Unknown verify subcommand. Available: ${[...VERIFY_SUBCOMMANDS].join(', ')}`);
  }
}
|
||||
|
||||
module.exports = {
|
||||
routeVerifyCommand,
|
||||
};
|
||||
@@ -5,7 +5,8 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { safeReadFile, loadConfig, normalizePhaseName, escapeRegex, execGit, findPhaseInternal, getMilestoneInfo, stripShippedMilestones, extractCurrentMilestone, planningDir, output, error, checkAgentsInstalled, CONFIG_DEFAULTS } = require('./core.cjs');
|
||||
const { safeReadFile, loadConfig, normalizePhaseName, escapeRegex, execGit, findPhaseInternal, getMilestoneInfo, stripShippedMilestones, extractCurrentMilestone, output, error, checkAgentsInstalled, CONFIG_DEFAULTS } = require('./core.cjs');
|
||||
const { planningDir } = require('./planning-workspace.cjs');
|
||||
const { extractFrontmatter, parseMustHavesBlock } = require('./frontmatter.cjs');
|
||||
const { writeStateMd } = require('./state.cjs');
|
||||
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { output, error, planningPaths, planningRoot, toPosixPath, getMilestoneInfo, generateSlugInternal, setActiveWorkstream, getActiveWorkstream, filterPlanFiles, filterSummaryFiles, readSubdirectories } = require('./core.cjs');
|
||||
const { output, error, toPosixPath, getMilestoneInfo, generateSlugInternal, filterPlanFiles, filterSummaryFiles, readSubdirectories } = require('./core.cjs');
|
||||
const { planningPaths, planningRoot, setActiveWorkstream, getActiveWorkstream } = require('./planning-workspace.cjs');
|
||||
const { stateExtractField } = require('./state.cjs');
|
||||
|
||||
// ─── Migration ──────────────────────────────────────────────────────────────
|
||||
|
||||
@@ -56,6 +56,7 @@
|
||||
"scripts": {
|
||||
"build:hooks": "node scripts/build-hooks.js",
|
||||
"build:sdk": "cd sdk && npm ci && npm run build",
|
||||
"check:alias-drift": "cd sdk && npm run check:alias-drift",
|
||||
"prepublishOnly": "npm run build:hooks && npm run build:sdk",
|
||||
"pretest": "npm run build:sdk",
|
||||
"pretest:coverage": "npm run build:sdk",
|
||||
|
||||
4
sdk/package-lock.json
generated
4
sdk/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@gsd-build/sdk",
|
||||
"version": "0.1.0",
|
||||
"version": "1.39.0-rc.4",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@gsd-build/sdk",
|
||||
"version": "0.1.0",
|
||||
"version": "1.39.0-rc.4",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@anthropic-ai/claude-agent-sdk": "^0.2.84",
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"check:alias-drift": "npm run build && node scripts/check-command-aliases-fresh.mjs",
|
||||
"prepublishOnly": "rm -rf dist && tsc && chmod +x dist/cli.js",
|
||||
"test": "vitest run",
|
||||
"test:unit": "vitest run --project unit",
|
||||
|
||||
87
sdk/scripts/check-command-aliases-fresh.mjs
Normal file
87
sdk/scripts/check-command-aliases-fresh.mjs
Normal file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env node
/**
 * Drift guard for generated command-alias artifacts.
 *
 * Recomputes the expected alias expansion from the command manifests (the
 * source of truth, loaded from the built ../dist output) and compares it to
 * both committed artifacts: the TS module and the CJS mirror used by
 * gsd-tools. Exits non-zero on any mismatch. Run via `npm run check:alias-drift`.
 */
import { createRequire } from 'node:module';
import { resolve, dirname } from 'node:path';
import { fileURLToPath } from 'node:url';

const require = createRequire(import.meta.url);
const here = dirname(fileURLToPath(import.meta.url));

const {
  STATE_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.state.js');
const {
  VERIFY_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.verify.js');
const {
  INIT_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.init.js');
const {
  PHASE_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.phase.js');
const {
  PHASES_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.phases.js');
const {
  VALIDATE_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.validate.js');
const {
  ROADMAP_COMMAND_MANIFEST,
} = await import('../dist/query/command-manifest.roadmap.js');

const {
  STATE_COMMAND_ALIASES,
  VERIFY_COMMAND_ALIASES,
  INIT_COMMAND_ALIASES,
  PHASE_COMMAND_ALIASES,
  PHASES_COMMAND_ALIASES,
  VALIDATE_COMMAND_ALIASES,
  ROADMAP_COMMAND_ALIASES,
} = await import('../dist/query/command-aliases.generated.js');

// CJS mirror consumed by the legacy CLI; loaded via createRequire since this
// script itself is an ES module.
const cjsAliases = require(resolve(here, '..', '..', 'get-shit-done', 'bin', 'lib', 'command-aliases.generated.cjs'));

/**
 * Expand a manifest into the alias-entry shape emitted by
 * sdk/scripts/gen-command-aliases.ts. Must stay in lockstep with that
 * generator's toSubcommand(): a canonical that does not carry the family
 * prefix falls back to the full canonical name rather than being sliced
 * blindly (blind slicing would truncate the name and report phantom drift).
 */
function toAliasEntries(manifest, family) {
  const prefix = `${family}.`;
  return manifest.map((entry) => ({
    canonical: entry.canonical,
    aliases: [...entry.aliases],
    subcommand: entry.canonical.startsWith(prefix) ? entry.canonical.slice(prefix.length) : entry.canonical,
    mutation: entry.mutation,
  }));
}

/**
 * Deep-compare via JSON serialization; throws with the offending artifact
 * label so CI output points at what to regenerate.
 */
function assertEqual(label, actual, expected) {
  const a = JSON.stringify(actual);
  const e = JSON.stringify(expected);
  if (a !== e) {
    throw new Error(
      `${label} drift detected. Regenerate command alias artifacts and commit them.`,
    );
  }
}

const expectedState = toAliasEntries(STATE_COMMAND_MANIFEST, 'state');
const expectedVerify = toAliasEntries(VERIFY_COMMAND_MANIFEST, 'verify');
const expectedInit = toAliasEntries(INIT_COMMAND_MANIFEST, 'init');
const expectedPhase = toAliasEntries(PHASE_COMMAND_MANIFEST, 'phase');
const expectedPhases = toAliasEntries(PHASES_COMMAND_MANIFEST, 'phases');
const expectedValidate = toAliasEntries(VALIDATE_COMMAND_MANIFEST, 'validate');
const expectedRoadmap = toAliasEntries(ROADMAP_COMMAND_MANIFEST, 'roadmap');

// TS artifact must match the manifests…
assertEqual('TS STATE_COMMAND_ALIASES', STATE_COMMAND_ALIASES, expectedState);
assertEqual('TS VERIFY_COMMAND_ALIASES', VERIFY_COMMAND_ALIASES, expectedVerify);
assertEqual('TS INIT_COMMAND_ALIASES', INIT_COMMAND_ALIASES, expectedInit);
assertEqual('TS PHASE_COMMAND_ALIASES', PHASE_COMMAND_ALIASES, expectedPhase);
assertEqual('TS PHASES_COMMAND_ALIASES', PHASES_COMMAND_ALIASES, expectedPhases);
assertEqual('TS VALIDATE_COMMAND_ALIASES', VALIDATE_COMMAND_ALIASES, expectedValidate);
assertEqual('TS ROADMAP_COMMAND_ALIASES', ROADMAP_COMMAND_ALIASES, expectedRoadmap);

// …and so must the CJS mirror.
assertEqual('CJS STATE_COMMAND_ALIASES', cjsAliases.STATE_COMMAND_ALIASES, expectedState);
assertEqual('CJS VERIFY_COMMAND_ALIASES', cjsAliases.VERIFY_COMMAND_ALIASES, expectedVerify);
assertEqual('CJS INIT_COMMAND_ALIASES', cjsAliases.INIT_COMMAND_ALIASES, expectedInit);
assertEqual('CJS PHASE_COMMAND_ALIASES', cjsAliases.PHASE_COMMAND_ALIASES, expectedPhase);
assertEqual('CJS PHASES_COMMAND_ALIASES', cjsAliases.PHASES_COMMAND_ALIASES, expectedPhases);
assertEqual('CJS VALIDATE_COMMAND_ALIASES', cjsAliases.VALIDATE_COMMAND_ALIASES, expectedValidate);
assertEqual('CJS ROADMAP_COMMAND_ALIASES', cjsAliases.ROADMAP_COMMAND_ALIASES, expectedRoadmap);

console.log('command alias artifacts are fresh');
|
||||
124
sdk/scripts/gen-command-aliases.ts
Normal file
124
sdk/scripts/gen-command-aliases.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Build-time alias generator skeleton for command-manifest-driven routing.
|
||||
*
|
||||
* This pilot commits generated artifacts directly; this script documents and
|
||||
* preserves the generation seam so future command families can be migrated
|
||||
* without hand-maintained alias duplication.
|
||||
*/
|
||||
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { STATE_COMMAND_MANIFEST } from '../src/query/command-manifest.state.js';
|
||||
import { VERIFY_COMMAND_MANIFEST } from '../src/query/command-manifest.verify.js';
|
||||
import { INIT_COMMAND_MANIFEST } from '../src/query/command-manifest.init.js';
|
||||
import { PHASE_COMMAND_MANIFEST } from '../src/query/command-manifest.phase.js';
|
||||
import { PHASES_COMMAND_MANIFEST } from '../src/query/command-manifest.phases.js';
|
||||
import { VALIDATE_COMMAND_MANIFEST } from '../src/query/command-manifest.validate.js';
|
||||
import { ROADMAP_COMMAND_MANIFEST } from '../src/query/command-manifest.roadmap.js';
|
||||
|
||||
function toSubcommand(canonical: string, family: 'state' | 'verify' | 'init' | 'phase' | 'phases' | 'validate' | 'roadmap'): string {
|
||||
const prefix = `${family}.`;
|
||||
return canonical.startsWith(prefix) ? canonical.slice(prefix.length) : canonical;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const stateEntries = STATE_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'state'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const verifyEntries = VERIFY_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'verify'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const initEntries = INIT_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'init'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const phaseEntries = PHASE_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'phase'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const phasesEntries = PHASES_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'phases'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const validateEntries = VALIDATE_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'validate'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const roadmapEntries = ROADMAP_COMMAND_MANIFEST.map((entry) => ({
|
||||
canonical: entry.canonical,
|
||||
aliases: entry.aliases,
|
||||
subcommand: toSubcommand(entry.canonical, 'roadmap'),
|
||||
mutation: entry.mutation,
|
||||
}));
|
||||
|
||||
const outPath = fileURLToPath(new URL('../src/query/command-aliases.generated.ts', import.meta.url));
|
||||
const header = `/**\n * GENERATED FILE — command alias expansion for state.*, verify.*, init.*, phase.*, phases.*, validate.*, and roadmap.* pilots.\n * Source: sdk/src/query/command-manifest.{state,verify,init,phase,phases,validate,roadmap}.ts\n */\n\n`;
|
||||
const body = [
|
||||
`export const STATE_COMMAND_ALIASES = ${JSON.stringify(stateEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const VERIFY_COMMAND_ALIASES = ${JSON.stringify(verifyEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const INIT_COMMAND_ALIASES = ${JSON.stringify(initEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const PHASE_COMMAND_ALIASES = ${JSON.stringify(phaseEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const PHASES_COMMAND_ALIASES = ${JSON.stringify(phasesEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const VALIDATE_COMMAND_ALIASES = ${JSON.stringify(validateEntries, null, 2)} as const;`,
|
||||
'',
|
||||
`export const ROADMAP_COMMAND_ALIASES = ${JSON.stringify(roadmapEntries, null, 2)} as const;`,
|
||||
'',
|
||||
'export const STATE_SUBCOMMANDS = new Set<string>(STATE_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const VERIFY_SUBCOMMANDS = new Set<string>(VERIFY_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const INIT_SUBCOMMANDS = new Set<string>(INIT_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const PHASE_SUBCOMMANDS = new Set<string>(PHASE_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const PHASES_SUBCOMMANDS = new Set<string>(PHASES_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const VALIDATE_SUBCOMMANDS = new Set<string>(VALIDATE_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'export const ROADMAP_SUBCOMMANDS = new Set<string>(ROADMAP_COMMAND_ALIASES.map((entry) => entry.subcommand));',
|
||||
'',
|
||||
'export const STATE_MUTATION_COMMANDS: readonly string[] = STATE_COMMAND_ALIASES',
|
||||
' .filter((entry) => entry.mutation)',
|
||||
' .flatMap((entry) => [entry.canonical, ...entry.aliases]);',
|
||||
'',
|
||||
'export const PHASE_MUTATION_COMMANDS: readonly string[] = PHASE_COMMAND_ALIASES',
|
||||
' .filter((entry) => entry.mutation)',
|
||||
' .flatMap((entry) => [entry.canonical, ...entry.aliases]);',
|
||||
'',
|
||||
'export const PHASES_MUTATION_COMMANDS: readonly string[] = PHASES_COMMAND_ALIASES',
|
||||
' .filter((entry) => entry.mutation)',
|
||||
' .flatMap((entry) => [entry.canonical, ...entry.aliases]);',
|
||||
'',
|
||||
'export const ROADMAP_MUTATION_COMMANDS: readonly string[] = ROADMAP_COMMAND_ALIASES',
|
||||
' .filter((entry) => entry.mutation)',
|
||||
' .flatMap((entry) => [entry.canonical, ...entry.aliases]);',
|
||||
'',
|
||||
].join('\n');
|
||||
await writeFile(outPath, header + body, 'utf-8');
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -4,4 +4,14 @@
|
||||
* tests land; other mutations rely on `MUTATION_DEFERRED_REASON` in golden-policy.
|
||||
*/
|
||||
|
||||
export const GOLDEN_MUTATION_SUBPROCESS_COVERED: readonly string[] = [];
|
||||
export const GOLDEN_MUTATION_SUBPROCESS_COVERED: readonly string[] = [
|
||||
'state.update',
|
||||
'state.patch',
|
||||
'state.begin-phase',
|
||||
'state.sync',
|
||||
'phase.add',
|
||||
'phase.add-batch',
|
||||
'phase.insert',
|
||||
'phases.clear',
|
||||
'roadmap.update-plan-progress',
|
||||
];
|
||||
|
||||
@@ -35,6 +35,73 @@ function omitAgentInstallFields(data: Record<string, unknown>): Record<string, u
|
||||
return o;
|
||||
}
|
||||
|
||||
const MINIMAL_STATE = `---
|
||||
gsd_state_version: 1.0
|
||||
milestone: v3.0
|
||||
milestone_name: SDK-First Migration
|
||||
status: executing
|
||||
---
|
||||
|
||||
# Project State
|
||||
|
||||
## Current Position
|
||||
|
||||
Phase: 10 (Read-Only Queries) — EXECUTING
|
||||
Plan: 2 of 3
|
||||
Status: Executing Phase 10
|
||||
Last activity: 2026-04-08 -- Phase 10 execution started
|
||||
|
||||
Progress: [░░░░░░░░░░] 50%
|
||||
`;
|
||||
|
||||
async function setupMinimalStateProject(root: string): Promise<void> {
|
||||
await mkdir(join(root, '.planning', 'phases'), { recursive: true });
|
||||
await writeFile(join(root, '.planning', 'STATE.md'), MINIMAL_STATE, 'utf-8');
|
||||
await writeFile(
|
||||
join(root, '.planning', 'ROADMAP.md'),
|
||||
'# Roadmap\n\n## Current Milestone: v3.0 SDK-First Migration\n\n### Phase 10: Read-Only Queries\n',
|
||||
'utf-8',
|
||||
);
|
||||
await writeFile(join(root, '.planning', 'config.json'), '{"model_profile":"balanced"}', 'utf-8');
|
||||
}
|
||||
|
||||
/**
 * Extend the minimal project with a phases fixture: two phase directories with
 * plan/summary files, a richer ROADMAP.md (table + per-phase sections), and an
 * empty archived-milestone directory. Content is byte-exact on purpose — the
 * parity suites compare CLI and SDK output over this exact layout.
 */
async function setupPhasesFixture(root: string): Promise<void> {
  await setupMinimalStateProject(root);
  const phasesRoot = join(root, '.planning', 'phases');
  // Phase 10: two unexecuted plans; Phase 11: one summary (complete);
  // 999-backlog: deliberately empty directory.
  await mkdir(join(phasesRoot, '10-read-only-queries'), { recursive: true });
  await mkdir(join(phasesRoot, '11-foundation-cleanup'), { recursive: true });
  await mkdir(join(phasesRoot, '999-backlog'), { recursive: true });
  await writeFile(join(phasesRoot, '10-read-only-queries', '10-01-PLAN.md'), '# plan\n', 'utf-8');
  await writeFile(join(phasesRoot, '10-read-only-queries', '10-02-PLAN.md'), '# plan\n', 'utf-8');
  await writeFile(join(phasesRoot, '11-foundation-cleanup', '11-01-SUMMARY.md'), '# summary\n', 'utf-8');

  // Overwrite the minimal ROADMAP.md with a table + per-phase sections so
  // roadmap.* commands have real structure to parse.
  await writeFile(
    join(root, '.planning', 'ROADMAP.md'),
    [
      '# Roadmap',
      '',
      '| Phase | Plans | Status | Completed |',
      '|---|---|---|---|',
      '| 10. | 0/2 | Planned | |',
      '| 11. | 1/1 | Complete | 2026-04-01 |',
      '',
      '### Phase 10: Read-Only Queries',
      '',
      '**Plans:** 0/2 plans executed',
      '',
      'Plans:',
      '- [ ] 10-01',
      '- [ ] 10-02',
      '',
      '### Phase 11: Foundation Cleanup',
    ].join('\n'),
    'utf-8',
  );

  // Archived milestone directory exercises the --include-archived path.
  const archivedRoot = join(root, '.planning', 'milestones', 'v0.9-phases', '09-legacy-foundation');
  await mkdir(archivedRoot, { recursive: true });
}
|
||||
|
||||
describe('Golden file tests', () => {
|
||||
describe('generate-slug', () => {
|
||||
it('SDK output matches gsd-tools.cjs and checked-in golden fixture (fixture must track CLI, not SDK alone)', async () => {
|
||||
@@ -121,6 +188,43 @@ describe('Golden file tests', () => {
|
||||
});
|
||||
});
|
||||
|
||||
  // Parity suite for roadmap.* commands: the legacy CLI and the SDK each run
  // against their OWN fresh fixture copy so a mutation by one cannot affect
  // the other's input.
  describe('roadmap parity (subprocess parity)', () => {
    // Build a matched pair of phases fixtures; callers own cleanup (try/finally).
    async function withFreshRoadmapProjects(): Promise<{ gsdDir: string; sdkDir: string }> {
      const suffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
      const gsdDir = join(tmpdir(), `gsd-golden-roadmap-gsd-${suffix}`);
      const sdkDir = join(tmpdir(), `gsd-golden-roadmap-sdk-${suffix}`);
      await setupPhasesFixture(gsdDir);
      await setupPhasesFixture(sdkDir);
      return { gsdDir, sdkDir };
    }

    it('roadmap.get-phase matches gsd-tools.cjs on fixture', async () => {
      const { gsdDir, sdkDir } = await withFreshRoadmapProjects();
      try {
        // CLI output is the reference; SDK dispatch must agree structurally.
        const gsdOutput = await captureGsdToolsOutput('roadmap', ['get-phase', '10'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('roadmap.get-phase', ['10'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('roadmap.update-plan-progress matches gsd-tools.cjs on fixture', async () => {
      const { gsdDir, sdkDir } = await withFreshRoadmapProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('roadmap', ['update-plan-progress', '10'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('roadmap.update-plan-progress', ['10'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });
  });
|
||||
|
||||
describe('progress', () => {
|
||||
it('SDK JSON matches gsd-tools.cjs (`progress json`)', async () => {
|
||||
const gsdOutput = await captureGsdToolsOutput('progress', ['json'], REPO_ROOT);
|
||||
@@ -168,6 +272,188 @@ describe('Golden file tests', () => {
|
||||
});
|
||||
});
|
||||
|
||||
  // Parity suite for state.* mutations.
  // NOTE(review): unlike the phase/phases suites, CLI and SDK here run
  // sequentially against the SAME tmpDir — parity holds only because these
  // state mutations appear idempotent (setting a field twice yields the same
  // result). Confirm that assumption before adding non-idempotent cases.
  describe('state mutations (subprocess parity)', () => {
    let tmpDir: string;

    beforeEach(async () => {
      // Fresh minimal project per test; suffix avoids collisions across runs.
      tmpDir = join(tmpdir(), `gsd-golden-state-${Date.now()}-${Math.random().toString(36).slice(2)}`);
      await setupMinimalStateProject(tmpDir);
    });

    afterEach(async () => {
      await rm(tmpDir, { recursive: true, force: true });
    });

    it('state.update matches gsd-tools.cjs', async () => {
      const gsdOutput = await captureGsdToolsOutput('state', ['update', 'Status', 'Executing SDK'], tmpDir);
      const registry = createRegistry();
      const sdkResult = await registry.dispatch('state.update', ['Status', 'Executing SDK'], tmpDir);
      expect(sdkResult.data).toEqual(gsdOutput);
    });

    it('state.patch matches gsd-tools.cjs', async () => {
      const gsdOutput = await captureGsdToolsOutput('state', ['patch', '--status', 'Patched via parity'], tmpDir);
      const registry = createRegistry();
      const sdkResult = await registry.dispatch('state.patch', ['--status', 'Patched via parity'], tmpDir);
      expect(sdkResult.data).toEqual(gsdOutput);
    });

    it('state.begin-phase matches gsd-tools.cjs', async () => {
      const argv = ['begin-phase', '--phase', '11', '--name', 'State Pilot', '--plans', '3'];
      const gsdOutput = await captureGsdToolsOutput('state', argv, tmpDir);
      const registry = createRegistry();
      const sdkResult = await registry.dispatch('state.begin-phase', ['--phase', '11', '--name', 'State Pilot', '--plans', '3'], tmpDir);
      expect(sdkResult.data).toEqual(gsdOutput);
    });

    it('state.sync --verify matches gsd-tools.cjs', async () => {
      const gsdOutput = await captureGsdToolsOutput('state', ['sync', '--verify'], tmpDir);
      const registry = createRegistry();
      const sdkResult = await registry.dispatch('state.sync', ['--verify'], tmpDir);
      expect(sdkResult.data).toEqual(gsdOutput);
    });
  });
|
||||
|
||||
  // Parity suite for phase.* mutations. Every test provisions TWO fresh
  // projects — the legacy CLI mutates one, the SDK the other — because these
  // mutations are not idempotent, so sharing a directory would compound edits.
  describe('phase mutations (subprocess parity)', () => {
    // Build a matched pair of minimal projects; callers own cleanup (try/finally).
    async function withFreshPhaseProjects(): Promise<{ gsdDir: string; sdkDir: string }> {
      const suffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
      const gsdDir = join(tmpdir(), `gsd-golden-phase-gsd-${suffix}`);
      const sdkDir = join(tmpdir(), `gsd-golden-phase-sdk-${suffix}`);
      await setupMinimalStateProject(gsdDir);
      await setupMinimalStateProject(sdkDir);
      return { gsdDir, sdkDir };
    }

    it('phase.add matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhaseProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phase', ['add', 'Phase parity add'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phase.add', ['Phase parity add'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phase.add-batch matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhaseProjects();
      try {
        // --descriptions takes a JSON array literal as a single argv token.
        const argv = ['add-batch', '--descriptions', '["Batch A","Batch B"]'];
        const gsdOutput = await captureGsdToolsOutput('phase', argv, gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phase.add-batch', ['--descriptions', '["Batch A","Batch B"]'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phase.insert matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhaseProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phase', ['insert', '10', 'Inserted parity phase'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phase.insert', ['10', 'Inserted parity phase'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });
  });
|
||||
|
||||
  // Parity suite for phases.* over the richer phases fixture (plans, summaries,
  // empty backlog dir, archived milestone). CLI and SDK each get their own copy.
  describe('phases parity (subprocess parity)', () => {
    // Build a matched pair of phases fixtures; callers own cleanup (try/finally).
    async function withFreshPhasesProjects(): Promise<{ gsdDir: string; sdkDir: string }> {
      const suffix = `${Date.now()}-${Math.random().toString(36).slice(2)}`;
      const gsdDir = join(tmpdir(), `gsd-golden-phases-gsd-${suffix}`);
      const sdkDir = join(tmpdir(), `gsd-golden-phases-sdk-${suffix}`);
      await setupPhasesFixture(gsdDir);
      await setupPhasesFixture(sdkDir);
      return { gsdDir, sdkDir };
    }

    it('phases.list matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phases', ['list'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.list', [], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phases.list --type plans matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phases', ['list', '--type', 'plans'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.list', ['--type', 'plans'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phases.list --type summaries matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phases', ['list', '--type', 'summaries'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.list', ['--type', 'summaries'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phases.list --phase 10 matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phases', ['list', '--phase', '10'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.list', ['--phase', '10'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phases.list --include-archived matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        const gsdOutput = await captureGsdToolsOutput('phases', ['list', '--include-archived'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.list', ['--include-archived'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });

    it('phases.clear --confirm matches gsd-tools.cjs', async () => {
      const { gsdDir, sdkDir } = await withFreshPhasesProjects();
      try {
        // Mutation case: each side clears its own fixture copy.
        const gsdOutput = await captureGsdToolsOutput('phases', ['clear', '--confirm'], gsdDir);
        const registry = createRegistry();
        const sdkResult = await registry.dispatch('phases.clear', ['--confirm'], sdkDir);
        expect(sdkResult.data).toEqual(gsdOutput);
      } finally {
        await rm(gsdDir, { recursive: true, force: true });
        await rm(sdkDir, { recursive: true, force: true });
      }
    });
  });
|
||||
|
||||
describe('current-timestamp', () => {
|
||||
it('SDK full format matches gsd-tools.cjs output structure', async () => {
|
||||
const gsdOutput = await captureGsdToolsOutput('current-timestamp', ['full'], PROJECT_DIR) as { timestamp: string };
|
||||
@@ -241,6 +527,24 @@ describe('Golden file tests', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('validate.health', () => {
|
||||
it('SDK JSON matches gsd-tools.cjs', async () => {
|
||||
const gsdOutput = await captureGsdToolsOutput('validate', ['health'], REPO_ROOT);
|
||||
const registry = createRegistry();
|
||||
const sdkResult = await registry.dispatch('validate.health', [], REPO_ROOT);
|
||||
expect(sdkResult.data).toEqual(gsdOutput);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validate.agents', () => {
|
||||
it('SDK JSON matches gsd-tools.cjs', async () => {
|
||||
const gsdOutput = await captureGsdToolsOutput('validate', ['agents'], REPO_ROOT);
|
||||
const registry = createRegistry();
|
||||
const sdkResult = await registry.dispatch('validate.agents', [], REPO_ROOT);
|
||||
expect(sdkResult.data).toEqual(gsdOutput);
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Init composition handler golden tests ─────────────────────────────
|
||||
|
||||
describe('init.execute-phase', () => {
|
||||
|
||||
@@ -35,6 +35,10 @@ import { PhaseRunner } from './phase-runner.js';
|
||||
import { ContextEngine } from './context-engine.js';
|
||||
import { PromptFactory } from './phase-prompt.js';
|
||||
|
||||
export { PlanningJournal } from './planning-journal.js';
|
||||
export type { PlanningEvent, PlanningEventActor, PlanningJournalAppendInput } from './planning-journal.js';
|
||||
export { PlanningRuntime } from './planning-runtime.js';
|
||||
|
||||
// ─── GSD class ───────────────────────────────────────────────────────────────
|
||||
|
||||
export class GSD {
|
||||
|
||||
70
sdk/src/planning-journal.test.ts
Normal file
70
sdk/src/planning-journal.test.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { mkdtemp, readFile } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { PlanningJournal } from './planning-journal.js';
|
||||
|
||||
describe('PlanningJournal', () => {
  it('appends events with monotonic source sequence numbers', async () => {
    // Fresh temp dir per test keeps each journal isolated.
    const dir = await mkdtemp(join(tmpdir(), 'gsd-journal-'));
    const journal = new PlanningJournal({ projectDir: dir, sourceId: 'daemon-1', runId: 'run-1' });

    const first = await journal.append({
      projectId: 'project-1',
      type: 'plan.next',
      actor: { type: 'agent', id: 'agent-1' },
      payload: { itemId: 'item-1' },
      idempotencyKey: 'next-1',
    });
    const second = await journal.append({
      projectId: 'project-1',
      type: 'plan.done',
      actor: { type: 'agent', id: 'agent-1' },
      payload: { itemId: 'item-1' },
      idempotencyKey: 'done-1',
    });

    // seq is per-source and 1-based: two appends from one source → 1 then 2.
    expect(first.source.seq).toBe(1);
    expect(second.source.seq).toBe(2);
    expect(await journal.readAll()).toHaveLength(2);
  });

  it('replays an existing event for duplicate idempotency keys', async () => {
    const dir = await mkdtemp(join(tmpdir(), 'gsd-journal-'));
    const journal = new PlanningJournal({ projectDir: dir, sourceId: 'sdk-1', runId: 'run-1' });

    const first = await journal.append({
      projectId: 'project-1',
      type: 'plan.checkpoint',
      actor: { type: 'agent', id: 'agent-1' },
      payload: { summary: 'Progress' },
      idempotencyKey: 'checkpoint-1',
    });
    // Identical request with the same idempotency key must replay, not
    // duplicate: same event id and a single journal line afterwards.
    const replay = await journal.append({
      projectId: 'project-1',
      type: 'plan.checkpoint',
      actor: { type: 'agent', id: 'agent-1' },
      payload: { summary: 'Progress' },
      idempotencyKey: 'checkpoint-1',
    });

    expect(replay.id).toBe(first.id);
    expect(await journal.readAll()).toHaveLength(1);
  });

  it('writes jsonl under .gsd/journal.jsonl', async () => {
    const dir = await mkdtemp(join(tmpdir(), 'gsd-journal-'));
    const journal = new PlanningJournal({ projectDir: dir, sourceId: 'sdk-1', runId: 'run-1' });
    await journal.append({
      projectId: 'project-1',
      type: 'plan.status',
      actor: { type: 'agent', id: 'agent-1' },
      payload: {},
      idempotencyKey: 'status-1',
    });

    // One append → one JSONL line, versioned with schemaVersion 1.
    const raw = await readFile(join(dir, '.gsd', 'journal.jsonl'), 'utf8');
    expect(raw.trim().split('\n')).toHaveLength(1);
    expect(JSON.parse(raw).schemaVersion).toBe(1);
  });
});
|
||||
153
sdk/src/planning-journal.ts
Normal file
153
sdk/src/planning-journal.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import { appendFile, mkdir, readFile, rename, writeFile } from 'node:fs/promises';
|
||||
import { createHash, randomUUID } from 'node:crypto';
|
||||
import { join } from 'node:path';
|
||||
|
||||
/** Identity of whoever caused a planning event. */
export type PlanningEventActor = {
  type: 'human' | 'agent' | 'runtime' | 'verifier' | 'system';
  id: string;
  role?: string;
  sessionId?: string;
  taskId?: string;
};

/**
 * One durable planning-journal record (schemaVersion 1), serialized as one
 * JSON object per line in `.gsd/journal.jsonl`.
 */
export type PlanningEvent = {
  id: string;
  schemaVersion: 1;
  projectionVersion: number;
  projectId: string;
  // seq is per-source: PlanningJournal.append numbers events from a given
  // source.id starting at 1.
  source: { id: string; kind: 'sdk' | 'daemon' | 'cloud' | 'import'; seq: number; cursor?: string };
  runId: string;
  workstreamId?: string;
  planId?: string;
  itemId?: string;
  actor: PlanningEventActor;
  authority: 'local' | 'cloud' | 'human_approved' | 'system';
  type: string;
  idempotencyKey: string;
  causationId?: string;
  // ISO-8601 timestamp assigned at append time.
  occurredAt: string;
  payload: Record<string, unknown>;
  evidenceIds: string[];
  parentEventIds: string[];
  trace: Record<string, unknown>;
  // SHA-256 over the request's identifying fields; used to reject an
  // idempotency key that is reused with different content.
  requestHash: string;
};

/** Caller-supplied portion of an event; PlanningJournal fills in the rest. */
export type PlanningJournalAppendInput = {
  projectId: string;
  type: string;
  actor: PlanningEventActor;
  payload: Record<string, unknown>;
  idempotencyKey: string;
  planId?: string;
  itemId?: string;
  workstreamId?: string;
  evidenceIds?: string[];
  parentEventIds?: string[];
  causationId?: string;
  trace?: Record<string, unknown>;
};
|
||||
|
||||
export class PlanningJournal {
|
||||
private readonly path: string;
|
||||
|
||||
constructor(
|
||||
private readonly options: {
|
||||
projectDir: string;
|
||||
sourceId: string;
|
||||
runId: string;
|
||||
sourceKind?: 'sdk' | 'daemon' | 'cloud' | 'import';
|
||||
projectionVersion?: number;
|
||||
},
|
||||
) {
|
||||
this.path = join(options.projectDir, '.gsd', 'journal.jsonl');
|
||||
}
|
||||
|
||||
async append(input: PlanningJournalAppendInput): Promise<PlanningEvent> {
|
||||
const existing = await this.findByIdempotency(input.idempotencyKey);
|
||||
const requestHash = hashRequest(input);
|
||||
if (existing) {
|
||||
if (existing.requestHash !== requestHash) {
|
||||
throw new Error(`conflicting idempotency key: ${input.idempotencyKey}`);
|
||||
}
|
||||
return existing;
|
||||
}
|
||||
|
||||
const events = await this.readAll();
|
||||
const event: PlanningEvent = {
|
||||
id: randomUUID(),
|
||||
schemaVersion: 1,
|
||||
projectionVersion: this.options.projectionVersion ?? 1,
|
||||
projectId: input.projectId,
|
||||
source: {
|
||||
id: this.options.sourceId,
|
||||
kind: this.options.sourceKind ?? 'sdk',
|
||||
seq: events.filter((candidate) => candidate.source.id === this.options.sourceId).length + 1,
|
||||
},
|
||||
runId: this.options.runId,
|
||||
workstreamId: input.workstreamId,
|
||||
planId: input.planId,
|
||||
itemId: input.itemId,
|
||||
actor: input.actor,
|
||||
authority: 'local',
|
||||
type: input.type,
|
||||
idempotencyKey: input.idempotencyKey,
|
||||
causationId: input.causationId,
|
||||
occurredAt: new Date().toISOString(),
|
||||
payload: input.payload,
|
||||
evidenceIds: input.evidenceIds ?? [],
|
||||
parentEventIds: input.parentEventIds ?? [],
|
||||
trace: input.trace ?? {},
|
||||
requestHash,
|
||||
};
|
||||
|
||||
await mkdir(join(this.options.projectDir, '.gsd'), { recursive: true });
|
||||
await appendFile(this.path, `${JSON.stringify(event)}\n`, 'utf8');
|
||||
return event;
|
||||
}
|
||||
|
||||
async readAll(): Promise<PlanningEvent[]> {
|
||||
let raw = '';
|
||||
try {
|
||||
raw = await readFile(this.path, 'utf8');
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
return raw
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.map((line) => JSON.parse(line) as PlanningEvent);
|
||||
}
|
||||
|
||||
async compact(events: PlanningEvent[]): Promise<void> {
|
||||
await mkdir(join(this.options.projectDir, '.gsd'), { recursive: true });
|
||||
const tmp = `${this.path}.tmp`;
|
||||
await writeFile(
|
||||
tmp,
|
||||
events.map((event) => JSON.stringify(event)).join('\n') + (events.length ? '\n' : ''),
|
||||
'utf8',
|
||||
);
|
||||
await rename(tmp, this.path);
|
||||
}
|
||||
|
||||
private async findByIdempotency(idempotencyKey: string): Promise<PlanningEvent | null> {
|
||||
const events = await this.readAll();
|
||||
return events.find((event) => event.idempotencyKey === idempotencyKey) ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
function hashRequest(input: PlanningJournalAppendInput): string {
|
||||
return createHash('sha256')
|
||||
.update(
|
||||
JSON.stringify({
|
||||
projectId: input.projectId,
|
||||
type: input.type,
|
||||
payload: input.payload,
|
||||
planId: input.planId,
|
||||
itemId: input.itemId,
|
||||
actor: input.actor,
|
||||
}),
|
||||
)
|
||||
.digest('hex');
|
||||
}
|
||||
29
sdk/src/planning-runtime.test.ts
Normal file
29
sdk/src/planning-runtime.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { mkdtemp } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { PlanningRuntime } from './planning-runtime.js';
|
||||
|
||||
describe('PlanningRuntime', () => {
  it('records intent events through the durable journal', async () => {
    // Fresh temp dir so the runtime's journal starts empty.
    const dir = await mkdtemp(join(tmpdir(), 'gsd-runtime-'));
    const runtime = new PlanningRuntime({
      projectDir: dir,
      projectId: 'project-1',
      runId: 'run-1',
      sourceId: 'sdk-1',
      actor: { type: 'agent', id: 'agent-1', role: 'executor' },
    });

    await runtime.status({ idempotencyKey: 'status-1' });
    await runtime.next({ idempotencyKey: 'next-1', createPlan: { title: 'Plan', items: [{ title: 'Item' }] } });
    await runtime.checkpoint({ idempotencyKey: 'checkpoint-1', summary: 'Progress' });

    // Each runtime call should append exactly one event, in call order.
    const events = await runtime.journal.readAll();
    expect(events.map((event) => event.type)).toEqual([
      'plan.status',
      'plan.next',
      'plan.checkpoint',
    ]);
  });
});
|
||||
100
sdk/src/planning-runtime.ts
Normal file
100
sdk/src/planning-runtime.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { PlanningJournal, type PlanningEventActor } from './planning-journal.js';
|
||||
|
||||
/** Construction options for PlanningRuntime; identity stamped on every event. */
type RuntimeOptions = {
  projectDir: string;
  projectId: string;
  runId: string;
  sourceId: string;
  actor: PlanningEventActor;
};

/** Routing metadata shared by every runtime call. */
type RuntimeMeta = {
  idempotencyKey: string;
  planId?: string;
  itemId?: string;
};

/** Input for `next()`: pick an existing item and/or create a plan inline. */
type NextInput = RuntimeMeta & {
  selector?: { itemId?: string; titleIncludes?: string };
  createPlan?: { title: string; items: Array<{ title: string; description?: string; dependsOn?: string[] }> };
};

/** Input for `checkpoint()`: mid-work progress details, all optional. */
type CheckpointInput = RuntimeMeta & {
  summary?: string;
  subTasks?: Array<{ id?: string; text: string }>;
  agentCriteria?: Array<{ id?: string; text: string }>;
  criteriaMet?: string[];
  blocked?: { reason: string; nextAction?: string };
};

/** Input for `done()`; evidence policy and advance get defaults in `done()`. */
type DoneInput = RuntimeMeta & {
  summary: string;
  blockers?: string[];
  criteriaMet?: string[];
  evidenceRefs?: string[];
  evidencePolicy?: 'auto' | 'explicit' | 'waive';
  evidenceWaiverReason?: string;
  advance?: boolean;
};
|
||||
|
||||
export class PlanningRuntime {
|
||||
readonly journal: PlanningJournal;
|
||||
|
||||
constructor(private readonly options: RuntimeOptions) {
|
||||
this.journal = new PlanningJournal({
|
||||
projectDir: options.projectDir,
|
||||
sourceId: options.sourceId,
|
||||
runId: options.runId,
|
||||
sourceKind: 'sdk',
|
||||
});
|
||||
}
|
||||
|
||||
status(input: RuntimeMeta) {
|
||||
return this.record('plan.status', input, {});
|
||||
}
|
||||
|
||||
next(input: NextInput) {
|
||||
return this.record('plan.next', input, {
|
||||
selector: input.selector,
|
||||
createPlan: input.createPlan,
|
||||
});
|
||||
}
|
||||
|
||||
checkpoint(input: CheckpointInput) {
|
||||
return this.record('plan.checkpoint', input, {
|
||||
summary: input.summary,
|
||||
subTasks: input.subTasks,
|
||||
agentCriteria: input.agentCriteria,
|
||||
criteriaMet: input.criteriaMet,
|
||||
blocked: input.blocked,
|
||||
});
|
||||
}
|
||||
|
||||
sync(input: RuntimeMeta & { cursor?: string }) {
|
||||
return this.record('plan.sync', input, { cursor: input.cursor });
|
||||
}
|
||||
|
||||
done(input: DoneInput) {
|
||||
return this.record('plan.done', input, {
|
||||
summary: input.summary,
|
||||
blockers: input.blockers,
|
||||
criteriaMet: input.criteriaMet,
|
||||
evidenceRefs: input.evidenceRefs,
|
||||
evidencePolicy: input.evidencePolicy ?? 'auto',
|
||||
evidenceWaiverReason: input.evidenceWaiverReason,
|
||||
advance: input.advance ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
private record(type: string, input: RuntimeMeta, payload: Record<string, unknown>) {
|
||||
return this.journal.append({
|
||||
projectId: this.options.projectId,
|
||||
type,
|
||||
actor: this.options.actor,
|
||||
planId: input.planId,
|
||||
itemId: input.itemId,
|
||||
idempotencyKey: input.idempotencyKey,
|
||||
payload,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -10,6 +10,20 @@ This document records contracts for the typed query layer consumed by `gsd-sdk q
|
||||
- CJS `**summary-extract**` → SDK `**summary.extract**` / `**summary extract**` / `**history-digest**` (see `index.ts`).
|
||||
- CJS top-level `**scaffold <type> ...**` → SDK `**phase.scaffold**` / `**phase scaffold**` with the scaffold type as the first argument (no separate `scaffold` alias on the registry).
|
||||
|
||||
### Manifest-backed family ownership
|
||||
|
||||
These families are sourced from `command-manifest.*.ts` files and expanded into generated alias artifacts (`command-aliases.generated.ts` + CJS mirror):
|
||||
|
||||
- `state.*` → `command-manifest.state.ts`
|
||||
- `verify.*` → `command-manifest.verify.ts`
|
||||
- `init.*` → `command-manifest.init.ts`
|
||||
- `phase.*` → `command-manifest.phase.ts`
|
||||
- `phases.*` → `command-manifest.phases.ts`
|
||||
- `validate.*` → `command-manifest.validate.ts`
|
||||
- `roadmap.*` → `command-manifest.roadmap.ts`
|
||||
|
||||
CJS routing seams mirror these families with thin adapters (`state/verify/init/phase/phases/validate/roadmap-command-router.cjs`) so `gsd-tools.cjs` stays orchestration-only.
|
||||
|
||||
## `gsd-sdk query` routing
|
||||
|
||||
1. **`normalizeQueryCommand()`** (`normalize-query-command.ts`) — maps the first argv tokens to the same **command + subcommand** patterns as `gsd-tools` `runCommand()` where needed (e.g. `state json` → `state.json`, `init execute-phase 9` → `init.execute-phase` with args `['9']`, `scaffold …` → `phase.scaffold`). Re-exported from **`@gsd-build/sdk`** and **`createRegistry`’s module** (`sdk/src/query/index.ts`) so programmatic callers can mirror CLI tokenization without importing a deep path.
|
||||
|
||||
122
sdk/src/query/command-aliases.generated.ts
Normal file
122
sdk/src/query/command-aliases.generated.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
/**
 * GENERATED FILE — command alias expansion for state.*, verify.*, init.*, phase.*, phases.*, validate.*, and roadmap.* pilots.
 * Source: sdk/src/query/command-manifest.{state,verify,init,phase,phases,validate,roadmap}.ts
 *
 * Do not edit by hand: this artifact is regenerated (presumably via
 * sdk/scripts/gen-command-aliases.ts — see the pre-commit alias-drift guard,
 * which runs `npm run check:alias-drift` when it or the manifests change).
 */

/** One command's registry metadata: canonical name, spaced aliases, routing subcommand, and whether it mutates state. */
export interface FamilyCommandAlias {
  canonical: string;
  aliases: string[];
  subcommand: string;
  mutation: boolean;
}

export const STATE_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'state.load', aliases: [], subcommand: 'load', mutation: false },
  { canonical: 'state.json', aliases: ['state json'], subcommand: 'json', mutation: false },
  { canonical: 'state.get', aliases: ['state get'], subcommand: 'get', mutation: false },
  { canonical: 'state.update', aliases: ['state update'], subcommand: 'update', mutation: true },
  { canonical: 'state.patch', aliases: ['state patch'], subcommand: 'patch', mutation: true },
  { canonical: 'state.begin-phase', aliases: ['state begin-phase'], subcommand: 'begin-phase', mutation: true },
  { canonical: 'state.advance-plan', aliases: ['state advance-plan'], subcommand: 'advance-plan', mutation: true },
  { canonical: 'state.record-metric', aliases: ['state record-metric'], subcommand: 'record-metric', mutation: true },
  { canonical: 'state.update-progress', aliases: ['state update-progress'], subcommand: 'update-progress', mutation: true },
  { canonical: 'state.add-decision', aliases: ['state add-decision'], subcommand: 'add-decision', mutation: true },
  { canonical: 'state.add-blocker', aliases: ['state add-blocker'], subcommand: 'add-blocker', mutation: true },
  { canonical: 'state.resolve-blocker', aliases: ['state resolve-blocker'], subcommand: 'resolve-blocker', mutation: true },
  { canonical: 'state.record-session', aliases: ['state record-session'], subcommand: 'record-session', mutation: true },
  { canonical: 'state.signal-waiting', aliases: ['state signal-waiting'], subcommand: 'signal-waiting', mutation: true },
  { canonical: 'state.signal-resume', aliases: ['state signal-resume'], subcommand: 'signal-resume', mutation: true },
  { canonical: 'state.planned-phase', aliases: ['state planned-phase'], subcommand: 'planned-phase', mutation: true },
  { canonical: 'state.validate', aliases: ['state validate'], subcommand: 'validate', mutation: false },
  { canonical: 'state.sync', aliases: ['state sync'], subcommand: 'sync', mutation: true },
  { canonical: 'state.prune', aliases: ['state prune'], subcommand: 'prune', mutation: true },
  { canonical: 'state.milestone-switch', aliases: ['state milestone-switch'], subcommand: 'milestone-switch', mutation: true },
  { canonical: 'state.add-roadmap-evolution', aliases: ['state add-roadmap-evolution'], subcommand: 'add-roadmap-evolution', mutation: true },
] as const;

export const VERIFY_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'verify.plan-structure', aliases: ['verify plan-structure'], subcommand: 'plan-structure', mutation: false },
  { canonical: 'verify.phase-completeness', aliases: ['verify phase-completeness'], subcommand: 'phase-completeness', mutation: false },
  { canonical: 'verify.references', aliases: ['verify references'], subcommand: 'references', mutation: false },
  { canonical: 'verify.commits', aliases: ['verify commits'], subcommand: 'commits', mutation: false },
  { canonical: 'verify.artifacts', aliases: ['verify artifacts'], subcommand: 'artifacts', mutation: false },
  { canonical: 'verify.key-links', aliases: ['verify key-links'], subcommand: 'key-links', mutation: false },
  { canonical: 'verify.schema-drift', aliases: ['verify schema-drift'], subcommand: 'schema-drift', mutation: false },
  { canonical: 'verify.codebase-drift', aliases: ['verify codebase-drift'], subcommand: 'codebase-drift', mutation: false },
] as const;

export const INIT_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'init.execute-phase', aliases: ['init execute-phase'], subcommand: 'execute-phase', mutation: false },
  { canonical: 'init.plan-phase', aliases: ['init plan-phase'], subcommand: 'plan-phase', mutation: false },
  { canonical: 'init.new-project', aliases: ['init new-project'], subcommand: 'new-project', mutation: false },
  { canonical: 'init.new-milestone', aliases: ['init new-milestone'], subcommand: 'new-milestone', mutation: false },
  { canonical: 'init.quick', aliases: ['init quick'], subcommand: 'quick', mutation: false },
  { canonical: 'init.ingest-docs', aliases: ['init ingest-docs'], subcommand: 'ingest-docs', mutation: false },
  { canonical: 'init.resume', aliases: ['init resume'], subcommand: 'resume', mutation: false },
  { canonical: 'init.verify-work', aliases: ['init verify-work'], subcommand: 'verify-work', mutation: false },
  { canonical: 'init.phase-op', aliases: ['init phase-op'], subcommand: 'phase-op', mutation: false },
  { canonical: 'init.todos', aliases: ['init todos'], subcommand: 'todos', mutation: false },
  { canonical: 'init.milestone-op', aliases: ['init milestone-op'], subcommand: 'milestone-op', mutation: false },
  { canonical: 'init.map-codebase', aliases: ['init map-codebase'], subcommand: 'map-codebase', mutation: false },
  { canonical: 'init.progress', aliases: ['init progress'], subcommand: 'progress', mutation: false },
  { canonical: 'init.manager', aliases: ['init manager'], subcommand: 'manager', mutation: false },
  { canonical: 'init.new-workspace', aliases: ['init new-workspace'], subcommand: 'new-workspace', mutation: false },
  { canonical: 'init.list-workspaces', aliases: ['init list-workspaces'], subcommand: 'list-workspaces', mutation: false },
  { canonical: 'init.remove-workspace', aliases: ['init remove-workspace'], subcommand: 'remove-workspace', mutation: false },
] as const;

export const PHASE_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'phase.list-plans', aliases: ['phase list-plans'], subcommand: 'list-plans', mutation: false },
  { canonical: 'phase.list-artifacts', aliases: ['phase list-artifacts'], subcommand: 'list-artifacts', mutation: false },
  { canonical: 'phase.next-decimal', aliases: ['phase next-decimal'], subcommand: 'next-decimal', mutation: false },
  { canonical: 'phase.add', aliases: ['phase add'], subcommand: 'add', mutation: true },
  { canonical: 'phase.add-batch', aliases: ['phase add-batch'], subcommand: 'add-batch', mutation: true },
  { canonical: 'phase.insert', aliases: ['phase insert'], subcommand: 'insert', mutation: true },
  { canonical: 'phase.remove', aliases: ['phase remove'], subcommand: 'remove', mutation: true },
  { canonical: 'phase.complete', aliases: ['phase complete'], subcommand: 'complete', mutation: true },
  { canonical: 'phase.scaffold', aliases: ['phase scaffold'], subcommand: 'scaffold', mutation: true },
] as const;

export const PHASES_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'phases.list', aliases: ['phases list'], subcommand: 'list', mutation: false },
  { canonical: 'phases.clear', aliases: ['phases clear'], subcommand: 'clear', mutation: true },
  { canonical: 'phases.archive', aliases: ['phases archive'], subcommand: 'archive', mutation: true },
] as const;

export const VALIDATE_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'validate.consistency', aliases: ['validate consistency'], subcommand: 'consistency', mutation: false },
  { canonical: 'validate.health', aliases: ['validate health'], subcommand: 'health', mutation: false },
  { canonical: 'validate.agents', aliases: ['validate agents'], subcommand: 'agents', mutation: false },
  { canonical: 'validate.context', aliases: ['validate context'], subcommand: 'context', mutation: false },
] as const;

export const ROADMAP_COMMAND_ALIASES: readonly FamilyCommandAlias[] = [
  { canonical: 'roadmap.analyze', aliases: ['roadmap analyze'], subcommand: 'analyze', mutation: false },
  { canonical: 'roadmap.get-phase', aliases: ['roadmap get-phase'], subcommand: 'get-phase', mutation: false },
  { canonical: 'roadmap.update-plan-progress', aliases: ['roadmap update-plan-progress'], subcommand: 'update-plan-progress', mutation: true },
  { canonical: 'roadmap.annotate-dependencies', aliases: ['roadmap annotate-dependencies'], subcommand: 'annotate-dependencies', mutation: true },
] as const;

// Per-family subcommand sets, used for routing membership checks.
export const STATE_SUBCOMMANDS = new Set<string>(STATE_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const VERIFY_SUBCOMMANDS = new Set<string>(VERIFY_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const INIT_SUBCOMMANDS = new Set<string>(INIT_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const PHASE_SUBCOMMANDS = new Set<string>(PHASE_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const PHASES_SUBCOMMANDS = new Set<string>(PHASES_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const VALIDATE_SUBCOMMANDS = new Set<string>(VALIDATE_COMMAND_ALIASES.map((entry) => entry.subcommand));
export const ROADMAP_SUBCOMMANDS = new Set<string>(ROADMAP_COMMAND_ALIASES.map((entry) => entry.subcommand));

// Mutation-classified command names (canonical plus every spaced alias).
export const STATE_MUTATION_COMMANDS: readonly string[] = STATE_COMMAND_ALIASES
  .filter((entry) => entry.mutation)
  .flatMap((entry) => [entry.canonical, ...entry.aliases]);

export const PHASE_MUTATION_COMMANDS: readonly string[] = PHASE_COMMAND_ALIASES
  .filter((entry) => entry.mutation)
  .flatMap((entry) => [entry.canonical, ...entry.aliases]);

export const PHASES_MUTATION_COMMANDS: readonly string[] = PHASES_COMMAND_ALIASES
  .filter((entry) => entry.mutation)
  .flatMap((entry) => [entry.canonical, ...entry.aliases]);

export const ROADMAP_MUTATION_COMMANDS: readonly string[] = ROADMAP_COMMAND_ALIASES
  .filter((entry) => entry.mutation)
  .flatMap((entry) => [entry.canonical, ...entry.aliases]);
|
||||
24
sdk/src/query/command-manifest.init.ts
Normal file
24
sdk/src/query/command-manifest.init.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
 * Canonical init.* command manifest.
 *
 * Source of truth for the init family seam: generated alias artifacts
 * (command-aliases.generated.ts and its CJS mirror) are expanded from this
 * list, so edits here must be followed by regeneration.
 */
export const INIT_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
  { family: 'init', canonical: 'init.execute-phase', aliases: ['init execute-phase'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.plan-phase', aliases: ['init plan-phase'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.new-project', aliases: ['init new-project'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.new-milestone', aliases: ['init new-milestone'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.quick', aliases: ['init quick'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.ingest-docs', aliases: ['init ingest-docs'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.resume', aliases: ['init resume'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.verify-work', aliases: ['init verify-work'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.phase-op', aliases: ['init phase-op'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.todos', aliases: ['init todos'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.milestone-op', aliases: ['init milestone-op'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.map-codebase', aliases: ['init map-codebase'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.progress', aliases: ['init progress'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.manager', aliases: ['init manager'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.new-workspace', aliases: ['init new-workspace'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.list-workspaces', aliases: ['init list-workspaces'], mutation: false, outputMode: 'json' },
  { family: 'init', canonical: 'init.remove-workspace', aliases: ['init remove-workspace'], mutation: false, outputMode: 'json' },
] as const;
|
||||
16
sdk/src/query/command-manifest.phase.ts
Normal file
16
sdk/src/query/command-manifest.phase.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
 * Canonical phase.* command manifest.
 *
 * Source of truth for the phase family seam: generated alias artifacts are
 * expanded from this list, so edits here must be followed by regeneration.
 */
export const PHASE_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
  { family: 'phase', canonical: 'phase.list-plans', aliases: ['phase list-plans'], mutation: false, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.list-artifacts', aliases: ['phase list-artifacts'], mutation: false, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.next-decimal', aliases: ['phase next-decimal'], mutation: false, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.add', aliases: ['phase add'], mutation: true, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.add-batch', aliases: ['phase add-batch'], mutation: true, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.insert', aliases: ['phase insert'], mutation: true, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.remove', aliases: ['phase remove'], mutation: true, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.complete', aliases: ['phase complete'], mutation: true, outputMode: 'json' },
  { family: 'phase', canonical: 'phase.scaffold', aliases: ['phase scaffold'], mutation: true, outputMode: 'json' },
] as const;
|
||||
11
sdk/src/query/command-manifest.phases.ts
Normal file
11
sdk/src/query/command-manifest.phases.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
 * Canonical phases.* command manifest.
 *
 * Source of truth for the phases family seam; generated alias artifacts are
 * expanded from this list.
 * Note: `phases.archive` is SDK-only; CJS `gsd-tools phases` currently supports list/clear.
 */
export const PHASES_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
  { family: 'phases', canonical: 'phases.list', aliases: ['phases list'], mutation: false, outputMode: 'json' },
  { family: 'phases', canonical: 'phases.clear', aliases: ['phases clear'], mutation: true, outputMode: 'json' },
  { family: 'phases', canonical: 'phases.archive', aliases: ['phases archive'], mutation: true, outputMode: 'json' },
] as const;
|
||||
11
sdk/src/query/command-manifest.roadmap.ts
Normal file
11
sdk/src/query/command-manifest.roadmap.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
 * Canonical roadmap.* command manifest.
 *
 * Source of truth for the roadmap family seam: generated alias artifacts are
 * expanded from this list, so edits here must be followed by regeneration.
 */
export const ROADMAP_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
  { family: 'roadmap', canonical: 'roadmap.analyze', aliases: ['roadmap analyze'], mutation: false, outputMode: 'json' },
  { family: 'roadmap', canonical: 'roadmap.get-phase', aliases: ['roadmap get-phase'], mutation: false, outputMode: 'json' },
  { family: 'roadmap', canonical: 'roadmap.update-plan-progress', aliases: ['roadmap update-plan-progress'], mutation: true, outputMode: 'json' },
  { family: 'roadmap', canonical: 'roadmap.annotate-dependencies', aliases: ['roadmap annotate-dependencies'], mutation: true, outputMode: 'json' },
] as const;
|
||||
31
sdk/src/query/command-manifest.state.ts
Normal file
31
sdk/src/query/command-manifest.state.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
|
||||
* Canonical state.* command manifest.
|
||||
*
|
||||
* Source of truth for the state family seam. Adapters derive registry aliases,
|
||||
* mutation classification, and CJS subcommand routing metadata from this list.
|
||||
*/
|
||||
export const STATE_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
|
||||
{ family: 'state', canonical: 'state.load', aliases: [], mutation: false, outputMode: 'raw' },
|
||||
{ family: 'state', canonical: 'state.json', aliases: ['state json'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.get', aliases: ['state get'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.update', aliases: ['state update'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.patch', aliases: ['state patch'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.begin-phase', aliases: ['state begin-phase'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.advance-plan', aliases: ['state advance-plan'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.record-metric', aliases: ['state record-metric'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.update-progress', aliases: ['state update-progress'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.add-decision', aliases: ['state add-decision'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.add-blocker', aliases: ['state add-blocker'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.resolve-blocker', aliases: ['state resolve-blocker'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.record-session', aliases: ['state record-session'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.signal-waiting', aliases: ['state signal-waiting'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.signal-resume', aliases: ['state signal-resume'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.planned-phase', aliases: ['state planned-phase'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.validate', aliases: ['state validate'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.sync', aliases: ['state sync'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.prune', aliases: ['state prune'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.milestone-switch', aliases: ['state milestone-switch'], mutation: true, outputMode: 'json' },
|
||||
{ family: 'state', canonical: 'state.add-roadmap-evolution', aliases: ['state add-roadmap-evolution'], mutation: true, outputMode: 'json' },
|
||||
] as const;
|
||||
17
sdk/src/query/command-manifest.ts
Normal file
17
sdk/src/query/command-manifest.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { STATE_COMMAND_MANIFEST } from './command-manifest.state.js';
|
||||
import { VERIFY_COMMAND_MANIFEST } from './command-manifest.verify.js';
|
||||
import { INIT_COMMAND_MANIFEST } from './command-manifest.init.js';
|
||||
import { PHASE_COMMAND_MANIFEST } from './command-manifest.phase.js';
|
||||
import { PHASES_COMMAND_MANIFEST } from './command-manifest.phases.js';
|
||||
import { VALIDATE_COMMAND_MANIFEST } from './command-manifest.validate.js';
|
||||
import { ROADMAP_COMMAND_MANIFEST } from './command-manifest.roadmap.js';
|
||||
|
||||
export const COMMAND_MANIFEST = [
|
||||
...STATE_COMMAND_MANIFEST,
|
||||
...VERIFY_COMMAND_MANIFEST,
|
||||
...INIT_COMMAND_MANIFEST,
|
||||
...PHASE_COMMAND_MANIFEST,
|
||||
...PHASES_COMMAND_MANIFEST,
|
||||
...VALIDATE_COMMAND_MANIFEST,
|
||||
...ROADMAP_COMMAND_MANIFEST,
|
||||
] as const;
|
||||
11
sdk/src/query/command-manifest.types.ts
Normal file
11
sdk/src/query/command-manifest.types.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export type CommandFamily = 'state' | 'verify' | 'init' | 'phase' | 'phases' | 'validate' | 'roadmap';
|
||||
|
||||
export type OutputMode = 'json' | 'raw';
|
||||
|
||||
export interface CommandManifestEntry {
|
||||
family: CommandFamily;
|
||||
canonical: string;
|
||||
aliases: string[];
|
||||
mutation: boolean;
|
||||
outputMode: OutputMode;
|
||||
}
|
||||
11
sdk/src/query/command-manifest.validate.ts
Normal file
11
sdk/src/query/command-manifest.validate.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
|
||||
* Canonical validate.* command manifest.
|
||||
*/
|
||||
export const VALIDATE_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
|
||||
{ family: 'validate', canonical: 'validate.consistency', aliases: ['validate consistency'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'validate', canonical: 'validate.health', aliases: ['validate health'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'validate', canonical: 'validate.agents', aliases: ['validate agents'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'validate', canonical: 'validate.context', aliases: ['validate context'], mutation: false, outputMode: 'json' },
|
||||
] as const;
|
||||
15
sdk/src/query/command-manifest.verify.ts
Normal file
15
sdk/src/query/command-manifest.verify.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { CommandManifestEntry } from './command-manifest.types.js';
|
||||
|
||||
/**
|
||||
* Canonical verify.* command manifest.
|
||||
*/
|
||||
export const VERIFY_COMMAND_MANIFEST: readonly CommandManifestEntry[] = [
|
||||
{ family: 'verify', canonical: 'verify.plan-structure', aliases: ['verify plan-structure'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.phase-completeness', aliases: ['verify phase-completeness'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.references', aliases: ['verify references'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.commits', aliases: ['verify commits'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.artifacts', aliases: ['verify artifacts'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.key-links', aliases: ['verify key-links'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.schema-drift', aliases: ['verify schema-drift'], mutation: false, outputMode: 'json' },
|
||||
{ family: 'verify', canonical: 'verify.codebase-drift', aliases: ['verify codebase-drift'], mutation: false, outputMode: 'json' },
|
||||
] as const;
|
||||
118
sdk/src/query/command-seam-coverage.test.ts
Normal file
118
sdk/src/query/command-seam-coverage.test.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
import { createRegistry } from './index.js';
|
||||
import { STATE_COMMAND_MANIFEST } from './command-manifest.state.js';
|
||||
import { VERIFY_COMMAND_MANIFEST } from './command-manifest.verify.js';
|
||||
import { INIT_COMMAND_MANIFEST } from './command-manifest.init.js';
|
||||
import { PHASE_COMMAND_MANIFEST } from './command-manifest.phase.js';
|
||||
import { PHASES_COMMAND_MANIFEST } from './command-manifest.phases.js';
|
||||
import { VALIDATE_COMMAND_MANIFEST } from './command-manifest.validate.js';
|
||||
import { ROADMAP_COMMAND_MANIFEST } from './command-manifest.roadmap.js';
|
||||
import {
|
||||
STATE_COMMAND_ALIASES,
|
||||
VERIFY_COMMAND_ALIASES,
|
||||
INIT_COMMAND_ALIASES,
|
||||
PHASE_COMMAND_ALIASES,
|
||||
PHASES_COMMAND_ALIASES,
|
||||
VALIDATE_COMMAND_ALIASES,
|
||||
ROADMAP_COMMAND_ALIASES,
|
||||
} from './command-aliases.generated.js';
|
||||
|
||||
function subcommandFor(canonical: string, family: 'state' | 'verify' | 'init' | 'phase' | 'phases' | 'validate' | 'roadmap'): string {
|
||||
return canonical.slice(`${family}.`.length);
|
||||
}
|
||||
|
||||
describe('command seam coverage (manifest -> generated -> adapters)', () => {
|
||||
it('state/verify/init/phase/phases/validate/roadmap manifest canonicals are present in generated alias artifacts', () => {
|
||||
const generated = new Map<string, { aliases: string[]; subcommand: string; mutation: boolean }>();
|
||||
for (const entry of [...STATE_COMMAND_ALIASES, ...VERIFY_COMMAND_ALIASES, ...INIT_COMMAND_ALIASES, ...PHASE_COMMAND_ALIASES, ...PHASES_COMMAND_ALIASES, ...VALIDATE_COMMAND_ALIASES, ...ROADMAP_COMMAND_ALIASES]) {
|
||||
generated.set(entry.canonical, { aliases: [...entry.aliases], subcommand: entry.subcommand, mutation: !!entry.mutation });
|
||||
}
|
||||
|
||||
for (const entry of STATE_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'state'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of VERIFY_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'verify'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of INIT_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'init'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of PHASE_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'phase'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of PHASES_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'phases'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of VALIDATE_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'validate'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
|
||||
for (const entry of ROADMAP_COMMAND_MANIFEST) {
|
||||
const g = generated.get(entry.canonical);
|
||||
expect(g, `missing generated canonical ${entry.canonical}`).toBeTruthy();
|
||||
expect(g?.subcommand).toBe(subcommandFor(entry.canonical, 'roadmap'));
|
||||
expect(g?.aliases ?? []).toEqual(entry.aliases);
|
||||
expect(g?.mutation).toBe(entry.mutation);
|
||||
}
|
||||
});
|
||||
|
||||
it('registry has every canonical + alias for migrated families', () => {
|
||||
const registry = createRegistry();
|
||||
for (const entry of [...STATE_COMMAND_ALIASES, ...VERIFY_COMMAND_ALIASES, ...INIT_COMMAND_ALIASES, ...PHASE_COMMAND_ALIASES, ...PHASES_COMMAND_ALIASES, ...VALIDATE_COMMAND_ALIASES, ...ROADMAP_COMMAND_ALIASES]) {
|
||||
expect(registry.has(entry.canonical), `missing registry canonical ${entry.canonical}`).toBe(true);
|
||||
for (const alias of entry.aliases) {
|
||||
expect(registry.has(alias), `missing registry alias ${alias}`).toBe(true);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('CJS seam adapters export expected router functions', () => {
|
||||
const require = createRequire(import.meta.url);
|
||||
const stateRouter = require('../../../get-shit-done/bin/lib/state-command-router.cjs');
|
||||
const verifyRouter = require('../../../get-shit-done/bin/lib/verify-command-router.cjs');
|
||||
const initRouter = require('../../../get-shit-done/bin/lib/init-command-router.cjs');
|
||||
const phaseRouter = require('../../../get-shit-done/bin/lib/phase-command-router.cjs');
|
||||
const phasesRouter = require('../../../get-shit-done/bin/lib/phases-command-router.cjs');
|
||||
const validateRouter = require('../../../get-shit-done/bin/lib/validate-command-router.cjs');
|
||||
const roadmapRouter = require('../../../get-shit-done/bin/lib/roadmap-command-router.cjs');
|
||||
|
||||
expect(typeof stateRouter.routeStateCommand).toBe('function');
|
||||
expect(typeof verifyRouter.routeVerifyCommand).toBe('function');
|
||||
expect(typeof initRouter.routeInitCommand).toBe('function');
|
||||
expect(typeof phaseRouter.routePhaseCommand).toBe('function');
|
||||
expect(typeof phasesRouter.routePhasesCommand).toBe('function');
|
||||
expect(typeof validateRouter.routeValidateCommand).toBe('function');
|
||||
expect(typeof roadmapRouter.routeRoadmapCommand).toBe('function');
|
||||
});
|
||||
});
|
||||
@@ -20,6 +20,19 @@ import { frontmatterGet } from './frontmatter.js';
|
||||
import { configGet, configPath, resolveModel } from './config-query.js';
|
||||
import { stateJson, stateGet, stateSnapshot } from './state.js';
|
||||
import { stateProjectLoad } from './state-project-load.js';
|
||||
import {
|
||||
STATE_COMMAND_ALIASES,
|
||||
STATE_MUTATION_COMMANDS,
|
||||
VERIFY_COMMAND_ALIASES,
|
||||
INIT_COMMAND_ALIASES,
|
||||
PHASE_COMMAND_ALIASES,
|
||||
PHASE_MUTATION_COMMANDS,
|
||||
PHASES_COMMAND_ALIASES,
|
||||
PHASES_MUTATION_COMMANDS,
|
||||
VALIDATE_COMMAND_ALIASES,
|
||||
ROADMAP_COMMAND_ALIASES,
|
||||
ROADMAP_MUTATION_COMMANDS,
|
||||
} from './command-aliases.generated.js';
|
||||
import { findPhase, phasePlanIndex } from './phase.js';
|
||||
import { phaseListPlans, phaseListArtifacts } from './phase-list-queries.js';
|
||||
import { planTaskStructure } from './plan-task-structure.js';
|
||||
@@ -126,28 +139,14 @@ export { normalizeQueryCommand } from './normalize-query-command.js';
|
||||
* (they emit JSON for workflows; agents perform writes).
|
||||
*/
|
||||
export const QUERY_MUTATION_COMMANDS = new Set<string>([
|
||||
'state.update', 'state.patch', 'state.begin-phase', 'state.advance-plan',
|
||||
'state.record-metric', 'state.update-progress', 'state.add-decision',
|
||||
'state.add-blocker', 'state.resolve-blocker', 'state.record-session',
|
||||
'state.planned-phase', 'state planned-phase',
|
||||
'state.signal-waiting', 'state signal-waiting',
|
||||
'state.signal-resume', 'state signal-resume',
|
||||
'state.sync', 'state sync',
|
||||
'state.prune', 'state prune',
|
||||
'state.milestone-switch', 'state milestone-switch',
|
||||
'state.add-roadmap-evolution', 'state add-roadmap-evolution',
|
||||
...STATE_MUTATION_COMMANDS,
|
||||
'frontmatter.set', 'frontmatter.merge', 'frontmatter.validate', 'frontmatter validate',
|
||||
'config-set', 'config-set-model-profile', 'config-new-project', 'config-ensure-section',
|
||||
'commit', 'check-commit', 'commit-to-subrepo',
|
||||
'template.fill', 'template.select', 'template select',
|
||||
'validate.health', 'validate health',
|
||||
'validate.context', 'validate context',
|
||||
'phase.add', 'phase.add-batch', 'phase.insert', 'phase.remove', 'phase.complete',
|
||||
'phase.scaffold', 'phases.clear', 'phases.archive',
|
||||
'phase add', 'phase add-batch', 'phase insert', 'phase remove', 'phase complete',
|
||||
'phase scaffold', 'phases clear', 'phases archive',
|
||||
'roadmap.update-plan-progress', 'roadmap update-plan-progress',
|
||||
'roadmap.annotate-dependencies', 'roadmap annotate-dependencies',
|
||||
...PHASE_MUTATION_COMMANDS,
|
||||
...PHASES_MUTATION_COMMANDS,
|
||||
...ROADMAP_MUTATION_COMMANDS,
|
||||
'requirements.mark-complete', 'requirements mark-complete',
|
||||
'todo.complete', 'todo complete',
|
||||
'milestone.complete', 'milestone complete',
|
||||
@@ -284,22 +283,62 @@ export function createRegistry(
|
||||
registry.register('config-get', configGet);
|
||||
registry.register('config-path', configPath);
|
||||
registry.register('resolve-model', resolveModel);
|
||||
registry.register('state.load', stateProjectLoad);
|
||||
registry.register('state.json', stateJson);
|
||||
registry.register('state.get', stateGet);
|
||||
const stateHandlers: Record<string, QueryHandler> = {
|
||||
'state.load': stateProjectLoad,
|
||||
'state.json': stateJson,
|
||||
'state.get': stateGet,
|
||||
'state.update': stateUpdate,
|
||||
'state.patch': statePatch,
|
||||
'state.begin-phase': stateBeginPhase,
|
||||
'state.advance-plan': stateAdvancePlan,
|
||||
'state.record-metric': stateRecordMetric,
|
||||
'state.update-progress': stateUpdateProgress,
|
||||
'state.add-decision': stateAddDecision,
|
||||
'state.add-blocker': stateAddBlocker,
|
||||
'state.resolve-blocker': stateResolveBlocker,
|
||||
'state.record-session': stateRecordSession,
|
||||
'state.signal-waiting': stateSignalWaiting,
|
||||
'state.signal-resume': stateSignalResume,
|
||||
'state.planned-phase': statePlannedPhase,
|
||||
'state.validate': stateValidate,
|
||||
'state.sync': stateSync,
|
||||
'state.prune': statePrune,
|
||||
'state.milestone-switch': stateMilestoneSwitch,
|
||||
'state.add-roadmap-evolution': stateAddRoadmapEvolution,
|
||||
};
|
||||
|
||||
for (const entry of STATE_COMMAND_ALIASES) {
|
||||
const handler = stateHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
registry.register('state-snapshot', stateSnapshot);
|
||||
registry.register('find-phase', findPhase);
|
||||
registry.register('phase-plan-index', phasePlanIndex);
|
||||
registry.register('phase.list-plans', phaseListPlans);
|
||||
registry.register('phase list-plans', phaseListPlans);
|
||||
registry.register('phase.list-artifacts', phaseListArtifacts);
|
||||
registry.register('phase list-artifacts', phaseListArtifacts);
|
||||
registry.register('plan.task-structure', planTaskStructure);
|
||||
registry.register('plan task-structure', planTaskStructure);
|
||||
registry.register('requirements.extract-from-plans', requirementsExtractFromPlans);
|
||||
registry.register('requirements extract-from-plans', requirementsExtractFromPlans);
|
||||
registry.register('roadmap.analyze', roadmapAnalyze);
|
||||
registry.register('roadmap.get-phase', roadmapGetPhase);
|
||||
const roadmapHandlers: Record<string, QueryHandler> = {
|
||||
'roadmap.analyze': roadmapAnalyze,
|
||||
'roadmap.get-phase': roadmapGetPhase,
|
||||
'roadmap.update-plan-progress': roadmapUpdatePlanProgress,
|
||||
'roadmap.annotate-dependencies': roadmapAnnotateDependencies,
|
||||
};
|
||||
|
||||
for (const entry of ROADMAP_COMMAND_ALIASES) {
|
||||
const handler = roadmapHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
registry.register('progress', progressJson);
|
||||
registry.register('progress.json', progressJson);
|
||||
|
||||
@@ -309,32 +348,6 @@ export function createRegistry(
|
||||
registry.register('frontmatter.validate', frontmatterValidate);
|
||||
registry.register('frontmatter validate', frontmatterValidate);
|
||||
|
||||
// State mutation handlers
|
||||
registry.register('state.update', stateUpdate);
|
||||
registry.register('state.patch', statePatch);
|
||||
registry.register('state.begin-phase', stateBeginPhase);
|
||||
registry.register('state.advance-plan', stateAdvancePlan);
|
||||
registry.register('state.record-metric', stateRecordMetric);
|
||||
registry.register('state.update-progress', stateUpdateProgress);
|
||||
registry.register('state.add-decision', stateAddDecision);
|
||||
registry.register('state.add-blocker', stateAddBlocker);
|
||||
registry.register('state.resolve-blocker', stateResolveBlocker);
|
||||
registry.register('state.record-session', stateRecordSession);
|
||||
registry.register('state.signal-waiting', stateSignalWaiting);
|
||||
registry.register('state.signal-resume', stateSignalResume);
|
||||
registry.register('state.validate', stateValidate);
|
||||
registry.register('state.sync', stateSync);
|
||||
registry.register('state.prune', statePrune);
|
||||
registry.register('state.milestone-switch', stateMilestoneSwitch);
|
||||
registry.register('state.add-roadmap-evolution', stateAddRoadmapEvolution);
|
||||
registry.register('state milestone-switch', stateMilestoneSwitch);
|
||||
registry.register('state add-roadmap-evolution', stateAddRoadmapEvolution);
|
||||
registry.register('state signal-waiting', stateSignalWaiting);
|
||||
registry.register('state signal-resume', stateSignalResume);
|
||||
registry.register('state validate', stateValidate);
|
||||
registry.register('state sync', stateSync);
|
||||
registry.register('state prune', statePrune);
|
||||
|
||||
// Config mutation handlers
|
||||
registry.register('config-set', configSet);
|
||||
registry.register('config-set-model-profile', configSetModelProfile);
|
||||
@@ -350,19 +363,26 @@ export function createRegistry(
|
||||
registry.register('template.select', templateSelect);
|
||||
registry.register('template select', templateSelect);
|
||||
|
||||
// Verification handlers
|
||||
registry.register('verify.plan-structure', verifyPlanStructure);
|
||||
registry.register('verify plan-structure', verifyPlanStructure);
|
||||
registry.register('verify.phase-completeness', verifyPhaseCompleteness);
|
||||
registry.register('verify phase-completeness', verifyPhaseCompleteness);
|
||||
registry.register('verify.artifacts', verifyArtifacts);
|
||||
registry.register('verify artifacts', verifyArtifacts);
|
||||
registry.register('verify.key-links', verifyKeyLinks);
|
||||
registry.register('verify key-links', verifyKeyLinks);
|
||||
registry.register('verify.commits', verifyCommits);
|
||||
registry.register('verify commits', verifyCommits);
|
||||
registry.register('verify.references', verifyReferences);
|
||||
registry.register('verify references', verifyReferences);
|
||||
const verifyHandlers: Record<string, QueryHandler> = {
|
||||
'verify.plan-structure': verifyPlanStructure,
|
||||
'verify.phase-completeness': verifyPhaseCompleteness,
|
||||
'verify.references': verifyReferences,
|
||||
'verify.commits': verifyCommits,
|
||||
'verify.artifacts': verifyArtifacts,
|
||||
'verify.key-links': verifyKeyLinks,
|
||||
'verify.schema-drift': verifySchemaDrift,
|
||||
'verify.codebase-drift': verifyCodebaseDrift,
|
||||
};
|
||||
|
||||
for (const entry of VERIFY_COMMAND_ALIASES) {
|
||||
const handler = verifyHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
registry.register('verify-summary', verifySummary);
|
||||
registry.register('verify.summary', verifySummary);
|
||||
registry.register('verify summary', verifySummary);
|
||||
@@ -377,14 +397,21 @@ export function createRegistry(
|
||||
registry.register('check decision-coverage-plan', checkDecisionCoveragePlan);
|
||||
registry.register('check.decision-coverage-verify', checkDecisionCoverageVerify);
|
||||
registry.register('check decision-coverage-verify', checkDecisionCoverageVerify);
|
||||
registry.register('validate.consistency', validateConsistency);
|
||||
registry.register('validate consistency', validateConsistency);
|
||||
registry.register('validate.health', validateHealth);
|
||||
registry.register('validate health', validateHealth);
|
||||
registry.register('validate.agents', validateAgents);
|
||||
registry.register('validate agents', validateAgents);
|
||||
registry.register('validate.context', validateContext);
|
||||
registry.register('validate context', validateContext);
|
||||
const validateHandlers: Record<string, QueryHandler> = {
|
||||
'validate.consistency': validateConsistency,
|
||||
'validate.health': validateHealth,
|
||||
'validate.agents': validateAgents,
|
||||
'validate.context': validateContext,
|
||||
};
|
||||
|
||||
for (const entry of VALIDATE_COMMAND_ALIASES) {
|
||||
const handler = validateHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
// Decision routing (SDK-only — no `gsd-tools.cjs` mirror yet; see QUERY-HANDLERS.md)
|
||||
registry.register('check.config-gates', checkConfigGates);
|
||||
@@ -406,82 +433,75 @@ export function createRegistry(
|
||||
registry.register('check.ship-ready', checkShipReady);
|
||||
registry.register('check ship-ready', checkShipReady);
|
||||
|
||||
// Phase lifecycle handlers
|
||||
registry.register('phase.add', phaseAdd);
|
||||
registry.register('phase.add-batch', phaseAddBatch);
|
||||
registry.register('phase.insert', phaseInsert);
|
||||
registry.register('phase.remove', phaseRemove);
|
||||
registry.register('phase.complete', phaseComplete);
|
||||
registry.register('phase.scaffold', phaseScaffold);
|
||||
registry.register('phases.clear', phasesClear);
|
||||
registry.register('phases.archive', phasesArchive);
|
||||
registry.register('phases.list', phasesList);
|
||||
registry.register('phase.next-decimal', phaseNextDecimal);
|
||||
// Space-delimited aliases for CJS compatibility
|
||||
registry.register('phase add', phaseAdd);
|
||||
registry.register('phase add-batch', phaseAddBatch);
|
||||
registry.register('phase insert', phaseInsert);
|
||||
registry.register('phase remove', phaseRemove);
|
||||
registry.register('phase complete', phaseComplete);
|
||||
registry.register('phase scaffold', phaseScaffold);
|
||||
registry.register('phases clear', phasesClear);
|
||||
registry.register('phases archive', phasesArchive);
|
||||
registry.register('phases list', phasesList);
|
||||
registry.register('phase next-decimal', phaseNextDecimal);
|
||||
const phaseHandlers: Record<string, QueryHandler> = {
|
||||
'phase.list-plans': phaseListPlans,
|
||||
'phase.list-artifacts': phaseListArtifacts,
|
||||
'phase.add': phaseAdd,
|
||||
'phase.add-batch': phaseAddBatch,
|
||||
'phase.insert': phaseInsert,
|
||||
'phase.remove': phaseRemove,
|
||||
'phase.complete': phaseComplete,
|
||||
'phase.scaffold': phaseScaffold,
|
||||
'phase.next-decimal': phaseNextDecimal,
|
||||
};
|
||||
|
||||
// Init composition handlers
|
||||
registry.register('init.execute-phase', initExecutePhase);
|
||||
registry.register('init.plan-phase', initPlanPhase);
|
||||
registry.register('init.new-milestone', initNewMilestone);
|
||||
registry.register('init.quick', initQuick);
|
||||
registry.register('init.resume', initResume);
|
||||
registry.register('init.verify-work', initVerifyWork);
|
||||
registry.register('init.phase-op', initPhaseOp);
|
||||
registry.register('init.todos', initTodos);
|
||||
registry.register('init.milestone-op', initMilestoneOp);
|
||||
registry.register('init.map-codebase', initMapCodebase);
|
||||
registry.register('init.new-workspace', initNewWorkspace);
|
||||
registry.register('init.list-workspaces', initListWorkspaces);
|
||||
registry.register('init.remove-workspace', initRemoveWorkspace);
|
||||
registry.register('init.ingest-docs', initIngestDocs);
|
||||
// Space-delimited aliases for CJS compatibility
|
||||
registry.register('init execute-phase', initExecutePhase);
|
||||
registry.register('init plan-phase', initPlanPhase);
|
||||
registry.register('init new-milestone', initNewMilestone);
|
||||
registry.register('init quick', initQuick);
|
||||
registry.register('init resume', initResume);
|
||||
registry.register('init verify-work', initVerifyWork);
|
||||
registry.register('init phase-op', initPhaseOp);
|
||||
registry.register('init todos', initTodos);
|
||||
registry.register('init milestone-op', initMilestoneOp);
|
||||
registry.register('init map-codebase', initMapCodebase);
|
||||
registry.register('init new-workspace', initNewWorkspace);
|
||||
registry.register('init list-workspaces', initListWorkspaces);
|
||||
registry.register('init remove-workspace', initRemoveWorkspace);
|
||||
registry.register('init ingest-docs', initIngestDocs);
|
||||
for (const entry of PHASE_COMMAND_ALIASES) {
|
||||
const handler = phaseHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
// Complex init handlers
|
||||
registry.register('init.new-project', initNewProject);
|
||||
registry.register('init.progress', initProgress);
|
||||
registry.register('init.manager', initManager);
|
||||
registry.register('init new-project', initNewProject);
|
||||
registry.register('init progress', initProgress);
|
||||
registry.register('init manager', initManager);
|
||||
const phasesHandlers: Record<string, QueryHandler> = {
|
||||
'phases.list': phasesList,
|
||||
'phases.clear': phasesClear,
|
||||
'phases.archive': phasesArchive,
|
||||
};
|
||||
|
||||
for (const entry of PHASES_COMMAND_ALIASES) {
|
||||
const handler = phasesHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
const initHandlers: Record<string, QueryHandler> = {
|
||||
'init.execute-phase': initExecutePhase,
|
||||
'init.plan-phase': initPlanPhase,
|
||||
'init.new-project': initNewProject,
|
||||
'init.new-milestone': initNewMilestone,
|
||||
'init.quick': initQuick,
|
||||
'init.ingest-docs': initIngestDocs,
|
||||
'init.resume': initResume,
|
||||
'init.verify-work': initVerifyWork,
|
||||
'init.phase-op': initPhaseOp,
|
||||
'init.todos': initTodos,
|
||||
'init.milestone-op': initMilestoneOp,
|
||||
'init.map-codebase': initMapCodebase,
|
||||
'init.progress': initProgress,
|
||||
'init.manager': initManager,
|
||||
'init.new-workspace': initNewWorkspace,
|
||||
'init.list-workspaces': initListWorkspaces,
|
||||
'init.remove-workspace': initRemoveWorkspace,
|
||||
};
|
||||
|
||||
for (const entry of INIT_COMMAND_ALIASES) {
|
||||
const handler = initHandlers[entry.canonical];
|
||||
if (!handler) continue;
|
||||
registry.register(entry.canonical, handler);
|
||||
for (const alias of entry.aliases) {
|
||||
registry.register(alias, handler);
|
||||
}
|
||||
}
|
||||
|
||||
// Domain-specific handlers (fully implemented)
|
||||
registry.register('agent-skills', agentSkills);
|
||||
registry.register('roadmap.update-plan-progress', roadmapUpdatePlanProgress);
|
||||
registry.register('roadmap update-plan-progress', roadmapUpdatePlanProgress);
|
||||
registry.register('roadmap.annotate-dependencies', roadmapAnnotateDependencies);
|
||||
registry.register('roadmap annotate-dependencies', roadmapAnnotateDependencies);
|
||||
registry.register('requirements.mark-complete', requirementsMarkComplete);
|
||||
registry.register('requirements mark-complete', requirementsMarkComplete);
|
||||
registry.register('state.planned-phase', statePlannedPhase);
|
||||
registry.register('state planned-phase', statePlannedPhase);
|
||||
registry.register('verify.schema-drift', verifySchemaDrift);
|
||||
registry.register('verify schema-drift', verifySchemaDrift);
|
||||
registry.register('verify.codebase-drift', verifyCodebaseDrift);
|
||||
registry.register('verify codebase-drift', verifyCodebaseDrift);
|
||||
registry.register('todo.match-phase', todoMatchPhase);
|
||||
registry.register('todo match-phase', todoMatchPhase);
|
||||
registry.register('list-todos', listTodos);
|
||||
|
||||
@@ -7,15 +7,28 @@ describe('normalizeQueryCommand', () => {
|
||||
expect(normalizeQueryCommand('state', ['validate'])).toEqual(['state.validate', []]);
|
||||
});
|
||||
|
||||
it('merges verify known subcommands only', () => {
|
||||
expect(normalizeQueryCommand('verify', ['plan-structure', 'x.md'])).toEqual(['verify.plan-structure', ['x.md']]);
|
||||
expect(normalizeQueryCommand('verify', ['unknown-op'])).toEqual(['verify', ['unknown-op']]);
|
||||
});
|
||||
|
||||
it('maps bare state to state.load', () => {
|
||||
expect(normalizeQueryCommand('state', [])).toEqual(['state.load', []]);
|
||||
});
|
||||
|
||||
it('does not merge unknown state subcommands', () => {
|
||||
expect(normalizeQueryCommand('state', ['not-a-subcommand'])).toEqual(['state', ['not-a-subcommand']]);
|
||||
});
|
||||
|
||||
it('merges init workflows', () => {
|
||||
expect(normalizeQueryCommand('init', ['execute-phase', '9'])).toEqual(['init.execute-phase', ['9']]);
|
||||
expect(normalizeQueryCommand('init', ['new-project'])).toEqual(['init.new-project', []]);
|
||||
});
|
||||
|
||||
it('does not merge unknown init subcommands', () => {
|
||||
expect(normalizeQueryCommand('init', ['made-up-init-op', 'x'])).toEqual(['init', ['made-up-init-op', 'x']]);
|
||||
});
|
||||
|
||||
it('maps scaffold to phase.scaffold', () => {
|
||||
expect(normalizeQueryCommand('scaffold', ['phase-dir', '--phase', '1'])).toEqual([
|
||||
'phase.scaffold',
|
||||
@@ -33,7 +46,7 @@ describe('normalizeQueryCommand', () => {
|
||||
expect(normalizeQueryCommand('generate-slug', ['Hello'])).toEqual(['generate-slug', ['Hello']]);
|
||||
});
|
||||
|
||||
it('merges phase add-batch for future handler', () => {
|
||||
it('merges check/route helper commands', () => {
|
||||
expect(normalizeQueryCommand('check', ['config-gates', 'plan-phase'])).toEqual([
|
||||
'check.config-gates',
|
||||
['plan-phase'],
|
||||
@@ -41,10 +54,49 @@ describe('normalizeQueryCommand', () => {
|
||||
expect(normalizeQueryCommand('check', ['phase-ready', '3'])).toEqual(['check.phase-ready', ['3']]);
|
||||
expect(normalizeQueryCommand('check', ['auto-mode'])).toEqual(['check.auto-mode', []]);
|
||||
expect(normalizeQueryCommand('route', ['next-action'])).toEqual(['route.next-action', []]);
|
||||
});
|
||||
|
||||
it('merges known phase subcommands and preserves unknown ones', () => {
|
||||
expect(normalizeQueryCommand('phase', ['add-batch', '--descriptions', '[]'])).toEqual([
|
||||
'phase.add-batch',
|
||||
['--descriptions', '[]'],
|
||||
]);
|
||||
expect(normalizeQueryCommand('phase', ['made-up-phase-op', 'x'])).toEqual([
|
||||
'phase',
|
||||
['made-up-phase-op', 'x'],
|
||||
]);
|
||||
});
|
||||
|
||||
it('merges known phases subcommands and preserves unknown ones', () => {
|
||||
expect(normalizeQueryCommand('phases', ['clear', 'v1.0'])).toEqual([
|
||||
'phases.clear',
|
||||
['v1.0'],
|
||||
]);
|
||||
expect(normalizeQueryCommand('phases', ['made-up-phases-op', 'x'])).toEqual([
|
||||
'phases',
|
||||
['made-up-phases-op', 'x'],
|
||||
]);
|
||||
});
|
||||
|
||||
it('merges known validate subcommands and preserves unknown ones', () => {
|
||||
expect(normalizeQueryCommand('validate', ['consistency'])).toEqual([
|
||||
'validate.consistency',
|
||||
[],
|
||||
]);
|
||||
expect(normalizeQueryCommand('validate', ['made-up-validate-op', 'x'])).toEqual([
|
||||
'validate',
|
||||
['made-up-validate-op', 'x'],
|
||||
]);
|
||||
});
|
||||
|
||||
it('merges known roadmap subcommands and preserves unknown ones', () => {
|
||||
expect(normalizeQueryCommand('roadmap', ['analyze'])).toEqual([
|
||||
'roadmap.analyze',
|
||||
[],
|
||||
]);
|
||||
expect(normalizeQueryCommand('roadmap', ['made-up-roadmap-op', 'x'])).toEqual([
|
||||
'roadmap',
|
||||
['made-up-roadmap-op', 'x'],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,16 +7,23 @@
|
||||
* under `runCommand()` so two-token (and longer) invocations resolve to dotted registry names.
|
||||
*/
|
||||
|
||||
import {
|
||||
STATE_SUBCOMMANDS,
|
||||
VERIFY_SUBCOMMANDS,
|
||||
INIT_SUBCOMMANDS,
|
||||
PHASE_SUBCOMMANDS,
|
||||
PHASES_SUBCOMMANDS,
|
||||
VALIDATE_SUBCOMMANDS,
|
||||
ROADMAP_SUBCOMMANDS,
|
||||
} from './command-aliases.generated.js';
|
||||
|
||||
const MERGE_FIRST_WITH_SUBCOMMAND = new Set<string>([
|
||||
'state',
|
||||
'template',
|
||||
'frontmatter',
|
||||
'verify',
|
||||
'phase',
|
||||
'phases',
|
||||
'roadmap',
|
||||
'requirements',
|
||||
'validate',
|
||||
'init',
|
||||
'workstream',
|
||||
'intel',
|
||||
@@ -43,6 +50,62 @@ export function normalizeQueryCommand(command: string, args: string[]): [string,
|
||||
return ['state.load', []];
|
||||
}
|
||||
|
||||
if (command === 'state' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (STATE_SUBCOMMANDS.has(sub)) {
|
||||
return [`state.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'verify' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (VERIFY_SUBCOMMANDS.has(sub)) {
|
||||
return [`verify.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'init' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (INIT_SUBCOMMANDS.has(sub)) {
|
||||
return [`init.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'phase' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (PHASE_SUBCOMMANDS.has(sub)) {
|
||||
return [`phase.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'phases' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (PHASES_SUBCOMMANDS.has(sub)) {
|
||||
return [`phases.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'validate' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (VALIDATE_SUBCOMMANDS.has(sub)) {
|
||||
return [`validate.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (command === 'roadmap' && args.length > 0) {
|
||||
const sub = args[0];
|
||||
if (ROADMAP_SUBCOMMANDS.has(sub)) {
|
||||
return [`roadmap.${sub}`, args.slice(1)];
|
||||
}
|
||||
return [command, args];
|
||||
}
|
||||
|
||||
if (MERGE_FIRST_WITH_SUBCOMMAND.has(command) && args.length > 0) {
|
||||
const sub = args[0];
|
||||
return [`${command}.${sub}`, args.slice(1)];
|
||||
|
||||
@@ -14,6 +14,7 @@ const path = require('path');
|
||||
|
||||
const REPO_ROOT = path.join(__dirname, '..');
|
||||
const REGISTRY_FILE = path.join(REPO_ROOT, 'sdk', 'src', 'query', 'index.ts');
|
||||
const COMMAND_ALIASES_FILE = path.join(REPO_ROOT, 'get-shit-done', 'bin', 'lib', 'command-aliases.generated.cjs');
|
||||
|
||||
// Prose tokens that repeatedly appear after `gsd-sdk query` in English
|
||||
// documentation but aren't real command names.
|
||||
@@ -41,9 +42,39 @@ const SKIP_DIRS = new Set(['node_modules', '.git', 'dist', 'build']);
|
||||
function collectRegisteredNames() {
|
||||
const src = fs.readFileSync(REGISTRY_FILE, 'utf8');
|
||||
const names = new Set();
|
||||
|
||||
// Static registrations in index.ts
|
||||
const re = /registry\.register\(\s*['"]([^'"]+)['"]/g;
|
||||
let m;
|
||||
while ((m = re.exec(src)) !== null) names.add(m[1]);
|
||||
|
||||
// Manifest-generated family aliases registered via loop in index.ts.
|
||||
// Keep this in sync with command-manifest-driven routing seams.
|
||||
try {
|
||||
// eslint-disable-next-line global-require, import/no-dynamic-require
|
||||
const aliases = require(COMMAND_ALIASES_FILE);
|
||||
const familyArrays = [
|
||||
aliases.STATE_COMMAND_ALIASES,
|
||||
aliases.VERIFY_COMMAND_ALIASES,
|
||||
aliases.INIT_COMMAND_ALIASES,
|
||||
aliases.PHASE_COMMAND_ALIASES,
|
||||
aliases.PHASES_COMMAND_ALIASES,
|
||||
aliases.VALIDATE_COMMAND_ALIASES,
|
||||
aliases.ROADMAP_COMMAND_ALIASES,
|
||||
];
|
||||
for (const arr of familyArrays) {
|
||||
if (!Array.isArray(arr)) continue;
|
||||
for (const entry of arr) {
|
||||
if (entry?.canonical) names.add(entry.canonical);
|
||||
if (Array.isArray(entry?.aliases)) {
|
||||
for (const alias of entry.aliases) names.add(alias);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// If generated aliases are unavailable, fall back to static extraction only.
|
||||
}
|
||||
|
||||
return names;
|
||||
}
|
||||
|
||||
|
||||
@@ -140,16 +140,17 @@ describe('#1916 lock cleanup on process.exit()', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('core.cjs .planning/.lock is removed after a command exits with an error', () => {
|
||||
// The withPlanningLock in core.cjs also needs exit cleanup.
|
||||
const coreSrc = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'get-shit-done', 'bin', 'lib', 'core.cjs'),
|
||||
test('planning workspace lock owner registers exit cleanup', () => {
|
||||
// withPlanningLock moved from core.cjs to planning-workspace.cjs.
|
||||
// The lock owner must keep module-level process exit cleanup.
|
||||
const workspaceSrc = fs.readFileSync(
|
||||
path.join(__dirname, '..', 'get-shit-done', 'bin', 'lib', 'planning-workspace.cjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
assert.ok(
|
||||
coreSrc.includes("process.on('exit'"),
|
||||
"core.cjs must register process.on('exit', ...) to clean up held planning lock files"
|
||||
workspaceSrc.includes("process.on('exit'"),
|
||||
"planning-workspace.cjs must register process.on('exit', ...) to clean up held planning lock files"
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -222,6 +222,139 @@ describe('phase-plan-index command', () => {
|
||||
assert.deepStrictEqual(output.waves, {}, 'waves should be empty');
|
||||
assert.deepStrictEqual(output.incomplete, [], 'incomplete should be empty');
|
||||
assert.strictEqual(output.has_checkpoints, false, 'no checkpoints');
|
||||
assert.ok(output.warning === undefined, 'truly empty dir must not emit a warning');
|
||||
});
|
||||
|
||||
// #2893 — when the planner produces filenames that don't match the canonical
|
||||
// `{padded_phase}-{NN}-PLAN.md` contract, the executor used to silently see
|
||||
// plan_count: 0 with no signal. Now the response must include a `warning`
|
||||
// field naming every offender, so the user gets an actionable error instead
|
||||
// of "execute-phase blocked, no clue why".
|
||||
test('non-canonical plan filenames surface a warning naming each offender (#2893)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
|
||||
// The reporter's exact symptom: planner wrote `{phase-id}-PLAN-{N}-{slug}.md`.
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-01-foundation.md'), '---\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-02-api.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('phase-plan-index 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.strictEqual(output.plans.length, 0, 'non-canonical files are not silently accepted');
|
||||
assert.ok(typeof output.warning === 'string', 'warning field must be present');
|
||||
assert.ok(output.warning.includes('01-PLAN-01-foundation.md'), 'warning names the first offender');
|
||||
assert.ok(output.warning.includes('01-PLAN-02-api.md'), 'warning names the second offender');
|
||||
assert.ok(
|
||||
output.warning.includes('{padded_phase}-{NN}-PLAN.md'),
|
||||
'warning cites the canonical pattern so user knows what to rename to',
|
||||
);
|
||||
});
|
||||
|
||||
test('canonical plans suppress the warning even alongside derivative files (#2893)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
|
||||
// Canonical plan + the legitimate derivative artifacts the planner emits.
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.md'), '---\nwave: 1\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-PLAN-OUTLINE.md'), '# outline\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.pre-bounce.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('phase-plan-index 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.strictEqual(output.plans.length, 1, 'canonical plan detected');
|
||||
assert.ok(
|
||||
output.warning === undefined,
|
||||
`outline and pre-bounce files must not trigger the warning, got: ${output.warning}`,
|
||||
);
|
||||
});
|
||||
|
||||
// #2893 parity — find-phase reads the same phase directory and applies the
|
||||
// same canonical filter, so it must emit the same warning shape. Without
|
||||
// these tests the two code paths could silently diverge.
|
||||
test('find-phase: non-canonical plan filenames surface the same warning (#2893 parity)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-01-foundation.md'), '---\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-02-api.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('find-phase 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.strictEqual(output.found, true, 'phase directory found');
|
||||
assert.deepStrictEqual(output.plans, [], 'non-canonical files are not silently accepted');
|
||||
assert.ok(typeof output.warning === 'string', 'warning field must be present');
|
||||
assert.ok(output.warning.includes('01-PLAN-01-foundation.md'), 'warning names the first offender');
|
||||
assert.ok(output.warning.includes('01-PLAN-02-api.md'), 'warning names the second offender');
|
||||
assert.ok(
|
||||
output.warning.includes('{padded_phase}-{NN}-PLAN.md'),
|
||||
'warning cites the canonical pattern',
|
||||
);
|
||||
});
|
||||
|
||||
test('find-phase: canonical plans + derivatives suppress the warning (#2893 parity)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.md'), '---\nwave: 1\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-PLAN-OUTLINE.md'), '# outline\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.pre-bounce.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('find-phase 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.deepStrictEqual(output.plans, ['03-01-PLAN.md'], 'canonical plan detected');
|
||||
assert.ok(
|
||||
output.warning === undefined,
|
||||
`outline and pre-bounce files must not trigger the warning, got: ${output.warning}`,
|
||||
);
|
||||
});
|
||||
|
||||
// #2893 parity — `phases list --type plans` aggregates across phase dirs
|
||||
// and prefixes each warning with `${dir}: ` so the user can locate the
|
||||
// offending phase. Test mirrors the find-phase pair but accounts for that
|
||||
// prefix in the assertion.
|
||||
test('phases list --type plans: non-canonical filenames surface a per-dir warning (#2893 parity)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-01-foundation.md'), '---\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '01-PLAN-02-api.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('phases list --type plans --phase 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.deepStrictEqual(output.files, [], 'non-canonical files are not silently accepted');
|
||||
assert.ok(typeof output.warning === 'string', 'warning field must be present');
|
||||
assert.ok(output.warning.includes('03-api:'), 'warning is prefixed with the offending phase dir');
|
||||
assert.ok(output.warning.includes('01-PLAN-01-foundation.md'), 'warning names the first offender');
|
||||
assert.ok(output.warning.includes('01-PLAN-02-api.md'), 'warning names the second offender');
|
||||
assert.ok(
|
||||
output.warning.includes('{padded_phase}-{NN}-PLAN.md'),
|
||||
'warning cites the canonical pattern',
|
||||
);
|
||||
});
|
||||
|
||||
test('phases list --type plans: canonical plans suppress the warning (#2893 parity)', () => {
|
||||
const phaseDir = path.join(tmpDir, '.planning', 'phases', '03-api');
|
||||
fs.mkdirSync(phaseDir, { recursive: true });
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.md'), '---\nwave: 1\n---\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-PLAN-OUTLINE.md'), '# outline\n');
|
||||
fs.writeFileSync(path.join(phaseDir, '03-01-PLAN.pre-bounce.md'), '---\n---\n');
|
||||
|
||||
const result = runGsdTools('phases list --type plans --phase 03', tmpDir);
|
||||
assert.ok(result.success, `Command failed: ${result.error}`);
|
||||
|
||||
const output = JSON.parse(result.output);
|
||||
assert.deepStrictEqual(output.files, ['03-01-PLAN.md'], 'canonical plan detected');
|
||||
assert.ok(
|
||||
output.warning === undefined,
|
||||
`outline and pre-bounce files must not trigger the warning, got: ${output.warning}`,
|
||||
);
|
||||
});
|
||||
|
||||
test('extracts single plan with frontmatter', () => {
|
||||
|
||||
74
tests/phases-command-router.test.cjs
Normal file
74
tests/phases-command-router.test.cjs
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict';
|
||||
|
||||
const { describe, test } = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
|
||||
const { routePhasesCommand } = require('../get-shit-done/bin/lib/phases-command-router.cjs');
|
||||
|
||||
describe('phases-command-router', () => {
|
||||
test('routes phases list with parsed options', () => {
|
||||
const calls = [];
|
||||
const phase = {
|
||||
cmdPhasesList: (cwd, options, raw) => calls.push({ cwd, options, raw }),
|
||||
};
|
||||
|
||||
routePhasesCommand({
|
||||
phase,
|
||||
milestone: {},
|
||||
args: ['phases', 'list', '--type', 'plans', '--phase', '10', '--include-archived'],
|
||||
cwd: '/tmp/proj',
|
||||
raw: true,
|
||||
error: (msg) => {
|
||||
throw new Error(msg);
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(calls.length, 1);
|
||||
assert.deepEqual(calls[0], {
|
||||
cwd: '/tmp/proj',
|
||||
options: { type: 'plans', phase: '10', includeArchived: true },
|
||||
raw: true,
|
||||
});
|
||||
});
|
||||
|
||||
test('routes phases clear with trailing args', () => {
|
||||
const calls = [];
|
||||
const milestone = {
|
||||
cmdPhasesClear: (cwd, raw, trailing) => calls.push({ cwd, raw, trailing }),
|
||||
};
|
||||
|
||||
routePhasesCommand({
|
||||
phase: {},
|
||||
milestone,
|
||||
args: ['phases', 'clear', '--confirm'],
|
||||
cwd: '/tmp/proj',
|
||||
raw: false,
|
||||
error: (msg) => {
|
||||
throw new Error(msg);
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(calls.length, 1);
|
||||
assert.deepEqual(calls[0], {
|
||||
cwd: '/tmp/proj',
|
||||
raw: false,
|
||||
trailing: ['--confirm'],
|
||||
});
|
||||
});
|
||||
|
||||
test('errors on unknown phases subcommand', () => {
|
||||
let message = null;
|
||||
routePhasesCommand({
|
||||
phase: {},
|
||||
milestone: {},
|
||||
args: ['phases', 'archive'],
|
||||
cwd: '/tmp/proj',
|
||||
raw: false,
|
||||
error: (msg) => {
|
||||
message = msg;
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(message, 'Unknown phases subcommand. Available: list, clear');
|
||||
});
|
||||
});
|
||||
167
tests/planning-workspace.test.cjs
Normal file
167
tests/planning-workspace.test.cjs
Normal file
@@ -0,0 +1,167 @@
|
||||
const { test, describe, beforeEach, afterEach } = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const {
|
||||
createPlanningWorkspace,
|
||||
createMemoryPointerAdapter,
|
||||
planningDir,
|
||||
planningPaths,
|
||||
withPlanningLock,
|
||||
getActiveWorkstream,
|
||||
setActiveWorkstream,
|
||||
} = require('../get-shit-done/bin/lib/planning-workspace.cjs');
|
||||
|
||||
const core = require('../get-shit-done/bin/lib/core.cjs');
|
||||
|
||||
describe('planning-workspace: planningDir/planningPaths parity', () => {
|
||||
const cwd = '/fake/repo';
|
||||
let savedProject;
|
||||
let savedWorkstream;
|
||||
|
||||
beforeEach(() => {
|
||||
savedProject = process.env.GSD_PROJECT;
|
||||
savedWorkstream = process.env.GSD_WORKSTREAM;
|
||||
delete process.env.GSD_PROJECT;
|
||||
delete process.env.GSD_WORKSTREAM;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (savedProject !== undefined) process.env.GSD_PROJECT = savedProject;
|
||||
else delete process.env.GSD_PROJECT;
|
||||
if (savedWorkstream !== undefined) process.env.GSD_WORKSTREAM = savedWorkstream;
|
||||
else delete process.env.GSD_WORKSTREAM;
|
||||
});
|
||||
|
||||
test('matches expected path resolution', () => {
|
||||
assert.strictEqual(planningDir(cwd, null, null), path.join(cwd, '.planning'));
|
||||
assert.strictEqual(planningDir(cwd, 'feature-x', null), path.join(cwd, '.planning', 'workstreams', 'feature-x'));
|
||||
assert.strictEqual(planningDir(cwd, 'feature-x', 'my-app'), path.join(cwd, '.planning', 'my-app', 'workstreams', 'feature-x'));
|
||||
|
||||
const paths = planningPaths(cwd, 'feature-x');
|
||||
assert.strictEqual(paths.planning, path.join(cwd, '.planning', 'workstreams', 'feature-x'));
|
||||
assert.strictEqual(paths.state, path.join(cwd, '.planning', 'workstreams', 'feature-x', 'STATE.md'));
|
||||
assert.strictEqual(paths.config, path.join(cwd, '.planning', 'workstreams', 'feature-x', 'config.json'));
|
||||
});
|
||||
|
||||
test('rejects traversal and path separators', () => {
|
||||
assert.throws(() => planningDir(cwd, null, '../../etc'), /invalid path characters/);
|
||||
assert.throws(() => planningDir(cwd, 'foo/bar', null), /invalid path characters/);
|
||||
assert.throws(() => planningDir(cwd, 'foo\\bar', null), /invalid path characters/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('planning-workspace: session adapter precedence', () => {
|
||||
let savedSession;
|
||||
|
||||
beforeEach(() => {
|
||||
savedSession = process.env.GSD_SESSION_KEY;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (savedSession !== undefined) process.env.GSD_SESSION_KEY = savedSession;
|
||||
else delete process.env.GSD_SESSION_KEY;
|
||||
});
|
||||
|
||||
test('uses session adapter over shared adapter when session key exists', () => {
|
||||
process.env.GSD_SESSION_KEY = 'session-123';
|
||||
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-planning-precedence-'));
|
||||
try {
|
||||
fs.mkdirSync(path.join(tmpDir, '.planning', 'workstreams', 'session-ws'), { recursive: true });
|
||||
fs.mkdirSync(path.join(tmpDir, '.planning', 'workstreams', 'shared-ws'), { recursive: true });
|
||||
|
||||
const session = createMemoryPointerAdapter('session-ws');
|
||||
const shared = createMemoryPointerAdapter('shared-ws');
|
||||
const workspace = createPlanningWorkspace(tmpDir, {
|
||||
activeWorkstreamAdapters: { session, shared },
|
||||
});
|
||||
|
||||
assert.strictEqual(workspace.activeWorkstream.get(), 'session-ws');
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('planning-workspace: self-heal behavior', () => {
|
||||
test('clears invalid pointer names and returns null', () => {
|
||||
const adapter = createMemoryPointerAdapter('bad/name');
|
||||
const workspace = createPlanningWorkspace('/fake/repo', {
|
||||
activeWorkstreamAdapter: adapter,
|
||||
});
|
||||
|
||||
assert.strictEqual(workspace.activeWorkstream.get(), null);
|
||||
assert.strictEqual(adapter.read(), null);
|
||||
});
|
||||
|
||||
test('clears stale pointers when workstream directory is gone', () => {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-planning-workspace-'));
|
||||
try {
|
||||
fs.mkdirSync(path.join(tmpDir, '.planning', 'workstreams'), { recursive: true });
|
||||
const adapter = createMemoryPointerAdapter('ghost');
|
||||
const workspace = createPlanningWorkspace(tmpDir, {
|
||||
activeWorkstreamAdapter: adapter,
|
||||
});
|
||||
|
||||
assert.strictEqual(workspace.activeWorkstream.get(), null);
|
||||
assert.strictEqual(adapter.read(), null);
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('planning-workspace: lock seam', () => {
|
||||
test('exports withPlanningLock and acquires/release lock', () => {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-planning-lock-'));
|
||||
try {
|
||||
const result = withPlanningLock(tmpDir, () => 'ok');
|
||||
assert.strictEqual(result, 'ok');
|
||||
assert.ok(!fs.existsSync(path.join(tmpDir, '.planning', '.lock')));
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('core compatibility adapter: planning workspace functions', () => {
|
||||
let savedSession;
|
||||
|
||||
beforeEach(() => {
|
||||
savedSession = process.env.GSD_SESSION_KEY;
|
||||
delete process.env.GSD_SESSION_KEY;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (savedSession !== undefined) process.env.GSD_SESSION_KEY = savedSession;
|
||||
else delete process.env.GSD_SESSION_KEY;
|
||||
});
|
||||
|
||||
test('core and planning-workspace expose matching behavior', () => {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-core-compat-'));
|
||||
try {
|
||||
fs.mkdirSync(path.join(tmpDir, '.planning', 'workstreams', 'alpha'), { recursive: true });
|
||||
|
||||
core.setActiveWorkstream(tmpDir, 'alpha');
|
||||
assert.strictEqual(core.getActiveWorkstream(tmpDir), 'alpha');
|
||||
assert.strictEqual(getActiveWorkstream(tmpDir), 'alpha');
|
||||
|
||||
assert.strictEqual(
|
||||
core.planningDir(tmpDir, 'feature-x', 'my-project'),
|
||||
planningDir(tmpDir, 'feature-x', 'my-project')
|
||||
);
|
||||
assert.deepStrictEqual(
|
||||
core.planningPaths(tmpDir, 'feature-x'),
|
||||
planningPaths(tmpDir, 'feature-x')
|
||||
);
|
||||
|
||||
setActiveWorkstream(tmpDir, null);
|
||||
assert.strictEqual(core.getActiveWorkstream(tmpDir), null);
|
||||
} finally {
|
||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -240,6 +240,59 @@ describe('gap-analysis CLI (#2493)', () => {
|
||||
assert.deepStrictEqual(reqRows, ['REQ-01', 'REQ-02', 'REQ-10']);
|
||||
});
|
||||
|
||||
test('parses non-REQ prefixes and ignores traceability header tokens', () => {
|
||||
const requirementsMd = [
|
||||
'# Requirements',
|
||||
'',
|
||||
'| REQ-ID | Phase | Plan(s) |',
|
||||
'|--------|-------|---------|',
|
||||
'| TST-01 | Phase 01 | TBD |',
|
||||
'| BACK-07 | Phase 01 | TBD |',
|
||||
'',
|
||||
'- [ ] **INSP-04** Inspector requirement.',
|
||||
].join('\n');
|
||||
fs.writeFileSync(path.join(tmpDir, '.planning', 'REQUIREMENTS.md'), `${requirementsMd}\n`);
|
||||
|
||||
writePlan('01', '# Plan\n\nCovers TST-01, BACK-07, and INSP-04.\n');
|
||||
|
||||
const r = runGsdTools(['gap-analysis', '--phase-dir', phaseDir], tmpDir);
|
||||
assert.ok(r.success, r.error);
|
||||
const out = JSON.parse(r.output);
|
||||
|
||||
const reqRows = out.rows.filter(x => x.source === 'REQUIREMENTS.md');
|
||||
const ids = reqRows.map(x => x.item);
|
||||
|
||||
assert.deepStrictEqual(ids, ['BACK-07', 'INSP-04', 'TST-01']);
|
||||
assert.ok(!ids.includes('REQ-ID'), 'traceability header token must not be parsed as a requirement ID');
|
||||
assert.ok(reqRows.every(x => x.status === 'Covered'));
|
||||
});
|
||||
|
||||
test('does not parse requirement-like IDs from non-first table columns', () => {
|
||||
const requirementsMd = [
|
||||
'# Requirements',
|
||||
'',
|
||||
'| REQ-ID | Phase | Plan(s) |',
|
||||
'|--------|-------|---------|',
|
||||
'| TST-01 | Phase 01 | PLAN-01 |',
|
||||
'| BACK-07 | Phase 01 | PLAN-02 |',
|
||||
].join('\n');
|
||||
fs.writeFileSync(path.join(tmpDir, '.planning', 'REQUIREMENTS.md'), `${requirementsMd}\n`);
|
||||
|
||||
writePlan('01', '# Plan\n\nCovers TST-01 and BACK-07 only.\n');
|
||||
|
||||
const r = runGsdTools(['gap-analysis', '--phase-dir', phaseDir], tmpDir);
|
||||
assert.ok(r.success, r.error);
|
||||
const out = JSON.parse(r.output);
|
||||
|
||||
const ids = out.rows
|
||||
.filter(x => x.source === 'REQUIREMENTS.md')
|
||||
.map(x => x.item);
|
||||
|
||||
assert.deepStrictEqual(ids, ['BACK-07', 'TST-01']);
|
||||
assert.ok(!ids.includes('PLAN-01'));
|
||||
assert.ok(!ids.includes('PLAN-02'));
|
||||
});
|
||||
|
||||
test('REQUIREMENTS.md missing → CONTEXT-only run still works', () => {
|
||||
writeContext([{ id: 'D-01', text: 'foo' }]);
|
||||
writePlan('01', '# Plan mentioning D-01\n');
|
||||
|
||||
67
tests/precommit-alias-drift-hook.test.cjs
Normal file
67
tests/precommit-alias-drift-hook.test.cjs
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict';
|
||||
|
||||
const { describe, test } = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
const fs = require('node:fs');
|
||||
const path = require('node:path');
|
||||
const { execFileSync } = require('node:child_process');
|
||||
const { createTempDir, cleanup } = require('./helpers.cjs');
|
||||
|
||||
const ROOT = path.resolve(__dirname, '..');
|
||||
const HOOK_PATH = path.join(ROOT, '.githooks', 'pre-commit');
|
||||
|
||||
function writeExec(filePath, content) {
|
||||
fs.writeFileSync(filePath, content, { mode: 0o755 });
|
||||
}
|
||||
|
||||
describe('.githooks/pre-commit alias drift guard', () => {
|
||||
test('runs npm check when staged files include command-manifest/alias artifacts', (t) => {
|
||||
const tmpDir = createTempDir('gsd-precommit-hook-');
|
||||
t.after(() => cleanup(tmpDir));
|
||||
|
||||
const binDir = path.join(tmpDir, 'bin');
|
||||
fs.mkdirSync(binDir, { recursive: true });
|
||||
|
||||
writeExec(path.join(binDir, 'git'), `#!/usr/bin/env bash\nprintf "%s\\n" "${'sdk/src/query/command-manifest.phase.ts'}"\n`);
|
||||
writeExec(path.join(binDir, 'npm'), `#!/usr/bin/env bash\nprintf "called" > "$GSD_TEST_NPM_MARKER"\n`);
|
||||
|
||||
const marker = path.join(tmpDir, 'npm-called.txt');
|
||||
|
||||
execFileSync('bash', [HOOK_PATH], {
|
||||
cwd: ROOT,
|
||||
env: {
|
||||
...process.env,
|
||||
PATH: `${binDir}:${process.env.PATH}`,
|
||||
GSD_TEST_NPM_MARKER: marker,
|
||||
},
|
||||
stdio: 'pipe',
|
||||
});
|
||||
|
||||
assert.ok(fs.existsSync(marker), 'expected npm run check:alias-drift to be invoked');
|
||||
});
|
||||
|
||||
test('does not run npm check when staged files are unrelated', (t) => {
|
||||
const tmpDir = createTempDir('gsd-precommit-hook-');
|
||||
t.after(() => cleanup(tmpDir));
|
||||
|
||||
const binDir = path.join(tmpDir, 'bin');
|
||||
fs.mkdirSync(binDir, { recursive: true });
|
||||
|
||||
writeExec(path.join(binDir, 'git'), `#!/usr/bin/env bash\nprintf "%s\\n" "README.md"\n`);
|
||||
writeExec(path.join(binDir, 'npm'), `#!/usr/bin/env bash\nprintf "called" > "$GSD_TEST_NPM_MARKER"\n`);
|
||||
|
||||
const marker = path.join(tmpDir, 'npm-called.txt');
|
||||
|
||||
execFileSync('bash', [HOOK_PATH], {
|
||||
cwd: ROOT,
|
||||
env: {
|
||||
...process.env,
|
||||
PATH: `${binDir}:${process.env.PATH}`,
|
||||
GSD_TEST_NPM_MARKER: marker,
|
||||
},
|
||||
stdio: 'pipe',
|
||||
});
|
||||
|
||||
assert.ok(!fs.existsSync(marker), 'expected npm check to be skipped for unrelated staged files');
|
||||
});
|
||||
});
|
||||
90
tests/prepush-enterprise-email-hook.test.cjs
Normal file
90
tests/prepush-enterprise-email-hook.test.cjs
Normal file
@@ -0,0 +1,90 @@
|
||||
'use strict';
|
||||
|
||||
const { describe, test } = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
const fs = require('node:fs');
|
||||
const path = require('node:path');
|
||||
const { execFileSync } = require('node:child_process');
|
||||
const { createTempDir, cleanup } = require('./helpers.cjs');
|
||||
|
||||
const ROOT = path.resolve(__dirname, '..');
|
||||
const HOOK_PATH = path.join(ROOT, '.githooks', 'pre-push');
|
||||
|
||||
// Write `content` to `filePath` and mark the file executable in one call.
function writeExec(filePath, content) {
  const EXECUTABLE_MODE = 0o755;
  fs.writeFileSync(filePath, content, { mode: EXECUTABLE_MODE });
}
|
||||
|
||||
describe('.githooks/pre-push enterprise email guard', () => {
  // stdin payload in the format git feeds the pre-push hook:
  // "<local ref> <local sha> <remote ref> <remote sha>\n"
  const PUSH_LINE = 'refs/heads/pr refs-local-sha refs/heads/pr refs-remote-sha\n';

  test('blocks push when any to-be-pushed commit matches local blocked regex', (t) => {
    const workDir = createTempDir('gsd-prepush-hook-');
    t.after(() => cleanup(workDir));

    const shimDir = path.join(workDir, 'bin');
    fs.mkdirSync(shimDir, { recursive: true });

    // git shim: rev-list yields two commits; `show` returns a personal address
    // for c1 and a blocked-domain address for c2.
    writeExec(path.join(shimDir, 'git'), `#!/usr/bin/env bash
set -euo pipefail
if [[ "$1" == "rev-list" ]]; then
echo "c1"
echo "c2"
exit 0
fi
if [[ "$1" == "show" ]]; then
commit="$(printf '%s\n' "$@" | tail -n 1)"
if [[ "$commit" == "c1" ]]; then
echo "trekkie@nomorestars.com"
else
echo "person@example-corp.com"
fi
exit 0
fi
exit 1
`);

    const hookEnv = {
      ...process.env,
      PATH: `${shimDir}:${process.env.PATH}`,
      GSD_BLOCKED_AUTHOR_REGEX: '@example-corp\\.com$',
    };

    assert.throws(() => {
      execFileSync('bash', [HOOK_PATH], {
        cwd: ROOT,
        env: hookEnv,
        input: PUSH_LINE,
        stdio: 'pipe',
      });
    }, /Push blocked: commit author email matched local blocked regex/);
  });

  test('allows push when to-be-pushed commits are non-enterprise emails', (t) => {
    const workDir = createTempDir('gsd-prepush-hook-');
    t.after(() => cleanup(workDir));

    const shimDir = path.join(workDir, 'bin');
    fs.mkdirSync(shimDir, { recursive: true });

    // git shim: every commit's author is a personal (non-blocked) address.
    writeExec(path.join(shimDir, 'git'), `#!/usr/bin/env bash
set -euo pipefail
if [[ "$1" == "rev-list" ]]; then
echo "c1"
echo "c2"
exit 0
fi
if [[ "$1" == "show" ]]; then
echo "trekkie@nomorestars.com"
exit 0
fi
exit 1
`);

    const hookEnv = {
      ...process.env,
      PATH: `${shimDir}:${process.env.PATH}`,
      GSD_BLOCKED_AUTHOR_REGEX: '@example-corp\\.com$',
    };

    // Must exit 0; execFileSync throws on a non-zero exit status.
    execFileSync('bash', [HOOK_PATH], {
      cwd: ROOT,
      env: hookEnv,
      input: PUSH_LINE,
      stdio: 'pipe',
    });
  });
});
|
||||
76
tests/roadmap-command-router.test.cjs
Normal file
76
tests/roadmap-command-router.test.cjs
Normal file
@@ -0,0 +1,76 @@
|
||||
'use strict';
|
||||
|
||||
const { describe, test } = require('node:test');
|
||||
const assert = require('node:assert/strict');
|
||||
|
||||
const { routeRoadmapCommand } = require('../get-shit-done/bin/lib/roadmap-command-router.cjs');
|
||||
|
||||
describe('roadmap-command-router', () => {
  // Shared error callback that surfaces router errors as thrown exceptions.
  const throwError = (msg) => {
    throw new Error(msg);
  };

  test('routes roadmap analyze', () => {
    const invocations = [];
    const fakeRoadmap = {
      cmdRoadmapAnalyze: (cwd, raw) => invocations.push({ cwd, raw }),
    };

    routeRoadmapCommand({
      roadmap: fakeRoadmap,
      args: ['roadmap', 'analyze'],
      cwd: '/tmp/proj',
      raw: true,
      error: throwError,
    });

    assert.equal(invocations.length, 1);
    assert.deepEqual(invocations[0], { cwd: '/tmp/proj', raw: true });
  });

  test('routes roadmap get-phase and update-plan-progress with phase arg', () => {
    const invocations = [];
    const fakeRoadmap = {
      cmdRoadmapGetPhase: (cwd, phase, raw) => invocations.push({ kind: 'get', cwd, phase, raw }),
      cmdRoadmapUpdatePlanProgress: (cwd, phase, raw) => invocations.push({ kind: 'update', cwd, phase, raw }),
    };

    // Both subcommands take the same shape: a trailing phase argument.
    for (const subcommand of ['get-phase', 'update-plan-progress']) {
      routeRoadmapCommand({
        roadmap: fakeRoadmap,
        args: ['roadmap', subcommand, '10'],
        cwd: '/tmp/proj',
        raw: false,
        error: throwError,
      });
    }

    assert.deepEqual(invocations, [
      { kind: 'get', cwd: '/tmp/proj', phase: '10', raw: false },
      { kind: 'update', cwd: '/tmp/proj', phase: '10', raw: false },
    ]);
  });

  test('errors on unknown roadmap subcommand', () => {
    let captured = null;
    routeRoadmapCommand({
      roadmap: {},
      args: ['roadmap', 'nonsense'],
      cwd: '/tmp/proj',
      raw: false,
      error: (msg) => {
        captured = msg;
      },
    });

    assert.equal(captured, 'Unknown roadmap subcommand. Available: analyze, get-phase, update-plan-progress, annotate-dependencies');
  });
});
|
||||
Reference in New Issue
Block a user