diff --git a/commands/project.md b/commands/project.md
index e34fd55..515c45b 100644
--- a/commands/project.md
+++ b/commands/project.md
@@ -64,185 +64,7 @@ fi
-**Detect existing project state from five signal sources:**
-
-Check five signals to determine what already exists for this project:
-
-```bash
-# Signal checks
-P=false # .planning/PROJECT.md exists
-R=false # .planning/ROADMAP.md exists
-S=false # .planning/STATE.md exists
-M=false # .mgw/project.json exists
-G=0 # GitHub milestone count
-
-[ -f "${REPO_ROOT}/.planning/PROJECT.md" ] && P=true
-[ -f "${REPO_ROOT}/.planning/ROADMAP.md" ] && R=true
-[ -f "${REPO_ROOT}/.planning/STATE.md" ] && S=true
-[ -f "${REPO_ROOT}/.mgw/project.json" ] && M=true
-
-G=$(gh api "repos/${REPO}/milestones" --jq 'length' 2>/dev/null || echo 0)
-```
-
-**Classify into STATE_CLASS:**
-
-| State | P | R | S | M | G | Meaning |
-|---|---|---|---|---|---|---|
-| Fresh | false | false | false | false | 0 | Clean slate — no GSD, no MGW |
-| GSD-Only | true | false | false | false | 0 | PROJECT.md present but no roadmap yet |
-| GSD-Mid-Exec | true | true | true | false | 0 | GSD in progress, MGW not yet linked |
-| Aligned | true | — | — | true | >0 | Both MGW + GitHub consistent with each other |
-| Diverged | — | — | — | true | >0 | MGW + GitHub present but inconsistent |
-| Extend | true | — | — | true | >0 | All milestones in project.json are done |
-
-```bash
-# Classification logic
-STATE_CLASS="Fresh"
-EXTEND_MODE=false
-
-if [ "$M" = "true" ] && [ "$G" -gt 0 ]; then
- # Check if all milestones are complete (Extend detection)
- ALL_COMPLETE=$(python3 -c "
-import json
-p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
-milestones = p.get('milestones', [])
-current = p.get('current_milestone', 1)
-# All complete when current_milestone exceeds array length
-# (milestone.md increments current_milestone after completing each)
-all_done = current > len(milestones) and len(milestones) > 0
-print('true' if all_done else 'false')
-")
-
- if [ "$ALL_COMPLETE" = "true" ]; then
- STATE_CLASS="Extend"
- EXTEND_MODE=true
- EXISTING_MILESTONE_COUNT=$(python3 -c "import json; print(len(json.load(open('${REPO_ROOT}/.mgw/project.json'))['milestones']))")
- EXISTING_PHASE_COUNT=$(python3 -c "import json; print(max((int(k) for k in json.load(open('${REPO_ROOT}/.mgw/project.json')).get('phase_map',{}).keys()), default=0))")
- else
- # M=true, G>0, not all done — check consistency (Aligned vs Diverged)
- GH_MILESTONE_COUNT=$G
- LOCAL_MILESTONE_COUNT=$(python3 -c "import json; print(len(json.load(open('${REPO_ROOT}/.mgw/project.json')).get('milestones', [])))")
-
- # Consistency: milestone counts match and names overlap
- CONSISTENCY_OK=$(python3 -c "
-import json, subprocess, sys
-local = json.load(open('${REPO_ROOT}/.mgw/project.json'))
-local_names = set(m['name'] for m in local.get('milestones', []))
-local_count = len(local_names)
-gh_count = ${GH_MILESTONE_COUNT}
-
-# Count mismatch is a drift signal (allow off-by-one for in-flight)
-if abs(local_count - gh_count) > 1:
- print('false')
- sys.exit(0)
-
-# Name overlap check: at least 50% of local milestone names found on GitHub
-result = subprocess.run(
- ['gh', 'api', 'repos/${REPO}/milestones', '--jq', '[.[].title]'],
- capture_output=True, text=True
-)
-try:
- gh_names = set(json.loads(result.stdout))
- overlap = len(local_names & gh_names)
- print('true' if overlap >= max(1, local_count // 2) else 'false')
-except Exception:
- print('false')
-")
-
- if [ "$CONSISTENCY_OK" = "true" ]; then
- STATE_CLASS="Aligned"
- else
- STATE_CLASS="Diverged"
- fi
- fi
-elif [ "$M" = "false" ] && [ "$G" -eq 0 ]; then
- # No MGW state, no GitHub milestones — GSD signals determine class
- if [ "$P" = "true" ] && [ "$R" = "true" ] && [ "$S" = "true" ]; then
- STATE_CLASS="GSD-Mid-Exec"
- elif [ "$P" = "true" ] && [ "$R" = "true" ]; then
- STATE_CLASS="GSD-Mid-Exec"
- elif [ "$P" = "true" ]; then
- STATE_CLASS="GSD-Only"
- else
- STATE_CLASS="Fresh"
- fi
-fi
-
-echo "State detected: ${STATE_CLASS} (P=${P} R=${R} S=${S} M=${M} G=${G})"
-```
-
-**Route by STATE_CLASS:**
-
-```bash
-case "$STATE_CLASS" in
- "Fresh")
- # Proceed to gather_inputs (standard flow)
- ;;
-
- "GSD-Only"|"GSD-Mid-Exec")
- # GSD artifacts exist but MGW not initialized — delegate to align_from_gsd
- # (proceed to align_from_gsd step)
- ;;
-
- "Aligned")
- # MGW + GitHub consistent — display status and offer extend mode
- TOTAL_ISSUES=$(python3 -c "
-import json
-p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
-print(sum(len(m.get('issues', [])) for m in p.get('milestones', [])))
-")
- echo ""
- echo "Project already initialized and aligned with GitHub."
- echo " Milestones: ${LOCAL_MILESTONE_COUNT} local / ${GH_MILESTONE_COUNT} on GitHub"
- echo " Issues: ${TOTAL_ISSUES} tracked in project.json"
- echo ""
- echo "What would you like to do?"
- echo ""
- echo " 1) Continue with /mgw:milestone (execute next milestone)"
- echo " 2) Add new milestones to this project (extend mode)"
- echo " 3) View full status (/mgw:status)"
- echo ""
- read -p "Choose [1/2/3]: " ALIGNED_CHOICE
- case "$ALIGNED_CHOICE" in
- 2)
- echo ""
- echo "Entering extend mode — new milestones will be added to the existing project."
- EXTEND_MODE=true
- EXISTING_MILESTONE_COUNT=${LOCAL_MILESTONE_COUNT}
- EXISTING_PHASE_COUNT=$(python3 -c "
-import json
-p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
-print(sum(len(m.get('phases', [])) for m in p.get('milestones', [])))
-")
- echo "Phase numbering will continue from phase ${EXISTING_PHASE_COUNT}."
- # Fall through to gather_inputs — do NOT exit
- ;;
- 3)
- echo ""
- echo "Run /mgw:status to view the full project status dashboard."
- exit 0
- ;;
- *)
- echo ""
- echo "Run /mgw:milestone to execute the next milestone."
- exit 0
- ;;
- esac
- ;;
-
- "Diverged")
- # MGW + GitHub inconsistent — delegate to reconcile_drift
- # (proceed to reconcile_drift step)
- ;;
-
- "Extend")
- # All milestones done — entering extend mode
- echo "All ${EXISTING_MILESTONE_COUNT} milestones complete. Entering extend mode."
- echo "Phase numbering will continue from phase ${EXISTING_PHASE_COUNT}."
- # Proceed to gather_inputs in extend mode (EXTEND_MODE=true already set)
- ;;
-esac
-```
+@workflows/detect-state.md
diff --git a/test/github.test.cjs b/test/github.test.cjs
new file mode 100644
index 0000000..c207730
--- /dev/null
+++ b/test/github.test.cjs
@@ -0,0 +1,684 @@
+'use strict';
+
+/**
+ * test/github.test.cjs — Unit tests for lib/github.cjs
+ *
+ * Strategy: module cache invalidation + mock.method on childProcess.execSync.
+ *
+ * Inside each test (via the loadWithMock / loadWithThrow helpers):
+ * 1. Evict lib/github.cjs from require.cache
+ * 2. mock.method(childProcess, 'execSync', () => fixture)
+ * 3. Re-require lib/github.cjs so it captures the mock at bind time
+ *
+ * This avoids real gh CLI calls entirely.
+ */
+
+const { describe, it, beforeEach, mock } = require('node:test');
+const assert = require('node:assert/strict');
+const childProcess = require('child_process');
+const path = require('path');
+
+const GITHUB_MODULE = path.resolve(__dirname, '..', 'lib', 'github.cjs');
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/**
+ * Reload lib/github.cjs with execSync replaced by a fake that returns
+ * `returnValue` (as a Buffer / string — the real execSync returns a string
+ * when encoding is specified, and run() calls .trim() on the result).
+ *
+ * @param {string} returnValue - Raw string the fake execSync should return
+ * @returns {{ github: object, spy: Function }} spy is the mocked execSync; its call log is at spy.mock.calls
+ */
+function loadWithMock(returnValue) {
+ // 1. Evict cached module so the re-require picks up the fresh mock
+ delete require.cache[GITHUB_MODULE];
+
+ // 2. Install mock — mock.method replaces the property on the live object
+ const spy = mock.method(childProcess, 'execSync', (_cmd, _opts) => returnValue);
+
+ // 3. Re-require — github.cjs does `const { execSync } = require('child_process')`
+ // at module scope, so evicting + re-requiring is the only reliable way to
+ // make it bind to the mocked function.
+ const github = require(GITHUB_MODULE);
+
+ return { github, spy };
+}
+
+/**
+ * Reload lib/github.cjs with execSync replaced by a fake that throws `error`.
+ */
+function loadWithThrow(error) {
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', () => { throw error; });
+ return require(GITHUB_MODULE);
+}
+
+/**
+ * Restore mocks after each test so they don't bleed across describe blocks.
+ */
+function restoreMocks() {
+ mock.restoreAll();
+ delete require.cache[GITHUB_MODULE];
+}
+
+// ---------------------------------------------------------------------------
+// Fixtures — pre-baked JSON strings matching gh CLI output shapes
+// ---------------------------------------------------------------------------
+
+const FX = {
+ repo: 'snipcodeit/mgw',
+
+ issue: JSON.stringify({
+ number: 42,
+ title: 'Fix everything',
+ state: 'OPEN',
+ labels: [{ name: 'bug' }],
+ milestone: { title: 'v1.0', number: 1 },
+ assignees: [{ login: 'hat' }],
+ body: 'Body text'
+ }),
+
+ issueList: JSON.stringify([
+ { number: 1, title: 'First', state: 'OPEN', labels: [], milestone: null, assignees: [] },
+ { number: 2, title: 'Second', state: 'OPEN', labels: [], milestone: null, assignees: [] }
+ ]),
+
+ milestone: JSON.stringify({
+ number: 3,
+ title: 'v1.0',
+ state: 'open',
+ open_issues: 2,
+ closed_issues: 5
+ }),
+
+ rateLimit: JSON.stringify({
+ resources: {
+ core: { remaining: 4999, limit: 5000, reset: 1700000000 }
+ }
+ }),
+
+ closedMilestone: JSON.stringify({
+ number: 3,
+ title: 'v1.0',
+ state: 'closed',
+ open_issues: 0,
+ closed_issues: 7
+ }),
+
+ releaseOutput: 'https://github.com/snipcodeit/mgw/releases/tag/v1.0.0',
+
+ project: JSON.stringify({ number: 7, url: 'https://github.com/orgs/snipcodeit/projects/7' }),
+
+ addItemOutput: 'PVT_kwDOABC123',
+
+ repoMeta: JSON.stringify({
+ id: 'R_kgDOABC',
+ discussionCategories: {
+ nodes: [
+ { id: 'DIC_kwDOABC', name: 'Announcements' },
+ { id: 'DIC_kwDOXYZ', name: 'General' }
+ ]
+ }
+ }),
+
+ discussionResult: JSON.stringify({
+ url: 'https://github.com/snipcodeit/mgw/discussions/99'
+ })
+};
+
+// ---------------------------------------------------------------------------
+// getRepo
+// ---------------------------------------------------------------------------
+
+describe('getRepo', () => {
+ beforeEach(restoreMocks);
+
+ it('returns the repo nameWithOwner string', () => {
+ const { github, spy } = loadWithMock(FX.repo);
+ const result = github.getRepo();
+
+ assert.equal(result, 'snipcodeit/mgw');
+ assert.equal(spy.mock.calls.length, 1);
+ assert.ok(
+ spy.mock.calls[0].arguments[0].includes('gh repo view'),
+ 'should call gh repo view'
+ );
+ assert.ok(
+ spy.mock.calls[0].arguments[0].includes('nameWithOwner'),
+ 'should request nameWithOwner field'
+ );
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('gh: not found'));
+ assert.throws(() => github.getRepo(), /gh: not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// getIssue
+// ---------------------------------------------------------------------------
+
+describe('getIssue', () => {
+ beforeEach(restoreMocks);
+
+ it('returns parsed issue object', () => {
+ const { github, spy } = loadWithMock(FX.issue);
+ const result = github.getIssue(42);
+
+ assert.equal(result.number, 42);
+ assert.equal(result.title, 'Fix everything');
+ assert.equal(result.state, 'OPEN');
+ assert.deepEqual(result.labels, [{ name: 'bug' }]);
+ });
+
+ it('constructs correct gh issue view command', () => {
+ const { github, spy } = loadWithMock(FX.issue);
+ github.getIssue(42);
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh issue view 42'), 'should include issue number');
+ assert.ok(cmd.includes('number,title,state,labels,milestone,assignees,body'), 'should request all fields');
+ });
+
+ it('works with string issue number', () => {
+ const { github } = loadWithMock(FX.issue);
+ const result = github.getIssue('42');
+ assert.equal(result.number, 42);
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('issue not found'));
+ assert.throws(() => github.getIssue(99), /issue not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// listIssues
+// ---------------------------------------------------------------------------
+
+describe('listIssues', () => {
+ beforeEach(restoreMocks);
+
+ it('returns parsed array of issues with no filters', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ const result = github.listIssues();
+
+ assert.ok(Array.isArray(result), 'should return array');
+ assert.equal(result.length, 2);
+ assert.equal(result[0].number, 1);
+ assert.equal(result[1].title, 'Second');
+ });
+
+ it('builds base command correctly', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues();
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh issue list'), 'should start with gh issue list');
+ assert.ok(cmd.includes('--json number,title,state,labels,milestone,assignees'), 'should request correct fields');
+ });
+
+ it('appends --label flag when filter.label is set', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues({ label: 'bug' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--label'), 'should include --label flag');
+ assert.ok(cmd.includes('bug'), 'should include label value');
+ });
+
+ it('appends --milestone flag when filter.milestone is set', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues({ milestone: 'v1.0' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--milestone'), 'should include --milestone flag');
+ });
+
+ it('appends --assignee flag when filter.assignee is set and not "all"', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues({ assignee: 'hat' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--assignee'), 'should include --assignee flag');
+ });
+
+ it('omits --assignee when filter.assignee is "all"', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues({ assignee: 'all' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(!cmd.includes('--assignee'), 'should NOT include --assignee for "all"');
+ });
+
+ it('appends --state flag when filter.state is set', () => {
+ const { github, spy } = loadWithMock(FX.issueList);
+ github.listIssues({ state: 'closed' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--state closed'), 'should include --state flag');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('rate limit exceeded'));
+ assert.throws(() => github.listIssues(), /rate limit exceeded/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// getMilestone
+// ---------------------------------------------------------------------------
+
+describe('getMilestone', () => {
+ beforeEach(restoreMocks);
+
+ it('returns parsed milestone object', () => {
+ // getMilestone calls getRepo() first, then fetches the milestone.
+ // We return FX.repo for the first call, FX.milestone for the second.
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (_cmd, _opts) => {
+ callCount++;
+ return callCount === 1 ? FX.repo : FX.milestone;
+ });
+ const github = require(GITHUB_MODULE);
+
+ const result = github.getMilestone(3);
+ assert.equal(result.number, 3);
+ assert.equal(result.title, 'v1.0');
+ assert.equal(result.state, 'open');
+ });
+
+ it('constructs correct gh api repos/{repo}/milestones/{number} command', () => {
+ let callCount = 0;
+ const calls = [];
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (cmd, _opts) => {
+ callCount++;
+ calls.push(cmd);
+ return callCount === 1 ? FX.repo : FX.milestone;
+ });
+ const github = require(GITHUB_MODULE);
+
+ github.getMilestone(3);
+ assert.equal(calls.length, 2);
+ assert.ok(calls[0].includes('gh repo view'), 'first call should be getRepo');
+ assert.ok(calls[1].includes('gh api repos/snipcodeit/mgw/milestones/3'), 'second call should be getMilestone');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('milestone not found'));
+ assert.throws(() => github.getMilestone(99), /milestone not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// getRateLimit
+// ---------------------------------------------------------------------------
+
+describe('getRateLimit', () => {
+ beforeEach(restoreMocks);
+
+ it('returns core rate limit fields', () => {
+ const { github } = loadWithMock(FX.rateLimit);
+ const result = github.getRateLimit();
+
+ assert.equal(result.remaining, 4999);
+ assert.equal(result.limit, 5000);
+ assert.equal(result.reset, 1700000000);
+ });
+
+ it('constructs correct gh api rate_limit command', () => {
+ const { github, spy } = loadWithMock(FX.rateLimit);
+ github.getRateLimit();
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh api rate_limit'), 'should call gh api rate_limit');
+ });
+
+ it('does not include extra fields beyond remaining/limit/reset', () => {
+ const { github } = loadWithMock(FX.rateLimit);
+ const result = github.getRateLimit();
+
+ const keys = Object.keys(result);
+ assert.deepEqual(keys.sort(), ['limit', 'remaining', 'reset']);
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('network error'));
+ assert.throws(() => github.getRateLimit(), /network error/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// closeMilestone
+// ---------------------------------------------------------------------------
+
+describe('closeMilestone', () => {
+ beforeEach(restoreMocks);
+
+ it('returns parsed updated milestone JSON', () => {
+ const { github } = loadWithMock(FX.closedMilestone);
+ const result = github.closeMilestone('snipcodeit/mgw', 3);
+
+ assert.equal(result.state, 'closed');
+ assert.equal(result.number, 3);
+ });
+
+ it('constructs correct PATCH command', () => {
+ const { github, spy } = loadWithMock(FX.closedMilestone);
+ github.closeMilestone('snipcodeit/mgw', 3);
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh api repos/snipcodeit/mgw/milestones/3'), 'should target correct milestone');
+ assert.ok(cmd.includes('--method PATCH'), 'should use PATCH method');
+ assert.ok(cmd.includes('-f state=closed'), 'should send state=closed');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('forbidden'));
+ assert.throws(() => github.closeMilestone('snipcodeit/mgw', 3), /forbidden/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// createRelease
+// ---------------------------------------------------------------------------
+
+describe('createRelease', () => {
+ beforeEach(restoreMocks);
+
+ it('returns raw output string from gh release create', () => {
+ const { github } = loadWithMock(FX.releaseOutput);
+ const result = github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0');
+
+ assert.equal(result, FX.releaseOutput);
+ });
+
+ it('constructs base command with tag, repo, and title', () => {
+ const { github, spy } = loadWithMock(FX.releaseOutput);
+ github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0');
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh release create'), 'should call gh release create');
+ assert.ok(cmd.includes('v1.0.0'), 'should include tag');
+ assert.ok(cmd.includes('snipcodeit/mgw'), 'should include repo');
+ assert.ok(cmd.includes('Release v1.0.0'), 'should include title');
+ });
+
+ it('appends --notes when opts.notes is provided', () => {
+ const { github, spy } = loadWithMock(FX.releaseOutput);
+ github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0', { notes: 'Bug fixes' });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--notes'), 'should include --notes flag');
+ assert.ok(cmd.includes('Bug fixes'), 'should include notes content');
+ });
+
+ it('appends --draft when opts.draft is true', () => {
+ const { github, spy } = loadWithMock(FX.releaseOutput);
+ github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0', { draft: true });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--draft'), 'should include --draft flag');
+ });
+
+ it('appends --prerelease when opts.prerelease is true', () => {
+ const { github, spy } = loadWithMock(FX.releaseOutput);
+ github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0', { prerelease: true });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('--prerelease'), 'should include --prerelease flag');
+ });
+
+ it('does not append --draft or --prerelease when opts are false', () => {
+ const { github, spy } = loadWithMock(FX.releaseOutput);
+ github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Release v1.0.0', { draft: false, prerelease: false });
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(!cmd.includes('--draft'), 'should NOT include --draft when false');
+ assert.ok(!cmd.includes('--prerelease'), 'should NOT include --prerelease when false');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('tag already exists'));
+ assert.throws(() => github.createRelease('snipcodeit/mgw', 'v1.0.0', 'Dup'), /tag already exists/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// createProject
+// ---------------------------------------------------------------------------
+
+describe('createProject', () => {
+ beforeEach(restoreMocks);
+
+ it('returns { number, url } from parsed JSON', () => {
+ const { github } = loadWithMock(FX.project);
+ const result = github.createProject('snipcodeit', 'My Board');
+
+ assert.equal(result.number, 7);
+ assert.equal(result.url, 'https://github.com/orgs/snipcodeit/projects/7');
+ });
+
+ it('constructs correct gh project create command', () => {
+ const { github, spy } = loadWithMock(FX.project);
+ github.createProject('snipcodeit', 'My Board');
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh project create'), 'should call gh project create');
+ assert.ok(cmd.includes('snipcodeit'), 'should include owner');
+ assert.ok(cmd.includes('My Board'), 'should include title');
+ assert.ok(cmd.includes('--format json'), 'should request json format');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('org not found'));
+ assert.throws(() => github.createProject('bad-org', 'Board'), /org not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// addItemToProject
+// ---------------------------------------------------------------------------
+
+describe('addItemToProject', () => {
+ beforeEach(restoreMocks);
+
+ it('returns the raw item ID string', () => {
+ const { github } = loadWithMock(FX.addItemOutput);
+ const result = github.addItemToProject('snipcodeit', 7, 'https://github.com/snipcodeit/mgw/issues/1');
+
+ assert.equal(result, FX.addItemOutput);
+ });
+
+ it('constructs correct gh project item-add command', () => {
+ const { github, spy } = loadWithMock(FX.addItemOutput);
+ github.addItemToProject('snipcodeit', 7, 'https://github.com/snipcodeit/mgw/issues/1');
+
+ const cmd = spy.mock.calls[0].arguments[0];
+ assert.ok(cmd.includes('gh project item-add 7'), 'should include project number');
+ assert.ok(cmd.includes('snipcodeit'), 'should include owner');
+ assert.ok(cmd.includes('https://github.com/snipcodeit/mgw/issues/1'), 'should include issue URL');
+ });
+
+ it('propagates execSync errors', () => {
+ const github = loadWithThrow(new Error('project not found'));
+ assert.throws(() => github.addItemToProject('snipcodeit', 99, 'https://github.com/snipcodeit/mgw/issues/1'), /project not found/);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// postMilestoneStartAnnouncement
+// ---------------------------------------------------------------------------
+
+describe('postMilestoneStartAnnouncement', () => {
+ beforeEach(restoreMocks);
+
+ const baseOpts = {
+ repo: 'snipcodeit/mgw',
+ milestoneName: 'v3.5',
+ milestoneNumber: 5,
+ boardUrl: 'https://github.com/orgs/snipcodeit/projects/7',
+ issues: [
+ { number: 134, title: 'Write tests', assignee: 'hat', gsdRoute: 'execute' }
+ ],
+ firstIssueNumber: 134
+ };
+
+ it('returns { posted: true, method: "discussion", url } when Discussions succeed', () => {
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (_cmd, _opts) => {
+ callCount++;
+ // First call: repoMeta GraphQL query
+ if (callCount === 1) return FX.repoMeta;
+ // Second call: createDiscussion mutation
+ return FX.discussionResult;
+ });
+ const github = require(GITHUB_MODULE);
+
+ const result = github.postMilestoneStartAnnouncement(baseOpts);
+ assert.equal(result.posted, true);
+ assert.equal(result.method, 'discussion');
+ assert.equal(result.url, 'https://github.com/snipcodeit/mgw/discussions/99');
+ });
+
+ it('falls back to issue comment when Discussions are not available', () => {
+ // Return repoMeta WITHOUT an Announcements category
+ const repoMetaNoAnnouncements = JSON.stringify({
+ id: 'R_kgDOABC',
+ discussionCategories: { nodes: [{ id: 'DIC_kwDOXYZ', name: 'General' }] }
+ });
+
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (_cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) return repoMetaNoAnnouncements;
+ // Second call is the fallback comment
+ return '';
+ });
+ const github = require(GITHUB_MODULE);
+
+ const result = github.postMilestoneStartAnnouncement(baseOpts);
+ assert.equal(result.posted, true);
+ assert.equal(result.method, 'comment');
+ assert.equal(result.url, null);
+ });
+
+ it('falls back to issue comment when GraphQL throws', () => {
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (_cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) throw new Error('Discussions not enabled');
+ // Second call is the fallback comment
+ return '';
+ });
+ const github = require(GITHUB_MODULE);
+
+ const result = github.postMilestoneStartAnnouncement(baseOpts);
+ assert.equal(result.posted, true);
+ assert.equal(result.method, 'comment');
+ });
+
+ it('returns { posted: false, method: "none" } when both paths fail', () => {
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', () => { throw new Error('all failed'); });
+ const github = require(GITHUB_MODULE);
+
+ const result = github.postMilestoneStartAnnouncement(baseOpts);
+ assert.equal(result.posted, false);
+ assert.equal(result.method, 'none');
+ assert.equal(result.url, null);
+ });
+
+ it('returns { posted: false } when no repo or firstIssueNumber is provided', () => {
+ // No repo → skip GraphQL; no firstIssueNumber → skip comment fallback
+ const { github } = loadWithMock('');
+ const result = github.postMilestoneStartAnnouncement({
+ milestoneName: 'v3.5',
+ issues: []
+ });
+
+ assert.equal(result.posted, false);
+ assert.equal(result.method, 'none');
+ });
+
+ it('includes boardUrl line in constructed body when boardUrl is provided', () => {
+ let capturedBody = '';
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) return FX.repoMeta;
+ // Capture the createDiscussion call to inspect the body
+ capturedBody = cmd;
+ return FX.discussionResult;
+ });
+ const github = require(GITHUB_MODULE);
+
+ github.postMilestoneStartAnnouncement(baseOpts);
+ // The second execSync call contains the mutation with the board URL embedded
+ assert.ok(capturedBody.includes('https://github.com/orgs/snipcodeit/projects/7'), 'body should include board URL');
+ });
+
+ it('uses "_(not configured)_" when boardUrl is not provided', () => {
+ let capturedBody = '';
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) return FX.repoMeta;
+ capturedBody = cmd;
+ return FX.discussionResult;
+ });
+ const github = require(GITHUB_MODULE);
+
+ github.postMilestoneStartAnnouncement({ ...baseOpts, boardUrl: undefined });
+ assert.ok(capturedBody.includes('not configured'), 'body should include "not configured" when no board URL');
+ });
+
+ it('includes issue table rows in constructed body', () => {
+ let capturedBody = '';
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) return FX.repoMeta;
+ capturedBody = cmd;
+ return FX.discussionResult;
+ });
+ const github = require(GITHUB_MODULE);
+
+ github.postMilestoneStartAnnouncement(baseOpts);
+ assert.ok(capturedBody.includes('#134'), 'body should include issue number');
+ assert.ok(capturedBody.includes('Write tests'), 'body should include issue title');
+ assert.ok(capturedBody.includes('@hat'), 'body should include assignee');
+ });
+
+ it('renders "—" for unassigned issues in body', () => {
+ let capturedBody = '';
+ let callCount = 0;
+ delete require.cache[GITHUB_MODULE];
+ mock.method(childProcess, 'execSync', (cmd, _opts) => {
+ callCount++;
+ if (callCount === 1) return FX.repoMeta;
+ capturedBody = cmd;
+ return FX.discussionResult;
+ });
+ const github = require(GITHUB_MODULE);
+
+ github.postMilestoneStartAnnouncement({
+ ...baseOpts,
+ issues: [{ number: 1, title: 'Unassigned issue', assignee: null, gsdRoute: 'execute' }]
+ });
+ // The em dash "—" appears as the assignee placeholder
+ assert.ok(capturedBody.includes('\\u2014') || capturedBody.includes('—'), 'body should include em dash for unassigned');
+ });
+});
diff --git a/test/state.test.cjs b/test/state.test.cjs
new file mode 100644
index 0000000..64f5455
--- /dev/null
+++ b/test/state.test.cjs
@@ -0,0 +1,546 @@
+'use strict';
+
+/**
+ * test/state.test.cjs — Unit tests for lib/state.cjs
+ *
+ * Isolation strategy:
+ * - fs.mkdtempSync() creates a real temp directory per test suite
+ * - process.cwd is overridden to point at the temp dir so getMgwDir()
+ * and all derived paths stay sandboxed
+ * - afterEach removes .mgw/ inside the temp dir for clean state
+ * - The temp dir itself is cleaned up in after() on each describe block
+ *
+ * All 9 exported functions are covered:
+ * getMgwDir, getActiveDir, getCompletedDir,
+ * loadProjectState, writeProjectState, loadActiveIssue,
+ * mergeProjectState, migrateProjectState, resolveActiveMilestoneIndex
+ */
+
+const { describe, it, before, beforeEach, after, afterEach } = require('node:test');
+const assert = require('node:assert/strict');
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+
+const STATE_MODULE = path.resolve(__dirname, '..', 'lib', 'state.cjs');
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+/**
+ * Reload lib/state.cjs fresh (evict module cache so process.cwd override
+ * takes effect on each load).
+ */
+function loadState() {
+ delete require.cache[STATE_MODULE];
+ return require(STATE_MODULE);
+}
+
+/**
+ * Override process.cwd to return tmpDir for the duration of each test.
+ * Returns a restore function.
+ */
+function overrideCwd(tmpDir) {
+ const original = process.cwd.bind(process);
+ process.cwd = () => tmpDir;
+ return () => { process.cwd = original; };
+}
+
+/**
+ * Remove .mgw/ inside tmpDir if it exists.
+ */
+function cleanMgw(tmpDir) {
+ const mgwDir = path.join(tmpDir, '.mgw');
+ if (fs.existsSync(mgwDir)) {
+ fs.rmSync(mgwDir, { recursive: true, force: true });
+ }
+}
+
+// ---------------------------------------------------------------------------
+// getMgwDir, getActiveDir, getCompletedDir
+// ---------------------------------------------------------------------------
+
+describe('getMgwDir / getActiveDir / getCompletedDir', () => {
+ let tmpDir;
+ let restoreCwd;
+
+ before(() => {
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mgw-state-test-'));
+ });
+
+ beforeEach(() => {
+ restoreCwd = overrideCwd(tmpDir);
+ });
+
+ afterEach(() => {
+ restoreCwd();
+ cleanMgw(tmpDir);
+ delete require.cache[STATE_MODULE];
+ });
+
+ after(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+ it('getMgwDir returns /.mgw', () => {
+ const state = loadState();
+ assert.equal(state.getMgwDir(), path.join(tmpDir, '.mgw'));
+ });
+
+ it('getActiveDir returns /.mgw/active', () => {
+ const state = loadState();
+ assert.equal(state.getActiveDir(), path.join(tmpDir, '.mgw', 'active'));
+ });
+
+ it('getCompletedDir returns /.mgw/completed', () => {
+ const state = loadState();
+ assert.equal(state.getCompletedDir(), path.join(tmpDir, '.mgw', 'completed'));
+ });
+
+ it('all three paths share the same .mgw/ prefix', () => {
+ const state = loadState();
+ const mgw = state.getMgwDir();
+ assert.ok(state.getActiveDir().startsWith(mgw));
+ assert.ok(state.getCompletedDir().startsWith(mgw));
+ });
+});
+
+// ---------------------------------------------------------------------------
+// loadProjectState / writeProjectState
+// ---------------------------------------------------------------------------
+
+describe('loadProjectState / writeProjectState', () => {
+ let tmpDir;
+ let restoreCwd;
+
+ before(() => {
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mgw-state-test-'));
+ });
+
+ beforeEach(() => {
+ restoreCwd = overrideCwd(tmpDir);
+ });
+
+ afterEach(() => {
+ restoreCwd();
+ cleanMgw(tmpDir);
+ delete require.cache[STATE_MODULE];
+ });
+
+ after(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+ it('loadProjectState returns null when .mgw/ does not exist', () => {
+ const state = loadState();
+ assert.equal(state.loadProjectState(), null);
+ });
+
+ it('loadProjectState returns null when project.json is missing', () => {
+ const state = loadState();
+ fs.mkdirSync(path.join(tmpDir, '.mgw'), { recursive: true });
+ assert.equal(state.loadProjectState(), null);
+ });
+
+ it('loadProjectState returns null when project.json is invalid JSON', () => {
+ const state = loadState();
+ const mgwDir = path.join(tmpDir, '.mgw');
+ fs.mkdirSync(mgwDir, { recursive: true });
+ fs.writeFileSync(path.join(mgwDir, 'project.json'), '{ broken json }', 'utf-8');
+ assert.equal(state.loadProjectState(), null);
+ });
+
+ it('writeProjectState creates .mgw/ if it does not exist', () => {
+ const state = loadState();
+ const mgwDir = path.join(tmpDir, '.mgw');
+ assert.ok(!fs.existsSync(mgwDir));
+ state.writeProjectState({ name: 'test' });
+ assert.ok(fs.existsSync(mgwDir));
+ });
+
+ it('writeProjectState serialises state to project.json', () => {
+ const state = loadState();
+ const payload = { name: 'mgw', version: '0.1.0', milestones: [] };
+ state.writeProjectState(payload);
+ const raw = fs.readFileSync(path.join(tmpDir, '.mgw', 'project.json'), 'utf-8');
+ assert.deepEqual(JSON.parse(raw), payload);
+ });
+
+ it('loadProjectState round-trips through writeProjectState', () => {
+ const state = loadState();
+ const payload = { project: 'test', active_gsd_milestone: 'v1.0', milestones: [{ gsd_milestone_id: 'v1.0' }] };
+ state.writeProjectState(payload);
+ const loaded = state.loadProjectState();
+ assert.deepEqual(loaded, payload);
+ });
+
+ it('writeProjectState overwrites existing project.json', () => {
+ const state = loadState();
+ state.writeProjectState({ name: 'first' });
+ state.writeProjectState({ name: 'second' });
+ assert.equal(state.loadProjectState().name, 'second');
+ });
+});
+
+// ---------------------------------------------------------------------------
+// loadActiveIssue
+// ---------------------------------------------------------------------------
+
+describe('loadActiveIssue', () => {
+ let tmpDir;
+ let restoreCwd;
+
+ before(() => {
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mgw-state-test-'));
+ });
+
+ beforeEach(() => {
+ restoreCwd = overrideCwd(tmpDir);
+ });
+
+ afterEach(() => {
+ restoreCwd();
+ cleanMgw(tmpDir);
+ delete require.cache[STATE_MODULE];
+ });
+
+ after(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+ it('returns null when active/ directory does not exist', () => {
+ const state = loadState();
+ assert.equal(state.loadActiveIssue(42), null);
+ });
+
+ it('returns null when no matching file exists in active/', () => {
+ const state = loadState();
+ const activeDir = path.join(tmpDir, '.mgw', 'active');
+ fs.mkdirSync(activeDir, { recursive: true });
+ assert.equal(state.loadActiveIssue(42), null);
+ });
+
+ it('returns parsed JSON for a matching active issue file', () => {
+ const state = loadState();
+ const activeDir = path.join(tmpDir, '.mgw', 'active');
+ fs.mkdirSync(activeDir, { recursive: true });
+ const issue = { number: 42, title: 'Fix bug', pipeline_stage: 'executing' };
+ fs.writeFileSync(path.join(activeDir, '42-fix-bug.json'), JSON.stringify(issue), 'utf-8');
+ const loaded = state.loadActiveIssue(42);
+ assert.deepEqual(loaded, issue);
+ });
+
+ it('matches by numeric prefix (string number arg)', () => {
+ const state = loadState();
+ const activeDir = path.join(tmpDir, '.mgw', 'active');
+ fs.mkdirSync(activeDir, { recursive: true });
+ const issue = { number: 7, title: 'String test' };
+ fs.writeFileSync(path.join(activeDir, '7-string-test.json'), JSON.stringify(issue), 'utf-8');
+ const loaded = state.loadActiveIssue('7');
+ assert.deepEqual(loaded, issue);
+ });
+
+ it('does not match a file whose prefix is a superset (e.g. 42 should not match 420-*.json)', () => {
+ const state = loadState();
+ const activeDir = path.join(tmpDir, '.mgw', 'active');
+ fs.mkdirSync(activeDir, { recursive: true });
+ fs.writeFileSync(path.join(activeDir, '420-unrelated.json'), JSON.stringify({ number: 420 }), 'utf-8');
+ assert.equal(state.loadActiveIssue(42), null);
+ });
+
+ it('returns null when active issue file contains invalid JSON', () => {
+ const state = loadState();
+ const activeDir = path.join(tmpDir, '.mgw', 'active');
+ fs.mkdirSync(activeDir, { recursive: true });
+ fs.writeFileSync(path.join(activeDir, '99-bad.json'), '{ not json', 'utf-8');
+ assert.equal(state.loadActiveIssue(99), null);
+ });
+});
+
+// ---------------------------------------------------------------------------
+// mergeProjectState
+// ---------------------------------------------------------------------------
+
+describe('mergeProjectState', () => {
+ let tmpDir;
+ let restoreCwd;
+
+ before(() => {
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mgw-state-test-'));
+ });
+
+ beforeEach(() => {
+ restoreCwd = overrideCwd(tmpDir);
+ });
+
+ afterEach(() => {
+ restoreCwd();
+ cleanMgw(tmpDir);
+ delete require.cache[STATE_MODULE];
+ });
+
+ after(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+ it('throws when no existing project state is found', () => {
+ const state = loadState();
+ assert.throws(
+ () => state.mergeProjectState([], {}, 1),
+ /No existing project state found/
+ );
+ });
+
+ it('appends new milestones to existing milestones array', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [{ title: 'v1.0' }], phase_map: {} });
+ const result = state.mergeProjectState([{ title: 'v2.0' }], {}, 2);
+ assert.equal(result.milestones.length, 2);
+ assert.equal(result.milestones[1].title, 'v2.0');
+ });
+
+ it('merges phase_map — new keys added, existing keys preserved (no overwrite)', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [], phase_map: { '1': 'existing' } });
+ const result = state.mergeProjectState([], { '2': 'new', '1': 'overwrite-attempt' }, 1);
+ // Existing key '1' must not be overwritten
+ assert.equal(result.phase_map['1'], 'existing');
+ // New key '2' must be added
+ assert.equal(result.phase_map['2'], 'new');
+ });
+
+ it('sets active_gsd_milestone when activeGsdMilestone param is provided', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [], phase_map: {} });
+ const result = state.mergeProjectState([], {}, 1, 'v2.0');
+ assert.equal(result.active_gsd_milestone, 'v2.0');
+ });
+
+ it('sets legacy current_milestone when active_gsd_milestone is not in use and no activeGsdMilestone param', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [], phase_map: {} });
+ const result = state.mergeProjectState([], {}, 3);
+ assert.equal(result.current_milestone, 3);
+ });
+
+ it('does NOT update current_milestone when active_gsd_milestone already exists in state', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [], phase_map: {}, active_gsd_milestone: 'v1.0' });
+ const result = state.mergeProjectState([], {}, 99);
+ // current_milestone should NOT have been set
+ assert.ok(result.current_milestone === undefined || result.current_milestone !== 99);
+ });
+
+ it('persists the merged result to disk', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [], phase_map: {} });
+ state.mergeProjectState([{ title: 'v3.0' }], {}, 1);
+ const onDisk = state.loadProjectState();
+ assert.equal(onDisk.milestones.length, 1);
+ assert.equal(onDisk.milestones[0].title, 'v3.0');
+ });
+});
+
+// ---------------------------------------------------------------------------
+// migrateProjectState
+// ---------------------------------------------------------------------------
+
+describe('migrateProjectState', () => {
+ let tmpDir;
+ let restoreCwd;
+
+ before(() => {
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mgw-state-test-'));
+ });
+
+ beforeEach(() => {
+ restoreCwd = overrideCwd(tmpDir);
+ });
+
+ afterEach(() => {
+ restoreCwd();
+ cleanMgw(tmpDir);
+ delete require.cache[STATE_MODULE];
+ });
+
+ after(() => {
+ fs.rmSync(tmpDir, { recursive: true, force: true });
+ });
+
+ it('returns null when no project.json exists', () => {
+ const state = loadState();
+ assert.equal(state.migrateProjectState(), null);
+ });
+
+ it('adds active_gsd_milestone: null when field is missing', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [] });
+ const result = state.migrateProjectState();
+ assert.ok(result.hasOwnProperty('active_gsd_milestone'));
+ assert.equal(result.active_gsd_milestone, null);
+ });
+
+ it('does NOT overwrite active_gsd_milestone when it already exists', () => {
+ const state = loadState();
+ state.writeProjectState({ active_gsd_milestone: 'v1.0', milestones: [] });
+ const result = state.migrateProjectState();
+ assert.equal(result.active_gsd_milestone, 'v1.0');
+ });
+
+ it('adds gsd_milestone_id, gsd_state, roadmap_archived_at to milestones missing those fields', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [{ title: 'v1.0' }] });
+ const result = state.migrateProjectState();
+ const m = result.milestones[0];
+ assert.ok(m.hasOwnProperty('gsd_milestone_id'));
+ assert.ok(m.hasOwnProperty('gsd_state'));
+ assert.ok(m.hasOwnProperty('roadmap_archived_at'));
+ assert.equal(m.gsd_milestone_id, null);
+ assert.equal(m.gsd_state, null);
+ assert.equal(m.roadmap_archived_at, null);
+ });
+
+ it('does NOT overwrite existing gsd_milestone_id / gsd_state / roadmap_archived_at', () => {
+ const state = loadState();
+ state.writeProjectState({
+ milestones: [{
+ title: 'v1.0',
+ gsd_milestone_id: 'v1.0',
+ gsd_state: 'completed',
+ roadmap_archived_at: '2025-01-01T00:00:00Z'
+ }]
+ });
+ const result = state.migrateProjectState();
+ const m = result.milestones[0];
+ assert.equal(m.gsd_milestone_id, 'v1.0');
+ assert.equal(m.gsd_state, 'completed');
+ assert.equal(m.roadmap_archived_at, '2025-01-01T00:00:00Z');
+ });
+
+ it('is idempotent — running twice yields the same result', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [{ title: 'v1.0' }] });
+ const first = state.migrateProjectState();
+ const second = state.migrateProjectState();
+ assert.deepEqual(first, second);
+ });
+
+ it('persists migration changes to disk', () => {
+ const state = loadState();
+ state.writeProjectState({ milestones: [{ title: 'v1.0' }] });
+ state.migrateProjectState();
+ const onDisk = state.loadProjectState();
+ assert.ok(onDisk.hasOwnProperty('active_gsd_milestone'));
+ assert.ok(onDisk.milestones[0].hasOwnProperty('gsd_milestone_id'));
+ });
+
+ it('handles state with no milestones array gracefully', () => {
+ const state = loadState();
+ state.writeProjectState({ name: 'test' });
+ const result = state.migrateProjectState();
+ assert.ok(result.hasOwnProperty('active_gsd_milestone'));
+ });
+});
+
+// ---------------------------------------------------------------------------
+// resolveActiveMilestoneIndex
+// ---------------------------------------------------------------------------
+
+describe('resolveActiveMilestoneIndex', () => {
+ it('returns -1 for null state', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ assert.equal(state.resolveActiveMilestoneIndex(null), -1);
+ });
+
+ it('returns -1 for undefined state', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ assert.equal(state.resolveActiveMilestoneIndex(undefined), -1);
+ });
+
+ it('new schema: resolves active_gsd_milestone string to correct 0-based index', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = {
+ active_gsd_milestone: 'v2.0',
+ milestones: [
+ { gsd_milestone_id: 'v1.0' },
+ { gsd_milestone_id: 'v2.0' },
+ { gsd_milestone_id: 'v3.0' }
+ ]
+ };
+ assert.equal(state.resolveActiveMilestoneIndex(s), 1);
+ });
+
+ it('new schema: returns -1 when active_gsd_milestone does not match any milestone (dangling reference)', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = {
+ active_gsd_milestone: 'v99.0',
+ milestones: [
+ { gsd_milestone_id: 'v1.0' },
+ { gsd_milestone_id: 'v2.0' }
+ ]
+ };
+ assert.equal(state.resolveActiveMilestoneIndex(s), -1);
+ });
+
+ it('new schema: takes precedence over current_milestone when both are present', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ // current_milestone=1 (0-based: 0) vs active_gsd_milestone='v2.0' (0-based: 1)
+ const s = {
+ active_gsd_milestone: 'v2.0',
+ current_milestone: 1,
+ milestones: [
+ { gsd_milestone_id: 'v1.0' },
+ { gsd_milestone_id: 'v2.0' }
+ ]
+ };
+ assert.equal(state.resolveActiveMilestoneIndex(s), 1);
+ });
+
+ it('legacy schema: converts current_milestone (1-indexed) to 0-based index', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = {
+ current_milestone: 3,
+ milestones: [
+ { title: 'v1.0' },
+ { title: 'v2.0' },
+ { title: 'v3.0' }
+ ]
+ };
+ assert.equal(state.resolveActiveMilestoneIndex(s), 2);
+ });
+
+ it('legacy schema: current_milestone=1 maps to index 0', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = { current_milestone: 1, milestones: [{ title: 'v1.0' }] };
+ assert.equal(state.resolveActiveMilestoneIndex(s), 0);
+ });
+
+ it('returns -1 when neither active_gsd_milestone nor current_milestone is set', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = { milestones: [{ gsd_milestone_id: 'v1.0' }] };
+ assert.equal(state.resolveActiveMilestoneIndex(s), -1);
+ });
+
+ it('handles empty milestones array with active_gsd_milestone set', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = { active_gsd_milestone: 'v1.0', milestones: [] };
+ assert.equal(state.resolveActiveMilestoneIndex(s), -1);
+ });
+
+ it('handles missing milestones key entirely', () => {
+ delete require.cache[STATE_MODULE];
+ const state = require(STATE_MODULE);
+ const s = { active_gsd_milestone: 'v1.0' };
+ assert.equal(state.resolveActiveMilestoneIndex(s), -1);
+ });
+});
diff --git a/workflows/detect-state.md b/workflows/detect-state.md
new file mode 100644
index 0000000..d4a7bfd
--- /dev/null
+++ b/workflows/detect-state.md
@@ -0,0 +1,222 @@
+# Workflow: Detect State
+Reusable state detection workflow for MGW commands. Reads five signal sources and
+classifies the project into one of six STATE_CLASS values. Any command that needs to
+know the current project state (project.md, status.md, milestone.md, sync.md) can
+reference this workflow instead of re-implementing the detection logic inline.
+
+
+## Input Contract
+
+Requires the following variables to be set before invoking:
+
+| Variable | Source | Description |
+|----------|--------|-------------|
+| `REPO_ROOT` | `git rev-parse --show-toplevel` | Absolute path to the repo root |
+| `REPO` | `gh repo view --json nameWithOwner -q .nameWithOwner` | GitHub owner/repo slug |
+
+## Output Contract
+
+Sets the following variables for downstream steps to branch on:
+
+| Variable | Type | Values |
+|----------|------|--------|
+| `STATE_CLASS` | string | `Fresh`, `GSD-Only`, `GSD-Mid-Exec`, `Aligned`, `Diverged`, `Extend` |
+| `EXTEND_MODE` | bool | `true` when STATE_CLASS is Extend or user chose extend in Aligned |
+| `EXISTING_MILESTONE_COUNT` | int | Number of milestones in project.json (Extend/Aligned paths only) |
+| `EXISTING_PHASE_COUNT` | int | Highest phase number in project.json phase_map (Extend/Aligned paths only) |
+| `LOCAL_MILESTONE_COUNT` | int | Count of milestones in project.json (Aligned/Diverged paths only) |
+| `GH_MILESTONE_COUNT` | int | Count of milestones on GitHub (Aligned/Diverged paths only) |
+
+## The Five Signals
+
+| Signal | What It Checks |
+|--------|---------------|
+| `P` | `.planning/PROJECT.md` exists |
+| `R` | `.planning/ROADMAP.md` exists |
+| `S` | `.planning/STATE.md` exists |
+| `M` | `.mgw/project.json` exists |
+| `G` | Count of GitHub milestones via `gh api` |
+
+## The Six State Classes
+
+| State | P | R | S | M | G | Meaning |
+|---|---|---|---|---|---|---|
+| Fresh | false | false | false | false | 0 | Clean slate — no GSD, no MGW |
+| GSD-Only | true | false | false | false | 0 | PROJECT.md present but no roadmap yet |
+| GSD-Mid-Exec | true | true | true | false | 0 | GSD in progress, MGW not yet linked |
+| Aligned | true | — | — | true | >0 | Both MGW + GitHub consistent with each other |
+| Diverged | — | — | — | true | >0 | MGW + GitHub present but inconsistent |
+| Extend | true | — | — | true | >0 | All milestones in project.json are done |
+
+## Step: detect_state
+
+**Detect existing project state from five signal sources:**
+
+Check five signals to determine what already exists for this project:
+
+```bash
+# Signal checks
+P=false # .planning/PROJECT.md exists
+R=false # .planning/ROADMAP.md exists
+S=false # .planning/STATE.md exists
+M=false # .mgw/project.json exists
+G=0 # GitHub milestone count
+
+[ -f "${REPO_ROOT}/.planning/PROJECT.md" ] && P=true
+[ -f "${REPO_ROOT}/.planning/ROADMAP.md" ] && R=true
+[ -f "${REPO_ROOT}/.planning/STATE.md" ] && S=true
+[ -f "${REPO_ROOT}/.mgw/project.json" ] && M=true
+
+G=$(gh api "repos/${REPO}/milestones?state=all" --jq 'length' 2>/dev/null || echo 0)
+```
+
+**Classify into STATE_CLASS:**
+
+```bash
+# Classification logic
+STATE_CLASS="Fresh"
+EXTEND_MODE=false
+
+if [ "$M" = "true" ] && [ "$G" -gt 0 ]; then
+ # Check if all milestones are complete (Extend detection)
+ ALL_COMPLETE=$(python3 -c "
+import json
+p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
+milestones = p.get('milestones', [])
+current = p.get('current_milestone', 1)
+# All complete when legacy current_milestone exceeds array length.
+# NOTE(review): new-schema active_gsd_milestone is not consulted here — confirm
+all_done = current > len(milestones) and len(milestones) > 0
+print('true' if all_done else 'false')
+")
+
+ if [ "$ALL_COMPLETE" = "true" ]; then
+ STATE_CLASS="Extend"
+ EXTEND_MODE=true
+ EXISTING_MILESTONE_COUNT=$(python3 -c "import json; print(len(json.load(open('${REPO_ROOT}/.mgw/project.json'))['milestones']))")
+ EXISTING_PHASE_COUNT=$(python3 -c "import json; print(max((int(k) for k in json.load(open('${REPO_ROOT}/.mgw/project.json')).get('phase_map',{}).keys()), default=0))")
+ else
+ # M=true, G>0, not all done — check consistency (Aligned vs Diverged)
+ GH_MILESTONE_COUNT=$G
+ LOCAL_MILESTONE_COUNT=$(python3 -c "import json; print(len(json.load(open('${REPO_ROOT}/.mgw/project.json')).get('milestones', [])))")
+
+ # Consistency: milestone counts match and names overlap
+ CONSISTENCY_OK=$(python3 -c "
+import json, subprocess, sys
+local = json.load(open('${REPO_ROOT}/.mgw/project.json'))
+local_names = set(m.get('title', m.get('name', '')) for m in local.get('milestones', []))
+local_count = len(local_names)
+gh_count = ${GH_MILESTONE_COUNT}
+
+# Count mismatch is a drift signal (allow off-by-one for in-flight)
+if abs(local_count - gh_count) > 1:
+ print('false')
+ sys.exit(0)
+
+# Name overlap check: at least 50% of local milestone names found on GitHub
+result = subprocess.run(
+    ['gh', 'api', 'repos/${REPO}/milestones?state=all', '--jq', '[.[].title]'],
+ capture_output=True, text=True
+)
+try:
+ gh_names = set(json.loads(result.stdout))
+ overlap = len(local_names & gh_names)
+ print('true' if overlap >= max(1, local_count // 2) else 'false')
+except Exception:
+ print('false')
+")
+
+ if [ "$CONSISTENCY_OK" = "true" ]; then
+ STATE_CLASS="Aligned"
+ else
+ STATE_CLASS="Diverged"
+ fi
+ fi
+elif [ "$M" = "false" ] && [ "$G" -eq 0 ]; then
+  # No MGW state, no GH milestones — GSD signals decide. NOTE(review): M=true/G=0 and M=false/G>0 fall through as Fresh — confirm intended
+ if [ "$P" = "true" ] && [ "$R" = "true" ] && [ "$S" = "true" ]; then
+ STATE_CLASS="GSD-Mid-Exec"
+ elif [ "$P" = "true" ] && [ "$R" = "true" ]; then
+ STATE_CLASS="GSD-Mid-Exec"
+ elif [ "$P" = "true" ]; then
+ STATE_CLASS="GSD-Only"
+ else
+ STATE_CLASS="Fresh"
+ fi
+fi
+
+echo "State detected: ${STATE_CLASS} (P=${P} R=${R} S=${S} M=${M} G=${G})"
+```
+
+**Route by STATE_CLASS:**
+
+```bash
+case "$STATE_CLASS" in
+ "Fresh")
+ # Proceed to gather_inputs (standard flow)
+ ;;
+
+ "GSD-Only"|"GSD-Mid-Exec")
+ # GSD artifacts exist but MGW not initialized — delegate to align_from_gsd
+ # (proceed to align_from_gsd step)
+ ;;
+
+ "Aligned")
+ # MGW + GitHub consistent — display status and offer extend mode
+ TOTAL_ISSUES=$(python3 -c "
+import json
+p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
+print(sum(len(m.get('issues', [])) for m in p.get('milestones', [])))
+")
+ echo ""
+ echo "Project already initialized and aligned with GitHub."
+ echo " Milestones: ${LOCAL_MILESTONE_COUNT} local / ${GH_MILESTONE_COUNT} on GitHub"
+ echo " Issues: ${TOTAL_ISSUES} tracked in project.json"
+ echo ""
+ echo "What would you like to do?"
+ echo ""
+ echo " 1) Continue with /mgw:milestone (execute next milestone)"
+ echo " 2) Add new milestones to this project (extend mode)"
+ echo " 3) View full status (/mgw:status)"
+ echo ""
+ read -p "Choose [1/2/3]: " ALIGNED_CHOICE
+ case "$ALIGNED_CHOICE" in
+ 2)
+ echo ""
+ echo "Entering extend mode — new milestones will be added to the existing project."
+ EXTEND_MODE=true
+ EXISTING_MILESTONE_COUNT=${LOCAL_MILESTONE_COUNT}
+      EXISTING_PHASE_COUNT=$(python3 -c "
+import json
+p = json.load(open('${REPO_ROOT}/.mgw/project.json'))
+print(max((int(k) for k in p.get('phase_map', {}).keys()), default=0))
+")
+ echo "Phase numbering will continue from phase ${EXISTING_PHASE_COUNT}."
+ # Fall through to gather_inputs — do NOT exit
+ ;;
+ 3)
+ echo ""
+ echo "Run /mgw:status to view the full project status dashboard."
+ exit 0
+ ;;
+ *)
+ echo ""
+ echo "Run /mgw:milestone to execute the next milestone."
+ exit 0
+ ;;
+ esac
+ ;;
+
+ "Diverged")
+ # MGW + GitHub inconsistent — delegate to reconcile_drift
+ # (proceed to reconcile_drift step)
+ ;;
+
+ "Extend")
+ # All milestones done — entering extend mode
+ echo "All ${EXISTING_MILESTONE_COUNT} milestones complete. Entering extend mode."
+ echo "Phase numbering will continue from phase ${EXISTING_PHASE_COUNT}."
+ # Proceed to gather_inputs in extend mode (EXTEND_MODE=true already set)
+ ;;
+esac
+```