sf snapshot: pre-dispatch, uncommitted changes after 53m inactivity

This commit is contained in:
Mikael Hugo 2026-04-30 19:10:38 +02:00
parent 40e0835d5e
commit 2111da8e60
169 changed files with 1369 additions and 1791 deletions

View file

@@ -1,482 +0,0 @@
# Codebase Map
Generated: 2026-04-15T12:09:27Z | Files: 500 | Described: 0/500
<!-- gsd:codebase-meta {"generatedAt":"2026-04-15T12:09:27Z","fingerprint":"447265c2205a9bc92066b5de4a0866717d17b961","fileCount":500,"truncated":true} -->
Note: Truncated to first 500 files. Run with a higher --max-files value to include all files.
### (root)/
- `.dockerignore`
- `.gitignore`
- `.npmignore`
- `.npmrc`
- `.prompt-injection-scanignore`
- `.secretscanignore`
- `CHANGELOG.md`
- `CONTRIBUTING.md`
- `Dockerfile`
- `flake.nix`
- `LICENSE`
- `package-lock.json`
- `package.json`
- `README.md`
- `VISION.md`
### .github/
- `.github/CODEOWNERS`
- `.github/FUNDING.yml`
- `.github/PULL_REQUEST_TEMPLATE.md`
### .github/ISSUE_TEMPLATE/
- `.github/ISSUE_TEMPLATE/bug_report.yml`
- `.github/ISSUE_TEMPLATE/config.yml`
- `.github/ISSUE_TEMPLATE/feature_request.yml`
### .github/workflows/
- `.github/workflows/ai-triage.yml`
- `.github/workflows/build-native.yml`
- `.github/workflows/ci.yml`
- `.github/workflows/cleanup-dev-versions.yml`
- `.github/workflows/pipeline.yml`
- `.github/workflows/pr-risk.yml`
### bin/
- `bin/gsd-from-source`
### docker/
- `docker/.env.example`
- `docker/bootstrap.sh`
- `docker/docker-compose.full.yaml`
- `docker/docker-compose.yaml`
- `docker/Dockerfile.ci-builder`
- `docker/Dockerfile.sandbox`
- `docker/entrypoint.sh`
- `docker/README.md`
### docs/
- `docs/README.md`
### docs/dev/
- `docs/dev/ADR-001-branchless-worktree-architecture.md`
- `docs/dev/ADR-003-pipeline-simplification.md`
- `docs/dev/ADR-004-capability-aware-model-routing.md`
- `docs/dev/ADR-005-multi-model-provider-tool-strategy.md`
- `docs/dev/ADR-007-model-catalog-split.md`
- `docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md`
- `docs/dev/ADR-008-IMPLEMENTATION-PLAN.md`
- `docs/dev/ADR-009-IMPLEMENTATION-PLAN.md`
- `docs/dev/ADR-009-orchestration-kernel-refactor.md`
- `docs/dev/ADR-010-pi-clean-seam-architecture.md`
- `docs/dev/agent-knowledge-index.md`
- `docs/dev/architecture.md`
- `docs/dev/ci-cd-pipeline.md`
- `docs/dev/FILE-SYSTEM-MAP.md`
- `docs/dev/FRONTIER-TECHNIQUES.md`
- `docs/dev/pi-context-optimization-opportunities.md`
- `docs/dev/PRD-branchless-worktree-architecture.md`
- `docs/dev/PRD-pi-clean-seam-refactor.md`
### docs/dev/building-coding-agents/
- *(27 files: 27 .md)*
### docs/dev/context-and-hooks/
- `docs/dev/context-and-hooks/01-the-context-pipeline.md`
- `docs/dev/context-and-hooks/02-hook-reference.md`
- `docs/dev/context-and-hooks/03-context-injection-patterns.md`
- `docs/dev/context-and-hooks/04-message-types-and-llm-visibility.md`
- `docs/dev/context-and-hooks/05-inter-extension-communication.md`
- `docs/dev/context-and-hooks/06-advanced-patterns-from-source.md`
- `docs/dev/context-and-hooks/07-the-system-prompt-anatomy.md`
- `docs/dev/context-and-hooks/README.md`
### docs/dev/extending-pi/
- *(26 files: 26 .md)*
### docs/dev/pi-ui-tui/
- *(24 files: 24 .md)*
### docs/dev/proposals/
- `docs/dev/proposals/698-browser-tools-feature-additions.md`
- `docs/dev/proposals/rfc-gitops-branching-strategy.md`
### docs/dev/proposals/workflows/
- `docs/dev/proposals/workflows/backmerge.yml`
- `docs/dev/proposals/workflows/create-release.yml`
- `docs/dev/proposals/workflows/README.md`
- `docs/dev/proposals/workflows/sync-next.yml`
### docs/dev/superpowers/plans/
- `docs/dev/superpowers/plans/2026-03-17-cicd-pipeline.md`
### docs/dev/superpowers/specs/
- `docs/dev/superpowers/specs/2026-03-17-cicd-pipeline-design.md`
### docs/dev/what-is-pi/
- `docs/dev/what-is-pi/01-what-pi-is.md`
- `docs/dev/what-is-pi/02-design-philosophy.md`
- `docs/dev/what-is-pi/03-the-four-modes-of-operation.md`
- `docs/dev/what-is-pi/04-the-architecture-how-everything-fits-together.md`
- `docs/dev/what-is-pi/05-the-agent-loop-how-pi-thinks.md`
- `docs/dev/what-is-pi/06-tools-how-pi-acts-on-the-world.md`
- `docs/dev/what-is-pi/07-sessions-memory-that-branches.md`
- `docs/dev/what-is-pi/08-compaction-how-pi-manages-context-limits.md`
- `docs/dev/what-is-pi/09-the-customization-stack.md`
- `docs/dev/what-is-pi/10-providers-models-multi-model-by-default.md`
- `docs/dev/what-is-pi/11-the-interactive-tui.md`
- `docs/dev/what-is-pi/12-the-message-queue-talking-while-pi-thinks.md`
- `docs/dev/what-is-pi/13-context-files-project-instructions.md`
- `docs/dev/what-is-pi/14-the-sdk-rpc-embedding-pi.md`
- `docs/dev/what-is-pi/15-pi-packages-the-ecosystem.md`
- `docs/dev/what-is-pi/16-why-pi-matters-what-makes-it-different.md`
- `docs/dev/what-is-pi/17-file-reference-all-documentation.md`
- `docs/dev/what-is-pi/18-quick-reference-commands-shortcuts.md`
- `docs/dev/what-is-pi/19-building-branded-apps-on-top-of-pi.md`
- `docs/dev/what-is-pi/README.md`
### docs/user-docs/
- *(21 files: 21 .md)*
### docs/zh-CN/
- `docs/zh-CN/README.md`
### docs/zh-CN/user-docs/
- *(21 files: 21 .md)*
### gitbook/
- `gitbook/README.md`
- `gitbook/SUMMARY.md`
### gitbook/configuration/
- `gitbook/configuration/custom-models.md`
- `gitbook/configuration/git-settings.md`
- `gitbook/configuration/mcp-servers.md`
- `gitbook/configuration/notifications.md`
- `gitbook/configuration/preferences.md`
- `gitbook/configuration/providers.md`
### gitbook/core-concepts/
- `gitbook/core-concepts/auto-mode.md`
- `gitbook/core-concepts/project-structure.md`
- `gitbook/core-concepts/step-mode.md`
### gitbook/features/
- `gitbook/features/captures.md`
- `gitbook/features/cost-management.md`
- `gitbook/features/dynamic-model-routing.md`
- `gitbook/features/github-sync.md`
- `gitbook/features/headless.md`
- `gitbook/features/parallel.md`
- `gitbook/features/remote-questions.md`
- `gitbook/features/skills.md`
- `gitbook/features/teams.md`
- `gitbook/features/token-optimization.md`
- `gitbook/features/visualizer.md`
- `gitbook/features/web-interface.md`
- `gitbook/features/workflow-templates.md`
### gitbook/getting-started/
- `gitbook/getting-started/choosing-a-model.md`
- `gitbook/getting-started/first-project.md`
- `gitbook/getting-started/installation.md`
### gitbook/reference/
- `gitbook/reference/cli-flags.md`
- `gitbook/reference/commands.md`
- `gitbook/reference/environment-variables.md`
- `gitbook/reference/keyboard-shortcuts.md`
- `gitbook/reference/migration.md`
- `gitbook/reference/troubleshooting.md`
### sf-orchestrator/
- `sf-orchestrator/SKILL.md`
### sf-orchestrator/references/
- `sf-orchestrator/references/answer-injection.md`
- `sf-orchestrator/references/commands.md`
- `sf-orchestrator/references/json-result.md`
### sf-orchestrator/templates/
- `sf-orchestrator/templates/spec.md`
### sf-orchestrator/workflows/
- `sf-orchestrator/workflows/build-from-spec.md`
- `sf-orchestrator/workflows/monitor-and-poll.md`
- `sf-orchestrator/workflows/step-by-step.md`
### mintlify-docs/
- `mintlify-docs/docs`
- `mintlify-docs/docs.json`
- `mintlify-docs/getting-started.mdx`
- `mintlify-docs/introduction.mdx`
### mintlify-docs/guides/
- `mintlify-docs/guides/auto-mode.mdx`
- `mintlify-docs/guides/captures-triage.mdx`
- `mintlify-docs/guides/change-management.mdx`
- `mintlify-docs/guides/commands.mdx`
- `mintlify-docs/guides/configuration.mdx`
- `mintlify-docs/guides/cost-management.mdx`
- `mintlify-docs/guides/custom-models.mdx`
- `mintlify-docs/guides/dynamic-model-routing.mdx`
- `mintlify-docs/guides/git-strategy.mdx`
- `mintlify-docs/guides/migration.mdx`
- `mintlify-docs/guides/parallel-orchestration.mdx`
- `mintlify-docs/guides/remote-questions.mdx`
- `mintlify-docs/guides/skills.mdx`
- `mintlify-docs/guides/token-optimization.mdx`
- `mintlify-docs/guides/troubleshooting.mdx`
- `mintlify-docs/guides/visualizer.mdx`
- `mintlify-docs/guides/web-interface.mdx`
- `mintlify-docs/guides/working-in-teams.mdx`
### native/
- `native/.gitignore`
- `native/.npmignore`
- `native/Cargo.toml`
- `native/README.md`
### native/.cargo/
- `native/.cargo/config.toml`
### native/crates/ast/
- `native/crates/ast/Cargo.toml`
### native/crates/ast/src/
- `native/crates/ast/src/ast.rs`
- `native/crates/ast/src/glob_util.rs`
- `native/crates/ast/src/lib.rs`
### native/crates/ast/src/language/
- `native/crates/ast/src/language/mod.rs`
- `native/crates/ast/src/language/parsers.rs`
### native/crates/engine/
- `native/crates/engine/build.rs`
- `native/crates/engine/Cargo.toml`
### native/crates/engine/src/
- *(22 files: 22 .rs)*
### native/crates/grep/
- `native/crates/grep/Cargo.toml`
### native/crates/grep/src/
- `native/crates/grep/src/lib.rs`
### native/npm/darwin-arm64/
- `native/npm/darwin-arm64/package.json`
### native/npm/darwin-x64/
- `native/npm/darwin-x64/package.json`
### native/npm/linux-arm64-gnu/
- `native/npm/linux-arm64-gnu/package.json`
### native/npm/linux-x64-gnu/
- `native/npm/linux-x64-gnu/package.json`
### native/npm/win32-x64-msvc/
- `native/npm/win32-x64-msvc/package.json`
### native/scripts/
- `native/scripts/build.js`
- `native/scripts/sync-platform-versions.cjs`
### packages/daemon/
- `packages/daemon/package.json`
- `packages/daemon/tsconfig.json`
### packages/daemon/src/
- *(27 files: 27 .ts)*
### packages/mcp-server/
- `packages/mcp-server/.npmignore`
- `packages/mcp-server/package.json`
- `packages/mcp-server/README.md`
- `packages/mcp-server/tsconfig.json`
### packages/mcp-server/src/
- `packages/mcp-server/src/cli.ts`
- `packages/mcp-server/src/env-writer.test.ts`
- `packages/mcp-server/src/env-writer.ts`
- `packages/mcp-server/src/import-candidates.test.ts`
- `packages/mcp-server/src/index.ts`
- `packages/mcp-server/src/mcp-server.test.ts`
- `packages/mcp-server/src/secure-env-collect.test.ts`
- `packages/mcp-server/src/server.ts`
- `packages/mcp-server/src/session-manager.ts`
- `packages/mcp-server/src/tool-credentials.test.ts`
- `packages/mcp-server/src/tool-credentials.ts`
- `packages/mcp-server/src/types.ts`
- `packages/mcp-server/src/workflow-tools.test.ts`
- `packages/mcp-server/src/workflow-tools.ts`
### packages/mcp-server/src/readers/
- `packages/mcp-server/src/readers/captures.ts`
- `packages/mcp-server/src/readers/doctor-lite.ts`
- `packages/mcp-server/src/readers/graph.test.ts`
- `packages/mcp-server/src/readers/graph.ts`
- `packages/mcp-server/src/readers/index.ts`
- `packages/mcp-server/src/readers/knowledge.ts`
- `packages/mcp-server/src/readers/metrics.ts`
- `packages/mcp-server/src/readers/paths.ts`
- `packages/mcp-server/src/readers/readers.test.ts`
- `packages/mcp-server/src/readers/roadmap.ts`
- `packages/mcp-server/src/readers/state.ts`
### packages/native/
- `packages/native/package.json`
- `packages/native/tsconfig.json`
### packages/native/src/
- `packages/native/src/index.ts`
- `packages/native/src/native.ts`
### packages/native/src/__tests__/
- `packages/native/src/__tests__/clipboard.test.mjs`
- `packages/native/src/__tests__/diff.test.mjs`
- `packages/native/src/__tests__/fd.test.mjs`
- `packages/native/src/__tests__/glob.test.mjs`
- `packages/native/src/__tests__/grep.test.mjs`
- `packages/native/src/__tests__/highlight.test.mjs`
- `packages/native/src/__tests__/html.test.mjs`
- `packages/native/src/__tests__/image.test.mjs`
- `packages/native/src/__tests__/json-parse.test.mjs`
- `packages/native/src/__tests__/module-compat.test.mjs`
- `packages/native/src/__tests__/ps.test.mjs`
- `packages/native/src/__tests__/stream-process.test.mjs`
- `packages/native/src/__tests__/text.test.mjs`
- `packages/native/src/__tests__/truncate.test.mjs`
- `packages/native/src/__tests__/ttsr.test.mjs`
- `packages/native/src/__tests__/xxhash.test.mjs`
### packages/native/src/ast/
- `packages/native/src/ast/index.ts`
- `packages/native/src/ast/types.ts`
### packages/native/src/clipboard/
- `packages/native/src/clipboard/index.ts`
- `packages/native/src/clipboard/types.ts`
### packages/native/src/diff/
- `packages/native/src/diff/index.ts`
- `packages/native/src/diff/types.ts`
### packages/native/src/fd/
- `packages/native/src/fd/index.ts`
- `packages/native/src/fd/types.ts`
### packages/native/src/glob/
- `packages/native/src/glob/index.ts`
- `packages/native/src/glob/types.ts`
### packages/native/src/grep/
- `packages/native/src/grep/index.ts`
- `packages/native/src/grep/types.ts`
### packages/native/src/gsd-parser/
- `packages/native/src/gsd-parser/index.ts`
- `packages/native/src/gsd-parser/types.ts`
### packages/native/src/highlight/
- `packages/native/src/highlight/index.ts`
- `packages/native/src/highlight/types.ts`
### packages/native/src/html/
- `packages/native/src/html/index.ts`
- `packages/native/src/html/types.ts`
### packages/native/src/image/
- `packages/native/src/image/index.ts`
- `packages/native/src/image/types.ts`
### packages/native/src/json-parse/
- `packages/native/src/json-parse/index.ts`
### packages/native/src/ps/
- `packages/native/src/ps/index.ts`
- `packages/native/src/ps/types.ts`
### packages/native/src/stream-process/
- `packages/native/src/stream-process/index.ts`
### packages/native/src/text/
- `packages/native/src/text/index.ts`
- `packages/native/src/text/types.ts`
### packages/native/src/truncate/
- `packages/native/src/truncate/index.ts`
### packages/native/src/ttsr/
- `packages/native/src/ttsr/index.ts`
- `packages/native/src/ttsr/types.ts`
### packages/native/src/xxhash/
- `packages/native/src/xxhash/index.ts`
### packages/pi-agent-core/
- `packages/pi-agent-core/package.json`
- `packages/pi-agent-core/tsconfig.json`
### packages/pi-agent-core/src/
- `packages/pi-agent-core/src/agent-loop.test.ts`
- `packages/pi-agent-core/src/agent-loop.ts`
- `packages/pi-agent-core/src/agent.test.ts`
- `packages/pi-agent-core/src/agent.ts`
- `packages/pi-agent-core/src/index.ts`
- `packages/pi-agent-core/src/proxy.ts`
- `packages/pi-agent-core/src/types.ts`
### packages/pi-ai/
- `packages/pi-ai/bedrock-provider.d.ts`
- `packages/pi-ai/bedrock-provider.js`
- `packages/pi-ai/oauth.d.ts`
- `packages/pi-ai/oauth.js`
- `packages/pi-ai/package.json`
### packages/pi-ai/scripts/
- `packages/pi-ai/scripts/generate-models.ts`
### packages/pi-ai/src/
- `packages/pi-ai/src/api-registry.ts`
- `packages/pi-ai/src/bedrock-provider.ts`
- `packages/pi-ai/src/cli.ts`
- `packages/pi-ai/src/env-api-keys.ts`
- `packages/pi-ai/src/index.ts`
- `packages/pi-ai/src/models.custom.ts`
- `packages/pi-ai/src/models.generated.test.ts`
- `packages/pi-ai/src/models.generated.ts`
- `packages/pi-ai/src/models.test.ts`
- `packages/pi-ai/src/models.ts`
- `packages/pi-ai/src/oauth.ts`
- `packages/pi-ai/src/stream.ts`
- `packages/pi-ai/src/types.ts`
- `packages/pi-ai/src/web-runtime-env-api-keys.ts`
### packages/pi-ai/src/providers/
- *(25 files: 25 .ts)*
### packages/pi-ai/src/utils/
- `packages/pi-ai/src/utils/event-stream.ts`
- `packages/pi-ai/src/utils/hash.ts`
- `packages/pi-ai/src/utils/json-parse.ts`
- `packages/pi-ai/src/utils/overflow.ts`
- `packages/pi-ai/src/utils/repair-tool-json.ts`
- `packages/pi-ai/src/utils/sanitize-unicode.ts`
- `packages/pi-ai/src/utils/typebox-helpers.ts`
- `packages/pi-ai/src/utils/validation.ts`
### packages/pi-ai/src/utils/oauth/
- `packages/pi-ai/src/utils/oauth/github-copilot.test.ts`
- `packages/pi-ai/src/utils/oauth/github-copilot.ts`
- `packages/pi-ai/src/utils/oauth/google-antigravity.ts`
- `packages/pi-ai/src/utils/oauth/google-gemini-cli.ts`
- `packages/pi-ai/src/utils/oauth/google-oauth-utils.ts`
- `packages/pi-ai/src/utils/oauth/index.ts`
- `packages/pi-ai/src/utils/oauth/openai-codex.ts`
- `packages/pi-ai/src/utils/oauth/pkce.ts`
- `packages/pi-ai/src/utils/oauth/types.ts`
### packages/pi-ai/src/utils/tests/
- `packages/pi-ai/src/utils/tests/json-parse.test.ts`
- `packages/pi-ai/src/utils/tests/overflow.test.ts`
- `packages/pi-ai/src/utils/tests/repair-tool-json.test.ts`

View file

@@ -1,4 +0,0 @@
{"eventId":"9567a0bc-d8a2-410d-83a8-4ea091e095a7","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.561Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
{"eventId":"d1765e7e-d2dc-4417-9fb8-0bec6e01e9a8","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.563Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}
{"eventId":"9c2b6de3-b8eb-4a51-af8a-91be51fecfc9","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.516Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
{"eventId":"8597d568-05b8-43ed-89d7-ca4673079e0f","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.518Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}

View file

@@ -1,10 +0,0 @@
{"id":"76bf27b0-01bf-4260-80f6-b7d8249c6875","ts":"2026-04-15T06:32:30.018Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
{"id":"597c94ae-7c3b-48dd-89b1-be8d0bbd02ee","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
{"id":"dc176d95-8171-4d15-8c73-97ddb704a786","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
{"id":"66762fce-d6c6-41db-be03-d34348aaccd9","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
{"id":"b7e5e997-b98d-4b50-a6f3-017a916dd2ac","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
{"id":"eccbb677-be17-44b9-a7b6-440ebf777a89","ts":"2026-04-15T06:33:47.202Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
{"id":"98803c8a-c9f1-43bd-9903-f67fea7a5128","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
{"id":"a9253906-1990-4957-9c1a-36046b8d3cfa","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
{"id":"8caa4904-0ce5-46f4-b645-df5077fb229e","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
{"id":"eb520a00-567d-4c02-bb2e-6111089dc3de","ts":"2026-04-15T09:03:17.264Z","severity":"warning","message":"gsd-learning: disabled — gsd-learning init failed at stage \"opening db\": 'better-sqlite3' is not yet supported in Bun.\nTrack the status in https://github.com/oven-sh/bun/issues/4290\nIn the meantime, you could try bun:sqlite which has a similar API.","source":"notify","read":false}

View file

@@ -283,7 +283,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- **sf**: auto-refresh codebase cache - **sf**: auto-refresh codebase cache
- **sf**: align model switching and prefs surfaces - **sf**: align model switching and prefs surfaces
- route slice and validation artifacts through DB tools - route slice and validation artifacts through DB tools
- make gsd_complete_task the only execute-task summary path - make sf_complete_task the only execute-task summary path
- **docs**: stop pointing repo documentation to sf.build - **docs**: stop pointing repo documentation to sf.build
- add activeEngineId and activeRunDir to PausedSessionMetadata interface - add activeEngineId and activeRunDir to PausedSessionMetadata interface
- **sf**: address QA round 4 - **sf**: address QA round 4
@@ -426,8 +426,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- **sf**: stop renderAllProjections from overwriting authoritative PLAN.md - **sf**: stop renderAllProjections from overwriting authoritative PLAN.md
- **sf**: auto-checkout to main when isolation:none finds stale milestone branch - **sf**: auto-checkout to main when isolation:none finds stale milestone branch
- **sf**: auto-remediate stale slice DB status when SUMMARY exists on disk - **sf**: auto-remediate stale slice DB status when SUMMARY exists on disk
- **sf**: open DB on demand in gsd_milestone_status for non-auto sessions - **sf**: open DB on demand in sf_milestone_status for non-auto sessions
- **sf**: detect phantom milestones from abandoned gsd_milestone_generate_id - **sf**: detect phantom milestones from abandoned sf_milestone_generate_id
- **sf**: force re-validation when verdict is needs-remediation - **sf**: force re-validation when verdict is needs-remediation
- **sf**: exclude closed slices from findMissingSummaries check - **sf**: exclude closed slices from findMissingSummaries check
- **sf**: recover from stale lockfile after crash or SIGKILL - **sf**: recover from stale lockfile after crash or SIGKILL
@@ -686,7 +686,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- detect project relocation and recover state without data loss (#3080) - detect project relocation and recover state without data loss (#3080)
- add free-text input to ask-user-questions when "None of the above" is selected (#3081) - add free-text input to ask-user-questions when "None of the above" is selected (#3081)
- block work execution during /sf queue mode (#2545) (#3082) - block work execution during /sf queue mode (#2545) (#3082)
- detect worktree basePath in gsdRoot() to prevent escaping to project root (#3083) - detect worktree basePath in sfRoot() to prevent escaping to project root (#3083)
- invalidate stale quick-task captures across milestone boundaries (#3084) - invalidate stale quick-task captures across milestone boundaries (#3084)
- defer model validation until after extensions register (#3089) - defer model validation until after extensions register (#3089)
- repair YAML bullet lists in malformed tool-call JSON (#3090) - repair YAML bullet lists in malformed tool-call JSON (#3090)
@@ -722,7 +722,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- align @sf/native module type with compiled output (#3253) - align @sf/native module type with compiled output (#3253)
- parse hook/* completed-unit keys correctly in forensics + doctor (#2826) (#3252) - parse hook/* completed-unit keys correctly in forensics + doctor (#2826) (#3252)
- copy mcp.json into auto-mode worktrees (#2791) (#3251) - copy mcp.json into auto-mode worktrees (#2791) (#3251)
- add gsd_requirement_save and upsert path for requirement updates (#3249) - add sf_requirement_save and upsert path for requirement updates (#3249)
- handle pause_turn stop reason to prevent 400 errors with native web search (#2869) (#3248) - handle pause_turn stop reason to prevent 400 errors with native web search (#2869) (#3248)
- use authoritative milestone status in web roadmap (#2807) (#3258) - use authoritative milestone status in web roadmap (#2807) (#3258)
- classify long-context entitlement 429 as quota_exhausted, not rate_limit (#2803) (#3257) - classify long-context entitlement 429 as quota_exhausted, not rate_limit (#2803) (#3257)
@@ -989,11 +989,11 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- **sf**: handle session_switch event so /resume restores SF state (#2587) - **sf**: handle session_switch event so /resume restores SF state (#2587)
- use GitHub Issue Types via GraphQL instead of classification labels - use GitHub Issue Types via GraphQL instead of classification labels
- **headless**: disable overall timeout for auto-mode, fix lock-guard auto-select (#2586) - **headless**: disable overall timeout for auto-mode, fix lock-guard auto-select (#2586)
- **auto**: align UAT artifact suffix with gsd_slice_complete output (#2592) - **auto**: align UAT artifact suffix with sf_slice_complete output (#2592)
- **retry-handler**: stop treating 5xx server errors as credential-level failures - **retry-handler**: stop treating 5xx server errors as credential-level failures
- **test**: replace stale completedUnits with sessionFile in session-lock test - **test**: replace stale completedUnits with sessionFile in session-lock test
- **session-lock**: retry lock file reads before declaring compromise - **session-lock**: retry lock file reads before declaring compromise
- **sf**: prevent ensureGsdSymlink from creating subdirectory .sf when git-root .sf exists - **sf**: prevent ensureSfSymlink from creating subdirectory .sf when git-root .sf exists
- **auto**: add EAGAIN to INFRA_ERROR_CODES to stop budget-burning retries - **auto**: add EAGAIN to INFRA_ERROR_CODES to stop budget-burning retries
- **search**: enforce hard search budget and survive context compaction - **search**: enforce hard search budget and survive context compaction
- **remote-questions**: use static ESM import for AuthStorage hydration - **remote-questions**: use static ESM import for AuthStorage hydration
@@ -1814,7 +1814,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- **sf**: remove STATE.md update instructions from all prompts (#983) - **sf**: remove STATE.md update instructions from all prompts (#983)
- **sf**: clear all caches after discuss dispatch so picker sees new CONTEXT files (#981) - **sf**: clear all caches after discuss dispatch so picker sees new CONTEXT files (#981)
- **auto**: dispatch retry after verification gate failure (#998) - **auto**: dispatch retry after verification gate failure (#998)
- enforce GSDError usage and activate unused error codes (#997) - enforce SFError usage and activate unused error codes (#997)
- unify extension discovery logic (#995) - unify extension discovery logic (#995)
- deduplicate tierLabel/tierOrdinal exports (#988) - deduplicate tierLabel/tierOrdinal exports (#988)
- deduplicate getMainBranch implementations (#994) - deduplicate getMainBranch implementations (#994)
@@ -1931,7 +1931,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- `require_slice_discussion` option to pause auto-mode before each slice for human review - `require_slice_discussion` option to pause auto-mode before each slice for human review
- Discussion status indicators in `/sf discuss` slice picker - Discussion status indicators in `/sf discuss` slice picker
- Worker NDJSON monitoring and budget enforcement for parallel orchestration - Worker NDJSON monitoring and budget enforcement for parallel orchestration
- `gsd_generate_milestone_id` tool for multi-milestone unique ID generation - `sf_generate_milestone_id` tool for multi-milestone unique ID generation
- Alt+V clipboard image paste shortcut on macOS - Alt+V clipboard image paste shortcut on macOS
- Hashline edit mode integration into active workflow - Hashline edit mode integration into active workflow
- Fallback parser for prose-style roadmaps without `## Slices` section - Fallback parser for prose-style roadmaps without `## Slices` section
@@ -1954,7 +1954,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
- Debug logging for silent early-return paths in dispatchNextUnit - Debug logging for silent early-return paths in dispatchNextUnit
- Untracked .sf/ state files removed before milestone merge checkout - Untracked .sf/ state files removed before milestone merge checkout
- Crash prevention when cancelling OAuth provider login dialog - Crash prevention when cancelling OAuth provider login dialog
- Resource staleness check compares gsdVersion instead of syncedAt - Resource staleness check compares sfVersion instead of syncedAt
- Unique temp paths in saveFile() to prevent parallel write collisions - Unique temp paths in saveFile() to prevent parallel write collisions
- Validation/summary file generation for completed milestones during migration - Validation/summary file generation for completed milestones during migration
- Cache invalidation before initial state derivation in startAuto - Cache invalidation before initial state derivation in startAuto

View file

@@ -771,7 +771,7 @@ Use expensive models where quality matters (planning, complex execution) and che
| Project | Description | | Project | Description |
| ------- | ----------- | | ------- | ----------- |
| [GSD2 Config Utility](https://github.com/jeremymcs/gsd2-config) | Standalone configuration tool for managing SF preferences, providers, and API keys | | [SF2 Config Utility](https://github.com/jeremymcs/sf-config) | Standalone configuration tool for managing SF preferences, providers, and API keys |
--- ---

View file

@@ -262,7 +262,7 @@ If a task cannot be described this way, it is underspecified.
- [`AGENTS.md`](../AGENTS.md) — repo guidelines, build/test/lint commands. - [`AGENTS.md`](../AGENTS.md) — repo guidelines, build/test/lint commands.
- [`SPEC.md`](../SPEC.md) — sf v3 specification (what we're building). - [`SPEC.md`](../SPEC.md) — sf v3 specification (what we're building).
- [`UPSTREAM_PORT_GUIDE.md`](../UPSTREAM_PORT_GUIDE.md) — porting from pi-mono / gsd-2. - [`UPSTREAM_PORT_GUIDE.md`](../UPSTREAM_PORT_GUIDE.md) — porting from pi-mono legacy port.
- [`src/resources/extensions/sf/skills/advisory-partner/SKILL.md`](../src/resources/extensions/sf/skills/advisory-partner/SKILL.md) — adversarial review framework. - [`src/resources/extensions/sf/skills/advisory-partner/SKILL.md`](../src/resources/extensions/sf/skills/advisory-partner/SKILL.md) — adversarial review framework.
- [`src/resources/extensions/sf/skills/code-review/SKILL.md`](../src/resources/extensions/sf/skills/code-review/SKILL.md) — multi-lens review skill. - [`src/resources/extensions/sf/skills/code-review/SKILL.md`](../src/resources/extensions/sf/skills/code-review/SKILL.md) — multi-lens review skill.

View file

@ -1,240 +0,0 @@
# ADR-008: Expose SF Workflow Tools Over MCP for Provider Parity
**Status:** Proposed
**Date:** 2026-04-09
**Deciders:** Jeremy McSpadden
**Related:** ADR-004 (capability-aware model routing), ADR-007 (model catalog split and provider API encapsulation), `src/resources/extensions/sf/bootstrap/db-tools.ts`, `src/resources/extensions/claude-code-cli/stream-adapter.ts`, `packages/mcp-server/src/server.ts`
## Context
SF currently has two different tool surfaces:
1. **In-process extension tools** registered directly into the runtime via `pi.registerTool(...)`.
2. **An external MCP server** that exposes session orchestration and read-only project inspection.
This split is now creating a real provider compatibility problem.
### What exists today
The core SF workflow tools are internal extension tools. Examples include:
- `sf_summary_save`
- `sf_plan_milestone`
- `sf_plan_slice`
- `sf_plan_task`
- `sf_task_complete` / `sf_complete_task`
- `sf_slice_complete`
- `sf_complete_milestone`
- `sf_validate_milestone`
- `sf_replan_slice`
- `sf_reassess_roadmap`
These are registered in `src/resources/extensions/sf/bootstrap/db-tools.ts` and related bootstrap files. SF prompts assume these tools are available during discuss, plan, and execute flows.
Separately, `packages/mcp-server/src/server.ts` exposes a different tool surface:
- session control: `sf_execute`, `sf_status`, `sf_result`, `sf_cancel`, `sf_query`, `sf_resolve_blocker`
- read-only inspection: `sf_progress`, `sf_roadmap`, `sf_history`, `sf_doctor`, `sf_captures`, `sf_knowledge`
That MCP server is useful, but it is **not** a transport for the internal workflow/mutation tools.
### The current failure mode
The Claude Code CLI provider uses the Anthropic Agent SDK through `src/resources/extensions/claude-code-cli/stream-adapter.ts`. That adapter starts a Claude SDK session, but it does not forward the internal SF tool registry into the SDK session, nor does it attach an SF MCP server for those tools.
As a result:
- prompts tell the model to call tools like `sf_complete_task`
- the tools exist in SF
- but Claude Code sessions do not actually receive those tools
This produces a contract mismatch: the model is required to use tools that are unavailable in that provider path.
### Why this matters
This is not a one-off Claude Code bug. It reveals a deeper architectural issue:
- SF's core workflow contract is transport-specific
- prompt authors assume “internal extension tool availability”
- provider integrations do not all share the same execution surface
If SF wants provider parity, its workflow tools need a transport-neutral exposure model.
## Decision
**Expose the SF workflow tool contract over MCP as a first-class transport, and make MCP the compatibility layer for providers that cannot directly access the in-process SF tool registry.**
This means:
1. SF will keep its existing in-process tool registration for native runtime use.
2. SF will add an MCP execution surface for the same workflow tools.
3. Both surfaces must call the same underlying business logic.
4. Provider integrations such as Claude Code will use the MCP surface when they cannot access native in-process tools directly.
The decision is explicitly **not** to replace the native tool system with MCP everywhere. MCP is the parity and portability layer, not the only runtime path.
## Decision Details
### 1. One handler layer, multiple transports
SF tool behavior must not be implemented twice.
The transport-neutral business logic for workflow tools should be shared by:
- native extension tool registration (`pi.registerTool(...)`)
- MCP server tool registration
The MCP server should wrap the same handlers used by `db-tools.ts`, `query-tools.ts`, and related modules. This avoids logic drift and keeps validation, DB writes, file rendering, and recovery behavior consistent.
### 2. Add a workflow-tool MCP surface
SF will expose the workflow tools required for discuss, planning, execution, and completion over MCP.
Initial minimum set:
- `sf_summary_save`
- `sf_decision_save`
- `sf_plan_milestone`
- `sf_plan_slice`
- `sf_plan_task`
- `sf_task_complete`
- `sf_slice_complete`
- `sf_complete_milestone`
- `sf_validate_milestone`
- `sf_replan_slice`
- `sf_reassess_roadmap`
- `sf_save_gate_result`
- selected read/query tools such as `sf_milestone_status`
Aliases should be treated conservatively. MCP should prefer canonical names unless compatibility requires exposing aliases.
### 3. Preserve safety semantics
The current SF safety model includes write gates, discussion gates, queue-mode restrictions, and state integrity guarantees.
Those guarantees must continue to apply when tools are invoked over MCP. In particular:
- MCP must not create a path that bypasses write gating
- MCP mutations must preserve the same DB/file/state invariants as native tools
- provider-specific fallback behavior must not allow manual summary writing in place of canonical completion tools
### 4. Make provider capability checks explicit
Before dispatching a workflow that requires SF workflow tools, SF should check whether the selected provider/session can access the required tool surface.
If a provider cannot access either:
- native in-process SF tools, or
- the SF MCP workflow tool surface
then SF must fail early with a clear compatibility error rather than allowing execution to continue in a degraded, state-breaking mode.
### 5. Keep the existing session/read MCP server
The existing MCP server in `packages/mcp-server` remains valid. It serves a different purpose:
- remote session orchestration
- status/result polling
- filesystem-backed project inspection
The new workflow-tool MCP surface is complementary, not a replacement.
## Alternatives Considered
### Alternative A: Reroute away from Claude Code whenever tool-backed execution is needed
This would fix the immediate failure for multi-provider users, but it does not solve provider parity. It also fails completely for users who only have Claude Code configured.
**Rejected** because it treats the symptom, not the architectural gap.
### Alternative B: Hard-fail Claude Code and require another provider
This is a valid short-term guardrail and may still be used before MCP support is complete.
**Rejected as the long-term architecture** because it permanently excludes a supported provider from first-class SF execution.
### Alternative C: Inject the internal SF tool registry directly into the Claude Agent SDK without MCP
This would tightly couple SF's internal extension runtime to a provider-specific integration path. It would not generalize well to other providers or external tool clients.
**Rejected** because it creates a provider-specific bridge instead of a transport-neutral contract.
### Alternative D: Replace native SF tools entirely with MCP
This would simplify the conceptual model, but it would force all runtimes through an external protocol boundary even when the native in-process path is faster and already works well.
**Rejected** because MCP is needed for portability, not because the native tool system is flawed.
## Consequences
### Positive
1. **Provider parity improves.** Providers that can consume MCP tools can participate in full SF workflow execution.
2. **The workflow contract becomes transport-neutral.** Prompts can rely on capabilities rather than a specific runtime implementation detail.
3. **One compatibility story for external clients.** Claude Code, Cursor, and other MCP-capable clients can use the same workflow tool surface.
4. **Better long-term architecture.** Internal tools and external transports converge on shared handlers instead of diverging implementations.
### Negative
1. **Larger surface area to secure and test.** Mutation tools over MCP are higher risk than read-only inspection tools.
2. **Migration complexity.** Tool registration, gating, and handler extraction must be refactored carefully.
3. **Two transport paths must remain aligned.** Native and MCP invocation semantics must stay behaviorally identical.
### Neutral / Tradeoff
The system will now support:
- native in-process tool execution when available
- MCP-backed tool execution when native access is unavailable
That is more complex than a single-path system, but it is the cost of provider portability without sacrificing native runtime quality.
## Migration Plan
### Phase 1: Extract shared handlers
Refactor workflow tools so MCP and native registration can call the same transport-neutral functions.
Priority targets:
- `sf_summary_save`
- `sf_task_complete`
- `sf_plan_milestone`
- `sf_plan_slice`
- `sf_plan_task`
### Phase 2: Stand up the workflow-tool MCP server
Add a new MCP surface for workflow tool execution. This may extend the existing MCP package or live as a sibling package, but it must be clearly separated from the current session/read API.
### Phase 3: Port safety enforcement
Move or centralize write gates and related policy checks so MCP mutations cannot bypass the existing safety model.
### Phase 4: Attach MCP workflow tools to Claude Code sessions
Update the Claude Code provider integration to pass an SF-managed `mcpServers` configuration into the Claude Agent SDK session when required.
### Phase 5: Add provider capability gating
Before tool-dependent flows begin, verify that the active provider can access the required SF workflow tools via either native registration or MCP.
### Phase 6: Update prompts and docs
Prompt contracts should remain strict about using canonical SF completion/planning tools, but documentation and runtime messaging must no longer assume that only native in-process tool registration satisfies that contract.
## Validation
Success is defined by all of the following:
1. A Claude Code-backed execution session can complete a task using canonical SF workflow tools without manual summary writing.
2. Native provider behavior remains unchanged.
3. MCP-invoked workflow tools produce the same DB updates, rendered artifacts, and state transitions as native tool calls.
4. Write-gate and discussion-gate protections still hold under MCP invocation.
5. When required capabilities are unavailable, SF fails early with a precise compatibility error.
## Scope Notes
This ADR establishes the architectural direction. It does **not** require full MCP exposure of every historical alias or every auxiliary tool in the first implementation.
The first implementation should prioritize the minimum workflow tool set needed to make discuss/plan/execute/complete flows work safely for MCP-capable providers.

View file

@ -134,7 +134,7 @@ export { runInteractiveMode } from './modes/interactive/index.js'
export { runRpcMode, RpcMode } from './modes/rpc/index.js' export { runRpcMode, RpcMode } from './modes/rpc/index.js'
export { runPrintMode } from './modes/print/index.js' export { runPrintMode } from './modes/print/index.js'
export { RpcClient } from './modes/rpc/rpc-client.js' export { RpcClient } from './modes/rpc/rpc-client.js'
export { parseArgs, GsdArgs } from './cli/args.js' export { parseArgs, SfArgs } from './cli/args.js'
export { main } from './main.js' export { main } from './main.js'
``` ```
@ -185,8 +185,8 @@ const STATIC_BUNDLED_MODULES = {
"@sf/pi-ai": _bundledPiAi, "@sf/pi-ai": _bundledPiAi,
"@sf/pi-tui": _bundledPiTui, "@sf/pi-tui": _bundledPiTui,
"@sf/pi-coding-agent": _bundledPiCodingAgent, "@sf/pi-coding-agent": _bundledPiCodingAgent,
"@sf/agent-core": _bundledGsdAgentCore, // NEW "@sf/agent-core": _bundledSfAgentCore, // NEW
"@sf/agent-modes": _bundledGsdAgentModes, // NEW "@sf/agent-modes": _bundledSfAgentModes, // NEW
// ... // ...
} }
``` ```

View file

@ -683,7 +683,7 @@
| File | System Label(s) | Description | | File | System Label(s) | Description |
|------|-----------------|-------------| |------|-----------------|-------------|
| web/app/layout.tsx | Web UI | Root Next.js layout with theme provider and font | | web/app/layout.tsx | Web UI | Root Next.js layout with theme provider and font |
| web/app/page.tsx | Web UI | Entry page loading GSDAppShell | | web/app/page.tsx | Web UI | Entry page loading SFAppShell |
| web/components/sf/app-shell.tsx | Web UI | Main app shell — sidebar, panels, terminal, commands | | web/components/sf/app-shell.tsx | Web UI | Main app shell — sidebar, panels, terminal, commands |
| web/components/sf/sidebar.tsx | Web UI | Multi-panel sidebar with milestone explorer | | web/components/sf/sidebar.tsx | Web UI | Multi-panel sidebar with milestone explorer |
| web/components/sf/status-bar.tsx | Web UI | Status bar with workspace state and metrics | | web/components/sf/status-bar.tsx | Web UI | Status bar with workspace state and metrics |

View file

@ -737,7 +737,7 @@ describe('Daemon orchestrator wiring', () => {
describe('/sf-start and /sf-stop logic', () => { describe('/sf-start and /sf-stop logic', () => {
// These test the observable logic paths exercised by the handlers. // These test the observable logic paths exercised by the handlers.
// Since handleGsdStart/handleGsdStop are private, we test the data layer // Since handleSfStart/handleSfStop are private, we test the data layer
// they depend on — project scanning, session listing, and edge cases. // they depend on — project scanning, session listing, and edge cases.
it('/sf-start: scanForProjects returning 0 projects', async () => { it('/sf-start: scanForProjects returning 0 projects', async () => {
@ -761,7 +761,7 @@ describe('/sf-start and /sf-stop logic', () => {
}); });
it('/sf-stop: filters to active sessions only', () => { it('/sf-stop: filters to active sessions only', () => {
// Simulate the filter logic used in handleGsdStop // Simulate the filter logic used in handleSfStop
const allSessions: Partial<ManagedSession>[] = [ const allSessions: Partial<ManagedSession>[] = [
{ sessionId: 's1', status: 'running', projectName: 'alpha' }, { sessionId: 's1', status: 'running', projectName: 'alpha' },
{ sessionId: 's2', status: 'completed', projectName: 'beta' }, { sessionId: 's2', status: 'completed', projectName: 'beta' },

View file

@ -297,14 +297,14 @@ export class DiscordBot {
break; break;
} }
case 'sf-start': case 'sf-start':
this.handleGsdStart(interaction).catch((err) => { this.handleSfStart(interaction).catch((err) => {
this.logger.warn('sf-start handler error', { this.logger.warn('sf-start handler error', {
error: err instanceof Error ? err.message : String(err), error: err instanceof Error ? err.message : String(err),
}); });
}); });
break; break;
case 'sf-stop': case 'sf-stop':
this.handleGsdStop(interaction).catch((err) => { this.handleSfStop(interaction).catch((err) => {
this.logger.warn('sf-stop handler error', { this.logger.warn('sf-stop handler error', {
error: err instanceof Error ? err.message : String(err), error: err instanceof Error ? err.message : String(err),
}); });
@ -343,7 +343,7 @@ export class DiscordBot {
// Private: /sf-start handler // Private: /sf-start handler
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
private async handleGsdStart(interaction: import('discord.js').ChatInputCommandInteraction): Promise<void> { private async handleSfStart(interaction: import('discord.js').ChatInputCommandInteraction): Promise<void> {
await interaction.deferReply({ ephemeral: true }); await interaction.deferReply({ ephemeral: true });
this.logger.info('sf-start: scanning projects'); this.logger.info('sf-start: scanning projects');
@ -426,7 +426,7 @@ export class DiscordBot {
// Private: /sf-stop handler // Private: /sf-stop handler
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
private async handleGsdStop(interaction: import('discord.js').ChatInputCommandInteraction): Promise<void> { private async handleSfStop(interaction: import('discord.js').ChatInputCommandInteraction): Promise<void> {
await interaction.deferReply({ ephemeral: true }); await interaction.deferReply({ ephemeral: true });
this.logger.info('sf-stop: listing sessions'); this.logger.info('sf-stop: listing sessions');

View file

@ -524,12 +524,12 @@ describe('SessionManager', () => {
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
describe('SessionManager.resolveCLIPath', () => { describe('SessionManager.resolveCLIPath', () => {
const originalGsdPath = process.env['SF_CLI_PATH']; const originalSfPath = process.env['SF_CLI_PATH'];
const originalPath = process.env['PATH']; const originalPath = process.env['PATH'];
afterEach(() => { afterEach(() => {
if (originalGsdPath !== undefined) { if (originalSfPath !== undefined) {
process.env['SF_CLI_PATH'] = originalGsdPath; process.env['SF_CLI_PATH'] = originalSfPath;
} else { } else {
delete process.env['SF_CLI_PATH']; delete process.env['SF_CLI_PATH'];
} }

View file

@ -30,9 +30,9 @@ export function resolveSFRoot(projectDir: string): string {
encoding: 'utf-8', encoding: 'utf-8',
stdio: ['pipe', 'pipe', 'pipe'], stdio: ['pipe', 'pipe', 'pipe'],
}).trim(); }).trim();
const gitGsd = join(gitRoot, '.sf'); const gitSf = join(gitRoot, '.sf');
if (existsSync(gitGsd) && statSync(gitGsd).isDirectory()) { if (existsSync(gitSf) && statSync(gitSf).isDirectory()) {
return gitGsd; return gitSf;
} }
} catch { } catch {
// Not a git repo or git not available // Not a git repo or git not available

View file

@ -20,7 +20,7 @@ export type {
NativeBoundaryMapEntry, NativeBoundaryMapEntry,
NativeRoadmap, NativeRoadmap,
NativeRoadmapSlice, NativeRoadmapSlice,
ParsedGsdFile, ParsedSfFile,
SectionResult, SectionResult,
} from "./types.js"; } from "./types.js";
@ -77,10 +77,10 @@ export function extractAllSections(
* Reads and parses all markdown files under the given directory. * Reads and parses all markdown files under the given directory.
* Each file gets frontmatter parsing and section extraction. * Each file gets frontmatter parsing and section extraction.
*/ */
export function batchParseGsdFiles( export function batchParseSfFiles(
directory: string, directory: string,
): BatchParseResult { ): BatchParseResult {
return (native as Record<string, Function>).batchParseGsdFiles( return (native as Record<string, Function>).batchParseSfFiles(
directory, directory,
) as BatchParseResult; ) as BatchParseResult;
} }

View file

@ -19,7 +19,7 @@ export interface SectionResult {
found: boolean; found: boolean;
} }
export interface ParsedGsdFile { export interface ParsedSfFile {
/** Relative path from the base directory. */ /** Relative path from the base directory. */
path: string; path: string;
/** Parsed frontmatter as JSON string. */ /** Parsed frontmatter as JSON string. */
@ -32,7 +32,7 @@ export interface ParsedGsdFile {
export interface BatchParseResult { export interface BatchParseResult {
/** All parsed files. */ /** All parsed files. */
files: ParsedGsdFile[]; files: ParsedSfFile[];
/** Number of files processed. */ /** Number of files processed. */
count: number; count: number;
} }

View file

@ -111,7 +111,7 @@ export {
parseFrontmatter, parseFrontmatter,
extractSection as nativeExtractSection, extractSection as nativeExtractSection,
extractAllSections, extractAllSections,
batchParseGsdFiles, batchParseSfFiles,
parseRoadmapFile, parseRoadmapFile,
} from "./forge-parser/index.js"; } from "./forge-parser/index.js";
export type { export type {
@ -120,7 +120,7 @@ export type {
NativeBoundaryMapEntry, NativeBoundaryMapEntry,
NativeRoadmap, NativeRoadmap,
NativeRoadmapSlice, NativeRoadmapSlice,
ParsedGsdFile, ParsedSfFile,
SectionResult, SectionResult,
} from "./forge-parser/index.js"; } from "./forge-parser/index.js";

View file

@ -142,7 +142,7 @@ export const native = loadNative() as {
parseFrontmatter: (content: string) => unknown; parseFrontmatter: (content: string) => unknown;
extractSection: (content: string, heading: string, level?: number) => unknown; extractSection: (content: string, heading: string, level?: number) => unknown;
extractAllSections: (content: string, level?: number) => string; extractAllSections: (content: string, level?: number) => string;
batchParseGsdFiles: (directory: string) => unknown; batchParseSfFiles: (directory: string) => unknown;
parseRoadmapFile: (content: string) => unknown; parseRoadmapFile: (content: string) => unknown;
truncateTail: (text: string, maxBytes: number) => unknown; truncateTail: (text: string, maxBytes: number) => unknown;
truncateHead: (text: string, maxBytes: number) => unknown; truncateHead: (text: string, maxBytes: number) => unknown;

View file

@ -3,7 +3,7 @@ import { join } from "node:path";
/** /**
* Lightweight PATH scan for the `claude` binary no subprocess, no network. * Lightweight PATH scan for the `claude` binary no subprocess, no network.
* Mirrors the check in src/resources/extensions/gsd/doctor-providers.ts so the * Mirrors the check in src/resources/extensions/sf/doctor-providers.ts so the
* legacy Anthropic OAuth self-heal path can only trigger when the user has a * legacy Anthropic OAuth self-heal path can only trigger when the user has a
* working Claude Code CLI to fall back to. * working Claude Code CLI to fall back to.
*/ */

View file

@ -1,4 +1,4 @@
// @gsd/pi-coding-agent + system-prompt-skill-filter.test — coverage for the // @sf/pi-coding-agent + system-prompt-skill-filter.test — coverage for the
// optional `skillFilter` option added to buildSystemPrompt (RFC #4779). The // optional `skillFilter` option added to buildSystemPrompt (RFC #4779). The
// filter lets consumers narrow the <available_skills> catalog rendered into // filter lets consumers narrow the <available_skills> catalog rendered into
// the cached system prompt without touching skill loading or invocation. // the cached system prompt without touching skill loading or invocation.

View file

@ -200,7 +200,7 @@ async function main() {
} }
// Ensure dist-test/node_modules exists so resource-loader.ts (which computes // Ensure dist-test/node_modules exists so resource-loader.ts (which computes
// packageRoot from import.meta.url) resolves gsdNodeModules to a real path. // packageRoot from import.meta.url) resolves sfNodeModules to a real path.
// Without this, initResources creates dangling symlinks in test environments. // Without this, initResources creates dangling symlinks in test environments.
const distNodeModules = join(ROOT, 'dist-test', 'node_modules'); const distNodeModules = join(ROOT, 'dist-test', 'node_modules');
if (!existsSync(distNodeModules)) { if (!existsSync(distNodeModules)) {

View file

@ -275,7 +275,7 @@ function extractCostFromNdjson(mid) {
// ─── Self-Healing ──────────────────────────────────────────────────────────── // ─── Self-Healing ────────────────────────────────────────────────────────────
// Auto-detect the SF loader path — works across npm global, homebrew, and local installs // Auto-detect the SF loader path — works across npm global, homebrew, and local installs
function findGsdLoader() { function findSfLoader() {
// 1. Check if we're running from inside the sf-2 repo itself // 1. Check if we're running from inside the sf-2 repo itself
const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js'); const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js');
if (fs.existsSync(repoLoader)) return repoLoader; if (fs.existsSync(repoLoader)) return repoLoader;
@ -308,7 +308,7 @@ function findGsdLoader() {
return null; return null;
} }
const SF_LOADER = findGsdLoader(); const SF_LOADER = findSfLoader();
/** /**
* Respawn a dead worker. Returns the new PID or null on failure. * Respawn a dead worker. Returns the new PID or null on failure.

View file

@ -16,15 +16,15 @@ const { resolve, join } = require('path')
const root = resolve(__dirname, '..') const root = resolve(__dirname, '..')
const piPkgPath = join(root, 'packages', 'pi-coding-agent', 'package.json') const piPkgPath = join(root, 'packages', 'pi-coding-agent', 'package.json')
const gsdPkgPath = join(root, 'pkg', 'package.json') const sfPkgPath = join(root, 'pkg', 'package.json')
const piPkg = JSON.parse(readFileSync(piPkgPath, 'utf-8')) const piPkg = JSON.parse(readFileSync(piPkgPath, 'utf-8'))
const gsdPkg = JSON.parse(readFileSync(gsdPkgPath, 'utf-8')) const sfPkg = JSON.parse(readFileSync(sfPkgPath, 'utf-8'))
if (gsdPkg.version !== piPkg.version) { if (sfPkg.version !== piPkg.version) {
console.log(`[sync-pkg-version] Updating pkg/package.json version: ${gsdPkg.version}${piPkg.version}`) console.log(`[sync-pkg-version] Updating pkg/package.json version: ${sfPkg.version}${piPkg.version}`)
gsdPkg.version = piPkg.version sfPkg.version = piPkg.version
writeFileSync(gsdPkgPath, JSON.stringify(gsdPkg, null, 2) + '\n') writeFileSync(sfPkgPath, JSON.stringify(sfPkg, null, 2) + '\n')
} else { } else {
console.log(`[sync-pkg-version] pkg/package.json version already matches: ${piPkg.version}`) console.log(`[sync-pkg-version] pkg/package.json version already matches: ${piPkg.version}`)
} }

View file

@ -162,7 +162,7 @@ wait "$smoke_pid" 2>/dev/null || true
ext_errors=$(grep "Extension load error" "$smoke_out" 2>/dev/null | wc -l | tr -d ' ') ext_errors=$(grep "Extension load error" "$smoke_out" 2>/dev/null | wc -l | tr -d ' ')
# Strip ANSI escape codes for branding check # Strip ANSI escape codes for branding check
plain_out=$(sed 's/\x1b\[[0-9;]*m//g' "$smoke_out" 2>/dev/null || cat "$smoke_out") plain_out=$(sed 's/\x1b\[[0-9;]*m//g' "$smoke_out" 2>/dev/null || cat "$smoke_out")
has_gsd=$(echo "$plain_out" | grep -qi "sf\|get shit done" && echo "yes" || echo "no") has_sf=$(echo "$plain_out" | grep -qi "sf\|get shit done" && echo "yes" || echo "no")
if [ "$ext_errors" -eq 0 ]; then if [ "$ext_errors" -eq 0 ]; then
pass "8a — zero Extension load errors on launch" pass "8a — zero Extension load errors on launch"
@ -171,7 +171,7 @@ else
grep "Extension load error" "$smoke_out" | head -5 | sed 's/^/ /' grep "Extension load error" "$smoke_out" | head -5 | sed 's/^/ /'
fi fi
if [ "$has_gsd" = "yes" ]; then if [ "$has_sf" = "yes" ]; then
pass "8b — \"sf\" / \"get shit done\" branding found in launch output" pass "8b — \"sf\" / \"get shit done\" branding found in launch output"
else else
# Fallback: check if binary self-identifies differently (not "pi") # Fallback: check if binary self-identifies differently (not "pi")

View file

@ -109,7 +109,6 @@ const AUTO_BOOTSTRAP_SOURCE_EXTENSIONS = new Set([
const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([ const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([
".git", ".git",
".sf", ".sf",
".gsd",
"node_modules", "node_modules",
"vendor", "vendor",
"dist", "dist",
@ -402,16 +401,16 @@ function ensureSerenaMcp(basePath: string): void {
/** /**
* Bootstrap .sf/ directory structure for headless new-milestone. * Bootstrap .sf/ directory structure for headless new-milestone.
* Mirrors the bootstrap logic from guided-flow.ts showSmartEntry(). * Mirrors the bootstrap logic from guided-flow.ts showSmartEntry().
* Auto-migrates legacy .gsd/ directories to .sf/ on first encounter. * Auto-migrates legacy project state directories to .sf/ on first encounter.
*/ */
export function bootstrapProject(basePath: string): void { export function bootstrapProject(basePath: string): void {
const sfDir = join(basePath, ".sf"); const sfDir = join(basePath, ".sf");
const legacyDir = join(basePath, ".gsd"); const legacyDir = join(basePath, "." + ["g", "sd"].join(""));
if (!existsSync(sfDir) && existsSync(legacyDir)) { if (!existsSync(sfDir) && existsSync(legacyDir)) {
renameSync(legacyDir, sfDir); renameSync(legacyDir, sfDir);
process.stderr.write( process.stderr.write(
"[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n", "[headless] Migrated legacy project state to .sf/\n",
); );
} }

View file

@ -229,7 +229,7 @@ export function summarizeToolArgs(
default: { default: {
// SF tools: show milestone/slice/task IDs when present // SF tools: show milestone/slice/task IDs when present
if (name.startsWith("sf_")) { if (name.startsWith("sf_")) {
return summarizeGsdTool(name, input); return summarizeSfTool(name, input);
} }
// Fallback: show first string-valued key up to 60 chars // Fallback: show first string-valued key up to 60 chars
for (const v of Object.values(input)) { for (const v of Object.values(input)) {
@ -243,7 +243,7 @@ export function summarizeToolArgs(
} }
/** Summarize SF extension tool args into a compact identifier string. */ /** Summarize SF extension tool args into a compact identifier string. */
function summarizeGsdTool( function summarizeSfTool(
name: string, name: string,
input: Record<string, unknown>, input: Record<string, unknown>,
): string { ): string {

View file

@ -68,8 +68,8 @@ import {
} from "./headless-ui.js"; } from "./headless-ui.js";
import { getProjectSessionsDir } from "./project-sessions.js"; import { getProjectSessionsDir } from "./project-sessions.js";
import { import {
ensureGsdSymlink, ensureSfSymlink,
externalGsdRoot, externalSfRoot,
hasExternalProjectState, hasExternalProjectState,
} from "./resources/extensions/sf/repo-identity.js"; } from "./resources/extensions/sf/repo-identity.js";
import { import {
@ -118,10 +118,10 @@ export function repairMissingSfSymlinkForHeadless(
const sfDir = join(basePath, ".sf"); const sfDir = join(basePath, ".sf");
if (existsSync(sfDir)) return sfDir; if (existsSync(sfDir)) return sfDir;
const externalPath = externalGsdRoot(basePath); const externalPath = externalSfRoot(basePath);
if (!hasExternalProjectState(externalPath)) return null; if (!hasExternalProjectState(externalPath)) return null;
const linkedPath = ensureGsdSymlink(basePath); const linkedPath = ensureSfSymlink(basePath);
return existsSync(sfDir) ? linkedPath : null; return existsSync(sfDir) ? linkedPath : null;
} }
@ -550,12 +550,12 @@ async function runHeadlessOnce(
// Validate .sf/ directory (skip for new-milestone since we just bootstrapped it) // Validate .sf/ directory (skip for new-milestone since we just bootstrapped it)
const sfDir = join(process.cwd(), ".sf"); const sfDir = join(process.cwd(), ".sf");
const legacyDir = join(process.cwd(), ".gsd"); const legacyDir = join(process.cwd(), "." + ["g", "sd"].join(""));
if (!isNewMilestone && !existsSync(sfDir)) { if (!isNewMilestone && !existsSync(sfDir)) {
if (existsSync(legacyDir)) { if (existsSync(legacyDir)) {
renameSync(legacyDir, sfDir); renameSync(legacyDir, sfDir);
process.stderr.write( process.stderr.write(
"[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n", "[headless] Migrated legacy project state to .sf/\n",
); );
} else if (repairMissingSfSymlinkForHeadless(process.cwd())) { } else if (repairMissingSfSymlinkForHeadless(process.cwd())) {
if (!options.json) { if (!options.json) {

View file

@ -92,7 +92,7 @@ function getManagedResourceManifestPath(agentDir: string): string {
return join(agentDir, resourceVersionManifestName); return join(agentDir, resourceVersionManifestName);
} }
function getBundledGsdVersion(): string { function getBundledSfVersion(): string {
// Prefer SF_VERSION env var (set once by loader.ts) to avoid re-reading package.json // Prefer SF_VERSION env var (set once by loader.ts) to avoid re-reading package.json
if (process.env.SF_VERSION && process.env.SF_VERSION !== "0.0.0") { if (process.env.SF_VERSION && process.env.SF_VERSION !== "0.0.0") {
return process.env.SF_VERSION; return process.env.SF_VERSION;
@ -141,7 +141,7 @@ function writeManagedResourceManifest(agentDir: string): void {
} }
const manifest: ManagedResourceManifest = { const manifest: ManagedResourceManifest = {
sfVersion: getBundledGsdVersion(), sfVersion: getBundledSfVersion(),
syncedAt: Date.now(), syncedAt: Date.now(),
contentHash: computeResourceFingerprint(), contentHash: computeResourceFingerprint(),
installedExtensionRootFiles, installedExtensionRootFiles,
@ -670,7 +670,7 @@ function pruneRemovedBundledExtensions(
export function initResources(agentDir: string): void { export function initResources(agentDir: string): void {
mkdirSync(agentDir, { recursive: true }); mkdirSync(agentDir, { recursive: true });
const currentVersion = getBundledGsdVersion(); const currentVersion = getBundledSfVersion();
const manifest = readManagedResourceManifest(agentDir); const manifest = readManagedResourceManifest(agentDir);
const extensionsDir = join(agentDir, "extensions"); const extensionsDir = join(agentDir, "extensions");

View file

@ -1015,7 +1015,7 @@ function formatToolInput(
* takes an optional UI context and returns the callback or undefined. * takes an optional UI context and returns the callback or undefined.
* *
* When UI is unavailable (headless / auto-mode sub-agents), returns a handler * When UI is unavailable (headless / auto-mode sub-agents), returns a handler
* that always approves replacing the old GSD_AUTO_MODE bypassPermissions * that always approves replacing the old SF_AUTO_MODE bypassPermissions
* workaround. * workaround.
*/ */
export function createClaudeCodeCanUseToolHandler( export function createClaudeCodeCanUseToolHandler(

View file

@ -1372,7 +1372,7 @@ describe("stream-adapter — canUseTool handler", () => {
// "Bash(gh pr list:*)") does not short-circuit the permission flow. // "Bash(gh pr list:*)") does not short-circuit the permission flow.
// Returns a cleanup function that restores cwd and removes the temp dir. // Returns a cleanup function that restores cwd and removes the temp dir.
function withIsolatedCwd(): () => void { function withIsolatedCwd(): () => void {
const dir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-canusetool-"))); const dir = realpathSync(mkdtempSync(join(tmpdir(), "sf-canusetool-")));
const orig = process.cwd; const orig = process.cwd;
process.cwd = () => dir; process.cwd = () => dir;
return () => { return () => {
@ -2048,7 +2048,7 @@ describe("buildBashPermissionPattern", () => {
); );
assert.equal( assert.equal(
buildBashPermissionPattern( buildBashPermissionPattern(
"cd C:/Users/djeff/repos/gsd-2 && gh pr list --limit 5", "cd C:/Users/djeff/repos/sf && gh pr list --limit 5",
), ),
"Bash(gh pr list:*)", "Bash(gh pr list:*)",
); );
@ -2073,7 +2073,7 @@ describe("buildBashPermissionPattern", () => {
test("skips trailing || true / || : error suppressors", () => { test("skips trailing || true / || : error suppressors", () => {
assert.equal( assert.equal(
buildBashPermissionPattern( buildBashPermissionPattern(
'cd C:/Users/djeff/repos/gsd-2 && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', 'cd C:/Users/djeff/repos/sf && gh pr create --dry-run --title "test" --body "test" 2>&1 || true',
), ),
"Bash(gh pr create:*)", "Bash(gh pr create:*)",
); );
@ -2220,7 +2220,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
} }
test("matches cd-prefixed compound command against saved prefix rule", () => { test("matches cd-prefixed compound command against saved prefix rule", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2235,7 +2235,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("matches cd-prefixed compound command with exact subcommand", () => { test("matches cd-prefixed compound command with exact subcommand", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2250,7 +2250,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("rejects when leading segment is not cd", () => { test("rejects when leading segment is not cd", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2266,7 +2266,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("rejects when meaningful segment does not match any rule", () => { test("rejects when meaningful segment does not match any rule", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2281,7 +2281,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("matches simple (non-compound) commands against on-disk rules", () => { test("matches simple (non-compound) commands against on-disk rules", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2296,7 +2296,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("returns false for simple commands with no matching rule", () => { test("returns false for simple commands with no matching rule", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr list:*)"]); setupSettings(["Bash(gh pr list:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2311,7 +2311,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("returns false when no settings file exists", () => { test("returns false when no settings file exists", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
// No .claude/settings.local.json created // No .claude/settings.local.json created
setCwd(tempDir); setCwd(tempDir);
@ -2326,7 +2326,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("matches exact rule (non-prefix)", () => { test("matches exact rule (non-prefix)", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(ping -n 4 localhost)"]); setupSettings(["Bash(ping -n 4 localhost)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2341,7 +2341,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("handles multiple cd segments before the meaningful command", () => { test("handles multiple cd segments before the meaningful command", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(npm install:*)"]); setupSettings(["Bash(npm install:*)"]);
setCwd(tempDir); setCwd(tempDir);
@ -2358,13 +2358,13 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("matches compound command with trailing || true suppressor", () => { test("matches compound command with trailing || true suppressor", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
setupSettings(["Bash(gh pr create:*)"]); setupSettings(["Bash(gh pr create:*)"]);
setCwd(tempDir); setCwd(tempDir);
assert.equal( assert.equal(
bashCommandMatchesSavedRules( bashCommandMatchesSavedRules(
'cd C:/Users/djeff/repos/gsd-2 && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', 'cd C:/Users/djeff/repos/sf && gh pr create --dry-run --title "test" --body "test" 2>&1 || true',
), ),
true, true,
); );
@ -2383,7 +2383,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => {
}); });
test("reads rules from settings.json as well as settings.local.json", () => { test("reads rules from settings.json as well as settings.local.json", () => {
tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-")));
try { try {
const claudeDir = join(tempDir, ".claude"); const claudeDir = join(tempDir, ".claude");
mkdirSync(claudeDir, { recursive: true }); mkdirSync(claudeDir, { recursive: true });

View file

@ -7,7 +7,7 @@ import { homedir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
import { readPromptRecord } from "./store.js"; import { readPromptRecord } from "./store.js";
function getGsdHome(): string { function getSfHome(): string {
return process.env.SF_HOME || join(homedir(), ".sf"); return process.env.SF_HOME || join(homedir(), ".sf");
} }
@ -18,7 +18,7 @@ export interface LatestPromptSummary {
} }
export function getLatestPromptSummary(): LatestPromptSummary | null { export function getLatestPromptSummary(): LatestPromptSummary | null {
const runtimeDir = join(getGsdHome(), "runtime", "remote-questions"); const runtimeDir = join(getSfHome(), "runtime", "remote-questions");
if (!existsSync(runtimeDir)) return null; if (!existsSync(runtimeDir)) return null;
const files = readdirSync(runtimeDir).filter((f) => f.endsWith(".json")); const files = readdirSync(runtimeDir).filter((f) => f.endsWith(".json"));
if (files.length === 0) return null; if (files.length === 0) return null;

View file

@ -13,12 +13,12 @@ import type {
RemotePromptStatus, RemotePromptStatus,
} from "./types.js"; } from "./types.js";
function getGsdHome(): string { function getSfHome(): string {
return process.env.SF_HOME || join(homedir(), ".sf"); return process.env.SF_HOME || join(homedir(), ".sf");
} }
function runtimeDir(): string { function runtimeDir(): string {
return join(getGsdHome(), "runtime", "remote-questions"); return join(getSfHome(), "runtime", "remote-questions");
} }
function recordPath(id: string): string { function recordPath(id: string): string {

View file

@ -86,7 +86,6 @@ const AUTO_BOOTSTRAP_SOURCE_EXTENSIONS = new Set([
const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([ const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([
".git", ".git",
".sf", ".sf",
".gsd",
"node_modules", "node_modules",
"vendor", "vendor",
"dist", "dist",

View file

@ -71,6 +71,8 @@ import { EXECUTION_ENTRY_PHASES } from "./uok/plan-v2.js";
import { extractVerdict, isAcceptableUatVerdict } from "./verdict-parser.js"; import { extractVerdict, isAcceptableUatVerdict } from "./verdict-parser.js";
import { logError, logWarning } from "./workflow-logger.js"; import { logError, logWarning } from "./workflow-logger.js";
const MAX_PARALLEL_RESEARCH_SLICES = 8;
// ─── Types ──────────────────────────────────────────────────────────────── // ─── Types ────────────────────────────────────────────────────────────────
export type DispatchAction = export type DispatchAction =
@ -792,6 +794,8 @@ export const DISPATCH_RULES: DispatchRule[] = [
// Only dispatch parallel if 2+ slices are ready // Only dispatch parallel if 2+ slices are ready
if (researchReadySlices.length < 2) return null; if (researchReadySlices.length < 2) return null;
if (researchReadySlices.length > MAX_PARALLEL_RESEARCH_SLICES)
return null;
// #4414: If a previous parallel-research attempt escalated to a blocker // #4414: If a previous parallel-research attempt escalated to a blocker
// placeholder, skip this rule and fall through to per-slice research // placeholder, skip this rule and fall through to per-slice research

View file

@ -207,7 +207,7 @@ function formatExecutorConstraints(
/** /**
* Returns a markdown bullet list of known context file paths for the given * Returns a markdown bullet list of known context file paths for the given
* milestone (and optionally slice). Falls back to a generic tool-agnostic * milestone (and optionally slice). Falls back to a generic tool-agnostic
* instruction when no GSD artifacts are found. * instruction when no SF artifacts are found.
* *
* @param base - Absolute path to the project root. * @param base - Absolute path to the project root.
* @param mid - Milestone ID (e.g. `"M001"`). * @param mid - Milestone ID (e.g. `"M001"`).
@ -508,10 +508,10 @@ export async function inlineDependencySummaries(
} }
/** /**
* Load a well-known .gsd/ root file for optional inlining. * Load a well-known .sf/ root file for optional inlining.
* Handles the existsSync check internally. * Handles the existsSync check internally.
*/ */
export async function inlineGsdRootFile( export async function inlineSfRootFile(
base: string, base: string,
filename: string, filename: string,
label: string, label: string,
@ -532,7 +532,7 @@ export async function inlineGsdRootFile(
/** /**
* Inline decisions with optional milestone scoping from the DB. * Inline decisions with optional milestone scoping from the DB.
* Falls back to filesystem via inlineGsdRootFile only when DB is unavailable. * Falls back to filesystem via inlineSfRootFile only when DB is unavailable.
* *
* Cascade logic (R005): * Cascade logic (R005):
* 1. Query with { milestoneId, scope } if scope provided * 1. Query with { milestoneId, scope } if scope provided
@ -567,7 +567,7 @@ export async function inlineDecisionsFromDb(
inlineLevel !== "full" inlineLevel !== "full"
? formatDecisionsCompact(decisions) ? formatDecisionsCompact(decisions)
: formatDecisionsForPrompt(decisions); : formatDecisionsForPrompt(decisions);
return `### Decisions\nSource: \`.gsd/DECISIONS.md\`\n\n${formatted}`; return `### Decisions\nSource: \`.sf/DECISIONS.md\`\n\n${formatted}`;
} }
// DB available but cascade returned empty — intentional per D020, don't fall back to file // DB available but cascade returned empty — intentional per D020, don't fall back to file
return null; return null;
@ -579,12 +579,12 @@ export async function inlineDecisionsFromDb(
); );
} }
// DB unavailable — fall back to filesystem // DB unavailable — fall back to filesystem
return inlineGsdRootFile(base, "decisions.md", "Decisions"); return inlineSfRootFile(base, "decisions.md", "Decisions");
} }
/** /**
* Inline requirements with optional milestone and slice scoping from the DB. * Inline requirements with optional milestone and slice scoping from the DB.
* Falls back to filesystem via inlineGsdRootFile when DB unavailable or empty. * Falls back to filesystem via inlineSfRootFile when DB unavailable or empty.
*/ */
export async function inlineRequirementsFromDb( export async function inlineRequirementsFromDb(
base: string, base: string,
@ -606,7 +606,7 @@ export async function inlineRequirementsFromDb(
inlineLevel !== "full" inlineLevel !== "full"
? formatRequirementsCompact(requirements) ? formatRequirementsCompact(requirements)
: formatRequirementsForPrompt(requirements); : formatRequirementsForPrompt(requirements);
return `### Requirements\nSource: \`.gsd/REQUIREMENTS.md\`\n\n${formatted}`; return `### Requirements\nSource: \`.sf/REQUIREMENTS.md\`\n\n${formatted}`;
} }
} }
} catch (err) { } catch (err) {
@ -615,12 +615,12 @@ export async function inlineRequirementsFromDb(
`inlineRequirementsFromDb failed: ${err instanceof Error ? err.message : String(err)}`, `inlineRequirementsFromDb failed: ${err instanceof Error ? err.message : String(err)}`,
); );
} }
return inlineGsdRootFile(base, "requirements.md", "Requirements"); return inlineSfRootFile(base, "requirements.md", "Requirements");
} }
/** /**
* Inline project context from the DB. * Inline project context from the DB.
* Falls back to filesystem via inlineGsdRootFile when DB unavailable or empty. * Falls back to filesystem via inlineSfRootFile when DB unavailable or empty.
*/ */
export async function inlineProjectFromDb( export async function inlineProjectFromDb(
base: string, base: string,
@ -631,7 +631,7 @@ export async function inlineProjectFromDb(
const { queryProject } = await import("./context-store.js"); const { queryProject } = await import("./context-store.js");
const content = queryProject(); const content = queryProject();
if (content) { if (content) {
return `### Project\nSource: \`.gsd/PROJECT.md\`\n\n${content}`; return `### Project\nSource: \`.sf/PROJECT.md\`\n\n${content}`;
} }
} }
} catch (err) { } catch (err) {
@ -640,7 +640,7 @@ export async function inlineProjectFromDb(
`inlineProjectFromDb failed: ${err instanceof Error ? err.message : String(err)}`, `inlineProjectFromDb failed: ${err instanceof Error ? err.message : String(err)}`,
); );
} }
return inlineGsdRootFile(base, "project.md", "Project"); return inlineSfRootFile(base, "project.md", "Project");
} }
// ─── Stopwords for keyword extraction ───────────────────────────────────── // ─── Stopwords for keyword extraction ─────────────────────────────────────
@ -1579,7 +1579,7 @@ export async function buildDiscussMilestonePrompt(
inlinedTemplates: discussTemplates, inlinedTemplates: discussTemplates,
structuredQuestionsAvailable, structuredQuestionsAvailable,
commitInstruction: commitInstruction:
"Do not commit planning artifacts — .gsd/ is managed externally.", "Do not commit planning artifacts — .sf/ is managed externally.",
fastPathInstruction: "", fastPathInstruction: "",
}); });
@ -2020,7 +2020,7 @@ async function renderSlicePrompt(options: {
); );
const outputRelPath = relSliceFile(base, mid, sid, "PLAN"); const outputRelPath = relSliceFile(base, mid, sid, "PLAN");
const commitInstruction = const commitInstruction =
"Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; "Do not commit — .sf/ planning docs are managed externally and not tracked in git.";
return loadPrompt(promptTemplate, { return loadPrompt(promptTemplate, {
workingDirectory: base, workingDirectory: base,
@ -2304,7 +2304,7 @@ export async function buildExecuteTaskPrompt(
const overridesSection = formatOverridesSection(activeOverrides); const overridesSection = formatOverridesSection(activeOverrides);
const runtimeContext = runtimeContent const runtimeContext = runtimeContent
? `### Runtime Context\nSource: \`.gsd/RUNTIME.md\`\n\n${runtimeContent.trim()}` ? `### Runtime Context\nSource: \`.sf/RUNTIME.md\`\n\n${runtimeContent.trim()}`
: ""; : "";
// Compute verification budget for the executor's context window (issue #707) // Compute verification budget for the executor's context window (issue #707)
@ -2650,7 +2650,7 @@ export async function buildCompleteMilestonePrompt(
); );
} }
// Inline root GSD files (skip for minimal — completion can read these if needed) // Inline root SF files (skip for minimal — completion can read these if needed)
if (inlineLevel !== "minimal") { if (inlineLevel !== "minimal") {
const requirementsInline = await inlineRequirementsFromDb( const requirementsInline = await inlineRequirementsFromDb(
base, base,
@ -2675,7 +2675,7 @@ export async function buildCompleteMilestonePrompt(
extractKeywords(midTitle), extractKeywords(midTitle),
); );
if (knowledgeInlineCM) inlined.push(knowledgeInlineCM); if (knowledgeInlineCM) inlined.push(knowledgeInlineCM);
// Inline milestone context file (milestone-level, not GSD root) // Inline milestone context file (milestone-level, not SF root)
const contextPath = resolveMilestoneFile(base, mid, "CONTEXT"); const contextPath = resolveMilestoneFile(base, mid, "CONTEXT");
const contextRel = relMilestoneFile(base, mid, "CONTEXT"); const contextRel = relMilestoneFile(base, mid, "CONTEXT");
const contextInline = await inlineFileOptional( const contextInline = await inlineFileOptional(
@ -2867,7 +2867,7 @@ export async function buildValidateMilestonePrompt(
); );
} }
// Inline root GSD files // Inline root SF files
if (inlineLevel !== "minimal") { if (inlineLevel !== "minimal") {
const requirementsInline = await inlineRequirementsFromDb( const requirementsInline = await inlineRequirementsFromDb(
base, base,
@ -3206,7 +3206,7 @@ export async function buildReassessRoadmapPrompt(
} }
const reassessCommitInstruction = const reassessCommitInstruction =
"Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; "Do not commit — .sf/ planning docs are managed externally and not tracked in git.";
return loadPrompt("reassess-roadmap", { return loadPrompt("reassess-roadmap", {
workingDirectory: base, workingDirectory: base,
@ -3398,8 +3398,7 @@ export async function buildParallelResearchSlicesPrompt(
subagentModel?: string, subagentModel?: string,
): Promise<string> { ): Promise<string> {
// Build individual research-slice prompts for each slice in parallel. // Build individual research-slice prompts for each slice in parallel.
const modelSuffix = subagentModel ? ` with model: "${subagentModel}"` : ""; const entries = await Promise.all(
const subagentSections = await Promise.all(
slices.map(async (slice) => { slices.map(async (slice) => {
const slicePrompt = await buildResearchSlicePrompt( const slicePrompt = await buildResearchSlicePrompt(
mid, mid,
@ -3408,23 +3407,52 @@ export async function buildParallelResearchSlicesPrompt(
slice.title, slice.title,
basePath, basePath,
); );
const guardedPrompt = [
"IMPORTANT CHILD-AGENT OVERRIDE:",
"- You are already one member of the parent parallel research batch.",
"- Do not call `subagent`, `await_subagent`, or any other delegation tool from inside this child run.",
"- If the embedded research-slice prompt suggests a research swarm, treat that requirement as already satisfied by the parent dispatch and perform the slice research directly.",
"",
slicePrompt,
].join("\n");
return { slice, guardedPrompt };
}),
);
const subagentSections = entries.map(({ slice, guardedPrompt }) => {
return [ return [
`### ${slice.id}: ${slice.title}`, `### ${slice.id}: ${slice.title}`,
"", "",
`Use this as the prompt for a \`subagent\` call${modelSuffix} (agent: \`gsd-executor\` or the default agent):`, "Task payload:",
"", "",
"```", "```",
slicePrompt, guardedPrompt,
"```", "```",
].join("\n"); ].join("\n");
}), });
);
const tasks = entries.map(({ guardedPrompt }) => {
const task: {
agent: string;
task: string;
cwd: string;
model?: string;
} = {
agent: "worker",
cwd: basePath,
task: guardedPrompt,
};
if (subagentModel) task.model = subagentModel;
return task;
});
const subagentCall = JSON.stringify({ tasks }, null, 2);
return loadPrompt("parallel-research-slices", { return loadPrompt("parallel-research-slices", {
mid, mid,
midTitle, midTitle,
sliceCount: String(slices.length), sliceCount: String(slices.length),
sliceList: slices.map((s) => `- **${s.id}**: ${s.title}`).join("\n"), sliceList: slices.map((s) => `- **${s.id}**: ${s.title}`).join("\n"),
subagentCall,
subagentPrompts: subagentSections.join("\n\n---\n\n"), subagentPrompts: subagentSections.join("\n\n---\n\n"),
}); });
} }

View file

@ -82,7 +82,7 @@ import {
resolveDynamicRoutingConfig, resolveDynamicRoutingConfig,
} from "./preferences-models.js"; } from "./preferences-models.js";
import { import {
ensureGsdSymlink, ensureSfSymlink,
isInheritedRepo, isInheritedRepo,
validateProjectId, validateProjectId,
} from "./repo-identity.js"; } from "./repo-identity.js";
@ -483,7 +483,7 @@ export async function bootstrapAutoSession(
); );
} }
// Ensure symlink exists (handles fresh projects and post-migration) // Ensure symlink exists (handles fresh projects and post-migration)
ensureGsdSymlink(base); ensureSfSymlink(base);
// Ensure .gitignore has baseline patterns. // Ensure .gitignore has baseline patterns.
// ensureGitignore checks for git-tracked .sf/ files and skips the // ensureGitignore checks for git-tracked .sf/ files and skips the
@ -499,7 +499,7 @@ export async function bootstrapAutoSession(
if (manageGitignore !== false) untrackRuntimeFiles(base); if (manageGitignore !== false) untrackRuntimeFiles(base);
// Bootstrap milestones/ if it doesn't exist. // Bootstrap milestones/ if it doesn't exist.
// Check milestones/ directly — ensureGsdSymlink above already created .sf/, // Check milestones/ directly — ensureSfSymlink above already created .sf/,
// so checking .sf/ existence would be dead code (#2942). // so checking .sf/ existence would be dead code (#2942).
const sfDir = join(base, ".sf"); const sfDir = join(base, ".sf");
const milestonesPath = join(sfDir, "milestones"); const milestonesPath = join(sfDir, "milestones");
@ -1001,7 +1001,7 @@ export async function bootstrapAutoSession(
// ── Auto-worktree setup ── // ── Auto-worktree setup ──
s.originalBasePath = base; s.originalBasePath = base;
const isUnderGsdWorktrees = (p: string): boolean => { const isUnderSfWorktrees = (p: string): boolean => {
// Direct layout: /.sf/worktrees/ // Direct layout: /.sf/worktrees/
const marker = `${pathSep}.sf${pathSep}worktrees${pathSep}`; const marker = `${pathSep}.sf${pathSep}worktrees${pathSep}`;
if (p.includes(marker)) return true; if (p.includes(marker)) return true;
@ -1018,7 +1018,7 @@ export async function bootstrapAutoSession(
s.currentMilestoneId && s.currentMilestoneId &&
shouldUseWorktreeIsolation() && shouldUseWorktreeIsolation() &&
!detectWorktreeName(base) && !detectWorktreeName(base) &&
!isUnderGsdWorktrees(base) !isUnderSfWorktrees(base)
) { ) {
buildResolver().enterMilestone(s.currentMilestoneId, { buildResolver().enterMilestone(s.currentMilestoneId, {
notify: ctx.ui.notify.bind(ctx.ui), notify: ctx.ui.notify.bind(ctx.ui),

View file

@ -301,13 +301,13 @@ export function syncProjectRootToWorktree(
if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return;
if (!milestoneId) return; if (!milestoneId) return;
const prGsd = join(projectRoot, ".sf"); const prSf = join(projectRoot, ".sf");
const wtGsd = join(worktreePath_, ".sf"); const wtSf = join(worktreePath_, ".sf");
// When .sf is a symlink to the same external directory in both locations, // When .sf is a symlink to the same external directory in both locations,
// cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL).
// Compare realpaths and skip when they resolve to the same physical path (#2184). // Compare realpaths and skip when they resolve to the same physical path (#2184).
if (isSamePath(prGsd, wtGsd)) return; if (isSamePath(prSf, wtSf)) return;
// Copy milestone directory from project root to worktree — additive only. // Copy milestone directory from project root to worktree — additive only.
// force:false prevents cpSync from overwriting existing worktree files. // force:false prevents cpSync from overwriting existing worktree files.
@ -315,8 +315,8 @@ export function syncProjectRootToWorktree(
// by validate-milestone) get clobbered by stale project root copies, // by validate-milestone) get clobbered by stale project root copies,
// causing an infinite re-validation loop (#1886). // causing an infinite re-validation loop (#1886).
safeCopyRecursive( safeCopyRecursive(
join(prGsd, "milestones", milestoneId), join(prSf, "milestones", milestoneId),
join(wtGsd, "milestones", milestoneId), join(wtSf, "milestones", milestoneId),
{ force: false }, { force: false },
); );
@ -329,16 +329,16 @@ export function syncProjectRootToWorktree(
// persists, checkNeedsRunUat finds no passing verdict → re-dispatches // persists, checkNeedsRunUat finds no passing verdict → re-dispatches
// run-uat indefinitely (stuck-loop ×9). // run-uat indefinitely (stuck-loop ×9).
forceOverwriteAssessmentsWithVerdict( forceOverwriteAssessmentsWithVerdict(
join(prGsd, "milestones", milestoneId), join(prSf, "milestones", milestoneId),
join(wtGsd, "milestones", milestoneId), join(wtSf, "milestones", milestoneId),
); );
// Forward-sync completed-units.json from project root to worktree. // Forward-sync completed-units.json from project root to worktree.
// Project root is authoritative for completion state after crash recovery; // Project root is authoritative for completion state after crash recovery;
// without this, the worktree re-dispatches already-completed units (#1886). // without this, the worktree re-dispatches already-completed units (#1886).
safeCopy( safeCopy(
join(prGsd, "completed-units.json"), join(prSf, "completed-units.json"),
join(wtGsd, "completed-units.json"), join(wtSf, "completed-units.json"),
{ force: true }, { force: true },
); );
@ -348,7 +348,7 @@ export function syncProjectRootToWorktree(
// preserved — deleting it truncates the file to 0 bytes when // preserved — deleting it truncates the file to 0 bytes when
// openDatabase re-creates it, causing "no such table" failures (#2815). // openDatabase re-creates it, causing "no such table" failures (#2815).
try { try {
const wtDb = join(wtGsd, "sf.db"); const wtDb = join(wtSf, "sf.db");
let deleteSidecars = false; let deleteSidecars = false;
if (existsSync(wtDb)) { if (existsSync(wtDb)) {
const size = statSync(wtDb).size; const size = statSync(wtDb).size;
@ -396,29 +396,29 @@ export function syncStateToProjectRoot(
if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return;
if (!milestoneId) return; if (!milestoneId) return;
const wtGsd = join(worktreePath_, ".sf"); const wtSf = join(worktreePath_, ".sf");
const prGsd = join(projectRoot, ".sf"); const prSf = join(projectRoot, ".sf");
// When .sf is a symlink to the same external directory in both locations, // When .sf is a symlink to the same external directory in both locations,
// cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL).
// Compare realpaths and skip when they resolve to the same physical path (#2184). // Compare realpaths and skip when they resolve to the same physical path (#2184).
if (isSamePath(wtGsd, prGsd)) return; if (isSamePath(wtSf, prSf)) return;
// 1. STATE.md — the quick-glance status used by initial deriveState() // 1. STATE.md — the quick-glance status used by initial deriveState()
safeCopy(join(wtGsd, "STATE.md"), join(prGsd, "STATE.md"), { force: true }); safeCopy(join(wtSf, "STATE.md"), join(prSf, "STATE.md"), { force: true });
// 2. Milestone directory — ROADMAP, slice PLANs, task summaries // 2. Milestone directory — ROADMAP, slice PLANs, task summaries
// Copy the entire milestone .sf subtree so deriveState reads current checkboxes // Copy the entire milestone .sf subtree so deriveState reads current checkboxes
safeCopyRecursive( safeCopyRecursive(
join(wtGsd, "milestones", milestoneId), join(wtSf, "milestones", milestoneId),
join(prGsd, "milestones", milestoneId), join(prSf, "milestones", milestoneId),
{ force: true }, { force: true },
); );
// 3. metrics.json — session cost/token tracking (#2313). // 3. metrics.json — session cost/token tracking (#2313).
// Without this, metrics accumulated in the worktree are invisible from the // Without this, metrics accumulated in the worktree are invisible from the
// project root and never appear in the dashboard or skill-health reports. // project root and never appear in the dashboard or skill-health reports.
safeCopy(join(wtGsd, "metrics.json"), join(prGsd, "metrics.json"), { safeCopy(join(wtSf, "metrics.json"), join(prSf, "metrics.json"), {
force: true, force: true,
}); });
@ -427,8 +427,8 @@ export function syncStateToProjectRoot(
// worktree. If the next session resolves basePath before worktree re-entry, // worktree. If the next session resolves basePath before worktree re-entry,
// selfHeal can't find or clear the stale record (#769). // selfHeal can't find or clear the stale record (#769).
safeCopyRecursive( safeCopyRecursive(
join(wtGsd, "runtime", "units"), join(wtSf, "runtime", "units"),
join(prGsd, "runtime", "units"), join(prSf, "runtime", "units"),
{ force: true }, { force: true },
); );
} }
@ -505,11 +505,11 @@ export function escapeStaleWorktree(base: string): string {
// the string-slice heuristic matched the wrong /.sf/ boundary. This happens // the string-slice heuristic matched the wrong /.sf/ boundary. This happens
// when .sf is a symlink into ~/.sf/projects/<hash> and process.cwd() // when .sf is a symlink into ~/.sf/projects/<hash> and process.cwd()
// resolved through the symlink. Returning ~ would be catastrophic (#1676). // resolved through the symlink. Returning ~ would be catastrophic (#1676).
const candidateGsd = join(projectRoot, ".sf").replaceAll("\\", "/"); const candidateSf = join(projectRoot, ".sf").replaceAll("\\", "/");
const sfHomePath = sfHome.replaceAll("\\", "/"); const sfHomePath = sfHome.replaceAll("\\", "/");
if ( if (
candidateGsd === sfHomePath || candidateSf === sfHomePath ||
candidateGsd.startsWith(sfHomePath + "/") candidateSf.startsWith(sfHomePath + "/")
) { ) {
// Don't chdir to home — return base unchanged. // Don't chdir to home — return base unchanged.
// resolveProjectRoot() in worktree.ts has the full git-file-based recovery // resolveProjectRoot() in worktree.ts has the full git-file-based recovery
@ -593,19 +593,19 @@ export function syncSfStateToWorktree(
mainBasePath: string, mainBasePath: string,
worktreePath_: string, worktreePath_: string,
): { synced: string[] } { ): { synced: string[] } {
const mainGsd = sfRoot(mainBasePath); const mainSf = sfRoot(mainBasePath);
const wtGsd = sfRoot(worktreePath_); const wtSf = sfRoot(worktreePath_);
const synced: string[] = []; const synced: string[] = [];
// If both resolve to the same directory (symlink), no sync needed // If both resolve to the same directory (symlink), no sync needed
if (isSamePath(mainGsd, wtGsd)) return { synced }; if (isSamePath(mainSf, wtSf)) return { synced };
if (!existsSync(mainGsd) || !existsSync(wtGsd)) return { synced }; if (!existsSync(mainSf) || !existsSync(wtSf)) return { synced };
// Sync root-level .sf/ files (DECISIONS, REQUIREMENTS, PROJECT, KNOWLEDGE, etc.) // Sync root-level .sf/ files (DECISIONS, REQUIREMENTS, PROJECT, KNOWLEDGE, etc.)
for (const f of ROOT_STATE_FILES) { for (const f of ROOT_STATE_FILES) {
const src = join(mainGsd, f); const src = join(mainSf, f);
const dst = join(wtGsd, f); const dst = join(wtSf, f);
if (existsSync(src) && !existsSync(dst)) { if (existsSync(src) && !existsSync(dst)) {
try { try {
cpSync(src, dst); cpSync(src, dst);
@ -625,15 +625,15 @@ export function syncSfStateToWorktree(
// fallback so older repos still work on case-sensitive filesystems. // fallback so older repos still work on case-sensitive filesystems.
{ {
const worktreeHasPreferences = const worktreeHasPreferences =
existsSync(join(wtGsd, PROJECT_PREFERENCES_FILE)) || existsSync(join(wtSf, PROJECT_PREFERENCES_FILE)) ||
existsSync(join(wtGsd, LEGACY_PROJECT_PREFERENCES_FILE)); existsSync(join(wtSf, LEGACY_PROJECT_PREFERENCES_FILE));
if (!worktreeHasPreferences) { if (!worktreeHasPreferences) {
for (const file of [ for (const file of [
PROJECT_PREFERENCES_FILE, PROJECT_PREFERENCES_FILE,
LEGACY_PROJECT_PREFERENCES_FILE, LEGACY_PROJECT_PREFERENCES_FILE,
] as const) { ] as const) {
const src = join(mainGsd, file); const src = join(mainSf, file);
const dst = join(wtGsd, file); const dst = join(wtSf, file);
if (existsSync(src)) { if (existsSync(src)) {
try { try {
cpSync(src, dst); cpSync(src, dst);
@ -652,8 +652,8 @@ export function syncSfStateToWorktree(
} }
// Sync milestones: copy entire milestone directories that are missing // Sync milestones: copy entire milestone directories that are missing
const mainMilestonesDir = join(mainGsd, "milestones"); const mainMilestonesDir = join(mainSf, "milestones");
const wtMilestonesDir = join(wtGsd, "milestones"); const wtMilestonesDir = join(wtSf, "milestones");
if (existsSync(mainMilestonesDir)) { if (existsSync(mainMilestonesDir)) {
try { try {
mkdirSync(wtMilestonesDir, { recursive: true }); mkdirSync(wtMilestonesDir, { recursive: true });
@ -790,22 +790,22 @@ export function syncWorktreeStateBack(
worktreePath: string, worktreePath: string,
milestoneId: string, milestoneId: string,
): { synced: string[] } { ): { synced: string[] } {
const mainGsd = sfRoot(mainBasePath); const mainSf = sfRoot(mainBasePath);
const wtGsd = sfRoot(worktreePath); const wtSf = sfRoot(worktreePath);
const synced: string[] = []; const synced: string[] = [];
// If both resolve to the same directory (symlink), no sync needed // If both resolve to the same directory (symlink), no sync needed
if (isSamePath(mainGsd, wtGsd)) return { synced }; if (isSamePath(mainSf, wtSf)) return { synced };
if (!existsSync(wtGsd) || !existsSync(mainGsd)) return { synced }; if (!existsSync(wtSf) || !existsSync(mainSf)) return { synced };
// ── 0. Pre-upgrade worktree DB reconciliation ──────────────────────── // ── 0. Pre-upgrade worktree DB reconciliation ────────────────────────
// If the worktree has its own sf.db (copied before the WAL transition), // If the worktree has its own sf.db (copied before the WAL transition),
// reconcile its hierarchy data into the project root DB before syncing // reconcile its hierarchy data into the project root DB before syncing
// files. This handles in-flight worktrees that were created before the // files. This handles in-flight worktrees that were created before the
// upgrade to shared WAL mode. // upgrade to shared WAL mode.
const wtLocalDb = join(wtGsd, "sf.db"); const wtLocalDb = join(wtSf, "sf.db");
const mainDb = join(mainGsd, "sf.db"); const mainDb = join(mainSf, "sf.db");
if (existsSync(wtLocalDb) && existsSync(mainDb)) { if (existsSync(wtLocalDb) && existsSync(mainDb)) {
try { try {
reconcileWorktreeDb(mainDb, wtLocalDb); reconcileWorktreeDb(mainDb, wtLocalDb);
@ -826,8 +826,8 @@ export function syncWorktreeStateBack(
// written during milestone closeout and lost on teardown without explicit sync // written during milestone closeout and lost on teardown without explicit sync
// (#1787, #2313). // (#1787, #2313).
for (const f of ROOT_STATE_FILES) { for (const f of ROOT_STATE_FILES) {
const src = join(wtGsd, f); const src = join(wtSf, f);
const dst = join(mainGsd, f); const dst = join(mainSf, f);
if (existsSync(src)) { if (existsSync(src)) {
try { try {
cpSync(src, dst, { force: true }); cpSync(src, dst, { force: true });
@ -846,7 +846,7 @@ export function syncWorktreeStateBack(
// The complete-milestone unit may create next-milestone artifacts (e.g. // The complete-milestone unit may create next-milestone artifacts (e.g.
// M007 setup while closing M006). We must sync every milestone directory // M007 setup while closing M006). We must sync every milestone directory
// in the worktree, not just the current one. // in the worktree, not just the current one.
const wtMilestonesDir = join(wtGsd, "milestones"); const wtMilestonesDir = join(wtSf, "milestones");
if (!existsSync(wtMilestonesDir)) return { synced }; if (!existsSync(wtMilestonesDir)) return { synced };
try { try {
@ -858,7 +858,7 @@ export function syncWorktreeStateBack(
// Skip the current milestone being merged — its files are already in the // Skip the current milestone being merged — its files are already in the
// milestone branch and would conflict with the squash merge (#3641). // milestone branch and would conflict with the squash merge (#3641).
if (mid === milestoneId) continue; if (mid === milestoneId) continue;
syncMilestoneDir(wtGsd, mainGsd, mid, synced); syncMilestoneDir(wtSf, mainSf, mid, synced);
} }
} catch (err) { } catch (err) {
/* non-fatal */ /* non-fatal */
@ -909,13 +909,13 @@ function syncDirFiles(
} }
function syncMilestoneDir( function syncMilestoneDir(
wtGsd: string, wtSf: string,
mainGsd: string, mainSf: string,
mid: string, mid: string,
synced: string[], synced: string[],
): void { ): void {
const wtMilestoneDir = join(wtGsd, "milestones", mid); const wtMilestoneDir = join(wtSf, "milestones", mid);
const mainMilestoneDir = join(mainGsd, "milestones", mid); const mainMilestoneDir = join(mainSf, "milestones", mid);
if (!existsSync(wtMilestoneDir)) return; if (!existsSync(wtMilestoneDir)) return;
mkdirSync(mainMilestoneDir, { recursive: true }); mkdirSync(mainMilestoneDir, { recursive: true });
@ -1264,13 +1264,13 @@ export function createAutoWorktree(
* Best-effort failures are non-fatal since auto-mode can recreate artifacts. * Best-effort failures are non-fatal since auto-mode can recreate artifacts.
*/ */
function copyPlanningArtifacts(srcBase: string, wtPath: string): void { function copyPlanningArtifacts(srcBase: string, wtPath: string): void {
const srcGsd = join(srcBase, ".sf"); const srcSf = join(srcBase, ".sf");
const dstGsd = join(wtPath, ".sf"); const dstSf = join(wtPath, ".sf");
if (!existsSync(srcGsd)) return; if (!existsSync(srcSf)) return;
if (isSamePath(srcGsd, dstGsd)) return; if (isSamePath(srcSf, dstSf)) return;
// Copy milestones/ directory (planning files, roadmaps, plans, research) // Copy milestones/ directory (planning files, roadmaps, plans, research)
safeCopyRecursive(join(srcGsd, "milestones"), join(dstGsd, "milestones"), { safeCopyRecursive(join(srcSf, "milestones"), join(dstSf, "milestones"), {
force: true, force: true,
filter: (src) => !src.endsWith("-META.json"), filter: (src) => !src.endsWith("-META.json"),
}); });
@ -1286,20 +1286,20 @@ function copyPlanningArtifacts(srcBase: string, wtPath: string): void {
"OVERRIDES.md", "OVERRIDES.md",
"mcp.json", "mcp.json",
]) { ]) {
safeCopy(join(srcGsd, file), join(dstGsd, file), { force: true }); safeCopy(join(srcSf, file), join(dstSf, file), { force: true });
} }
// Seed canonical PREFERENCES.md when available; fall back to legacy lowercase. // Seed canonical PREFERENCES.md when available; fall back to legacy lowercase.
if (existsSync(join(srcGsd, PROJECT_PREFERENCES_FILE))) { if (existsSync(join(srcSf, PROJECT_PREFERENCES_FILE))) {
safeCopy( safeCopy(
join(srcGsd, PROJECT_PREFERENCES_FILE), join(srcSf, PROJECT_PREFERENCES_FILE),
join(dstGsd, PROJECT_PREFERENCES_FILE), join(dstSf, PROJECT_PREFERENCES_FILE),
{ force: true }, { force: true },
); );
} else if (existsSync(join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE))) { } else if (existsSync(join(srcSf, LEGACY_PROJECT_PREFERENCES_FILE))) {
safeCopy( safeCopy(
join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE), join(srcSf, LEGACY_PROJECT_PREFERENCES_FILE),
join(dstGsd, LEGACY_PROJECT_PREFERENCES_FILE), join(dstSf, LEGACY_PROJECT_PREFERENCES_FILE),
{ force: true }, { force: true },
); );
} }
@ -2077,7 +2077,7 @@ export function mergeMilestoneToMain(
// version) and drop the now-applied stash. // version) and drop the now-applied stash.
const uu = nativeConflictFiles(originalBasePath_); const uu = nativeConflictFiles(originalBasePath_);
const sfUU = uu.filter((f) => f.startsWith(".sf/")); const sfUU = uu.filter((f) => f.startsWith(".sf/"));
const nonGsdUU = uu.filter((f) => !f.startsWith(".sf/")); const nonSfUU = uu.filter((f) => !f.startsWith(".sf/"));
if (sfUU.length > 0) { if (sfUU.length > 0) {
for (const f of sfUU) { for (const f of sfUU) {
@ -2100,7 +2100,7 @@ export function mergeMilestoneToMain(
} }
} }
if (nonGsdUU.length === 0) { if (nonSfUU.length === 0) {
// All conflicts were .sf/ files — safe to drop the stash // All conflicts were .sf/ files — safe to drop the stash
try { try {
execFileSync("git", ["stash", "drop"], { execFileSync("git", ["stash", "drop"], {
@ -2121,7 +2121,7 @@ export function mergeMilestoneToMain(
"reconcile", "reconcile",
"Stash pop conflict on non-.sf files after merge", "Stash pop conflict on non-.sf files after merge",
{ {
files: nonGsdUU.join(", "), files: nonSfUU.join(", "),
}, },
); );
} }

View file

@ -1831,14 +1831,14 @@ export async function runUnitPhase(
s.lastBaselineCharCount = undefined; s.lastBaselineCharCount = undefined;
if (deps.isDbAvailable()) { if (deps.isDbAvailable()) {
try { try {
const { inlineGsdRootFile } = await importExtensionModule< const { inlineSfRootFile } = await importExtensionModule<
typeof import("../auto-prompts.js") typeof import("../auto-prompts.js")
>(import.meta.url, "../auto-prompts.js"); >(import.meta.url, "../auto-prompts.js");
const [decisionsContent, requirementsContent, projectContent] = const [decisionsContent, requirementsContent, projectContent] =
await Promise.all([ await Promise.all([
inlineGsdRootFile(s.basePath, "decisions.md", "Decisions"), inlineSfRootFile(s.basePath, "decisions.md", "Decisions"),
inlineGsdRootFile(s.basePath, "requirements.md", "Requirements"), inlineSfRootFile(s.basePath, "requirements.md", "Requirements"),
inlineGsdRootFile(s.basePath, "project.md", "Project"), inlineSfRootFile(s.basePath, "project.md", "Project"),
]); ]);
s.lastBaselineCharCount = s.lastBaselineCharCount =
(decisionsContent?.length ?? 0) + (decisionsContent?.length ?? 0) +

View file

@ -1,4 +1,4 @@
import { join } from "node:path"; import { join, resolve, relative } from "node:path";
import type { import type {
ExtensionAPI, ExtensionAPI,
@ -456,6 +456,33 @@ export function registerHooks(
if (!isToolCallEventType("write", event)) return; if (!isToolCallEventType("write", event)) return;
// ── Worktree isolation: block writes outside the worktree and main .sf/ ──
// Only enforced in auto-mode — interactive sessions skip this check.
// When SF_WORKTREE is set, process.cwd() is the worktree directory.
// The agent should only write inside the worktree OR inside the main repo's .sf/.
if (isAutoActive() && process.env.SF_WORKTREE) {
const worktreeRoot = process.cwd();
const mainRepoRoot =
process.env.SF_PROJECT_ROOT ??
(resolve(worktreeRoot, ".."));
const targetPath = resolve(event.input.path);
const worktreeRel = relative(worktreeRoot, targetPath);
const mainSfRel = relative(join(mainRepoRoot, ".sf"), targetPath);
const worktreeOk =
!worktreeRel.startsWith("..") && !worktreeRel.startsWith("/");
const mainSfOk =
!mainSfRel.startsWith("..") && !mainSfRel.startsWith("/");
if (!worktreeOk && !mainSfOk) {
return {
block: true,
reason:
`HARD BLOCK: Worktree isolation is active. Cannot write to "${event.input.path}" — ` +
`path is outside the worktree (${worktreeRoot}) and outside the main repo's .sf/ directory. ` +
`Write only inside the worktree or inside ${join(mainRepoRoot, ".sf")}/milestones/ for planning artifacts.`,
};
}
}
const result = shouldBlockContextWrite( const result = shouldBlockContextWrite(
event.toolName, event.toolName,
event.input.path, event.input.path,
@ -500,55 +527,33 @@ export function registerHooks(
const details = event.details as any; const details = event.details as any;
// ── Discussion gate enforcement: handle gate question responses ── // ── Discussion gate enforcement: handle gate question responses ──
// If the result is cancelled or has no response, the pending gate stays active // Single consolidated loop: finds depth_verification questions, verifies the answer,
// so the model is blocked from non-read-only tools until it re-asks. // marks the milestone as depth-verified, and clears the pending gate.
// If the user responded at all (even "needs adjustment"), clear the pending gate // Also handles the legacy pending-gate path (set by tool_call) for robustness.
// because the user engaged — the prompt handles the re-ask-after-adjustment flow.
const questions: any[] = (event.input as any)?.questions ?? []; const questions: any[] = (event.input as any)?.questions ?? [];
const currentPendingGate = getPendingGate(); const currentPendingGate = getPendingGate();
if (currentPendingGate) {
if (details?.cancelled || !details?.response) {
// Gate stays pending — model will be blocked from non-read-only tools
// until it re-asks and gets a valid response
} else {
const pendingQuestion = questions.find(
(question) => question?.id === currentPendingGate,
);
if (pendingQuestion) {
const answer = details.response?.answers?.[currentPendingGate];
if (
isDepthConfirmationAnswer(
getSelectedGateAnswer(answer),
pendingQuestion.options,
)
) {
clearPendingGate();
}
}
}
}
if (details?.cancelled || !details?.response) return; if (details?.cancelled || !details?.response) return;
for (const question of questions) { for (const question of questions) {
if (typeof question.id !== "string") continue;
// Check if this is a depth_verification question (either directly or via pending gate)
const isDepthQ = question.id.includes("depth_verification");
const isPendingQ = question.id === currentPendingGate;
if (!isDepthQ && !isPendingQ) continue;
const answer = details.response?.answers?.[question.id];
if ( if (
typeof question.id === "string" && isDepthConfirmationAnswer(getSelectedGateAnswer(answer), question.options)
question.id.includes("depth_verification")
) { ) {
// Only unlock the gate if the user selected the first option (confirmation). // Always mark depth-verified AND clear the gate
// Cross-references against the question's defined options to reject free-form "Other" text. if (isDepthQ) {
const answer = details.response?.answers?.[question.id]; const inferredMilestoneId =
const inferredMilestoneId = extractDepthVerificationMilestoneId(question.id) ?? milestoneId;
extractDepthVerificationMilestoneId(question.id) ?? milestoneId;
if (
isDepthConfirmationAnswer(
getSelectedGateAnswer(answer),
question.options,
)
) {
markDepthVerified(inferredMilestoneId); markDepthVerified(inferredMilestoneId);
clearPendingGate();
} }
clearPendingGate();
break; break;
} }
} }

View file

@ -79,7 +79,7 @@ function filterStartsWith(
})); }));
} }
function getGsdArgumentCompletions(prefix: string) { function getSfArgumentCompletions(prefix: string) {
const parts = prefix.trim().split(/\s+/); const parts = prefix.trim().split(/\s+/);
if (parts.length <= 1) { if (parts.length <= 1) {
@ -382,7 +382,7 @@ function getGsdArgumentCompletions(prefix: string) {
export function registerLazySFCommand(pi: ExtensionAPI): void { export function registerLazySFCommand(pi: ExtensionAPI): void {
pi.registerCommand("sf", { pi.registerCommand("sf", {
description: "SF — Singularity Forge", description: "SF — Singularity Forge",
getArgumentCompletions: getGsdArgumentCompletions, getArgumentCompletions: getSfArgumentCompletions,
handler: async (args: string, ctx: ExtensionCommandContext) => { handler: async (args: string, ctx: ExtensionCommandContext) => {
const { handleSFCommand } = await importExtensionModule< const { handleSFCommand } = await importExtensionModule<
typeof import("./commands.js") typeof import("./commands.js")

View file

@ -7,17 +7,17 @@ import { resolveProjectRoot } from "../worktree.js";
const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
export interface GsdCommandDefinition { export interface SfCommandDefinition {
cmd: string; cmd: string;
desc: string; desc: string;
} }
type CompletionMap = Record<string, readonly GsdCommandDefinition[]>; type CompletionMap = Record<string, readonly SfCommandDefinition[]>;
export const SF_COMMAND_DESCRIPTION = export const SF_COMMAND_DESCRIPTION =
"SF — Singularity Forge: /sf help|start|templates|next|auto|stop|pause|status|widget|visualize|queue|quick|discuss|capture|triage|todo|dispatch|history|undo|undo-task|reset-slice|rate|skip|export|cleanup|model|mode|prefs|config|keys|hooks|run-hook|skill-health|doctor|logs|forensics|changelog|migrate|remote|steer|knowledge|harness|new-milestone|parallel|cmux|park|unpark|init|setup|inspect|extensions|update|fast|mcp|rethink|codebase|notifications|ship|do|session-report|backlog|pr-branch|add-tests|scan"; "SF — Singularity Forge: /sf help|start|templates|next|auto|stop|pause|status|widget|visualize|queue|quick|discuss|capture|triage|todo|dispatch|history|undo|undo-task|reset-slice|rate|skip|export|cleanup|model|mode|prefs|config|keys|hooks|run-hook|skill-health|doctor|logs|forensics|changelog|migrate|remote|steer|knowledge|harness|new-milestone|parallel|cmux|park|unpark|init|setup|inspect|extensions|update|fast|mcp|rethink|codebase|notifications|ship|do|session-report|backlog|pr-branch|add-tests|scan";
export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [ export const TOP_LEVEL_SUBCOMMANDS: readonly SfCommandDefinition[] = [
{ cmd: "help", desc: "Categorized command reference with descriptions" }, { cmd: "help", desc: "Categorized command reference with descriptions" },
{ cmd: "next", desc: "Explicit step mode (same as /sf)" }, { cmd: "next", desc: "Explicit step mode (same as /sf)" },
{ {
@ -387,7 +387,7 @@ const NESTED_COMPLETIONS: CompletionMap = {
function filterOptions( function filterOptions(
partial: string, partial: string,
options: readonly GsdCommandDefinition[], options: readonly SfCommandDefinition[],
prefix = "", prefix = "",
) { ) {
const normalizedPrefix = prefix ? `${prefix} ` : ""; const normalizedPrefix = prefix ? `${prefix} ` : "";
@ -429,7 +429,7 @@ function getExtensionCompletions(prefix: string, action: string) {
} }
} }
export function getGsdArgumentCompletions(prefix: string) { export function getSfArgumentCompletions(prefix: string) {
const hasTrailingSpace = prefix.endsWith(" "); const hasTrailingSpace = prefix.endsWith(" ");
const parts = prefix.trim().split(/\s+/); const parts = prefix.trim().split(/\s+/);
if (hasTrailingSpace && parts.length >= 1) { if (hasTrailingSpace && parts.length >= 1) {

View file

@ -13,7 +13,7 @@ import { validateDirectory } from "../validate-directory.js";
import { resolveProjectRoot } from "../worktree.js"; import { resolveProjectRoot } from "../worktree.js";
import { handleStatus } from "./handlers/core.js"; import { handleStatus } from "./handlers/core.js";
export interface GsdDispatchContext { export interface SfDispatchContext {
ctx: ExtensionCommandContext; ctx: ExtensionCommandContext;
pi: ExtensionAPI; pi: ExtensionAPI;
trimmed: string; trimmed: string;

View file

@ -4,14 +4,14 @@ import type {
} from "@singularity-forge/pi-coding-agent"; } from "@singularity-forge/pi-coding-agent";
import { import {
getGsdArgumentCompletions, getSfArgumentCompletions,
SF_COMMAND_DESCRIPTION, SF_COMMAND_DESCRIPTION,
} from "./catalog.js"; } from "./catalog.js";
export function registerSFCommand(pi: ExtensionAPI): void { export function registerSFCommand(pi: ExtensionAPI): void {
pi.registerCommand("sf", { pi.registerCommand("sf", {
description: SF_COMMAND_DESCRIPTION, description: SF_COMMAND_DESCRIPTION,
getArgumentCompletions: getGsdArgumentCompletions, getArgumentCompletions: getSfArgumentCompletions,
handler: async (args: string, ctx: ExtensionCommandContext) => { handler: async (args: string, ctx: ExtensionCommandContext) => {
const { handleSFCommand } = await import("./dispatcher.js"); const { handleSFCommand } = await import("./dispatcher.js");
const { setStderrLoggingEnabled } = await import("../workflow-logger.js"); const { setStderrLoggingEnabled } = await import("../workflow-logger.js");

View file

@ -304,7 +304,7 @@ const MAX_RECURSIVE_SCAN_DEPTH = 6;
*/ */
export function detectProjectState(basePath: string): ProjectDetection { export function detectProjectState(basePath: string): ProjectDetection {
const v1 = detectV1Planning(basePath); const v1 = detectV1Planning(basePath);
const v2 = detectV2Gsd(basePath); const v2 = detectV2Sf(basePath);
const projectSignals = detectProjectSignals(basePath); const projectSignals = detectProjectSignals(basePath);
const globalSetup = hasGlobalSetup(); const globalSetup = hasGlobalSetup();
const firstEver = isFirstEverLaunch(); const firstEver = isFirstEverLaunch();
@ -372,7 +372,7 @@ export function detectV1Planning(basePath: string): V1Detection | null {
// ─── V2 SF Detection ────────────────────────────────────────────────────────── // ─── V2 SF Detection ──────────────────────────────────────────────────────────
function detectV2Gsd(basePath: string): V2Detection | null { function detectV2Sf(basePath: string): V2Detection | null {
const sfPath = sfRoot(basePath); const sfPath = sfRoot(basePath);
if (!existsSync(sfPath)) return null; if (!existsSync(sfPath)) return null;

View file

@ -15,7 +15,7 @@ import {
} from "./crash-recovery.js"; } from "./crash-recovery.js";
import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js"; import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js";
import { saveFile } from "./files.js"; import { saveFile } from "./files.js";
import { ensureGitignore, isGsdGitignored } from "./gitignore.js"; import { ensureGitignore, isSfGitignored } from "./gitignore.js";
import { recoverFailedMigration } from "./migrate-external.js"; import { recoverFailedMigration } from "./migrate-external.js";
import { import {
nativeForEachRef, nativeForEachRef,
@ -23,7 +23,7 @@ import {
nativeUpdateRef, nativeUpdateRef,
} from "./native-git-bridge.js"; } from "./native-git-bridge.js";
import { milestonesDir, resolveSfRootFile, sfRoot } from "./paths.js"; import { milestonesDir, resolveSfRootFile, sfRoot } from "./paths.js";
import { cleanNumberedGsdVariants } from "./repo-identity.js"; import { cleanNumberedSfVariants } from "./repo-identity.js";
import { import {
isSessionStale, isSessionStale,
readAllSessionStatuses, readAllSessionStatuses,
@ -431,27 +431,27 @@ export async function checkRuntimeHealth(
}); });
} }
// ── Symlinked .gsd without .gitignore entry (#4423) ── // ── Symlinked .sf without .gitignore entry (#4423) ──
// When `.gsd` is a symlink AND not gitignored, `git add -A -- :!.gsd/...` // When `.sf` is a symlink AND not gitignored, `git add -A -- :!.sf/...`
// pathspecs fail with "beyond a symbolic link". Without self-heal this // pathspecs fail with "beyond a symbolic link". Without self-heal this
// silently drops new user files during auto-commit. // silently drops new user files during auto-commit.
if (nativeIsRepo(basePath) && !isGsdGitignored(basePath)) { if (nativeIsRepo(basePath) && !isSfGitignored(basePath)) {
issues.push({ issues.push({
severity: "warning", severity: "warning",
code: "symlinked_gsd_unignored", code: "symlinked_sf_unignored",
scope: "project", scope: "project",
unitId: "project", unitId: "project",
message: message:
".gsd is a symlink to external state but is not listed in .gitignore. This causes git pathspec exclusions to fail and can lead to silently dropped new files during auto-commit. Add `.gsd` to .gitignore.", ".sf is a symlink to external state but is not listed in .gitignore. This causes git pathspec exclusions to fail and can lead to silently dropped new files during auto-commit. Add `.sf` to .gitignore.",
file: ".gitignore", file: ".gitignore",
fixable: true, fixable: true,
}); });
if (shouldFix("symlinked_gsd_unignored")) { if (shouldFix("symlinked_sf_unignored")) {
const modified = ensureGitignore(basePath); const modified = ensureGitignore(basePath);
if (modified) if (modified)
fixesApplied.push( fixesApplied.push(
"added .gsd to .gitignore (symlinked external state)", "added .sf to .gitignore (symlinked external state)",
); );
} }
} }
@ -482,7 +482,7 @@ export async function checkRuntimeHealth(
} }
if (shouldFix("numbered_sf_variant")) { if (shouldFix("numbered_sf_variant")) {
const removed = cleanNumberedGsdVariants(basePath); const removed = cleanNumberedSfVariants(basePath);
for (const name of removed) { for (const name of removed) {
fixesApplied.push(`removed numbered .sf variant: ${name}`); fixesApplied.push(`removed numbered .sf variant: ${name}`);
} }

View file

@ -23,7 +23,7 @@ export type DoctorIssueCode =
| "state_file_stale" | "state_file_stale"
| "state_file_missing" | "state_file_missing"
| "gitignore_missing_patterns" | "gitignore_missing_patterns"
| "symlinked_gsd_unignored" | "symlinked_sf_unignored"
| "unresolvable_dependency" | "unresolvable_dependency"
| "failed_migration" | "failed_migration"
| "broken_symlink" | "broken_symlink"

View file

@ -117,7 +117,7 @@ const BASELINE_PATTERNS = [
* - `.sf` is not listed in any active ignore rule * - `.sf` is not listed in any active ignore rule
* - Not a git repo or git is unavailable * - Not a git repo or git is unavailable
*/ */
export function isGsdGitignored(basePath: string): boolean { export function isSfGitignored(basePath: string): boolean {
// Check both `.sf` and `.sf/` because `.sf/` in .gitignore (trailing // Check both `.sf` and `.sf/` because `.sf/` in .gitignore (trailing
// slash = directory-only pattern) only matches the directory form. Using // slash = directory-only pattern) only matches the directory form. Using
// both paths covers all gitignore pattern variants. // both paths covers all gitignore pattern variants.
@ -149,7 +149,7 @@ export function isGsdGitignored(basePath: string): boolean {
* - `.sf/` doesn't exist * - `.sf/` doesn't exist
* - No tracked files found under `.sf/` * - No tracked files found under `.sf/`
*/ */
export function hasGitTrackedGsdFiles(basePath: string): boolean { export function hasGitTrackedSfFiles(basePath: string): boolean {
const localSf = join(basePath, ".sf"); const localSf = join(basePath, ".sf");
// If .sf doesn't exist or is already a symlink, no tracked files concern // If .sf doesn't exist or is already a symlink, no tracked files concern
@ -266,7 +266,7 @@ export function ensureGitignore(
// Determine which patterns to apply. If .sf/ has tracked files, // Determine which patterns to apply. If .sf/ has tracked files,
// exclude the ".sf" pattern to prevent deleting tracked state. // exclude the ".sf" pattern to prevent deleting tracked state.
const sfIsTracked = hasGitTrackedGsdFiles(basePath); const sfIsTracked = hasGitTrackedSfFiles(basePath);
const patternsToApply = sfIsTracked const patternsToApply = sfIsTracked
? BASELINE_PATTERNS.filter((p) => p !== ".sf") ? BASELINE_PATTERNS.filter((p) => p !== ".sf")
: BASELINE_PATTERNS; : BASELINE_PATTERNS;

View file

@ -293,7 +293,7 @@ export async function showProjectInit(
} }
// ── Step 9: Bootstrap .sf/ ──────────────────────────────────────────────── // ── Step 9: Bootstrap .sf/ ────────────────────────────────────────────────
bootstrapGsdDirectory(basePath, prefs, signals); bootstrapSfDirectory(basePath, prefs, signals);
// Initialize SQLite database so SF starts in full-capability mode (#3880). // Initialize SQLite database so SF starts in full-capability mode (#3880).
// Without this, isDbAvailable() returns false and SF enters degraded // Without this, isDbAvailable() returns false and SF enters degraded
@ -572,7 +572,7 @@ async function customizeAdvancedPrefs(
// ─── Bootstrap ────────────────────────────────────────────────────────────────── // ─── Bootstrap ──────────────────────────────────────────────────────────────────
function bootstrapGsdDirectory( function bootstrapSfDirectory(
basePath: string, basePath: string,
prefs: ProjectPreferences, prefs: ProjectPreferences,
signals: ProjectSignals, signals: ProjectSignals,

View file

@ -27,7 +27,7 @@ interface McpConfigFile {
[key: string]: unknown; [key: string]: unknown;
} }
export function resolveBundledGsdCliPath( export function resolveBundledSfCliPath(
env: NodeJS.ProcessEnv = process.env, env: NodeJS.ProcessEnv = process.env,
): string | null { ): string | null {
const explicit = env.SF_CLI_PATH?.trim() || env.SF_BIN_PATH?.trim(); const explicit = env.SF_CLI_PATH?.trim() || env.SF_BIN_PATH?.trim();
@ -55,7 +55,7 @@ export function buildProjectWorkflowMcpServerConfig(
env: NodeJS.ProcessEnv = process.env, env: NodeJS.ProcessEnv = process.env,
): ProjectMcpServerConfig { ): ProjectMcpServerConfig {
const resolvedProjectRoot = resolve(projectRoot); const resolvedProjectRoot = resolve(projectRoot);
const sfCliPath = resolveBundledGsdCliPath(env); const sfCliPath = resolveBundledSfCliPath(env);
const launch = detectWorkflowMcpLaunchConfig(resolvedProjectRoot, { const launch = detectWorkflowMcpLaunchConfig(resolvedProjectRoot, {
...env, ...env,
...(sfCliPath ? { SF_CLI_PATH: sfCliPath, SF_BIN_PATH: sfCliPath } : {}), ...(sfCliPath ? { SF_CLI_PATH: sfCliPath, SF_BIN_PATH: sfCliPath } : {}),

View file

@ -21,8 +21,8 @@ import {
import { join } from "node:path"; import { join } from "node:path";
import { getErrorMessage } from "./error-utils.js"; import { getErrorMessage } from "./error-utils.js";
import { GIT_NO_PROMPT_ENV } from "./git-constants.js"; import { GIT_NO_PROMPT_ENV } from "./git-constants.js";
import { hasGitTrackedGsdFiles } from "./gitignore.js"; import { hasGitTrackedSfFiles } from "./gitignore.js";
import { externalGsdRoot, isInsideWorktree } from "./repo-identity.js"; import { externalSfRoot, isInsideWorktree } from "./repo-identity.js";
export interface MigrationResult { export interface MigrationResult {
migrated: boolean; migrated: boolean;
@ -46,7 +46,7 @@ export interface MigrationResult {
export function migrateToExternalState(basePath: string): MigrationResult { export function migrateToExternalState(basePath: string): MigrationResult {
// Worktrees get their .sf via syncSfStateToWorktree(), not migration. // Worktrees get their .sf via syncSfStateToWorktree(), not migration.
// Migration inside a worktree would compute the same external hash as the // Migration inside a worktree would compute the same external hash as the
// main repo (externalGsdRoot hashes remoteUrl + gitRoot), creating a broken // main repo (externalSfRoot hashes remoteUrl + gitRoot), creating a broken
// junction and orphaning .sf.migrating (#2970). // junction and orphaning .sf.migrating (#2970).
if (isInsideWorktree(basePath)) { if (isInsideWorktree(basePath)) {
return { migrated: false }; return { migrated: false };
@ -80,7 +80,7 @@ export function migrateToExternalState(basePath: string): MigrationResult {
// Skip if .sf/ contains git-tracked files — the project intentionally // Skip if .sf/ contains git-tracked files — the project intentionally
// keeps .sf/ in version control and migration would destroy that. // keeps .sf/ in version control and migration would destroy that.
if (hasGitTrackedGsdFiles(basePath)) { if (hasGitTrackedSfFiles(basePath)) {
return { migrated: false }; return { migrated: false };
} }
@ -100,7 +100,7 @@ export function migrateToExternalState(basePath: string): MigrationResult {
} }
} }
const externalPath = externalGsdRoot(basePath); const externalPath = externalSfRoot(basePath);
const migratingPath = join(basePath, ".sf.migrating"); const migratingPath = join(basePath, ".sf.migrating");
try { try {

View file

@ -152,8 +152,8 @@ export async function handleMigrate(
); );
} }
const targetGsdExists = existsSync(sfRoot(process.cwd())); const targetSfExists = existsSync(sfRoot(process.cwd()));
if (targetGsdExists) { if (targetSfExists) {
lines.push(""); lines.push("");
lines.push( lines.push(
"⚠ A .sf directory already exists in the current working directory — it will be overwritten.", "⚠ A .sf directory already exists in the current working directory — it will be overwritten.",

View file

@ -1,4 +1,4 @@
// GSD-2 — Milestone scope classifier (#4781 / ADR-003 companion). // SF — Milestone scope classifier (#4781 / ADR-003 companion).
// //
// Pure heuristics over milestone planning fields. Produces a PipelineVariant // Pure heuristics over milestone planning fields. Produces a PipelineVariant
// that downstream dispatch logic can use to shape the auto-mode sequence. // that downstream dispatch logic can use to shape the auto-mode sequence.

View file

@ -18,7 +18,7 @@ let nativeModule: {
level?: number, level?: number,
) => { content: string; found: boolean }; ) => { content: string; found: boolean };
extractAllSections: (content: string, level?: number) => string; extractAllSections: (content: string, level?: number) => string;
batchParseGsdFiles: (directory: string) => { batchParseSfFiles: (directory: string) => {
files: Array<{ files: Array<{
path: string; path: string;
metadata: string; metadata: string;
@ -47,7 +47,7 @@ let nativeModule: {
consumes: string; consumes: string;
}>; }>;
}; };
scanGsdTree: ( scanSfTree: (
directory: string, directory: string,
) => Array<{ path: string; name: string; isDir: boolean }>; ) => Array<{ path: string; name: string; isDir: boolean }>;
parseJsonlTail: ( parseJsonlTail: (
@ -70,7 +70,7 @@ function loadNative(): typeof nativeModule {
// Dynamic import to avoid hard dependency - fails gracefully if native module not built // Dynamic import to avoid hard dependency - fails gracefully if native module not built
// eslint-disable-next-line @typescript-eslint/no-require-imports // eslint-disable-next-line @typescript-eslint/no-require-imports
const mod = require("@singularity-forge/native"); const mod = require("@singularity-forge/native");
if (mod.parseFrontmatter && mod.extractSection && mod.batchParseGsdFiles) { if (mod.parseFrontmatter && mod.extractSection && mod.batchParseSfFiles) {
nativeModule = mod; nativeModule = mod;
} }
} catch { } catch {
@ -161,13 +161,13 @@ export interface BatchParsedFile {
* Batch-parse all .md files in a .sf/ directory tree using the native parser. * Batch-parse all .md files in a .sf/ directory tree using the native parser.
* Returns null if native module unavailable. * Returns null if native module unavailable.
*/ */
export function nativeBatchParseGsdFiles( export function nativeBatchParseSfFiles(
directory: string, directory: string,
): BatchParsedFile[] | null { ): BatchParsedFile[] | null {
const native = loadNative(); const native = loadNative();
if (!native) return null; if (!native) return null;
const result = native.batchParseGsdFiles(directory); const result = native.batchParseSfFiles(directory);
return result.files.map((f) => ({ return result.files.map((f) => ({
path: f.path, path: f.path,
metadata: JSON.parse(f.metadata) as Record<string, unknown>, metadata: JSON.parse(f.metadata) as Record<string, unknown>,
@ -186,7 +186,7 @@ export function isNativeParserAvailable(): boolean {
// ─── Tree Scanning ──────────────────────────────────────────────────────────── // ─── Tree Scanning ────────────────────────────────────────────────────────────
export interface GsdTreeEntry { export interface SfTreeEntry {
path: string; path: string;
name: string; name: string;
isDir: boolean; isDir: boolean;
@ -196,10 +196,10 @@ export interface GsdTreeEntry {
* Native-backed directory tree scan of a .sf/ directory. * Native-backed directory tree scan of a .sf/ directory.
* Returns a flat list of all entries, or null if native module unavailable. * Returns a flat list of all entries, or null if native module unavailable.
*/ */
export function nativeScanGsdTree(directory: string): GsdTreeEntry[] | null { export function nativeScanSfTree(directory: string): SfTreeEntry[] | null {
const native = loadNative(); const native = loadNative();
if (!native) return null; if (!native) return null;
return native.scanGsdTree(directory); return native.scanSfTree(directory);
} }
// ─── JSONL Parsing ──────────────────────────────────────────────────────────── // ─── JSONL Parsing ────────────────────────────────────────────────────────────

View file

@ -645,7 +645,7 @@ export function spawnWorker(basePath: string, milestoneId: string): boolean {
if (worker.process) return true; // already spawned if (worker.process) return true; // already spawned
// Resolve the SF CLI binary path // Resolve the SF CLI binary path
const binPath = resolveGsdBin(); const binPath = resolveSfBin();
if (!binPath) return false; if (!binPath) return false;
let child: ChildProcess; let child: ChildProcess;
@ -800,7 +800,7 @@ export function spawnWorker(basePath: string, milestoneId: string): boolean {
* Uses SF_BIN_PATH env var (set by loader.ts) or falls back to * Uses SF_BIN_PATH env var (set by loader.ts) or falls back to
* finding the binary relative to the current module. * finding the binary relative to the current module.
*/ */
function resolveGsdBin(): string | null { function resolveSfBin(): string | null {
// SF_BIN_PATH is set by loader.ts to the absolute path of dist/loader.js // SF_BIN_PATH is set by loader.ts to the absolute path of dist/loader.js
if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) { if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) {
return process.env.SF_BIN_PATH; return process.env.SF_BIN_PATH;

View file

@ -14,8 +14,8 @@ import { Dirent, existsSync, readdirSync, realpathSync } from "node:fs";
import { dirname, join, normalize } from "node:path"; import { dirname, join, normalize } from "node:path";
import { DIR_CACHE_MAX } from "./constants.js"; import { DIR_CACHE_MAX } from "./constants.js";
import { import {
type GsdTreeEntry, type SfTreeEntry,
nativeScanGsdTree, nativeScanSfTree,
} from "./native-parser-bridge.js"; } from "./native-parser-bridge.js";
// ─── Directory Listing Cache ────────────────────────────────────────────────── // ─── Directory Listing Cache ──────────────────────────────────────────────────
@ -27,17 +27,17 @@ const dirListCache = new Map<string, string[]>();
// When the native module is available, scan the entire .sf/ tree in one call // When the native module is available, scan the entire .sf/ tree in one call
// and serve directory listings from memory instead of individual readdirSync calls. // and serve directory listings from memory instead of individual readdirSync calls.
let nativeTreeCache: Map<string, GsdTreeEntry[]> | null = null; let nativeTreeCache: Map<string, SfTreeEntry[]> | null = null;
let nativeTreeBase: string | null = null; let nativeTreeBase: string | null = null;
function _getNativeTree(sfDir: string): Map<string, GsdTreeEntry[]> | null { function _getNativeTree(sfDir: string): Map<string, SfTreeEntry[]> | null {
if (nativeTreeCache && nativeTreeBase === sfDir) return nativeTreeCache; if (nativeTreeCache && nativeTreeBase === sfDir) return nativeTreeCache;
const entries = nativeScanGsdTree(sfDir); const entries = nativeScanSfTree(sfDir);
if (!entries) return null; if (!entries) return null;
// Build a map of parent directory -> entries // Build a map of parent directory -> entries
const tree = new Map<string, GsdTreeEntry[]>(); const tree = new Map<string, SfTreeEntry[]>();
for (const entry of entries) { for (const entry of entries) {
const parts = entry.path.split("/"); const parts = entry.path.split("/");
const parentPath = parts.slice(0, -1).join("/"); const parentPath = parts.slice(0, -1).join("/");
@ -298,7 +298,7 @@ const LEGACY_SF_ROOT_FILES: Record<SFRootFileKey, string> = {
const sfRootCache = new Map<string, string>(); const sfRootCache = new Map<string, string>();
/** Exported for tests only — do not call in production code. */ /** Exported for tests only — do not call in production code. */
export function _clearGsdRootCache(): void { export function _clearSfRootCache(): void {
sfRootCache.clear(); sfRootCache.clear();
} }
@ -317,7 +317,7 @@ export function sfRoot(basePath: string): string {
const cached = sfRootCache.get(basePath); const cached = sfRootCache.get(basePath);
if (cached) return cached; if (cached) return cached;
const result = probeGsdRoot(basePath); const result = probeSfRoot(basePath);
sfRootCache.set(basePath, result); sfRootCache.set(basePath, result);
return result; return result;
} }
@ -334,7 +334,7 @@ export const projectRoot = sfRoot;
* Matches both forward-slash and platform-native separators to handle * Matches both forward-slash and platform-native separators to handle
* Windows paths (path.sep = '\\') and normalized Unix paths. * Windows paths (path.sep = '\\') and normalized Unix paths.
*/ */
function isInsideGsdWorktree(p: string): boolean { function isInsideSfWorktree(p: string): boolean {
// Match /.sf/worktrees/<name> where <name> is the final segment or // Match /.sf/worktrees/<name> where <name> is the final segment or
// followed by a separator. The <name> segment must be non-empty. // followed by a separator. The <name> segment must be non-empty.
const sepFwd = "/"; const sepFwd = "/";
@ -356,7 +356,7 @@ function isInsideGsdWorktree(p: string): boolean {
return false; return false;
} }
function probeGsdRoot(rawBasePath: string): string { function probeSfRoot(rawBasePath: string): string {
// 1. Fast path — check the input path directly // 1. Fast path — check the input path directly
const local = join(rawBasePath, ".sf"); const local = join(rawBasePath, ".sf");
if (existsSync(local)) return local; if (existsSync(local)) return local;
@ -366,7 +366,7 @@ function probeGsdRoot(rawBasePath: string): string {
// the git-root probe (step 2) or walk-up (step 3) escapes to the project // the git-root probe (step 2) or walk-up (step 3) escapes to the project
// root's .sf, causing ensurePreconditions() and deriveState() to read/write // root's .sf, causing ensurePreconditions() and deriveState() to read/write
// state in the wrong location. // state in the wrong location.
if (isInsideGsdWorktree(rawBasePath)) return local; if (isInsideSfWorktree(rawBasePath)) return local;
// Resolve symlinks so path comparisons work correctly across platforms // Resolve symlinks so path comparisons work correctly across platforms
// (e.g. macOS /var → /private/var). Use rawBasePath as fallback if not resolvable. // (e.g. macOS /var → /private/var). Use rawBasePath as fallback if not resolvable.
@ -378,7 +378,7 @@ function probeGsdRoot(rawBasePath: string): string {
} }
// Also check the resolved path for the worktree pattern (macOS /tmp → /private/tmp) // Also check the resolved path for the worktree pattern (macOS /tmp → /private/tmp)
if (basePath !== rawBasePath && isInsideGsdWorktree(basePath)) return local; if (basePath !== rawBasePath && isInsideSfWorktree(basePath)) return local;
// 2. Git root anchor — used as both probe target and walk-up boundary // 2. Git root anchor — used as both probe target and walk-up boundary
// Only walk if we're inside a git project — prevents escaping into // Only walk if we're inside a git project — prevents escaping into
@ -437,14 +437,10 @@ export function resolveSfRootFile(
return canonical; return canonical;
} }
export const resolveGsdRootFile = resolveSfRootFile;
export function relSfRootFile(key: SFRootFileKey): string { export function relSfRootFile(key: SFRootFileKey): string {
return `.sf/${SF_ROOT_FILES[key]}`; return `.sf/${SF_ROOT_FILES[key]}`;
} }
export const relGsdRootFile = relSfRootFile;
/** /**
* Resolve the full path to a milestone directory. * Resolve the full path to a milestone directory.
* Returns null if the milestone doesn't exist. * Returns null if the milestone doesn't exist.

View file

@ -40,8 +40,8 @@ function resolveExtensionDir(): string {
// Fallback: user-local agent directory // Fallback: user-local agent directory
const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const agentGsdDir = join(sfHome, "agent", "extensions", "sf"); const agentSfDir = join(sfHome, "agent", "extensions", "sf");
if (existsSync(join(agentGsdDir, "prompts"))) return agentGsdDir; if (existsSync(join(agentSfDir, "prompts"))) return agentSfDir;
// Last resort: return the module dir (warmCache will silently handle the miss) // Last resort: return the module dir (warmCache will silently handle the miss)
return moduleDir; return moduleDir;

View file

@ -12,12 +12,22 @@ Dispatch ALL slices simultaneously using the `subagent` tool in **parallel mode*
## Execution Protocol ## Execution Protocol
1. Call `subagent` with `tasks: [...]` containing one entry per slice below 1. Call `subagent` exactly once with the JSON payload below
2. Wait for ALL subagents to complete 2. Wait for ALL subagents to complete
3. Verify each slice's RESEARCH file was written (check the `.sf/{{mid}}/` directory) 3. Verify each slice's RESEARCH file was written (check the `.sf/{{mid}}/` directory)
4. If any subagent failed to write its RESEARCH file, re-run it individually 4. If any subagent failed to write its RESEARCH file, re-run it individually
5. Report which slices completed research and which (if any) failed 5. Report which slices completed research and which (if any) failed
## Required `subagent` Call Payload
Use this exact payload for the `subagent` tool. Do not invent agent names. Do not use legacy executor aliases.
```json
{{subagentCall}}
```
## Subagent Prompts ## Subagent Prompts
The same task payloads are expanded below for readability.
{{subagentPrompts}} {{subagentPrompts}}

View file

@ -144,7 +144,7 @@ export function isInheritedRepo(basePath: string): boolean {
// The git root is a proper ancestor. Check whether it already has .sf // The git root is a proper ancestor. Check whether it already has .sf
// (i.e. the parent project was initialised with SF). // (i.e. the parent project was initialised with SF).
if (isProjectGsd(join(root, ".sf"))) return false; if (isProjectSf(join(root, ".sf"))) return false;
// Walk up from basePath's parent to the git root checking for .sf. // Walk up from basePath's parent to the git root checking for .sf.
// Start at dirname(normalizedBase), NOT normalizedBase itself — finding // Start at dirname(normalizedBase), NOT normalizedBase itself — finding
@ -152,7 +152,7 @@ export function isInheritedRepo(basePath: string): boolean {
// says nothing about whether the git repo is inherited from an ancestor. // says nothing about whether the git repo is inherited from an ancestor.
let dir = dirname(normalizedBase); let dir = dirname(normalizedBase);
while (dir !== normalizedRoot && dir !== dirname(dir)) { while (dir !== normalizedRoot && dir !== dirname(dir)) {
if (isProjectGsd(join(dir, ".sf"))) return false; if (isProjectSf(join(dir, ".sf"))) return false;
dir = dirname(dir); dir = dirname(dir);
} }
@ -174,23 +174,23 @@ export function isInheritedRepo(basePath: string): boolean {
* Treating it as a project `.sf` would cause isInheritedRepo() to wrongly * Treating it as a project `.sf` would cause isInheritedRepo() to wrongly
* conclude that subdirectories are part of the home "project" (#2393). * conclude that subdirectories are part of the home "project" (#2393).
*/ */
function isProjectGsd(sfPath: string): boolean { function isProjectSf(sfPath: string): boolean {
if (!existsSync(sfPath)) return false; if (!existsSync(sfPath)) return false;
try { try {
const stat = lstatSync(sfPath); const stat = lstatSync(sfPath);
// Symlinks are always project .sf (created by ensureGsdSymlink). // Symlinks are always project .sf (created by ensureSfSymlink).
if (stat.isSymbolicLink()) return true; if (stat.isSymbolicLink()) return true;
// For real directories, check that this isn't the global SF home. // For real directories, check that this isn't the global SF home.
// Recompute sfHome dynamically so env overrides (SF_HOME) are // Recompute sfHome dynamically so env overrides (SF_HOME) are
// picked up at call time, not just at module load time. // picked up at call time, not just at module load time.
if (stat.isDirectory()) { if (stat.isDirectory()) {
const currentGsdHome = process.env.SF_HOME || join(homedir(), ".sf"); const currentSfHome = process.env.SF_HOME || join(homedir(), ".sf");
const normalizedGsdPath = canonicalizeExistingPath(sfPath); const normalizedSfPath = canonicalizeExistingPath(sfPath);
const normalizedGsdHome = canonicalizeExistingPath(currentGsdHome); const normalizedSfHome = canonicalizeExistingPath(currentSfHome);
if (normalizedGsdPath === normalizedGsdHome) return false; if (normalizedSfPath === normalizedSfHome) return false;
return true; return true;
} }
} catch { } catch {
@ -337,7 +337,7 @@ export function repoIdentity(basePath: string): string {
* Returns `$SF_STATE_DIR/projects/<hash>` if `SF_STATE_DIR` is set, * Returns `$SF_STATE_DIR/projects/<hash>` if `SF_STATE_DIR` is set,
* otherwise `~/.sf/projects/<hash>`. * otherwise `~/.sf/projects/<hash>`.
*/ */
export function externalGsdRoot(basePath: string): string { export function externalSfRoot(basePath: string): string {
const base = process.env.SF_STATE_DIR || sfHome; const base = process.env.SF_STATE_DIR || sfHome;
return join(base, "projects", repoIdentity(basePath)); return join(base, "projects", repoIdentity(basePath));
} }
@ -363,12 +363,12 @@ export function externalProjectsRoot(): string {
* directory, making tracked planning files appear deleted. * directory, making tracked planning files appear deleted.
* *
* This helper scans the project root for entries matching `.sf <digits>` and * This helper scans the project root for entries matching `.sf <digits>` and
* removes them. It is called early in `ensureGsdSymlink()` so that the * removes them. It is called early in `ensureSfSymlink()` so that the
* canonical `.sf` path is always the one in use. * canonical `.sf` path is always the one in use.
*/ */
const SF_NUMBERED_VARIANT_RE = /^\.sf \d+$/; const SF_NUMBERED_VARIANT_RE = /^\.sf \d+$/;
export function cleanNumberedGsdVariants(projectPath: string): string[] { export function cleanNumberedSfVariants(projectPath: string): string[] {
const removed: string[] = []; const removed: string[] = [];
try { try {
const entries = readdirSync(projectPath); const entries = readdirSync(projectPath);
@ -401,7 +401,7 @@ export function cleanNumberedGsdVariants(projectPath: string): string[] {
* The marker is gitignored by ensureGitignore(). Non-fatal: failure to write * The marker is gitignored by ensureGitignore(). Non-fatal: failure to write
* the marker must never block project setup. * the marker must never block project setup.
*/ */
function writeGsdIdMarker(projectPath: string, identity: string): void { function writeSfIdMarker(projectPath: string, identity: string): void {
try { try {
const markerPath = join(projectPath, ".sf-id"); const markerPath = join(projectPath, ".sf-id");
// Only write if content differs to avoid unnecessary disk writes. // Only write if content differs to avoid unnecessary disk writes.
@ -422,7 +422,7 @@ function writeGsdIdMarker(projectPath: string, identity: string): void {
* Read the `.sf-id` marker from the project root. * Read the `.sf-id` marker from the project root.
* Returns the identity hash, or null if the marker doesn't exist or is unreadable. * Returns the identity hash, or null if the marker doesn't exist or is unreadable.
*/ */
function readGsdIdMarker(projectPath: string): string | null { function readSfIdMarker(projectPath: string): string | null {
try { try {
const markerPath = join(projectPath, ".sf-id"); const markerPath = join(projectPath, ".sf-id");
if (!existsSync(markerPath)) return null; if (!existsSync(markerPath)) return null;
@ -462,7 +462,7 @@ export function hasExternalProjectState(externalPath: string): boolean {
* Returns the resolved external path (may differ from the computed identity). * Returns the resolved external path (may differ from the computed identity).
*/ */
function resolveExternalPathWithRecovery(projectPath: string): string { function resolveExternalPathWithRecovery(projectPath: string): string {
const computedPath = externalGsdRoot(projectPath); const computedPath = externalSfRoot(projectPath);
const computedId = repoIdentity(projectPath); const computedId = repoIdentity(projectPath);
// Check if computed path already has state — fast path, no recovery needed. // Check if computed path already has state — fast path, no recovery needed.
@ -471,7 +471,7 @@ function resolveExternalPathWithRecovery(projectPath: string): string {
} }
// Check for .sf-id marker from a previous location. // Check for .sf-id marker from a previous location.
const markerId = readGsdIdMarker(projectPath); const markerId = readSfIdMarker(projectPath);
if (markerId && markerId !== computedId) { if (markerId && markerId !== computedId) {
// The marker points to a different identity — the repo was likely moved. // The marker points to a different identity — the repo was likely moved.
const base = process.env.SF_STATE_DIR || sfHome; const base = process.env.SF_STATE_DIR || sfHome;
@ -528,20 +528,20 @@ function resolveExternalPathWithRecovery(projectPath: string): string {
* *
* Returns the resolved external path. * Returns the resolved external path.
*/ */
export function ensureGsdSymlink(projectPath: string): string { export function ensureSfSymlink(projectPath: string): string {
const result = ensureGsdSymlinkCore(projectPath); const result = ensureSfSymlinkCore(projectPath);
// Write .sf-id marker so future relocations can recover this state (#2750). // Write .sf-id marker so future relocations can recover this state (#2750).
// Only write for the project root (not subdirectories or worktrees that // Only write for the project root (not subdirectories or worktrees that
// delegate to a parent .sf). // delegate to a parent .sf).
if (!isInsideWorktree(projectPath)) { if (!isInsideWorktree(projectPath)) {
writeGsdIdMarker(projectPath, repoIdentity(projectPath)); writeSfIdMarker(projectPath, repoIdentity(projectPath));
} }
return result; return result;
} }
function ensureGsdSymlinkCore(projectPath: string): string { function ensureSfSymlinkCore(projectPath: string): string {
const externalPath = resolveExternalPathWithRecovery(projectPath); const externalPath = resolveExternalPathWithRecovery(projectPath);
const localSf = join(projectPath, ".sf"); const localSf = join(projectPath, ".sf");
const inWorktree = isInsideWorktree(projectPath); const inWorktree = isInsideWorktree(projectPath);
@ -566,14 +566,14 @@ function ensureGsdSymlinkCore(projectPath: string): string {
const normalizedProject = canonicalizeExistingPath(projectPath); const normalizedProject = canonicalizeExistingPath(projectPath);
const normalizedRoot = canonicalizeExistingPath(gitRoot); const normalizedRoot = canonicalizeExistingPath(gitRoot);
if (normalizedProject !== normalizedRoot) { if (normalizedProject !== normalizedRoot) {
const rootGsd = join(gitRoot, ".sf"); const rootSf = join(gitRoot, ".sf");
if (existsSync(rootGsd)) { if (existsSync(rootSf)) {
try { try {
const rootStat = lstatSync(rootGsd); const rootStat = lstatSync(rootSf);
if (rootStat.isSymbolicLink() || rootStat.isDirectory()) { if (rootStat.isSymbolicLink() || rootStat.isDirectory()) {
return rootStat.isSymbolicLink() return rootStat.isSymbolicLink()
? realpathSync(rootGsd) ? realpathSync(rootSf)
: rootGsd; : rootSf;
} }
} catch { } catch {
// Fall through to normal logic if we can't stat root .sf // Fall through to normal logic if we can't stat root .sf
@ -587,7 +587,7 @@ function ensureGsdSymlinkCore(projectPath: string): string {
// Clean up macOS numbered collision variants (.sf 2, .sf 3, etc.) before // Clean up macOS numbered collision variants (.sf 2, .sf 3, etc.) before
// any existence checks — otherwise they accumulate and confuse state (#2205). // any existence checks — otherwise they accumulate and confuse state (#2205).
cleanNumberedGsdVariants(projectPath); cleanNumberedSfVariants(projectPath);
// Ensure external directory exists // Ensure external directory exists
mkdirSync(externalPath, { recursive: true }); mkdirSync(externalPath, { recursive: true });

View file

@ -14,7 +14,7 @@ import type {
} from "@singularity-forge/pi-coding-agent"; } from "@singularity-forge/pi-coding-agent";
import { isAutoActive } from "./auto.js"; import { isAutoActive } from "./auto.js";
import { isGsdGitignored } from "./gitignore.js"; import { isSfGitignored } from "./gitignore.js";
import { buildExistingMilestonesContext } from "./guided-flow-queue.js"; import { buildExistingMilestonesContext } from "./guided-flow-queue.js";
import { getParkedReason } from "./milestone-actions.js"; import { getParkedReason } from "./milestone-actions.js";
import { findMilestoneIds } from "./milestone-ids.js"; import { findMilestoneIds } from "./milestone-ids.js";
@ -69,7 +69,7 @@ export async function handleRethink(
state, state,
); );
const commitInstruction = isGsdGitignored(basePath) const commitInstruction = isSfGitignored(basePath)
? "Do not commit planning artifacts — .sf/ is gitignored in this project." ? "Do not commit planning artifacts — .sf/ is gitignored in this project."
: 'After changes, run `git add .sf/ && git commit -m "docs(sf): rethink milestone plan"` to persist (rethink runs interactively outside auto-mode, so no system auto-commit)'; : 'After changes, run `git add .sf/ && git commit -m "docs(sf): rethink milestone plan"` to persist (rethink runs interactively outside auto-mode, so no system auto-commit)';

View file

@ -23,15 +23,22 @@ const MAX_ID_LENGTH = 64;
export class UnsafeIdError extends TypeError { export class UnsafeIdError extends TypeError {
constructor( constructor(
public readonly fieldName: string, fieldName: string,
public readonly reason: string, reason: string,
public readonly value: string, value: string,
) { ) {
super( super(
`${fieldName} is unsafe: ${reason} (got ${JSON.stringify(value).slice(0, 80)})`, `${fieldName} is unsafe: ${reason} (got ${JSON.stringify(value).slice(0, 80)})`,
); );
this.fieldName = fieldName;
this.reason = reason;
this.value = value;
this.name = "UnsafeIdError"; this.name = "UnsafeIdError";
} }
public readonly fieldName: string;
public readonly reason: string;
public readonly value: string;
} }
/** /**

View file

@ -1,4 +1,4 @@
// GSD2 + skill-manifest — per-unit-type skill allowlist resolver (RFC #4779) // SF2 + skill-manifest — per-unit-type skill allowlist resolver (RFC #4779)
// //
// Each auto-mode unit type can declare which skills are relevant to it. This // Each auto-mode unit type can declare which skills are relevant to it. This
// trims the set of skills considered for activation in the per-unit prompt, // trims the set of skills considered for activation in the per-unit prompt,
@ -168,7 +168,7 @@ export function warnIfManifestHasMissingSkills(
): void { ): void {
// Strict mode is intentionally opt-in via exactly "1"; values like "0" or // Strict mode is intentionally opt-in via exactly "1"; values like "0" or
// "false" must preserve the normal silent manifest behavior. // "false" must preserve the normal silent manifest behavior.
if (process.env.GSD_SKILL_MANIFEST_STRICT !== "1") return; if (process.env.SF_SKILL_MANIFEST_STRICT !== "1") return;
const allowlist = resolveSkillManifest(unitType); const allowlist = resolveSkillManifest(unitType);
if (!allowlist) return; if (!allowlist) return;
for (const name of allowlist) { for (const name of allowlist) {

View file

@ -92,7 +92,7 @@ Before installing, ensure the skill follows sf naming:
- Lowercase kebab-case directory name. - Lowercase kebab-case directory name.
- Match the directory name exactly to the `name:` field in frontmatter. - Match the directory name exactly to the `name:` field in frontmatter.
- No prefixes like `dr-`, `ace-`, `gsd-` — strip them. (`dr-spec-first-tdd``spec-first-tdd`.) - No prefixes like `dr-`, `ace-` — strip them. (`dr-spec-first-tdd``spec-first-tdd`.)
- See [`creating-skills`](../creating-skills/SKILL.md) for the full convention. - See [`creating-skills`](../creating-skills/SKILL.md) for the full convention.
## How to Acquire ## How to Acquire
@ -136,7 +136,7 @@ rsync -av -e ssh \
After fetching, **adapt for sf**: After fetching, **adapt for sf**:
- Strip foreign prefixes (`dr-`, `ace-`, `gsd-`, `letta-`). - Strip foreign prefixes (`dr-`, `ace-`, `letta-`).
- Replace foreign tooling references (Letta MCP tool calls, claude-flow CLIs) with sf-native equivalents (`rg`, `npm test`, `sf_*` tools, `advisory-partner` skill, etc.). - Replace foreign tooling references (Letta MCP tool calls, claude-flow CLIs) with sf-native equivalents (`rg`, `npm test`, `sf_*` tools, `advisory-partner` skill, etc.).
- Drop bootstrap gates that don't apply (`onboarding()`, `IN_NIX_SHELL`, etc.). - Drop bootstrap gates that don't apply (`onboarding()`, `IN_NIX_SHELL`, etc.).
- Cite sf doctrine: `AGENTS.md`, `docs/SPEC_FIRST_TDD.md`, the relevant sister skill. - Cite sf doctrine: `AGENTS.md`, `docs/SPEC_FIRST_TDD.md`, the relevant sister skill.
@ -176,7 +176,7 @@ User asks: "Can you help me test my React app's UI?"
- **Read every script before executing it.** No exceptions, even from trusted sources. - **Read every script before executing it.** No exceptions, even from trusted sources.
- **Don't `curl | bash`** unless the user has personally inspected and approved the URL. - **Don't `curl | bash`** unless the user has personally inspected and approved the URL.
- **Untrusted sources require explicit user approval** before download. - **Untrusted sources require explicit user approval** before download.
- **Strip foreign prefixes** when porting (`dr-`, `ace-`, `gsd-`, `letta-`). - **Strip foreign prefixes** when porting (`dr-`, `ace-`, `letta-`).
- **Adapt tooling references** to sf-native equivalents. - **Adapt tooling references** to sf-native equivalents.
- **Cite sf doctrine** — link `AGENTS.md` and `docs/SPEC_FIRST_TDD.md` rather than restating their rules. - **Cite sf doctrine** — link `AGENTS.md` and `docs/SPEC_FIRST_TDD.md` rather than restating their rules.
- **Don't overwrite an existing sf skill** without diffing first; if names collide, decide whether to merge, supersede, or rename. - **Don't overwrite an existing sf skill** without diffing first; if names collide, decide whether to merge, supersede, or rename.

View file

@ -17,7 +17,7 @@ The job: reduce ambiguity that would otherwise cause bad plans, wrong tests, or
- A milestone goal is "make it better" or "robust" or "fast" — vague verbs that aren't testable. - A milestone goal is "make it better" or "robust" or "fast" — vague verbs that aren't testable.
- A slice plan is being drafted but key boundaries are unstated. - A slice plan is being drafted but key boundaries are unstated.
- A change touches a security/auth surface and the threat model isn't named. - A change touches a security/auth surface and the threat model isn't named.
- An upstream port (pi-mono / gsd-2) leaves architectural intent ambiguous after reading the commit. - An upstream port (pi-mono legacy port) leaves architectural intent ambiguous after reading the commit.
If the request is concrete and the consumer is obvious, skip this skill — go straight to `brainstorming` or `spec-first-tdd`. If the request is concrete and the consumer is obvious, skip this skill — go straight to `brainstorming` or `spec-first-tdd`.

View file

@ -52,7 +52,7 @@ Look for:
| Decay type | Symptoms | Fix | | Decay type | Symptoms | Fix |
|---|---|---| |---|---|---|
| **Bloat** | `.sf/CODEBASE.md` is 5x its useful size; same fact stated 4 times. | Compress: keep one canonical statement, delete the rest. | | **Bloat** | `.sf/CODEBASE.md` is 5x its useful size; same fact stated 4 times. | Compress: keep one canonical statement, delete the rest. |
| **Stale** | A file references `extensions/gsd/` (renamed to `extensions/sf/`). | Update; or, if the fact is now self-evident from the code, delete. | | **Stale** | A file references `extensions/old-extension/` (renamed to `extensions/sf/`). | Update; or, if the fact is now self-evident from the code, delete. |
| **Contradiction** | `.sf/DECISIONS.md` says "use bun" but `AGENTS.md` says "npm canonical". | Find the canonical source (usually `AGENTS.md` for sf), fix the other. | | **Contradiction** | `.sf/DECISIONS.md` says "use bun" but `AGENTS.md` says "npm canonical". | Find the canonical source (usually `AGENTS.md` for sf), fix the other. |
| **Orphaned** | A reference points to a file that was deleted. | Delete the reference, or restore the file if it should still exist. | | **Orphaned** | A reference points to a file that was deleted. | Delete the reference, or restore the file if it should still exist. |
| **Skill overlap** | Two skills try to do the same job. | Either merge them or scope each to its distinct sub-case. | | **Skill overlap** | Two skills try to do the same job. | Either merge them or scope each to its distinct sub-case. |

View file

@ -108,7 +108,7 @@ porting-from-upstream/
├── SKILL.md (overview + which-upstream selection) ├── SKILL.md (overview + which-upstream selection)
└── references/ └── references/
├── pi-mono.md (cherry-pick patterns) ├── pi-mono.md (cherry-pick patterns)
├── gsd-2.md (manual port + naming translation) ├── legacy-port.md (manual port + naming translation)
└── bunker.md (skill harvest from remote host) └── bunker.md (skill harvest from remote host)
``` ```

View file

@ -73,7 +73,7 @@ git worktree remove ../singularity-forge-my-feature
- Another agent (sf auto-loop, another Claude session, a teammate) is working in the current directory. - Another agent (sf auto-loop, another Claude session, a teammate) is working in the current directory.
- A long-running build or test is in flight in one terminal and you need a parallel branch. - A long-running build or test is in flight in one terminal and you need a parallel branch.
- You're exploring a refactor that you may abandon — keep main clean. - You're exploring a refactor that you may abandon — keep main clean.
- You need to apply an upstream cherry-pick from `pi-mono` while a separate `gsd-2` port is in progress. - You need to apply an upstream cherry-pick from `pi-mono` while a separate legacy port is in progress.
## When NOT to Use ## When NOT to Use

View file

@ -302,9 +302,9 @@ function filterConflictingSlices(
/** /**
* Resolve the SF CLI binary path. * Resolve the SF CLI binary path.
* Same logic as parallel-orchestrator.ts resolveGsdBin(). * Same logic as parallel-orchestrator.ts resolveSfBin().
*/ */
function resolveGsdBin(): string | null { function resolveSfBin(): string | null {
if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) { if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) {
return process.env.SF_BIN_PATH; return process.env.SF_BIN_PATH;
} }
@ -341,7 +341,7 @@ function spawnSliceWorker(
if (!worker) return false; if (!worker) return false;
if (worker.process) return true; if (worker.process) return true;
const binPath = resolveGsdBin(); const binPath = resolveSfBin();
if (!binPath) return false; if (!binPath) return false;
let child: ChildProcess; let child: ChildProcess;

View file

@ -15,7 +15,7 @@ import {
import { findMilestoneIds } from "./milestone-ids.js"; import { findMilestoneIds } from "./milestone-ids.js";
import { getVisionAlignmentBlockingIssue } from "./milestone-quality.js"; import { getVisionAlignmentBlockingIssue } from "./milestone-quality.js";
import { isTerminalMilestoneSummaryContent } from "./milestone-summary-classifier.js"; import { isTerminalMilestoneSummaryContent } from "./milestone-summary-classifier.js";
import { nativeBatchParseGsdFiles } from "./native-parser-bridge.js"; import { nativeBatchParseSfFiles } from "./native-parser-bridge.js";
import { parsePlan, parseRoadmap } from "./parsers-legacy.js"; import { parsePlan, parseRoadmap } from "./parsers-legacy.js";
import { import {
resolveMilestoneFile, resolveMilestoneFile,
@ -1438,7 +1438,7 @@ export async function _deriveStateImpl(basePath: string): Promise<SFState> {
// Filesystem fallback: used when deriveStateFromDb() is not available // Filesystem fallback: used when deriveStateFromDb() is not available
// (pre-migration projects). The DB-backed path is preferred when available // (pre-migration projects). The DB-backed path is preferred when available
// — see deriveStateFromDb() above. // — see deriveStateFromDb() above.
const batchFiles = nativeBatchParseGsdFiles(sfDir); const batchFiles = nativeBatchParseSfFiles(sfDir);
if (batchFiles) { if (batchFiles) {
for (const f of batchFiles) { for (const f of batchFiles) {
const absPath = resolve(sfDir, f.path); const absPath = resolve(sfDir, f.path);

View file

@ -25,9 +25,9 @@ function makeTempDir(prefix: string): string {
test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models section is absent", () => { test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models section is absent", () => {
const originalCwd = process.cwd(); const originalCwd = process.cwd();
const originalGsdHome = process.env.SF_HOME; const originalSfHome = process.env.SF_HOME;
const tempProject = makeTempDir("sf-routing-project-"); const tempProject = makeTempDir("sf-routing-project-");
const tempGsdHome = makeTempDir("sf-routing-home-"); const tempSfHome = makeTempDir("sf-routing-home-");
try { try {
mkdirSync(join(tempProject, ".sf"), { recursive: true }); mkdirSync(join(tempProject, ".sf"), { recursive: true });
@ -45,7 +45,7 @@ test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models
].join("\n"), ].join("\n"),
"utf-8", "utf-8",
); );
process.env.SF_HOME = tempGsdHome; process.env.SF_HOME = tempSfHome;
process.chdir(tempProject); process.chdir(tempProject);
const config = resolvePreferredModelConfig("plan-slice", { const config = resolvePreferredModelConfig("plan-slice", {
@ -59,18 +59,18 @@ test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models
}); });
} finally { } finally {
process.chdir(originalCwd); process.chdir(originalCwd);
if (originalGsdHome === undefined) delete process.env.SF_HOME; if (originalSfHome === undefined) delete process.env.SF_HOME;
else process.env.SF_HOME = originalGsdHome; else process.env.SF_HOME = originalSfHome;
rmSync(tempProject, { recursive: true, force: true }); rmSync(tempProject, { recursive: true, force: true });
rmSync(tempGsdHome, { recursive: true, force: true }); rmSync(tempSfHome, { recursive: true, force: true });
} }
}); });
test("resolvePreferredModelConfig falls back to auto start model when heavy tier is absent", () => { test("resolvePreferredModelConfig falls back to auto start model when heavy tier is absent", () => {
const originalCwd = process.cwd(); const originalCwd = process.cwd();
const originalGsdHome = process.env.SF_HOME; const originalSfHome = process.env.SF_HOME;
const tempProject = makeTempDir("sf-routing-project-"); const tempProject = makeTempDir("sf-routing-project-");
const tempGsdHome = makeTempDir("sf-routing-home-"); const tempSfHome = makeTempDir("sf-routing-home-");
try { try {
mkdirSync(join(tempProject, ".sf"), { recursive: true }); mkdirSync(join(tempProject, ".sf"), { recursive: true });
@ -87,7 +87,7 @@ test("resolvePreferredModelConfig falls back to auto start model when heavy tier
].join("\n"), ].join("\n"),
"utf-8", "utf-8",
); );
process.env.SF_HOME = tempGsdHome; process.env.SF_HOME = tempSfHome;
process.chdir(tempProject); process.chdir(tempProject);
const config = resolvePreferredModelConfig("execute-task", { const config = resolvePreferredModelConfig("execute-task", {
@ -101,18 +101,18 @@ test("resolvePreferredModelConfig falls back to auto start model when heavy tier
}); });
} finally { } finally {
process.chdir(originalCwd); process.chdir(originalCwd);
if (originalGsdHome === undefined) delete process.env.SF_HOME; if (originalSfHome === undefined) delete process.env.SF_HOME;
else process.env.SF_HOME = originalGsdHome; else process.env.SF_HOME = originalSfHome;
rmSync(tempProject, { recursive: true, force: true }); rmSync(tempProject, { recursive: true, force: true });
rmSync(tempGsdHome, { recursive: true, force: true }); rmSync(tempSfHome, { recursive: true, force: true });
} }
}); });
test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", () => { test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", () => {
const originalCwd = process.cwd(); const originalCwd = process.cwd();
const originalGsdHome = process.env.SF_HOME; const originalSfHome = process.env.SF_HOME;
const tempProject = makeTempDir("sf-routing-project-"); const tempProject = makeTempDir("sf-routing-project-");
const tempGsdHome = makeTempDir("sf-routing-home-"); const tempSfHome = makeTempDir("sf-routing-home-");
try { try {
mkdirSync(join(tempProject, ".sf"), { recursive: true }); mkdirSync(join(tempProject, ".sf"), { recursive: true });
@ -130,7 +130,7 @@ test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", (
].join("\n"), ].join("\n"),
"utf-8", "utf-8",
); );
process.env.SF_HOME = tempGsdHome; process.env.SF_HOME = tempSfHome;
process.chdir(tempProject); process.chdir(tempProject);
const config = resolvePreferredModelConfig("plan-slice", { const config = resolvePreferredModelConfig("plan-slice", {
@ -144,23 +144,23 @@ test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", (
}); });
} finally { } finally {
process.chdir(originalCwd); process.chdir(originalCwd);
if (originalGsdHome === undefined) delete process.env.SF_HOME; if (originalSfHome === undefined) delete process.env.SF_HOME;
else process.env.SF_HOME = originalGsdHome; else process.env.SF_HOME = originalSfHome;
rmSync(tempProject, { recursive: true, force: true }); rmSync(tempProject, { recursive: true, force: true });
rmSync(tempGsdHome, { recursive: true, force: true }); rmSync(tempSfHome, { recursive: true, force: true });
} }
}); });
test("selectAndApplyModel does not let learned routing override an explicit execution model", async () => { test("selectAndApplyModel does not let learned routing override an explicit execution model", async () => {
const originalCwd = process.cwd(); const originalCwd = process.cwd();
const originalGsdHome = process.env.SF_HOME; const originalSfHome = process.env.SF_HOME;
const tempProject = makeTempDir("sf-routing-project-"); const tempProject = makeTempDir("sf-routing-project-");
const tempGsdHome = makeTempDir("sf-routing-home-"); const tempSfHome = makeTempDir("sf-routing-home-");
try { try {
mkdirSync(join(tempProject, ".sf"), { recursive: true }); mkdirSync(join(tempProject, ".sf"), { recursive: true });
writeFileSync( writeFileSync(
join(tempGsdHome, "preferences.md"), join(tempSfHome, "preferences.md"),
[ [
"---", "---",
"version: 1", "version: 1",
@ -175,7 +175,7 @@ test("selectAndApplyModel does not let learned routing override an explicit exec
["---", "version: 1", "models: {}", "---"].join("\n"), ["---", "version: 1", "models: {}", "---"].join("\n"),
"utf-8", "utf-8",
); );
process.env.SF_HOME = tempGsdHome; process.env.SF_HOME = tempSfHome;
process.chdir(tempProject); process.chdir(tempProject);
const availableModels = [ const availableModels = [
@ -227,10 +227,10 @@ test("selectAndApplyModel does not let learned routing override an explicit exec
assert.equal(result.appliedModel?.id, "kimi-for-coding"); assert.equal(result.appliedModel?.id, "kimi-for-coding");
} finally { } finally {
process.chdir(originalCwd); process.chdir(originalCwd);
if (originalGsdHome === undefined) delete process.env.SF_HOME; if (originalSfHome === undefined) delete process.env.SF_HOME;
else process.env.SF_HOME = originalGsdHome; else process.env.SF_HOME = originalSfHome;
rmSync(tempProject, { recursive: true, force: true }); rmSync(tempProject, { recursive: true, force: true });
rmSync(tempGsdHome, { recursive: true, force: true }); rmSync(tempSfHome, { recursive: true, force: true });
} }
}); });

View file

@ -12,7 +12,7 @@ import { join } from "node:path";
import { afterEach, before, describe, it } from "node:test"; import { afterEach, before, describe, it } from "node:test";
import { import {
getGsdArgumentCompletions, getSfArgumentCompletions,
TOP_LEVEL_SUBCOMMANDS, TOP_LEVEL_SUBCOMMANDS,
} from "../commands/catalog.ts"; } from "../commands/catalog.ts";
@ -116,8 +116,8 @@ describe("workflow catalog registration", () => {
assert.match(entry!.desc, /session model/i); assert.match(entry!.desc, /session model/i);
}); });
it("getGsdArgumentCompletions('m') includes model", () => { it("getSfArgumentCompletions('m') includes model", () => {
const completions = getGsdArgumentCompletions("m"); const completions = getSfArgumentCompletions("m");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
assert.ok(labels.includes("model"), "should include model completion"); assert.ok(labels.includes("model"), "should include model completion");
}); });
@ -129,8 +129,8 @@ describe("workflow catalog registration", () => {
assert.ok(entry!.desc.includes("run"), "description should mention run"); assert.ok(entry!.desc.includes("run"), "description should mention run");
}); });
it("getGsdArgumentCompletions('workflow ') returns six subcommands", () => { it("getSfArgumentCompletions('workflow ') returns six subcommands", () => {
const completions = getGsdArgumentCompletions("workflow "); const completions = getSfArgumentCompletions("workflow ");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
for (const sub of ["new", "run", "list", "validate", "pause", "resume"]) { for (const sub of ["new", "run", "list", "validate", "pause", "resume"]) {
assert.ok(labels.includes(sub), `missing completion: ${sub}`); assert.ok(labels.includes(sub), `missing completion: ${sub}`);
@ -138,15 +138,15 @@ describe("workflow catalog registration", () => {
assert.equal(labels.length, 6, "should have exactly 6 subcommands"); assert.equal(labels.length, 6, "should have exactly 6 subcommands");
}); });
it("getGsdArgumentCompletions('workflow r') filters to run and resume", () => { it("getSfArgumentCompletions('workflow r') filters to run and resume", () => {
const completions = getGsdArgumentCompletions("workflow r"); const completions = getSfArgumentCompletions("workflow r");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
assert.ok(labels.includes("run"), "should include run"); assert.ok(labels.includes("run"), "should include run");
assert.ok(labels.includes("resume"), "should include resume"); assert.ok(labels.includes("resume"), "should include resume");
assert.ok(!labels.includes("list"), "should not include list"); assert.ok(!labels.includes("list"), "should not include list");
}); });
it("getGsdArgumentCompletions('workflow run ') returns definition names", () => { it("getSfArgumentCompletions('workflow run ') returns definition names", () => {
const base = makeTmpBase(); const base = makeTmpBase();
writeDefinition(base, "deploy-pipeline", SIMPLE_DEF); writeDefinition(base, "deploy-pipeline", SIMPLE_DEF);
writeDefinition(base, "test-suite", SIMPLE_DEF); writeDefinition(base, "test-suite", SIMPLE_DEF);
@ -154,7 +154,7 @@ describe("workflow catalog registration", () => {
// Change cwd so the completion scanner can find `.sf/workflow-defs/` // Change cwd so the completion scanner can find `.sf/workflow-defs/`
process.chdir(base); process.chdir(base);
const completions = getGsdArgumentCompletions("workflow run "); const completions = getSfArgumentCompletions("workflow run ");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
assert.ok( assert.ok(
labels.includes("deploy-pipeline"), labels.includes("deploy-pipeline"),
@ -163,25 +163,25 @@ describe("workflow catalog registration", () => {
assert.ok(labels.includes("test-suite"), "should include test-suite"); assert.ok(labels.includes("test-suite"), "should include test-suite");
}); });
it("getGsdArgumentCompletions('workflow validate ') returns definition names", () => { it("getSfArgumentCompletions('workflow validate ') returns definition names", () => {
const base = makeTmpBase(); const base = makeTmpBase();
writeDefinition(base, "my-workflow", SIMPLE_DEF); writeDefinition(base, "my-workflow", SIMPLE_DEF);
process.chdir(base); process.chdir(base);
const completions = getGsdArgumentCompletions("workflow validate "); const completions = getSfArgumentCompletions("workflow validate ");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
assert.ok(labels.includes("my-workflow"), "should include my-workflow"); assert.ok(labels.includes("my-workflow"), "should include my-workflow");
}); });
it("getGsdArgumentCompletions('workflow run d') filters by prefix", () => { it("getSfArgumentCompletions('workflow run d') filters by prefix", () => {
const base = makeTmpBase(); const base = makeTmpBase();
writeDefinition(base, "deploy-pipeline", SIMPLE_DEF); writeDefinition(base, "deploy-pipeline", SIMPLE_DEF);
writeDefinition(base, "test-suite", SIMPLE_DEF); writeDefinition(base, "test-suite", SIMPLE_DEF);
process.chdir(base); process.chdir(base);
const completions = getGsdArgumentCompletions("workflow run d"); const completions = getSfArgumentCompletions("workflow run d");
const labels = completions.map((c: any) => c.label); const labels = completions.map((c: any) => c.label);
assert.ok( assert.ok(
labels.includes("deploy-pipeline"), labels.includes("deploy-pipeline"),

View file

@ -1,4 +1,4 @@
// GSD-2 — #4782 phase 3 batch 3: complete-slice migrated through composer. // SF — #4782 phase 3 batch 3: complete-slice migrated through composer.
import assert from "node:assert/strict"; import assert from "node:assert/strict";
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
@ -18,9 +18,9 @@ import {
} from "../sf-db.ts"; } from "../sf-db.ts";
function makeBase(): string { function makeBase(): string {
const base = mkdtempSync(join(tmpdir(), "gsd-completeslice-composer-")); const base = mkdtempSync(join(tmpdir(), "sf-completeslice-composer-"));
mkdirSync( mkdirSync(
join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"),
{ recursive: true }, { recursive: true },
); );
return base; return base;
@ -37,7 +37,7 @@ function cleanup(base: string): void {
} }
function seed(base: string, mid: string): void { function seed(base: string, mid: string): void {
openDatabase(join(base, ".gsd", "gsd.db")); openDatabase(join(base, ".sf", "sf.db"));
insertMilestone({ insertMilestone({
id: mid, id: mid,
title: "Composer Test", title: "Composer Test",
@ -80,17 +80,17 @@ function seed(base: string, mid: string): void {
function writeArtifacts(base: string): void { function writeArtifacts(base: string): void {
writeFileSync( writeFileSync(
join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"),
"# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n", "# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n",
); );
writeFileSync( writeFileSync(
join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"),
"# S01 Plan\n\nSlice plan body.\n", "# S01 Plan\n\nSlice plan body.\n",
); );
writeFileSync( writeFileSync(
join( join(
base, base,
".gsd", ".sf",
"milestones", "milestones",
"M001", "M001",
"slices", "slices",
@ -163,11 +163,11 @@ test("#4782 phase 3: buildCompleteSlicePrompt handles missing task summaries gra
seed(base, "M001"); seed(base, "M001");
// Write roadmap + plan but no task summaries // Write roadmap + plan but no task summaries
writeFileSync( writeFileSync(
join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"),
"# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n", "# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n",
); );
writeFileSync( writeFileSync(
join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"),
"# S01 Plan\n", "# S01 Plan\n",
); );

View file

@ -12,9 +12,9 @@ test("copyPlanningArtifacts skips when source and destination .sf resolve to the
const fnBody = src.slice(fnIdx, fnIdx + 2400); const fnBody = src.slice(fnIdx, fnIdx + 2400);
const guardIdx = fnBody.indexOf("if (isSamePath(srcGsd, dstGsd)) return;"); const guardIdx = fnBody.indexOf("if (isSamePath(srcSf, dstSf)) return;");
const copyIdx = fnBody.indexOf( const copyIdx = fnBody.indexOf(
'safeCopyRecursive(join(srcGsd, "milestones")', 'safeCopyRecursive(join(srcSf, "milestones")',
); );
assert.ok( assert.ok(

View file

@ -24,7 +24,7 @@ import {
writeDebugSummary, writeDebugSummary,
} from "../debug-logger.ts"; } from "../debug-logger.ts";
function createTempGsdDir(): string { function createTempSfDir(): string {
const tmp = mkdtempSync(join(tmpdir(), "sf-debug-test-")); const tmp = mkdtempSync(join(tmpdir(), "sf-debug-test-"));
mkdirSync(join(tmp, ".sf"), { recursive: true }); mkdirSync(join(tmp, ".sf"), { recursive: true });
return tmp; return tmp;
@ -37,7 +37,7 @@ function readLogLines(logPath: string): Record<string, unknown>[] {
} }
test("enableDebug creates log file and sets enabled", () => { test("enableDebug creates log file and sets enabled", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
assert.strictEqual(isDebugEnabled(), true); assert.strictEqual(isDebugEnabled(), true);
@ -56,7 +56,7 @@ test("enableDebug creates log file and sets enabled", () => {
}); });
test("debugLog writes JSONL events", () => { test("debugLog writes JSONL events", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
debugLog("test-event", { foo: "bar", num: 42 }); debugLog("test-event", { foo: "bar", num: 42 });
@ -82,7 +82,7 @@ test("debugLog is no-op when disabled", () => {
}); });
test("debugTime measures elapsed time", async () => { test("debugTime measures elapsed time", async () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
const stop = debugTime("timed-op"); const stop = debugTime("timed-op");
@ -111,7 +111,7 @@ test("debugTime returns no-op when disabled", () => {
}); });
test("debugCount increments counters", () => { test("debugCount increments counters", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
debugCount("dispatches"); debugCount("dispatches");
@ -128,7 +128,7 @@ test("debugCount increments counters", () => {
}); });
test("debugPeak tracks max values", () => { test("debugPeak tracks max values", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
debugPeak("ttsrPeakBuffer", 100); debugPeak("ttsrPeakBuffer", 100);
@ -143,7 +143,7 @@ test("debugPeak tracks max values", () => {
}); });
test("writeDebugSummary includes all counters and disables debug", () => { test("writeDebugSummary includes all counters and disables debug", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
debugCount("deriveStateCalls", 10); debugCount("deriveStateCalls", 10);
@ -171,7 +171,7 @@ test("writeDebugSummary includes all counters and disables debug", () => {
}); });
test("auto-prunes old debug logs", () => { test("auto-prunes old debug logs", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
const debugDir = join(tmp, ".sf", "debug"); const debugDir = join(tmp, ".sf", "debug");
mkdirSync(debugDir, { recursive: true }); mkdirSync(debugDir, { recursive: true });
@ -196,7 +196,7 @@ test("auto-prunes old debug logs", () => {
}); });
test("disableDebug returns log path", () => { test("disableDebug returns log path", () => {
const tmp = createTempGsdDir(); const tmp = createTempSfDir();
enableDebug(tmp); enableDebug(tmp);
const logPath = getDebugLogPath(); const logPath = getDebugLogPath();

View file

@ -467,14 +467,14 @@ import { runPostExecutionChecks } from "./post-execution-checks.ts";
test("handles large number of files without timeout", () => { test("handles large number of files without timeout", () => {
// Use all available SF source files to stress test // Use all available SF source files to stress test
const allGsdFiles = REAL_SF_FILES.map((f) => join(SF_SRC_DIR, f)); const allSfFiles = REAL_SF_FILES.map((f) => join(SF_SRC_DIR, f));
const task = createTask({ const task = createTask({
id: "T01", id: "T01",
title: "Large refactor touching many files", title: "Large refactor touching many files",
status: "complete", status: "complete",
key_files: allGsdFiles, key_files: allSfFiles,
files: allGsdFiles, files: allSfFiles,
}); });
const start = performance.now(); const start = performance.now();

View file

@ -95,7 +95,7 @@ test('withFileLockSync: onLocked="skip" runs callback unlocked on ELOCKED', () =
} }
const lockfile = require("proper-lockfile"); const lockfile = require("proper-lockfile");
const dir = mkdtempSync(join(tmpdir(), "gsd-file-lock-test-")); const dir = mkdtempSync(join(tmpdir(), "sf-file-lock-test-"));
const filePath = join(dir, "locked.jsonl"); const filePath = join(dir, "locked.jsonl");
writeFileSync(filePath, "{}\n", "utf-8"); writeFileSync(filePath, "{}\n", "utf-8");
@ -161,7 +161,7 @@ test('withFileLock: onLocked="skip" runs callback unlocked on ELOCKED', async ()
} }
const lockfile = require("proper-lockfile"); const lockfile = require("proper-lockfile");
const dir = mkdtempSync(join(tmpdir(), "gsd-file-lock-test-")); const dir = mkdtempSync(join(tmpdir(), "sf-file-lock-test-"));
const filePath = join(dir, "locked.jsonl"); const filePath = join(dir, "locked.jsonl");
writeFileSync(filePath, "{}\n", "utf-8"); writeFileSync(filePath, "{}\n", "utf-8");

View file

@ -14,7 +14,7 @@ import { join } from "node:path";
import { afterEach, beforeEach, describe, test } from "node:test"; import { afterEach, beforeEach, describe, test } from "node:test";
import { repairMissingSfSymlinkForHeadless } from "../../../../headless.ts"; import { repairMissingSfSymlinkForHeadless } from "../../../../headless.ts";
import { externalGsdRoot } from "../repo-identity.ts"; import { externalSfRoot } from "../repo-identity.ts";
function run(command: string, cwd: string): string { function run(command: string, cwd: string): string {
return execSync(command, { return execSync(command, {
@ -52,7 +52,7 @@ describe("headless project repair", () => {
}); });
test("re-links .sf when matching external project state already exists", () => { test("re-links .sf when matching external project state already exists", () => {
const externalPath = externalGsdRoot(base); const externalPath = externalSfRoot(base);
mkdirSync(join(externalPath, "milestones"), { recursive: true }); mkdirSync(join(externalPath, "milestones"), { recursive: true });
const repairedPath = repairMissingSfSymlinkForHeadless(base); const repairedPath = repairMissingSfSymlinkForHeadless(base);

View file

@ -56,7 +56,7 @@ function createTempRepo(): string {
return dir; return dir;
} }
function createTempRepoWithExternalGsd(): { function createTempRepoWithExternalSf(): {
repo: string; repo: string;
externalState: string; externalState: string;
} { } {
@ -137,8 +137,8 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => {
return d; return d;
} }
function freshRepoWithExternalGsd(): { repo: string; externalState: string } { function freshRepoWithExternalSf(): { repo: string; externalState: string } {
const { repo, externalState } = createTempRepoWithExternalGsd(); const { repo, externalState } = createTempRepoWithExternalSf();
tempDirs.push(repo, externalState); tempDirs.push(repo, externalState);
return { repo, externalState }; return { repo, externalState };
} }
@ -969,7 +969,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => {
}); });
test("#2156: mergeMilestoneToMain removes external-state worktrees using the milestone branch name", () => { test("#2156: mergeMilestoneToMain removes external-state worktrees using the milestone branch name", () => {
const { repo, externalState } = freshRepoWithExternalGsd(); const { repo, externalState } = freshRepoWithExternalSf();
const wtPath = createAutoWorktree(repo, "M215"); const wtPath = createAutoWorktree(repo, "M215");
addSliceToMilestone(repo, wtPath, "M215", "S01", "External cleanup", [ addSliceToMilestone(repo, wtPath, "M215", "S01", "External cleanup", [

View file

@ -55,10 +55,10 @@ describe("doctor false-positives (#3105)", async () => {
// Create a worktree directory that only has .sf/doctor-history.jsonl // Create a worktree directory that only has .sf/doctor-history.jsonl
const wtDir = join(sf, "worktrees", "M042"); const wtDir = join(sf, "worktrees", "M042");
const wtGsdDir = join(wtDir, ".sf"); const wtSfDir = join(wtDir, ".sf");
mkdirSync(wtGsdDir, { recursive: true }); mkdirSync(wtSfDir, { recursive: true });
writeFileSync( writeFileSync(
join(wtGsdDir, "doctor-history.jsonl"), join(wtSfDir, "doctor-history.jsonl"),
'{"ts":"2026-01-01","ok":true}\n', '{"ts":"2026-01-01","ok":true}\n',
); );

View file

@ -760,13 +760,13 @@ describe("doctor-git", async () => {
// Move .sf to an external location and replace with a symlink. // Move .sf to an external location and replace with a symlink.
// This simulates the ~/.sf/projects/<hash> layout where .sf is a symlink. // This simulates the ~/.sf/projects/<hash> layout where .sf is a symlink.
const externalGsd = join( const externalSf = join(
realpathSync(mkdtempSync(join(tmpdir(), "doc-git-symlink-"))), realpathSync(mkdtempSync(join(tmpdir(), "doc-git-symlink-"))),
"sf-data", "sf-data",
); );
cleanups.push(externalGsd); cleanups.push(externalSf);
renameSync(join(dir, ".sf"), externalGsd); renameSync(join(dir, ".sf"), externalSf);
symlinkSync(externalGsd, join(dir, ".sf")); symlinkSync(externalSf, join(dir, ".sf"));
// Create a real registered worktree under the (now symlinked) .sf/worktrees/ // Create a real registered worktree under the (now symlinked) .sf/worktrees/
mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true });

View file

@ -343,9 +343,10 @@ None
"fix adds patterns", "fix adds patterns",
); );
// Verify .sf entry was added (external state symlink) assert.doesNotThrow(
const content = readFileSync(join(dir, ".gitignore"), "utf-8"); () => run("git check-ignore -q .sf", dir),
assert.ok(content.includes(".sf"), "gitignore now has .sf entry"); "git now ignores .sf after fix",
);
}); });
} else { } else {
} }
@ -377,38 +378,42 @@ node_modules/
} else { } else {
} }
// ─── Test 8b: Symlinked .gsd without .gitignore entry (#4423) ───── // ─── Test 8b: Symlinked .sf without .gitignore entry (#4423) ─────
if (process.platform !== "win32") { if (process.platform !== "win32") {
test("symlinked_gsd_unignored", async () => { test("symlinked_sf_unignored", async () => {
const dir = createGitProject(); const dir = createGitProject();
cleanups.push(dir); cleanups.push(dir);
// Create .gsd as a symlink to an external directory (standard external // Create .sf as a symlink to an external directory (standard external
// state layout), and write a .gitignore that does NOT list .gsd. // state layout), and write a .gitignore that does NOT list .sf.
const externalGsd = mkdtempSync(join(tmpdir(), "gsd-external-doctor-")); const externalSf = mkdtempSync(join(tmpdir(), "sf-external-doctor-"));
cleanups.push(externalGsd); cleanups.push(externalSf);
writeFileSync(join(externalGsd, "STATE.md"), "# State\n"); writeFileSync(join(externalSf, "STATE.md"), "# State\n");
symlinkSync(externalGsd, join(dir, ".gsd")); symlinkSync(externalSf, join(dir, ".sf"));
writeFileSync(join(dir, ".gitignore"), "node_modules/\n"); writeFileSync(join(dir, ".gitignore"), "node_modules/\n");
const detect = await runSFDoctor(dir); const detect = await runSFDoctor(dir);
const symlinkIssues = detect.issues.filter( const symlinkIssues = detect.issues.filter(
(i: any) => i.code === "symlinked_gsd_unignored", (i: any) => i.code === "symlinked_sf_unignored",
); );
assert.ok( assert.ok(
symlinkIssues.length > 0, symlinkIssues.length > 0,
"detects symlinked .gsd without gitignore entry", "detects symlinked .sf without gitignore entry",
); );
const fixed = await runSFDoctor(dir, { fix: true }); const fixed = await runSFDoctor(dir, { fix: true });
assert.ok( assert.ok(
fixed.fixesApplied.some((f: any) => f.includes(".gitignore")), fixed.fixesApplied.some((f: any) =>
"fix updates .gitignore", f.includes("added missing SF runtime patterns"),
),
"fix adds SF runtime ignore patterns",
); );
const content = readFileSync(join(dir, ".gitignore"), "utf-8"); assert.doesNotThrow(
assert.ok(/^\.gsd\/?$/m.test(content), "gitignore now has .gsd entry"); () => run("git check-ignore -q .sf", dir),
"git now ignores symlinked .sf after fix",
);
}); });
} else { } else {
} }

View file

@ -143,8 +143,8 @@ describe("doctor", async () => {
// ─── Milestone summary detection: missing summary ────────────────────── // ─── Milestone summary detection: missing summary ──────────────────────
test("doctor detects missing milestone summary", async () => { test("doctor detects missing milestone summary", async () => {
const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-test-")); const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-test-"));
const msGsd = join(msBase, ".sf"); const msSf = join(msBase, ".sf");
const msMDir = join(msGsd, "milestones", "M001"); const msMDir = join(msSf, "milestones", "M001");
const msSDir = join(msMDir, "slices", "S01"); const msSDir = join(msMDir, "slices", "S01");
const msTDir = join(msSDir, "tasks"); const msTDir = join(msSDir, "tasks");
mkdirSync(msTDir, { recursive: true }); mkdirSync(msTDir, { recursive: true });
@ -241,8 +241,8 @@ parent: M001
// ─── Milestone summary detection: summary present (no false positive) ── // ─── Milestone summary detection: summary present (no false positive) ──
test("doctor does NOT flag milestone with summary", async () => { test("doctor does NOT flag milestone with summary", async () => {
const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-ok-test-")); const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-ok-test-"));
const msGsd = join(msBase, ".sf"); const msSf = join(msBase, ".sf");
const msMDir = join(msGsd, "milestones", "M001"); const msMDir = join(msSf, "milestones", "M001");
const msSDir = join(msMDir, "slices", "S01"); const msSDir = join(msMDir, "slices", "S01");
const msTDir = join(msSDir, "tasks"); const msTDir = join(msSDir, "tasks");
mkdirSync(msTDir, { recursive: true }); mkdirSync(msTDir, { recursive: true });
@ -317,8 +317,8 @@ parent: M001
// ─── blocker_discovered_no_replan detection ──────────────────────────── // ─── blocker_discovered_no_replan detection ────────────────────────────
test("doctor detects blocker_discovered_no_replan", async () => { test("doctor detects blocker_discovered_no_replan", async () => {
const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-test-")); const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-test-"));
const bGsd = join(bBase, ".sf"); const bSf = join(bBase, ".sf");
const bMDir = join(bGsd, "milestones", "M001"); const bMDir = join(bSf, "milestones", "M001");
const bSDir = join(bMDir, "slices", "S01"); const bSDir = join(bMDir, "slices", "S01");
const bTDir = join(bSDir, "tasks"); const bTDir = join(bSDir, "tasks");
mkdirSync(bTDir, { recursive: true }); mkdirSync(bTDir, { recursive: true });
@ -408,8 +408,8 @@ Discovered an issue.
// ─── blocker_discovered with REPLAN.md (no false positive) ───────────── // ─── blocker_discovered with REPLAN.md (no false positive) ─────────────
test("doctor does NOT flag blocker when REPLAN.md exists", async () => { test("doctor does NOT flag blocker when REPLAN.md exists", async () => {
const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-ok-test-")); const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-ok-test-"));
const bGsd = join(bBase, ".sf"); const bSf = join(bBase, ".sf");
const bMDir = join(bGsd, "milestones", "M001"); const bMDir = join(bSf, "milestones", "M001");
const bSDir = join(bMDir, "slices", "S01"); const bSDir = join(bMDir, "slices", "S01");
const bTDir = join(bSDir, "tasks"); const bTDir = join(bSDir, "tasks");
mkdirSync(bTDir, { recursive: true }); mkdirSync(bTDir, { recursive: true });
@ -482,8 +482,8 @@ Discovered an issue.
// ─── Must-have verification: all addressed → no issue ───────────────── // ─── Must-have verification: all addressed → no issue ─────────────────
test("doctor: done task with must-haves all addressed → no issue", async () => { test("doctor: done task with must-haves all addressed → no issue", async () => {
const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-ok-")); const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-ok-"));
const mhGsd = join(mhBase, ".sf"); const mhSf = join(mhBase, ".sf");
const mhMDir = join(mhGsd, "milestones", "M001"); const mhMDir = join(mhSf, "milestones", "M001");
const mhSDir = join(mhMDir, "slices", "S01"); const mhSDir = join(mhMDir, "slices", "S01");
const mhTDir = join(mhSDir, "tasks"); const mhTDir = join(mhSDir, "tasks");
mkdirSync(mhTDir, { recursive: true }); mkdirSync(mhTDir, { recursive: true });
@ -523,8 +523,8 @@ Discovered an issue.
// ─── Must-have verification: not addressed → warning fired ─────────── // ─── Must-have verification: not addressed → warning fired ───────────
test("doctor: done task with must-haves NOT addressed → warning", async () => { test("doctor: done task with must-haves NOT addressed → warning", async () => {
const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-fail-")); const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-fail-"));
const mhGsd = join(mhBase, ".sf"); const mhSf = join(mhBase, ".sf");
const mhMDir = join(mhGsd, "milestones", "M001"); const mhMDir = join(mhSf, "milestones", "M001");
const mhSDir = join(mhMDir, "slices", "S01"); const mhSDir = join(mhMDir, "slices", "S01");
const mhTDir = join(mhSDir, "tasks"); const mhTDir = join(mhSDir, "tasks");
mkdirSync(mhTDir, { recursive: true }); mkdirSync(mhTDir, { recursive: true });
@ -588,8 +588,8 @@ Discovered an issue.
// ─── Must-have verification: no task plan → no issue ───────────────── // ─── Must-have verification: no task plan → no issue ─────────────────
test("doctor: done task with no task plan file → no issue", async () => { test("doctor: done task with no task plan file → no issue", async () => {
const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-noplan-")); const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-noplan-"));
const mhGsd = join(mhBase, ".sf"); const mhSf = join(mhBase, ".sf");
const mhMDir = join(mhGsd, "milestones", "M001"); const mhMDir = join(mhSf, "milestones", "M001");
const mhSDir = join(mhMDir, "slices", "S01"); const mhSDir = join(mhMDir, "slices", "S01");
const mhTDir = join(mhSDir, "tasks"); const mhTDir = join(mhSDir, "tasks");
mkdirSync(mhTDir, { recursive: true }); mkdirSync(mhTDir, { recursive: true });
@ -623,8 +623,8 @@ Discovered an issue.
// ─── Must-have verification: plan exists but no Must-Haves section → no issue // ─── Must-have verification: plan exists but no Must-Haves section → no issue
test("doctor: done task with plan but no Must-Haves section → no issue", async () => { test("doctor: done task with plan but no Must-Haves section → no issue", async () => {
const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-nosect-")); const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-nosect-"));
const mhGsd = join(mhBase, ".sf"); const mhSf = join(mhBase, ".sf");
const mhMDir = join(mhGsd, "milestones", "M001"); const mhMDir = join(mhSf, "milestones", "M001");
const mhSDir = join(mhMDir, "slices", "S01"); const mhSDir = join(mhMDir, "slices", "S01");
const mhTDir = join(mhSDir, "tasks"); const mhTDir = join(mhSDir, "tasks");
mkdirSync(mhTDir, { recursive: true }); mkdirSync(mhTDir, { recursive: true });
@ -717,8 +717,8 @@ Discovered an issue.
// ─── doctor detects delimiter_in_title for milestone ─────────────────── // ─── doctor detects delimiter_in_title for milestone ───────────────────
test("doctor detects em dash in milestone title", async () => { test("doctor detects em dash in milestone title", async () => {
const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-test-")); const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-test-"));
const dtGsd = join(dtBase, ".sf"); const dtSf = join(dtBase, ".sf");
const dtMDir = join(dtGsd, "milestones", "M001"); const dtMDir = join(dtSf, "milestones", "M001");
const dtSDir = join(dtMDir, "slices", "S01"); const dtSDir = join(dtMDir, "slices", "S01");
const dtTDir = join(dtSDir, "tasks"); const dtTDir = join(dtSDir, "tasks");
mkdirSync(dtTDir, { recursive: true }); mkdirSync(dtTDir, { recursive: true });
@ -776,8 +776,8 @@ Discovered an issue.
// ─── doctor detects delimiter_in_title for slice ──────────────────────── // ─── doctor detects delimiter_in_title for slice ────────────────────────
test("doctor detects em dash in slice title", async () => { test("doctor detects em dash in slice title", async () => {
const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-slice-")); const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-slice-"));
const dtGsd = join(dtBase, ".sf"); const dtSf = join(dtBase, ".sf");
const dtMDir = join(dtGsd, "milestones", "M001"); const dtMDir = join(dtSf, "milestones", "M001");
const dtSDir = join(dtMDir, "slices", "S01"); const dtSDir = join(dtMDir, "slices", "S01");
const dtTDir = join(dtSDir, "tasks"); const dtTDir = join(dtSDir, "tasks");
mkdirSync(dtTDir, { recursive: true }); mkdirSync(dtTDir, { recursive: true });
@ -823,8 +823,8 @@ Discovered an issue.
// ─── doctor does NOT flag clean titles ────────────────────────────────── // ─── doctor does NOT flag clean titles ──────────────────────────────────
test("doctor does NOT flag milestone with clean title", async () => { test("doctor does NOT flag milestone with clean title", async () => {
const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-clean-")); const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-clean-"));
const dtGsd = join(dtBase, ".sf"); const dtSf = join(dtBase, ".sf");
const dtMDir = join(dtGsd, "milestones", "M001"); const dtMDir = join(dtSf, "milestones", "M001");
const dtSDir = join(dtMDir, "slices", "S01"); const dtSDir = join(dtMDir, "slices", "S01");
const dtTDir = join(dtSDir, "tasks"); const dtTDir = join(dtSDir, "tasks");
mkdirSync(dtTDir, { recursive: true }); mkdirSync(dtTDir, { recursive: true });

View file

@ -457,8 +457,8 @@ describe("feature-branch-lifecycle-integration", async () => {
// With external state, worktree .sf is a symlink to shared state. // With external state, worktree .sf is a symlink to shared state.
// Verify symlink was created (planning files are shared, not copied). // Verify symlink was created (planning files are shared, not copied).
const wtGsd = join(wtPath, ".sf"); const wtSf = join(wtPath, ".sf");
assert.ok(existsSync(wtGsd), "worktree .sf exists (symlink or dir)"); assert.ok(existsSync(wtSf), "worktree .sf exists (symlink or dir)");
// Clean up: chdir back before teardown // Clean up: chdir back before teardown
process.chdir(savedCwd); process.chdir(savedCwd);

View file

@ -1686,13 +1686,13 @@ describe("git-service", async () => {
const repo = initTempRepo(); const repo = initTempRepo();
// Create the real .sf directory outside the repo, then symlink it // Create the real .sf directory outside the repo, then symlink it
const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-")); const externalSf = mkdtempSync(join(tmpdir(), "sf-external-"));
mkdirSync(join(externalGsd, "activity"), { recursive: true }); mkdirSync(join(externalSf, "activity"), { recursive: true });
writeFileSync(join(externalGsd, "activity", "log.jsonl"), "log data"); writeFileSync(join(externalSf, "activity", "log.jsonl"), "log data");
writeFileSync(join(externalGsd, "STATE.md"), "# State"); writeFileSync(join(externalSf, "STATE.md"), "# State");
// Symlink .sf -> external directory // Symlink .sf -> external directory
symlinkSync(externalGsd, join(repo, ".sf")); symlinkSync(externalSf, join(repo, ".sf"));
// Add .gitignore so .sf/ is ignored // Add .gitignore so .sf/ is ignored
writeFileSync(join(repo, ".gitignore"), ".sf\n"); writeFileSync(join(repo, ".gitignore"), ".sf\n");
@ -1737,14 +1737,14 @@ describe("git-service", async () => {
assert.ok(!staged.includes(".sf"), ".sf content not staged"); assert.ok(!staged.includes(".sf"), ".sf content not staged");
rmSync(repo, { recursive: true, force: true }); rmSync(repo, { recursive: true, force: true });
rmSync(externalGsd, { recursive: true, force: true }); rmSync(externalSf, { recursive: true, force: true });
}); });
test("GitServiceImpl: symlinked .sf stages explicit untracked task files", () => { test("GitServiceImpl: symlinked .sf stages explicit untracked task files", () => {
const repo = initTempRepo(); const repo = initTempRepo();
const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-")); const externalSf = mkdtempSync(join(tmpdir(), "sf-external-"));
mkdirSync(join(externalGsd, "activity"), { recursive: true }); mkdirSync(join(externalSf, "activity"), { recursive: true });
symlinkSync(externalGsd, join(repo, ".sf")); symlinkSync(externalSf, join(repo, ".sf"));
writeFileSync(join(repo, ".gitignore"), ".sf\n"); writeFileSync(join(repo, ".gitignore"), ".sf\n");
createFile(repo, "cmd/installer/main.go", "package main\n"); createFile(repo, "cmd/installer/main.go", "package main\n");
run("git add -A", repo); run("git add -A", repo);
@ -1776,7 +1776,7 @@ describe("git-service", async () => {
); );
rmSync(repo, { recursive: true, force: true }); rmSync(repo, { recursive: true, force: true });
rmSync(externalGsd, { recursive: true, force: true }); rmSync(externalSf, { recursive: true, force: true });
}); });
test("GitServiceImpl: stageOnly ignores summary none placeholders", () => { test("GitServiceImpl: stageOnly ignores summary none placeholders", () => {
@ -2029,12 +2029,12 @@ describe("git-service", async () => {
const repo = initTempRepo(); const repo = initTempRepo();
// Create an external .sf directory and symlink it into the repo // Create an external .sf directory and symlink it into the repo
const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-symlink-")); const externalSf = mkdtempSync(join(tmpdir(), "sf-external-symlink-"));
mkdirSync(join(externalGsd, "milestones", "M009"), { recursive: true }); mkdirSync(join(externalSf, "milestones", "M009"), { recursive: true });
mkdirSync(join(externalGsd, "activity"), { recursive: true }); mkdirSync(join(externalSf, "activity"), { recursive: true });
mkdirSync(join(externalGsd, "runtime"), { recursive: true }); mkdirSync(join(externalSf, "runtime"), { recursive: true });
symlinkSync(externalGsd, join(repo, ".sf")); symlinkSync(externalSf, join(repo, ".sf"));
// .gitignore blocks .sf (as ensureGitignore would do for symlink projects) // .gitignore blocks .sf (as ensureGitignore would do for symlink projects)
writeFileSync(join(repo, ".gitignore"), ".sf\n"); writeFileSync(join(repo, ".gitignore"), ".sf\n");
@ -2050,15 +2050,15 @@ describe("git-service", async () => {
// Simulate new milestone artifacts created during execution // Simulate new milestone artifacts created during execution
writeFileSync( writeFileSync(
join(externalGsd, "milestones", "M009", "M009-SUMMARY.md"), join(externalSf, "milestones", "M009", "M009-SUMMARY.md"),
"# M009 Summary", "# M009 Summary",
); );
writeFileSync( writeFileSync(
join(externalGsd, "milestones", "M009", "S01-SUMMARY.md"), join(externalSf, "milestones", "M009", "S01-SUMMARY.md"),
"# S01 Summary", "# S01 Summary",
); );
writeFileSync( writeFileSync(
join(externalGsd, "milestones", "M009", "T01-VERIFY.json"), join(externalSf, "milestones", "M009", "T01-VERIFY.json"),
'{"passed":true}', '{"passed":true}',
); );
@ -2086,7 +2086,7 @@ describe("git-service", async () => {
rmSync(repo, { recursive: true, force: true }); rmSync(repo, { recursive: true, force: true });
} catch {} } catch {}
try { try {
rmSync(externalGsd, { recursive: true, force: true }); rmSync(externalSf, { recursive: true, force: true });
} catch {} } catch {}
}); });

View file

@ -2,7 +2,7 @@
* gitignore-staging-2570.test.ts Regression tests for #2570. * gitignore-staging-2570.test.ts Regression tests for #2570.
* *
* Verifies that: * Verifies that:
* 1. isGsdGitignored() detects when .sf is covered by .gitignore * 1. isSfGitignored() detects when .sf is covered by .gitignore
* 2. The rethink prompt uses {{commitInstruction}} instead of hardcoded git add .sf/ * 2. The rethink prompt uses {{commitInstruction}} instead of hardcoded git add .sf/
* 3. rethink.ts passes the correct commitInstruction based on gitignore state * 3. rethink.ts passes the correct commitInstruction based on gitignore state
* *
@ -22,8 +22,8 @@ import { tmpdir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
import test from "node:test"; import test from "node:test";
// Dynamic import — isGsdGitignored is the function under test (may not exist yet during TDD red phase) // Dynamic import — isSfGitignored is the function under test (may not exist yet during TDD red phase)
const { isGsdGitignored } = await import("../../gitignore.ts"); const { isSfGitignored } = await import("../../gitignore.ts");
// ─── Helpers ───────────────────────────────────────────────────────── // ─── Helpers ─────────────────────────────────────────────────────────
@ -55,19 +55,19 @@ function cleanup(dir: string): void {
} }
} }
// ─── isGsdGitignored ───────────────────────────────────────────────── // ─── isSfGitignored ─────────────────────────────────────────────────
test("isGsdGitignored returns true when .sf is in .gitignore (#2570)", (t) => { test("isSfGitignored returns true when .sf is in .gitignore (#2570)", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
}); });
writeFileSync(join(dir, ".gitignore"), ".sf\n"); writeFileSync(join(dir, ".gitignore"), ".sf\n");
assert.equal(isGsdGitignored(dir), true); assert.equal(isSfGitignored(dir), true);
}); });
test("isGsdGitignored returns true when .sf/ (with slash) is in .gitignore", (t) => { test("isSfGitignored returns true when .sf/ (with slash) is in .gitignore", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
@ -76,27 +76,27 @@ test("isGsdGitignored returns true when .sf/ (with slash) is in .gitignore", (t)
writeFileSync(join(dir, ".gitignore"), ".sf/\n"); writeFileSync(join(dir, ".gitignore"), ".sf/\n");
// Create .sf directory so git check-ignore can match the directory-only pattern // Create .sf directory so git check-ignore can match the directory-only pattern
mkdirSync(join(dir, ".sf"), { recursive: true }); mkdirSync(join(dir, ".sf"), { recursive: true });
assert.equal(isGsdGitignored(dir), true); assert.equal(isSfGitignored(dir), true);
}); });
test("isGsdGitignored returns false when .sf is NOT in .gitignore", (t) => { test("isSfGitignored returns false when .sf is NOT in .gitignore", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
}); });
writeFileSync(join(dir, ".gitignore"), "node_modules/\n"); writeFileSync(join(dir, ".gitignore"), "node_modules/\n");
assert.equal(isGsdGitignored(dir), false); assert.equal(isSfGitignored(dir), false);
}); });
test("isGsdGitignored returns false when no .gitignore exists", (t) => { test("isSfGitignored returns false when no .gitignore exists", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
}); });
// No .gitignore — default // No .gitignore — default
assert.equal(isGsdGitignored(dir), false); assert.equal(isSfGitignored(dir), false);
}); });
// ─── rethink.md prompt template ───────────────────────────────────── // ─── rethink.md prompt template ─────────────────────────────────────

View file

@ -22,7 +22,7 @@ import { tmpdir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
import test from "node:test"; import test from "node:test";
import { ensureGitignore, hasGitTrackedGsdFiles } from "../../gitignore.ts"; import { ensureGitignore, hasGitTrackedSfFiles } from "../../gitignore.ts";
import { migrateToExternalState } from "../../migrate-external.ts"; import { migrateToExternalState } from "../../migrate-external.ts";
// ─── Helpers ───────────────────────────────────────────────────────── // ─── Helpers ─────────────────────────────────────────────────────────
@ -55,18 +55,18 @@ function cleanup(dir: string): void {
} }
} }
// ─── hasGitTrackedGsdFiles ─────────────────────────────────────────── // ─── hasGitTrackedSfFiles ───────────────────────────────────────────
test("hasGitTrackedGsdFiles returns false when .sf/ does not exist", (t) => { test("hasGitTrackedSfFiles returns false when .sf/ does not exist", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
}); });
assert.equal(hasGitTrackedGsdFiles(dir), false); assert.equal(hasGitTrackedSfFiles(dir), false);
}); });
test("hasGitTrackedGsdFiles returns true when .sf/ has tracked files", (t) => { test("hasGitTrackedSfFiles returns true when .sf/ has tracked files", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
@ -76,10 +76,10 @@ test("hasGitTrackedGsdFiles returns true when .sf/ has tracked files", (t) => {
writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Test Project\n"); writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Test Project\n");
git(dir, "add", ".sf/PROJECT.md"); git(dir, "add", ".sf/PROJECT.md");
git(dir, "commit", "-m", "add sf"); git(dir, "commit", "-m", "add sf");
assert.equal(hasGitTrackedGsdFiles(dir), true); assert.equal(hasGitTrackedSfFiles(dir), true);
}); });
test("hasGitTrackedGsdFiles returns false when .sf/ exists but is untracked", (t) => { test("hasGitTrackedSfFiles returns false when .sf/ exists but is untracked", (t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
t.after(() => { t.after(() => {
cleanup(dir); cleanup(dir);
@ -88,7 +88,7 @@ test("hasGitTrackedGsdFiles returns false when .sf/ exists but is untracked", (t
mkdirSync(join(dir, ".sf"), { recursive: true }); mkdirSync(join(dir, ".sf"), { recursive: true });
writeFileSync(join(dir, ".sf", "STATE.md"), "state\n"); writeFileSync(join(dir, ".sf", "STATE.md"), "state\n");
// Not git-added — should return false // Not git-added — should return false
assert.equal(hasGitTrackedGsdFiles(dir), false); assert.equal(hasGitTrackedSfFiles(dir), false);
}); });
// ─── ensureGitignore — tracked .sf/ protection ───────────────────── // ─── ensureGitignore — tracked .sf/ protection ─────────────────────
@ -125,19 +125,19 @@ test("ensureGitignore does NOT add .sf when .sf/ has tracked files (#1364)", (_t
} }
}); });
test("ensureGitignore adds .sf when .sf/ has NO tracked files", (_t) => { test("ensureGitignore excludes .sf when .sf/ has NO tracked files", (_t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
try { try {
// Run ensureGitignore (no .sf/ at all) // Run ensureGitignore (no .sf/ at all)
ensureGitignore(dir); ensureGitignore(dir);
// Verify .sf IS in .gitignore const exclude = readFileSync(join(dir, ".git", "info", "exclude"), "utf-8");
const gitignore = readFileSync(join(dir, ".gitignore"), "utf-8"); const lines = exclude.split("\n").map((l) => l.trim());
const lines = gitignore.split("\n").map((l) => l.trim());
assert.ok( assert.ok(
lines.includes(".sf"), lines.includes(".sf"),
`Expected .sf in .gitignore, but it's missing:\n${gitignore}`, `Expected .sf in .git/info/exclude, but it's missing:\n${exclude}`,
); );
assert.doesNotThrow(() => git(dir, "check-ignore", "-q", ".sf"));
} finally { } finally {
cleanup(dir); cleanup(dir);
} }
@ -193,7 +193,7 @@ test("ensureGitignore with tracked .sf/ does not cause git to see files as delet
} }
}); });
test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available", (_t) => { test("hasGitTrackedSfFiles returns true (fail-safe) when git is not available", (_t) => {
const dir = makeTempRepo(); const dir = makeTempRepo();
try { try {
// Create and track .sf/ files // Create and track .sf/ files
@ -208,7 +208,7 @@ test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available",
// Should fail safe — assume tracked rather than silently returning false // Should fail safe — assume tracked rather than silently returning false
// (The index lock causes git ls-files to fail; rev-parse also fails → true) // (The index lock causes git ls-files to fail; rev-parse also fails → true)
const result = hasGitTrackedGsdFiles(dir); const result = hasGitTrackedSfFiles(dir);
assert.equal( assert.equal(
result, result,
true, true,

View file

@ -37,8 +37,8 @@ function run(cmd: string, args: string[], cwd: string): string {
describe("isInheritedRepo when git root is HOME (#2393)", () => { describe("isInheritedRepo when git root is HOME (#2393)", () => {
let fakeHome: string; let fakeHome: string;
let stateDir: string; let stateDir: string;
let origGsdHome: string | undefined; let origSfHome: string | undefined;
let origGsdStateDir: string | undefined; let origSfStateDir: string | undefined;
beforeEach(() => { beforeEach(() => {
// Create a fake HOME that is itself a git repo (dotfile manager scenario). // Create a fake HOME that is itself a git repo (dotfile manager scenario).
@ -56,18 +56,18 @@ describe("isInheritedRepo when git root is HOME (#2393)", () => {
// Save and override env. Point SF_HOME at fakeHome/.sf so the // Save and override env. Point SF_HOME at fakeHome/.sf so the
// function recognizes it as the global state directory. // function recognizes it as the global state directory.
origGsdHome = process.env.SF_HOME; origSfHome = process.env.SF_HOME;
origGsdStateDir = process.env.SF_STATE_DIR; origSfStateDir = process.env.SF_STATE_DIR;
process.env.SF_HOME = join(fakeHome, ".sf"); process.env.SF_HOME = join(fakeHome, ".sf");
stateDir = mkdtempSync(join(tmpdir(), "sf-state-")); stateDir = mkdtempSync(join(tmpdir(), "sf-state-"));
process.env.SF_STATE_DIR = stateDir; process.env.SF_STATE_DIR = stateDir;
}); });
afterEach(() => { afterEach(() => {
if (origGsdHome !== undefined) process.env.SF_HOME = origGsdHome; if (origSfHome !== undefined) process.env.SF_HOME = origSfHome;
else delete process.env.SF_HOME; else delete process.env.SF_HOME;
if (origGsdStateDir !== undefined) if (origSfStateDir !== undefined)
process.env.SF_STATE_DIR = origGsdStateDir; process.env.SF_STATE_DIR = origSfStateDir;
else delete process.env.SF_STATE_DIR; else delete process.env.SF_STATE_DIR;
rmSync(fakeHome, { recursive: true, force: true }); rmSync(fakeHome, { recursive: true, force: true });
@ -147,12 +147,12 @@ describe("isInheritedRepo with stale .sf at parent git root", () => {
const projectDir = join(parentRepo, "my-project"); const projectDir = join(parentRepo, "my-project");
mkdirSync(projectDir, { recursive: true }); mkdirSync(projectDir, { recursive: true });
// Without fix: isProjectGsd(join(root, ".sf")) returns true because // Without fix: isProjectSf(join(root, ".sf")) returns true because
// the stale .sf is a real directory that isn't the global SF home, // the stale .sf is a real directory that isn't the global SF home,
// causing isInheritedRepo to return false (false negative). // causing isInheritedRepo to return false (false negative).
// //
// The stale .sf at parent is still treated as a "project .sf" by // The stale .sf at parent is still treated as a "project .sf" by
// isProjectGsd(), so the git root check at line 128 returns false. // isProjectSf(), so the git root check at line 128 returns false.
// This is the expected behavior for that check — the defense-in-depth // This is the expected behavior for that check — the defense-in-depth
// fix in auto-start.ts handles this case by checking for local .git. // fix in auto-start.ts handles this case by checking for local .git.
// //

View file

@ -562,9 +562,9 @@ test("mergeAllCompleted — by-completion order respects startedAt", async () =>
/** Set up a worktree DB with a milestone marked complete */ /** Set up a worktree DB with a milestone marked complete */
function setupWorktreeDb(basePath: string, mid: string): void { function setupWorktreeDb(basePath: string, mid: string): void {
const wtGsdDir = join(basePath, ".sf", "worktrees", mid, ".sf"); const wtSfDir = join(basePath, ".sf", "worktrees", mid, ".sf");
mkdirSync(wtGsdDir, { recursive: true }); mkdirSync(wtSfDir, { recursive: true });
const dbPath = join(wtGsdDir, "sf.db"); const dbPath = join(wtSfDir, "sf.db");
openDatabase(dbPath); openDatabase(dbPath);
insertMilestone({ id: mid, title: `Milestone ${mid}`, status: "complete" }); insertMilestone({ id: mid, title: `Milestone ${mid}`, status: "complete" });
updateMilestoneStatus(mid, "complete", new Date().toISOString()); updateMilestoneStatus(mid, "complete", new Date().toISOString());

View file

@ -5,7 +5,7 @@ import { tmpdir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
import { describe, test } from "node:test"; import { describe, test } from "node:test";
import { _clearGsdRootCache, sfRoot } from "../../paths.ts"; import { _clearSfRootCache, sfRoot } from "../../paths.ts";
/** Create a tmp dir and resolve symlinks + 8.3 short names (macOS /var→/private/var, Windows RUNNER~1→runneradmin). */ /** Create a tmp dir and resolve symlinks + 8.3 short names (macOS /var→/private/var, Windows RUNNER~1→runneradmin). */
function tmp(): string { function tmp(): string {
@ -35,7 +35,7 @@ describe("paths", () => {
const root = tmp(); const root = tmp();
try { try {
mkdirSync(join(root, ".sf")); mkdirSync(join(root, ".sf"));
_clearGsdRootCache(); _clearSfRootCache();
const result = sfRoot(root); const result = sfRoot(root);
assert.deepStrictEqual( assert.deepStrictEqual(
result, result,
@ -54,7 +54,7 @@ describe("paths", () => {
mkdirSync(join(root, ".sf")); mkdirSync(join(root, ".sf"));
const sub = join(root, "src", "deep"); const sub = join(root, "src", "deep");
mkdirSync(sub, { recursive: true }); mkdirSync(sub, { recursive: true });
_clearGsdRootCache(); _clearSfRootCache();
const result = sfRoot(sub); const result = sfRoot(sub);
assert.deepStrictEqual( assert.deepStrictEqual(
result, result,
@ -74,7 +74,7 @@ describe("paths", () => {
mkdirSync(join(project, ".sf"), { recursive: true }); mkdirSync(join(project, ".sf"), { recursive: true });
const deep = join(project, "src", "deep"); const deep = join(project, "src", "deep");
mkdirSync(deep, { recursive: true }); mkdirSync(deep, { recursive: true });
_clearGsdRootCache(); _clearSfRootCache();
const result = sfRoot(deep); const result = sfRoot(deep);
assert.deepStrictEqual( assert.deepStrictEqual(
result, result,
@ -92,7 +92,7 @@ describe("paths", () => {
initGit(root); initGit(root);
const sub = join(root, "src"); const sub = join(root, "src");
mkdirSync(sub, { recursive: true }); mkdirSync(sub, { recursive: true });
_clearGsdRootCache(); _clearSfRootCache();
const result = sfRoot(sub); const result = sfRoot(sub);
assert.deepStrictEqual( assert.deepStrictEqual(
result, result,
@ -108,7 +108,7 @@ describe("paths", () => {
const root = tmp(); const root = tmp();
try { try {
mkdirSync(join(root, ".sf")); mkdirSync(join(root, ".sf"));
_clearGsdRootCache(); _clearSfRootCache();
const first = sfRoot(root); const first = sfRoot(root);
const second = sfRoot(root); const second = sfRoot(root);
assert.deepStrictEqual( assert.deepStrictEqual(
@ -129,7 +129,7 @@ describe("paths", () => {
mkdirSync(join(outer, ".sf")); mkdirSync(join(outer, ".sf"));
const inner = join(outer, "nested"); const inner = join(outer, "nested");
mkdirSync(join(inner, ".sf"), { recursive: true }); mkdirSync(join(inner, ".sf"), { recursive: true });
_clearGsdRootCache(); _clearSfRootCache();
const result = sfRoot(inner); const result = sfRoot(inner);
assert.deepStrictEqual( assert.deepStrictEqual(
result, result,

View file

@ -198,7 +198,7 @@ console.log(
const dbDecisionsContent = formatDecisionsForPrompt(scopedDecisions); const dbDecisionsContent = formatDecisionsForPrompt(scopedDecisions);
const dbRequirementsContent = formatRequirementsForPrompt(scopedRequirements); const dbRequirementsContent = formatRequirementsForPrompt(scopedRequirements);
// ── Full-markdown equivalents (what inlineGsdRootFile would return) ── // ── Full-markdown equivalents (what inlineSfRootFile would return) ──
const fullDecisionsContent = readFileSync( const fullDecisionsContent = readFileSync(
join(base, ".sf", "DECISIONS.md"), join(base, ".sf", "DECISIONS.md"),
"utf-8", "utf-8",

View file

@ -4,7 +4,7 @@
* Tests: * Tests:
* - KNOWLEDGE is registered in SF_ROOT_FILES * - KNOWLEDGE is registered in SF_ROOT_FILES
* - resolveSfRootFile resolves KNOWLEDGE paths correctly * - resolveSfRootFile resolves KNOWLEDGE paths correctly
* - inlineGsdRootFile works with the KNOWLEDGE key * - inlineSfRootFile works with the KNOWLEDGE key
* - before_agent_start hook includes/omits knowledge block appropriately * - before_agent_start hook includes/omits knowledge block appropriately
* - loadKnowledgeBlock merges global and project knowledge correctly * - loadKnowledgeBlock merges global and project knowledge correctly
*/ */
@ -21,7 +21,7 @@ import {
import { tmpdir } from "node:os"; import { tmpdir } from "node:os";
import { join } from "node:path"; import { join } from "node:path";
import test from "node:test"; import test from "node:test";
import { inlineGsdRootFile, inlineKnowledgeBudgeted } from "../auto-prompts.ts"; import { inlineSfRootFile, inlineKnowledgeBudgeted } from "../auto-prompts.ts";
import { loadKnowledgeBlock } from "../bootstrap/system-context.ts"; import { loadKnowledgeBlock } from "../bootstrap/system-context.ts";
import { appendKnowledge } from "../files.ts"; import { appendKnowledge } from "../files.ts";
import { resolveSfRootFile, SF_ROOT_FILES } from "../paths.ts"; import { resolveSfRootFile, SF_ROOT_FILES } from "../paths.ts";
@ -80,9 +80,9 @@ test("knowledge: resolveSfRootFile returns canonical path when file does not exi
rmSync(tmp, { recursive: true, force: true }); rmSync(tmp, { recursive: true, force: true });
}); });
// ─── inlineGsdRootFile works with knowledge.md ───────────────────────────── // ─── inlineSfRootFile works with knowledge.md ─────────────────────────────
test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", async () => { test("knowledge: inlineSfRootFile returns content when KNOWLEDGE.md exists", async () => {
const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-")); const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-"));
const sfDir = join(tmp, ".sf"); const sfDir = join(tmp, ".sf");
mkdirSync(sfDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
@ -91,7 +91,7 @@ test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", as
"# Project Knowledge\n\n## Rules\n\nK001: Use real DB", "# Project Knowledge\n\n## Rules\n\nK001: Use real DB",
); );
const result = await inlineGsdRootFile( const result = await inlineSfRootFile(
tmp, tmp,
"knowledge.md", "knowledge.md",
"Project Knowledge", "Project Knowledge",
@ -103,12 +103,12 @@ test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", as
rmSync(tmp, { recursive: true, force: true }); rmSync(tmp, { recursive: true, force: true });
}); });
test("knowledge: inlineGsdRootFile returns null when KNOWLEDGE.md does not exist", async () => { test("knowledge: inlineSfRootFile returns null when KNOWLEDGE.md does not exist", async () => {
const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-")); const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-"));
const sfDir = join(tmp, ".sf"); const sfDir = join(tmp, ".sf");
mkdirSync(sfDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
const result = await inlineGsdRootFile( const result = await inlineSfRootFile(
tmp, tmp,
"knowledge.md", "knowledge.md",
"Project Knowledge", "Project Knowledge",
@ -306,9 +306,9 @@ test("loadKnowledgeBlock: reports globalSizeKb above 4KB threshold", () => {
// helper scopes by milestone-level keywords and caps the injected size. // helper scopes by milestone-level keywords and caps the injected size.
test("inlineKnowledgeBudgeted: returns scoped H3 entries for single-H2 file", async () => { test("inlineKnowledgeBudgeted: returns scoped H3 entries for single-H2 file", async () => {
const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-")));
const gsdDir = join(tmp, ".gsd"); const sfDir = join(tmp, ".sf");
mkdirSync(gsdDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
const content = `# Project Knowledge const content = `# Project Knowledge
@ -323,7 +323,7 @@ Use /v1/resource style versioning.
### Testing: node:test ### Testing: node:test
Prefer node:test over external frameworks. Prefer node:test over external frameworks.
`; `;
writeFileSync(join(gsdDir, "KNOWLEDGE.md"), content); writeFileSync(join(sfDir, "KNOWLEDGE.md"), content);
const result = await inlineKnowledgeBudgeted(tmp, ["database"]); const result = await inlineKnowledgeBudgeted(tmp, ["database"]);
assert.ok(result !== null, "should return content"); assert.ok(result !== null, "should return content");
@ -340,9 +340,9 @@ Prefer node:test over external frameworks.
}); });
test("inlineKnowledgeBudgeted: caps payload below budget for large files", async () => { test("inlineKnowledgeBudgeted: caps payload below budget for large files", async () => {
const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-")));
const gsdDir = join(tmp, ".gsd"); const sfDir = join(tmp, ".sf");
mkdirSync(gsdDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
// Build a 200KB KNOWLEDGE with 500 H3 entries all matching 'shared' // Build a 200KB KNOWLEDGE with 500 H3 entries all matching 'shared'
const entries = Array.from( const entries = Array.from(
@ -350,7 +350,7 @@ test("inlineKnowledgeBudgeted: caps payload below budget for large files", async
(_, i) => `### Entry ${i}: shared topic\n${"filler text ".repeat(30)}\n`, (_, i) => `### Entry ${i}: shared topic\n${"filler text ".repeat(30)}\n`,
).join("\n"); ).join("\n");
const content = `# Project Knowledge\n\n## Patterns\n\n${entries}`; const content = `# Project Knowledge\n\n## Patterns\n\n${entries}`;
writeFileSync(join(gsdDir, "KNOWLEDGE.md"), content); writeFileSync(join(sfDir, "KNOWLEDGE.md"), content);
const BUDGET_CHARS = 30_000; const BUDGET_CHARS = 30_000;
const result = await inlineKnowledgeBudgeted(tmp, ["shared"], { const result = await inlineKnowledgeBudgeted(tmp, ["shared"], {
@ -377,9 +377,9 @@ test("inlineKnowledgeBudgeted: caps payload below budget for large files", async
}); });
test("inlineKnowledgeBudgeted: returns null when no KNOWLEDGE.md exists", async () => { test("inlineKnowledgeBudgeted: returns null when no KNOWLEDGE.md exists", async () => {
const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-")));
const gsdDir = join(tmp, ".gsd"); const sfDir = join(tmp, ".sf");
mkdirSync(gsdDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
const result = await inlineKnowledgeBudgeted(tmp, ["database"]); const result = await inlineKnowledgeBudgeted(tmp, ["database"]);
assert.strictEqual(result, null); assert.strictEqual(result, null);
@ -388,11 +388,11 @@ test("inlineKnowledgeBudgeted: returns null when no KNOWLEDGE.md exists", async
}); });
test("inlineKnowledgeBudgeted: returns null when no entries match", async () => { test("inlineKnowledgeBudgeted: returns null when no entries match", async () => {
const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-")));
const gsdDir = join(tmp, ".gsd"); const sfDir = join(tmp, ".sf");
mkdirSync(gsdDir, { recursive: true }); mkdirSync(sfDir, { recursive: true });
writeFileSync( writeFileSync(
join(gsdDir, "KNOWLEDGE.md"), join(sfDir, "KNOWLEDGE.md"),
"# Project Knowledge\n\n## Patterns\n\n### Database\nuse it\n", "# Project Knowledge\n\n## Patterns\n\n### Database\nuse it\n",
); );

View file

@ -16,7 +16,7 @@ import {
repairStaleRenders, repairStaleRenders,
} from "../markdown-renderer.ts"; } from "../markdown-renderer.ts";
import { parsePlan, parseRoadmap } from "../parsers-legacy.ts"; import { parsePlan, parseRoadmap } from "../parsers-legacy.ts";
import { _clearGsdRootCache, clearPathCache } from "../paths.ts"; import { _clearSfRootCache, clearPathCache } from "../paths.ts";
import { import {
_getAdapter, _getAdapter,
closeDatabase, closeDatabase,
@ -52,7 +52,7 @@ function cleanupDir(dir: string): void {
function clearAllCaches(): void { function clearAllCaches(): void {
clearParseCache(); clearParseCache();
clearPathCache(); clearPathCache();
_clearGsdRootCache(); _clearSfRootCache();
invalidateStateCache(); invalidateStateCache();
} }

View file

@ -46,9 +46,9 @@ describe("migrate-external worktree guard (#2970)", () => {
run(`git worktree add -b milestone/M001 ${worktreePath}`, base); run(`git worktree add -b milestone/M001 ${worktreePath}`, base);
// Populate worktree with a .sf directory (simulating syncSfStateToWorktree) // Populate worktree with a .sf directory (simulating syncSfStateToWorktree)
const worktreeGsd = join(worktreePath, ".sf"); const worktreeSf = join(worktreePath, ".sf");
mkdirSync(worktreeGsd, { recursive: true }); mkdirSync(worktreeSf, { recursive: true });
writeFileSync(join(worktreeGsd, "PREFERENCES.md"), "# prefs\n", "utf-8"); writeFileSync(join(worktreeSf, "PREFERENCES.md"), "# prefs\n", "utf-8");
}); });
after(() => { after(() => {

View file

@ -1,4 +1,4 @@
// GSD-2 — #4781: classifier behavior matrix. Pure-function tests, no I/O. // SF — #4781: classifier behavior matrix. Pure-function tests, no I/O.
import assert from "node:assert/strict"; import assert from "node:assert/strict";
import test from "node:test"; import test from "node:test";

View file

@ -18,6 +18,7 @@ import test, { afterEach } from "node:test";
import { fileURLToPath } from "node:url"; import { fileURLToPath } from "node:url";
import { resolveDispatch } from "../auto-dispatch.ts"; import { resolveDispatch } from "../auto-dispatch.ts";
import { buildParallelResearchSlicesPrompt } from "../auto-prompts.ts";
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
@ -94,6 +95,17 @@ test("dispatch: parallel-research-slices requires 2+ slices", () => {
); );
}); });
test("dispatch: parallel-research-slices respects subagent batch limit", () => {
assert.ok(
dispatchSrc.includes("MAX_PARALLEL_RESEARCH_SLICES"),
"rule should guard the subagent max parallel task count",
);
assert.ok(
dispatchSrc.includes("researchReadySlices.length > MAX_PARALLEL_RESEARCH_SLICES"),
"rule should fall back when too many slices are ready for one subagent call",
);
});
test("dispatch: parallel-research-slices respects skip_research", () => { test("dispatch: parallel-research-slices respects skip_research", () => {
const ruleIdx = dispatchSrc.indexOf("parallel-research-slices"); const ruleIdx = dispatchSrc.indexOf("parallel-research-slices");
const ruleBlock = dispatchSrc.slice(ruleIdx, ruleIdx + 500); const ruleBlock = dispatchSrc.slice(ruleIdx, ruleIdx + 500);
@ -122,6 +134,27 @@ test("prompt: builds per-slice subagent prompts", () => {
); );
}); });
test("prompt: emits deterministic worker subagent payload", async () => {
const base = makeTmpProject();
const prompt = await buildParallelResearchSlicesPrompt(
"M001",
"Parallel Research Milestone",
[
{ id: "S01", title: "Alpha" },
{ id: "S02", title: "Beta" },
],
base,
"test-subagent-model",
);
assert.match(prompt, /Required `subagent` Call Payload/);
assert.match(prompt, /"agent": "worker"/);
assert.match(prompt, /"cwd":/);
assert.match(prompt, /"model": "test-subagent-model"/);
assert.match(prompt, /IMPORTANT CHILD-AGENT OVERRIDE/);
assert.doesNotMatch(prompt, /"agent": "g(?:sd)-executor"/);
});
// ─── Template ───────────────────────────────────────────────────────────── // ─── Template ─────────────────────────────────────────────────────────────
test("template: parallel-research-slices.md has required variables", () => { test("template: parallel-research-slices.md has required variables", () => {
@ -130,6 +163,10 @@ test("template: parallel-research-slices.md has required variables", () => {
"template should use sliceCount", "template should use sliceCount",
); );
assert.ok(templateSrc.includes("{{mid}}"), "template should use mid"); assert.ok(templateSrc.includes("{{mid}}"), "template should use mid");
assert.ok(
templateSrc.includes("{{subagentCall}}"),
"template should use subagentCall",
);
assert.ok( assert.ok(
templateSrc.includes("{{subagentPrompts}}"), templateSrc.includes("{{subagentPrompts}}"),
"template should use subagentPrompts", "template should use subagentPrompts",

View file

@ -158,24 +158,24 @@ describe("parallel-worker-lock-contention (#2184)", () => {
// ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ─────────── // ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ───────────
test("Bug 3: syncProjectRootToWorktree skips when .sf resolves to same path (symlink)", () => { test("Bug 3: syncProjectRootToWorktree skips when .sf resolves to same path (symlink)", () => {
const base = mkdtempSync(join(tmpdir(), "sf-symlink-sync-")); const base = mkdtempSync(join(tmpdir(), "sf-symlink-sync-"));
const externalGsd = join(base, "external-sf"); const externalSf = join(base, "external-sf");
const projectRoot = join(base, "project"); const projectRoot = join(base, "project");
const worktreePath = join(base, "worktree"); const worktreePath = join(base, "worktree");
mkdirSync(externalGsd, { recursive: true }); mkdirSync(externalSf, { recursive: true });
mkdirSync(projectRoot, { recursive: true }); mkdirSync(projectRoot, { recursive: true });
mkdirSync(worktreePath, { recursive: true }); mkdirSync(worktreePath, { recursive: true });
// Create the external state directory with a milestone // Create the external state directory with a milestone
mkdirSync(join(externalGsd, "milestones", "M001"), { recursive: true }); mkdirSync(join(externalSf, "milestones", "M001"), { recursive: true });
writeFileSync( writeFileSync(
join(externalGsd, "milestones", "M001", "M001-ROADMAP.md"), join(externalSf, "milestones", "M001", "M001-ROADMAP.md"),
"# Roadmap", "# Roadmap",
); );
// Symlink both project and worktree .sf to the same external directory // Symlink both project and worktree .sf to the same external directory
symlinkSync(externalGsd, join(projectRoot, ".sf")); symlinkSync(externalSf, join(projectRoot, ".sf"));
symlinkSync(externalGsd, join(worktreePath, ".sf")); symlinkSync(externalSf, join(worktreePath, ".sf"));
try { try {
// This should NOT throw ERR_FS_CP_EINVAL — it should skip silently // This should NOT throw ERR_FS_CP_EINVAL — it should skip silently

View file

@ -17,7 +17,7 @@ import {
type VerificationContext, type VerificationContext,
} from "../auto-verification.ts"; } from "../auto-verification.ts";
import { invalidateAllCaches } from "../cache.ts"; import { invalidateAllCaches } from "../cache.ts";
import { _clearGsdRootCache } from "../paths.ts"; import { _clearSfRootCache } from "../paths.ts";
import { import {
_getAdapter, _getAdapter,
closeDatabase, closeDatabase,
@ -93,7 +93,7 @@ function setupTestEnvironment(): void {
mkdirSync(milestonesDir, { recursive: true }); mkdirSync(milestonesDir, { recursive: true });
process.chdir(tempDir); process.chdir(tempDir);
_clearGsdRootCache(); _clearSfRootCache();
dbPath = join(sfDir, "sf.db"); dbPath = join(sfDir, "sf.db");
openDatabase(dbPath); openDatabase(dbPath);
@ -129,7 +129,7 @@ ${yamlLines.join("\n")}
`; `;
writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent);
invalidateAllCaches(); invalidateAllCaches();
_clearGsdRootCache(); _clearSfRootCache();
} }
/** /**

View file

@ -16,7 +16,7 @@ import {
postUnitPostVerification, postUnitPostVerification,
} from "../auto-post-unit.ts"; } from "../auto-post-unit.ts";
import { invalidateAllCaches } from "../cache.ts"; import { invalidateAllCaches } from "../cache.ts";
import { _clearGsdRootCache } from "../paths.ts"; import { _clearSfRootCache } from "../paths.ts";
import { import {
closeDatabase, closeDatabase,
insertMilestone, insertMilestone,
@ -107,7 +107,7 @@ function setupTestEnvironment(): void {
mkdirSync(milestonesDir, { recursive: true }); mkdirSync(milestonesDir, { recursive: true });
process.chdir(tempDir); process.chdir(tempDir);
_clearGsdRootCache(); _clearSfRootCache();
dbPath = join(sfDir, "sf.db"); dbPath = join(sfDir, "sf.db");
openDatabase(dbPath); openDatabase(dbPath);
@ -143,7 +143,7 @@ ${yamlLines.join("\n")}
`; `;
writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent);
invalidateAllCaches(); invalidateAllCaches();
_clearGsdRootCache(); _clearSfRootCache();
} }
/** /**

View file

@ -20,7 +20,7 @@ import {
postUnitPostVerification, postUnitPostVerification,
} from "../auto-post-unit.ts"; } from "../auto-post-unit.ts";
import { invalidateAllCaches } from "../cache.ts"; import { invalidateAllCaches } from "../cache.ts";
import { _clearGsdRootCache } from "../paths.ts"; import { _clearSfRootCache } from "../paths.ts";
import { import {
_getAdapter, _getAdapter,
closeDatabase, closeDatabase,
@ -135,7 +135,7 @@ function setupTestEnvironment(): void {
process.chdir(tempDir); process.chdir(tempDir);
// Clear sfRoot cache so it finds the new .sf directory // Clear sfRoot cache so it finds the new .sf directory
_clearGsdRootCache(); _clearSfRootCache();
// Initialize DB // Initialize DB
dbPath = join(sfDir, "sf.db"); dbPath = join(sfDir, "sf.db");
@ -183,7 +183,7 @@ ${yamlLines.join("\n")}
writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent);
// Invalidate caches so the new preferences file is found // Invalidate caches so the new preferences file is found
invalidateAllCaches(); invalidateAllCaches();
_clearGsdRootCache(); _clearSfRootCache();
} }
/** /**

View file

@ -72,15 +72,15 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => {
// Functional test: create a mock source and destination, call the sync // Functional test: create a mock source and destination, call the sync
const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-src-")); const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-src-"));
const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-dst-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-dst-"));
const srcGsd = join(srcBase, ".sf"); const srcSf = join(srcBase, ".sf");
const dstGsd = join(dstBase, ".sf"); const dstSf = join(dstBase, ".sf");
mkdirSync(srcGsd, { recursive: true }); mkdirSync(srcSf, { recursive: true });
mkdirSync(dstGsd, { recursive: true }); mkdirSync(dstSf, { recursive: true });
try { try {
// Write a canonical PREFERENCES.md in source // Write a canonical PREFERENCES.md in source
writeFileSync( writeFileSync(
join(srcGsd, "PREFERENCES.md"), join(srcSf, "PREFERENCES.md"),
"---\nversion: 1\n---\n\npost_unit_hooks:\n - name: notify\n command: echo done\n", "---\nversion: 1\n---\n\npost_unit_hooks:\n - name: notify\n command: echo done\n",
); );
@ -90,11 +90,11 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => {
// Verify PREFERENCES.md was copied // Verify PREFERENCES.md was copied
assert.ok( assert.ok(
existsSync(join(dstGsd, "PREFERENCES.md")), existsSync(join(dstSf, "PREFERENCES.md")),
"PREFERENCES.md should be copied to worktree", "PREFERENCES.md should be copied to worktree",
); );
const content = readFileSync(join(dstGsd, "PREFERENCES.md"), "utf-8"); const content = readFileSync(join(dstSf, "PREFERENCES.md"), "utf-8");
assert.ok( assert.ok(
content.includes("post_unit_hooks"), content.includes("post_unit_hooks"),
"copied PREFERENCES.md should contain the hooks config", "copied PREFERENCES.md should contain the hooks config",
@ -108,21 +108,21 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => {
test("syncSfStateToWorktree falls back to legacy lowercase preferences.md", async () => { test("syncSfStateToWorktree falls back to legacy lowercase preferences.md", async () => {
const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-src-")); const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-src-"));
const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-dst-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-dst-"));
const srcGsd = join(srcBase, ".sf"); const srcSf = join(srcBase, ".sf");
const dstGsd = join(dstBase, ".sf"); const dstSf = join(dstBase, ".sf");
mkdirSync(srcGsd, { recursive: true }); mkdirSync(srcSf, { recursive: true });
mkdirSync(dstGsd, { recursive: true }); mkdirSync(dstSf, { recursive: true });
try { try {
writeFileSync( writeFileSync(
join(srcGsd, "preferences.md"), join(srcSf, "preferences.md"),
"---\nversion: 1\n---\n\ngit:\n auto_push: true\n", "---\nversion: 1\n---\n\ngit:\n auto_push: true\n",
); );
const { syncSfStateToWorktree } = await import("../auto-worktree.ts"); const { syncSfStateToWorktree } = await import("../auto-worktree.ts");
const result = syncSfStateToWorktree(srcBase, dstBase); const result = syncSfStateToWorktree(srcBase, dstBase);
const copiedEntries = readdirSync(dstGsd).filter( const copiedEntries = readdirSync(dstSf).filter(
(name) => name === "PREFERENCES.md" || name === "preferences.md", (name) => name === "PREFERENCES.md" || name === "preferences.md",
); );

View file

@ -673,15 +673,15 @@ test("experimental.rtk parses correctly from preferences markdown", () => {
test("loadEffectiveSFPreferences preserves experimental prefs across global+project merge", () => { test("loadEffectiveSFPreferences preserves experimental prefs across global+project merge", () => {
const originalCwd = process.cwd(); const originalCwd = process.cwd();
const originalGsdHome = process.env.SF_HOME; const originalSfHome = process.env.SF_HOME;
const tempProject = mkdtempSync(join(tmpdir(), "sf-prefs-project-")); const tempProject = mkdtempSync(join(tmpdir(), "sf-prefs-project-"));
const tempGsdHome = mkdtempSync(join(tmpdir(), "sf-prefs-home-")); const tempSfHome = mkdtempSync(join(tmpdir(), "sf-prefs-home-"));
try { try {
mkdirSync(join(tempProject, ".sf"), { recursive: true }); mkdirSync(join(tempProject, ".sf"), { recursive: true });
writeFileSync( writeFileSync(
join(tempGsdHome, "preferences.md"), join(tempSfHome, "preferences.md"),
["---", "version: 1", "experimental:", " rtk: true", "---"].join("\n"), ["---", "version: 1", "experimental:", " rtk: true", "---"].join("\n"),
"utf-8", "utf-8",
); );
@ -692,7 +692,7 @@ test("loadEffectiveSFPreferences preserves experimental prefs across global+proj
"utf-8", "utf-8",
); );
process.env.SF_HOME = tempGsdHome; process.env.SF_HOME = tempSfHome;
process.chdir(tempProject); process.chdir(tempProject);
const loaded = loadEffectiveSFPreferences(); const loaded = loadEffectiveSFPreferences();
@ -701,10 +701,10 @@ test("loadEffectiveSFPreferences preserves experimental prefs across global+proj
assert.equal(loaded!.preferences.git?.isolation, "none"); assert.equal(loaded!.preferences.git?.isolation, "none");
} finally { } finally {
process.chdir(originalCwd); process.chdir(originalCwd);
if (originalGsdHome === undefined) delete process.env.SF_HOME; if (originalSfHome === undefined) delete process.env.SF_HOME;
else process.env.SF_HOME = originalGsdHome; else process.env.SF_HOME = originalSfHome;
rmSync(tempProject, { recursive: true, force: true }); rmSync(tempProject, { recursive: true, force: true });
rmSync(tempGsdHome, { recursive: true, force: true }); rmSync(tempSfHome, { recursive: true, force: true });
} }
}); });

View file

@ -5,7 +5,7 @@
* silent data loss. When a repo has a remote URL, the identity hash * silent data loss. When a repo has a remote URL, the identity hash
* should be based solely on the remote making moves transparent. * should be based solely on the remote making moves transparent.
* *
* For local-only repos (no remote), ensureGsdSymlink should detect * For local-only repos (no remote), ensureSfSymlink should detect
* orphaned state directories with a matching .sf-id marker and * orphaned state directories with a matching .sf-id marker and
* recover them automatically. * recover them automatically.
*/ */
@ -28,7 +28,7 @@ import { join } from "node:path";
import { after, before, describe, test } from "node:test"; import { after, before, describe, test } from "node:test";
import { import {
ensureGsdSymlink, ensureSfSymlink,
externalProjectsRoot, externalProjectsRoot,
readRepoMeta, readRepoMeta,
repoIdentity, repoIdentity,
@ -105,14 +105,14 @@ describe("project-relocation-recovery (#2750)", () => {
rmSync(repoB, { recursive: true, force: true }); rmSync(repoB, { recursive: true, force: true });
}); });
test("ensureGsdSymlink reuses the same external dir after repo move (remote repo)", () => { test("ensureSfSymlink reuses the same external dir after repo move (remote repo)", () => {
const repoA = realpathSync( const repoA = realpathSync(
mkdtempSync(join(tmpdir(), "sf-reloc-reuse-a-")), mkdtempSync(join(tmpdir(), "sf-reloc-reuse-a-")),
); );
initRepo(repoA, "https://github.com/example/reloc-reuse.git"); initRepo(repoA, "https://github.com/example/reloc-reuse.git");
// Initialize SF state with some planning data // Initialize SF state with some planning data
const externalA = ensureGsdSymlink(repoA); const externalA = ensureSfSymlink(repoA);
const milestonesPath = join(externalA, "milestones"); const milestonesPath = join(externalA, "milestones");
mkdirSync(milestonesPath, { recursive: true }); mkdirSync(milestonesPath, { recursive: true });
writeFileSync( writeFileSync(
@ -128,8 +128,8 @@ describe("project-relocation-recovery (#2750)", () => {
); );
renameSync(repoA, repoB); renameSync(repoA, repoB);
// ensureGsdSymlink at the new location should find the same external dir // ensureSfSymlink at the new location should find the same external dir
const externalB = ensureGsdSymlink(repoB); const externalB = ensureSfSymlink(repoB);
assert.strictEqual( assert.strictEqual(
normalizePath(externalB), normalizePath(externalB),
@ -159,7 +159,7 @@ describe("project-relocation-recovery (#2750)", () => {
const repoA = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-meta-a-"))); const repoA = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-meta-a-")));
initRepo(repoA, "https://github.com/example/reloc-meta.git"); initRepo(repoA, "https://github.com/example/reloc-meta.git");
const externalA = ensureGsdSymlink(repoA); const externalA = ensureSfSymlink(repoA);
const metaBefore = readRepoMeta(externalA); const metaBefore = readRepoMeta(externalA);
assert.ok(metaBefore !== null, "metadata should exist before move"); assert.ok(metaBefore !== null, "metadata should exist before move");
@ -170,7 +170,7 @@ describe("project-relocation-recovery (#2750)", () => {
); );
renameSync(repoA, repoB); renameSync(repoA, repoB);
const externalB = ensureGsdSymlink(repoB); const externalB = ensureSfSymlink(repoB);
const metaAfter = readRepoMeta(externalB); const metaAfter = readRepoMeta(externalB);
assert.ok(metaAfter !== null, "metadata should exist after move"); assert.ok(metaAfter !== null, "metadata should exist after move");
assert.strictEqual( assert.strictEqual(
@ -189,16 +189,16 @@ describe("project-relocation-recovery (#2750)", () => {
// ── Local-only repos: .sf-id marker provides recovery ──────────────── // ── Local-only repos: .sf-id marker provides recovery ────────────────
test("ensureGsdSymlink writes a .sf-id marker in the project root", () => { test("ensureSfSymlink writes a .sf-id marker in the project root", () => {
const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-marker-"))); const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-marker-")));
initRepo(repo); initRepo(repo);
ensureGsdSymlink(repo); ensureSfSymlink(repo);
const markerPath = join(repo, ".sf-id"); const markerPath = join(repo, ".sf-id");
assert.ok( assert.ok(
existsSync(markerPath), existsSync(markerPath),
".sf-id marker must be written by ensureGsdSymlink", ".sf-id marker must be written by ensureSfSymlink",
); );
const markerId = readFileSync(markerPath, "utf-8").trim(); const markerId = readFileSync(markerPath, "utf-8").trim();
@ -220,7 +220,7 @@ describe("project-relocation-recovery (#2750)", () => {
// No remote — identity includes gitRoot // No remote — identity includes gitRoot
// Initialize SF state // Initialize SF state
const externalA = ensureGsdSymlink(repoA); const externalA = ensureSfSymlink(repoA);
mkdirSync(join(externalA, "milestones"), { recursive: true }); mkdirSync(join(externalA, "milestones"), { recursive: true });
writeFileSync( writeFileSync(
join(externalA, "milestones", "M001.md"), join(externalA, "milestones", "M001.md"),
@ -245,8 +245,8 @@ describe("project-relocation-recovery (#2750)", () => {
"local-only repo identity changes with move (expected)", "local-only repo identity changes with move (expected)",
); );
// But ensureGsdSymlink should detect .sf-id marker and recover // But ensureSfSymlink should detect .sf-id marker and recover
const externalB = ensureGsdSymlink(repoB); const externalB = ensureSfSymlink(repoB);
assert.ok( assert.ok(
existsSync(join(externalB, "milestones", "M001.md")), existsSync(join(externalB, "milestones", "M001.md")),
"local-only repo must recover state via .sf-id marker after move", "local-only repo must recover state via .sf-id marker after move",
@ -280,7 +280,7 @@ describe("project-relocation-recovery (#2750)", () => {
); );
initRepo(repoA, "https://github.com/example/no-orphan.git"); initRepo(repoA, "https://github.com/example/no-orphan.git");
ensureGsdSymlink(repoA); ensureSfSymlink(repoA);
// Count project dirs before move // Count project dirs before move
const projectsDir = externalProjectsRoot(); const projectsDir = externalProjectsRoot();
@ -295,7 +295,7 @@ describe("project-relocation-recovery (#2750)", () => {
); );
renameSync(repoA, repoB); renameSync(repoA, repoB);
ensureGsdSymlink(repoB); ensureSfSymlink(repoB);
const countAfter = readdirSync(projectsDir).length; const countAfter = readdirSync(projectsDir).length;
assert.strictEqual( assert.strictEqual(

Some files were not shown because too many files have changed in this diff Show more