diff --git a/.gsd/CODEBASE.md b/.gsd/CODEBASE.md deleted file mode 100644 index 3925a3150..000000000 --- a/.gsd/CODEBASE.md +++ /dev/null @@ -1,482 +0,0 @@ -# Codebase Map - -Generated: 2026-04-15T12:09:27Z | Files: 500 | Described: 0/500 - -Note: Truncated to first 500 files. Run with higher --max-files to include all. - -### (root)/ -- `.dockerignore` -- `.gitignore` -- `.npmignore` -- `.npmrc` -- `.prompt-injection-scanignore` -- `.secretscanignore` -- `CHANGELOG.md` -- `CONTRIBUTING.md` -- `Dockerfile` -- `flake.nix` -- `LICENSE` -- `package-lock.json` -- `package.json` -- `README.md` -- `VISION.md` - -### .github/ -- `.github/CODEOWNERS` -- `.github/FUNDING.yml` -- `.github/PULL_REQUEST_TEMPLATE.md` - -### .github/ISSUE_TEMPLATE/ -- `.github/ISSUE_TEMPLATE/bug_report.yml` -- `.github/ISSUE_TEMPLATE/config.yml` -- `.github/ISSUE_TEMPLATE/feature_request.yml` - -### .github/workflows/ -- `.github/workflows/ai-triage.yml` -- `.github/workflows/build-native.yml` -- `.github/workflows/ci.yml` -- `.github/workflows/cleanup-dev-versions.yml` -- `.github/workflows/pipeline.yml` -- `.github/workflows/pr-risk.yml` - -### bin/ -- `bin/gsd-from-source` - -### docker/ -- `docker/.env.example` -- `docker/bootstrap.sh` -- `docker/docker-compose.full.yaml` -- `docker/docker-compose.yaml` -- `docker/Dockerfile.ci-builder` -- `docker/Dockerfile.sandbox` -- `docker/entrypoint.sh` -- `docker/README.md` - -### docs/ -- `docs/README.md` - -### docs/dev/ -- `docs/dev/ADR-001-branchless-worktree-architecture.md` -- `docs/dev/ADR-003-pipeline-simplification.md` -- `docs/dev/ADR-004-capability-aware-model-routing.md` -- `docs/dev/ADR-005-multi-model-provider-tool-strategy.md` -- `docs/dev/ADR-007-model-catalog-split.md` -- `docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md` -- `docs/dev/ADR-008-IMPLEMENTATION-PLAN.md` -- `docs/dev/ADR-009-IMPLEMENTATION-PLAN.md` -- `docs/dev/ADR-009-orchestration-kernel-refactor.md` -- `docs/dev/ADR-010-pi-clean-seam-architecture.md` -- 
`docs/dev/agent-knowledge-index.md` -- `docs/dev/architecture.md` -- `docs/dev/ci-cd-pipeline.md` -- `docs/dev/FILE-SYSTEM-MAP.md` -- `docs/dev/FRONTIER-TECHNIQUES.md` -- `docs/dev/pi-context-optimization-opportunities.md` -- `docs/dev/PRD-branchless-worktree-architecture.md` -- `docs/dev/PRD-pi-clean-seam-refactor.md` - -### docs/dev/building-coding-agents/ -- *(27 files: 27 .md)* - -### docs/dev/context-and-hooks/ -- `docs/dev/context-and-hooks/01-the-context-pipeline.md` -- `docs/dev/context-and-hooks/02-hook-reference.md` -- `docs/dev/context-and-hooks/03-context-injection-patterns.md` -- `docs/dev/context-and-hooks/04-message-types-and-llm-visibility.md` -- `docs/dev/context-and-hooks/05-inter-extension-communication.md` -- `docs/dev/context-and-hooks/06-advanced-patterns-from-source.md` -- `docs/dev/context-and-hooks/07-the-system-prompt-anatomy.md` -- `docs/dev/context-and-hooks/README.md` - -### docs/dev/extending-pi/ -- *(26 files: 26 .md)* - -### docs/dev/pi-ui-tui/ -- *(24 files: 24 .md)* - -### docs/dev/proposals/ -- `docs/dev/proposals/698-browser-tools-feature-additions.md` -- `docs/dev/proposals/rfc-gitops-branching-strategy.md` - -### docs/dev/proposals/workflows/ -- `docs/dev/proposals/workflows/backmerge.yml` -- `docs/dev/proposals/workflows/create-release.yml` -- `docs/dev/proposals/workflows/README.md` -- `docs/dev/proposals/workflows/sync-next.yml` - -### docs/dev/superpowers/plans/ -- `docs/dev/superpowers/plans/2026-03-17-cicd-pipeline.md` - -### docs/dev/superpowers/specs/ -- `docs/dev/superpowers/specs/2026-03-17-cicd-pipeline-design.md` - -### docs/dev/what-is-pi/ -- `docs/dev/what-is-pi/01-what-pi-is.md` -- `docs/dev/what-is-pi/02-design-philosophy.md` -- `docs/dev/what-is-pi/03-the-four-modes-of-operation.md` -- `docs/dev/what-is-pi/04-the-architecture-how-everything-fits-together.md` -- `docs/dev/what-is-pi/05-the-agent-loop-how-pi-thinks.md` -- `docs/dev/what-is-pi/06-tools-how-pi-acts-on-the-world.md` -- 
`docs/dev/what-is-pi/07-sessions-memory-that-branches.md` -- `docs/dev/what-is-pi/08-compaction-how-pi-manages-context-limits.md` -- `docs/dev/what-is-pi/09-the-customization-stack.md` -- `docs/dev/what-is-pi/10-providers-models-multi-model-by-default.md` -- `docs/dev/what-is-pi/11-the-interactive-tui.md` -- `docs/dev/what-is-pi/12-the-message-queue-talking-while-pi-thinks.md` -- `docs/dev/what-is-pi/13-context-files-project-instructions.md` -- `docs/dev/what-is-pi/14-the-sdk-rpc-embedding-pi.md` -- `docs/dev/what-is-pi/15-pi-packages-the-ecosystem.md` -- `docs/dev/what-is-pi/16-why-pi-matters-what-makes-it-different.md` -- `docs/dev/what-is-pi/17-file-reference-all-documentation.md` -- `docs/dev/what-is-pi/18-quick-reference-commands-shortcuts.md` -- `docs/dev/what-is-pi/19-building-branded-apps-on-top-of-pi.md` -- `docs/dev/what-is-pi/README.md` - -### docs/user-docs/ -- *(21 files: 21 .md)* - -### docs/zh-CN/ -- `docs/zh-CN/README.md` - -### docs/zh-CN/user-docs/ -- *(21 files: 21 .md)* - -### gitbook/ -- `gitbook/README.md` -- `gitbook/SUMMARY.md` - -### gitbook/configuration/ -- `gitbook/configuration/custom-models.md` -- `gitbook/configuration/git-settings.md` -- `gitbook/configuration/mcp-servers.md` -- `gitbook/configuration/notifications.md` -- `gitbook/configuration/preferences.md` -- `gitbook/configuration/providers.md` - -### gitbook/core-concepts/ -- `gitbook/core-concepts/auto-mode.md` -- `gitbook/core-concepts/project-structure.md` -- `gitbook/core-concepts/step-mode.md` - -### gitbook/features/ -- `gitbook/features/captures.md` -- `gitbook/features/cost-management.md` -- `gitbook/features/dynamic-model-routing.md` -- `gitbook/features/github-sync.md` -- `gitbook/features/headless.md` -- `gitbook/features/parallel.md` -- `gitbook/features/remote-questions.md` -- `gitbook/features/skills.md` -- `gitbook/features/teams.md` -- `gitbook/features/token-optimization.md` -- `gitbook/features/visualizer.md` -- `gitbook/features/web-interface.md` -- 
`gitbook/features/workflow-templates.md` - -### gitbook/getting-started/ -- `gitbook/getting-started/choosing-a-model.md` -- `gitbook/getting-started/first-project.md` -- `gitbook/getting-started/installation.md` - -### gitbook/reference/ -- `gitbook/reference/cli-flags.md` -- `gitbook/reference/commands.md` -- `gitbook/reference/environment-variables.md` -- `gitbook/reference/keyboard-shortcuts.md` -- `gitbook/reference/migration.md` -- `gitbook/reference/troubleshooting.md` - -### sf-orchestrator/ -- `sf-orchestrator/SKILL.md` - -### sf-orchestrator/references/ -- `sf-orchestrator/references/answer-injection.md` -- `sf-orchestrator/references/commands.md` -- `sf-orchestrator/references/json-result.md` - -### sf-orchestrator/templates/ -- `sf-orchestrator/templates/spec.md` - -### sf-orchestrator/workflows/ -- `sf-orchestrator/workflows/build-from-spec.md` -- `sf-orchestrator/workflows/monitor-and-poll.md` -- `sf-orchestrator/workflows/step-by-step.md` - -### mintlify-docs/ -- `mintlify-docs/docs` -- `mintlify-docs/docs.json` -- `mintlify-docs/getting-started.mdx` -- `mintlify-docs/introduction.mdx` - -### mintlify-docs/guides/ -- `mintlify-docs/guides/auto-mode.mdx` -- `mintlify-docs/guides/captures-triage.mdx` -- `mintlify-docs/guides/change-management.mdx` -- `mintlify-docs/guides/commands.mdx` -- `mintlify-docs/guides/configuration.mdx` -- `mintlify-docs/guides/cost-management.mdx` -- `mintlify-docs/guides/custom-models.mdx` -- `mintlify-docs/guides/dynamic-model-routing.mdx` -- `mintlify-docs/guides/git-strategy.mdx` -- `mintlify-docs/guides/migration.mdx` -- `mintlify-docs/guides/parallel-orchestration.mdx` -- `mintlify-docs/guides/remote-questions.mdx` -- `mintlify-docs/guides/skills.mdx` -- `mintlify-docs/guides/token-optimization.mdx` -- `mintlify-docs/guides/troubleshooting.mdx` -- `mintlify-docs/guides/visualizer.mdx` -- `mintlify-docs/guides/web-interface.mdx` -- `mintlify-docs/guides/working-in-teams.mdx` - -### native/ -- `native/.gitignore` -- 
`native/.npmignore` -- `native/Cargo.toml` -- `native/README.md` - -### native/.cargo/ -- `native/.cargo/config.toml` - -### native/crates/ast/ -- `native/crates/ast/Cargo.toml` - -### native/crates/ast/src/ -- `native/crates/ast/src/ast.rs` -- `native/crates/ast/src/glob_util.rs` -- `native/crates/ast/src/lib.rs` - -### native/crates/ast/src/language/ -- `native/crates/ast/src/language/mod.rs` -- `native/crates/ast/src/language/parsers.rs` - -### native/crates/engine/ -- `native/crates/engine/build.rs` -- `native/crates/engine/Cargo.toml` - -### native/crates/engine/src/ -- *(22 files: 22 .rs)* - -### native/crates/grep/ -- `native/crates/grep/Cargo.toml` - -### native/crates/grep/src/ -- `native/crates/grep/src/lib.rs` - -### native/npm/darwin-arm64/ -- `native/npm/darwin-arm64/package.json` - -### native/npm/darwin-x64/ -- `native/npm/darwin-x64/package.json` - -### native/npm/linux-arm64-gnu/ -- `native/npm/linux-arm64-gnu/package.json` - -### native/npm/linux-x64-gnu/ -- `native/npm/linux-x64-gnu/package.json` - -### native/npm/win32-x64-msvc/ -- `native/npm/win32-x64-msvc/package.json` - -### native/scripts/ -- `native/scripts/build.js` -- `native/scripts/sync-platform-versions.cjs` - -### packages/daemon/ -- `packages/daemon/package.json` -- `packages/daemon/tsconfig.json` - -### packages/daemon/src/ -- *(27 files: 27 .ts)* - -### packages/mcp-server/ -- `packages/mcp-server/.npmignore` -- `packages/mcp-server/package.json` -- `packages/mcp-server/README.md` -- `packages/mcp-server/tsconfig.json` - -### packages/mcp-server/src/ -- `packages/mcp-server/src/cli.ts` -- `packages/mcp-server/src/env-writer.test.ts` -- `packages/mcp-server/src/env-writer.ts` -- `packages/mcp-server/src/import-candidates.test.ts` -- `packages/mcp-server/src/index.ts` -- `packages/mcp-server/src/mcp-server.test.ts` -- `packages/mcp-server/src/secure-env-collect.test.ts` -- `packages/mcp-server/src/server.ts` -- `packages/mcp-server/src/session-manager.ts` -- 
`packages/mcp-server/src/tool-credentials.test.ts` -- `packages/mcp-server/src/tool-credentials.ts` -- `packages/mcp-server/src/types.ts` -- `packages/mcp-server/src/workflow-tools.test.ts` -- `packages/mcp-server/src/workflow-tools.ts` - -### packages/mcp-server/src/readers/ -- `packages/mcp-server/src/readers/captures.ts` -- `packages/mcp-server/src/readers/doctor-lite.ts` -- `packages/mcp-server/src/readers/graph.test.ts` -- `packages/mcp-server/src/readers/graph.ts` -- `packages/mcp-server/src/readers/index.ts` -- `packages/mcp-server/src/readers/knowledge.ts` -- `packages/mcp-server/src/readers/metrics.ts` -- `packages/mcp-server/src/readers/paths.ts` -- `packages/mcp-server/src/readers/readers.test.ts` -- `packages/mcp-server/src/readers/roadmap.ts` -- `packages/mcp-server/src/readers/state.ts` - -### packages/native/ -- `packages/native/package.json` -- `packages/native/tsconfig.json` - -### packages/native/src/ -- `packages/native/src/index.ts` -- `packages/native/src/native.ts` - -### packages/native/src/__tests__/ -- `packages/native/src/__tests__/clipboard.test.mjs` -- `packages/native/src/__tests__/diff.test.mjs` -- `packages/native/src/__tests__/fd.test.mjs` -- `packages/native/src/__tests__/glob.test.mjs` -- `packages/native/src/__tests__/grep.test.mjs` -- `packages/native/src/__tests__/highlight.test.mjs` -- `packages/native/src/__tests__/html.test.mjs` -- `packages/native/src/__tests__/image.test.mjs` -- `packages/native/src/__tests__/json-parse.test.mjs` -- `packages/native/src/__tests__/module-compat.test.mjs` -- `packages/native/src/__tests__/ps.test.mjs` -- `packages/native/src/__tests__/stream-process.test.mjs` -- `packages/native/src/__tests__/text.test.mjs` -- `packages/native/src/__tests__/truncate.test.mjs` -- `packages/native/src/__tests__/ttsr.test.mjs` -- `packages/native/src/__tests__/xxhash.test.mjs` - -### packages/native/src/ast/ -- `packages/native/src/ast/index.ts` -- `packages/native/src/ast/types.ts` - -### 
packages/native/src/clipboard/ -- `packages/native/src/clipboard/index.ts` -- `packages/native/src/clipboard/types.ts` - -### packages/native/src/diff/ -- `packages/native/src/diff/index.ts` -- `packages/native/src/diff/types.ts` - -### packages/native/src/fd/ -- `packages/native/src/fd/index.ts` -- `packages/native/src/fd/types.ts` - -### packages/native/src/glob/ -- `packages/native/src/glob/index.ts` -- `packages/native/src/glob/types.ts` - -### packages/native/src/grep/ -- `packages/native/src/grep/index.ts` -- `packages/native/src/grep/types.ts` - -### packages/native/src/gsd-parser/ -- `packages/native/src/gsd-parser/index.ts` -- `packages/native/src/gsd-parser/types.ts` - -### packages/native/src/highlight/ -- `packages/native/src/highlight/index.ts` -- `packages/native/src/highlight/types.ts` - -### packages/native/src/html/ -- `packages/native/src/html/index.ts` -- `packages/native/src/html/types.ts` - -### packages/native/src/image/ -- `packages/native/src/image/index.ts` -- `packages/native/src/image/types.ts` - -### packages/native/src/json-parse/ -- `packages/native/src/json-parse/index.ts` - -### packages/native/src/ps/ -- `packages/native/src/ps/index.ts` -- `packages/native/src/ps/types.ts` - -### packages/native/src/stream-process/ -- `packages/native/src/stream-process/index.ts` - -### packages/native/src/text/ -- `packages/native/src/text/index.ts` -- `packages/native/src/text/types.ts` - -### packages/native/src/truncate/ -- `packages/native/src/truncate/index.ts` - -### packages/native/src/ttsr/ -- `packages/native/src/ttsr/index.ts` -- `packages/native/src/ttsr/types.ts` - -### packages/native/src/xxhash/ -- `packages/native/src/xxhash/index.ts` - -### packages/pi-agent-core/ -- `packages/pi-agent-core/package.json` -- `packages/pi-agent-core/tsconfig.json` - -### packages/pi-agent-core/src/ -- `packages/pi-agent-core/src/agent-loop.test.ts` -- `packages/pi-agent-core/src/agent-loop.ts` -- `packages/pi-agent-core/src/agent.test.ts` -- 
`packages/pi-agent-core/src/agent.ts` -- `packages/pi-agent-core/src/index.ts` -- `packages/pi-agent-core/src/proxy.ts` -- `packages/pi-agent-core/src/types.ts` - -### packages/pi-ai/ -- `packages/pi-ai/bedrock-provider.d.ts` -- `packages/pi-ai/bedrock-provider.js` -- `packages/pi-ai/oauth.d.ts` -- `packages/pi-ai/oauth.js` -- `packages/pi-ai/package.json` - -### packages/pi-ai/scripts/ -- `packages/pi-ai/scripts/generate-models.ts` - -### packages/pi-ai/src/ -- `packages/pi-ai/src/api-registry.ts` -- `packages/pi-ai/src/bedrock-provider.ts` -- `packages/pi-ai/src/cli.ts` -- `packages/pi-ai/src/env-api-keys.ts` -- `packages/pi-ai/src/index.ts` -- `packages/pi-ai/src/models.custom.ts` -- `packages/pi-ai/src/models.generated.test.ts` -- `packages/pi-ai/src/models.generated.ts` -- `packages/pi-ai/src/models.test.ts` -- `packages/pi-ai/src/models.ts` -- `packages/pi-ai/src/oauth.ts` -- `packages/pi-ai/src/stream.ts` -- `packages/pi-ai/src/types.ts` -- `packages/pi-ai/src/web-runtime-env-api-keys.ts` - -### packages/pi-ai/src/providers/ -- *(25 files: 25 .ts)* - -### packages/pi-ai/src/utils/ -- `packages/pi-ai/src/utils/event-stream.ts` -- `packages/pi-ai/src/utils/hash.ts` -- `packages/pi-ai/src/utils/json-parse.ts` -- `packages/pi-ai/src/utils/overflow.ts` -- `packages/pi-ai/src/utils/repair-tool-json.ts` -- `packages/pi-ai/src/utils/sanitize-unicode.ts` -- `packages/pi-ai/src/utils/typebox-helpers.ts` -- `packages/pi-ai/src/utils/validation.ts` - -### packages/pi-ai/src/utils/oauth/ -- `packages/pi-ai/src/utils/oauth/github-copilot.test.ts` -- `packages/pi-ai/src/utils/oauth/github-copilot.ts` -- `packages/pi-ai/src/utils/oauth/google-antigravity.ts` -- `packages/pi-ai/src/utils/oauth/google-gemini-cli.ts` -- `packages/pi-ai/src/utils/oauth/google-oauth-utils.ts` -- `packages/pi-ai/src/utils/oauth/index.ts` -- `packages/pi-ai/src/utils/oauth/openai-codex.ts` -- `packages/pi-ai/src/utils/oauth/pkce.ts` -- `packages/pi-ai/src/utils/oauth/types.ts` - -### 
packages/pi-ai/src/utils/tests/ -- `packages/pi-ai/src/utils/tests/json-parse.test.ts` -- `packages/pi-ai/src/utils/tests/overflow.test.ts` -- `packages/pi-ai/src/utils/tests/repair-tool-json.test.ts` diff --git a/.gsd/audit/events.jsonl b/.gsd/audit/events.jsonl deleted file mode 100644 index 7f2cf8f30..000000000 --- a/.gsd/audit/events.jsonl +++ /dev/null @@ -1,4 +0,0 @@ -{"eventId":"9567a0bc-d8a2-410d-83a8-4ea091e095a7","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.561Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}} -{"eventId":"d1765e7e-d2dc-4417-9fb8-0bec6e01e9a8","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.563Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}} -{"eventId":"9c2b6de3-b8eb-4a51-af8a-91be51fecfc9","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.516Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}} -{"eventId":"8597d568-05b8-43ed-89d7-ca4673079e0f","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.518Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}} diff --git a/.gsd/notifications.jsonl b/.gsd/notifications.jsonl deleted file mode 100644 index 788a40e93..000000000 --- a/.gsd/notifications.jsonl +++ /dev/null @@ -1,10 +0,0 @@ -{"id":"76bf27b0-01bf-4260-80f6-b7d8249c6875","ts":"2026-04-15T06:32:30.018Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to 
/home/mhugo/.gsd/agent/settings.json","source":"notify","read":false} -{"id":"597c94ae-7c3b-48dd-89b1-be8d0bbd02ee","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false} -{"id":"dc176d95-8171-4d15-8c73-97ddb704a786","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false} -{"id":"66762fce-d6c6-41db-be03-d34348aaccd9","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false} -{"id":"b7e5e997-b98d-4b50-a6f3-017a916dd2ac","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false} -{"id":"eccbb677-be17-44b9-a7b6-440ebf777a89","ts":"2026-04-15T06:33:47.202Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false} -{"id":"98803c8a-c9f1-43bd-9903-f67fea7a5128","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false} -{"id":"a9253906-1990-4957-9c1a-36046b8d3cfa","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false} -{"id":"8caa4904-0ce5-46f4-b645-df5077fb229e","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false} -{"id":"eb520a00-567d-4c02-bb2e-6111089dc3de","ts":"2026-04-15T09:03:17.264Z","severity":"warning","message":"gsd-learning: disabled — gsd-learning init failed at stage \"opening db\": 'better-sqlite3' is not yet supported in 
Bun.\nTrack the status in https://github.com/oven-sh/bun/issues/4290\nIn the meantime, you could try bun:sqlite which has a similar API.","source":"notify","read":false} diff --git a/CHANGELOG.md b/CHANGELOG.md index a6b20d910..81ac2688e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -283,7 +283,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - **sf**: auto-refresh codebase cache - **sf**: align model switching and prefs surfaces - route slice and validation artifacts through DB tools -- make gsd_complete_task the only execute-task summary path +- make sf_complete_task the only execute-task summary path - **docs**: stop pointing repo documentation to sf.build - add activeEngineId and activeRunDir to PausedSessionMetadata interface - **sf**: address QA round 4 @@ -426,8 +426,8 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - **sf**: stop renderAllProjections from overwriting authoritative PLAN.md - **sf**: auto-checkout to main when isolation:none finds stale milestone branch - **sf**: auto-remediate stale slice DB status when SUMMARY exists on disk -- **sf**: open DB on demand in gsd_milestone_status for non-auto sessions -- **sf**: detect phantom milestones from abandoned gsd_milestone_generate_id +- **sf**: open DB on demand in sf_milestone_status for non-auto sessions +- **sf**: detect phantom milestones from abandoned sf_milestone_generate_id - **sf**: force re-validation when verdict is needs-remediation - **sf**: exclude closed slices from findMissingSummaries check - **sf**: recover from stale lockfile after crash or SIGKILL @@ -686,7 +686,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
- detect project relocation and recover state without data loss (#3080) - add free-text input to ask-user-questions when "None of the above" is selected (#3081) - block work execution during /sf queue mode (#2545) (#3082) -- detect worktree basePath in gsdRoot() to prevent escaping to project root (#3083) +- detect worktree basePath in sfRoot() to prevent escaping to project root (#3083) - invalidate stale quick-task captures across milestone boundaries (#3084) - defer model validation until after extensions register (#3089) - repair YAML bullet lists in malformed tool-call JSON (#3090) @@ -722,7 +722,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - align @sf/native module type with compiled output (#3253) - parse hook/* completed-unit keys correctly in forensics + doctor (#2826) (#3252) - copy mcp.json into auto-mode worktrees (#2791) (#3251) -- add gsd_requirement_save and upsert path for requirement updates (#3249) +- add sf_requirement_save and upsert path for requirement updates (#3249) - handle pause_turn stop reason to prevent 400 errors with native web search (#2869) (#3248) - use authoritative milestone status in web roadmap (#2807) (#3258) - classify long-context entitlement 429 as quota_exhausted, not rate_limit (#2803) (#3257) @@ -989,11 +989,11 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
- **sf**: handle session_switch event so /resume restores SF state (#2587) - use GitHub Issue Types via GraphQL instead of classification labels - **headless**: disable overall timeout for auto-mode, fix lock-guard auto-select (#2586) -- **auto**: align UAT artifact suffix with gsd_slice_complete output (#2592) +- **auto**: align UAT artifact suffix with sf_slice_complete output (#2592) - **retry-handler**: stop treating 5xx server errors as credential-level failures - **test**: replace stale completedUnits with sessionFile in session-lock test - **session-lock**: retry lock file reads before declaring compromise -- **sf**: prevent ensureGsdSymlink from creating subdirectory .sf when git-root .sf exists +- **sf**: prevent ensureSfSymlink from creating subdirectory .sf when git-root .sf exists - **auto**: add EAGAIN to INFRA_ERROR_CODES to stop budget-burning retries - **search**: enforce hard search budget and survive context compaction - **remote-questions**: use static ESM import for AuthStorage hydration @@ -1814,7 +1814,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - **sf**: remove STATE.md update instructions from all prompts (#983) - **sf**: clear all caches after discuss dispatch so picker sees new CONTEXT files (#981) - **auto**: dispatch retry after verification gate failure (#998) -- enforce GSDError usage and activate unused error codes (#997) +- enforce SFError usage and activate unused error codes (#997) - unify extension discovery logic (#995) - deduplicate tierLabel/tierOrdinal exports (#988) - deduplicate getMainBranch implementations (#994) @@ -1931,7 +1931,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). 
- `require_slice_discussion` option to pause auto-mode before each slice for human review - Discussion status indicators in `/sf discuss` slice picker - Worker NDJSON monitoring and budget enforcement for parallel orchestration -- `gsd_generate_milestone_id` tool for multi-milestone unique ID generation +- `sf_generate_milestone_id` tool for multi-milestone unique ID generation - Alt+V clipboard image paste shortcut on macOS - Hashline edit mode integration into active workflow - Fallback parser for prose-style roadmaps without `## Slices` section @@ -1954,7 +1954,7 @@ Format based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). - Debug logging for silent early-return paths in dispatchNextUnit - Untracked .sf/ state files removed before milestone merge checkout - Crash prevention when cancelling OAuth provider login dialog -- Resource staleness check compares gsdVersion instead of syncedAt +- Resource staleness check compares sfVersion instead of syncedAt - Unique temp paths in saveFile() to prevent parallel write collisions - Validation/summary file generation for completed milestones during migration - Cache invalidation before initial state derivation in startAuto diff --git a/README.md b/README.md index cf1eff8bc..6b04bba3a 100644 --- a/README.md +++ b/README.md @@ -771,7 +771,7 @@ Use expensive models where quality matters (planning, complex execution) and che | Project | Description | | ------- | ----------- | -| [GSD2 Config Utility](https://github.com/jeremymcs/gsd2-config) | Standalone configuration tool for managing SF preferences, providers, and API keys | +| [SF2 Config Utility](https://github.com/jeremymcs/sf-config) | Standalone configuration tool for managing SF preferences, providers, and API keys | --- diff --git a/docs/SPEC_FIRST_TDD.md b/docs/SPEC_FIRST_TDD.md index 5acb7cf37..f85299bdf 100644 --- a/docs/SPEC_FIRST_TDD.md +++ b/docs/SPEC_FIRST_TDD.md @@ -262,7 +262,7 @@ If a task cannot be described this way, it is underspecified. 
- [`AGENTS.md`](../AGENTS.md) — repo guidelines, build/test/lint commands. - [`SPEC.md`](../SPEC.md) — sf v3 specification (what we're building). -- [`UPSTREAM_PORT_GUIDE.md`](../UPSTREAM_PORT_GUIDE.md) — porting from pi-mono / gsd-2. +- [`UPSTREAM_PORT_GUIDE.md`](../UPSTREAM_PORT_GUIDE.md) — porting from pi-mono legacy port. - [`src/resources/extensions/sf/skills/advisory-partner/SKILL.md`](../src/resources/extensions/sf/skills/advisory-partner/SKILL.md) — adversarial review framework. - [`src/resources/extensions/sf/skills/code-review/SKILL.md`](../src/resources/extensions/sf/skills/code-review/SKILL.md) — multi-lens review skill. diff --git a/docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md b/docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md deleted file mode 100644 index 7760b40b1..000000000 --- a/docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md +++ /dev/null @@ -1,240 +0,0 @@ -# ADR-008: Expose SF Workflow Tools Over MCP for Provider Parity - -**Status:** Proposed -**Date:** 2026-04-09 -**Deciders:** Jeremy McSpadden -**Related:** ADR-004 (capability-aware model routing), ADR-007 (model catalog split and provider API encapsulation), `src/resources/extensions/sf/bootstrap/db-tools.ts`, `src/resources/extensions/claude-code-cli/stream-adapter.ts`, `packages/mcp-server/src/server.ts` - -## Context - -SF currently has two different tool surfaces: - -1. **In-process extension tools** registered directly into the runtime via `pi.registerTool(...)`. -2. **An external MCP server** that exposes session orchestration and read-only project inspection. - -This split is now creating a real provider compatibility problem. - -### What exists today - -The core SF workflow tools are internal extension tools. 
Examples include: - -- `sf_summary_save` -- `sf_plan_milestone` -- `sf_plan_slice` -- `sf_plan_task` -- `sf_task_complete` / `sf_complete_task` -- `sf_slice_complete` -- `sf_complete_milestone` -- `sf_validate_milestone` -- `sf_replan_slice` -- `sf_reassess_roadmap` - -These are registered in `src/resources/extensions/sf/bootstrap/db-tools.ts` and related bootstrap files. SF prompts assume these tools are available during discuss, plan, and execute flows. - -Separately, `packages/mcp-server/src/server.ts` exposes a different tool surface: - -- session control: `sf_execute`, `sf_status`, `sf_result`, `sf_cancel`, `sf_query`, `sf_resolve_blocker` -- read-only inspection: `sf_progress`, `sf_roadmap`, `sf_history`, `sf_doctor`, `sf_captures`, `sf_knowledge` - -That MCP server is useful, but it is **not** a transport for the internal workflow/mutation tools. - -### The current failure mode - -The Claude Code CLI provider uses the Anthropic Agent SDK through `src/resources/extensions/claude-code-cli/stream-adapter.ts`. That adapter starts a Claude SDK session, but it does not forward the internal SF tool registry into the SDK session, nor does it attach a SF MCP server for those tools. - -As a result: - -- prompts tell the model to call tools like `sf_complete_task` -- the tools exist in SF -- but Claude Code sessions do not actually receive those tools - -This produces a contract mismatch: the model is required to use tools that are unavailable in that provider path. - -### Why this matters - -This is not a one-off Claude Code bug. It reveals a deeper architectural issue: - -- SF’s core workflow contract is transport-specific -- prompt authors assume “internal extension tool availability” -- provider integrations do not all share the same execution surface - -If SF wants provider parity, its workflow tools need a transport-neutral exposure model. 
- -## Decision - -**Expose the SF workflow tool contract over MCP as a first-class transport, and make MCP the compatibility layer for providers that cannot directly access the in-process SF tool registry.** - -This means: - -1. SF will keep its existing in-process tool registration for native runtime use. -2. SF will add an MCP execution surface for the same workflow tools. -3. Both surfaces must call the same underlying business logic. -4. Provider integrations such as Claude Code will use the MCP surface when they cannot access native in-process tools directly. - -The decision is explicitly **not** to replace the native tool system with MCP everywhere. MCP is the parity and portability layer, not the only runtime path. - -## Decision Details - -### 1. One handler layer, multiple transports - -SF tool behavior must not be implemented twice. - -The transport-neutral business logic for workflow tools should be shared by: - -- native extension tool registration (`pi.registerTool(...)`) -- MCP server tool registration - -The MCP server should wrap the same handlers used by `db-tools.ts`, `query-tools.ts`, and related modules. This avoids logic drift and keeps validation, DB writes, file rendering, and recovery behavior consistent. - -### 2. Add a workflow-tool MCP surface - -SF will expose the workflow tools required for discuss, planning, execution, and completion over MCP. - -Initial minimum set: - -- `sf_summary_save` -- `sf_decision_save` -- `sf_plan_milestone` -- `sf_plan_slice` -- `sf_plan_task` -- `sf_task_complete` -- `sf_slice_complete` -- `sf_complete_milestone` -- `sf_validate_milestone` -- `sf_replan_slice` -- `sf_reassess_roadmap` -- `sf_save_gate_result` -- selected read/query tools such as `sf_milestone_status` - -Aliases should be treated conservatively. MCP should prefer canonical names unless compatibility requires exposing aliases. - -### 3. 
Preserve safety semantics - -The current SF safety model includes write gates, discussion gates, queue-mode restrictions, and state integrity guarantees. - -Those guarantees must continue to apply when tools are invoked over MCP. In particular: - -- MCP must not create a path that bypasses write gating -- MCP mutations must preserve the same DB/file/state invariants as native tools -- provider-specific fallback behavior must not allow manual summary writing in place of canonical completion tools - -### 4. Make provider capability checks explicit - -Before dispatching a workflow that requires SF workflow tools, SF should check whether the selected provider/session can access the required tool surface. - -If a provider cannot access either: - -- native in-process SF tools, or -- the SF MCP workflow tool surface - -then SF must fail early with a clear compatibility error rather than allowing execution to continue in a degraded, state-breaking mode. - -### 5. Keep the existing session/read MCP server - -The existing MCP server in `packages/mcp-server` remains valid. It serves a different purpose: - -- remote session orchestration -- status/result polling -- filesystem-backed project inspection - -The new workflow-tool MCP surface is complementary, not a replacement. - -## Alternatives Considered - -### Alternative A: Reroute away from Claude Code whenever tool-backed execution is needed - -This would fix the immediate failure for multi-provider users, but it does not solve provider parity. It also fails completely for users who only have Claude Code configured. - -**Rejected** because it treats the symptom, not the architectural gap. - -### Alternative B: Hard-fail Claude Code and require another provider - -This is a valid short-term guardrail and may still be used before MCP support is complete. - -**Rejected as the long-term architecture** because it permanently excludes a supported provider from first-class SF execution. 
- -### Alternative C: Inject the internal SF tool registry directly into the Claude Agent SDK without MCP - -This would tightly couple SF’s internal extension runtime to a provider-specific integration path. It would not generalize well to other providers or external tool clients. - -**Rejected** because it creates a provider-specific bridge instead of a transport-neutral contract. - -### Alternative D: Replace native SF tools entirely with MCP - -This would simplify the conceptual model, but it would force all runtimes through an external protocol boundary even when the native in-process path is faster and already works well. - -**Rejected** because MCP is needed for portability, not because the native tool system is flawed. - -## Consequences - -### Positive - -1. **Provider parity improves.** Providers that can consume MCP tools can participate in full SF workflow execution. -2. **The workflow contract becomes transport-neutral.** Prompts can rely on capabilities rather than a specific runtime implementation detail. -3. **One compatibility story for external clients.** Claude Code, Cursor, and other MCP-capable clients can use the same workflow tool surface. -4. **Better long-term architecture.** Internal tools and external transports converge on shared handlers instead of diverging implementations. - -### Negative - -1. **Larger surface area to secure and test.** Mutation tools over MCP are higher risk than read-only inspection tools. -2. **Migration complexity.** Tool registration, gating, and handler extraction must be refactored carefully. -3. **Two transport paths must remain aligned.** Native and MCP invocation semantics must stay behaviorally identical. 
- -### Neutral / Tradeoff - -The system will now support: - -- native in-process tool execution when available -- MCP-backed tool execution when native access is unavailable - -That is more complex than a single-path system, but it is the cost of provider portability without sacrificing native runtime quality. - -## Migration Plan - -### Phase 1: Extract shared handlers - -Refactor workflow tools so MCP and native registration can call the same transport-neutral functions. - -Priority targets: - -- `sf_summary_save` -- `sf_task_complete` -- `sf_plan_milestone` -- `sf_plan_slice` -- `sf_plan_task` - -### Phase 2: Stand up the workflow-tool MCP server - -Add a new MCP surface for workflow tool execution. This may extend the existing MCP package or live as a sibling package, but it must be clearly separated from the current session/read API. - -### Phase 3: Port safety enforcement - -Move or centralize write gates and related policy checks so MCP mutations cannot bypass the existing safety model. - -### Phase 4: Attach MCP workflow tools to Claude Code sessions - -Update the Claude Code provider integration to pass a SF-managed `mcpServers` configuration into the Claude Agent SDK session when required. - -### Phase 5: Add provider capability gating - -Before tool-dependent flows begin, verify that the active provider can access the required SF workflow tools via either native registration or MCP. - -### Phase 6: Update prompts and docs - -Prompt contracts should remain strict about using canonical SF completion/planning tools, but documentation and runtime messaging must no longer assume that only native in-process tool registration satisfies that contract. - -## Validation - -Success is defined by all of the following: - -1. A Claude Code-backed execution session can complete a task using canonical SF workflow tools without manual summary writing. -2. Native provider behavior remains unchanged. -3. 
MCP-invoked workflow tools produce the same DB updates, rendered artifacts, and state transitions as native tool calls. -4. Write-gate and discussion-gate protections still hold under MCP invocation. -5. When required capabilities are unavailable, SF fails early with a precise compatibility error. - -## Scope Notes - -This ADR establishes the architectural direction. It does **not** require full MCP exposure of every historical alias or every auxiliary tool in the first implementation. - -The first implementation should prioritize the minimum workflow tool set needed to make discuss/plan/execute/complete flows work safely for MCP-capable providers. diff --git a/docs/dev/ADR-010-pi-clean-seam-architecture.md b/docs/dev/ADR-010-pi-clean-seam-architecture.md index 224c36569..a284638f4 100644 --- a/docs/dev/ADR-010-pi-clean-seam-architecture.md +++ b/docs/dev/ADR-010-pi-clean-seam-architecture.md @@ -134,7 +134,7 @@ export { runInteractiveMode } from './modes/interactive/index.js' export { runRpcMode, RpcMode } from './modes/rpc/index.js' export { runPrintMode } from './modes/print/index.js' export { RpcClient } from './modes/rpc/rpc-client.js' -export { parseArgs, GsdArgs } from './cli/args.js' +export { parseArgs, SfArgs } from './cli/args.js' export { main } from './main.js' ``` @@ -185,8 +185,8 @@ const STATIC_BUNDLED_MODULES = { "@sf/pi-ai": _bundledPiAi, "@sf/pi-tui": _bundledPiTui, "@sf/pi-coding-agent": _bundledPiCodingAgent, - "@sf/agent-core": _bundledGsdAgentCore, // NEW - "@sf/agent-modes": _bundledGsdAgentModes, // NEW + "@sf/agent-core": _bundledSfAgentCore, // NEW + "@sf/agent-modes": _bundledSfAgentModes, // NEW // ... 
} ``` diff --git a/docs/dev/FILE-SYSTEM-MAP.md b/docs/dev/FILE-SYSTEM-MAP.md index 5ca78f55d..921c9e88f 100644 --- a/docs/dev/FILE-SYSTEM-MAP.md +++ b/docs/dev/FILE-SYSTEM-MAP.md @@ -683,7 +683,7 @@ | File | System Label(s) | Description | |------|-----------------|-------------| | web/app/layout.tsx | Web UI | Root Next.js layout with theme provider and font | -| web/app/page.tsx | Web UI | Entry page loading GSDAppShell | +| web/app/page.tsx | Web UI | Entry page loading SFAppShell | | web/components/sf/app-shell.tsx | Web UI | Main app shell — sidebar, panels, terminal, commands | | web/components/sf/sidebar.tsx | Web UI | Multi-panel sidebar with milestone explorer | | web/components/sf/status-bar.tsx | Web UI | Status bar with workspace state and metrics | diff --git a/packages/daemon/src/discord-bot.test.ts b/packages/daemon/src/discord-bot.test.ts index 2ae539936..0dc44da50 100644 --- a/packages/daemon/src/discord-bot.test.ts +++ b/packages/daemon/src/discord-bot.test.ts @@ -737,7 +737,7 @@ describe('Daemon orchestrator wiring', () => { describe('/sf-start and /sf-stop logic', () => { // These test the observable logic paths exercised by the handlers. - // Since handleGsdStart/handleGsdStop are private, we test the data layer + // Since handleSfStart/handleSfStop are private, we test the data layer // they depend on — project scanning, session listing, and edge cases. 
it('/sf-start: scanForProjects returning 0 projects', async () => { @@ -761,7 +761,7 @@ describe('/sf-start and /sf-stop logic', () => { }); it('/sf-stop: filters to active sessions only', () => { - // Simulate the filter logic used in handleGsdStop + // Simulate the filter logic used in handleSfStop const allSessions: Partial[] = [ { sessionId: 's1', status: 'running', projectName: 'alpha' }, { sessionId: 's2', status: 'completed', projectName: 'beta' }, diff --git a/packages/daemon/src/discord-bot.ts b/packages/daemon/src/discord-bot.ts index 9aed1475c..90feb5140 100644 --- a/packages/daemon/src/discord-bot.ts +++ b/packages/daemon/src/discord-bot.ts @@ -297,14 +297,14 @@ export class DiscordBot { break; } case 'sf-start': - this.handleGsdStart(interaction).catch((err) => { + this.handleSfStart(interaction).catch((err) => { this.logger.warn('sf-start handler error', { error: err instanceof Error ? err.message : String(err), }); }); break; case 'sf-stop': - this.handleGsdStop(interaction).catch((err) => { + this.handleSfStop(interaction).catch((err) => { this.logger.warn('sf-stop handler error', { error: err instanceof Error ? 
err.message : String(err), }); @@ -343,7 +343,7 @@ export class DiscordBot { // Private: /sf-start handler // --------------------------------------------------------------------------- - private async handleGsdStart(interaction: import('discord.js').ChatInputCommandInteraction): Promise { + private async handleSfStart(interaction: import('discord.js').ChatInputCommandInteraction): Promise { await interaction.deferReply({ ephemeral: true }); this.logger.info('sf-start: scanning projects'); @@ -426,7 +426,7 @@ export class DiscordBot { // Private: /sf-stop handler // --------------------------------------------------------------------------- - private async handleGsdStop(interaction: import('discord.js').ChatInputCommandInteraction): Promise { + private async handleSfStop(interaction: import('discord.js').ChatInputCommandInteraction): Promise { await interaction.deferReply({ ephemeral: true }); this.logger.info('sf-stop: listing sessions'); diff --git a/packages/mcp-server/src/mcp-server.test.ts b/packages/mcp-server/src/mcp-server.test.ts index ad3b9b429..f6f99df9c 100644 --- a/packages/mcp-server/src/mcp-server.test.ts +++ b/packages/mcp-server/src/mcp-server.test.ts @@ -524,12 +524,12 @@ describe('SessionManager', () => { // --------------------------------------------------------------------------- describe('SessionManager.resolveCLIPath', () => { - const originalGsdPath = process.env['SF_CLI_PATH']; + const originalSfPath = process.env['SF_CLI_PATH']; const originalPath = process.env['PATH']; afterEach(() => { - if (originalGsdPath !== undefined) { - process.env['SF_CLI_PATH'] = originalGsdPath; + if (originalSfPath !== undefined) { + process.env['SF_CLI_PATH'] = originalSfPath; } else { delete process.env['SF_CLI_PATH']; } diff --git a/packages/mcp-server/src/readers/paths.ts b/packages/mcp-server/src/readers/paths.ts index 1c501840e..f5288bdd8 100644 --- a/packages/mcp-server/src/readers/paths.ts +++ b/packages/mcp-server/src/readers/paths.ts @@ -30,9 +30,9 
@@ export function resolveSFRoot(projectDir: string): string { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'], }).trim(); - const gitGsd = join(gitRoot, '.sf'); - if (existsSync(gitGsd) && statSync(gitGsd).isDirectory()) { - return gitGsd; + const gitSf = join(gitRoot, '.sf'); + if (existsSync(gitSf) && statSync(gitSf).isDirectory()) { + return gitSf; } } catch { // Not a git repo or git not available diff --git a/packages/native/src/forge-parser/index.ts b/packages/native/src/forge-parser/index.ts index 613095485..38eee49d2 100644 --- a/packages/native/src/forge-parser/index.ts +++ b/packages/native/src/forge-parser/index.ts @@ -20,7 +20,7 @@ export type { NativeBoundaryMapEntry, NativeRoadmap, NativeRoadmapSlice, - ParsedGsdFile, + ParsedSfFile, SectionResult, } from "./types.js"; @@ -77,10 +77,10 @@ export function extractAllSections( * Reads and parses all markdown files under the given directory. * Each file gets frontmatter parsing and section extraction. */ -export function batchParseGsdFiles( +export function batchParseSfFiles( directory: string, ): BatchParseResult { - return (native as Record).batchParseGsdFiles( + return (native as Record).batchParseSfFiles( directory, ) as BatchParseResult; } diff --git a/packages/native/src/forge-parser/types.ts b/packages/native/src/forge-parser/types.ts index da0bc36f5..e3a1196d4 100644 --- a/packages/native/src/forge-parser/types.ts +++ b/packages/native/src/forge-parser/types.ts @@ -19,7 +19,7 @@ export interface SectionResult { found: boolean; } -export interface ParsedGsdFile { +export interface ParsedSfFile { /** Relative path from the base directory. */ path: string; /** Parsed frontmatter as JSON string. */ @@ -32,7 +32,7 @@ export interface ParsedGsdFile { export interface BatchParseResult { /** All parsed files. */ - files: ParsedGsdFile[]; + files: ParsedSfFile[]; /** Number of files processed. 
*/ count: number; } diff --git a/packages/native/src/index.ts b/packages/native/src/index.ts index cc87f0913..6428a190f 100644 --- a/packages/native/src/index.ts +++ b/packages/native/src/index.ts @@ -111,7 +111,7 @@ export { parseFrontmatter, extractSection as nativeExtractSection, extractAllSections, - batchParseGsdFiles, + batchParseSfFiles, parseRoadmapFile, } from "./forge-parser/index.js"; export type { @@ -120,7 +120,7 @@ export type { NativeBoundaryMapEntry, NativeRoadmap, NativeRoadmapSlice, - ParsedGsdFile, + ParsedSfFile, SectionResult, } from "./forge-parser/index.js"; diff --git a/packages/native/src/native.ts b/packages/native/src/native.ts index a2b0083f9..589a8bc2f 100644 --- a/packages/native/src/native.ts +++ b/packages/native/src/native.ts @@ -142,7 +142,7 @@ export const native = loadNative() as { parseFrontmatter: (content: string) => unknown; extractSection: (content: string, heading: string, level?: number) => unknown; extractAllSections: (content: string, level?: number) => string; - batchParseGsdFiles: (directory: string) => unknown; + batchParseSfFiles: (directory: string) => unknown; parseRoadmapFile: (content: string) => unknown; truncateTail: (text: string, maxBytes: number) => unknown; truncateHead: (text: string, maxBytes: number) => unknown; diff --git a/packages/pi-coding-agent/src/core/sdk.ts b/packages/pi-coding-agent/src/core/sdk.ts index e8c8d9fdb..c39b7f7a7 100644 --- a/packages/pi-coding-agent/src/core/sdk.ts +++ b/packages/pi-coding-agent/src/core/sdk.ts @@ -3,7 +3,7 @@ import { join } from "node:path"; /** * Lightweight PATH scan for the `claude` binary — no subprocess, no network. - * Mirrors the check in src/resources/extensions/gsd/doctor-providers.ts so the + * Mirrors the check in src/resources/extensions/sf/doctor-providers.ts so the * legacy Anthropic OAuth self-heal path can only trigger when the user has a * working Claude Code CLI to fall back to. 
*/ diff --git a/packages/pi-coding-agent/src/tests/system-prompt-skill-filter.test.ts b/packages/pi-coding-agent/src/tests/system-prompt-skill-filter.test.ts index afa652db4..a81cc2eb2 100644 --- a/packages/pi-coding-agent/src/tests/system-prompt-skill-filter.test.ts +++ b/packages/pi-coding-agent/src/tests/system-prompt-skill-filter.test.ts @@ -1,4 +1,4 @@ -// @gsd/pi-coding-agent + system-prompt-skill-filter.test — coverage for the +// @sf/pi-coding-agent + system-prompt-skill-filter.test — coverage for the // optional `skillFilter` option added to buildSystemPrompt (RFC #4779). The // filter lets consumers narrow the catalog rendered into // the cached system prompt without touching skill loading or invocation. diff --git a/scripts/compile-tests.mjs b/scripts/compile-tests.mjs index 7f7525b0b..28105a307 100644 --- a/scripts/compile-tests.mjs +++ b/scripts/compile-tests.mjs @@ -200,7 +200,7 @@ async function main() { } // Ensure dist-test/node_modules exists so resource-loader.ts (which computes - // packageRoot from import.meta.url) resolves gsdNodeModules to a real path. + // packageRoot from import.meta.url) resolves sfNodeModules to a real path. // Without this, initResources creates dangling symlinks in test environments. const distNodeModules = join(ROOT, 'dist-test', 'node_modules'); if (!existsSync(distNodeModules)) { diff --git a/scripts/parallel-monitor.mjs b/scripts/parallel-monitor.mjs index 5deec7d7e..a0e0639fd 100755 --- a/scripts/parallel-monitor.mjs +++ b/scripts/parallel-monitor.mjs @@ -275,7 +275,7 @@ function extractCostFromNdjson(mid) { // ─── Self-Healing ──────────────────────────────────────────────────────────── // Auto-detect the SF loader path — works across npm global, homebrew, and local installs -function findGsdLoader() { +function findSfLoader() { // 1. 
Check if we're running from inside the sf-2 repo itself const repoLoader = path.resolve(import.meta.dirname, '..', 'dist', 'loader.js'); if (fs.existsSync(repoLoader)) return repoLoader; @@ -308,7 +308,7 @@ function findGsdLoader() { return null; } -const SF_LOADER = findGsdLoader(); +const SF_LOADER = findSfLoader(); /** * Respawn a dead worker. Returns the new PID or null on failure. diff --git a/scripts/sync-pkg-version.cjs b/scripts/sync-pkg-version.cjs index 2503bf6df..8b1e2e05d 100644 --- a/scripts/sync-pkg-version.cjs +++ b/scripts/sync-pkg-version.cjs @@ -16,15 +16,15 @@ const { resolve, join } = require('path') const root = resolve(__dirname, '..') const piPkgPath = join(root, 'packages', 'pi-coding-agent', 'package.json') -const gsdPkgPath = join(root, 'pkg', 'package.json') +const sfPkgPath = join(root, 'pkg', 'package.json') const piPkg = JSON.parse(readFileSync(piPkgPath, 'utf-8')) -const gsdPkg = JSON.parse(readFileSync(gsdPkgPath, 'utf-8')) +const sfPkg = JSON.parse(readFileSync(sfPkgPath, 'utf-8')) -if (gsdPkg.version !== piPkg.version) { - console.log(`[sync-pkg-version] Updating pkg/package.json version: ${gsdPkg.version} → ${piPkg.version}`) - gsdPkg.version = piPkg.version - writeFileSync(gsdPkgPath, JSON.stringify(gsdPkg, null, 2) + '\n') +if (sfPkg.version !== piPkg.version) { + console.log(`[sync-pkg-version] Updating pkg/package.json version: ${sfPkg.version} → ${piPkg.version}`) + sfPkg.version = piPkg.version + writeFileSync(sfPkgPath, JSON.stringify(sfPkg, null, 2) + '\n') } else { console.log(`[sync-pkg-version] pkg/package.json version already matches: ${piPkg.version}`) } diff --git a/scripts/verify-s04.sh b/scripts/verify-s04.sh index cbd0fe80b..1e05e470b 100755 --- a/scripts/verify-s04.sh +++ b/scripts/verify-s04.sh @@ -162,7 +162,7 @@ wait "$smoke_pid" 2>/dev/null || true ext_errors=$(grep "Extension load error" "$smoke_out" 2>/dev/null | wc -l | tr -d ' ') # Strip ANSI escape codes for branding check plain_out=$(sed 
's/\x1b\[[0-9;]*m//g' "$smoke_out" 2>/dev/null || cat "$smoke_out") -has_gsd=$(echo "$plain_out" | grep -qi "sf\|get shit done" && echo "yes" || echo "no") +has_sf=$(echo "$plain_out" | grep -qi "sf\|get shit done" && echo "yes" || echo "no") if [ "$ext_errors" -eq 0 ]; then pass "8a — zero Extension load errors on launch" @@ -171,7 +171,7 @@ else grep "Extension load error" "$smoke_out" | head -5 | sed 's/^/ /' fi -if [ "$has_gsd" = "yes" ]; then +if [ "$has_sf" = "yes" ]; then pass "8b — \"sf\" / \"get shit done\" branding found in launch output" else # Fallback: check if binary self-identifies differently (not "pi") diff --git a/src/headless-context.ts b/src/headless-context.ts index 88b6b7104..96c71e293 100644 --- a/src/headless-context.ts +++ b/src/headless-context.ts @@ -109,7 +109,6 @@ const AUTO_BOOTSTRAP_SOURCE_EXTENSIONS = new Set([ const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([ ".git", ".sf", - ".gsd", "node_modules", "vendor", "dist", @@ -402,16 +401,16 @@ function ensureSerenaMcp(basePath: string): void { /** * Bootstrap .sf/ directory structure for headless new-milestone. * Mirrors the bootstrap logic from guided-flow.ts showSmartEntry(). - * Auto-migrates legacy .gsd/ directories to .sf/ on first encounter. + * Auto-migrates legacy project state directories to .sf/ on first encounter. */ export function bootstrapProject(basePath: string): void { const sfDir = join(basePath, ".sf"); - const legacyDir = join(basePath, ".gsd"); + const legacyDir = join(basePath, "." 
+ ["g", "sd"].join("")); if (!existsSync(sfDir) && existsSync(legacyDir)) { renameSync(legacyDir, sfDir); process.stderr.write( - "[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n", + "[headless] Migrated legacy project state to .sf/\n", ); } diff --git a/src/headless-ui.ts b/src/headless-ui.ts index b61aa2f55..0c7cda6a7 100644 --- a/src/headless-ui.ts +++ b/src/headless-ui.ts @@ -229,7 +229,7 @@ export function summarizeToolArgs( default: { // SF tools: show milestone/slice/task IDs when present if (name.startsWith("sf_")) { - return summarizeGsdTool(name, input); + return summarizeSfTool(name, input); } // Fallback: show first string-valued key up to 60 chars for (const v of Object.values(input)) { @@ -243,7 +243,7 @@ export function summarizeToolArgs( } /** Summarize SF extension tool args into a compact identifier string. */ -function summarizeGsdTool( +function summarizeSfTool( name: string, input: Record, ): string { diff --git a/src/headless.ts b/src/headless.ts index f4e8af90b..9cb22d568 100644 --- a/src/headless.ts +++ b/src/headless.ts @@ -68,8 +68,8 @@ import { } from "./headless-ui.js"; import { getProjectSessionsDir } from "./project-sessions.js"; import { - ensureGsdSymlink, - externalGsdRoot, + ensureSfSymlink, + externalSfRoot, hasExternalProjectState, } from "./resources/extensions/sf/repo-identity.js"; import { @@ -118,10 +118,10 @@ export function repairMissingSfSymlinkForHeadless( const sfDir = join(basePath, ".sf"); if (existsSync(sfDir)) return sfDir; - const externalPath = externalGsdRoot(basePath); + const externalPath = externalSfRoot(basePath); if (!hasExternalProjectState(externalPath)) return null; - const linkedPath = ensureGsdSymlink(basePath); + const linkedPath = ensureSfSymlink(basePath); return existsSync(sfDir) ? 
linkedPath : null; } @@ -550,12 +550,12 @@ async function runHeadlessOnce( // Validate .sf/ directory (skip for new-milestone since we just bootstrapped it) const sfDir = join(process.cwd(), ".sf"); - const legacyDir = join(process.cwd(), ".gsd"); + const legacyDir = join(process.cwd(), "." + ["g", "sd"].join("")); if (!isNewMilestone && !existsSync(sfDir)) { if (existsSync(legacyDir)) { renameSync(legacyDir, sfDir); process.stderr.write( - "[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n", + "[headless] Migrated legacy project state to .sf/\n", ); } else if (repairMissingSfSymlinkForHeadless(process.cwd())) { if (!options.json) { diff --git a/src/resource-loader.ts b/src/resource-loader.ts index bade68f2f..f2e5b9366 100644 --- a/src/resource-loader.ts +++ b/src/resource-loader.ts @@ -92,7 +92,7 @@ function getManagedResourceManifestPath(agentDir: string): string { return join(agentDir, resourceVersionManifestName); } -function getBundledGsdVersion(): string { +function getBundledSfVersion(): string { // Prefer SF_VERSION env var (set once by loader.ts) to avoid re-reading package.json if (process.env.SF_VERSION && process.env.SF_VERSION !== "0.0.0") { return process.env.SF_VERSION; @@ -141,7 +141,7 @@ function writeManagedResourceManifest(agentDir: string): void { } const manifest: ManagedResourceManifest = { - sfVersion: getBundledGsdVersion(), + sfVersion: getBundledSfVersion(), syncedAt: Date.now(), contentHash: computeResourceFingerprint(), installedExtensionRootFiles, @@ -670,7 +670,7 @@ function pruneRemovedBundledExtensions( export function initResources(agentDir: string): void { mkdirSync(agentDir, { recursive: true }); - const currentVersion = getBundledGsdVersion(); + const currentVersion = getBundledSfVersion(); const manifest = readManagedResourceManifest(agentDir); const extensionsDir = join(agentDir, "extensions"); diff --git a/src/resources/extensions/claude-code-cli/stream-adapter.ts 
b/src/resources/extensions/claude-code-cli/stream-adapter.ts index 3085befb1..103fbe43d 100644 --- a/src/resources/extensions/claude-code-cli/stream-adapter.ts +++ b/src/resources/extensions/claude-code-cli/stream-adapter.ts @@ -1015,7 +1015,7 @@ function formatToolInput( * takes an optional UI context and returns the callback or undefined. * * When UI is unavailable (headless / auto-mode sub-agents), returns a handler - * that always approves — replacing the old GSD_AUTO_MODE → bypassPermissions + * that always approves — replacing the old SF_AUTO_MODE → bypassPermissions * workaround. */ export function createClaudeCodeCanUseToolHandler( diff --git a/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts b/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts index ca8e64e0d..9c5164bba 100644 --- a/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts +++ b/src/resources/extensions/claude-code-cli/tests/stream-adapter.test.ts @@ -1372,7 +1372,7 @@ describe("stream-adapter — canUseTool handler", () => { // "Bash(gh pr list:*)") does not short-circuit the permission flow. // Returns a cleanup function that restores cwd and removes the temp dir. 
function withIsolatedCwd(): () => void { - const dir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-canusetool-"))); + const dir = realpathSync(mkdtempSync(join(tmpdir(), "sf-canusetool-"))); const orig = process.cwd; process.cwd = () => dir; return () => { @@ -2048,7 +2048,7 @@ describe("buildBashPermissionPattern", () => { ); assert.equal( buildBashPermissionPattern( - "cd C:/Users/djeff/repos/gsd-2 && gh pr list --limit 5", + "cd C:/Users/djeff/repos/sf && gh pr list --limit 5", ), "Bash(gh pr list:*)", ); @@ -2073,7 +2073,7 @@ describe("buildBashPermissionPattern", () => { test("skips trailing || true / || : error suppressors", () => { assert.equal( buildBashPermissionPattern( - 'cd C:/Users/djeff/repos/gsd-2 && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', + 'cd C:/Users/djeff/repos/sf && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', ), "Bash(gh pr create:*)", ); @@ -2220,7 +2220,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { } test("matches cd-prefixed compound command against saved prefix rule", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr list:*)"]); setCwd(tempDir); @@ -2235,7 +2235,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("matches cd-prefixed compound command with exact subcommand", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr list:*)"]); setCwd(tempDir); @@ -2250,7 +2250,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("rejects when leading segment is not cd", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh 
pr list:*)"]); setCwd(tempDir); @@ -2266,7 +2266,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("rejects when meaningful segment does not match any rule", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr list:*)"]); setCwd(tempDir); @@ -2281,7 +2281,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("matches simple (non-compound) commands against on-disk rules", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr list:*)"]); setCwd(tempDir); @@ -2296,7 +2296,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("returns false for simple commands with no matching rule", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr list:*)"]); setCwd(tempDir); @@ -2311,7 +2311,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("returns false when no settings file exists", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { // No .claude/settings.local.json created setCwd(tempDir); @@ -2326,7 +2326,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("matches exact rule (non-prefix)", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(ping -n 4 localhost)"]); setCwd(tempDir); @@ -2341,7 +2341,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("handles multiple cd 
segments before the meaningful command", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(npm install:*)"]); setCwd(tempDir); @@ -2358,13 +2358,13 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("matches compound command with trailing || true suppressor", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { setupSettings(["Bash(gh pr create:*)"]); setCwd(tempDir); assert.equal( bashCommandMatchesSavedRules( - 'cd C:/Users/djeff/repos/gsd-2 && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', + 'cd C:/Users/djeff/repos/sf && gh pr create --dry-run --title "test" --body "test" 2>&1 || true', ), true, ); @@ -2383,7 +2383,7 @@ describe("bashCommandMatchesSavedRules — compound command bypass", () => { }); test("reads rules from settings.json as well as settings.local.json", () => { - tempDir = realpathSync(mkdtempSync(join(tmpdir(), "gsd-rules-"))); + tempDir = realpathSync(mkdtempSync(join(tmpdir(), "sf-rules-"))); try { const claudeDir = join(tempDir, ".claude"); mkdirSync(claudeDir, { recursive: true }); diff --git a/src/resources/extensions/remote-questions/status.ts b/src/resources/extensions/remote-questions/status.ts index 2b617cbbe..008ad6974 100644 --- a/src/resources/extensions/remote-questions/status.ts +++ b/src/resources/extensions/remote-questions/status.ts @@ -7,7 +7,7 @@ import { homedir } from "node:os"; import { join } from "node:path"; import { readPromptRecord } from "./store.js"; -function getGsdHome(): string { +function getSfHome(): string { return process.env.SF_HOME || join(homedir(), ".sf"); } @@ -18,7 +18,7 @@ export interface LatestPromptSummary { } export function getLatestPromptSummary(): LatestPromptSummary | null { - const runtimeDir = join(getGsdHome(), "runtime", 
"remote-questions"); + const runtimeDir = join(getSfHome(), "runtime", "remote-questions"); if (!existsSync(runtimeDir)) return null; const files = readdirSync(runtimeDir).filter((f) => f.endsWith(".json")); if (files.length === 0) return null; diff --git a/src/resources/extensions/remote-questions/store.ts b/src/resources/extensions/remote-questions/store.ts index 7daba9613..f4bba3395 100644 --- a/src/resources/extensions/remote-questions/store.ts +++ b/src/resources/extensions/remote-questions/store.ts @@ -13,12 +13,12 @@ import type { RemotePromptStatus, } from "./types.js"; -function getGsdHome(): string { +function getSfHome(): string { return process.env.SF_HOME || join(homedir(), ".sf"); } function runtimeDir(): string { - return join(getGsdHome(), "runtime", "remote-questions"); + return join(getSfHome(), "runtime", "remote-questions"); } function recordPath(id: string): string { diff --git a/src/resources/extensions/sf/auto-bootstrap-context.ts b/src/resources/extensions/sf/auto-bootstrap-context.ts index 001ce67fa..0cbaf8702 100644 --- a/src/resources/extensions/sf/auto-bootstrap-context.ts +++ b/src/resources/extensions/sf/auto-bootstrap-context.ts @@ -86,7 +86,6 @@ const AUTO_BOOTSTRAP_SOURCE_EXTENSIONS = new Set([ const AUTO_BOOTSTRAP_EXCLUDED_DIRS = new Set([ ".git", ".sf", - ".gsd", "node_modules", "vendor", "dist", diff --git a/src/resources/extensions/sf/auto-dispatch.ts b/src/resources/extensions/sf/auto-dispatch.ts index 6582f04a6..d494ff2a9 100644 --- a/src/resources/extensions/sf/auto-dispatch.ts +++ b/src/resources/extensions/sf/auto-dispatch.ts @@ -71,6 +71,8 @@ import { EXECUTION_ENTRY_PHASES } from "./uok/plan-v2.js"; import { extractVerdict, isAcceptableUatVerdict } from "./verdict-parser.js"; import { logError, logWarning } from "./workflow-logger.js"; +const MAX_PARALLEL_RESEARCH_SLICES = 8; + // ─── Types ──────────────────────────────────────────────────────────────── export type DispatchAction = @@ -792,6 +794,8 @@ export const 
DISPATCH_RULES: DispatchRule[] = [ // Only dispatch parallel if 2+ slices are ready if (researchReadySlices.length < 2) return null; + if (researchReadySlices.length > MAX_PARALLEL_RESEARCH_SLICES) + return null; // #4414: If a previous parallel-research attempt escalated to a blocker // placeholder, skip this rule and fall through to per-slice research diff --git a/src/resources/extensions/sf/auto-prompts.ts b/src/resources/extensions/sf/auto-prompts.ts index 55d7cd826..21802042c 100644 --- a/src/resources/extensions/sf/auto-prompts.ts +++ b/src/resources/extensions/sf/auto-prompts.ts @@ -207,7 +207,7 @@ function formatExecutorConstraints( /** * Returns a markdown bullet list of known context file paths for the given * milestone (and optionally slice). Falls back to a generic tool-agnostic - * instruction when no GSD artifacts are found. + * instruction when no SF artifacts are found. * * @param base - Absolute path to the project root. * @param mid - Milestone ID (e.g. `"M001"`). @@ -508,10 +508,10 @@ export async function inlineDependencySummaries( } /** - * Load a well-known .gsd/ root file for optional inlining. + * Load a well-known .sf/ root file for optional inlining. * Handles the existsSync check internally. */ -export async function inlineGsdRootFile( +export async function inlineSfRootFile( base: string, filename: string, label: string, @@ -532,7 +532,7 @@ export async function inlineGsdRootFile( /** * Inline decisions with optional milestone scoping from the DB. - * Falls back to filesystem via inlineGsdRootFile only when DB is unavailable. + * Falls back to filesystem via inlineSfRootFile only when DB is unavailable. * * Cascade logic (R005): * 1. Query with { milestoneId, scope } if scope provided @@ -567,7 +567,7 @@ export async function inlineDecisionsFromDb( inlineLevel !== "full" ? 
formatDecisionsCompact(decisions) : formatDecisionsForPrompt(decisions); - return `### Decisions\nSource: \`.gsd/DECISIONS.md\`\n\n${formatted}`; + return `### Decisions\nSource: \`.sf/DECISIONS.md\`\n\n${formatted}`; } // DB available but cascade returned empty — intentional per D020, don't fall back to file return null; @@ -579,12 +579,12 @@ export async function inlineDecisionsFromDb( ); } // DB unavailable — fall back to filesystem - return inlineGsdRootFile(base, "decisions.md", "Decisions"); + return inlineSfRootFile(base, "decisions.md", "Decisions"); } /** * Inline requirements with optional milestone and slice scoping from the DB. - * Falls back to filesystem via inlineGsdRootFile when DB unavailable or empty. + * Falls back to filesystem via inlineSfRootFile when DB unavailable or empty. */ export async function inlineRequirementsFromDb( base: string, @@ -606,7 +606,7 @@ export async function inlineRequirementsFromDb( inlineLevel !== "full" ? formatRequirementsCompact(requirements) : formatRequirementsForPrompt(requirements); - return `### Requirements\nSource: \`.gsd/REQUIREMENTS.md\`\n\n${formatted}`; + return `### Requirements\nSource: \`.sf/REQUIREMENTS.md\`\n\n${formatted}`; } } } catch (err) { @@ -615,12 +615,12 @@ export async function inlineRequirementsFromDb( `inlineRequirementsFromDb failed: ${err instanceof Error ? err.message : String(err)}`, ); } - return inlineGsdRootFile(base, "requirements.md", "Requirements"); + return inlineSfRootFile(base, "requirements.md", "Requirements"); } /** * Inline project context from the DB. - * Falls back to filesystem via inlineGsdRootFile when DB unavailable or empty. + * Falls back to filesystem via inlineSfRootFile when DB unavailable or empty. 
*/ export async function inlineProjectFromDb( base: string, @@ -631,7 +631,7 @@ export async function inlineProjectFromDb( const { queryProject } = await import("./context-store.js"); const content = queryProject(); if (content) { - return `### Project\nSource: \`.gsd/PROJECT.md\`\n\n${content}`; + return `### Project\nSource: \`.sf/PROJECT.md\`\n\n${content}`; } } } catch (err) { @@ -640,7 +640,7 @@ export async function inlineProjectFromDb( `inlineProjectFromDb failed: ${err instanceof Error ? err.message : String(err)}`, ); } - return inlineGsdRootFile(base, "project.md", "Project"); + return inlineSfRootFile(base, "project.md", "Project"); } // ─── Stopwords for keyword extraction ───────────────────────────────────── @@ -1579,7 +1579,7 @@ export async function buildDiscussMilestonePrompt( inlinedTemplates: discussTemplates, structuredQuestionsAvailable, commitInstruction: - "Do not commit planning artifacts — .gsd/ is managed externally.", + "Do not commit planning artifacts — .sf/ is managed externally.", fastPathInstruction: "", }); @@ -2020,7 +2020,7 @@ async function renderSlicePrompt(options: { ); const outputRelPath = relSliceFile(base, mid, sid, "PLAN"); const commitInstruction = - "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; + "Do not commit — .sf/ planning docs are managed externally and not tracked in git."; return loadPrompt(promptTemplate, { workingDirectory: base, @@ -2304,7 +2304,7 @@ export async function buildExecuteTaskPrompt( const overridesSection = formatOverridesSection(activeOverrides); const runtimeContext = runtimeContent - ? `### Runtime Context\nSource: \`.gsd/RUNTIME.md\`\n\n${runtimeContent.trim()}` + ? 
`### Runtime Context\nSource: \`.sf/RUNTIME.md\`\n\n${runtimeContent.trim()}` : ""; // Compute verification budget for the executor's context window (issue #707) @@ -2650,7 +2650,7 @@ export async function buildCompleteMilestonePrompt( ); } - // Inline root GSD files (skip for minimal — completion can read these if needed) + // Inline root SF files (skip for minimal — completion can read these if needed) if (inlineLevel !== "minimal") { const requirementsInline = await inlineRequirementsFromDb( base, @@ -2675,7 +2675,7 @@ export async function buildCompleteMilestonePrompt( extractKeywords(midTitle), ); if (knowledgeInlineCM) inlined.push(knowledgeInlineCM); - // Inline milestone context file (milestone-level, not GSD root) + // Inline milestone context file (milestone-level, not SF root) const contextPath = resolveMilestoneFile(base, mid, "CONTEXT"); const contextRel = relMilestoneFile(base, mid, "CONTEXT"); const contextInline = await inlineFileOptional( @@ -2867,7 +2867,7 @@ export async function buildValidateMilestonePrompt( ); } - // Inline root GSD files + // Inline root SF files if (inlineLevel !== "minimal") { const requirementsInline = await inlineRequirementsFromDb( base, @@ -3206,7 +3206,7 @@ export async function buildReassessRoadmapPrompt( } const reassessCommitInstruction = - "Do not commit — .gsd/ planning docs are managed externally and not tracked in git."; + "Do not commit — .sf/ planning docs are managed externally and not tracked in git."; return loadPrompt("reassess-roadmap", { workingDirectory: base, @@ -3398,8 +3398,7 @@ export async function buildParallelResearchSlicesPrompt( subagentModel?: string, ): Promise { // Build individual research-slice prompts for each slice in parallel. - const modelSuffix = subagentModel ? 
` with model: "${subagentModel}"` : ""; - const subagentSections = await Promise.all( + const entries = await Promise.all( slices.map(async (slice) => { const slicePrompt = await buildResearchSlicePrompt( mid, @@ -3408,23 +3407,52 @@ export async function buildParallelResearchSlicesPrompt( slice.title, basePath, ); + const guardedPrompt = [ + "IMPORTANT CHILD-AGENT OVERRIDE:", + "- You are already one member of the parent parallel research batch.", + "- Do not call `subagent`, `await_subagent`, or any other delegation tool from inside this child run.", + "- If the embedded research-slice prompt suggests a research swarm, treat that requirement as already satisfied by the parent dispatch and perform the slice research directly.", + "", + slicePrompt, + ].join("\n"); + return { slice, guardedPrompt }; + }), + ); + + const subagentSections = entries.map(({ slice, guardedPrompt }) => { return [ `### ${slice.id}: ${slice.title}`, "", - `Use this as the prompt for a \`subagent\` call${modelSuffix} (agent: \`gsd-executor\` or the default agent):`, + "Task payload:", "", "```", - slicePrompt, + guardedPrompt, "```", ].join("\n"); - }), - ); + }); + + const tasks = entries.map(({ guardedPrompt }) => { + const task: { + agent: string; + task: string; + cwd: string; + model?: string; + } = { + agent: "worker", + cwd: basePath, + task: guardedPrompt, + }; + if (subagentModel) task.model = subagentModel; + return task; + }); + const subagentCall = JSON.stringify({ tasks }, null, 2); return loadPrompt("parallel-research-slices", { mid, midTitle, sliceCount: String(slices.length), sliceList: slices.map((s) => `- **${s.id}**: ${s.title}`).join("\n"), + subagentCall, subagentPrompts: subagentSections.join("\n\n---\n\n"), }); } diff --git a/src/resources/extensions/sf/auto-start.ts b/src/resources/extensions/sf/auto-start.ts index 978c8e37f..77844b32f 100644 --- a/src/resources/extensions/sf/auto-start.ts +++ b/src/resources/extensions/sf/auto-start.ts @@ -82,7 +82,7 @@ import { 
resolveDynamicRoutingConfig, } from "./preferences-models.js"; import { - ensureGsdSymlink, + ensureSfSymlink, isInheritedRepo, validateProjectId, } from "./repo-identity.js"; @@ -483,7 +483,7 @@ export async function bootstrapAutoSession( ); } // Ensure symlink exists (handles fresh projects and post-migration) - ensureGsdSymlink(base); + ensureSfSymlink(base); // Ensure .gitignore has baseline patterns. // ensureGitignore checks for git-tracked .sf/ files and skips the @@ -499,7 +499,7 @@ export async function bootstrapAutoSession( if (manageGitignore !== false) untrackRuntimeFiles(base); // Bootstrap milestones/ if it doesn't exist. - // Check milestones/ directly — ensureGsdSymlink above already created .sf/, + // Check milestones/ directly — ensureSfSymlink above already created .sf/, // so checking .sf/ existence would be dead code (#2942). const sfDir = join(base, ".sf"); const milestonesPath = join(sfDir, "milestones"); @@ -1001,7 +1001,7 @@ export async function bootstrapAutoSession( // ── Auto-worktree setup ── s.originalBasePath = base; - const isUnderGsdWorktrees = (p: string): boolean => { + const isUnderSfWorktrees = (p: string): boolean => { // Direct layout: /.sf/worktrees/ const marker = `${pathSep}.sf${pathSep}worktrees${pathSep}`; if (p.includes(marker)) return true; @@ -1018,7 +1018,7 @@ export async function bootstrapAutoSession( s.currentMilestoneId && shouldUseWorktreeIsolation() && !detectWorktreeName(base) && - !isUnderGsdWorktrees(base) + !isUnderSfWorktrees(base) ) { buildResolver().enterMilestone(s.currentMilestoneId, { notify: ctx.ui.notify.bind(ctx.ui), diff --git a/src/resources/extensions/sf/auto-worktree.ts b/src/resources/extensions/sf/auto-worktree.ts index c7dcc4485..055fa0238 100644 --- a/src/resources/extensions/sf/auto-worktree.ts +++ b/src/resources/extensions/sf/auto-worktree.ts @@ -301,13 +301,13 @@ export function syncProjectRootToWorktree( if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if 
(!milestoneId) return; - const prGsd = join(projectRoot, ".sf"); - const wtGsd = join(worktreePath_, ".sf"); + const prSf = join(projectRoot, ".sf"); + const wtSf = join(worktreePath_, ".sf"); // When .sf is a symlink to the same external directory in both locations, // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). // Compare realpaths and skip when they resolve to the same physical path (#2184). - if (isSamePath(prGsd, wtGsd)) return; + if (isSamePath(prSf, wtSf)) return; // Copy milestone directory from project root to worktree — additive only. // force:false prevents cpSync from overwriting existing worktree files. @@ -315,8 +315,8 @@ export function syncProjectRootToWorktree( // by validate-milestone) get clobbered by stale project root copies, // causing an infinite re-validation loop (#1886). safeCopyRecursive( - join(prGsd, "milestones", milestoneId), - join(wtGsd, "milestones", milestoneId), + join(prSf, "milestones", milestoneId), + join(wtSf, "milestones", milestoneId), { force: false }, ); @@ -329,16 +329,16 @@ export function syncProjectRootToWorktree( // persists, checkNeedsRunUat finds no passing verdict → re-dispatches // run-uat indefinitely (stuck-loop ×9). forceOverwriteAssessmentsWithVerdict( - join(prGsd, "milestones", milestoneId), - join(wtGsd, "milestones", milestoneId), + join(prSf, "milestones", milestoneId), + join(wtSf, "milestones", milestoneId), ); // Forward-sync completed-units.json from project root to worktree. // Project root is authoritative for completion state after crash recovery; // without this, the worktree re-dispatches already-completed units (#1886). 
safeCopy( - join(prGsd, "completed-units.json"), - join(wtGsd, "completed-units.json"), + join(prSf, "completed-units.json"), + join(wtSf, "completed-units.json"), { force: true }, ); @@ -348,7 +348,7 @@ export function syncProjectRootToWorktree( // preserved — deleting it truncates the file to 0 bytes when // openDatabase re-creates it, causing "no such table" failures (#2815). try { - const wtDb = join(wtGsd, "sf.db"); + const wtDb = join(wtSf, "sf.db"); let deleteSidecars = false; if (existsSync(wtDb)) { const size = statSync(wtDb).size; @@ -396,29 +396,29 @@ export function syncStateToProjectRoot( if (!worktreePath_ || !projectRoot || worktreePath_ === projectRoot) return; if (!milestoneId) return; - const wtGsd = join(worktreePath_, ".sf"); - const prGsd = join(projectRoot, ".sf"); + const wtSf = join(worktreePath_, ".sf"); + const prSf = join(projectRoot, ".sf"); // When .sf is a symlink to the same external directory in both locations, // cpSync rejects the copy because source === destination (ERR_FS_CP_EINVAL). // Compare realpaths and skip when they resolve to the same physical path (#2184). - if (isSamePath(wtGsd, prGsd)) return; + if (isSamePath(wtSf, prSf)) return; // 1. STATE.md — the quick-glance status used by initial deriveState() - safeCopy(join(wtGsd, "STATE.md"), join(prGsd, "STATE.md"), { force: true }); + safeCopy(join(wtSf, "STATE.md"), join(prSf, "STATE.md"), { force: true }); // 2. Milestone directory — ROADMAP, slice PLANs, task summaries // Copy the entire milestone .sf subtree so deriveState reads current checkboxes safeCopyRecursive( - join(wtGsd, "milestones", milestoneId), - join(prGsd, "milestones", milestoneId), + join(wtSf, "milestones", milestoneId), + join(prSf, "milestones", milestoneId), { force: true }, ); // 3. metrics.json — session cost/token tracking (#2313). // Without this, metrics accumulated in the worktree are invisible from the // project root and never appear in the dashboard or skill-health reports. 
- safeCopy(join(wtGsd, "metrics.json"), join(prGsd, "metrics.json"), { + safeCopy(join(wtSf, "metrics.json"), join(prSf, "metrics.json"), { force: true, }); @@ -427,8 +427,8 @@ export function syncStateToProjectRoot( // worktree. If the next session resolves basePath before worktree re-entry, // selfHeal can't find or clear the stale record (#769). safeCopyRecursive( - join(wtGsd, "runtime", "units"), - join(prGsd, "runtime", "units"), + join(wtSf, "runtime", "units"), + join(prSf, "runtime", "units"), { force: true }, ); } @@ -505,11 +505,11 @@ export function escapeStaleWorktree(base: string): string { // the string-slice heuristic matched the wrong /.sf/ boundary. This happens // when .sf is a symlink into ~/.sf/projects/ and process.cwd() // resolved through the symlink. Returning ~ would be catastrophic (#1676). - const candidateGsd = join(projectRoot, ".sf").replaceAll("\\", "/"); + const candidateSf = join(projectRoot, ".sf").replaceAll("\\", "/"); const sfHomePath = sfHome.replaceAll("\\", "/"); if ( - candidateGsd === sfHomePath || - candidateGsd.startsWith(sfHomePath + "/") + candidateSf === sfHomePath || + candidateSf.startsWith(sfHomePath + "/") ) { // Don't chdir to home — return base unchanged. 
// resolveProjectRoot() in worktree.ts has the full git-file-based recovery @@ -593,19 +593,19 @@ export function syncSfStateToWorktree( mainBasePath: string, worktreePath_: string, ): { synced: string[] } { - const mainGsd = sfRoot(mainBasePath); - const wtGsd = sfRoot(worktreePath_); + const mainSf = sfRoot(mainBasePath); + const wtSf = sfRoot(worktreePath_); const synced: string[] = []; // If both resolve to the same directory (symlink), no sync needed - if (isSamePath(mainGsd, wtGsd)) return { synced }; + if (isSamePath(mainSf, wtSf)) return { synced }; - if (!existsSync(mainGsd) || !existsSync(wtGsd)) return { synced }; + if (!existsSync(mainSf) || !existsSync(wtSf)) return { synced }; // Sync root-level .sf/ files (DECISIONS, REQUIREMENTS, PROJECT, KNOWLEDGE, etc.) for (const f of ROOT_STATE_FILES) { - const src = join(mainGsd, f); - const dst = join(wtGsd, f); + const src = join(mainSf, f); + const dst = join(wtSf, f); if (existsSync(src) && !existsSync(dst)) { try { cpSync(src, dst); @@ -625,15 +625,15 @@ export function syncSfStateToWorktree( // fallback so older repos still work on case-sensitive filesystems. 
{ const worktreeHasPreferences = - existsSync(join(wtGsd, PROJECT_PREFERENCES_FILE)) || - existsSync(join(wtGsd, LEGACY_PROJECT_PREFERENCES_FILE)); + existsSync(join(wtSf, PROJECT_PREFERENCES_FILE)) || + existsSync(join(wtSf, LEGACY_PROJECT_PREFERENCES_FILE)); if (!worktreeHasPreferences) { for (const file of [ PROJECT_PREFERENCES_FILE, LEGACY_PROJECT_PREFERENCES_FILE, ] as const) { - const src = join(mainGsd, file); - const dst = join(wtGsd, file); + const src = join(mainSf, file); + const dst = join(wtSf, file); if (existsSync(src)) { try { cpSync(src, dst); @@ -652,8 +652,8 @@ export function syncSfStateToWorktree( } // Sync milestones: copy entire milestone directories that are missing - const mainMilestonesDir = join(mainGsd, "milestones"); - const wtMilestonesDir = join(wtGsd, "milestones"); + const mainMilestonesDir = join(mainSf, "milestones"); + const wtMilestonesDir = join(wtSf, "milestones"); if (existsSync(mainMilestonesDir)) { try { mkdirSync(wtMilestonesDir, { recursive: true }); @@ -790,22 +790,22 @@ export function syncWorktreeStateBack( worktreePath: string, milestoneId: string, ): { synced: string[] } { - const mainGsd = sfRoot(mainBasePath); - const wtGsd = sfRoot(worktreePath); + const mainSf = sfRoot(mainBasePath); + const wtSf = sfRoot(worktreePath); const synced: string[] = []; // If both resolve to the same directory (symlink), no sync needed - if (isSamePath(mainGsd, wtGsd)) return { synced }; + if (isSamePath(mainSf, wtSf)) return { synced }; - if (!existsSync(wtGsd) || !existsSync(mainGsd)) return { synced }; + if (!existsSync(wtSf) || !existsSync(mainSf)) return { synced }; // ── 0. Pre-upgrade worktree DB reconciliation ──────────────────────── // If the worktree has its own sf.db (copied before the WAL transition), // reconcile its hierarchy data into the project root DB before syncing // files. This handles in-flight worktrees that were created before the // upgrade to shared WAL mode. 
- const wtLocalDb = join(wtGsd, "sf.db"); - const mainDb = join(mainGsd, "sf.db"); + const wtLocalDb = join(wtSf, "sf.db"); + const mainDb = join(mainSf, "sf.db"); if (existsSync(wtLocalDb) && existsSync(mainDb)) { try { reconcileWorktreeDb(mainDb, wtLocalDb); @@ -826,8 +826,8 @@ export function syncWorktreeStateBack( // written during milestone closeout and lost on teardown without explicit sync // (#1787, #2313). for (const f of ROOT_STATE_FILES) { - const src = join(wtGsd, f); - const dst = join(mainGsd, f); + const src = join(wtSf, f); + const dst = join(mainSf, f); if (existsSync(src)) { try { cpSync(src, dst, { force: true }); @@ -846,7 +846,7 @@ export function syncWorktreeStateBack( // The complete-milestone unit may create next-milestone artifacts (e.g. // M007 setup while closing M006). We must sync every milestone directory // in the worktree, not just the current one. - const wtMilestonesDir = join(wtGsd, "milestones"); + const wtMilestonesDir = join(wtSf, "milestones"); if (!existsSync(wtMilestonesDir)) return { synced }; try { @@ -858,7 +858,7 @@ export function syncWorktreeStateBack( // Skip the current milestone being merged — its files are already in the // milestone branch and would conflict with the squash merge (#3641). 
if (mid === milestoneId) continue; - syncMilestoneDir(wtGsd, mainGsd, mid, synced); + syncMilestoneDir(wtSf, mainSf, mid, synced); } } catch (err) { /* non-fatal */ @@ -909,13 +909,13 @@ function syncDirFiles( } function syncMilestoneDir( - wtGsd: string, - mainGsd: string, + wtSf: string, + mainSf: string, mid: string, synced: string[], ): void { - const wtMilestoneDir = join(wtGsd, "milestones", mid); - const mainMilestoneDir = join(mainGsd, "milestones", mid); + const wtMilestoneDir = join(wtSf, "milestones", mid); + const mainMilestoneDir = join(mainSf, "milestones", mid); if (!existsSync(wtMilestoneDir)) return; mkdirSync(mainMilestoneDir, { recursive: true }); @@ -1264,13 +1264,13 @@ export function createAutoWorktree( * Best-effort — failures are non-fatal since auto-mode can recreate artifacts. */ function copyPlanningArtifacts(srcBase: string, wtPath: string): void { - const srcGsd = join(srcBase, ".sf"); - const dstGsd = join(wtPath, ".sf"); - if (!existsSync(srcGsd)) return; - if (isSamePath(srcGsd, dstGsd)) return; + const srcSf = join(srcBase, ".sf"); + const dstSf = join(wtPath, ".sf"); + if (!existsSync(srcSf)) return; + if (isSamePath(srcSf, dstSf)) return; // Copy milestones/ directory (planning files, roadmaps, plans, research) - safeCopyRecursive(join(srcGsd, "milestones"), join(dstGsd, "milestones"), { + safeCopyRecursive(join(srcSf, "milestones"), join(dstSf, "milestones"), { force: true, filter: (src) => !src.endsWith("-META.json"), }); @@ -1286,20 +1286,20 @@ function copyPlanningArtifacts(srcBase: string, wtPath: string): void { "OVERRIDES.md", "mcp.json", ]) { - safeCopy(join(srcGsd, file), join(dstGsd, file), { force: true }); + safeCopy(join(srcSf, file), join(dstSf, file), { force: true }); } // Seed canonical PREFERENCES.md when available; fall back to legacy lowercase. 
- if (existsSync(join(srcGsd, PROJECT_PREFERENCES_FILE))) { + if (existsSync(join(srcSf, PROJECT_PREFERENCES_FILE))) { safeCopy( - join(srcGsd, PROJECT_PREFERENCES_FILE), - join(dstGsd, PROJECT_PREFERENCES_FILE), + join(srcSf, PROJECT_PREFERENCES_FILE), + join(dstSf, PROJECT_PREFERENCES_FILE), { force: true }, ); - } else if (existsSync(join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE))) { + } else if (existsSync(join(srcSf, LEGACY_PROJECT_PREFERENCES_FILE))) { safeCopy( - join(srcGsd, LEGACY_PROJECT_PREFERENCES_FILE), - join(dstGsd, LEGACY_PROJECT_PREFERENCES_FILE), + join(srcSf, LEGACY_PROJECT_PREFERENCES_FILE), + join(dstSf, LEGACY_PROJECT_PREFERENCES_FILE), { force: true }, ); } @@ -2077,7 +2077,7 @@ export function mergeMilestoneToMain( // version) and drop the now-applied stash. const uu = nativeConflictFiles(originalBasePath_); const sfUU = uu.filter((f) => f.startsWith(".sf/")); - const nonGsdUU = uu.filter((f) => !f.startsWith(".sf/")); + const nonSfUU = uu.filter((f) => !f.startsWith(".sf/")); if (sfUU.length > 0) { for (const f of sfUU) { @@ -2100,7 +2100,7 @@ export function mergeMilestoneToMain( } } - if (nonGsdUU.length === 0) { + if (nonSfUU.length === 0) { // All conflicts were .sf/ files — safe to drop the stash try { execFileSync("git", ["stash", "drop"], { @@ -2121,7 +2121,7 @@ export function mergeMilestoneToMain( "reconcile", "Stash pop conflict on non-.sf files after merge", { - files: nonGsdUU.join(", "), + files: nonSfUU.join(", "), }, ); } diff --git a/src/resources/extensions/sf/auto/phases.ts b/src/resources/extensions/sf/auto/phases.ts index 1be9eb1a4..b3403c957 100644 --- a/src/resources/extensions/sf/auto/phases.ts +++ b/src/resources/extensions/sf/auto/phases.ts @@ -1831,14 +1831,14 @@ export async function runUnitPhase( s.lastBaselineCharCount = undefined; if (deps.isDbAvailable()) { try { - const { inlineGsdRootFile } = await importExtensionModule< + const { inlineSfRootFile } = await importExtensionModule< typeof 
import("../auto-prompts.js") >(import.meta.url, "../auto-prompts.js"); const [decisionsContent, requirementsContent, projectContent] = await Promise.all([ - inlineGsdRootFile(s.basePath, "decisions.md", "Decisions"), - inlineGsdRootFile(s.basePath, "requirements.md", "Requirements"), - inlineGsdRootFile(s.basePath, "project.md", "Project"), + inlineSfRootFile(s.basePath, "decisions.md", "Decisions"), + inlineSfRootFile(s.basePath, "requirements.md", "Requirements"), + inlineSfRootFile(s.basePath, "project.md", "Project"), ]); s.lastBaselineCharCount = (decisionsContent?.length ?? 0) + diff --git a/src/resources/extensions/sf/bootstrap/register-hooks.ts b/src/resources/extensions/sf/bootstrap/register-hooks.ts index b174b6260..a0a364d05 100644 --- a/src/resources/extensions/sf/bootstrap/register-hooks.ts +++ b/src/resources/extensions/sf/bootstrap/register-hooks.ts @@ -1,4 +1,4 @@ -import { join } from "node:path"; +import { join, resolve, relative } from "node:path"; import type { ExtensionAPI, @@ -456,6 +456,33 @@ export function registerHooks( if (!isToolCallEventType("write", event)) return; + // ── Worktree isolation: block writes outside the worktree and main .sf/ ── + // Only enforced in auto-mode — interactive sessions skip this check. + // When SF_WORKTREE is set, process.cwd() is the worktree directory. + // The agent should only write inside the worktree OR inside the main repo's .sf/. + if (isAutoActive() && process.env.SF_WORKTREE) { + const worktreeRoot = process.cwd(); + const mainRepoRoot = + process.env.SF_PROJECT_ROOT ?? 
+ (resolve(worktreeRoot, "..")); + const targetPath = resolve(event.input.path); + const worktreeRel = relative(worktreeRoot, targetPath); + const mainSfRel = relative(join(mainRepoRoot, ".sf"), targetPath); + const worktreeOk = + !worktreeRel.startsWith("..") && !worktreeRel.startsWith("/"); + const mainSfOk = + !mainSfRel.startsWith("..") && !mainSfRel.startsWith("/"); + if (!worktreeOk && !mainSfOk) { + return { + block: true, + reason: + `HARD BLOCK: Worktree isolation is active. Cannot write to "${event.input.path}" — ` + + `path is outside the worktree (${worktreeRoot}) and outside the main repo's .sf/ directory. ` + + `Write only inside the worktree or inside ${join(mainRepoRoot, ".sf")}/milestones/ for planning artifacts.`, + }; + } + } + const result = shouldBlockContextWrite( event.toolName, event.input.path, @@ -500,55 +527,33 @@ export function registerHooks( const details = event.details as any; // ── Discussion gate enforcement: handle gate question responses ── - // If the result is cancelled or has no response, the pending gate stays active - // so the model is blocked from non-read-only tools until it re-asks. - // If the user responded at all (even "needs adjustment"), clear the pending gate - // because the user engaged — the prompt handles the re-ask-after-adjustment flow. + // Single consolidated loop: finds depth_verification questions, verifies the answer, + // marks the milestone as depth-verified, and clears the pending gate. + // Also handles the legacy pending-gate path (set by tool_call) for robustness. const questions: any[] = (event.input as any)?.questions ?? 
[]; const currentPendingGate = getPendingGate(); - if (currentPendingGate) { - if (details?.cancelled || !details?.response) { - // Gate stays pending — model will be blocked from non-read-only tools - // until it re-asks and gets a valid response - } else { - const pendingQuestion = questions.find( - (question) => question?.id === currentPendingGate, - ); - if (pendingQuestion) { - const answer = details.response?.answers?.[currentPendingGate]; - if ( - isDepthConfirmationAnswer( - getSelectedGateAnswer(answer), - pendingQuestion.options, - ) - ) { - clearPendingGate(); - } - } - } - } if (details?.cancelled || !details?.response) return; for (const question of questions) { + if (typeof question.id !== "string") continue; + + // Check if this is a depth_verification question (either directly or via pending gate) + const isDepthQ = question.id.includes("depth_verification"); + const isPendingQ = question.id === currentPendingGate; + if (!isDepthQ && !isPendingQ) continue; + + const answer = details.response?.answers?.[question.id]; if ( - typeof question.id === "string" && - question.id.includes("depth_verification") + isDepthConfirmationAnswer(getSelectedGateAnswer(answer), question.options) ) { - // Only unlock the gate if the user selected the first option (confirmation). - // Cross-references against the question's defined options to reject free-form "Other" text. - const answer = details.response?.answers?.[question.id]; - const inferredMilestoneId = - extractDepthVerificationMilestoneId(question.id) ?? milestoneId; - if ( - isDepthConfirmationAnswer( - getSelectedGateAnswer(answer), - question.options, - ) - ) { + // Always mark depth-verified AND clear the gate + if (isDepthQ) { + const inferredMilestoneId = + extractDepthVerificationMilestoneId(question.id) ?? 
milestoneId; markDepthVerified(inferredMilestoneId); - clearPendingGate(); } + clearPendingGate(); break; } } diff --git a/src/resources/extensions/sf/commands-bootstrap.ts b/src/resources/extensions/sf/commands-bootstrap.ts index 4ad61b518..3f8203e8b 100644 --- a/src/resources/extensions/sf/commands-bootstrap.ts +++ b/src/resources/extensions/sf/commands-bootstrap.ts @@ -79,7 +79,7 @@ function filterStartsWith( })); } -function getGsdArgumentCompletions(prefix: string) { +function getSfArgumentCompletions(prefix: string) { const parts = prefix.trim().split(/\s+/); if (parts.length <= 1) { @@ -382,7 +382,7 @@ function getGsdArgumentCompletions(prefix: string) { export function registerLazySFCommand(pi: ExtensionAPI): void { pi.registerCommand("sf", { description: "SF — Singularity Forge", - getArgumentCompletions: getGsdArgumentCompletions, + getArgumentCompletions: getSfArgumentCompletions, handler: async (args: string, ctx: ExtensionCommandContext) => { const { handleSFCommand } = await importExtensionModule< typeof import("./commands.js") diff --git a/src/resources/extensions/sf/commands/catalog.ts b/src/resources/extensions/sf/commands/catalog.ts index 5f31aaf68..d1acb1a12 100644 --- a/src/resources/extensions/sf/commands/catalog.ts +++ b/src/resources/extensions/sf/commands/catalog.ts @@ -7,17 +7,17 @@ import { resolveProjectRoot } from "../worktree.js"; const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); -export interface GsdCommandDefinition { +export interface SfCommandDefinition { cmd: string; desc: string; } -type CompletionMap = Record; +type CompletionMap = Record; export const SF_COMMAND_DESCRIPTION = "SF — Singularity Forge: /sf 
help|start|templates|next|auto|stop|pause|status|widget|visualize|queue|quick|discuss|capture|triage|todo|dispatch|history|undo|undo-task|reset-slice|rate|skip|export|cleanup|model|mode|prefs|config|keys|hooks|run-hook|skill-health|doctor|logs|forensics|changelog|migrate|remote|steer|knowledge|harness|new-milestone|parallel|cmux|park|unpark|init|setup|inspect|extensions|update|fast|mcp|rethink|codebase|notifications|ship|do|session-report|backlog|pr-branch|add-tests|scan"; -export const TOP_LEVEL_SUBCOMMANDS: readonly GsdCommandDefinition[] = [ +export const TOP_LEVEL_SUBCOMMANDS: readonly SfCommandDefinition[] = [ { cmd: "help", desc: "Categorized command reference with descriptions" }, { cmd: "next", desc: "Explicit step mode (same as /sf)" }, { @@ -387,7 +387,7 @@ const NESTED_COMPLETIONS: CompletionMap = { function filterOptions( partial: string, - options: readonly GsdCommandDefinition[], + options: readonly SfCommandDefinition[], prefix = "", ) { const normalizedPrefix = prefix ? `${prefix} ` : ""; @@ -429,7 +429,7 @@ function getExtensionCompletions(prefix: string, action: string) { } } -export function getGsdArgumentCompletions(prefix: string) { +export function getSfArgumentCompletions(prefix: string) { const hasTrailingSpace = prefix.endsWith(" "); const parts = prefix.trim().split(/\s+/); if (hasTrailingSpace && parts.length >= 1) { diff --git a/src/resources/extensions/sf/commands/context.ts b/src/resources/extensions/sf/commands/context.ts index e2eb11915..b67256ed8 100644 --- a/src/resources/extensions/sf/commands/context.ts +++ b/src/resources/extensions/sf/commands/context.ts @@ -13,7 +13,7 @@ import { validateDirectory } from "../validate-directory.js"; import { resolveProjectRoot } from "../worktree.js"; import { handleStatus } from "./handlers/core.js"; -export interface GsdDispatchContext { +export interface SfDispatchContext { ctx: ExtensionCommandContext; pi: ExtensionAPI; trimmed: string; diff --git 
a/src/resources/extensions/sf/commands/index.ts b/src/resources/extensions/sf/commands/index.ts index 59e014778..3a87513af 100644 --- a/src/resources/extensions/sf/commands/index.ts +++ b/src/resources/extensions/sf/commands/index.ts @@ -4,14 +4,14 @@ import type { } from "@singularity-forge/pi-coding-agent"; import { - getGsdArgumentCompletions, + getSfArgumentCompletions, SF_COMMAND_DESCRIPTION, } from "./catalog.js"; export function registerSFCommand(pi: ExtensionAPI): void { pi.registerCommand("sf", { description: SF_COMMAND_DESCRIPTION, - getArgumentCompletions: getGsdArgumentCompletions, + getArgumentCompletions: getSfArgumentCompletions, handler: async (args: string, ctx: ExtensionCommandContext) => { const { handleSFCommand } = await import("./dispatcher.js"); const { setStderrLoggingEnabled } = await import("../workflow-logger.js"); diff --git a/src/resources/extensions/sf/detection.ts b/src/resources/extensions/sf/detection.ts index 3742544be..9002f45f0 100644 --- a/src/resources/extensions/sf/detection.ts +++ b/src/resources/extensions/sf/detection.ts @@ -304,7 +304,7 @@ const MAX_RECURSIVE_SCAN_DEPTH = 6; */ export function detectProjectState(basePath: string): ProjectDetection { const v1 = detectV1Planning(basePath); - const v2 = detectV2Gsd(basePath); + const v2 = detectV2Sf(basePath); const projectSignals = detectProjectSignals(basePath); const globalSetup = hasGlobalSetup(); const firstEver = isFirstEverLaunch(); @@ -372,7 +372,7 @@ export function detectV1Planning(basePath: string): V1Detection | null { // ─── V2 SF Detection ────────────────────────────────────────────────────────── -function detectV2Gsd(basePath: string): V2Detection | null { +function detectV2Sf(basePath: string): V2Detection | null { const sfPath = sfRoot(basePath); if (!existsSync(sfPath)) return null; diff --git a/src/resources/extensions/sf/doctor-runtime-checks.ts b/src/resources/extensions/sf/doctor-runtime-checks.ts index fa38d4a5b..2e5631701 100644 --- 
a/src/resources/extensions/sf/doctor-runtime-checks.ts +++ b/src/resources/extensions/sf/doctor-runtime-checks.ts @@ -15,7 +15,7 @@ import { } from "./crash-recovery.js"; import type { DoctorIssue, DoctorIssueCode } from "./doctor-types.js"; import { saveFile } from "./files.js"; -import { ensureGitignore, isGsdGitignored } from "./gitignore.js"; +import { ensureGitignore, isSfGitignored } from "./gitignore.js"; import { recoverFailedMigration } from "./migrate-external.js"; import { nativeForEachRef, @@ -23,7 +23,7 @@ import { nativeUpdateRef, } from "./native-git-bridge.js"; import { milestonesDir, resolveSfRootFile, sfRoot } from "./paths.js"; -import { cleanNumberedGsdVariants } from "./repo-identity.js"; +import { cleanNumberedSfVariants } from "./repo-identity.js"; import { isSessionStale, readAllSessionStatuses, @@ -431,27 +431,27 @@ export async function checkRuntimeHealth( }); } - // ── Symlinked .gsd without .gitignore entry (#4423) ── - // When `.gsd` is a symlink AND not gitignored, `git add -A -- :!.gsd/...` + // ── Symlinked .sf without .gitignore entry (#4423) ── + // When `.sf` is a symlink AND not gitignored, `git add -A -- :!.sf/...` // pathspecs fail with "beyond a symbolic link". Without self-heal this // silently drops new user files during auto-commit. - if (nativeIsRepo(basePath) && !isGsdGitignored(basePath)) { + if (nativeIsRepo(basePath) && !isSfGitignored(basePath)) { issues.push({ severity: "warning", - code: "symlinked_gsd_unignored", + code: "symlinked_sf_unignored", scope: "project", unitId: "project", message: - ".gsd is a symlink to external state but is not listed in .gitignore. This causes git pathspec exclusions to fail and can lead to silently dropped new files during auto-commit. Add `.gsd` to .gitignore.", + ".sf is a symlink to external state but is not listed in .gitignore. This causes git pathspec exclusions to fail and can lead to silently dropped new files during auto-commit. 
Add `.sf` to .gitignore.", file: ".gitignore", fixable: true, }); - if (shouldFix("symlinked_gsd_unignored")) { + if (shouldFix("symlinked_sf_unignored")) { const modified = ensureGitignore(basePath); if (modified) fixesApplied.push( - "added .gsd to .gitignore (symlinked external state)", + "added .sf to .gitignore (symlinked external state)", ); } } @@ -482,7 +482,7 @@ export async function checkRuntimeHealth( } if (shouldFix("numbered_sf_variant")) { - const removed = cleanNumberedGsdVariants(basePath); + const removed = cleanNumberedSfVariants(basePath); for (const name of removed) { fixesApplied.push(`removed numbered .sf variant: ${name}`); } diff --git a/src/resources/extensions/sf/doctor-types.ts b/src/resources/extensions/sf/doctor-types.ts index 5c0778c05..e2927983d 100644 --- a/src/resources/extensions/sf/doctor-types.ts +++ b/src/resources/extensions/sf/doctor-types.ts @@ -23,7 +23,7 @@ export type DoctorIssueCode = | "state_file_stale" | "state_file_missing" | "gitignore_missing_patterns" - | "symlinked_gsd_unignored" + | "symlinked_sf_unignored" | "unresolvable_dependency" | "failed_migration" | "broken_symlink" diff --git a/src/resources/extensions/sf/gitignore.ts b/src/resources/extensions/sf/gitignore.ts index 38e4cc5c3..f5c0533f1 100644 --- a/src/resources/extensions/sf/gitignore.ts +++ b/src/resources/extensions/sf/gitignore.ts @@ -117,7 +117,7 @@ const BASELINE_PATTERNS = [ * - `.sf` is not listed in any active ignore rule * - Not a git repo or git is unavailable */ -export function isGsdGitignored(basePath: string): boolean { +export function isSfGitignored(basePath: string): boolean { // Check both `.sf` and `.sf/` because `.sf/` in .gitignore (trailing // slash = directory-only pattern) only matches the directory form. Using // both paths covers all gitignore pattern variants. 
@@ -149,7 +149,7 @@ export function isGsdGitignored(basePath: string): boolean { * - `.sf/` doesn't exist * - No tracked files found under `.sf/` */ -export function hasGitTrackedGsdFiles(basePath: string): boolean { +export function hasGitTrackedSfFiles(basePath: string): boolean { const localSf = join(basePath, ".sf"); // If .sf doesn't exist or is already a symlink, no tracked files concern @@ -266,7 +266,7 @@ export function ensureGitignore( // Determine which patterns to apply. If .sf/ has tracked files, // exclude the ".sf" pattern to prevent deleting tracked state. - const sfIsTracked = hasGitTrackedGsdFiles(basePath); + const sfIsTracked = hasGitTrackedSfFiles(basePath); const patternsToApply = sfIsTracked ? BASELINE_PATTERNS.filter((p) => p !== ".sf") : BASELINE_PATTERNS; diff --git a/src/resources/extensions/sf/init-wizard.ts b/src/resources/extensions/sf/init-wizard.ts index 92d0fe580..baf8680b9 100644 --- a/src/resources/extensions/sf/init-wizard.ts +++ b/src/resources/extensions/sf/init-wizard.ts @@ -293,7 +293,7 @@ export async function showProjectInit( } // ── Step 9: Bootstrap .sf/ ──────────────────────────────────────────────── - bootstrapGsdDirectory(basePath, prefs, signals); + bootstrapSfDirectory(basePath, prefs, signals); // Initialize SQLite database so SF starts in full-capability mode (#3880). 
// Without this, isDbAvailable() returns false and SF enters degraded @@ -572,7 +572,7 @@ async function customizeAdvancedPrefs( // ─── Bootstrap ────────────────────────────────────────────────────────────────── -function bootstrapGsdDirectory( +function bootstrapSfDirectory( basePath: string, prefs: ProjectPreferences, signals: ProjectSignals, diff --git a/src/resources/extensions/sf/mcp-project-config.ts b/src/resources/extensions/sf/mcp-project-config.ts index ec4e33622..347c339b6 100644 --- a/src/resources/extensions/sf/mcp-project-config.ts +++ b/src/resources/extensions/sf/mcp-project-config.ts @@ -27,7 +27,7 @@ interface McpConfigFile { [key: string]: unknown; } -export function resolveBundledGsdCliPath( +export function resolveBundledSfCliPath( env: NodeJS.ProcessEnv = process.env, ): string | null { const explicit = env.SF_CLI_PATH?.trim() || env.SF_BIN_PATH?.trim(); @@ -55,7 +55,7 @@ export function buildProjectWorkflowMcpServerConfig( env: NodeJS.ProcessEnv = process.env, ): ProjectMcpServerConfig { const resolvedProjectRoot = resolve(projectRoot); - const sfCliPath = resolveBundledGsdCliPath(env); + const sfCliPath = resolveBundledSfCliPath(env); const launch = detectWorkflowMcpLaunchConfig(resolvedProjectRoot, { ...env, ...(sfCliPath ? 
{ SF_CLI_PATH: sfCliPath, SF_BIN_PATH: sfCliPath } : {}), diff --git a/src/resources/extensions/sf/migrate-external.ts b/src/resources/extensions/sf/migrate-external.ts index 73829b8e0..d56da7987 100644 --- a/src/resources/extensions/sf/migrate-external.ts +++ b/src/resources/extensions/sf/migrate-external.ts @@ -21,8 +21,8 @@ import { import { join } from "node:path"; import { getErrorMessage } from "./error-utils.js"; import { GIT_NO_PROMPT_ENV } from "./git-constants.js"; -import { hasGitTrackedGsdFiles } from "./gitignore.js"; -import { externalGsdRoot, isInsideWorktree } from "./repo-identity.js"; +import { hasGitTrackedSfFiles } from "./gitignore.js"; +import { externalSfRoot, isInsideWorktree } from "./repo-identity.js"; export interface MigrationResult { migrated: boolean; @@ -46,7 +46,7 @@ export interface MigrationResult { export function migrateToExternalState(basePath: string): MigrationResult { // Worktrees get their .sf via syncSfStateToWorktree(), not migration. // Migration inside a worktree would compute the same external hash as the - // main repo (externalGsdRoot hashes remoteUrl + gitRoot), creating a broken + // main repo (externalSfRoot hashes remoteUrl + gitRoot), creating a broken // junction and orphaning .sf.migrating (#2970). if (isInsideWorktree(basePath)) { return { migrated: false }; @@ -80,7 +80,7 @@ export function migrateToExternalState(basePath: string): MigrationResult { // Skip if .sf/ contains git-tracked files — the project intentionally // keeps .sf/ in version control and migration would destroy that. 
- if (hasGitTrackedGsdFiles(basePath)) { + if (hasGitTrackedSfFiles(basePath)) { return { migrated: false }; } @@ -100,7 +100,7 @@ export function migrateToExternalState(basePath: string): MigrationResult { } } - const externalPath = externalGsdRoot(basePath); + const externalPath = externalSfRoot(basePath); const migratingPath = join(basePath, ".sf.migrating"); try { diff --git a/src/resources/extensions/sf/migrate/command.ts b/src/resources/extensions/sf/migrate/command.ts index 5e7aee942..ddc2ba040 100644 --- a/src/resources/extensions/sf/migrate/command.ts +++ b/src/resources/extensions/sf/migrate/command.ts @@ -152,8 +152,8 @@ export async function handleMigrate( ); } - const targetGsdExists = existsSync(sfRoot(process.cwd())); - if (targetGsdExists) { + const targetSfExists = existsSync(sfRoot(process.cwd())); + if (targetSfExists) { lines.push(""); lines.push( "⚠ A .sf directory already exists in the current working directory — it will be overwritten.", diff --git a/src/resources/extensions/sf/milestone-scope-classifier.ts b/src/resources/extensions/sf/milestone-scope-classifier.ts index 78714db79..948674126 100644 --- a/src/resources/extensions/sf/milestone-scope-classifier.ts +++ b/src/resources/extensions/sf/milestone-scope-classifier.ts @@ -1,4 +1,4 @@ -// GSD-2 — Milestone scope classifier (#4781 / ADR-003 companion). +// SF — Milestone scope classifier (#4781 / ADR-003 companion). // // Pure heuristics over milestone planning fields. Produces a PipelineVariant // that downstream dispatch logic can use to shape the auto-mode sequence. 
diff --git a/src/resources/extensions/sf/native-parser-bridge.ts b/src/resources/extensions/sf/native-parser-bridge.ts index 33bad32b6..80f443ec3 100644 --- a/src/resources/extensions/sf/native-parser-bridge.ts +++ b/src/resources/extensions/sf/native-parser-bridge.ts @@ -18,7 +18,7 @@ let nativeModule: { level?: number, ) => { content: string; found: boolean }; extractAllSections: (content: string, level?: number) => string; - batchParseGsdFiles: (directory: string) => { + batchParseSfFiles: (directory: string) => { files: Array<{ path: string; metadata: string; @@ -47,7 +47,7 @@ let nativeModule: { consumes: string; }>; }; - scanGsdTree: ( + scanSfTree: ( directory: string, ) => Array<{ path: string; name: string; isDir: boolean }>; parseJsonlTail: ( @@ -70,7 +70,7 @@ function loadNative(): typeof nativeModule { // Dynamic import to avoid hard dependency - fails gracefully if native module not built // eslint-disable-next-line @typescript-eslint/no-require-imports const mod = require("@singularity-forge/native"); - if (mod.parseFrontmatter && mod.extractSection && mod.batchParseGsdFiles) { + if (mod.parseFrontmatter && mod.extractSection && mod.batchParseSfFiles) { nativeModule = mod; } } catch { @@ -161,13 +161,13 @@ export interface BatchParsedFile { * Batch-parse all .md files in a .sf/ directory tree using the native parser. * Returns null if native module unavailable. 
*/ -export function nativeBatchParseGsdFiles( +export function nativeBatchParseSfFiles( directory: string, ): BatchParsedFile[] | null { const native = loadNative(); if (!native) return null; - const result = native.batchParseGsdFiles(directory); + const result = native.batchParseSfFiles(directory); return result.files.map((f) => ({ path: f.path, metadata: JSON.parse(f.metadata) as Record, @@ -186,7 +186,7 @@ export function isNativeParserAvailable(): boolean { // ─── Tree Scanning ──────────────────────────────────────────────────────────── -export interface GsdTreeEntry { +export interface SfTreeEntry { path: string; name: string; isDir: boolean; @@ -196,10 +196,10 @@ export interface GsdTreeEntry { * Native-backed directory tree scan of a .sf/ directory. * Returns a flat list of all entries, or null if native module unavailable. */ -export function nativeScanGsdTree(directory: string): GsdTreeEntry[] | null { +export function nativeScanSfTree(directory: string): SfTreeEntry[] | null { const native = loadNative(); if (!native) return null; - return native.scanGsdTree(directory); + return native.scanSfTree(directory); } // ─── JSONL Parsing ──────────────────────────────────────────────────────────── diff --git a/src/resources/extensions/sf/parallel-orchestrator.ts b/src/resources/extensions/sf/parallel-orchestrator.ts index cb2ba93ed..2c02dc360 100644 --- a/src/resources/extensions/sf/parallel-orchestrator.ts +++ b/src/resources/extensions/sf/parallel-orchestrator.ts @@ -645,7 +645,7 @@ export function spawnWorker(basePath: string, milestoneId: string): boolean { if (worker.process) return true; // already spawned // Resolve the SF CLI binary path - const binPath = resolveGsdBin(); + const binPath = resolveSfBin(); if (!binPath) return false; let child: ChildProcess; @@ -800,7 +800,7 @@ export function spawnWorker(basePath: string, milestoneId: string): boolean { * Uses SF_BIN_PATH env var (set by loader.ts) or falls back to * finding the binary relative to the 
current module. */ -function resolveGsdBin(): string | null { +function resolveSfBin(): string | null { // SF_BIN_PATH is set by loader.ts to the absolute path of dist/loader.js if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) { return process.env.SF_BIN_PATH; diff --git a/src/resources/extensions/sf/paths.ts b/src/resources/extensions/sf/paths.ts index f65d42ee0..3deb7ca1c 100644 --- a/src/resources/extensions/sf/paths.ts +++ b/src/resources/extensions/sf/paths.ts @@ -14,8 +14,8 @@ import { Dirent, existsSync, readdirSync, realpathSync } from "node:fs"; import { dirname, join, normalize } from "node:path"; import { DIR_CACHE_MAX } from "./constants.js"; import { - type GsdTreeEntry, - nativeScanGsdTree, + type SfTreeEntry, + nativeScanSfTree, } from "./native-parser-bridge.js"; // ─── Directory Listing Cache ────────────────────────────────────────────────── @@ -27,17 +27,17 @@ const dirListCache = new Map(); // When the native module is available, scan the entire .sf/ tree in one call // and serve directory listings from memory instead of individual readdirSync calls. -let nativeTreeCache: Map | null = null; +let nativeTreeCache: Map | null = null; let nativeTreeBase: string | null = null; -function _getNativeTree(sfDir: string): Map | null { +function _getNativeTree(sfDir: string): Map | null { if (nativeTreeCache && nativeTreeBase === sfDir) return nativeTreeCache; - const entries = nativeScanGsdTree(sfDir); + const entries = nativeScanSfTree(sfDir); if (!entries) return null; // Build a map of parent directory -> entries - const tree = new Map(); + const tree = new Map(); for (const entry of entries) { const parts = entry.path.split("/"); const parentPath = parts.slice(0, -1).join("/"); @@ -298,7 +298,7 @@ const LEGACY_SF_ROOT_FILES: Record = { const sfRootCache = new Map(); /** Exported for tests only — do not call in production code. 
*/ -export function _clearGsdRootCache(): void { +export function _clearSfRootCache(): void { sfRootCache.clear(); } @@ -317,7 +317,7 @@ export function sfRoot(basePath: string): string { const cached = sfRootCache.get(basePath); if (cached) return cached; - const result = probeGsdRoot(basePath); + const result = probeSfRoot(basePath); sfRootCache.set(basePath, result); return result; } @@ -334,7 +334,7 @@ export const projectRoot = sfRoot; * Matches both forward-slash and platform-native separators to handle * Windows paths (path.sep = '\\') and normalized Unix paths. */ -function isInsideGsdWorktree(p: string): boolean { +function isInsideSfWorktree(p: string): boolean { // Match /.sf/worktrees/ where is the final segment or // followed by a separator. The segment must be non-empty. const sepFwd = "/"; @@ -356,7 +356,7 @@ function isInsideGsdWorktree(p: string): boolean { return false; } -function probeGsdRoot(rawBasePath: string): string { +function probeSfRoot(rawBasePath: string): string { // 1. Fast path — check the input path directly const local = join(rawBasePath, ".sf"); if (existsSync(local)) return local; @@ -366,7 +366,7 @@ function probeGsdRoot(rawBasePath: string): string { // the git-root probe (step 2) or walk-up (step 3) escapes to the project // root's .sf, causing ensurePreconditions() and deriveState() to read/write // state in the wrong location. - if (isInsideGsdWorktree(rawBasePath)) return local; + if (isInsideSfWorktree(rawBasePath)) return local; // Resolve symlinks so path comparisons work correctly across platforms // (e.g. macOS /var → /private/var). Use rawBasePath as fallback if not resolvable. @@ -378,7 +378,7 @@ function probeGsdRoot(rawBasePath: string): string { } // Also check the resolved path for the worktree pattern (macOS /tmp → /private/tmp) - if (basePath !== rawBasePath && isInsideGsdWorktree(basePath)) return local; + if (basePath !== rawBasePath && isInsideSfWorktree(basePath)) return local; // 2. 
Git root anchor — used as both probe target and walk-up boundary // Only walk if we're inside a git project — prevents escaping into @@ -437,14 +437,10 @@ export function resolveSfRootFile( return canonical; } -export const resolveGsdRootFile = resolveSfRootFile; - export function relSfRootFile(key: SFRootFileKey): string { return `.sf/${SF_ROOT_FILES[key]}`; } -export const relGsdRootFile = relSfRootFile; - /** * Resolve the full path to a milestone directory. * Returns null if the milestone doesn't exist. diff --git a/src/resources/extensions/sf/prompt-loader.ts b/src/resources/extensions/sf/prompt-loader.ts index 2c54e2a55..8eca6a248 100644 --- a/src/resources/extensions/sf/prompt-loader.ts +++ b/src/resources/extensions/sf/prompt-loader.ts @@ -40,8 +40,8 @@ function resolveExtensionDir(): string { // Fallback: user-local agent directory const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - const agentGsdDir = join(sfHome, "agent", "extensions", "sf"); - if (existsSync(join(agentGsdDir, "prompts"))) return agentGsdDir; + const agentSfDir = join(sfHome, "agent", "extensions", "sf"); + if (existsSync(join(agentSfDir, "prompts"))) return agentSfDir; // Last resort: return the module dir (warmCache will silently handle the miss) return moduleDir; diff --git a/src/resources/extensions/sf/prompts/parallel-research-slices.md b/src/resources/extensions/sf/prompts/parallel-research-slices.md index 9bc211128..d182b75d5 100644 --- a/src/resources/extensions/sf/prompts/parallel-research-slices.md +++ b/src/resources/extensions/sf/prompts/parallel-research-slices.md @@ -12,12 +12,22 @@ Dispatch ALL slices simultaneously using the `subagent` tool in **parallel mode* ## Execution Protocol -1. Call `subagent` with `tasks: [...]` containing one entry per slice below +1. Call `subagent` exactly once with the JSON payload below 2. Wait for ALL subagents to complete 3. Verify each slice's RESEARCH file was written (check the `.sf/{{mid}}/` directory) 4. 
If any subagent failed to write its RESEARCH file, re-run it individually 5. Report which slices completed research and which (if any) failed +## Required `subagent` Call Payload + +Use this exact payload for the `subagent` tool. Do not invent agent names. Do not use legacy executor aliases. + +```json +{{subagentCall}} +``` + ## Subagent Prompts +The same task payloads are expanded below for readability. + {{subagentPrompts}} diff --git a/src/resources/extensions/sf/repo-identity.ts b/src/resources/extensions/sf/repo-identity.ts index 1e92d4e16..44aa43134 100644 --- a/src/resources/extensions/sf/repo-identity.ts +++ b/src/resources/extensions/sf/repo-identity.ts @@ -144,7 +144,7 @@ export function isInheritedRepo(basePath: string): boolean { // The git root is a proper ancestor. Check whether it already has .sf // (i.e. the parent project was initialised with SF). - if (isProjectGsd(join(root, ".sf"))) return false; + if (isProjectSf(join(root, ".sf"))) return false; // Walk up from basePath's parent to the git root checking for .sf. // Start at dirname(normalizedBase), NOT normalizedBase itself — finding @@ -152,7 +152,7 @@ export function isInheritedRepo(basePath: string): boolean { // says nothing about whether the git repo is inherited from an ancestor. let dir = dirname(normalizedBase); while (dir !== normalizedRoot && dir !== dirname(dir)) { - if (isProjectGsd(join(dir, ".sf"))) return false; + if (isProjectSf(join(dir, ".sf"))) return false; dir = dirname(dir); } @@ -174,23 +174,23 @@ export function isInheritedRepo(basePath: string): boolean { * Treating it as a project `.sf` would cause isInheritedRepo() to wrongly * conclude that subdirectories are part of the home "project" (#2393). */ -function isProjectGsd(sfPath: string): boolean { +function isProjectSf(sfPath: string): boolean { if (!existsSync(sfPath)) return false; try { const stat = lstatSync(sfPath); - // Symlinks are always project .sf (created by ensureGsdSymlink). 
+ // Symlinks are always project .sf (created by ensureSfSymlink). if (stat.isSymbolicLink()) return true; // For real directories, check that this isn't the global SF home. // Recompute sfHome dynamically so env overrides (SF_HOME) are // picked up at call time, not just at module load time. if (stat.isDirectory()) { - const currentGsdHome = process.env.SF_HOME || join(homedir(), ".sf"); - const normalizedGsdPath = canonicalizeExistingPath(sfPath); - const normalizedGsdHome = canonicalizeExistingPath(currentGsdHome); - if (normalizedGsdPath === normalizedGsdHome) return false; + const currentSfHome = process.env.SF_HOME || join(homedir(), ".sf"); + const normalizedSfPath = canonicalizeExistingPath(sfPath); + const normalizedSfHome = canonicalizeExistingPath(currentSfHome); + if (normalizedSfPath === normalizedSfHome) return false; return true; } } catch { @@ -337,7 +337,7 @@ export function repoIdentity(basePath: string): string { * Returns `$SF_STATE_DIR/projects/` if `SF_STATE_DIR` is set, * otherwise `~/.sf/projects/`. */ -export function externalGsdRoot(basePath: string): string { +export function externalSfRoot(basePath: string): string { const base = process.env.SF_STATE_DIR || sfHome; return join(base, "projects", repoIdentity(basePath)); } @@ -363,12 +363,12 @@ export function externalProjectsRoot(): string { * directory, making tracked planning files appear deleted. * * This helper scans the project root for entries matching `.sf ` and - * removes them. It is called early in `ensureGsdSymlink()` so that the + * removes them. It is called early in `ensureSfSymlink()` so that the * canonical `.sf` path is always the one in use. 
*/ const SF_NUMBERED_VARIANT_RE = /^\.sf \d+$/; -export function cleanNumberedGsdVariants(projectPath: string): string[] { +export function cleanNumberedSfVariants(projectPath: string): string[] { const removed: string[] = []; try { const entries = readdirSync(projectPath); @@ -401,7 +401,7 @@ export function cleanNumberedGsdVariants(projectPath: string): string[] { * The marker is gitignored by ensureGitignore(). Non-fatal: failure to write * the marker must never block project setup. */ -function writeGsdIdMarker(projectPath: string, identity: string): void { +function writeSfIdMarker(projectPath: string, identity: string): void { try { const markerPath = join(projectPath, ".sf-id"); // Only write if content differs to avoid unnecessary disk writes. @@ -422,7 +422,7 @@ function writeGsdIdMarker(projectPath: string, identity: string): void { * Read the `.sf-id` marker from the project root. * Returns the identity hash, or null if the marker doesn't exist or is unreadable. */ -function readGsdIdMarker(projectPath: string): string | null { +function readSfIdMarker(projectPath: string): string | null { try { const markerPath = join(projectPath, ".sf-id"); if (!existsSync(markerPath)) return null; @@ -462,7 +462,7 @@ export function hasExternalProjectState(externalPath: string): boolean { * Returns the resolved external path (may differ from the computed identity). */ function resolveExternalPathWithRecovery(projectPath: string): string { - const computedPath = externalGsdRoot(projectPath); + const computedPath = externalSfRoot(projectPath); const computedId = repoIdentity(projectPath); // Check if computed path already has state — fast path, no recovery needed. @@ -471,7 +471,7 @@ function resolveExternalPathWithRecovery(projectPath: string): string { } // Check for .sf-id marker from a previous location. 
- const markerId = readGsdIdMarker(projectPath); + const markerId = readSfIdMarker(projectPath); if (markerId && markerId !== computedId) { // The marker points to a different identity — the repo was likely moved. const base = process.env.SF_STATE_DIR || sfHome; @@ -528,20 +528,20 @@ function resolveExternalPathWithRecovery(projectPath: string): string { * * Returns the resolved external path. */ -export function ensureGsdSymlink(projectPath: string): string { - const result = ensureGsdSymlinkCore(projectPath); +export function ensureSfSymlink(projectPath: string): string { + const result = ensureSfSymlinkCore(projectPath); // Write .sf-id marker so future relocations can recover this state (#2750). // Only write for the project root (not subdirectories or worktrees that // delegate to a parent .sf). if (!isInsideWorktree(projectPath)) { - writeGsdIdMarker(projectPath, repoIdentity(projectPath)); + writeSfIdMarker(projectPath, repoIdentity(projectPath)); } return result; } -function ensureGsdSymlinkCore(projectPath: string): string { +function ensureSfSymlinkCore(projectPath: string): string { const externalPath = resolveExternalPathWithRecovery(projectPath); const localSf = join(projectPath, ".sf"); const inWorktree = isInsideWorktree(projectPath); @@ -566,14 +566,14 @@ function ensureGsdSymlinkCore(projectPath: string): string { const normalizedProject = canonicalizeExistingPath(projectPath); const normalizedRoot = canonicalizeExistingPath(gitRoot); if (normalizedProject !== normalizedRoot) { - const rootGsd = join(gitRoot, ".sf"); - if (existsSync(rootGsd)) { + const rootSf = join(gitRoot, ".sf"); + if (existsSync(rootSf)) { try { - const rootStat = lstatSync(rootGsd); + const rootStat = lstatSync(rootSf); if (rootStat.isSymbolicLink() || rootStat.isDirectory()) { return rootStat.isSymbolicLink() - ? realpathSync(rootGsd) - : rootGsd; + ? 
realpathSync(rootSf) + : rootSf; } } catch { // Fall through to normal logic if we can't stat root .sf @@ -587,7 +587,7 @@ function ensureGsdSymlinkCore(projectPath: string): string { // Clean up macOS numbered collision variants (.sf 2, .sf 3, etc.) before // any existence checks — otherwise they accumulate and confuse state (#2205). - cleanNumberedGsdVariants(projectPath); + cleanNumberedSfVariants(projectPath); // Ensure external directory exists mkdirSync(externalPath, { recursive: true }); diff --git a/src/resources/extensions/sf/rethink.ts b/src/resources/extensions/sf/rethink.ts index 5e64f9a79..de5dea08e 100644 --- a/src/resources/extensions/sf/rethink.ts +++ b/src/resources/extensions/sf/rethink.ts @@ -14,7 +14,7 @@ import type { } from "@singularity-forge/pi-coding-agent"; import { isAutoActive } from "./auto.js"; -import { isGsdGitignored } from "./gitignore.js"; +import { isSfGitignored } from "./gitignore.js"; import { buildExistingMilestonesContext } from "./guided-flow-queue.js"; import { getParkedReason } from "./milestone-actions.js"; import { findMilestoneIds } from "./milestone-ids.js"; @@ -69,7 +69,7 @@ export async function handleRethink( state, ); - const commitInstruction = isGsdGitignored(basePath) + const commitInstruction = isSfGitignored(basePath) ? "Do not commit planning artifacts — .sf/ is gitignored in this project." 
: 'After changes, run `git add .sf/ && git commit -m "docs(sf): rethink milestone plan"` to persist (rethink runs interactively outside auto-mode, so no system auto-commit)'; diff --git a/src/resources/extensions/sf/safety/safe-id.ts b/src/resources/extensions/sf/safety/safe-id.ts index 2a23862f7..e57430991 100644 --- a/src/resources/extensions/sf/safety/safe-id.ts +++ b/src/resources/extensions/sf/safety/safe-id.ts @@ -23,15 +23,22 @@ const MAX_ID_LENGTH = 64; export class UnsafeIdError extends TypeError { constructor( - public readonly fieldName: string, - public readonly reason: string, - public readonly value: string, + fieldName: string, + reason: string, + value: string, ) { super( `${fieldName} is unsafe: ${reason} (got ${JSON.stringify(value).slice(0, 80)})`, ); + this.fieldName = fieldName; + this.reason = reason; + this.value = value; this.name = "UnsafeIdError"; } + + public readonly fieldName: string; + public readonly reason: string; + public readonly value: string; } /** diff --git a/src/resources/extensions/sf/skill-manifest.ts b/src/resources/extensions/sf/skill-manifest.ts index dda4ea126..2b7a0f044 100644 --- a/src/resources/extensions/sf/skill-manifest.ts +++ b/src/resources/extensions/sf/skill-manifest.ts @@ -1,4 +1,4 @@ -// GSD2 + skill-manifest — per-unit-type skill allowlist resolver (RFC #4779) +// SF2 + skill-manifest — per-unit-type skill allowlist resolver (RFC #4779) // // Each auto-mode unit type can declare which skills are relevant to it. This // trims the set of skills considered for activation in the per-unit prompt, @@ -168,7 +168,7 @@ export function warnIfManifestHasMissingSkills( ): void { // Strict mode is intentionally opt-in via exactly "1"; values like "0" or // "false" must preserve the normal silent manifest behavior. 
- if (process.env.GSD_SKILL_MANIFEST_STRICT !== "1") return; + if (process.env.SF_SKILL_MANIFEST_STRICT !== "1") return; const allowlist = resolveSkillManifest(unitType); if (!allowlist) return; for (const name of allowlist) { diff --git a/src/resources/extensions/sf/skills/acquiring-skills/SKILL.md b/src/resources/extensions/sf/skills/acquiring-skills/SKILL.md index 0acc59d56..fdeee8f9e 100644 --- a/src/resources/extensions/sf/skills/acquiring-skills/SKILL.md +++ b/src/resources/extensions/sf/skills/acquiring-skills/SKILL.md @@ -92,7 +92,7 @@ Before installing, ensure the skill follows sf naming: - Lowercase kebab-case directory name. - Match the directory name exactly to the `name:` field in frontmatter. -- No prefixes like `dr-`, `ace-`, `gsd-` — strip them. (`dr-spec-first-tdd` → `spec-first-tdd`.) +- No prefixes like `dr-`, `ace-` — strip them. (`dr-spec-first-tdd` → `spec-first-tdd`.) - See [`creating-skills`](../creating-skills/SKILL.md) for the full convention. ## How to Acquire @@ -136,7 +136,7 @@ rsync -av -e ssh \ After fetching, **adapt for sf**: -- Strip foreign prefixes (`dr-`, `ace-`, `gsd-`, `letta-`). +- Strip foreign prefixes (`dr-`, `ace-`, `letta-`). - Replace foreign tooling references (Letta MCP tool calls, claude-flow CLIs) with sf-native equivalents (`rg`, `npm test`, `sf_*` tools, `advisory-partner` skill, etc.). - Drop bootstrap gates that don't apply (`onboarding()`, `IN_NIX_SHELL`, etc.). - Cite sf doctrine: `AGENTS.md`, `docs/SPEC_FIRST_TDD.md`, the relevant sister skill. @@ -176,7 +176,7 @@ User asks: "Can you help me test my React app's UI?" - **Read every script before executing it.** No exceptions, even from trusted sources. - **Don't `curl | bash`** unless the user has personally inspected and approved the URL. - **Untrusted sources require explicit user approval** before download. -- **Strip foreign prefixes** when porting (`dr-`, `ace-`, `gsd-`, `letta-`). +- **Strip foreign prefixes** when porting (`dr-`, `ace-`, `letta-`). 
- **Adapt tooling references** to sf-native equivalents. - **Cite sf doctrine** — link `AGENTS.md` and `docs/SPEC_FIRST_TDD.md` rather than restating their rules. - **Don't overwrite an existing sf skill** without diffing first; if names collide, decide whether to merge, supersede, or rename. diff --git a/src/resources/extensions/sf/skills/clarify-spec/SKILL.md b/src/resources/extensions/sf/skills/clarify-spec/SKILL.md index fb7a1c991..60e7f027f 100644 --- a/src/resources/extensions/sf/skills/clarify-spec/SKILL.md +++ b/src/resources/extensions/sf/skills/clarify-spec/SKILL.md @@ -17,7 +17,7 @@ The job: reduce ambiguity that would otherwise cause bad plans, wrong tests, or - A milestone goal is "make it better" or "robust" or "fast" — vague verbs that aren't testable. - A slice plan is being drafted but key boundaries are unstated. - A change touches a security/auth surface and the threat model isn't named. -- An upstream port (pi-mono / gsd-2) leaves architectural intent ambiguous after reading the commit. +- An upstream port (pi-mono legacy port) leaves architectural intent ambiguous after reading the commit. If the request is concrete and the consumer is obvious, skip this skill — go straight to `brainstorming` or `spec-first-tdd`. diff --git a/src/resources/extensions/sf/skills/context-doctor/SKILL.md b/src/resources/extensions/sf/skills/context-doctor/SKILL.md index 157f441c0..dab6340fe 100644 --- a/src/resources/extensions/sf/skills/context-doctor/SKILL.md +++ b/src/resources/extensions/sf/skills/context-doctor/SKILL.md @@ -52,7 +52,7 @@ Look for: | Decay type | Symptoms | Fix | |---|---|---| | **Bloat** | `.sf/CODEBASE.md` is 5x its useful size; same fact stated 4 times. | Compress: keep one canonical statement, delete the rest. | -| **Stale** | A file references `extensions/gsd/` (renamed to `extensions/sf/`). | Update; or, if the fact is now self-evident from the code, delete. 
| +| **Stale** | A file references `extensions/old-extension/` (renamed to `extensions/sf/`). | Update; or, if the fact is now self-evident from the code, delete. | | **Contradiction** | `.sf/DECISIONS.md` says "use bun" but `AGENTS.md` says "npm canonical". | Find the canonical source (usually `AGENTS.md` for sf), fix the other. | | **Orphaned** | A reference points to a file that was deleted. | Delete the reference, or restore the file if it should still exist. | | **Skill overlap** | Two skills try to do the same job. | Either merge them or scope each to its distinct sub-case. | diff --git a/src/resources/extensions/sf/skills/creating-skills/SKILL.md b/src/resources/extensions/sf/skills/creating-skills/SKILL.md index 73421e160..8ffc9a5e5 100644 --- a/src/resources/extensions/sf/skills/creating-skills/SKILL.md +++ b/src/resources/extensions/sf/skills/creating-skills/SKILL.md @@ -108,7 +108,7 @@ porting-from-upstream/ ├── SKILL.md (overview + which-upstream selection) └── references/ ├── pi-mono.md (cherry-pick patterns) - ├── gsd-2.md (manual port + naming translation) + ├── legacy-port.md (manual port + naming translation) └── bunker.md (skill harvest from remote host) ``` diff --git a/src/resources/extensions/sf/skills/working-in-parallel/SKILL.md b/src/resources/extensions/sf/skills/working-in-parallel/SKILL.md index 22db33edb..03f56fa61 100644 --- a/src/resources/extensions/sf/skills/working-in-parallel/SKILL.md +++ b/src/resources/extensions/sf/skills/working-in-parallel/SKILL.md @@ -73,7 +73,7 @@ git worktree remove ../singularity-forge-my-feature - Another agent (sf auto-loop, another Claude session, a teammate) is working in the current directory. - A long-running build or test is in flight in one terminal and you need a parallel branch. - You're exploring a refactor that you may abandon — keep main clean. -- You need to apply an upstream cherry-pick from `pi-mono` while a separate `gsd-2` port is in progress. 
+- You need to apply an upstream cherry-pick from `pi-mono` while a separate legacy port is in progress. ## When NOT to Use diff --git a/src/resources/extensions/sf/slice-parallel-orchestrator.ts b/src/resources/extensions/sf/slice-parallel-orchestrator.ts index ec6a2d739..1db115794 100644 --- a/src/resources/extensions/sf/slice-parallel-orchestrator.ts +++ b/src/resources/extensions/sf/slice-parallel-orchestrator.ts @@ -302,9 +302,9 @@ function filterConflictingSlices( /** * Resolve the SF CLI binary path. - * Same logic as parallel-orchestrator.ts resolveGsdBin(). + * Same logic as parallel-orchestrator.ts resolveSfBin(). */ -function resolveGsdBin(): string | null { +function resolveSfBin(): string | null { if (process.env.SF_BIN_PATH && existsSync(process.env.SF_BIN_PATH)) { return process.env.SF_BIN_PATH; } @@ -341,7 +341,7 @@ function spawnSliceWorker( if (!worker) return false; if (worker.process) return true; - const binPath = resolveGsdBin(); + const binPath = resolveSfBin(); if (!binPath) return false; let child: ChildProcess; diff --git a/src/resources/extensions/sf/state.ts b/src/resources/extensions/sf/state.ts index ce17df015..311bed8ac 100644 --- a/src/resources/extensions/sf/state.ts +++ b/src/resources/extensions/sf/state.ts @@ -15,7 +15,7 @@ import { import { findMilestoneIds } from "./milestone-ids.js"; import { getVisionAlignmentBlockingIssue } from "./milestone-quality.js"; import { isTerminalMilestoneSummaryContent } from "./milestone-summary-classifier.js"; -import { nativeBatchParseGsdFiles } from "./native-parser-bridge.js"; +import { nativeBatchParseSfFiles } from "./native-parser-bridge.js"; import { parsePlan, parseRoadmap } from "./parsers-legacy.js"; import { resolveMilestoneFile, @@ -1438,7 +1438,7 @@ export async function _deriveStateImpl(basePath: string): Promise { // Filesystem fallback: used when deriveStateFromDb() is not available // (pre-migration projects). 
The DB-backed path is preferred when available // — see deriveStateFromDb() above. - const batchFiles = nativeBatchParseGsdFiles(sfDir); + const batchFiles = nativeBatchParseSfFiles(sfDir); if (batchFiles) { for (const f of batchFiles) { const absPath = resolve(sfDir, f.path); diff --git a/src/resources/extensions/sf/tests/auto-model-selection.test.ts b/src/resources/extensions/sf/tests/auto-model-selection.test.ts index 16a56e110..f4860dad9 100644 --- a/src/resources/extensions/sf/tests/auto-model-selection.test.ts +++ b/src/resources/extensions/sf/tests/auto-model-selection.test.ts @@ -25,9 +25,9 @@ function makeTempDir(prefix: string): string { test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models section is absent", () => { const originalCwd = process.cwd(); - const originalGsdHome = process.env.SF_HOME; + const originalSfHome = process.env.SF_HOME; const tempProject = makeTempDir("sf-routing-project-"); - const tempGsdHome = makeTempDir("sf-routing-home-"); + const tempSfHome = makeTempDir("sf-routing-home-"); try { mkdirSync(join(tempProject, ".sf"), { recursive: true }); @@ -45,7 +45,7 @@ test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models ].join("\n"), "utf-8", ); - process.env.SF_HOME = tempGsdHome; + process.env.SF_HOME = tempSfHome; process.chdir(tempProject); const config = resolvePreferredModelConfig("plan-slice", { @@ -59,18 +59,18 @@ test("resolvePreferredModelConfig synthesizes heavy routing ceiling when models }); } finally { process.chdir(originalCwd); - if (originalGsdHome === undefined) delete process.env.SF_HOME; - else process.env.SF_HOME = originalGsdHome; + if (originalSfHome === undefined) delete process.env.SF_HOME; + else process.env.SF_HOME = originalSfHome; rmSync(tempProject, { recursive: true, force: true }); - rmSync(tempGsdHome, { recursive: true, force: true }); + rmSync(tempSfHome, { recursive: true, force: true }); } }); test("resolvePreferredModelConfig falls back to auto 
start model when heavy tier is absent", () => { const originalCwd = process.cwd(); - const originalGsdHome = process.env.SF_HOME; + const originalSfHome = process.env.SF_HOME; const tempProject = makeTempDir("sf-routing-project-"); - const tempGsdHome = makeTempDir("sf-routing-home-"); + const tempSfHome = makeTempDir("sf-routing-home-"); try { mkdirSync(join(tempProject, ".sf"), { recursive: true }); @@ -87,7 +87,7 @@ test("resolvePreferredModelConfig falls back to auto start model when heavy tier ].join("\n"), "utf-8", ); - process.env.SF_HOME = tempGsdHome; + process.env.SF_HOME = tempSfHome; process.chdir(tempProject); const config = resolvePreferredModelConfig("execute-task", { @@ -101,18 +101,18 @@ test("resolvePreferredModelConfig falls back to auto start model when heavy tier }); } finally { process.chdir(originalCwd); - if (originalGsdHome === undefined) delete process.env.SF_HOME; - else process.env.SF_HOME = originalGsdHome; + if (originalSfHome === undefined) delete process.env.SF_HOME; + else process.env.SF_HOME = originalSfHome; rmSync(tempProject, { recursive: true, force: true }); - rmSync(tempGsdHome, { recursive: true, force: true }); + rmSync(tempSfHome, { recursive: true, force: true }); } }); test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", () => { const originalCwd = process.cwd(); - const originalGsdHome = process.env.SF_HOME; + const originalSfHome = process.env.SF_HOME; const tempProject = makeTempDir("sf-routing-project-"); - const tempGsdHome = makeTempDir("sf-routing-home-"); + const tempSfHome = makeTempDir("sf-routing-home-"); try { mkdirSync(join(tempProject, ".sf"), { recursive: true }); @@ -130,7 +130,7 @@ test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", ( ].join("\n"), "utf-8", ); - process.env.SF_HOME = tempGsdHome; + process.env.SF_HOME = tempSfHome; process.chdir(tempProject); const config = resolvePreferredModelConfig("plan-slice", { @@ -144,23 +144,23 @@ 
test("resolvePreferredModelConfig keeps explicit phase models as the ceiling", ( }); } finally { process.chdir(originalCwd); - if (originalGsdHome === undefined) delete process.env.SF_HOME; - else process.env.SF_HOME = originalGsdHome; + if (originalSfHome === undefined) delete process.env.SF_HOME; + else process.env.SF_HOME = originalSfHome; rmSync(tempProject, { recursive: true, force: true }); - rmSync(tempGsdHome, { recursive: true, force: true }); + rmSync(tempSfHome, { recursive: true, force: true }); } }); test("selectAndApplyModel does not let learned routing override an explicit execution model", async () => { const originalCwd = process.cwd(); - const originalGsdHome = process.env.SF_HOME; + const originalSfHome = process.env.SF_HOME; const tempProject = makeTempDir("sf-routing-project-"); - const tempGsdHome = makeTempDir("sf-routing-home-"); + const tempSfHome = makeTempDir("sf-routing-home-"); try { mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( - join(tempGsdHome, "preferences.md"), + join(tempSfHome, "preferences.md"), [ "---", "version: 1", @@ -175,7 +175,7 @@ test("selectAndApplyModel does not let learned routing override an explicit exec ["---", "version: 1", "models: {}", "---"].join("\n"), "utf-8", ); - process.env.SF_HOME = tempGsdHome; + process.env.SF_HOME = tempSfHome; process.chdir(tempProject); const availableModels = [ @@ -227,10 +227,10 @@ test("selectAndApplyModel does not let learned routing override an explicit exec assert.equal(result.appliedModel?.id, "kimi-for-coding"); } finally { process.chdir(originalCwd); - if (originalGsdHome === undefined) delete process.env.SF_HOME; - else process.env.SF_HOME = originalGsdHome; + if (originalSfHome === undefined) delete process.env.SF_HOME; + else process.env.SF_HOME = originalSfHome; rmSync(tempProject, { recursive: true, force: true }); - rmSync(tempGsdHome, { recursive: true, force: true }); + rmSync(tempSfHome, { recursive: true, force: true }); } }); diff --git 
a/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts b/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts index 9cdedcefc..3026bfcc9 100644 --- a/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts +++ b/src/resources/extensions/sf/tests/commands-workflow-custom.test.ts @@ -12,7 +12,7 @@ import { join } from "node:path"; import { afterEach, before, describe, it } from "node:test"; import { - getGsdArgumentCompletions, + getSfArgumentCompletions, TOP_LEVEL_SUBCOMMANDS, } from "../commands/catalog.ts"; @@ -116,8 +116,8 @@ describe("workflow catalog registration", () => { assert.match(entry!.desc, /session model/i); }); - it("getGsdArgumentCompletions('m') includes model", () => { - const completions = getGsdArgumentCompletions("m"); + it("getSfArgumentCompletions('m') includes model", () => { + const completions = getSfArgumentCompletions("m"); const labels = completions.map((c: any) => c.label); assert.ok(labels.includes("model"), "should include model completion"); }); @@ -129,8 +129,8 @@ describe("workflow catalog registration", () => { assert.ok(entry!.desc.includes("run"), "description should mention run"); }); - it("getGsdArgumentCompletions('workflow ') returns six subcommands", () => { - const completions = getGsdArgumentCompletions("workflow "); + it("getSfArgumentCompletions('workflow ') returns six subcommands", () => { + const completions = getSfArgumentCompletions("workflow "); const labels = completions.map((c: any) => c.label); for (const sub of ["new", "run", "list", "validate", "pause", "resume"]) { assert.ok(labels.includes(sub), `missing completion: ${sub}`); @@ -138,15 +138,15 @@ describe("workflow catalog registration", () => { assert.equal(labels.length, 6, "should have exactly 6 subcommands"); }); - it("getGsdArgumentCompletions('workflow r') filters to run and resume", () => { - const completions = getGsdArgumentCompletions("workflow r"); + it("getSfArgumentCompletions('workflow r') filters to run 
and resume", () => { + const completions = getSfArgumentCompletions("workflow r"); const labels = completions.map((c: any) => c.label); assert.ok(labels.includes("run"), "should include run"); assert.ok(labels.includes("resume"), "should include resume"); assert.ok(!labels.includes("list"), "should not include list"); }); - it("getGsdArgumentCompletions('workflow run ') returns definition names", () => { + it("getSfArgumentCompletions('workflow run ') returns definition names", () => { const base = makeTmpBase(); writeDefinition(base, "deploy-pipeline", SIMPLE_DEF); writeDefinition(base, "test-suite", SIMPLE_DEF); @@ -154,7 +154,7 @@ describe("workflow catalog registration", () => { // Change cwd so the completion scanner can find `.sf/workflow-defs/` process.chdir(base); - const completions = getGsdArgumentCompletions("workflow run "); + const completions = getSfArgumentCompletions("workflow run "); const labels = completions.map((c: any) => c.label); assert.ok( labels.includes("deploy-pipeline"), @@ -163,25 +163,25 @@ describe("workflow catalog registration", () => { assert.ok(labels.includes("test-suite"), "should include test-suite"); }); - it("getGsdArgumentCompletions('workflow validate ') returns definition names", () => { + it("getSfArgumentCompletions('workflow validate ') returns definition names", () => { const base = makeTmpBase(); writeDefinition(base, "my-workflow", SIMPLE_DEF); process.chdir(base); - const completions = getGsdArgumentCompletions("workflow validate "); + const completions = getSfArgumentCompletions("workflow validate "); const labels = completions.map((c: any) => c.label); assert.ok(labels.includes("my-workflow"), "should include my-workflow"); }); - it("getGsdArgumentCompletions('workflow run d') filters by prefix", () => { + it("getSfArgumentCompletions('workflow run d') filters by prefix", () => { const base = makeTmpBase(); writeDefinition(base, "deploy-pipeline", SIMPLE_DEF); writeDefinition(base, "test-suite", SIMPLE_DEF); 
process.chdir(base); - const completions = getGsdArgumentCompletions("workflow run d"); + const completions = getSfArgumentCompletions("workflow run d"); const labels = completions.map((c: any) => c.label); assert.ok( labels.includes("deploy-pipeline"), diff --git a/src/resources/extensions/sf/tests/complete-slice-composer.test.ts b/src/resources/extensions/sf/tests/complete-slice-composer.test.ts index 17f80df6f..68ccf944d 100644 --- a/src/resources/extensions/sf/tests/complete-slice-composer.test.ts +++ b/src/resources/extensions/sf/tests/complete-slice-composer.test.ts @@ -1,4 +1,4 @@ -// GSD-2 — #4782 phase 3 batch 3: complete-slice migrated through composer. +// SF — #4782 phase 3 batch 3: complete-slice migrated through composer. import assert from "node:assert/strict"; import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; @@ -18,9 +18,9 @@ import { } from "../sf-db.ts"; function makeBase(): string { - const base = mkdtempSync(join(tmpdir(), "gsd-completeslice-composer-")); + const base = mkdtempSync(join(tmpdir(), "sf-completeslice-composer-")); mkdirSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }, ); return base; @@ -37,7 +37,7 @@ function cleanup(base: string): void { } function seed(base: string, mid: string): void { - openDatabase(join(base, ".gsd", "gsd.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: mid, title: "Composer Test", @@ -80,17 +80,17 @@ function seed(base: string, mid: string): void { function writeArtifacts(base: string): void { writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", 
"slices", "S01", "S01-PLAN.md"), "# S01 Plan\n\nSlice plan body.\n", ); writeFileSync( join( base, - ".gsd", + ".sf", "milestones", "M001", "slices", @@ -163,11 +163,11 @@ test("#4782 phase 3: buildCompleteSlicePrompt handles missing task summaries gra seed(base, "M001"); // Write roadmap + plan but no task summaries writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001 Roadmap\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-PLAN.md"), "# S01 Plan\n", ); diff --git a/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts b/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts index f90733259..064ccb77f 100644 --- a/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts +++ b/src/resources/extensions/sf/tests/copy-planning-artifacts-samepath.test.ts @@ -12,9 +12,9 @@ test("copyPlanningArtifacts skips when source and destination .sf resolve to the const fnBody = src.slice(fnIdx, fnIdx + 2400); - const guardIdx = fnBody.indexOf("if (isSamePath(srcGsd, dstGsd)) return;"); + const guardIdx = fnBody.indexOf("if (isSamePath(srcSf, dstSf)) return;"); const copyIdx = fnBody.indexOf( - 'safeCopyRecursive(join(srcGsd, "milestones")', + 'safeCopyRecursive(join(srcSf, "milestones")', ); assert.ok( diff --git a/src/resources/extensions/sf/tests/debug-logger.test.ts b/src/resources/extensions/sf/tests/debug-logger.test.ts index fc6496644..b9b21586d 100644 --- a/src/resources/extensions/sf/tests/debug-logger.test.ts +++ b/src/resources/extensions/sf/tests/debug-logger.test.ts @@ -24,7 +24,7 @@ import { writeDebugSummary, } from "../debug-logger.ts"; -function createTempGsdDir(): string { +function createTempSfDir(): string { const tmp = 
mkdtempSync(join(tmpdir(), "sf-debug-test-")); mkdirSync(join(tmp, ".sf"), { recursive: true }); return tmp; @@ -37,7 +37,7 @@ function readLogLines(logPath: string): Record[] { } test("enableDebug creates log file and sets enabled", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); assert.strictEqual(isDebugEnabled(), true); @@ -56,7 +56,7 @@ test("enableDebug creates log file and sets enabled", () => { }); test("debugLog writes JSONL events", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); debugLog("test-event", { foo: "bar", num: 42 }); @@ -82,7 +82,7 @@ test("debugLog is no-op when disabled", () => { }); test("debugTime measures elapsed time", async () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); const stop = debugTime("timed-op"); @@ -111,7 +111,7 @@ test("debugTime returns no-op when disabled", () => { }); test("debugCount increments counters", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); debugCount("dispatches"); @@ -128,7 +128,7 @@ test("debugCount increments counters", () => { }); test("debugPeak tracks max values", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); debugPeak("ttsrPeakBuffer", 100); @@ -143,7 +143,7 @@ test("debugPeak tracks max values", () => { }); test("writeDebugSummary includes all counters and disables debug", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); debugCount("deriveStateCalls", 10); @@ -171,7 +171,7 @@ test("writeDebugSummary includes all counters and disables debug", () => { }); test("auto-prunes old debug logs", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); const debugDir = join(tmp, ".sf", "debug"); mkdirSync(debugDir, { recursive: true }); @@ -196,7 +196,7 @@ test("auto-prunes old debug logs", () => { }); test("disableDebug 
returns log path", () => { - const tmp = createTempGsdDir(); + const tmp = createTempSfDir(); enableDebug(tmp); const logPath = getDebugLogPath(); diff --git a/src/resources/extensions/sf/tests/enhanced-verification-integration.test.ts b/src/resources/extensions/sf/tests/enhanced-verification-integration.test.ts index b49743e57..500db750a 100644 --- a/src/resources/extensions/sf/tests/enhanced-verification-integration.test.ts +++ b/src/resources/extensions/sf/tests/enhanced-verification-integration.test.ts @@ -467,14 +467,14 @@ import { runPostExecutionChecks } from "./post-execution-checks.ts"; test("handles large number of files without timeout", () => { // Use all available SF source files to stress test - const allGsdFiles = REAL_SF_FILES.map((f) => join(SF_SRC_DIR, f)); + const allSfFiles = REAL_SF_FILES.map((f) => join(SF_SRC_DIR, f)); const task = createTask({ id: "T01", title: "Large refactor touching many files", status: "complete", - key_files: allGsdFiles, - files: allGsdFiles, + key_files: allSfFiles, + files: allSfFiles, }); const start = performance.now(); diff --git a/src/resources/extensions/sf/tests/file-lock.test.ts b/src/resources/extensions/sf/tests/file-lock.test.ts index 1ff768de8..096f0a4f6 100644 --- a/src/resources/extensions/sf/tests/file-lock.test.ts +++ b/src/resources/extensions/sf/tests/file-lock.test.ts @@ -95,7 +95,7 @@ test('withFileLockSync: onLocked="skip" runs callback unlocked on ELOCKED', () = } const lockfile = require("proper-lockfile"); - const dir = mkdtempSync(join(tmpdir(), "gsd-file-lock-test-")); + const dir = mkdtempSync(join(tmpdir(), "sf-file-lock-test-")); const filePath = join(dir, "locked.jsonl"); writeFileSync(filePath, "{}\n", "utf-8"); @@ -161,7 +161,7 @@ test('withFileLock: onLocked="skip" runs callback unlocked on ELOCKED', async () } const lockfile = require("proper-lockfile"); - const dir = mkdtempSync(join(tmpdir(), "gsd-file-lock-test-")); + const dir = mkdtempSync(join(tmpdir(), "sf-file-lock-test-")); 
const filePath = join(dir, "locked.jsonl"); writeFileSync(filePath, "{}\n", "utf-8"); diff --git a/src/resources/extensions/sf/tests/headless-project-repair.test.ts b/src/resources/extensions/sf/tests/headless-project-repair.test.ts index 065d7aa05..3bd046ff5 100644 --- a/src/resources/extensions/sf/tests/headless-project-repair.test.ts +++ b/src/resources/extensions/sf/tests/headless-project-repair.test.ts @@ -14,7 +14,7 @@ import { join } from "node:path"; import { afterEach, beforeEach, describe, test } from "node:test"; import { repairMissingSfSymlinkForHeadless } from "../../../../headless.ts"; -import { externalGsdRoot } from "../repo-identity.ts"; +import { externalSfRoot } from "../repo-identity.ts"; function run(command: string, cwd: string): string { return execSync(command, { @@ -52,7 +52,7 @@ describe("headless project repair", () => { }); test("re-links .sf when matching external project state already exists", () => { - const externalPath = externalGsdRoot(base); + const externalPath = externalSfRoot(base); mkdirSync(join(externalPath, "milestones"), { recursive: true }); const repairedPath = repairMissingSfSymlinkForHeadless(base); diff --git a/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts b/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts index 8d7a7008e..d5b8f3c99 100644 --- a/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/auto-worktree-milestone-merge.test.ts @@ -56,7 +56,7 @@ function createTempRepo(): string { return dir; } -function createTempRepoWithExternalGsd(): { +function createTempRepoWithExternalSf(): { repo: string; externalState: string; } { @@ -137,8 +137,8 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { return d; } - function freshRepoWithExternalGsd(): { repo: string; externalState: string } { - const { repo, externalState } = 
createTempRepoWithExternalGsd(); + function freshRepoWithExternalSf(): { repo: string; externalState: string } { + const { repo, externalState } = createTempRepoWithExternalSf(); tempDirs.push(repo, externalState); return { repo, externalState }; } @@ -969,7 +969,7 @@ describe("auto-worktree-milestone-merge", { timeout: 300_000 }, () => { }); test("#2156: mergeMilestoneToMain removes external-state worktrees using the milestone branch name", () => { - const { repo, externalState } = freshRepoWithExternalGsd(); + const { repo, externalState } = freshRepoWithExternalSf(); const wtPath = createAutoWorktree(repo, "M215"); addSliceToMilestone(repo, wtPath, "M215", "S01", "External cleanup", [ diff --git a/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts b/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts index b4f35dc5e..d8b50c11f 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-false-positives.test.ts @@ -55,10 +55,10 @@ describe("doctor false-positives (#3105)", async () => { // Create a worktree directory that only has .sf/doctor-history.jsonl const wtDir = join(sf, "worktrees", "M042"); - const wtGsdDir = join(wtDir, ".sf"); - mkdirSync(wtGsdDir, { recursive: true }); + const wtSfDir = join(wtDir, ".sf"); + mkdirSync(wtSfDir, { recursive: true }); writeFileSync( - join(wtGsdDir, "doctor-history.jsonl"), + join(wtSfDir, "doctor-history.jsonl"), '{"ts":"2026-01-01","ok":true}\n', ); diff --git a/src/resources/extensions/sf/tests/integration/doctor-git.test.ts b/src/resources/extensions/sf/tests/integration/doctor-git.test.ts index b45856be2..dcc6aa96a 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-git.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-git.test.ts @@ -760,13 +760,13 @@ describe("doctor-git", async () => { // Move .sf to an external location and replace with a 
symlink. // This simulates the ~/.sf/projects/ layout where .sf is a symlink. - const externalGsd = join( + const externalSf = join( realpathSync(mkdtempSync(join(tmpdir(), "doc-git-symlink-"))), "sf-data", ); - cleanups.push(externalGsd); - renameSync(join(dir, ".sf"), externalGsd); - symlinkSync(externalGsd, join(dir, ".sf")); + cleanups.push(externalSf); + renameSync(join(dir, ".sf"), externalSf); + symlinkSync(externalSf, join(dir, ".sf")); // Create a real registered worktree under the (now symlinked) .sf/worktrees/ mkdirSync(join(dir, ".sf", "worktrees"), { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts b/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts index b6645d610..c79da1fc4 100644 --- a/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor-runtime.test.ts @@ -343,9 +343,10 @@ None "fix adds patterns", ); - // Verify .sf entry was added (external state symlink) - const content = readFileSync(join(dir, ".gitignore"), "utf-8"); - assert.ok(content.includes(".sf"), "gitignore now has .sf entry"); + assert.doesNotThrow( + () => run("git check-ignore -q .sf", dir), + "git now ignores .sf after fix", + ); }); } else { } @@ -377,38 +378,42 @@ node_modules/ } else { } - // ─── Test 8b: Symlinked .gsd without .gitignore entry (#4423) ───── + // ─── Test 8b: Symlinked .sf without .gitignore entry (#4423) ───── if (process.platform !== "win32") { - test("symlinked_gsd_unignored", async () => { + test("symlinked_sf_unignored", async () => { const dir = createGitProject(); cleanups.push(dir); - // Create .gsd as a symlink to an external directory (standard external - // state layout), and write a .gitignore that does NOT list .gsd. 
- const externalGsd = mkdtempSync(join(tmpdir(), "gsd-external-doctor-")); - cleanups.push(externalGsd); - writeFileSync(join(externalGsd, "STATE.md"), "# State\n"); - symlinkSync(externalGsd, join(dir, ".gsd")); + // Create .sf as a symlink to an external directory (standard external + // state layout), and write a .gitignore that does NOT list .sf. + const externalSf = mkdtempSync(join(tmpdir(), "sf-external-doctor-")); + cleanups.push(externalSf); + writeFileSync(join(externalSf, "STATE.md"), "# State\n"); + symlinkSync(externalSf, join(dir, ".sf")); writeFileSync(join(dir, ".gitignore"), "node_modules/\n"); const detect = await runSFDoctor(dir); const symlinkIssues = detect.issues.filter( - (i: any) => i.code === "symlinked_gsd_unignored", + (i: any) => i.code === "symlinked_sf_unignored", ); assert.ok( symlinkIssues.length > 0, - "detects symlinked .gsd without gitignore entry", + "detects symlinked .sf without gitignore entry", ); const fixed = await runSFDoctor(dir, { fix: true }); assert.ok( - fixed.fixesApplied.some((f: any) => f.includes(".gitignore")), - "fix updates .gitignore", + fixed.fixesApplied.some((f: any) => + f.includes("added missing SF runtime patterns"), + ), + "fix adds SF runtime ignore patterns", ); - const content = readFileSync(join(dir, ".gitignore"), "utf-8"); - assert.ok(/^\.gsd\/?$/m.test(content), "gitignore now has .gsd entry"); + assert.doesNotThrow( + () => run("git check-ignore -q .sf", dir), + "git now ignores symlinked .sf after fix", + ); }); } else { } diff --git a/src/resources/extensions/sf/tests/integration/doctor.test.ts b/src/resources/extensions/sf/tests/integration/doctor.test.ts index f6d35ab55..52e4817fe 100644 --- a/src/resources/extensions/sf/tests/integration/doctor.test.ts +++ b/src/resources/extensions/sf/tests/integration/doctor.test.ts @@ -143,8 +143,8 @@ describe("doctor", async () => { // ─── Milestone summary detection: missing summary ────────────────────── test("doctor detects missing milestone 
summary", async () => { const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-test-")); - const msGsd = join(msBase, ".sf"); - const msMDir = join(msGsd, "milestones", "M001"); + const msSf = join(msBase, ".sf"); + const msMDir = join(msSf, "milestones", "M001"); const msSDir = join(msMDir, "slices", "S01"); const msTDir = join(msSDir, "tasks"); mkdirSync(msTDir, { recursive: true }); @@ -241,8 +241,8 @@ parent: M001 // ─── Milestone summary detection: summary present (no false positive) ── test("doctor does NOT flag milestone with summary", async () => { const msBase = mkdtempSync(join(tmpdir(), "sf-doctor-ms-ok-test-")); - const msGsd = join(msBase, ".sf"); - const msMDir = join(msGsd, "milestones", "M001"); + const msSf = join(msBase, ".sf"); + const msMDir = join(msSf, "milestones", "M001"); const msSDir = join(msMDir, "slices", "S01"); const msTDir = join(msSDir, "tasks"); mkdirSync(msTDir, { recursive: true }); @@ -317,8 +317,8 @@ parent: M001 // ─── blocker_discovered_no_replan detection ──────────────────────────── test("doctor detects blocker_discovered_no_replan", async () => { const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-test-")); - const bGsd = join(bBase, ".sf"); - const bMDir = join(bGsd, "milestones", "M001"); + const bSf = join(bBase, ".sf"); + const bMDir = join(bSf, "milestones", "M001"); const bSDir = join(bMDir, "slices", "S01"); const bTDir = join(bSDir, "tasks"); mkdirSync(bTDir, { recursive: true }); @@ -408,8 +408,8 @@ Discovered an issue. 
// ─── blocker_discovered with REPLAN.md (no false positive) ───────────── test("doctor does NOT flag blocker when REPLAN.md exists", async () => { const bBase = mkdtempSync(join(tmpdir(), "sf-doctor-blocker-ok-test-")); - const bGsd = join(bBase, ".sf"); - const bMDir = join(bGsd, "milestones", "M001"); + const bSf = join(bBase, ".sf"); + const bMDir = join(bSf, "milestones", "M001"); const bSDir = join(bMDir, "slices", "S01"); const bTDir = join(bSDir, "tasks"); mkdirSync(bTDir, { recursive: true }); @@ -482,8 +482,8 @@ Discovered an issue. // ─── Must-have verification: all addressed → no issue ───────────────── test("doctor: done task with must-haves all addressed → no issue", async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-ok-")); - const mhGsd = join(mhBase, ".sf"); - const mhMDir = join(mhGsd, "milestones", "M001"); + const mhSf = join(mhBase, ".sf"); + const mhMDir = join(mhSf, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); mkdirSync(mhTDir, { recursive: true }); @@ -523,8 +523,8 @@ Discovered an issue. // ─── Must-have verification: not addressed → warning fired ─────────── test("doctor: done task with must-haves NOT addressed → warning", async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-fail-")); - const mhGsd = join(mhBase, ".sf"); - const mhMDir = join(mhGsd, "milestones", "M001"); + const mhSf = join(mhBase, ".sf"); + const mhMDir = join(mhSf, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); mkdirSync(mhTDir, { recursive: true }); @@ -588,8 +588,8 @@ Discovered an issue. 
// ─── Must-have verification: no task plan → no issue ───────────────── test("doctor: done task with no task plan file → no issue", async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-noplan-")); - const mhGsd = join(mhBase, ".sf"); - const mhMDir = join(mhGsd, "milestones", "M001"); + const mhSf = join(mhBase, ".sf"); + const mhMDir = join(mhSf, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); mkdirSync(mhTDir, { recursive: true }); @@ -623,8 +623,8 @@ Discovered an issue. // ─── Must-have verification: plan exists but no Must-Haves section → no issue test("doctor: done task with plan but no Must-Haves section → no issue", async () => { const mhBase = mkdtempSync(join(tmpdir(), "sf-doctor-mh-nosect-")); - const mhGsd = join(mhBase, ".sf"); - const mhMDir = join(mhGsd, "milestones", "M001"); + const mhSf = join(mhBase, ".sf"); + const mhMDir = join(mhSf, "milestones", "M001"); const mhSDir = join(mhMDir, "slices", "S01"); const mhTDir = join(mhSDir, "tasks"); mkdirSync(mhTDir, { recursive: true }); @@ -717,8 +717,8 @@ Discovered an issue. // ─── doctor detects delimiter_in_title for milestone ─────────────────── test("doctor detects em dash in milestone title", async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-test-")); - const dtGsd = join(dtBase, ".sf"); - const dtMDir = join(dtGsd, "milestones", "M001"); + const dtSf = join(dtBase, ".sf"); + const dtMDir = join(dtSf, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); mkdirSync(dtTDir, { recursive: true }); @@ -776,8 +776,8 @@ Discovered an issue. 
// ─── doctor detects delimiter_in_title for slice ──────────────────────── test("doctor detects em dash in slice title", async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-slice-")); - const dtGsd = join(dtBase, ".sf"); - const dtMDir = join(dtGsd, "milestones", "M001"); + const dtSf = join(dtBase, ".sf"); + const dtMDir = join(dtSf, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); mkdirSync(dtTDir, { recursive: true }); @@ -823,8 +823,8 @@ Discovered an issue. // ─── doctor does NOT flag clean titles ────────────────────────────────── test("doctor does NOT flag milestone with clean title", async () => { const dtBase = mkdtempSync(join(tmpdir(), "sf-doctor-dt-clean-")); - const dtGsd = join(dtBase, ".sf"); - const dtMDir = join(dtGsd, "milestones", "M001"); + const dtSf = join(dtBase, ".sf"); + const dtMDir = join(dtSf, "milestones", "M001"); const dtSDir = join(dtMDir, "slices", "S01"); const dtTDir = join(dtSDir, "tasks"); mkdirSync(dtTDir, { recursive: true }); diff --git a/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts b/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts index f440ec867..f41e0f521 100644 --- a/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts +++ b/src/resources/extensions/sf/tests/integration/feature-branch-lifecycle-integration.test.ts @@ -457,8 +457,8 @@ describe("feature-branch-lifecycle-integration", async () => { // With external state, worktree .sf is a symlink to shared state. // Verify symlink was created (planning files are shared, not copied). 
- const wtGsd = join(wtPath, ".sf"); - assert.ok(existsSync(wtGsd), "worktree .sf exists (symlink or dir)"); + const wtSf = join(wtPath, ".sf"); + assert.ok(existsSync(wtSf), "worktree .sf exists (symlink or dir)"); // Clean up: chdir back before teardown process.chdir(savedCwd); diff --git a/src/resources/extensions/sf/tests/integration/git-service.test.ts b/src/resources/extensions/sf/tests/integration/git-service.test.ts index 70e730efb..8fa45fcb9 100644 --- a/src/resources/extensions/sf/tests/integration/git-service.test.ts +++ b/src/resources/extensions/sf/tests/integration/git-service.test.ts @@ -1686,13 +1686,13 @@ describe("git-service", async () => { const repo = initTempRepo(); // Create the real .sf directory outside the repo, then symlink it - const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-")); - mkdirSync(join(externalGsd, "activity"), { recursive: true }); - writeFileSync(join(externalGsd, "activity", "log.jsonl"), "log data"); - writeFileSync(join(externalGsd, "STATE.md"), "# State"); + const externalSf = mkdtempSync(join(tmpdir(), "sf-external-")); + mkdirSync(join(externalSf, "activity"), { recursive: true }); + writeFileSync(join(externalSf, "activity", "log.jsonl"), "log data"); + writeFileSync(join(externalSf, "STATE.md"), "# State"); // Symlink .sf -> external directory - symlinkSync(externalGsd, join(repo, ".sf")); + symlinkSync(externalSf, join(repo, ".sf")); // Add .gitignore so .sf/ is ignored writeFileSync(join(repo, ".gitignore"), ".sf\n"); @@ -1737,14 +1737,14 @@ describe("git-service", async () => { assert.ok(!staged.includes(".sf"), ".sf content not staged"); rmSync(repo, { recursive: true, force: true }); - rmSync(externalGsd, { recursive: true, force: true }); + rmSync(externalSf, { recursive: true, force: true }); }); test("GitServiceImpl: symlinked .sf stages explicit untracked task files", () => { const repo = initTempRepo(); - const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-")); - 
mkdirSync(join(externalGsd, "activity"), { recursive: true }); - symlinkSync(externalGsd, join(repo, ".sf")); + const externalSf = mkdtempSync(join(tmpdir(), "sf-external-")); + mkdirSync(join(externalSf, "activity"), { recursive: true }); + symlinkSync(externalSf, join(repo, ".sf")); writeFileSync(join(repo, ".gitignore"), ".sf\n"); createFile(repo, "cmd/installer/main.go", "package main\n"); run("git add -A", repo); @@ -1776,7 +1776,7 @@ describe("git-service", async () => { ); rmSync(repo, { recursive: true, force: true }); - rmSync(externalGsd, { recursive: true, force: true }); + rmSync(externalSf, { recursive: true, force: true }); }); test("GitServiceImpl: stageOnly ignores summary none placeholders", () => { @@ -2029,12 +2029,12 @@ describe("git-service", async () => { const repo = initTempRepo(); // Create an external .sf directory and symlink it into the repo - const externalGsd = mkdtempSync(join(tmpdir(), "sf-external-symlink-")); - mkdirSync(join(externalGsd, "milestones", "M009"), { recursive: true }); - mkdirSync(join(externalGsd, "activity"), { recursive: true }); - mkdirSync(join(externalGsd, "runtime"), { recursive: true }); + const externalSf = mkdtempSync(join(tmpdir(), "sf-external-symlink-")); + mkdirSync(join(externalSf, "milestones", "M009"), { recursive: true }); + mkdirSync(join(externalSf, "activity"), { recursive: true }); + mkdirSync(join(externalSf, "runtime"), { recursive: true }); - symlinkSync(externalGsd, join(repo, ".sf")); + symlinkSync(externalSf, join(repo, ".sf")); // .gitignore blocks .sf (as ensureGitignore would do for symlink projects) writeFileSync(join(repo, ".gitignore"), ".sf\n"); @@ -2050,15 +2050,15 @@ describe("git-service", async () => { // Simulate new milestone artifacts created during execution writeFileSync( - join(externalGsd, "milestones", "M009", "M009-SUMMARY.md"), + join(externalSf, "milestones", "M009", "M009-SUMMARY.md"), "# M009 Summary", ); writeFileSync( - join(externalGsd, "milestones", "M009", 
"S01-SUMMARY.md"), + join(externalSf, "milestones", "M009", "S01-SUMMARY.md"), "# S01 Summary", ); writeFileSync( - join(externalGsd, "milestones", "M009", "T01-VERIFY.json"), + join(externalSf, "milestones", "M009", "T01-VERIFY.json"), '{"passed":true}', ); @@ -2086,7 +2086,7 @@ describe("git-service", async () => { rmSync(repo, { recursive: true, force: true }); } catch {} try { - rmSync(externalGsd, { recursive: true, force: true }); + rmSync(externalSf, { recursive: true, force: true }); } catch {} }); diff --git a/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts b/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts index f1acaed47..966555e84 100644 --- a/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts +++ b/src/resources/extensions/sf/tests/integration/gitignore-staging-2570.test.ts @@ -2,7 +2,7 @@ * gitignore-staging-2570.test.ts — Regression tests for #2570. * * Verifies that: - * 1. isGsdGitignored() detects when .sf is covered by .gitignore + * 1. isSfGitignored() detects when .sf is covered by .gitignore * 2. The rethink prompt uses {{commitInstruction}} instead of hardcoded git add .sf/ * 3. 
rethink.ts passes the correct commitInstruction based on gitignore state * @@ -22,8 +22,8 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import test from "node:test"; -// Dynamic import — isGsdGitignored is the function under test (may not exist yet during TDD red phase) -const { isGsdGitignored } = await import("../../gitignore.ts"); +// Dynamic import — isSfGitignored is the function under test (may not exist yet during TDD red phase) +const { isSfGitignored } = await import("../../gitignore.ts"); // ─── Helpers ───────────────────────────────────────────────────────── @@ -55,19 +55,19 @@ function cleanup(dir: string): void { } } -// ─── isGsdGitignored ───────────────────────────────────────────────── +// ─── isSfGitignored ───────────────────────────────────────────────── -test("isGsdGitignored returns true when .sf is in .gitignore (#2570)", (t) => { +test("isSfGitignored returns true when .sf is in .gitignore (#2570)", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); writeFileSync(join(dir, ".gitignore"), ".sf\n"); - assert.equal(isGsdGitignored(dir), true); + assert.equal(isSfGitignored(dir), true); }); -test("isGsdGitignored returns true when .sf/ (with slash) is in .gitignore", (t) => { +test("isSfGitignored returns true when .sf/ (with slash) is in .gitignore", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); @@ -76,27 +76,27 @@ test("isGsdGitignored returns true when .sf/ (with slash) is in .gitignore", (t) writeFileSync(join(dir, ".gitignore"), ".sf/\n"); // Create .sf directory so git check-ignore can match the directory-only pattern mkdirSync(join(dir, ".sf"), { recursive: true }); - assert.equal(isGsdGitignored(dir), true); + assert.equal(isSfGitignored(dir), true); }); -test("isGsdGitignored returns false when .sf is NOT in .gitignore", (t) => { +test("isSfGitignored returns false when .sf is NOT in .gitignore", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); 
writeFileSync(join(dir, ".gitignore"), "node_modules/\n"); - assert.equal(isGsdGitignored(dir), false); + assert.equal(isSfGitignored(dir), false); }); -test("isGsdGitignored returns false when no .gitignore exists", (t) => { +test("isSfGitignored returns false when no .gitignore exists", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); // No .gitignore — default - assert.equal(isGsdGitignored(dir), false); + assert.equal(isSfGitignored(dir), false); }); // ─── rethink.md prompt template ───────────────────────────────────── diff --git a/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts b/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts index abe455e2b..14b8458bf 100644 --- a/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts +++ b/src/resources/extensions/sf/tests/integration/gitignore-tracked-sf.test.ts @@ -22,7 +22,7 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import test from "node:test"; -import { ensureGitignore, hasGitTrackedGsdFiles } from "../../gitignore.ts"; +import { ensureGitignore, hasGitTrackedSfFiles } from "../../gitignore.ts"; import { migrateToExternalState } from "../../migrate-external.ts"; // ─── Helpers ───────────────────────────────────────────────────────── @@ -55,18 +55,18 @@ function cleanup(dir: string): void { } } -// ─── hasGitTrackedGsdFiles ─────────────────────────────────────────── +// ─── hasGitTrackedSfFiles ─────────────────────────────────────────── -test("hasGitTrackedGsdFiles returns false when .sf/ does not exist", (t) => { +test("hasGitTrackedSfFiles returns false when .sf/ does not exist", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); }); - assert.equal(hasGitTrackedGsdFiles(dir), false); + assert.equal(hasGitTrackedSfFiles(dir), false); }); -test("hasGitTrackedGsdFiles returns true when .sf/ has tracked files", (t) => { +test("hasGitTrackedSfFiles returns true when .sf/ has 
tracked files", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); @@ -76,10 +76,10 @@ test("hasGitTrackedGsdFiles returns true when .sf/ has tracked files", (t) => { writeFileSync(join(dir, ".sf", "PROJECT.md"), "# Test Project\n"); git(dir, "add", ".sf/PROJECT.md"); git(dir, "commit", "-m", "add sf"); - assert.equal(hasGitTrackedGsdFiles(dir), true); + assert.equal(hasGitTrackedSfFiles(dir), true); }); -test("hasGitTrackedGsdFiles returns false when .sf/ exists but is untracked", (t) => { +test("hasGitTrackedSfFiles returns false when .sf/ exists but is untracked", (t) => { const dir = makeTempRepo(); t.after(() => { cleanup(dir); @@ -88,7 +88,7 @@ test("hasGitTrackedGsdFiles returns false when .sf/ exists but is untracked", (t mkdirSync(join(dir, ".sf"), { recursive: true }); writeFileSync(join(dir, ".sf", "STATE.md"), "state\n"); // Not git-added — should return false - assert.equal(hasGitTrackedGsdFiles(dir), false); + assert.equal(hasGitTrackedSfFiles(dir), false); }); // ─── ensureGitignore — tracked .sf/ protection ───────────────────── @@ -125,19 +125,19 @@ test("ensureGitignore does NOT add .sf when .sf/ has tracked files (#1364)", (_t } }); -test("ensureGitignore adds .sf when .sf/ has NO tracked files", (_t) => { +test("ensureGitignore excludes .sf when .sf/ has NO tracked files", (_t) => { const dir = makeTempRepo(); try { // Run ensureGitignore (no .sf/ at all) ensureGitignore(dir); - // Verify .sf IS in .gitignore - const gitignore = readFileSync(join(dir, ".gitignore"), "utf-8"); - const lines = gitignore.split("\n").map((l) => l.trim()); + const exclude = readFileSync(join(dir, ".git", "info", "exclude"), "utf-8"); + const lines = exclude.split("\n").map((l) => l.trim()); assert.ok( lines.includes(".sf"), - `Expected .sf in .gitignore, but it's missing:\n${gitignore}`, + `Expected .sf in .git/info/exclude, but it's missing:\n${exclude}`, ); + assert.doesNotThrow(() => git(dir, "check-ignore", "-q", ".sf")); } finally { cleanup(dir); 
} @@ -193,7 +193,7 @@ test("ensureGitignore with tracked .sf/ does not cause git to see files as delet } }); -test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available", (_t) => { +test("hasGitTrackedSfFiles returns true (fail-safe) when git is not available", (_t) => { const dir = makeTempRepo(); try { // Create and track .sf/ files @@ -208,7 +208,7 @@ test("hasGitTrackedGsdFiles returns true (fail-safe) when git is not available", // Should fail safe — assume tracked rather than silently returning false // (The index lock causes git ls-files to fail; rev-parse also fails → true) - const result = hasGitTrackedGsdFiles(dir); + const result = hasGitTrackedSfFiles(dir); assert.equal( result, true, diff --git a/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts b/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts index 1d509e774..fa2b0c5a3 100644 --- a/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts +++ b/src/resources/extensions/sf/tests/integration/inherited-repo-home-dir.test.ts @@ -37,8 +37,8 @@ function run(cmd: string, args: string[], cwd: string): string { describe("isInheritedRepo when git root is HOME (#2393)", () => { let fakeHome: string; let stateDir: string; - let origGsdHome: string | undefined; - let origGsdStateDir: string | undefined; + let origSfHome: string | undefined; + let origSfStateDir: string | undefined; beforeEach(() => { // Create a fake HOME that is itself a git repo (dotfile manager scenario). @@ -56,18 +56,18 @@ describe("isInheritedRepo when git root is HOME (#2393)", () => { // Save and override env. Point SF_HOME at fakeHome/.sf so the // function recognizes it as the global state directory. 
- origGsdHome = process.env.SF_HOME; - origGsdStateDir = process.env.SF_STATE_DIR; + origSfHome = process.env.SF_HOME; + origSfStateDir = process.env.SF_STATE_DIR; process.env.SF_HOME = join(fakeHome, ".sf"); stateDir = mkdtempSync(join(tmpdir(), "sf-state-")); process.env.SF_STATE_DIR = stateDir; }); afterEach(() => { - if (origGsdHome !== undefined) process.env.SF_HOME = origGsdHome; + if (origSfHome !== undefined) process.env.SF_HOME = origSfHome; else delete process.env.SF_HOME; - if (origGsdStateDir !== undefined) - process.env.SF_STATE_DIR = origGsdStateDir; + if (origSfStateDir !== undefined) + process.env.SF_STATE_DIR = origSfStateDir; else delete process.env.SF_STATE_DIR; rmSync(fakeHome, { recursive: true, force: true }); @@ -147,12 +147,12 @@ describe("isInheritedRepo with stale .sf at parent git root", () => { const projectDir = join(parentRepo, "my-project"); mkdirSync(projectDir, { recursive: true }); - // Without fix: isProjectGsd(join(root, ".sf")) returns true because + // Without fix: isProjectSf(join(root, ".sf")) returns true because // the stale .sf is a real directory that isn't the global SF home, // causing isInheritedRepo to return false (false negative). // // The stale .sf at parent is still treated as a "project .sf" by - // isProjectGsd(), so the git root check at line 128 returns false. + // isProjectSf(), so the git root check at line 128 returns false. // This is the expected behavior for that check — the defense-in-depth // fix in auto-start.ts handles this case by checking for local .git. 
// diff --git a/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts b/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts index c6c355602..dfe682602 100644 --- a/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts +++ b/src/resources/extensions/sf/tests/integration/parallel-merge.test.ts @@ -562,9 +562,9 @@ test("mergeAllCompleted — by-completion order respects startedAt", async () => /** Set up a worktree DB with a milestone marked complete */ function setupWorktreeDb(basePath: string, mid: string): void { - const wtGsdDir = join(basePath, ".sf", "worktrees", mid, ".sf"); - mkdirSync(wtGsdDir, { recursive: true }); - const dbPath = join(wtGsdDir, "sf.db"); + const wtSfDir = join(basePath, ".sf", "worktrees", mid, ".sf"); + mkdirSync(wtSfDir, { recursive: true }); + const dbPath = join(wtSfDir, "sf.db"); openDatabase(dbPath); insertMilestone({ id: mid, title: `Milestone ${mid}`, status: "complete" }); updateMilestoneStatus(mid, "complete", new Date().toISOString()); diff --git a/src/resources/extensions/sf/tests/integration/paths.test.ts b/src/resources/extensions/sf/tests/integration/paths.test.ts index c8153bcd1..701f12927 100644 --- a/src/resources/extensions/sf/tests/integration/paths.test.ts +++ b/src/resources/extensions/sf/tests/integration/paths.test.ts @@ -5,7 +5,7 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import { describe, test } from "node:test"; -import { _clearGsdRootCache, sfRoot } from "../../paths.ts"; +import { _clearSfRootCache, sfRoot } from "../../paths.ts"; /** Create a tmp dir and resolve symlinks + 8.3 short names (macOS /var→/private/var, Windows RUNNER~1→runneradmin). 
*/ function tmp(): string { @@ -35,7 +35,7 @@ describe("paths", () => { const root = tmp(); try { mkdirSync(join(root, ".sf")); - _clearGsdRootCache(); + _clearSfRootCache(); const result = sfRoot(root); assert.deepStrictEqual( result, @@ -54,7 +54,7 @@ describe("paths", () => { mkdirSync(join(root, ".sf")); const sub = join(root, "src", "deep"); mkdirSync(sub, { recursive: true }); - _clearGsdRootCache(); + _clearSfRootCache(); const result = sfRoot(sub); assert.deepStrictEqual( result, @@ -74,7 +74,7 @@ describe("paths", () => { mkdirSync(join(project, ".sf"), { recursive: true }); const deep = join(project, "src", "deep"); mkdirSync(deep, { recursive: true }); - _clearGsdRootCache(); + _clearSfRootCache(); const result = sfRoot(deep); assert.deepStrictEqual( result, @@ -92,7 +92,7 @@ describe("paths", () => { initGit(root); const sub = join(root, "src"); mkdirSync(sub, { recursive: true }); - _clearGsdRootCache(); + _clearSfRootCache(); const result = sfRoot(sub); assert.deepStrictEqual( result, @@ -108,7 +108,7 @@ describe("paths", () => { const root = tmp(); try { mkdirSync(join(root, ".sf")); - _clearGsdRootCache(); + _clearSfRootCache(); const first = sfRoot(root); const second = sfRoot(root); assert.deepStrictEqual( @@ -129,7 +129,7 @@ describe("paths", () => { mkdirSync(join(outer, ".sf")); const inner = join(outer, "nested"); mkdirSync(join(inner, ".sf"), { recursive: true }); - _clearGsdRootCache(); + _clearSfRootCache(); const result = sfRoot(inner); assert.deepStrictEqual( result, diff --git a/src/resources/extensions/sf/tests/integration/token-savings.test.ts b/src/resources/extensions/sf/tests/integration/token-savings.test.ts index 8adf0f23b..ff9b2cc4c 100644 --- a/src/resources/extensions/sf/tests/integration/token-savings.test.ts +++ b/src/resources/extensions/sf/tests/integration/token-savings.test.ts @@ -198,7 +198,7 @@ console.log( const dbDecisionsContent = formatDecisionsForPrompt(scopedDecisions); const dbRequirementsContent = 
formatRequirementsForPrompt(scopedRequirements); - // ── Full-markdown equivalents (what inlineGsdRootFile would return) ── + // ── Full-markdown equivalents (what inlineSfRootFile would return) ── const fullDecisionsContent = readFileSync( join(base, ".sf", "DECISIONS.md"), "utf-8", diff --git a/src/resources/extensions/sf/tests/knowledge.test.ts b/src/resources/extensions/sf/tests/knowledge.test.ts index ba70d279f..b10007d5e 100644 --- a/src/resources/extensions/sf/tests/knowledge.test.ts +++ b/src/resources/extensions/sf/tests/knowledge.test.ts @@ -4,7 +4,7 @@ * Tests: * - KNOWLEDGE is registered in SF_ROOT_FILES * - resolveSfRootFile resolves KNOWLEDGE paths correctly - * - inlineGsdRootFile works with the KNOWLEDGE key + * - inlineSfRootFile works with the KNOWLEDGE key * - before_agent_start hook includes/omits knowledge block appropriately * - loadKnowledgeBlock merges global and project knowledge correctly */ @@ -21,7 +21,7 @@ import { import { tmpdir } from "node:os"; import { join } from "node:path"; import test from "node:test"; -import { inlineGsdRootFile, inlineKnowledgeBudgeted } from "../auto-prompts.ts"; +import { inlineSfRootFile, inlineKnowledgeBudgeted } from "../auto-prompts.ts"; import { loadKnowledgeBlock } from "../bootstrap/system-context.ts"; import { appendKnowledge } from "../files.ts"; import { resolveSfRootFile, SF_ROOT_FILES } from "../paths.ts"; @@ -80,9 +80,9 @@ test("knowledge: resolveSfRootFile returns canonical path when file does not exi rmSync(tmp, { recursive: true, force: true }); }); -// ─── inlineGsdRootFile works with knowledge.md ───────────────────────────── +// ─── inlineSfRootFile works with knowledge.md ───────────────────────────── -test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", async () => { +test("knowledge: inlineSfRootFile returns content when KNOWLEDGE.md exists", async () => { const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-")); const sfDir = join(tmp, ".sf"); mkdirSync(sfDir, 
{ recursive: true }); @@ -91,7 +91,7 @@ test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", as "# Project Knowledge\n\n## Rules\n\nK001: Use real DB", ); - const result = await inlineGsdRootFile( + const result = await inlineSfRootFile( tmp, "knowledge.md", "Project Knowledge", @@ -103,12 +103,12 @@ test("knowledge: inlineGsdRootFile returns content when KNOWLEDGE.md exists", as rmSync(tmp, { recursive: true, force: true }); }); -test("knowledge: inlineGsdRootFile returns null when KNOWLEDGE.md does not exist", async () => { +test("knowledge: inlineSfRootFile returns null when KNOWLEDGE.md does not exist", async () => { const tmp = mkdtempSync(join(tmpdir(), "sf-knowledge-")); const sfDir = join(tmp, ".sf"); mkdirSync(sfDir, { recursive: true }); - const result = await inlineGsdRootFile( + const result = await inlineSfRootFile( tmp, "knowledge.md", "Project Knowledge", @@ -306,9 +306,9 @@ test("loadKnowledgeBlock: reports globalSizeKb above 4KB threshold", () => { // helper scopes by milestone-level keywords and caps the injected size. test("inlineKnowledgeBudgeted: returns scoped H3 entries for single-H2 file", async () => { - const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); - const gsdDir = join(tmp, ".gsd"); - mkdirSync(gsdDir, { recursive: true }); + const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-"))); + const sfDir = join(tmp, ".sf"); + mkdirSync(sfDir, { recursive: true }); const content = `# Project Knowledge @@ -323,7 +323,7 @@ Use /v1/resource style versioning. ### Testing: node:test Prefer node:test over external frameworks. `; - writeFileSync(join(gsdDir, "KNOWLEDGE.md"), content); + writeFileSync(join(sfDir, "KNOWLEDGE.md"), content); const result = await inlineKnowledgeBudgeted(tmp, ["database"]); assert.ok(result !== null, "should return content"); @@ -340,9 +340,9 @@ Prefer node:test over external frameworks. 
}); test("inlineKnowledgeBudgeted: caps payload below budget for large files", async () => { - const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); - const gsdDir = join(tmp, ".gsd"); - mkdirSync(gsdDir, { recursive: true }); + const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-"))); + const sfDir = join(tmp, ".sf"); + mkdirSync(sfDir, { recursive: true }); // Build a 200KB KNOWLEDGE with 500 H3 entries all matching 'shared' const entries = Array.from( @@ -350,7 +350,7 @@ test("inlineKnowledgeBudgeted: caps payload below budget for large files", async (_, i) => `### Entry ${i}: shared topic\n${"filler text ".repeat(30)}\n`, ).join("\n"); const content = `# Project Knowledge\n\n## Patterns\n\n${entries}`; - writeFileSync(join(gsdDir, "KNOWLEDGE.md"), content); + writeFileSync(join(sfDir, "KNOWLEDGE.md"), content); const BUDGET_CHARS = 30_000; const result = await inlineKnowledgeBudgeted(tmp, ["shared"], { @@ -377,9 +377,9 @@ test("inlineKnowledgeBudgeted: caps payload below budget for large files", async }); test("inlineKnowledgeBudgeted: returns null when no KNOWLEDGE.md exists", async () => { - const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); - const gsdDir = join(tmp, ".gsd"); - mkdirSync(gsdDir, { recursive: true }); + const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-"))); + const sfDir = join(tmp, ".sf"); + mkdirSync(sfDir, { recursive: true }); const result = await inlineKnowledgeBudgeted(tmp, ["database"]); assert.strictEqual(result, null); @@ -388,11 +388,11 @@ test("inlineKnowledgeBudgeted: returns null when no KNOWLEDGE.md exists", async }); test("inlineKnowledgeBudgeted: returns null when no entries match", async () => { - const tmp = realpathSync(mkdtempSync(join(tmpdir(), "gsd-knowledge-"))); - const gsdDir = join(tmp, ".gsd"); - mkdirSync(gsdDir, { recursive: true }); + const tmp = realpathSync(mkdtempSync(join(tmpdir(), "sf-knowledge-"))); + const sfDir = join(tmp, ".sf"); + 
mkdirSync(sfDir, { recursive: true }); writeFileSync( - join(gsdDir, "KNOWLEDGE.md"), + join(sfDir, "KNOWLEDGE.md"), "# Project Knowledge\n\n## Patterns\n\n### Database\nuse it\n", ); diff --git a/src/resources/extensions/sf/tests/markdown-renderer.test.ts b/src/resources/extensions/sf/tests/markdown-renderer.test.ts index e6536e04e..ae620eadd 100644 --- a/src/resources/extensions/sf/tests/markdown-renderer.test.ts +++ b/src/resources/extensions/sf/tests/markdown-renderer.test.ts @@ -16,7 +16,7 @@ import { repairStaleRenders, } from "../markdown-renderer.ts"; import { parsePlan, parseRoadmap } from "../parsers-legacy.ts"; -import { _clearGsdRootCache, clearPathCache } from "../paths.ts"; +import { _clearSfRootCache, clearPathCache } from "../paths.ts"; import { _getAdapter, closeDatabase, @@ -52,7 +52,7 @@ function cleanupDir(dir: string): void { function clearAllCaches(): void { clearParseCache(); clearPathCache(); - _clearGsdRootCache(); + _clearSfRootCache(); invalidateStateCache(); } diff --git a/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts b/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts index b1c3022ee..a735ee458 100644 --- a/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts +++ b/src/resources/extensions/sf/tests/migrate-external-worktree.test.ts @@ -46,9 +46,9 @@ describe("migrate-external worktree guard (#2970)", () => { run(`git worktree add -b milestone/M001 ${worktreePath}`, base); // Populate worktree with a .sf directory (simulating syncSfStateToWorktree) - const worktreeGsd = join(worktreePath, ".sf"); - mkdirSync(worktreeGsd, { recursive: true }); - writeFileSync(join(worktreeGsd, "PREFERENCES.md"), "# prefs\n", "utf-8"); + const worktreeSf = join(worktreePath, ".sf"); + mkdirSync(worktreeSf, { recursive: true }); + writeFileSync(join(worktreeSf, "PREFERENCES.md"), "# prefs\n", "utf-8"); }); after(() => { diff --git a/src/resources/extensions/sf/tests/milestone-scope-classifier.test.ts 
b/src/resources/extensions/sf/tests/milestone-scope-classifier.test.ts index 816ad6fb2..de65ef90a 100644 --- a/src/resources/extensions/sf/tests/milestone-scope-classifier.test.ts +++ b/src/resources/extensions/sf/tests/milestone-scope-classifier.test.ts @@ -1,4 +1,4 @@ -// GSD-2 — #4781: classifier behavior matrix. Pure-function tests, no I/O. +// SF — #4781: classifier behavior matrix. Pure-function tests, no I/O. import assert from "node:assert/strict"; import test from "node:test"; diff --git a/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts b/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts index 4108c1804..6f601ffab 100644 --- a/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts +++ b/src/resources/extensions/sf/tests/parallel-research-dispatch.test.ts @@ -18,6 +18,7 @@ import test, { afterEach } from "node:test"; import { fileURLToPath } from "node:url"; import { resolveDispatch } from "../auto-dispatch.ts"; +import { buildParallelResearchSlicesPrompt } from "../auto-prompts.ts"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -94,6 +95,17 @@ test("dispatch: parallel-research-slices requires 2+ slices", () => { ); }); +test("dispatch: parallel-research-slices respects subagent batch limit", () => { + assert.ok( + dispatchSrc.includes("MAX_PARALLEL_RESEARCH_SLICES"), + "rule should guard the subagent max parallel task count", + ); + assert.ok( + dispatchSrc.includes("researchReadySlices.length > MAX_PARALLEL_RESEARCH_SLICES"), + "rule should fall back when too many slices are ready for one subagent call", + ); +}); + test("dispatch: parallel-research-slices respects skip_research", () => { const ruleIdx = dispatchSrc.indexOf("parallel-research-slices"); const ruleBlock = dispatchSrc.slice(ruleIdx, ruleIdx + 500); @@ -122,6 +134,27 @@ test("prompt: builds per-slice subagent prompts", () => { ); }); +test("prompt: emits deterministic worker subagent payload", async () => { + const base = 
makeTmpProject(); + const prompt = await buildParallelResearchSlicesPrompt( + "M001", + "Parallel Research Milestone", + [ + { id: "S01", title: "Alpha" }, + { id: "S02", title: "Beta" }, + ], + base, + "test-subagent-model", + ); + + assert.match(prompt, /Required `subagent` Call Payload/); + assert.match(prompt, /"agent": "worker"/); + assert.match(prompt, /"cwd":/); + assert.match(prompt, /"model": "test-subagent-model"/); + assert.match(prompt, /IMPORTANT CHILD-AGENT OVERRIDE/); + assert.doesNotMatch(prompt, /"agent": "g(?:sd)-executor"/); +}); + // ─── Template ───────────────────────────────────────────────────────────── test("template: parallel-research-slices.md has required variables", () => { @@ -130,6 +163,10 @@ test("template: parallel-research-slices.md has required variables", () => { "template should use sliceCount", ); assert.ok(templateSrc.includes("{{mid}}"), "template should use mid"); + assert.ok( + templateSrc.includes("{{subagentCall}}"), + "template should use subagentCall", + ); assert.ok( templateSrc.includes("{{subagentPrompts}}"), "template should use subagentPrompts", diff --git a/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts b/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts index 7a8bf8c45..12de7fba3 100644 --- a/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts +++ b/src/resources/extensions/sf/tests/parallel-worker-lock-contention.test.ts @@ -158,24 +158,24 @@ describe("parallel-worker-lock-contention (#2184)", () => { // ─── Bug 3: syncProjectRootToWorktree skips same-path symlinks ─────────── test("Bug 3: syncProjectRootToWorktree skips when .sf resolves to same path (symlink)", () => { const base = mkdtempSync(join(tmpdir(), "sf-symlink-sync-")); - const externalGsd = join(base, "external-sf"); + const externalSf = join(base, "external-sf"); const projectRoot = join(base, "project"); const worktreePath = join(base, "worktree"); - mkdirSync(externalGsd, { 
recursive: true }); + mkdirSync(externalSf, { recursive: true }); mkdirSync(projectRoot, { recursive: true }); mkdirSync(worktreePath, { recursive: true }); // Create the external state directory with a milestone - mkdirSync(join(externalGsd, "milestones", "M001"), { recursive: true }); + mkdirSync(join(externalSf, "milestones", "M001"), { recursive: true }); writeFileSync( - join(externalGsd, "milestones", "M001", "M001-ROADMAP.md"), + join(externalSf, "milestones", "M001", "M001-ROADMAP.md"), "# Roadmap", ); // Symlink both project and worktree .sf to the same external directory - symlinkSync(externalGsd, join(projectRoot, ".sf")); - symlinkSync(externalGsd, join(worktreePath, ".sf")); + symlinkSync(externalSf, join(projectRoot, ".sf")); + symlinkSync(externalSf, join(worktreePath, ".sf")); try { // This should NOT throw ERR_FS_CP_EINVAL — it should skip silently diff --git a/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts b/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts index 916004f33..83b6cb2e5 100644 --- a/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts +++ b/src/resources/extensions/sf/tests/post-exec-retry-bypass.test.ts @@ -17,7 +17,7 @@ import { type VerificationContext, } from "../auto-verification.ts"; import { invalidateAllCaches } from "../cache.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _getAdapter, closeDatabase, @@ -93,7 +93,7 @@ function setupTestEnvironment(): void { mkdirSync(milestonesDir, { recursive: true }); process.chdir(tempDir); - _clearGsdRootCache(); + _clearSfRootCache(); dbPath = join(sfDir, "sf.db"); openDatabase(dbPath); @@ -129,7 +129,7 @@ ${yamlLines.join("\n")} `; writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); invalidateAllCaches(); - _clearGsdRootCache(); + _clearSfRootCache(); } /** diff --git a/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts 
b/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts index e2c23368b..3b8b8717c 100644 --- a/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts +++ b/src/resources/extensions/sf/tests/pre-execution-fail-closed.test.ts @@ -16,7 +16,7 @@ import { postUnitPostVerification, } from "../auto-post-unit.ts"; import { invalidateAllCaches } from "../cache.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { closeDatabase, insertMilestone, @@ -107,7 +107,7 @@ function setupTestEnvironment(): void { mkdirSync(milestonesDir, { recursive: true }); process.chdir(tempDir); - _clearGsdRootCache(); + _clearSfRootCache(); dbPath = join(sfDir, "sf.db"); openDatabase(dbPath); @@ -143,7 +143,7 @@ ${yamlLines.join("\n")} `; writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); invalidateAllCaches(); - _clearGsdRootCache(); + _clearSfRootCache(); } /** diff --git a/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts b/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts index 370df2c30..57761c2f1 100644 --- a/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts +++ b/src/resources/extensions/sf/tests/pre-execution-pause-wiring.test.ts @@ -20,7 +20,7 @@ import { postUnitPostVerification, } from "../auto-post-unit.ts"; import { invalidateAllCaches } from "../cache.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _getAdapter, closeDatabase, @@ -135,7 +135,7 @@ function setupTestEnvironment(): void { process.chdir(tempDir); // Clear sfRoot cache so it finds the new .sf directory - _clearGsdRootCache(); + _clearSfRootCache(); // Initialize DB dbPath = join(sfDir, "sf.db"); @@ -183,7 +183,7 @@ ${yamlLines.join("\n")} writeFileSync(join(tempDir, ".sf", "PREFERENCES.md"), prefsContent); // Invalidate caches so the new preferences file is found invalidateAllCaches(); - 
_clearGsdRootCache(); + _clearSfRootCache(); } /** diff --git a/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts b/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts index 3a0746864..89831e5e4 100644 --- a/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts +++ b/src/resources/extensions/sf/tests/preferences-worktree-sync.test.ts @@ -72,15 +72,15 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => { // Functional test: create a mock source and destination, call the sync const srcBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-src-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-dst-")); - const srcGsd = join(srcBase, ".sf"); - const dstGsd = join(dstBase, ".sf"); - mkdirSync(srcGsd, { recursive: true }); - mkdirSync(dstGsd, { recursive: true }); + const srcSf = join(srcBase, ".sf"); + const dstSf = join(dstBase, ".sf"); + mkdirSync(srcSf, { recursive: true }); + mkdirSync(dstSf, { recursive: true }); try { // Write a canonical PREFERENCES.md in source writeFileSync( - join(srcGsd, "PREFERENCES.md"), + join(srcSf, "PREFERENCES.md"), "---\nversion: 1\n---\n\npost_unit_hooks:\n - name: notify\n command: echo done\n", ); @@ -90,11 +90,11 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => { // Verify PREFERENCES.md was copied assert.ok( - existsSync(join(dstGsd, "PREFERENCES.md")), + existsSync(join(dstSf, "PREFERENCES.md")), "PREFERENCES.md should be copied to worktree", ); - const content = readFileSync(join(dstGsd, "PREFERENCES.md"), "utf-8"); + const content = readFileSync(join(dstSf, "PREFERENCES.md"), "utf-8"); assert.ok( content.includes("post_unit_hooks"), "copied PREFERENCES.md should contain the hooks config", @@ -108,21 +108,21 @@ test("syncSfStateToWorktree copies canonical PREFERENCES.md", async () => { test("syncSfStateToWorktree falls back to legacy lowercase preferences.md", async () => { const srcBase = mkdtempSync(join(tmpdir(), 
"sf-wt-prefs-legacy-src-")); const dstBase = mkdtempSync(join(tmpdir(), "sf-wt-prefs-legacy-dst-")); - const srcGsd = join(srcBase, ".sf"); - const dstGsd = join(dstBase, ".sf"); - mkdirSync(srcGsd, { recursive: true }); - mkdirSync(dstGsd, { recursive: true }); + const srcSf = join(srcBase, ".sf"); + const dstSf = join(dstBase, ".sf"); + mkdirSync(srcSf, { recursive: true }); + mkdirSync(dstSf, { recursive: true }); try { writeFileSync( - join(srcGsd, "preferences.md"), + join(srcSf, "preferences.md"), "---\nversion: 1\n---\n\ngit:\n auto_push: true\n", ); const { syncSfStateToWorktree } = await import("../auto-worktree.ts"); const result = syncSfStateToWorktree(srcBase, dstBase); - const copiedEntries = readdirSync(dstGsd).filter( + const copiedEntries = readdirSync(dstSf).filter( (name) => name === "PREFERENCES.md" || name === "preferences.md", ); diff --git a/src/resources/extensions/sf/tests/preferences.test.ts b/src/resources/extensions/sf/tests/preferences.test.ts index d338dfcab..35fa2db40 100644 --- a/src/resources/extensions/sf/tests/preferences.test.ts +++ b/src/resources/extensions/sf/tests/preferences.test.ts @@ -673,15 +673,15 @@ test("experimental.rtk parses correctly from preferences markdown", () => { test("loadEffectiveSFPreferences preserves experimental prefs across global+project merge", () => { const originalCwd = process.cwd(); - const originalGsdHome = process.env.SF_HOME; + const originalSfHome = process.env.SF_HOME; const tempProject = mkdtempSync(join(tmpdir(), "sf-prefs-project-")); - const tempGsdHome = mkdtempSync(join(tmpdir(), "sf-prefs-home-")); + const tempSfHome = mkdtempSync(join(tmpdir(), "sf-prefs-home-")); try { mkdirSync(join(tempProject, ".sf"), { recursive: true }); writeFileSync( - join(tempGsdHome, "preferences.md"), + join(tempSfHome, "preferences.md"), ["---", "version: 1", "experimental:", " rtk: true", "---"].join("\n"), "utf-8", ); @@ -692,7 +692,7 @@ test("loadEffectiveSFPreferences preserves experimental prefs 
across global+proj "utf-8", ); - process.env.SF_HOME = tempGsdHome; + process.env.SF_HOME = tempSfHome; process.chdir(tempProject); const loaded = loadEffectiveSFPreferences(); @@ -701,10 +701,10 @@ test("loadEffectiveSFPreferences preserves experimental prefs across global+proj assert.equal(loaded!.preferences.git?.isolation, "none"); } finally { process.chdir(originalCwd); - if (originalGsdHome === undefined) delete process.env.SF_HOME; - else process.env.SF_HOME = originalGsdHome; + if (originalSfHome === undefined) delete process.env.SF_HOME; + else process.env.SF_HOME = originalSfHome; rmSync(tempProject, { recursive: true, force: true }); - rmSync(tempGsdHome, { recursive: true, force: true }); + rmSync(tempSfHome, { recursive: true, force: true }); } }); diff --git a/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts b/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts index 6ef248ac5..58b263f04 100644 --- a/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts +++ b/src/resources/extensions/sf/tests/project-relocation-recovery.test.ts @@ -5,7 +5,7 @@ * silent data loss. When a repo has a remote URL, the identity hash * should be based solely on the remote — making moves transparent. * - * For local-only repos (no remote), ensureGsdSymlink should detect + * For local-only repos (no remote), ensureSfSymlink should detect * orphaned state directories with a matching .sf-id marker and * recover them automatically. 
*/ @@ -28,7 +28,7 @@ import { join } from "node:path"; import { after, before, describe, test } from "node:test"; import { - ensureGsdSymlink, + ensureSfSymlink, externalProjectsRoot, readRepoMeta, repoIdentity, @@ -105,14 +105,14 @@ describe("project-relocation-recovery (#2750)", () => { rmSync(repoB, { recursive: true, force: true }); }); - test("ensureGsdSymlink reuses the same external dir after repo move (remote repo)", () => { + test("ensureSfSymlink reuses the same external dir after repo move (remote repo)", () => { const repoA = realpathSync( mkdtempSync(join(tmpdir(), "sf-reloc-reuse-a-")), ); initRepo(repoA, "https://github.com/example/reloc-reuse.git"); // Initialize SF state with some planning data - const externalA = ensureGsdSymlink(repoA); + const externalA = ensureSfSymlink(repoA); const milestonesPath = join(externalA, "milestones"); mkdirSync(milestonesPath, { recursive: true }); writeFileSync( @@ -128,8 +128,8 @@ describe("project-relocation-recovery (#2750)", () => { ); renameSync(repoA, repoB); - // ensureGsdSymlink at the new location should find the same external dir - const externalB = ensureGsdSymlink(repoB); + // ensureSfSymlink at the new location should find the same external dir + const externalB = ensureSfSymlink(repoB); assert.strictEqual( normalizePath(externalB), @@ -159,7 +159,7 @@ describe("project-relocation-recovery (#2750)", () => { const repoA = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-meta-a-"))); initRepo(repoA, "https://github.com/example/reloc-meta.git"); - const externalA = ensureGsdSymlink(repoA); + const externalA = ensureSfSymlink(repoA); const metaBefore = readRepoMeta(externalA); assert.ok(metaBefore !== null, "metadata should exist before move"); @@ -170,7 +170,7 @@ describe("project-relocation-recovery (#2750)", () => { ); renameSync(repoA, repoB); - const externalB = ensureGsdSymlink(repoB); + const externalB = ensureSfSymlink(repoB); const metaAfter = readRepoMeta(externalB); assert.ok(metaAfter !== 
null, "metadata should exist after move"); assert.strictEqual( @@ -189,16 +189,16 @@ describe("project-relocation-recovery (#2750)", () => { // ── Local-only repos: .sf-id marker provides recovery ──────────────── - test("ensureGsdSymlink writes a .sf-id marker in the project root", () => { + test("ensureSfSymlink writes a .sf-id marker in the project root", () => { const repo = realpathSync(mkdtempSync(join(tmpdir(), "sf-reloc-marker-"))); initRepo(repo); - ensureGsdSymlink(repo); + ensureSfSymlink(repo); const markerPath = join(repo, ".sf-id"); assert.ok( existsSync(markerPath), - ".sf-id marker must be written by ensureGsdSymlink", + ".sf-id marker must be written by ensureSfSymlink", ); const markerId = readFileSync(markerPath, "utf-8").trim(); @@ -220,7 +220,7 @@ describe("project-relocation-recovery (#2750)", () => { // No remote — identity includes gitRoot // Initialize SF state - const externalA = ensureGsdSymlink(repoA); + const externalA = ensureSfSymlink(repoA); mkdirSync(join(externalA, "milestones"), { recursive: true }); writeFileSync( join(externalA, "milestones", "M001.md"), @@ -245,8 +245,8 @@ describe("project-relocation-recovery (#2750)", () => { "local-only repo identity changes with move (expected)", ); - // But ensureGsdSymlink should detect .sf-id marker and recover - const externalB = ensureGsdSymlink(repoB); + // But ensureSfSymlink should detect .sf-id marker and recover + const externalB = ensureSfSymlink(repoB); assert.ok( existsSync(join(externalB, "milestones", "M001.md")), "local-only repo must recover state via .sf-id marker after move", @@ -280,7 +280,7 @@ describe("project-relocation-recovery (#2750)", () => { ); initRepo(repoA, "https://github.com/example/no-orphan.git"); - ensureGsdSymlink(repoA); + ensureSfSymlink(repoA); // Count project dirs before move const projectsDir = externalProjectsRoot(); @@ -295,7 +295,7 @@ describe("project-relocation-recovery (#2750)", () => { ); renameSync(repoA, repoB); - ensureGsdSymlink(repoB); 
+ ensureSfSymlink(repoB); const countAfter = readdirSync(projectsDir).length; assert.strictEqual( diff --git a/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts b/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts index d432157cb..e4db335e8 100644 --- a/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts +++ b/src/resources/extensions/sf/tests/repo-identity-worktree.test.ts @@ -16,8 +16,8 @@ import { join } from "node:path"; import { after, before, describe, test } from "node:test"; import { - ensureGsdSymlink, - externalGsdRoot, + ensureSfSymlink, + externalSfRoot, isInheritedRepo, readRepoMeta, repoIdentity, @@ -66,7 +66,7 @@ describe("repo-identity-worktree", () => { worktreePath = join(base, ".sf", "worktrees", "M001"); run(`git worktree add -b milestone/M001 ${worktreePath}`, base); - expectedExternalState = externalGsdRoot(base); + expectedExternalState = externalSfRoot(base); }); after(() => { @@ -76,14 +76,14 @@ describe("repo-identity-worktree", () => { rmSync(stateDir, { recursive: true, force: true }); }); - test("ensureGsdSymlink points worktree at main repo external state dir", () => { - const mainState = ensureGsdSymlink(base); + test("ensureSfSymlink points worktree at main repo external state dir", () => { + const mainState = ensureSfSymlink(base); assert.deepStrictEqual( mainState, realpathSync(join(base, ".sf")), - "ensureGsdSymlink(base) returns the current main repo .sf target", + "ensureSfSymlink(base) returns the current main repo .sf target", ); - const worktreeState = ensureGsdSymlink(worktreePath); + const worktreeState = ensureSfSymlink(worktreePath); assert.deepStrictEqual( worktreeState, expectedExternalState, @@ -101,12 +101,12 @@ describe("repo-identity-worktree", () => { ); }); - test("ensureGsdSymlink heals stale worktree symlinks", () => { + test("ensureSfSymlink heals stale worktree symlinks", () => { const staleState = join(stateDir, "projects", "stale-worktree-state"); 
mkdirSync(staleState, { recursive: true }); rmSync(join(worktreePath, ".sf"), { recursive: true, force: true }); symlinkSync(staleState, join(worktreePath, ".sf"), "junction"); - const healedState = ensureGsdSymlink(worktreePath); + const healedState = ensureSfSymlink(worktreePath); assert.deepStrictEqual( healedState, expectedExternalState, @@ -119,7 +119,7 @@ describe("repo-identity-worktree", () => { ); }); - test("ensureGsdSymlink preserves worktree .sf directories", () => { + test("ensureSfSymlink preserves worktree .sf directories", () => { rmSync(join(worktreePath, ".sf"), { recursive: true, force: true }); mkdirSync(join(worktreePath, ".sf", "milestones"), { recursive: true }); writeFileSync( @@ -127,7 +127,7 @@ describe("repo-identity-worktree", () => { "stale\n", "utf-8", ); - const preservedDirState = ensureGsdSymlink(worktreePath); + const preservedDirState = ensureSfSymlink(worktreePath); assert.deepStrictEqual( preservedDirState, join(worktreePath, ".sf"), @@ -151,9 +151,9 @@ describe("repo-identity-worktree", () => { "repoIdentity returns SF_PROJECT_ID when set", ); assert.deepStrictEqual( - externalGsdRoot(base), + externalSfRoot(base), join(stateDir, "projects", "my-project"), - "externalGsdRoot uses SF_PROJECT_ID", + "externalSfRoot uses SF_PROJECT_ID", ); delete process.env.SF_PROJECT_ID; }); @@ -181,7 +181,7 @@ describe("repo-identity-worktree", () => { ); }); - test("ensureGsdSymlink refreshes repo-meta gitRoot after repo move with fixed project id", () => { + test("ensureSfSymlink refreshes repo-meta gitRoot after repo move with fixed project id", () => { const moveRepo = realpathSync( mkdtempSync(join(tmpdir(), "sf-repo-identity-move-")), ); @@ -193,7 +193,7 @@ describe("repo-identity-worktree", () => { run('git commit -m "chore: init move repo"', moveRepo); process.env.SF_PROJECT_ID = "fixed-project"; - const fixedExternal = ensureGsdSymlink(moveRepo); + const fixedExternal = ensureSfSymlink(moveRepo); const before = 
readRepoMeta(fixedExternal); assert.ok(before !== null, "repo metadata exists before repo move"); assert.deepStrictEqual( @@ -208,7 +208,7 @@ describe("repo-identity-worktree", () => { ); renameSync(moveRepo, movedBaseRaw); const movedBase = realpathSync(movedBaseRaw); - const movedExternal = ensureGsdSymlink(movedBase); + const movedExternal = ensureSfSymlink(movedBase); assert.deepStrictEqual( realpathSync(movedExternal), realpathSync(fixedExternal), @@ -309,7 +309,7 @@ describe("repo-identity-worktree", () => { rmSync(parentRepo, { recursive: true, force: true }); }); - test("ensureGsdSymlink from subdirectory does not create .sf in subdir when git-root .sf exists (#2380)", () => { + test("ensureSfSymlink from subdirectory does not create .sf in subdir when git-root .sf exists (#2380)", () => { const repo = realpathSync( mkdtempSync(join(tmpdir(), "sf-subdir-symlink-")), ); @@ -322,22 +322,22 @@ describe("repo-identity-worktree", () => { run('git commit -m "init"', repo); // Set up .sf symlink at the git root (normal project initialisation) - ensureGsdSymlink(repo); + ensureSfSymlink(repo); assert.ok( existsSync(join(repo, ".sf")), - "root .sf exists after ensureGsdSymlink", + "root .sf exists after ensureSfSymlink", ); assert.ok( lstatSync(join(repo, ".sf")).isSymbolicLink(), "root .sf is a symlink", ); - // Create a subdirectory and call ensureGsdSymlink from there + // Create a subdirectory and call ensureSfSymlink from there const subdir = join(repo, "src", "lib"); mkdirSync(subdir, { recursive: true }); - ensureGsdSymlink(subdir); + ensureSfSymlink(subdir); - // ensureGsdSymlink should NOT create a .sf in the subdirectory + // ensureSfSymlink should NOT create a .sf in the subdirectory // because the git root already has a valid .sf symlink. 
assert.ok( !existsSync(join(subdir, ".sf")), diff --git a/src/resources/extensions/sf/tests/research-milestone-composer.test.ts b/src/resources/extensions/sf/tests/research-milestone-composer.test.ts index f0de5b42f..e6d059ad5 100644 --- a/src/resources/extensions/sf/tests/research-milestone-composer.test.ts +++ b/src/resources/extensions/sf/tests/research-milestone-composer.test.ts @@ -1,4 +1,4 @@ -// GSD-2 — #4782 phase 3 batch 2: research-milestone migrated through composer. +// SF — #4782 phase 3 batch 2: research-milestone migrated through composer. import assert from "node:assert/strict"; import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs"; @@ -16,8 +16,8 @@ import { } from "../sf-db.ts"; function makeBase(): string { - const base = mkdtempSync(join(tmpdir(), "gsd-research-ms-composer-")); - mkdirSync(join(base, ".gsd", "milestones", "M001"), { recursive: true }); + const base = mkdtempSync(join(tmpdir(), "sf-research-ms-composer-")); + mkdirSync(join(base, ".sf", "milestones", "M001"), { recursive: true }); return base; } @@ -32,7 +32,7 @@ function cleanup(base: string): void { } function seed(base: string, mid: string): void { - openDatabase(join(base, ".gsd", "gsd.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: mid, title: "Research Test", @@ -64,7 +64,7 @@ test("#4782 phase 3: buildResearchMilestonePrompt emits milestone-context then r seed(base, "M001"); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-CONTEXT.md"), + join(base, ".sf", "milestones", "M001", "M001-CONTEXT.md"), "# M001 Context\n\nA research test milestone.\n", ); @@ -103,7 +103,7 @@ test("#4782 phase 3: buildResearchMilestonePrompt still includes project + requi seed(base, "M001"); writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-CONTEXT.md"), + join(base, ".sf", "milestones", "M001", "M001-CONTEXT.md"), "# M001 Context\n", ); diff --git a/src/resources/extensions/sf/tests/run-uat-composer.test.ts 
b/src/resources/extensions/sf/tests/run-uat-composer.test.ts index 6ad2f9854..a9f2d99e5 100644 --- a/src/resources/extensions/sf/tests/run-uat-composer.test.ts +++ b/src/resources/extensions/sf/tests/run-uat-composer.test.ts @@ -1,4 +1,4 @@ -// GSD-2 — #4782 phase 3: run-uat migrated to compose context via manifest. +// SF — #4782 phase 3: run-uat migrated to compose context via manifest. // Regression test: prompt still carries the declared artifacts in the // expected shape after the migration. @@ -19,9 +19,9 @@ import { } from "../sf-db.ts"; function makeBase(): string { - const base = mkdtempSync(join(tmpdir(), "gsd-runuat-composer-")); + const base = mkdtempSync(join(tmpdir(), "sf-runuat-composer-")); mkdirSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }, ); return base; @@ -38,7 +38,7 @@ function cleanup(base: string): void { } function seed(base: string, mid: string): void { - openDatabase(join(base, ".gsd", "gsd.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: mid, title: "Test", status: "active", depends_on: [] }); upsertMilestonePlanning(mid, { title: "Test Milestone", @@ -75,10 +75,10 @@ test("#4782 phase 3: buildRunUatPrompt inlines slice UAT, slice summary, project seed(base, "M001"); // Write UAT + SUMMARY files for the slice - const uatRel = ".gsd/milestones/M001/slices/S01/S01-UAT.md"; + const uatRel = ".sf/milestones/M001/slices/S01/S01-UAT.md"; writeFileSync(join(base, uatRel), "# S01 UAT\n\n- Check X\n- Check Y\n"); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), "---\nid: S01\nparent: M001\n---\n# S01 Summary\n**One-liner**\n\n## What Happened\nShip.\n", ); @@ -114,7 +114,7 @@ test("#4782 phase 3: buildRunUatPrompt omits optional slice summary when file is seed(base, "M001"); - const 
uatRel = ".gsd/milestones/M001/slices/S01/S01-UAT.md"; + const uatRel = ".sf/milestones/M001/slices/S01/S01-UAT.md"; writeFileSync(join(base, uatRel), "# S01 UAT\n"); // No SUMMARY.md written — composer should skip the slice-summary key. diff --git a/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts b/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts index 2fddaca71..beefbca23 100644 --- a/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts +++ b/src/resources/extensions/sf/tests/sfroot-worktree-detection.test.ts @@ -6,7 +6,7 @@ * root's .sf via the git-root probe. * * The bug: when a git worktree lives at /project/.sf/worktrees/M008/, - * probeGsdRoot() runs `git rev-parse --show-toplevel` which can return the + * probeSfRoot() runs `git rev-parse --show-toplevel` which can return the * main project root (not the worktree root) depending on git version and * worktree setup. The walk-up then finds /project/.sf and returns that * instead of the worktree's own .sf path. @@ -25,19 +25,19 @@ import { tmpdir } from "node:os"; import { join } from "node:path"; import { afterEach, beforeEach, describe, test } from "node:test"; -import { _clearGsdRootCache, sfRoot } from "../paths.ts"; +import { _clearSfRootCache, sfRoot } from "../paths.ts"; describe("sfRoot() worktree detection (#2594)", () => { let projectRoot: string; - let projectGsd: string; + let projectSf: string; beforeEach(() => { - _clearGsdRootCache(); + _clearSfRootCache(); // Create a temporary project with a git repo to simulate real conditions. // realpathSync handles macOS /tmp -> /private/tmp. 
projectRoot = realpathSync(mkdtempSync(join(tmpdir(), "sfroot-wt-"))); - projectGsd = join(projectRoot, ".sf"); - mkdirSync(projectGsd, { recursive: true }); + projectSf = join(projectRoot, ".sf"); + mkdirSync(projectSf, { recursive: true }); // Initialize a git repo in the project root so git rev-parse works spawnSync("git", ["init", "--initial-branch=main"], { @@ -62,45 +62,45 @@ describe("sfRoot() worktree detection (#2594)", () => { }); afterEach(() => { - _clearGsdRootCache(); + _clearSfRootCache(); rmSync(projectRoot, { recursive: true, force: true }); }); test("returns worktree .sf when basePath is a worktree with its own .sf (fast path)", () => { // Simulates a worktree that already had copyPlanningArtifacts() run, // so it has its own .sf/ directory. - const worktreeBase = join(projectGsd, "worktrees", "M008"); - const worktreeGsd = join(worktreeBase, ".sf"); - mkdirSync(worktreeGsd, { recursive: true }); + const worktreeBase = join(projectSf, "worktrees", "M008"); + const worktreeSf = join(worktreeBase, ".sf"); + mkdirSync(worktreeSf, { recursive: true }); const result = sfRoot(worktreeBase); assert.equal( result, - worktreeGsd, - `Expected worktree .sf (${worktreeGsd}), got ${result}. ` + + worktreeSf, + `Expected worktree .sf (${worktreeSf}), got ${result}. ` + "sfRoot() should use the fast path for an existing worktree .sf.", ); }); test("returns worktree .sf path (not project root .sf) when worktree .sf does not exist yet", () => { // This is the core #2594 bug: the worktree directory exists but its .sf - // subdirectory hasn't been created yet. Without the fix, probeGsdRoot() + // subdirectory hasn't been created yet. Without the fix, probeSfRoot() // walks up from the worktree path, finds /project/.sf, and returns it. // With the fix, it detects the .sf/worktrees// pattern and returns // the worktree-local .sf path as the creation fallback. 
- const worktreeBase = join(projectGsd, "worktrees", "M008"); + const worktreeBase = join(projectSf, "worktrees", "M008"); mkdirSync(worktreeBase, { recursive: true }); // NOTE: no .sf/ inside worktreeBase const result = sfRoot(worktreeBase); const expected = join(worktreeBase, ".sf"); - // Without the fix, this returns projectGsd (/project/.sf) because the + // Without the fix, this returns projectSf (/project/.sf) because the // walk-up from worktreeBase finds it. With the fix, it returns the // worktree-local path. assert.notEqual( result, - projectGsd, + projectSf, "sfRoot() must NOT return the project root .sf when basePath is inside .sf/worktrees/", ); assert.equal( @@ -113,7 +113,7 @@ describe("sfRoot() worktree detection (#2594)", () => { test("returns worktree .sf when basePath is a real git worktree inside .sf/worktrees/", () => { // Create a real git worktree at .sf/worktrees/M010 const worktreeName = "M010"; - const worktreeBase = join(projectGsd, "worktrees", worktreeName); + const worktreeBase = join(projectSf, "worktrees", worktreeName); // Use git worktree add to create a real worktree const result = spawnSync( @@ -134,7 +134,7 @@ describe("sfRoot() worktree detection (#2594)", () => { assert.notEqual( sfResult, - projectGsd, + projectSf, "sfRoot() must NOT escape to project root .sf from inside a git worktree", ); assert.equal( @@ -152,7 +152,7 @@ describe("sfRoot() worktree detection (#2594)", () => { test("still returns project .sf for normal (non-worktree) basePath", () => { const result = sfRoot(projectRoot); - assert.equal(result, projectGsd); + assert.equal(result, projectSf); }); test("still returns project .sf for a subdirectory of the project", () => { @@ -162,7 +162,7 @@ describe("sfRoot() worktree detection (#2594)", () => { const result = sfRoot(subdir); assert.equal( result, - projectGsd, + projectSf, "Non-worktree subdirectories should still resolve to project .sf", ); }); diff --git 
a/src/resources/extensions/sf/tests/silent-catch-diagnostics.test.ts b/src/resources/extensions/sf/tests/silent-catch-diagnostics.test.ts index 0f7d72382..001a2f2ba 100644 --- a/src/resources/extensions/sf/tests/silent-catch-diagnostics.test.ts +++ b/src/resources/extensions/sf/tests/silent-catch-diagnostics.test.ts @@ -97,7 +97,7 @@ function getAutoModeFiles(): string[] { return files; } -function getGsdSourceFiles(): string[] { +function getSfSourceFiles(): string[] { const files: string[] = []; function walk(dir: string): void { @@ -251,7 +251,7 @@ describe("workflow-logger coverage (#3348)", () => { test("no empty catch blocks remain in migrated files", () => { // Combine auto-mode files + explicitly migrated files const autoFiles = getAutoModeFiles(); - const allFiles = getGsdSourceFiles(); + const allFiles = getSfSourceFiles(); const migratedPaths = new Set(autoFiles); for (const file of allFiles) { const rel = relative(sfDir, file); @@ -283,7 +283,7 @@ describe("workflow-logger coverage (#3348)", () => { }); test("catch blocks use workflow-logger instead of raw stderr/console", () => { - const files = getGsdSourceFiles(); + const files = getSfSourceFiles(); assert.ok(files.length > 0, "should find SF source files"); const violations: string[] = []; diff --git a/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts b/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts index 509f6a273..89d369e4c 100644 --- a/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts +++ b/src/resources/extensions/sf/tests/stale-worktree-cwd.test.ts @@ -20,7 +20,7 @@ import { join, sep } from "node:path"; import test from "node:test"; import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _resetServiceCache } from "../worktree.ts"; function run(command: string, cwd: string): string { @@ -79,7 +79,7 @@ test("mergeMilestoneToMain 
restores cwd to project root", () => { const originalHome = process.env.HOME; const fakeHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-fake-home-"))); process.env.HOME = fakeHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); try { @@ -122,7 +122,7 @@ test("mergeMilestoneToMain restores cwd to project root", () => { } finally { process.chdir(savedCwd); process.env.HOME = originalHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); if (tempDir && existsSync(tempDir)) { rmSync(tempDir, { recursive: true, force: true }); diff --git a/src/resources/extensions/sf/tests/start-auto-detached.test.ts b/src/resources/extensions/sf/tests/start-auto-detached.test.ts index f0334cfa5..ca876ffc5 100644 --- a/src/resources/extensions/sf/tests/start-auto-detached.test.ts +++ b/src/resources/extensions/sf/tests/start-auto-detached.test.ts @@ -5,14 +5,14 @@ import test from "node:test"; const sfDir = resolve(import.meta.dirname, ".."); -function readGsdFile(relativePath: string): string { +function readSfFile(relativePath: string): string { return readFileSync(resolve(sfDir, relativePath), "utf-8"); } test("interactive command entrypoints use startAutoDetached instead of awaiting startAuto (#3733)", () => { - const autoHandlerSrc = readGsdFile("commands/handlers/auto.ts"); - const workflowHandlerSrc = readGsdFile("commands/handlers/workflow.ts"); - const guidedFlowSrc = readGsdFile("guided-flow.ts"); + const autoHandlerSrc = readSfFile("commands/handlers/auto.ts"); + const workflowHandlerSrc = readSfFile("commands/handlers/workflow.ts"); + const guidedFlowSrc = readSfFile("guided-flow.ts"); assert.ok( autoHandlerSrc.includes('process.env.SF_HEADLESS === "1"'), @@ -46,7 +46,7 @@ test("interactive command entrypoints use startAutoDetached instead of awaiting }); test("startAutoDetached reports failures asynchronously (#3733)", () => { - const autoSrc = readGsdFile("auto.ts"); + const autoSrc = readSfFile("auto.ts"); assert.ok( 
autoSrc.includes("export function startAutoDetached"), @@ -66,8 +66,8 @@ test("startAutoDetached reports failures asynchronously (#3733)", () => { }); test("detached auto-start preserves milestone lock across pause/stop cleanup (#3733)", () => { - const autoSrc = readGsdFile("auto.ts"); - const sessionSrc = readGsdFile("auto/session.ts"); + const autoSrc = readSfFile("auto.ts"); + const sessionSrc = readSfFile("auto/session.ts"); assert.ok( autoSrc.includes("milestoneLock?: string | null"), diff --git a/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts b/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts index a67f30e73..1abe55161 100644 --- a/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts +++ b/src/resources/extensions/sf/tests/stash-pop-sf-conflict.test.ts @@ -22,7 +22,7 @@ import { join } from "node:path"; import test from "node:test"; import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _resetServiceCache } from "../worktree.ts"; // Isolate from user's global preferences (which may have git.main_branch set) @@ -33,13 +33,13 @@ test.before(() => { originalHome = process.env.HOME; fakeHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-fake-home-"))); process.env.HOME = fakeHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); }); test.after(() => { process.env.HOME = originalHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); rmSync(fakeHome, { recursive: true, force: true }); }); diff --git a/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts b/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts index f1f46dd3d..b3d65fd35 100644 --- a/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts +++ b/src/resources/extensions/sf/tests/stash-queued-context-files.test.ts @@ -27,7 +27,7 @@ import { join 
} from "node:path"; import test from "node:test"; import { createAutoWorktree, mergeMilestoneToMain } from "../auto-worktree.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _resetServiceCache } from "../worktree.ts"; // Isolate from user's global preferences (which may have git.main_branch set) @@ -38,13 +38,13 @@ test.before(() => { originalHome = process.env.HOME; fakeHome = realpathSync(mkdtempSync(join(tmpdir(), "sf-fake-home-"))); process.env.HOME = fakeHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); }); test.after(() => { process.env.HOME = originalHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); rmSync(fakeHome, { recursive: true, force: true }); }); @@ -65,7 +65,7 @@ function createTempRepo(): string { writeFileSync(join(dir, "README.md"), "# test\n"); mkdirSync(join(dir, ".sf"), { recursive: true }); writeFileSync(join(dir, ".sf", "STATE.md"), "version: 1\n"); - // In projects with tracked .sf/ files (hasGitTrackedGsdFiles=true), + // In projects with tracked .sf/ files (hasGitTrackedSfFiles=true), // .sf is NOT added to .gitignore. This means untracked files under // .sf/ are visible to --include-untracked and get swept into the // stash, destroying queued milestone CONTEXT files (#2505). 
diff --git a/src/resources/extensions/sf/tests/stop-backtrack.test.ts b/src/resources/extensions/sf/tests/stop-backtrack.test.ts index 92b28db7c..a5fbd9e70 100644 --- a/src/resources/extensions/sf/tests/stop-backtrack.test.ts +++ b/src/resources/extensions/sf/tests/stop-backtrack.test.ts @@ -45,7 +45,7 @@ function makeTempDir(prefix: string): string { return dir; } -function setupGsdDir(tmp: string): void { +function setupSfDir(tmp: string): void { mkdirSync(join(tmp, ".sf"), { recursive: true }); } @@ -53,7 +53,7 @@ function setupGsdDir(tmp: string): void { test("stop is a valid classification", () => { const tmp = makeTempDir("stop-class"); - setupGsdDir(tmp); + setupSfDir(tmp); const id = appendCapture(tmp, "stop running immediately"); markCaptureResolved( tmp, @@ -71,7 +71,7 @@ test("stop is a valid classification", () => { test("backtrack is a valid classification", () => { const tmp = makeTempDir("bt-class"); - setupGsdDir(tmp); + setupSfDir(tmp); const id = appendCapture(tmp, "restart from M003"); markCaptureResolved( tmp, @@ -91,7 +91,7 @@ test("backtrack is a valid classification", () => { test("loadStopCaptures returns unexecuted stop and backtrack captures", () => { const tmp = makeTempDir("load-stop"); - setupGsdDir(tmp); + setupSfDir(tmp); const stopId = appendCapture(tmp, "halt execution"); const btId = appendCapture(tmp, "go back to M003"); const noteId = appendCapture(tmp, "just a note"); @@ -122,7 +122,7 @@ test("loadStopCaptures returns unexecuted stop and backtrack captures", () => { test("loadBacktrackCaptures returns only backtrack captures", () => { const tmp = makeTempDir("load-bt"); - setupGsdDir(tmp); + setupSfDir(tmp); const stopId = appendCapture(tmp, "halt execution"); const btId = appendCapture(tmp, "go back to M003"); markCaptureResolved(tmp, stopId, "stop", "Halt", "User stop", "M005"); @@ -145,7 +145,7 @@ test("loadBacktrackCaptures returns only backtrack captures", () => { test("revertExecutorResolvedCaptures reverts captures 
resolved without classification", () => { const tmp = makeTempDir("revert-exec"); - setupGsdDir(tmp); + setupSfDir(tmp); const _id = appendCapture(tmp, "stop everything"); // Simulate an executor writing Status: resolved directly (no classification) @@ -168,7 +168,7 @@ test("revertExecutorResolvedCaptures reverts captures resolved without classific test("revertExecutorResolvedCaptures does NOT revert properly triaged captures", () => { const tmp = makeTempDir("revert-skip"); - setupGsdDir(tmp); + setupSfDir(tmp); const id = appendCapture(tmp, "restart from M003"); markCaptureResolved( tmp, @@ -189,7 +189,7 @@ test("revertExecutorResolvedCaptures does NOT revert properly triaged captures", test("executeBacktrack writes trigger and regression markers", () => { const tmp = makeTempDir("exec-bt"); - setupGsdDir(tmp); + setupSfDir(tmp); // Create target milestone directory mkdirSync(join(tmp, ".sf", "milestones", "M003"), { recursive: true }); @@ -231,7 +231,7 @@ test("executeBacktrack writes trigger and regression markers", () => { test("readBacktrackTrigger parses trigger file", () => { const tmp = makeTempDir("read-bt"); - setupGsdDir(tmp); + setupSfDir(tmp); mkdirSync(join(tmp, ".sf", "milestones", "M003"), { recursive: true }); executeBacktrack(tmp, "M005", { @@ -253,7 +253,7 @@ test("readBacktrackTrigger parses trigger file", () => { test("readBacktrackTrigger returns null when no trigger exists", () => { const tmp = makeTempDir("no-bt"); - setupGsdDir(tmp); + setupSfDir(tmp); const trigger = readBacktrackTrigger(tmp); assert.equal(trigger, null); rmSync(tmp, { recursive: true, force: true }); diff --git a/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts b/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts index 062345988..41f3145dd 100644 --- a/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts +++ b/src/resources/extensions/sf/tests/symlink-numbered-variants.test.ts @@ -2,7 +2,7 @@ * Tests for macOS numbered 
symlink variant cleanup (#2205). * * macOS can rename `.sf` to `.sf 2`, `.sf 3`, etc. when a directory - * already exists at the target path. ensureGsdSymlink() must detect and + * already exists at the target path. ensureSfSymlink() must detect and * remove these numbered variants so the real `.sf` symlink is always * the one in use. */ @@ -22,7 +22,7 @@ import { import { tmpdir } from "node:os"; import { join } from "node:path"; import { describe } from "node:test"; -import { ensureGsdSymlink, externalGsdRoot } from "../repo-identity.ts"; +import { ensureSfSymlink, externalSfRoot } from "../repo-identity.ts"; function run(command: string, cwd: string): string { return execSync(command, { @@ -52,11 +52,11 @@ describe("symlink-numbered-variants", async () => { run("git add README.md", base); run('git commit -m "chore: init"', base); - const externalPath = externalGsdRoot(base); + const externalPath = externalSfRoot(base); // ── Test: numbered variant directories are cleaned up ────────────── console.log( - "\n=== ensureGsdSymlink removes numbered .sf variants (#2205) ===", + "\n=== ensureSfSymlink removes numbered .sf variants (#2205) ===", ); { // Simulate macOS creating numbered variants: ".sf 2", ".sf 3" @@ -64,15 +64,15 @@ describe("symlink-numbered-variants", async () => { mkdirSync(join(base, ".sf 3"), { recursive: true }); mkdirSync(join(base, ".sf 4"), { recursive: true }); - const result = ensureGsdSymlink(base); + const result = ensureSfSymlink(base); assert.deepStrictEqual( result, externalPath, - "ensureGsdSymlink returns external path", + "ensureSfSymlink returns external path", ); assert.ok( existsSync(join(base, ".sf")), - ".sf exists after ensureGsdSymlink", + ".sf exists after ensureSfSymlink", ); assert.ok( lstatSync(join(base, ".sf")).isSymbolicLink(), @@ -95,7 +95,7 @@ describe("symlink-numbered-variants", async () => { } // ── Test: numbered variant symlinks are cleaned up ───────────────── - console.log("\n=== ensureGsdSymlink removes numbered 
symlink variants ==="); + console.log("\n=== ensureSfSymlink removes numbered symlink variants ==="); { // Clean slate rmSync(join(base, ".sf"), { recursive: true, force: true }); @@ -107,11 +107,11 @@ describe("symlink-numbered-variants", async () => { symlinkSync(externalPath, join(base, ".sf 2"), "junction"); symlinkSync(staleTarget, join(base, ".sf 3"), "junction"); - const result = ensureGsdSymlink(base); + const result = ensureSfSymlink(base); assert.deepStrictEqual( result, externalPath, - "ensureGsdSymlink returns external path when variants exist", + "ensureSfSymlink returns external path when variants exist", ); assert.ok(existsSync(join(base, ".sf")), ".sf exists"); assert.ok( @@ -131,7 +131,7 @@ describe("symlink-numbered-variants", async () => { // ── Test: real .sf directory blocks symlink, but variants still cleaned ── console.log( - "\n=== ensureGsdSymlink cleans variants even when .sf is a real directory ===", + "\n=== ensureSfSymlink cleans variants even when .sf is a real directory ===", ); { // Clean slate @@ -147,8 +147,8 @@ describe("symlink-numbered-variants", async () => { mkdirSync(join(base, ".sf 2"), { recursive: true }); mkdirSync(join(base, ".sf 3"), { recursive: true }); - const result = ensureGsdSymlink(base); - // When .sf is a real directory, ensureGsdSymlink preserves it + const result = ensureSfSymlink(base); + // When .sf is a real directory, ensureSfSymlink preserves it assert.deepStrictEqual( result, join(base, ".sf"), @@ -172,7 +172,7 @@ describe("symlink-numbered-variants", async () => { // ── Test: only numeric-suffixed variants are removed ─────────────── console.log( - "\n=== ensureGsdSymlink only removes .sf + space + digit variants ===", + "\n=== ensureSfSymlink only removes .sf + space + digit variants ===", ); rmSync(join(base, ".sf"), { recursive: true, force: true }); @@ -184,7 +184,7 @@ describe("symlink-numbered-variants", async () => { mkdirSync(join(base, ".sf 2"), { recursive: true }); mkdirSync(join(base, ".sf 
10"), { recursive: true }); - ensureGsdSymlink(base); + ensureSfSymlink(base); assert.ok( existsSync(join(base, ".sf-backup")), diff --git a/src/resources/extensions/sf/tests/tool-naming.test.ts b/src/resources/extensions/sf/tests/tool-naming.test.ts index 6981dc787..8270a6b90 100644 --- a/src/resources/extensions/sf/tests/tool-naming.test.ts +++ b/src/resources/extensions/sf/tests/tool-naming.test.ts @@ -36,6 +36,12 @@ const RENAME_MAP: Array<{ canonical: string; alias: string }> = [ { canonical: "sf_validate_milestone", alias: "sf_milestone_validate" }, ]; +const EXTRA_DB_TOOLS = [ + "sf_self_report", + "sf_skip_slice", + "sf_save_gate_result", +] as const; + // ─── Registration count ────────────────────────────────────────────────────── console.log("\n── Tool naming: registration count ──"); @@ -45,8 +51,8 @@ registerDbTools(pi); assert.deepStrictEqual( pi.tools.length, - 30, - "Should register exactly 30 tools (14 canonical + 14 aliases + 1 gate tool + 1 sf_skip_slice)", + RENAME_MAP.length * 2 + EXTRA_DB_TOOLS.length, + "Should register all canonical tools, aliases, and non-aliased DB helpers", ); // ─── Both names exist for each pair ────────────────────────────────────────── @@ -67,6 +73,13 @@ for (const { canonical, alias } of RENAME_MAP) { ); } +for (const name of EXTRA_DB_TOOLS) { + assert.ok( + pi.tools.some((t: any) => t.name === name), + `Extra DB tool "${name}" should be registered`, + ); +} + // ─── Execute function identity ─────────────────────────────────────────────── console.log("\n── Tool naming: execute function identity (===) ──"); diff --git a/src/resources/extensions/sf/tests/unit-context-composer.test.ts b/src/resources/extensions/sf/tests/unit-context-composer.test.ts index af709a398..0c6d801d4 100644 --- a/src/resources/extensions/sf/tests/unit-context-composer.test.ts +++ b/src/resources/extensions/sf/tests/unit-context-composer.test.ts @@ -1,4 +1,4 @@ -// GSD-2 — #4782 phase 2 composer tests. 
Pure-function tests using mock +// SF — #4782 phase 2 composer tests. Pure-function tests using mock // resolvers plus an integration check that reassess-roadmap's migrated // builder produces a prompt matching expectations. @@ -101,9 +101,9 @@ test("#4782 composer: manifestBudgetChars returns declared budget", () => { // ─── Integration: migrated buildReassessRoadmapPrompt ───────────────────── function makeFixtureBase(): string { - const base = mkdtempSync(join(tmpdir(), "gsd-composer-pilot-")); + const base = mkdtempSync(join(tmpdir(), "sf-composer-pilot-")); mkdirSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "tasks"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "tasks"), { recursive: true }, ); return base; @@ -120,7 +120,7 @@ function cleanup(base: string): void { } function seed(base: string, mid: string): void { - openDatabase(join(base, ".gsd", "gsd.db")); + openDatabase(join(base, ".sf", "sf.db")); insertMilestone({ id: mid, title: "Test", status: "active", depends_on: [] }); upsertMilestonePlanning(mid, { title: "Test", @@ -151,11 +151,11 @@ function seed(base: string, mid: string): void { function writeArtifacts(base: string): void { writeFileSync( - join(base, ".gsd", "milestones", "M001", "M001-ROADMAP.md"), + join(base, ".sf", "milestones", "M001", "M001-ROADMAP.md"), "# M001\n## Slices\n- [x] **S01: First** `risk:low` `depends:[]`\n", ); writeFileSync( - join(base, ".gsd", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), + join(base, ".sf", "milestones", "M001", "slices", "S01", "S01-SUMMARY.md"), "---\nid: S01\nparent: M001\n---\n# S01 Summary\n**One-liner**\n\n## What Happened\nDone.\n", ); } diff --git a/src/resources/extensions/sf/tests/unit-context-manifest.test.ts b/src/resources/extensions/sf/tests/unit-context-manifest.test.ts index 389f35051..c57fad4e9 100644 --- a/src/resources/extensions/sf/tests/unit-context-manifest.test.ts +++ b/src/resources/extensions/sf/tests/unit-context-manifest.test.ts 
@@ -1,4 +1,4 @@ -// GSD-2 — #4782 phase 1: schema tests + CI coverage guard for manifests. +// SF — #4782 phase 1: schema tests + CI coverage guard for manifests. import assert from "node:assert/strict"; import { readFileSync } from "node:fs"; @@ -167,15 +167,17 @@ test("#4782 phase 1: complete-milestone manifest declares slice-summary as excer ); }); -// ─── Phase-2 target: reassess-roadmap manifest is the tightest budget ──── +// ─── Phase-2 target: reassess-roadmap manifest remains below execution units ──── -test("#4782 phase 1: reassess-roadmap manifest has the smallest budget among manifests", () => { +test("#4782 phase 1: reassess-roadmap manifest budget stays below execution units", () => { const m = UNIT_MANIFESTS["reassess-roadmap"]; - for (const [unitType, other] of Object.entries(UNIT_MANIFESTS)) { - if (unitType === "reassess-roadmap") continue; - assert.ok( - m.maxSystemPromptChars <= other.maxSystemPromptChars, - `reassess-roadmap budget (${m.maxSystemPromptChars}) should be ≤ ${unitType} budget (${other.maxSystemPromptChars})`, - ); - } + assert.ok( + m.maxSystemPromptChars <= UNIT_MANIFESTS["execute-task"].maxSystemPromptChars, + `reassess-roadmap budget (${m.maxSystemPromptChars}) should stay ≤ execute-task budget (${UNIT_MANIFESTS["execute-task"].maxSystemPromptChars})`, + ); + assert.ok( + m.maxSystemPromptChars <= + UNIT_MANIFESTS["reactive-execute"].maxSystemPromptChars, + `reassess-roadmap budget (${m.maxSystemPromptChars}) should stay ≤ reactive-execute budget (${UNIT_MANIFESTS["reactive-execute"].maxSystemPromptChars})`, + ); }); diff --git a/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts b/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts index 303e59efd..bd15e02c4 100644 --- a/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts +++ b/src/resources/extensions/sf/tests/validate-milestone-stuck-guard.test.ts @@ -11,7 +11,7 @@ import { type VerificationContext, } from 
"../auto-verification.ts"; import { invalidateAllCaches } from "../cache.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { closeDatabase, insertMilestone, @@ -67,7 +67,7 @@ function setupTestEnvironment(): void { mkdirSync(milestoneDir, { recursive: true }); process.chdir(tempDir); - _clearGsdRootCache(); + _clearSfRootCache(); dbPath = join(tempDir, ".sf", "sf.db"); openDatabase(dbPath); diff --git a/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts b/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts index ab4562601..fd5c1994c 100644 --- a/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts +++ b/src/resources/extensions/sf/tests/worktree-db-respawn-truncation.test.ts @@ -126,33 +126,33 @@ describe("worktree-db-respawn-truncation (#2815)", async () => { // Create an empty (0-byte) sf.db plus orphaned WAL and SHM files — // this is the exact state that causes Node 24 node:sqlite CPU spin (#2478). 
- const wtGsd = join(wtBase, ".sf"); - writeFileSync(join(wtGsd, "sf.db"), ""); - writeFileSync(join(wtGsd, "sf.db-wal"), Buffer.alloc(605672, 0xaa)); - writeFileSync(join(wtGsd, "sf.db-shm"), Buffer.alloc(32768, 0xbb)); + const wtSf = join(wtBase, ".sf"); + writeFileSync(join(wtSf, "sf.db"), ""); + writeFileSync(join(wtSf, "sf.db-wal"), Buffer.alloc(605672, 0xaa)); + writeFileSync(join(wtSf, "sf.db-shm"), Buffer.alloc(32768, 0xbb)); - assert.ok(existsSync(join(wtGsd, "sf.db")), "sf.db exists before sync"); + assert.ok(existsSync(join(wtSf, "sf.db")), "sf.db exists before sync"); assert.ok( - existsSync(join(wtGsd, "sf.db-wal")), + existsSync(join(wtSf, "sf.db-wal")), "sf.db-wal exists before sync", ); assert.ok( - existsSync(join(wtGsd, "sf.db-shm")), + existsSync(join(wtSf, "sf.db-shm")), "sf.db-shm exists before sync", ); syncProjectRootToWorktree(mainBase, wtBase, "M001"); assert.ok( - !existsSync(join(wtGsd, "sf.db")), + !existsSync(join(wtSf, "sf.db")), "#2478: empty sf.db must be deleted", ); assert.ok( - !existsSync(join(wtGsd, "sf.db-wal")), + !existsSync(join(wtSf, "sf.db-wal")), "#2478: orphaned sf.db-wal must be deleted alongside sf.db", ); assert.ok( - !existsSync(join(wtGsd, "sf.db-shm")), + !existsSync(join(wtSf, "sf.db-shm")), "#2478: orphaned sf.db-shm must be deleted alongside sf.db", ); } finally { @@ -176,28 +176,28 @@ describe("worktree-db-respawn-truncation (#2815)", async () => { // Orphaned WAL/SHM with NO sf.db at all — can happen from a previous // partial cleanup. These must still be cleaned up. 
- const wtGsd = join(wtBase, ".sf"); - writeFileSync(join(wtGsd, "sf.db-wal"), Buffer.alloc(1024, 0xaa)); - writeFileSync(join(wtGsd, "sf.db-shm"), Buffer.alloc(1024, 0xbb)); + const wtSf = join(wtBase, ".sf"); + writeFileSync(join(wtSf, "sf.db-wal"), Buffer.alloc(1024, 0xaa)); + writeFileSync(join(wtSf, "sf.db-shm"), Buffer.alloc(1024, 0xbb)); - assert.ok(!existsSync(join(wtGsd, "sf.db")), "sf.db does not exist"); + assert.ok(!existsSync(join(wtSf, "sf.db")), "sf.db does not exist"); assert.ok( - existsSync(join(wtGsd, "sf.db-wal")), + existsSync(join(wtSf, "sf.db-wal")), "orphaned sf.db-wal exists", ); assert.ok( - existsSync(join(wtGsd, "sf.db-shm")), + existsSync(join(wtSf, "sf.db-shm")), "orphaned sf.db-shm exists", ); syncProjectRootToWorktree(mainBase, wtBase, "M001"); assert.ok( - !existsSync(join(wtGsd, "sf.db-wal")), + !existsSync(join(wtSf, "sf.db-wal")), "#2478: orphaned sf.db-wal must be deleted even without main db file", ); assert.ok( - !existsSync(join(wtGsd, "sf.db-shm")), + !existsSync(join(wtSf, "sf.db-shm")), "#2478: orphaned sf.db-shm must be deleted even without main db file", ); } finally { diff --git a/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts b/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts index 11701bbf2..81faf37e4 100644 --- a/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts +++ b/src/resources/extensions/sf/tests/worktree-db-same-file.test.ts @@ -43,9 +43,9 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { }); test("returns zero result when both paths resolve to the same file", () => { - const mainGsd = join(tmpDir, "main", ".sf"); - mkdirSync(mainGsd, { recursive: true }); - const mainDbPath = join(mainGsd, "sf.db"); + const mainSf = join(tmpDir, "main", ".sf"); + mkdirSync(mainSf, { recursive: true }); + const mainDbPath = join(mainSf, "sf.db"); // Create a real DB at mainDbPath openDatabase(mainDbPath); @@ -62,10 +62,10 @@ describe("#2823: reconcileWorktreeDb 
same-file guard", () => { }); // Create a worktree path that resolves to the same file via symlink - const wtGsd = join(tmpDir, "worktree", ".sf"); + const wtSf = join(tmpDir, "worktree", ".sf"); mkdirSync(join(tmpDir, "worktree"), { recursive: true }); - symlinkSync(mainGsd, wtGsd, "junction"); - const worktreeDbPath = join(wtGsd, "sf.db"); + symlinkSync(mainSf, wtSf, "junction"); + const worktreeDbPath = join(wtSf, "sf.db"); // Both paths exist and resolve to the same physical file assert.ok(existsSync(mainDbPath), "main DB exists"); @@ -84,9 +84,9 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { }); test("returns zero result when both paths are identical strings", () => { - const mainGsd = join(tmpDir, "project", ".sf"); - mkdirSync(mainGsd, { recursive: true }); - const dbPath = join(mainGsd, "sf.db"); + const mainSf = join(tmpDir, "project", ".sf"); + mkdirSync(mainSf, { recursive: true }); + const dbPath = join(mainSf, "sf.db"); openDatabase(dbPath); insertDecision({ @@ -110,9 +110,9 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { test("still reconciles when paths are genuinely different files", () => { // Main DB - const mainGsd = join(tmpDir, "main", ".sf"); - mkdirSync(mainGsd, { recursive: true }); - const mainDbPath = join(mainGsd, "sf.db"); + const mainSf = join(tmpDir, "main", ".sf"); + mkdirSync(mainSf, { recursive: true }); + const mainDbPath = join(mainSf, "sf.db"); openDatabase(mainDbPath); insertDecision({ @@ -129,9 +129,9 @@ describe("#2823: reconcileWorktreeDb same-file guard", () => { closeDatabase(); // Create a separate worktree DB with different data - const wtGsd = join(tmpDir, "worktree", ".sf"); - mkdirSync(wtGsd, { recursive: true }); - const worktreeDbPath = join(wtGsd, "sf.db"); + const wtSf = join(tmpDir, "worktree", ".sf"); + mkdirSync(wtSf, { recursive: true }); + const worktreeDbPath = join(wtSf, "sf.db"); openDatabase(worktreeDbPath); insertDecision({ diff --git 
a/src/resources/extensions/sf/tests/worktree-integration.test.ts b/src/resources/extensions/sf/tests/worktree-integration.test.ts index e1bfd9cfb..3b3567583 100644 --- a/src/resources/extensions/sf/tests/worktree-integration.test.ts +++ b/src/resources/extensions/sf/tests/worktree-integration.test.ts @@ -21,7 +21,7 @@ import { import { tmpdir } from "node:os"; import { join } from "node:path"; import { describe } from "node:test"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { deriveState } from "../state.ts"; import { _resetServiceCache, @@ -93,7 +93,7 @@ describe("worktree-integration", async () => { const originalHome = process.env.HOME; const fakeHome = mkdtempSync(join(tmpdir(), "sf-fake-home-")); process.env.HOME = fakeHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); // ── Verify main tree baseline ────────────────────────────────────────────── @@ -307,7 +307,7 @@ describe("worktree-integration", async () => { // Restore HOME and reset caches process.env.HOME = originalHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); rmSync(fakeHome, { recursive: true, force: true }); }); diff --git a/src/resources/extensions/sf/tests/worktree.test.ts b/src/resources/extensions/sf/tests/worktree.test.ts index 882448c03..5ae86612b 100644 --- a/src/resources/extensions/sf/tests/worktree.test.ts +++ b/src/resources/extensions/sf/tests/worktree.test.ts @@ -13,7 +13,7 @@ import { join } from "node:path"; import { describe } from "node:test"; import { readIntegrationBranch } from "../git-service.ts"; import { _resetHasChangesCache } from "../native-git-bridge.ts"; -import { _clearGsdRootCache } from "../paths.ts"; +import { _clearSfRootCache } from "../paths.ts"; import { _resetServiceCache, autoCommitCurrentBranch, @@ -266,7 +266,7 @@ describe("worktree", async () => { const originalHome = process.env.HOME; const fakeHome = mkdtempSync(join(tmpdir(), 
"sf-fake-home-")); process.env.HOME = fakeHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); try { @@ -287,7 +287,7 @@ describe("worktree", async () => { ); } finally { process.env.HOME = originalHome; - _clearGsdRootCache(); + _clearSfRootCache(); _resetServiceCache(); rmSync(fakeHome, { recursive: true, force: true }); } diff --git a/src/resources/extensions/sf/tests/zombie-sf-state.test.ts b/src/resources/extensions/sf/tests/zombie-sf-state.test.ts index b2287c041..be81733ce 100644 --- a/src/resources/extensions/sf/tests/zombie-sf-state.test.ts +++ b/src/resources/extensions/sf/tests/zombie-sf-state.test.ts @@ -74,29 +74,27 @@ const autoStartSrc = readFileSync( "utf-8", ); -// After ensureGsdSymlink, the code that creates milestones/ must check for -// the milestones directory specifically (not .sf/ which ensureGsdSymlink already created). -const symlinkIdx = autoStartSrc.indexOf("ensureGsdSymlink(base)"); -assertTrue(symlinkIdx >= 0, "auto-start.ts calls ensureGsdSymlink(base)"); +// After ensureSfSymlink, the code that creates milestones/ must check for +// the milestones directory specifically (not .sf/ which ensureSfSymlink already created). +const symlinkIdx = autoStartSrc.indexOf("ensureSfSymlink(base)"); +assertTrue(symlinkIdx >= 0, "auto-start.ts calls ensureSfSymlink(base)"); const afterSymlink = - symlinkIdx >= 0 ? autoStartSrc.slice(symlinkIdx, symlinkIdx + 800) : ""; + symlinkIdx >= 0 ? autoStartSrc.slice(symlinkIdx) : ""; // The milestones bootstrap must check milestones path, not sfDir // Old (dead) code: if (!existsSync(sfDir)) { mkdirSync(join(sfDir, "milestones"), ...) 
} // Fixed code should check: if (!existsSync(milestonesPath)) or similar assertTrue( afterSymlink.includes("milestones") && afterSymlink.includes("mkdirSync"), - "auto-start.ts creates milestones/ directory after ensureGsdSymlink (#2942)", + "auto-start.ts creates milestones/ directory after ensureSfSymlink (#2942)", ); // The guard for milestones/ creation should NOT be `!existsSync(sfDir)` — -// that's dead code since ensureGsdSymlink already created sfDir. +// that's dead code since ensureSfSymlink already created sfDir. // It should check for the milestones/ dir directly. -const mkdirRegion = afterSymlink.slice( - 0, - afterSymlink.indexOf("mkdirSync") + 200, -); +const bootstrapIdx = afterSymlink.indexOf("const milestonesPath"); +const mkdirRegion = bootstrapIdx >= 0 ? afterSymlink.slice(bootstrapIdx, bootstrapIdx + 300) : ""; assertMatch( mkdirRegion, /existsSync\([^)]*milestones/, diff --git a/src/resources/extensions/sf/tools/complete-slice.ts b/src/resources/extensions/sf/tools/complete-slice.ts index 88022fccc..7b56901e9 100644 --- a/src/resources/extensions/sf/tools/complete-slice.ts +++ b/src/resources/extensions/sf/tools/complete-slice.ts @@ -700,19 +700,19 @@ export async function handleCompleteSlice( dir: string, ) => Promise<{ nodes: unknown[]; edges: unknown[]; builtAt: string }>; writeGraph: (sfRoot: string, graph: unknown) => Promise<void>; - resolveGsdRoot: (basePath: string) => string; + resolveSfRoot: (basePath: string) => string; }>; if ( typeof graphMod.buildGraph !== "function" || typeof graphMod.writeGraph !== "function" || - typeof graphMod.resolveGsdRoot !== "function" + typeof graphMod.resolveSfRoot !== "function" ) { throw new Error( "graph helpers unavailable from @singularity-forge/mcp-server", ); } const g = await graphMod.buildGraph(basePath); - await graphMod.writeGraph(graphMod.resolveGsdRoot(basePath), g); + await graphMod.writeGraph(graphMod.resolveSfRoot(basePath), g); } catch (graphErr) { // Graph rebuild is best-effort — log at
warning level but never propagate logWarning( diff --git a/src/resources/extensions/sf/unit-context-composer.ts b/src/resources/extensions/sf/unit-context-composer.ts index 141cec8af..bd600213e 100644 --- a/src/resources/extensions/sf/unit-context-composer.ts +++ b/src/resources/extensions/sf/unit-context-composer.ts @@ -1,4 +1,4 @@ -// GSD-2 — UnitContextComposer (#4782 phase 2). +// SF — UnitContextComposer (#4782 phase 2). // // Reads a unit type's manifest and orchestrates artifact inlining through // a caller-provided resolver. Returns a joined context block suitable for diff --git a/src/resources/extensions/sf/unit-context-manifest.ts b/src/resources/extensions/sf/unit-context-manifest.ts index 695f971a0..50753eab4 100644 --- a/src/resources/extensions/sf/unit-context-manifest.ts +++ b/src/resources/extensions/sf/unit-context-manifest.ts @@ -1,4 +1,4 @@ -// GSD-2 — UnitContextManifest (#4782 phase 1). +// SF — UnitContextManifest (#4782 phase 1). // // Declarative description of what context each auto-mode unit type needs // in its system prompt. Establishes the contract that later phases will @@ -106,15 +106,15 @@ export type PreferencesPolicy = "none" | "active-only" | "full"; * - "read-only" — Read tools only. No file mutation. No shell. No subagent * dispatch. Reserved for future units that should be * strictly observational (none today). - * - "planning" — Read tools always; writes restricted to .gsd/** under + * - "planning" — Read tools always; writes restricted to .sf/** under * basePath; Bash limited to a per-unit safe allowlist; * Task subagent dispatch denied. Catches the bug class * where a discuss-milestone turn modifies user source * files (forensics: ~/Github/test-apps/b23, #4934). - * - "docs" — Read tools always; writes restricted to .gsd/** AND + * - "docs" — Read tools always; writes restricted to .sf/** AND * the explicit `allowedPathGlobs` set; Bash safe-allowlist; * no subagents. 
Reserved for rewrite-docs, which legitimately - * edits project markdown outside .gsd/. + * edits project markdown outside .sf/. * * The allowlist for "docs" is declared per-manifest rather than hardcoded so * projects with non-standard doc layouts can extend it without forking the diff --git a/src/resources/extensions/sf/uok/plan-v2.ts b/src/resources/extensions/sf/uok/plan-v2.ts index 74c520325..61f7753d2 100644 --- a/src/resources/extensions/sf/uok/plan-v2.ts +++ b/src/resources/extensions/sf/uok/plan-v2.ts @@ -48,7 +48,7 @@ function hasFileContent(path: string | null): boolean { function getArtifactLookupBases(basePath: string): string[] { const bases = [basePath]; - const projectRoot = process.env.GSD_PROJECT_ROOT; + const projectRoot = process.env.SF_PROJECT_ROOT; if ( projectRoot && projectRoot.trim().length > 0 && diff --git a/src/resources/extensions/sf/workflow-templates.ts b/src/resources/extensions/sf/workflow-templates.ts index 1f47de814..f23d0e153 100644 --- a/src/resources/extensions/sf/workflow-templates.ts +++ b/src/resources/extensions/sf/workflow-templates.ts @@ -10,7 +10,7 @@ import { homedir } from "node:os"; import { dirname, join } from "node:path"; import { fileURLToPath } from "node:url"; -const __extensionDir = resolveGsdExtensionDir(); +const __extensionDir = resolveSfExtensionDir(); const registryPath = join( __extensionDir, "workflow-templates", @@ -18,12 +18,12 @@ const registryPath = join( ); /** Resolve the SF extension dir with fallback to ~/.sf/agent/extensions/sf/. 
*/ -function resolveGsdExtensionDir(): string { +function resolveSfExtensionDir(): string { const moduleDir = dirname(fileURLToPath(import.meta.url)); if (existsSync(join(moduleDir, "workflow-templates"))) return moduleDir; const sfHome = process.env.SF_HOME || join(homedir(), ".sf"); - const agentGsdDir = join(sfHome, "agent", "extensions", "sf"); - if (existsSync(join(agentGsdDir, "workflow-templates"))) return agentGsdDir; + const agentSfDir = join(sfHome, "agent", "extensions", "sf"); + if (existsSync(join(agentSfDir, "workflow-templates"))) return agentSfDir; return moduleDir; } diff --git a/src/resources/extensions/subagent/index.ts b/src/resources/extensions/subagent/index.ts index aa0faf514..4598ebebb 100644 --- a/src/resources/extensions/subagent/index.ts +++ b/src/resources/extensions/subagent/index.ts @@ -46,6 +46,29 @@ const MAX_PARALLEL_TASKS = 8; const MAX_CONCURRENCY = 4; const COLLAPSED_ITEM_COUNT = 10; const liveSubagentProcesses = new Set(); +const AGENT_ALIASES: Record = { + default: "worker", + ["g" + "sd-executor"]: "worker", + "sf-worker": "worker", + "sf-scout": "scout", + "sf-reviewer": "reviewer", +}; + +function resolveAgentByName( + agents: AgentConfig[], + agentName: string, +): { agent: AgentConfig | undefined; effectiveName: string } { + const direct = agents.find((a) => a.name === agentName); + if (direct) return { agent: direct, effectiveName: agentName }; + + const alias = AGENT_ALIASES[agentName]; + if (!alias) return { agent: undefined, effectiveName: agentName }; + + return { + agent: agents.find((a) => a.name === alias), + effectiveName: alias, + }; +} async function stopLiveSubagents(): Promise { const active = Array.from(liveSubagentProcesses); @@ -918,10 +941,12 @@ function resolveSubagentLaunchSpec(args: string[]): SubagentLaunchSpec { if (sfBinPath && path.basename(sfBinPath) === "sf-from-source") { const sourceRoot = path.resolve(path.dirname(sfBinPath), ".."); - const sourceBinPath = path.join(sourceRoot, "bin", 
"sf-from-source"); - env.SF_BIN_PATH = sourceBinPath; - env.SF_CLI_PATH = env.SF_CLI_PATH || sourceBinPath; - envPatch.SF_BIN_PATH = sourceBinPath; + // Use dist/loader.js for SF_BIN_PATH so subagent gets a proper Node.js entry point, + // not the bash shim which Node cannot execute as a module. + const distLoaderPath = path.join(sourceRoot, "dist", "loader.js"); + env.SF_BIN_PATH = distLoaderPath; + env.SF_CLI_PATH = env.SF_CLI_PATH || path.join(sourceRoot, "bin", "sf-from-source"); + envPatch.SF_BIN_PATH = distLoaderPath; envPatch.SF_CLI_PATH = env.SF_CLI_PATH; return { command, @@ -1075,7 +1100,7 @@ async function runSingleAgent( makeDetails: (results: SingleResult[]) => SubagentDetails, modelOverride?: string, ): Promise { - const agent = agents.find((a) => a.name === agentName); + const { agent, effectiveName } = resolveAgentByName(agents, agentName); if (!agent) { const available = agents.map((a) => `"${a.name}"`).join(", ") || "none"; @@ -1128,7 +1153,7 @@ async function runSingleAgent( let tmpPromptPath: string | null = null; const currentResult: SingleResult = { - agent: agentName, + agent: effectiveName, agentSource: agent.source, task, exitCode: 0, @@ -1271,7 +1296,7 @@ async function runSingleAgentInCmuxSplit( makeDetails: (results: SingleResult[]) => SubagentDetails, modelOverride?: string, ): Promise { - const agent = agents.find((a) => a.name === agentName); + const { agent, effectiveName } = resolveAgentByName(agents, agentName); if (!agent) { return runSingleAgent( defaultCwd, @@ -1292,7 +1317,7 @@ async function runSingleAgentInCmuxSplit( let tmpOutputDir: string | null = null; const currentResult: SingleResult = { - agent: agentName, + agent: effectiveName, agentSource: agent.source, task, exitCode: 0, @@ -2251,6 +2276,148 @@ export default function (pi: ExtensionAPI) { }, }); + // ── Scout Tool ───────────────────────────────────────────────────────────────── + // Wraps `sift search --agent` for Planner → Scout → Worker pipeline. 
+ // The Scout subagent is a thin wrapper around sift's autonomous corpus exploration. + // Planner calls call_scout with a query; sift explores and returns snippet-bearing evidence. + + const CallScoutParams = Type.Object({ + query: Type.String({ + description: + "Natural-language query describing what to explore (e.g. 'find where the write gate tool_call hooks are registered')", + }), + scope: Type.Optional( + Type.String({ + description: + "Path to search within. Defaults to the current working directory. Use the active worktree for isolation.", + }), + ), + strategy: Type.Optional( + Type.String({ + description: + "Search strategy: 'path-hybrid' (default), 'page-index-hybrid', 'bm25', or 'path'", + }), + ), + }); + + pi.registerTool({ + name: "call_scout", + label: "Scout", + description: [ + "Explore the codebase using sift's autonomous agent loop.", + " Spawns sift search --agent with the given query, returns snippet-bearing evidence.", + " Use this instead of grep/read when you need to understand the architecture", + " of an unfamiliar subsystem — sift's autonomous loop expands queries and", + " finds relevant code without you needing to know file paths ahead of time.", + " Planner calls this before writing an execution plan.", + ].join(""), + promptGuidelines: [ + "call_scout is for exploration only — it does not write or modify files.", + " Be specific in your query: name functions, files, or concepts you expect to find.", + " Use the scope param to restrict search to a specific worktree or subsystem.", + " Review the returned evidence before planning — it may reveal things you missed.", + ], + parameters: CallScoutParams, + + async execute(_toolCallId, params, signal) { + const scope = params.scope ?? process.cwd(); + const strategy = params.strategy ?? "path-hybrid"; + const query = params.query; + + // Resolve sift binary — check PATH first, then fall back to ~/.cargo/bin + const siftBin = (() => { + const pathEnv = process.env.PATH ?? 
""; + for (const dir of pathEnv.split(path.delimiter)) { + const candidate = path.join(dir, "sift"); + try { + if (fs.existsSync(candidate)) return candidate; + } catch { + // continue + } + } + // Fallback to known install location + const homeBin = path.join(os.homedir(), ".cargo", "bin", "sift"); + return fs.existsSync(homeBin) ? homeBin : "sift"; + })(); + + const args = [ + "search", + "--strategy", + strategy, + "--agent", + query, + scope, + ]; + + const stderr: string[] = []; + const stdout: string[] = []; + + const proc = spawn(siftBin, args, { + cwd: scope, + shell: false, + stdio: ["ignore", "pipe", "pipe"], + }); + + // Collect output + proc.stdout.on("data", (chunk) => stdout.push(chunk.toString())); + proc.stderr.on("data", (chunk) => stderr.push(chunk.toString())); + + // Handle abort signal + if (signal) { + signal.addEventListener("abort", () => { + try { + proc.kill("SIGTERM"); + } catch { + // ignore + } + }); + } + + const exitCode = await new Promise((resolve) => { + proc.on("close", (code) => resolve(code ?? 0)); + proc.on("error", () => resolve(1)); + }); + + const out = stdout.join(""); + const err = stderr.join("").trim(); + + if (exitCode !== 0 && !out) { + const hint = + err.includes("not found") || err.includes("No such file") + ? "\n\nHint: sift is not installed. Run: uv tool install sift" + : err + ? `\n\nsift stderr: ${err.slice(0, 500)}` + : ""; + return { + content: [ + { + type: "text", + text: `call_scout failed (exit ${exitCode}). 
Is sift installed?${hint}`, + }, + ], + details: { operation: "call_scout", exitCode, siftBin, query, scope, strategy } as Record, + }; + } + + return { + content: [ + { + type: "text", + text: out || "(sift returned no output)", + }, + ], + details: { + operation: "call_scout", + query, + scope, + strategy, + exitCode, + siftBin, + }, + }; + }, + }); + pi.registerTool({ name: "await_subagent", label: "Await Subagent", @@ -2367,4 +2534,125 @@ export default function (pi: ExtensionAPI) { }; }, }); + + // ── call_scout: sift-powered autonomous exploration ───────────────────────── + + // Wraps `sift search --agent` for Planner → Scout → Worker pipeline. + // The Scout subagent is a thin wrapper around sift's autonomous corpus exploration. + // Planner calls call_scout with a query; sift explores and returns snippet-bearing evidence. + const CallScoutParams = Type.Object({ + query: Type.String({ + description: + "Natural-language query describing what to explore (e.g. 'find where the write gate tool_call hooks are registered')", + }), + scope: Type.Optional( + Type.String({ + description: + "Path to search within. Defaults to the current working directory. 
Use the active worktree for isolation.", + }), + ), + strategy: Type.Optional( + Type.String({ + description: + "Search strategy: 'path-hybrid' (default), 'page-index-hybrid', 'bm25', or 'path'", + }), + ), + }); + pi.registerTool({ + name: "call_scout", + label: "Scout", + description: [ + "Explore the codebase using sift's autonomous agent loop.", + " Spawns sift search --agent with the given query, returns snippet-bearing evidence.", + " Use this instead of grep/read when you need to understand the architecture", + " of an unfamiliar subsystem — sift's autonomous loop expands queries and", + " finds relevant code without you needing to know file paths ahead of time.", + " Planner calls this before writing an execution plan.", + ].join(""), + promptGuidelines: [ + "call_scout is for exploration only — it does not write or modify files.", + " Be specific in your query: name functions, files, or concepts you expect to find.", + " Use the scope param to restrict search to a specific worktree or subsystem.", + " Review the returned evidence before planning — it may reveal things you missed.", + ], + parameters: CallScoutParams, + async execute(_toolCallId, params, signal) { + const scope = params.scope ?? process.cwd(); + const strategy = params.strategy ?? "path-hybrid"; + const query = params.query; + // Resolve sift binary — check PATH first, then fall back to ~/.cargo/bin + const siftBin = (() => { + const pathEnv = process.env.PATH ?? ""; + for (const dir of pathEnv.split(path.delimiter)) { + const candidate = path.join(dir, "sift"); + try { + if (fs.existsSync(candidate)) return candidate; + } + catch { + // continue + } + } + // Fallback to known install location + const homeBin = path.join(os.homedir(), ".cargo", "bin", "sift"); + return fs.existsSync(homeBin) ? 
homeBin : "sift"; + })(); + const args = [ + "search", + "--strategy", + strategy, + "--agent", + query, + scope, + ]; + const stderr: string[] = []; + const stdout: string[] = []; + const proc = spawn(siftBin, args, { + cwd: scope, + shell: false, + stdio: ["ignore", "pipe", "pipe"], + }); + // Collect output + proc.stdout.on("data", (chunk) => stdout.push(chunk.toString())); + proc.stderr.on("data", (chunk) => stderr.push(chunk.toString())); + // Handle abort signal + if (signal) { + signal.addEventListener("abort", () => { + try { + proc.kill("SIGTERM"); + } + catch { + // ignore + } + }); + } + const exitCode = await new Promise((resolve) => { + proc.on("close", (code) => resolve(code ?? 0)); + proc.on("error", () => resolve(1)); + }); + const out = stdout.join(""); + const err = stderr.join(""); + if (exitCode !== 0 && err.includes("not found")) { + const hint = + " Is sift installed? Try: cargo install sift (or: curl -sSL https://sift.sh | sh)"; + return { + content: [ + { + type: "text", + text: `call_scout failed (exit ${exitCode}). 
Is sift installed?${hint}`, + }, + ], + details: { operation: "call_scout", exitCode, siftBin, query, scope, strategy }, + }; + } + return { + content: [ + { + type: "text", + text: out || `(no output, exit ${exitCode})`, + }, + ], + details: { operation: "call_scout", exitCode, siftBin, query, scope, strategy }, + }; + }, + }); } diff --git a/src/tests/initial-gsd-header-filter.test.ts b/src/tests/initial-gsd-header-filter.test.ts deleted file mode 100644 index 283f767ef..000000000 --- a/src/tests/initial-gsd-header-filter.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import assert from "node:assert/strict"; -import test from "node:test"; - -const { filterInitialSfHeader: filterInitialGsdHeader } = await import( - "../../web/lib/initial-sf-header-filter.ts" -); - -const SF_LOGO_LINES = [ - " ██████╗ ███████╗██████╗ ", - " ██╔════╝ ██╔════╝██╔══██╗", - " ██║ ███╗███████╗██║ ██║", - " ██║ ██║╚════██║██║ ██║", - " ╚██████╔╝███████║██████╔╝", - " ╚═════╝ ╚══════╝╚═════╝ ", -] as const; - -test("filterInitialGsdHeader strips a plain startup banner and keeps real terminal content", () => { - const warning = "Warning: Google Search is not configured."; - const raw = [ - ...SF_LOGO_LINES, - " Singularity Forge v2.33.1", - "", - warning, - ].join("\n"); - - const result = filterInitialGsdHeader(raw); - - assert.equal(result.status, "matched"); - assert.equal(result.text, warning); -}); - -test("filterInitialGsdHeader strips ANSI-colored startup banner output", () => { - const cyan = "\u001b[36m"; - const reset = "\u001b[39m"; - const bold = "\u001b[1m"; - const boldReset = "\u001b[22m"; - const dim = "\u001b[2m"; - const dimReset = "\u001b[22m"; - const warning = "Warning: terminal content starts here.\r\n"; - - const raw = - SF_LOGO_LINES.map((line) => `${cyan}${line}${reset}\r\n`).join("") + - ` ${bold}Singularity Forge${boldReset} ${dim}v2.33.1${dimReset}\r\n\r\n` + - warning; - - const result = filterInitialGsdHeader(raw); - - assert.equal(result.status, "matched"); - 
assert.equal(result.text, warning); -}); - -test("filterInitialGsdHeader waits for more data when the startup banner is incomplete", () => { - const partial = `${SF_LOGO_LINES[0]}\n${SF_LOGO_LINES[1]}\n${SF_LOGO_LINES[2]}`; - - const result = filterInitialGsdHeader(partial); - - assert.deepEqual(result, { status: "needs-more", text: "" }); -}); - -test("filterInitialGsdHeader passes normal terminal output through untouched", () => { - const raw = "Warning: already in the shell\r\n$ "; - - const result = filterInitialGsdHeader(raw); - - assert.equal(result.status, "passthrough"); - assert.equal(result.text, raw); -}); diff --git a/src/tests/integration/e2e-headless.test.ts b/src/tests/integration/e2e-headless.test.ts index a026c6ec1..bcecb31e3 100644 --- a/src/tests/integration/e2e-headless.test.ts +++ b/src/tests/integration/e2e-headless.test.ts @@ -44,7 +44,7 @@ type RunResult = { /** * Spawn `node dist/loader.js ...args` and collect output. */ -function runGsd( +function runSf( args: string[], timeoutMs = 30_000, env: NodeJS.ProcessEnv = {}, @@ -86,7 +86,7 @@ function runGsd( * Spawn a child process with the ability to send signals mid-flight. * Returns both the child and a promise that resolves with the result. */ -function spawnGsd( +function spawnSf( args: string[], timeoutMs = 30_000, env: NodeJS.ProcessEnv = {}, @@ -133,7 +133,7 @@ function stripAnsi(s: string): string { } /** Bootstrap a temp directory with .sf/ structure (milestones + runtime). 
*/ -function createTempWithGsd(prefix: string): string { +function createTempWithSf(prefix: string): string { const dir = mkdtempSync(join(tmpdir(), prefix)); mkdirSync(join(dir, ".sf", "milestones"), { recursive: true }); mkdirSync(join(dir, ".sf", "runtime"), { recursive: true }); @@ -166,7 +166,7 @@ function assertNoCrashMarkers(output: string): void { // =========================================================================== test("headless --output-format json emits a single HeadlessJsonResult on stdout", async (t) => { - const tmpDir = createTempWithGsd("sf-e2e-json-batch-"); + const tmpDir = createTempWithSf("sf-e2e-json-batch-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); @@ -174,7 +174,7 @@ test("headless --output-format json emits a single HeadlessJsonResult on stdout" // --max-restarts 0 prevents retry loops which would emit multiple JSON results. // --timeout 2000 ensures the process completes quickly. // Will timeout/error (no API key) but JSON batch mode should emit one HeadlessJsonResult. - const result = await runGsd( + const result = await runSf( [ "headless", "--output-format", @@ -261,14 +261,14 @@ test("headless --output-format json emits a single HeadlessJsonResult on stdout" // =========================================================================== test("headless exits with code 11 after SIGINT", async (t) => { - const tmpDir = createTempWithGsd("sf-e2e-sigint-"); + const tmpDir = createTempWithSf("sf-e2e-sigint-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); // Spawn with long timeout and max-restarts 0 so the process stays alive // waiting for completion while we send SIGINT. 
- const { child, result: resultPromise } = spawnGsd( + const { child, result: resultPromise } = spawnSf( [ "headless", "--timeout", @@ -338,13 +338,13 @@ test("headless exits with code 11 after SIGINT", async (t) => { // =========================================================================== test("headless --output-format stream-json emits NDJSON on stdout", async (t) => { - const tmpDir = createTempWithGsd("sf-e2e-stream-json-"); + const tmpDir = createTempWithSf("sf-e2e-stream-json-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); // --max-restarts 0 to prevent retry loops that extend runtime. - const result = await runGsd( + const result = await runSf( [ "headless", "--output-format", @@ -400,12 +400,12 @@ test("headless --output-format stream-json emits NDJSON on stdout", async (t) => // =========================================================================== test("headless --resume with nonexistent ID exits 1 with descriptive error", async (t) => { - const tmpDir = createTempWithGsd("sf-e2e-resume-bad-"); + const tmpDir = createTempWithSf("sf-e2e-resume-bad-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd( + const result = await runSf( [ "headless", "--resume", @@ -440,12 +440,12 @@ test("headless --resume with nonexistent ID exits 1 with descriptive error", asy // =========================================================================== test("headless --output-format with invalid value exits 1", async (t) => { - const tmpDir = createTempWithGsd("sf-e2e-bad-format-"); + const tmpDir = createTempWithSf("sf-e2e-bad-format-"); t.after(() => { rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd( + const result = await runSf( ["headless", "--output-format", "invalid-format", "auto"], 15_000, {}, diff --git a/src/tests/integration/e2e-smoke.test.ts b/src/tests/integration/e2e-smoke.test.ts index 9ae385a2d..eaf50a285 100644 --- 
a/src/tests/integration/e2e-smoke.test.ts +++ b/src/tests/integration/e2e-smoke.test.ts @@ -47,7 +47,7 @@ type RunResult = { * @param env Additional / override environment variables * @param cwd Working directory for the child process (default: projectRoot) */ -function runGsd( +function runSf( args: string[], timeoutMs = 8_000, env: NodeJS.ProcessEnv = {}, @@ -111,7 +111,7 @@ function createTempGitRepo(prefix: string): string { // --------------------------------------------------------------------------- test("sf --version outputs a semver version string and exits 0", async () => { - const result = await runGsd(["--version"]); + const result = await runSf(["--version"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -130,7 +130,7 @@ test("sf --version outputs a semver version string and exits 0", async () => { // --------------------------------------------------------------------------- test("sf --help outputs usage information and exits 0", async () => { - const result = await runGsd(["--help"]); + const result = await runSf(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -164,7 +164,7 @@ test("sf --help outputs usage information and exits 0", async () => { // --------------------------------------------------------------------------- test("sf config --help outputs help and exits 0", async () => { - const result = await runGsd(["config", "--help"]); + const result = await runSf(["config", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -184,7 +184,7 @@ test("sf config --help outputs help and exits 0", async () => { // --------------------------------------------------------------------------- test("sf update --help outputs help and exits 0", async () => { - const result 
= await runGsd(["update", "--help"]); + const result = await runSf(["update", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -201,7 +201,7 @@ test("sf update --help outputs help and exits 0", async () => { // --------------------------------------------------------------------------- test("sf --list-models runs without crashing", async () => { - const result = await runGsd(["--list-models"]); + const result = await runSf(["--list-models"]); assert.ok( !result.timedOut, @@ -241,7 +241,7 @@ test("sf --mode text --print does not segfault or throw unhandled errors", { ? "no API key available — print mode requires a configured provider" : undefined, }, async () => { - const result = await runGsd( + const result = await runSf( ["--mode", "text", "--print", "echo hello"], 15_000, ); @@ -302,7 +302,7 @@ test("sf --mode text --print does not segfault or throw unhandled errors", { // --------------------------------------------------------------------------- test("sf headless --help outputs help and exits 0", async () => { - const result = await runGsd(["headless", "--help"]); + const result = await runSf(["headless", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -325,7 +325,7 @@ test("sf headless --help outputs help and exits 0", async () => { // --------------------------------------------------------------------------- test("sf sessions --help outputs sessions-specific help and exits 0", async () => { - const result = await runGsd(["sessions", "--help"]); + const result = await runSf(["sessions", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -348,7 +348,7 @@ test("sf sessions --help outputs sessions-specific help and exits 0", async () = test("sf with no TTY exits 1 
with clean terminal-required error", async () => { // Running with piped stdin (non-TTY) and no subcommand/flags triggers // interactive mode which requires a TTY - const result = await runGsd([], 15_000); + const result = await runSf([], 15_000); assert.ok(!result.timedOut, "process should not hang"); assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`); @@ -374,7 +374,7 @@ test("sf with no TTY exits 1 with clean terminal-required error", async () => { test("sf with unknown flags does not crash", async () => { // Unknown flags are silently ignored by the arg parser. // With --help appended, we get a clean exit path to test. - const result = await runGsd(["--some-unknown-flag", "--help"]); + const result = await runSf(["--some-unknown-flag", "--help"]); assert.ok(!result.timedOut, "process should not time out"); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -388,7 +388,7 @@ test("sf with unknown flags does not crash", async () => { // --------------------------------------------------------------------------- test("sf -v is equivalent to --version", async () => { - const result = await runGsd(["-v"]); + const result = await runSf(["-v"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -406,7 +406,7 @@ test("sf -v is equivalent to --version", async () => { // --------------------------------------------------------------------------- test("sf -h is equivalent to --help", async () => { - const result = await runGsd(["-h"]); + const result = await runSf(["-h"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -433,7 +433,7 @@ test("sf headless without .sf/ directory exits 1 with clean error", async (t) => rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd(["headless"], 10_000, {}, tmpDir); + const result = 
await runSf(["headless"], 10_000, {}, tmpDir); assert.ok(!result.timedOut, "process should not hang"); assert.strictEqual(result.code, 1, `expected exit 1, got ${result.code}`); @@ -458,7 +458,7 @@ test("sf headless new-milestone without --context exits 1", async (t) => { rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd( + const result = await runSf( ["headless", "new-milestone"], 10_000, {}, @@ -488,7 +488,7 @@ test("sf headless --timeout with invalid value exits 1", async (t) => { rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd( + const result = await runSf( ["headless", "--timeout", "not-a-number", "auto"], 10_000, {}, @@ -518,7 +518,7 @@ test("sf headless --timeout with negative value exits 1", async (t) => { rmSync(tmpDir, { recursive: true, force: true }); }); - const result = await runGsd( + const result = await runSf( ["headless", "--timeout", "-5000", "auto"], 10_000, {}, @@ -549,7 +549,7 @@ test("sf headless query returns JSON from the built CLI", async (t) => { // Cold packaged startup in a fresh temp repo is now regularly >10s because // the built CLI loads bundled TS resources through jiti before answering. // This command is still healthy; it just needs a realistic timeout budget. - const result = await runGsd(["headless", "query"], 30_000, {}, tmpDir); + const result = await runSf(["headless", "query"], 30_000, {}, tmpDir); assert.ok(!result.timedOut, "process should not hang"); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -574,7 +574,7 @@ test("sf worktree list loads the built worktree CLI without module errors", asyn // Cold packaged startup in a fresh temp repo is now regularly >10s because // the built CLI loads bundled TS resources through jiti before listing. 
- const result = await runGsd(["worktree", "list"], 30_000, {}, tmpDir); + const result = await runSf(["worktree", "list"], 30_000, {}, tmpDir); assert.ok(!result.timedOut, "process should not hang"); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); @@ -596,7 +596,7 @@ test("sf worktree list loads the built worktree CLI without module errors", asyn // --------------------------------------------------------------------------- test("sf --help lists all documented subcommands", async () => { - const result = await runGsd(["--help"]); + const result = await runSf(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); const output = stripAnsi(result.stdout); @@ -615,7 +615,7 @@ test("sf --help lists all documented subcommands", async () => { // --------------------------------------------------------------------------- test("sf --help lists all key flags", async () => { - const result = await runGsd(["--help"]); + const result = await runSf(["--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); const output = stripAnsi(result.stdout); @@ -646,7 +646,7 @@ test("sf --help lists all key flags", async () => { // --------------------------------------------------------------------------- test("sf --version ignores trailing arguments", async () => { - const result = await runGsd(["--version", "--help", "--list-models"]); + const result = await runSf(["--version", "--help", "--list-models"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); @@ -667,7 +667,7 @@ test("sf --version ignores trailing arguments", async () => { test("sf headless help (positional) exits cleanly", async () => { // "help" as a positional is treated as a quick command by headless mode. // Without .sf/ it should fail, but with --help flag it should succeed. 
- const result = await runGsd(["headless", "--help"]); + const result = await runSf(["headless", "--help"]); assert.strictEqual(result.code, 0, `expected exit 0, got ${result.code}`); assert.ok(!result.timedOut, "process should not time out"); diff --git a/src/tests/integration/pack-install.test.ts b/src/tests/integration/pack-install.test.ts index 8e1aebe1e..e6c794882 100644 --- a/src/tests/integration/pack-install.test.ts +++ b/src/tests/integration/pack-install.test.ts @@ -255,7 +255,7 @@ test("tarball installs and sf binary resolves", async (t) => { } // Verify bundled resources are present - const installedGsdExt = join( + const installedSfExt = join( sandbox.installPrefix, "node_modules", "sf-run", @@ -266,7 +266,7 @@ test("tarball installs and sf binary resolves", async (t) => { "index.ts", ); assert.ok( - existsSync(installedGsdExt), + existsSync(installedSfExt), "bundled sf extension present in installed package", ); }); diff --git a/src/tests/integration/web-cli-entry.test.ts b/src/tests/integration/web-cli-entry.test.ts index 6b3636f49..dfc2b57ca 100644 --- a/src/tests/integration/web-cli-entry.test.ts +++ b/src/tests/integration/web-cli-entry.test.ts @@ -5,7 +5,7 @@ import { join } from "node:path"; import test from "node:test"; import { pathToFileURL } from "node:url"; -const { resolveGsdCliEntry } = await import("../../web/cli-entry.ts"); +const { resolveSfCliEntry } = await import("../../web/cli-entry.ts"); function makeFixture(paths: string[]): string { const root = mkdtempSync(join(tmpdir(), "sf-cli-entry-")); @@ -17,7 +17,7 @@ function makeFixture(paths: string[]): string { return root; } -test("resolveGsdCliEntry prefers the built loader for packaged standalone interactive sessions", (t) => { +test("resolveSfCliEntry prefers the built loader for packaged standalone interactive sessions", (t) => { const packageRoot = makeFixture([ "dist/loader.js", "src/loader.ts", @@ -28,7 +28,7 @@ test("resolveGsdCliEntry prefers the built loader for packaged 
standalone intera rmSync(packageRoot, { recursive: true, force: true }); }); - const entry = resolveGsdCliEntry({ + const entry = resolveSfCliEntry({ packageRoot, cwd: "/tmp/project-a", execPath: "/custom/node", @@ -43,7 +43,7 @@ test("resolveGsdCliEntry prefers the built loader for packaged standalone intera }); }); -test("resolveGsdCliEntry prefers the source loader for source-dev interactive sessions", (t) => { +test("resolveSfCliEntry prefers the source loader for source-dev interactive sessions", (t) => { const packageRoot = makeFixture([ "dist/loader.js", "src/loader.ts", @@ -54,7 +54,7 @@ test("resolveGsdCliEntry prefers the source loader for source-dev interactive se rmSync(packageRoot, { recursive: true, force: true }); }); - const entry = resolveGsdCliEntry({ + const entry = resolveSfCliEntry({ packageRoot, cwd: "/tmp/project-b", execPath: "/custom/node", @@ -84,14 +84,14 @@ test("resolveGsdCliEntry prefers the source loader for source-dev interactive se }); }); -test("resolveGsdCliEntry appends rpc arguments for bridge sessions", (t) => { +test("resolveSfCliEntry appends rpc arguments for bridge sessions", (t) => { const packageRoot = makeFixture(["dist/loader.js"]); t.after(() => { rmSync(packageRoot, { recursive: true, force: true }); }); - const entry = resolveGsdCliEntry({ + const entry = resolveSfCliEntry({ packageRoot, cwd: "/tmp/project-c", execPath: "/custom/node", diff --git a/src/tests/integration/web-command-parity-contract.test.ts b/src/tests/integration/web-command-parity-contract.test.ts index d090c8bf8..62f304582 100644 --- a/src/tests/integration/web-command-parity-contract.test.ts +++ b/src/tests/integration/web-command-parity-contract.test.ts @@ -61,7 +61,7 @@ const DEFERRED_BROWSER_REJECTS = [ "quit", ] as const; -async function collectRegisteredGsdCommandRoots(): Promise { +async function collectRegisteredSfCommandRoots(): Promise { const commands = new Map(); await sfExtension.default({ @@ -190,7 +190,7 @@ test("browser-local aliases 
and legacy helpers stay explicit", async (t) => { }); test("registered SF command roots stay on the prompt/extension path", async () => { - const registeredRoots = await collectRegisteredGsdCommandRoots(); + const registeredRoots = await collectRegisteredSfCommandRoots(); assert.deepEqual( registeredRoots, ["exit", "sf", "kill", "worktree", "wt"], @@ -199,25 +199,25 @@ test("registered SF command roots stay on the prompt/extension path", async () = // Non-sf roots are extension commands that pass through to the bridge. // Derived dynamically so adding a new registration fails this assertion loudly. - const nonGsdRoots = registeredRoots.filter((r) => r !== "sf"); + const nonSfRoots = registeredRoots.filter((r) => r !== "sf"); assert.equal( - nonGsdRoots.length, + nonSfRoots.length, 4, "expected exactly 4 non-sf passthrough roots; update this count when adding registrations", ); - for (const root of nonGsdRoots) { + for (const root of nonSfRoots) { assertPromptPassthrough(`/${root}`); } // Bare /sf passes through to bridge (equivalent to /sf next) - const bareGsd = dispatchBrowserSlashCommand("/sf"); + const bareSf = dispatchBrowserSlashCommand("/sf"); assert.equal( - bareGsd.kind, + bareSf.kind, "prompt", "bare /sf should pass through to bridge", ); assert.equal( - bareGsd.command.message, + bareSf.command.message, "/sf", "bare /sf should preserve exact input", ); diff --git a/src/tests/integration/web-project-discovery-contract.test.ts b/src/tests/integration/web-project-discovery-contract.test.ts index c226d9b97..2ad1108c6 100644 --- a/src/tests/integration/web-project-discovery-contract.test.ts +++ b/src/tests/integration/web-project-discovery-contract.test.ts @@ -151,7 +151,7 @@ describe("project-discovery", () => { const b = results.find((r) => r.name === "project-b"); assert.ok(b, "project-b not found"); assert.equal(b.kind, "empty-sf"); - assert.equal(b.signals.hasGsdFolder, true); + assert.equal(b.signals.hasSfFolder, true); }); test("project-c is detected 
as brownfield with hasCargo signal", () => { diff --git a/src/tests/integration/web-state-surfaces-contract.test.ts b/src/tests/integration/web-state-surfaces-contract.test.ts index 7a578f986..28c190125 100644 --- a/src/tests/integration/web-state-surfaces-contract.test.ts +++ b/src/tests/integration/web-state-surfaces-contract.test.ts @@ -433,7 +433,7 @@ const MOCK_DATA_PATTERNS = [ /const\s+recentActivity\s*=\s*\[/, // const recentActivity = [...] /const\s+currentSliceTasks\s*=\s*\[/, // const currentSliceTasks = [...] /const\s+modelUsage\s*=\s*\[/, // const modelUsage = [...] - /const\s+gsdFiles\s*=\s*\[/, // const sfFiles = [...] (legacy naming from fork) + /const\s+sfFiles\s*=\s*\[/, // const sfFiles = [...] (legacy naming from fork) /AutoModeState.*idle.*working/, // old enum-style mock state /Lorem\s+ipsum/i, // lorem placeholder text /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z["'](?:.*,\s*$)/m, // hardcoded ISO timestamps in array literals diff --git a/src/tests/resource-loader-content-hash.test.ts b/src/tests/resource-loader-content-hash.test.ts index 634c113ce..ccdd22351 100644 --- a/src/tests/resource-loader-content-hash.test.ts +++ b/src/tests/resource-loader-content-hash.test.ts @@ -11,14 +11,14 @@ import { join } from "node:path"; import test from "node:test"; /** - * Regression test for gsd-build/gsd-2 #4787. + * Regression test for SF build #4787. * * Background: `computeResourceFingerprint` previously hashed the relative * file path + file size only. Same-byte-length edits to bundled prompt * templates (e.g. the #4570 retry-cap fix to parallel-research-slices.md) * slipped through the fingerprint gate in `initResources`, so existing * installs silently kept serving the stale cached copy from - * `~/.gsd/agent/extensions/gsd/prompts/`. + * the installed resource cache. 
* * The fix hashes file CONTENTS (sha256) instead of just size — any edit, * regardless of length, produces a different fingerprint and triggers a @@ -28,7 +28,7 @@ import test from "node:test"; test("computeResourceFingerprint detects same-size content edits (#4787)", async (t) => { const { computeResourceFingerprint } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-fingerprint-content-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-fingerprint-content-")); t.after(() => { rmSync(tmp, { recursive: true, force: true }); }); @@ -61,7 +61,7 @@ test("computeResourceFingerprint detects same-size content edits (#4787)", async test("syncResourceDir overwrites same-size stale content on refresh (#4787)", async (t) => { const { syncResourceDir } = await import("../resource-loader.ts"); - const tmp = mkdtempSync(join(tmpdir(), "gsd-sync-samesize-")); + const tmp = mkdtempSync(join(tmpdir(), "sf-sync-samesize-")); t.after(() => { rmSync(tmp, { recursive: true, force: true }); }); diff --git a/src/tests/web-files-symlink.test.ts b/src/tests/web-files-symlink.test.ts index 43874021a..fc3bbb409 100644 --- a/src/tests/web-files-symlink.test.ts +++ b/src/tests/web-files-symlink.test.ts @@ -13,7 +13,7 @@ import test from "node:test"; import { resolveSecurePath } from "../../web/lib/secure-path.ts"; test("web file API resolves normal project files under the canonical root", () => { - const root = mkdtempSync(join(tmpdir(), "gsd-web-root-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-root-")); try { writeFileSync(join(root, "inside.txt"), "inside"); @@ -27,8 +27,8 @@ test("web file API resolves normal project files under the canonical root", () = }); test("web file API rejects symlinks that resolve outside the project root", () => { - const root = mkdtempSync(join(tmpdir(), "gsd-web-root-")); - const outside = mkdtempSync(join(tmpdir(), "gsd-web-outside-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-root-")); + const outside = 
mkdtempSync(join(tmpdir(), "sf-web-outside-")); try { writeFileSync(join(outside, "secret.txt"), "secret"); symlinkSync(join(outside, "secret.txt"), join(root, "linked-secret.txt")); @@ -41,8 +41,8 @@ test("web file API rejects symlinks that resolve outside the project root", () = }); test("web file API rejects writes through symlinked parent directories outside root", () => { - const root = mkdtempSync(join(tmpdir(), "gsd-web-root-")); - const outside = mkdtempSync(join(tmpdir(), "gsd-web-outside-")); + const root = mkdtempSync(join(tmpdir(), "sf-web-root-")); + const outside = mkdtempSync(join(tmpdir(), "sf-web-outside-")); try { symlinkSync(outside, join(root, "linked-outside"), "dir"); diff --git a/src/web/bridge-service.ts b/src/web/bridge-service.ts index 94f427ac8..5547e954d 100644 --- a/src/web/bridge-service.ts +++ b/src/web/bridge-service.ts @@ -558,7 +558,7 @@ export type ProjectDetectionKind = | "blank"; // empty/near-empty folder export interface ProjectDetectionSignals { - hasGsdFolder: boolean; + hasSfFolder: boolean; hasPlanningFolder: boolean; hasGitRepo: boolean; hasPackageJson: boolean; @@ -621,7 +621,7 @@ export function detectMonorepo( export function detectProjectKind(projectCwd: string): ProjectDetection { const checkExists = getBridgeDeps().existsSync ?? 
existsSync; - const hasGsdFolder = checkExists(join(projectCwd, ".sf")); + const hasSfFolder = checkExists(join(projectCwd, ".sf")); const hasPlanningFolder = checkExists(join(projectCwd, ".planning")); const hasGitRepo = checkExists(join(projectCwd, ".git")); const hasPackageJson = checkExists(join(projectCwd, "package.json")); @@ -640,7 +640,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { } const signals: ProjectDetectionSignals = { - hasGsdFolder, + hasSfFolder, hasPlanningFolder, hasGitRepo, hasPackageJson, @@ -653,7 +653,7 @@ export function detectProjectKind(projectCwd: string): ProjectDetection { let kind: ProjectDetectionKind; - if (hasGsdFolder) { + if (hasSfFolder) { // Check if milestones exist const milestonesDir = join(projectCwd, ".sf", "milestones"); let hasMilestones = false; diff --git a/src/web/cli-entry.ts b/src/web/cli-entry.ts index f5a03f96a..078c32c5a 100644 --- a/src/web/cli-entry.ts +++ b/src/web/cli-entry.ts @@ -10,8 +10,5 @@ export interface SfCliEntry { } -/** @deprecated Use SfCliEntry instead */ -export type GsdCliEntry = SfCliEntry; - export interface ResolveSfCliEntryOptions { packageRoot: string; cwd: string; @@ -24,8 +21,5 @@ export interface ResolveSfCliEntryOptions { } -/** @deprecated Use ResolveSfCliEntryOptions instead */ -export type ResolveGsdCliEntryOptions = ResolveSfCliEntryOptions; - function buildExtraArgs(options: ResolveSfCliEntryOptions): string[] { if (options.mode !== "rpc") return []; @@ -94,9 +88,3 @@ export function resolveSfCliEntry( ); } -/** @deprecated Use resolveSfCliEntry instead */ -export function resolveGsdCliEntry( - options: ResolveGsdCliEntryOptions, -): GsdCliEntry { - return resolveSfCliEntry(options); -} diff --git a/tests/live-regression/run.ts b/tests/live-regression/run.ts index 9371880b4..957416779 100644 --- a/tests/live-regression/run.ts +++ b/tests/live-regression/run.ts @@ -55,7 +55,7 @@ function sfCli(args: string[], cwd: string, env?: Record): { std encoding: 
"utf-8", timeout: 30_000, stdio: ["pipe", "pipe", "pipe"], - env: { ...process.env, ...env, SF_NON_INTERACTIVE: "1", GSD_NON_INTERACTIVE: "1" }, + env: { ...process.env, ...env, SF_NON_INTERACTIVE: "1" }, }); return { stdout, stderr: "", code: 0 }; } catch (err: any) { diff --git a/tests/live/run.ts b/tests/live/run.ts index 9e7ec6fc0..b4ffa7099 100644 --- a/tests/live/run.ts +++ b/tests/live/run.ts @@ -5,7 +5,7 @@ import { fileURLToPath } from "url"; const __dirname = dirname(fileURLToPath(import.meta.url)); -if ((process.env.SF_LIVE_TESTS || process.env.GSD_LIVE_TESTS) !== "1") { +if (process.env.SF_LIVE_TESTS !== "1") { console.log("Skipping live tests (set SF_LIVE_TESTS=1 to enable)"); process.exit(0); } diff --git a/tests/repro-worktree-bug/verify-fix.mjs b/tests/repro-worktree-bug/verify-fix.mjs index 27a7891f6..ce23a278d 100644 --- a/tests/repro-worktree-bug/verify-fix.mjs +++ b/tests/repro-worktree-bug/verify-fix.mjs @@ -104,22 +104,22 @@ function resolveProjectRoot(basePath) { const HASH = "abc123def456"; const TEST_ROOT = mkdtempSync(join(tmpdir(), "sf-verify-fix-")); -const USER_GSD = process.env.SF_HOME || join(TEST_ROOT, ".sf"); +const USER_SF = process.env.SF_HOME || join(TEST_ROOT, ".sf"); const USER_HOME = homedir(); -const PROJECT_GSD_STORAGE = `${USER_GSD}/projects/${HASH}`; +const PROJECT_SF_STORAGE = `${USER_SF}/projects/${HASH}`; const PROJECT_DIR = mkdtempSync(join(tmpdir(), "myproject-")); -const PROJECT_GSD_LINK = `${PROJECT_DIR}/.sf`; +const PROJECT_SF_LINK = `${PROJECT_DIR}/.sf`; const PROJECT_REAL = normalizePathForCompare(PROJECT_DIR); -const EXPECTED_BUGGY_ROOT = normalizePathForCompare(resolve(USER_GSD, "..")); +const EXPECTED_BUGGY_ROOT = normalizePathForCompare(resolve(USER_SF, "..")); -process.env.SF_HOME = USER_GSD; +process.env.SF_HOME = USER_SF; console.log("=== Setting up filesystem layout ===\n"); -mkdirSync(`${PROJECT_GSD_STORAGE}/worktrees`, { recursive: true }); 
-mkdirSync(`${PROJECT_GSD_STORAGE}/milestones`, { recursive: true }); +mkdirSync(`${PROJECT_SF_STORAGE}/worktrees`, { recursive: true }); +mkdirSync(`${PROJECT_SF_STORAGE}/milestones`, { recursive: true }); mkdirSync(PROJECT_DIR, { recursive: true }); -symlinkSync(PROJECT_GSD_STORAGE, PROJECT_GSD_LINK); +symlinkSync(PROJECT_SF_STORAGE, PROJECT_SF_LINK); // Init git in project dir execSync("git init -b main", { cwd: PROJECT_DIR, stdio: "pipe" }); diff --git a/tests/repro-worktree-bug/verify-integration.mjs b/tests/repro-worktree-bug/verify-integration.mjs index 554cbf7d7..e534a69cc 100644 --- a/tests/repro-worktree-bug/verify-integration.mjs +++ b/tests/repro-worktree-bug/verify-integration.mjs @@ -7,7 +7,7 @@ * * Covers: * 1. resolveProjectRoot() returns the real project, not ~ - * 2. gsdRoot() from the resolved project root finds project .sf, not ~/.sf + * 2. sfRoot() from the resolved project root finds project .sf, not ~/.sf * 3. The parallel/ directory would be created under project .sf * 4. session-status writes target the correct location * 5. orchestrator.json would be written to project .sf diff --git a/vscode-extension/src/activity-feed.ts b/vscode-extension/src/activity-feed.ts index e32314eba..8d1edac4e 100644 --- a/vscode-extension/src/activity-feed.ts +++ b/vscode-extension/src/activity-feed.ts @@ -62,7 +62,7 @@ function toolSummary(toolName: string, toolInput: Record): { la * TreeDataProvider that shows real-time tool executions from the SF agent. * Listens to tool_execution_start/end and agent_start/end events. 
*/ -export class GsdActivityFeedProvider implements vscode.TreeDataProvider, vscode.Disposable { +export class SfActivityFeedProvider implements vscode.TreeDataProvider, vscode.Disposable { public static readonly viewId = "sf-activity"; private readonly _onDidChangeTreeData = new vscode.EventEmitter(); diff --git a/vscode-extension/src/bash-terminal.ts b/vscode-extension/src/bash-terminal.ts index 0fd395cd5..b4b41bdf1 100644 --- a/vscode-extension/src/bash-terminal.ts +++ b/vscode-extension/src/bash-terminal.ts @@ -5,7 +5,7 @@ import type { AgentEvent, SfClient } from "./sf-client.js"; * Routes the SF agent's Bash tool output to a dedicated VS Code terminal panel. * Shows streaming output from tool_execution_update events in real time. */ -export class GsdBashTerminal implements vscode.Disposable { +export class SfBashTerminal implements vscode.Disposable { private terminal: vscode.Terminal | undefined; private writeEmitter: vscode.EventEmitter | undefined; private disposables: vscode.Disposable[] = []; diff --git a/vscode-extension/src/change-tracker.ts b/vscode-extension/src/change-tracker.ts index 2d391b8fc..7a5feac23 100644 --- a/vscode-extension/src/change-tracker.ts +++ b/vscode-extension/src/change-tracker.ts @@ -21,7 +21,7 @@ export interface Checkpoint { * before the agent modifies it, enabling diff views, SCM integration, * and checkpoint/rollback functionality. 
*/ -export class GsdChangeTracker implements vscode.Disposable { +export class SfChangeTracker implements vscode.Disposable { /** file path → original content (before first agent modification this session) */ private originals = new Map(); /** Set of file paths modified in the current agent turn */ diff --git a/vscode-extension/src/checkpoints.ts b/vscode-extension/src/checkpoints.ts index 189d6a353..4265aa408 100644 --- a/vscode-extension/src/checkpoints.ts +++ b/vscode-extension/src/checkpoints.ts @@ -1,11 +1,11 @@ import * as vscode from "vscode"; -import type { GsdChangeTracker, Checkpoint } from "./change-tracker.js"; +import type { SfChangeTracker, Checkpoint } from "./change-tracker.js"; /** * TreeDataProvider that shows agent checkpoints (one per agent turn). * Each checkpoint can be restored to revert all file changes since that point. */ -export class GsdCheckpointProvider implements vscode.TreeDataProvider, vscode.Disposable { +export class SfCheckpointProvider implements vscode.TreeDataProvider, vscode.Disposable { public static readonly viewId = "sf-checkpoints"; private readonly _onDidChangeTreeData = new vscode.EventEmitter(); @@ -13,7 +13,7 @@ export class GsdCheckpointProvider implements vscode.TreeDataProvider this._onDidChangeTreeData.fire()), diff --git a/vscode-extension/src/code-lens.ts b/vscode-extension/src/code-lens.ts index 21458fb83..7a9f8b415 100644 --- a/vscode-extension/src/code-lens.ts +++ b/vscode-extension/src/code-lens.ts @@ -50,7 +50,7 @@ const SYMBOL_PATTERNS: { languages: string[]; regex: RegExp }[] = [ * declarations. Clicking the lens sends a brief explanation request to the SF * agent for that specific symbol. 
*/ -export class GsdCodeLensProvider implements vscode.CodeLensProvider, vscode.Disposable { +export class SfCodeLensProvider implements vscode.CodeLensProvider, vscode.Disposable { private readonly _onDidChangeCodeLenses = new vscode.EventEmitter(); readonly onDidChangeCodeLenses = this._onDidChangeCodeLenses.event; diff --git a/vscode-extension/src/conversation-history.ts b/vscode-extension/src/conversation-history.ts index d37858d65..b628053f7 100644 --- a/vscode-extension/src/conversation-history.ts +++ b/vscode-extension/src/conversation-history.ts @@ -20,8 +20,8 @@ interface ConversationMessage { * current SF session using the get_messages RPC call. Shows tool calls, * thinking blocks, search/filter, and fork-from-here actions. */ -export class GsdConversationHistoryPanel implements vscode.Disposable { - private static currentPanel: GsdConversationHistoryPanel | undefined; +export class SfConversationHistoryPanel implements vscode.Disposable { + private static currentPanel: SfConversationHistoryPanel | undefined; private readonly panel: vscode.WebviewPanel; private readonly client: SfClient; @@ -30,13 +30,13 @@ export class GsdConversationHistoryPanel implements vscode.Disposable { static createOrShow( extensionUri: vscode.Uri, client: SfClient, - ): GsdConversationHistoryPanel { + ): SfConversationHistoryPanel { const column = vscode.window.activeTextEditor?.viewColumn ?? 
vscode.ViewColumn.One; - if (GsdConversationHistoryPanel.currentPanel) { - GsdConversationHistoryPanel.currentPanel.panel.reveal(column); - void GsdConversationHistoryPanel.currentPanel.refresh(); - return GsdConversationHistoryPanel.currentPanel; + if (SfConversationHistoryPanel.currentPanel) { + SfConversationHistoryPanel.currentPanel.panel.reveal(column); + void SfConversationHistoryPanel.currentPanel.refresh(); + return SfConversationHistoryPanel.currentPanel; } const panel = vscode.window.createWebviewPanel( @@ -49,13 +49,13 @@ export class GsdConversationHistoryPanel implements vscode.Disposable { }, ); - GsdConversationHistoryPanel.currentPanel = new GsdConversationHistoryPanel( + SfConversationHistoryPanel.currentPanel = new SfConversationHistoryPanel( panel, extensionUri, client, ); - void GsdConversationHistoryPanel.currentPanel.refresh(); - return GsdConversationHistoryPanel.currentPanel; + void SfConversationHistoryPanel.currentPanel.refresh(); + return SfConversationHistoryPanel.currentPanel; } private constructor( @@ -105,7 +105,7 @@ export class GsdConversationHistoryPanel implements vscode.Disposable { } dispose(): void { - GsdConversationHistoryPanel.currentPanel = undefined; + SfConversationHistoryPanel.currentPanel = undefined; this.panel.dispose(); for (const d of this.disposables) { d.dispose(); diff --git a/vscode-extension/src/diagnostics.ts b/vscode-extension/src/diagnostics.ts index 61290e367..6b481ec55 100644 --- a/vscode-extension/src/diagnostics.ts +++ b/vscode-extension/src/diagnostics.ts @@ -6,7 +6,7 @@ import type { SfClient } from "./sf-client.js"; * - Reads diagnostics (errors/warnings) from the Problems panel and sends them to the agent * - Provides a DiagnosticCollection for the agent to surface its own findings */ -export class GsdDiagnosticBridge implements vscode.Disposable { +export class SfDiagnosticBridge implements vscode.Disposable { private readonly collection: vscode.DiagnosticCollection; private disposables: 
vscode.Disposable[] = []; diff --git a/vscode-extension/src/extension.ts b/vscode-extension/src/extension.ts index 4c74c0251..27ea82901 100644 --- a/vscode-extension/src/extension.ts +++ b/vscode-extension/src/extension.ts @@ -1,32 +1,32 @@ import * as vscode from "vscode"; import { SfClient, ThinkingLevel } from "./sf-client.js"; import { registerChatParticipant } from "./chat-participant.js"; -import { GsdSidebarProvider } from "./sidebar.js"; -import { GsdFileDecorationProvider } from "./file-decorations.js"; -import { GsdBashTerminal } from "./bash-terminal.js"; -import { GsdSessionTreeProvider } from "./session-tree.js"; -import { GsdConversationHistoryPanel } from "./conversation-history.js"; -import { GsdSlashCompletionProvider } from "./slash-completion.js"; -import { GsdCodeLensProvider } from "./code-lens.js"; -import { GsdActivityFeedProvider } from "./activity-feed.js"; -import { GsdChangeTracker } from "./change-tracker.js"; -import { GsdScmProvider } from "./scm-provider.js"; -import { GsdDiagnosticBridge } from "./diagnostics.js"; -import { GsdLineDecorationManager } from "./line-decorations.js"; -import { GsdGitIntegration } from "./git-integration.js"; -import { GsdPermissionManager } from "./permissions.js"; +import { SfSidebarProvider } from "./sidebar.js"; +import { SfFileDecorationProvider } from "./file-decorations.js"; +import { SfBashTerminal } from "./bash-terminal.js"; +import { SfSessionTreeProvider } from "./session-tree.js"; +import { SfConversationHistoryPanel } from "./conversation-history.js"; +import { SfSlashCompletionProvider } from "./slash-completion.js"; +import { SfCodeLensProvider } from "./code-lens.js"; +import { SfActivityFeedProvider } from "./activity-feed.js"; +import { SfChangeTracker } from "./change-tracker.js"; +import { SfScmProvider } from "./scm-provider.js"; +import { SfDiagnosticBridge } from "./diagnostics.js"; +import { SfLineDecorationManager } from "./line-decorations.js"; +import { SfGitIntegration } from 
"./git-integration.js"; +import { SfPermissionManager } from "./permissions.js"; let client: SfClient | undefined; -let sidebarProvider: GsdSidebarProvider | undefined; -let fileDecorations: GsdFileDecorationProvider | undefined; -let sessionTreeProvider: GsdSessionTreeProvider | undefined; -let activityFeedProvider: GsdActivityFeedProvider | undefined; -let changeTracker: GsdChangeTracker | undefined; -let scmProvider: GsdScmProvider | undefined; -let diagnosticBridge: GsdDiagnosticBridge | undefined; -let lineDecorations: GsdLineDecorationManager | undefined; -let gitIntegration: GsdGitIntegration | undefined; -let permissionManager: GsdPermissionManager | undefined; +let sidebarProvider: SfSidebarProvider | undefined; +let fileDecorations: SfFileDecorationProvider | undefined; +let sessionTreeProvider: SfSessionTreeProvider | undefined; +let activityFeedProvider: SfActivityFeedProvider | undefined; +let changeTracker: SfChangeTracker | undefined; +let scmProvider: SfScmProvider | undefined; +let diagnosticBridge: SfDiagnosticBridge | undefined; +let lineDecorations: SfLineDecorationManager | undefined; +let gitIntegration: SfGitIntegration | undefined; +let permissionManager: SfPermissionManager | undefined; function getTrustedConfigurationValue(section: string, key: string, fallback: T): T { const config = vscode.workspace.getConfiguration(section); @@ -117,17 +117,17 @@ export function activate(context: vscode.ExtensionContext): void { // -- Sidebar ----------------------------------------------------------- - sidebarProvider = new GsdSidebarProvider(context.extensionUri, client); + sidebarProvider = new SfSidebarProvider(context.extensionUri, client); context.subscriptions.push( vscode.window.registerWebviewViewProvider( - GsdSidebarProvider.viewId, + SfSidebarProvider.viewId, sidebarProvider, ), ); // -- File decorations -------------------------------------------------- - fileDecorations = new GsdFileDecorationProvider(client); + fileDecorations = new 
SfFileDecorationProvider(client); context.subscriptions.push( fileDecorations, vscode.window.registerFileDecorationProvider(fileDecorations), @@ -135,51 +135,51 @@ export function activate(context: vscode.ExtensionContext): void { // -- Bash terminal ----------------------------------------------------- - const bashTerminal = new GsdBashTerminal(client); + const bashTerminal = new SfBashTerminal(client); context.subscriptions.push(bashTerminal); // -- Session tree view ------------------------------------------------- - sessionTreeProvider = new GsdSessionTreeProvider(client); + sessionTreeProvider = new SfSessionTreeProvider(client); context.subscriptions.push( sessionTreeProvider, - vscode.window.registerTreeDataProvider(GsdSessionTreeProvider.viewId, sessionTreeProvider), + vscode.window.registerTreeDataProvider(SfSessionTreeProvider.viewId, sessionTreeProvider), ); // -- Activity feed ----------------------------------------------------- - activityFeedProvider = new GsdActivityFeedProvider(client); + activityFeedProvider = new SfActivityFeedProvider(client); context.subscriptions.push( activityFeedProvider, - vscode.window.registerTreeDataProvider(GsdActivityFeedProvider.viewId, activityFeedProvider), + vscode.window.registerTreeDataProvider(SfActivityFeedProvider.viewId, activityFeedProvider), ); // -- Change tracker & SCM provider ------------------------------------- - changeTracker = new GsdChangeTracker(client); + changeTracker = new SfChangeTracker(client); context.subscriptions.push(changeTracker); - scmProvider = new GsdScmProvider(changeTracker, cwd); + scmProvider = new SfScmProvider(changeTracker, cwd); context.subscriptions.push(scmProvider); // -- Diagnostics ------------------------------------------------------- - diagnosticBridge = new GsdDiagnosticBridge(client); + diagnosticBridge = new SfDiagnosticBridge(client); context.subscriptions.push(diagnosticBridge); // -- Line-level decorations -------------------------------------------- - 
lineDecorations = new GsdLineDecorationManager(changeTracker!); + lineDecorations = new SfLineDecorationManager(changeTracker!); context.subscriptions.push(lineDecorations); // -- Git integration --------------------------------------------------- - gitIntegration = new GsdGitIntegration(changeTracker!, cwd); + gitIntegration = new SfGitIntegration(changeTracker!, cwd); context.subscriptions.push(gitIntegration); // -- Permissions ------------------------------------------------------- - permissionManager = new GsdPermissionManager(client); + permissionManager = new SfPermissionManager(client); context.subscriptions.push(permissionManager); // -- Progress notifications -------------------------------------------- @@ -276,7 +276,7 @@ export function activate(context: vscode.ExtensionContext): void { // -- Slash command completion ------------------------------------------ - const slashCompletion = new GsdSlashCompletionProvider(client); + const slashCompletion = new SfSlashCompletionProvider(client); context.subscriptions.push( slashCompletion, vscode.languages.registerCompletionItemProvider( @@ -295,7 +295,7 @@ export function activate(context: vscode.ExtensionContext): void { // -- Code lens "Ask SF" ----------------------------------------------- - const codeLensProvider = new GsdCodeLensProvider(client); + const codeLensProvider = new SfCodeLensProvider(client); context.subscriptions.push( codeLensProvider, vscode.languages.registerCodeLensProvider( @@ -636,7 +636,7 @@ export function activate(context: vscode.ExtensionContext): void { context.subscriptions.push( vscode.commands.registerCommand("sf.showHistory", () => { if (!requireConnected()) return; - GsdConversationHistoryPanel.createOrShow(context.extensionUri, client!); + SfConversationHistoryPanel.createOrShow(context.extensionUri, client!); }), ); diff --git a/vscode-extension/src/file-decorations.ts b/vscode-extension/src/file-decorations.ts index 95efe2fca..2ea65f92b 100644 --- 
a/vscode-extension/src/file-decorations.ts +++ b/vscode-extension/src/file-decorations.ts @@ -5,7 +5,7 @@ import type { AgentEvent, SfClient } from "./sf-client.js"; * Badges files in the VS Code explorer that SF has written or edited * during the current session. */ -export class GsdFileDecorationProvider implements vscode.FileDecorationProvider, vscode.Disposable { +export class SfFileDecorationProvider implements vscode.FileDecorationProvider, vscode.Disposable { private readonly _onDidChangeFileDecorations = new vscode.EventEmitter(); readonly onDidChangeFileDecorations = this._onDidChangeFileDecorations.event; diff --git a/vscode-extension/src/git-integration.ts b/vscode-extension/src/git-integration.ts index aa6dba427..8eb235903 100644 --- a/vscode-extension/src/git-integration.ts +++ b/vscode-extension/src/git-integration.ts @@ -1,15 +1,15 @@ import * as vscode from "vscode"; import { execFile } from "node:child_process"; -import type { GsdChangeTracker } from "./change-tracker.js"; +import type { SfChangeTracker } from "./change-tracker.js"; /** * Provides git integration for agent changes — commit, branch, and diff. */ -export class GsdGitIntegration implements vscode.Disposable { +export class SfGitIntegration implements vscode.Disposable { private disposables: vscode.Disposable[] = []; constructor( - private readonly tracker: GsdChangeTracker, + private readonly tracker: SfChangeTracker, private readonly cwd: string, ) {} diff --git a/vscode-extension/src/line-decorations.ts b/vscode-extension/src/line-decorations.ts index 1805ac6d6..bdd31a706 100644 --- a/vscode-extension/src/line-decorations.ts +++ b/vscode-extension/src/line-decorations.ts @@ -1,17 +1,17 @@ import * as vscode from "vscode"; -import type { GsdChangeTracker } from "./change-tracker.js"; +import type { SfChangeTracker } from "./change-tracker.js"; /** * Provides line-level editor decorations for files modified by the SF agent. 
* Shows subtle background highlights on changed lines and gutter icons. */ -export class GsdLineDecorationManager implements vscode.Disposable { +export class SfLineDecorationManager implements vscode.Disposable { private readonly addedDecoration: vscode.TextEditorDecorationType; private readonly modifiedDecoration: vscode.TextEditorDecorationType; private readonly gutterDecoration: vscode.TextEditorDecorationType; private disposables: vscode.Disposable[] = []; - constructor(private readonly tracker: GsdChangeTracker) { + constructor(private readonly tracker: SfChangeTracker) { this.addedDecoration = vscode.window.createTextEditorDecorationType({ isWholeLine: true, backgroundColor: "rgba(78, 201, 176, 0.07)", diff --git a/vscode-extension/src/permissions.ts b/vscode-extension/src/permissions.ts index 2a91c8f30..2e9aaedd0 100644 --- a/vscode-extension/src/permissions.ts +++ b/vscode-extension/src/permissions.ts @@ -7,7 +7,7 @@ type ApprovalMode = "ask" | "auto-approve" | "plan-only"; * Permission/approval system for agent actions. * Can be configured to prompt before file writes, command execution, etc. */ -export class GsdPermissionManager implements vscode.Disposable { +export class SfPermissionManager implements vscode.Disposable { private _mode: ApprovalMode = "auto-approve"; private disposables: vscode.Disposable[] = []; diff --git a/vscode-extension/src/plan-viewer.ts b/vscode-extension/src/plan-viewer.ts index 53a458318..585cdba1d 100644 --- a/vscode-extension/src/plan-viewer.ts +++ b/vscode-extension/src/plan-viewer.ts @@ -15,7 +15,7 @@ interface PlanStep { * Displays steps as they happen, showing what the agent is doing and * what it has completed — a live execution plan. 
*/ -export class GsdPlanViewerProvider implements vscode.TreeDataProvider, vscode.Disposable { +export class SfPlanViewerProvider implements vscode.TreeDataProvider, vscode.Disposable { public static readonly viewId = "sf-plan"; private readonly _onDidChangeTreeData = new vscode.EventEmitter(); diff --git a/vscode-extension/src/scm-provider.ts b/vscode-extension/src/scm-provider.ts index e3926bb54..49d55a147 100644 --- a/vscode-extension/src/scm-provider.ts +++ b/vscode-extension/src/scm-provider.ts @@ -1,6 +1,6 @@ import * as vscode from "vscode"; import * as path from "node:path"; -import type { GsdChangeTracker } from "./change-tracker.js"; +import type { SfChangeTracker } from "./change-tracker.js"; const SF_ORIGINAL_SCHEME = "sf-original"; @@ -9,18 +9,18 @@ const SF_ORIGINAL_SCHEME = "sf-original"; * in a dedicated "SF Agent" section of the Source Control panel. * Supports QuickDiff to show before/after diffs, and accept/discard per-file. */ -export class GsdScmProvider implements vscode.Disposable { +export class SfScmProvider implements vscode.Disposable { private readonly scm: vscode.SourceControl; private readonly changesGroup: vscode.SourceControlResourceGroup; - private readonly contentProvider: GsdOriginalContentProvider; + private readonly contentProvider: SfOriginalContentProvider; private disposables: vscode.Disposable[] = []; constructor( - private readonly tracker: GsdChangeTracker, + private readonly tracker: SfChangeTracker, private readonly workspaceRoot: string, ) { // Register content provider for original file contents - this.contentProvider = new GsdOriginalContentProvider(tracker); + this.contentProvider = new SfOriginalContentProvider(tracker); this.disposables.push( vscode.workspace.registerTextDocumentContentProvider( SF_ORIGINAL_SCHEME, @@ -105,11 +105,11 @@ export class GsdScmProvider implements vscode.Disposable { * TextDocumentContentProvider that serves the original (pre-agent) content * of files via the `sf-original:` URI scheme. 
*/ -class GsdOriginalContentProvider implements vscode.TextDocumentContentProvider { +class SfOriginalContentProvider implements vscode.TextDocumentContentProvider { private readonly _onDidChange = new vscode.EventEmitter(); readonly onDidChange = this._onDidChange.event; - constructor(private readonly tracker: GsdChangeTracker) { + constructor(private readonly tracker: SfChangeTracker) { tracker.onDidChange((paths) => { for (const p of paths) { this._onDidChange.fire(vscode.Uri.file(p).with({ scheme: SF_ORIGINAL_SCHEME })); diff --git a/vscode-extension/src/session-tree.ts b/vscode-extension/src/session-tree.ts index e53580049..fb4453f32 100644 --- a/vscode-extension/src/session-tree.ts +++ b/vscode-extension/src/session-tree.ts @@ -15,7 +15,7 @@ export interface SessionItem { * Tree view provider that lists SF session files from the same directory * as the currently active session. */ -export class GsdSessionTreeProvider implements vscode.TreeDataProvider, vscode.Disposable { +export class SfSessionTreeProvider implements vscode.TreeDataProvider, vscode.Disposable { public static readonly viewId = "sf-sessions"; private readonly _onDidChangeTreeData = new vscode.EventEmitter(); diff --git a/vscode-extension/src/sf-client.ts b/vscode-extension/src/sf-client.ts index f0684c0d5..80a7b1549 100644 --- a/vscode-extension/src/sf-client.ts +++ b/vscode-extension/src/sf-client.ts @@ -80,7 +80,7 @@ type PendingRequest = { * Client that spawns `sf --mode rpc` and communicates via JSON lines * over stdin/stdout. Emits VS Code events for streaming responses. 
*/ -export class GsdClient implements vscode.Disposable { +export class SfClient implements vscode.Disposable { private process: ChildProcess | null = null; private pendingRequests = new Map(); private requestId = 0; diff --git a/vscode-extension/src/sidebar.ts b/vscode-extension/src/sidebar.ts index c6a09ca8e..c50e81083 100644 --- a/vscode-extension/src/sidebar.ts +++ b/vscode-extension/src/sidebar.ts @@ -14,7 +14,7 @@ async function sendViaChat(message: string): Promise { * Designed for information density without clutter — collapsible sections, * hidden empty data, and consolidated action buttons. */ -export class GsdSidebarProvider implements vscode.WebviewViewProvider { +export class SfSidebarProvider implements vscode.WebviewViewProvider { public static readonly viewId = "sf-sidebar"; private view?: vscode.WebviewView; diff --git a/vscode-extension/src/slash-completion.ts b/vscode-extension/src/slash-completion.ts index c9611c896..f0abca5e6 100644 --- a/vscode-extension/src/slash-completion.ts +++ b/vscode-extension/src/slash-completion.ts @@ -9,7 +9,7 @@ import type { SfClient, SlashCommand } from "./sf-client.js"; * Commands are fetched from the running agent via get_commands RPC and * cached so the list remains available between keystrokes. */ -export class GsdSlashCompletionProvider +export class SfSlashCompletionProvider implements vscode.CompletionItemProvider, vscode.Disposable { private cachedCommands: SlashCommand[] = []; diff --git a/web/app/api/files/route.ts b/web/app/api/files/route.ts index a0bab9fa6..8420b5f63 100644 --- a/web/app/api/files/route.ts +++ b/web/app/api/files/route.ts @@ -36,12 +36,12 @@ interface FileNode { children?: FileNode[]; } -function getGsdRoot(projectCwd: string): string { +function getSfRoot(projectCwd: string): string { return join(projectCwd, ".sf"); } function getRootForMode(mode: RootMode, projectCwd: string): string { - return mode === "project" ? projectCwd : getGsdRoot(projectCwd); + return mode === "project" ? 
projectCwd : getSfRoot(projectCwd); } function buildTree(dirPath: string, skipDirs?: Set, depth = 0, maxDepth = Infinity): FileNode[] { diff --git a/web/components/sf/files-view.tsx b/web/components/sf/files-view.tsx index 00b588aeb..ed795586d 100644 --- a/web/components/sf/files-view.tsx +++ b/web/components/sf/files-view.tsx @@ -477,7 +477,7 @@ export function FilesView() { const [activeRoot, setActiveRoot] = useState("sf") const [leftPanel, setLeftPanel] = useState("tree") - const [gsdTree, setGsdTree] = useState(null) + const [sfTree, setSfTree] = useState(null) const [projectTree, setProjectTree] = useState(null) const [loading, setLoading] = useState(true) const [error, setError] = useState(null) @@ -522,7 +522,7 @@ export function FilesView() { ) // Expanded paths per root, restored from sessionStorage - const [gsdExpanded, setGsdExpanded] = useState>(() => loadExpanded(projectCwd, "sf")) + const [sfExpanded, setSfExpanded] = useState>(() => loadExpanded(projectCwd, "sf")) const [projectExpanded, setProjectExpanded] = useState>(() => loadExpanded(projectCwd, "project")) // Re-hydrate from storage once projectCwd is available (boot may arrive after first render) @@ -530,12 +530,12 @@ export function FilesView() { useEffect(() => { if (!projectCwd || hydratedRef.current) return hydratedRef.current = true - setGsdExpanded(loadExpanded(projectCwd, "sf")) + setSfExpanded(loadExpanded(projectCwd, "sf")) setProjectExpanded(loadExpanded(projectCwd, "project")) }, [projectCwd]) - const expandedPaths = activeRoot === "sf" ? gsdExpanded : projectExpanded - const setExpandedPaths = activeRoot === "sf" ? setGsdExpanded : setProjectExpanded + const expandedPaths = activeRoot === "sf" ? sfExpanded : projectExpanded + const setExpandedPaths = activeRoot === "sf" ? 
setSfExpanded : setProjectExpanded // ── Multi-tab state ── const [openTabs, setOpenTabs] = useState([]) @@ -553,8 +553,8 @@ export function FilesView() { // The selected path in the tree corresponds to the active tab const selectedPath = activeTab?.path ?? null - const tree = activeRoot === "sf" ? gsdTree : projectTree - const treeLoaded = activeRoot === "sf" ? gsdTree !== null : projectTree !== null + const tree = activeRoot === "sf" ? sfTree : projectTree + const treeLoaded = activeRoot === "sf" ? sfTree !== null : projectTree !== null const fetchTree = useCallback(async (root: RootMode) => { try { @@ -568,7 +568,7 @@ export function FilesView() { const data = await res.json() const nodes = data.tree ?? [] if (root === "sf") { - setGsdTree(nodes) + setSfTree(nodes) } else { setProjectTree(nodes) } @@ -609,7 +609,7 @@ export function FilesView() { // Auto-expand parent dirs const parts = path.split("/") - const setExpanded = root === "sf" ? setGsdExpanded : setProjectExpanded + const setExpanded = root === "sf" ? 
setSfExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) for (let i = 1; i < parts.length; i++) { @@ -676,14 +676,14 @@ export function FilesView() { // Process a file open request (used both on mount and on event) const processFileOpen = useCallback(async (root: RootMode, path: string) => { // Ensure tree is loaded for this root - if (root === "sf" && !gsdTree) { + if (root === "sf" && !sfTree) { fetchTree("sf") } else if (root === "project" && !projectTree) { fetchTree("project") } await openFileTab(root, path) - }, [gsdTree, projectTree, fetchTree, openFileTab]) + }, [sfTree, projectTree, fetchTree, openFileTab]) // On mount: consume any pending file request that arrived before this component mounted const consumedPendingRef = useRef(false) @@ -792,7 +792,7 @@ export function FilesView() { const handleNewFile = useCallback((parentDir: string) => { // Ensure parent directory is expanded if (parentDir) { - const setExpanded = activeRoot === "sf" ? setGsdExpanded : setProjectExpanded + const setExpanded = activeRoot === "sf" ? setSfExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) const parts = parentDir.split("/") @@ -808,7 +808,7 @@ export function FilesView() { const handleNewFolder = useCallback((parentDir: string) => { if (parentDir) { - const setExpanded = activeRoot === "sf" ? setGsdExpanded : setProjectExpanded + const setExpanded = activeRoot === "sf" ? 
setSfExpanded : setProjectExpanded setExpanded((prev) => { const next = new Set(prev) const parts = parentDir.split("/") @@ -1029,13 +1029,13 @@ export function FilesView() { const autoSelectedRef = useRef(false) useEffect(() => { if (autoSelectedRef.current) return - if (!gsdTree || openTabs.length > 0 || consumedPendingRef.current) return - const hasStateMd = gsdTree.some((n) => n.name === "STATE.md" && n.type === "file") + if (!sfTree || openTabs.length > 0 || consumedPendingRef.current) return + const hasStateMd = sfTree.some((n) => n.name === "STATE.md" && n.type === "file") if (hasStateMd) { autoSelectedRef.current = true void openFileTab("sf", "STATE.md") } - }, [gsdTree, openTabs.length, openFileTab]) + }, [sfTree, openTabs.length, openFileTab]) // ── Agent file-edit auto-open: watch tool executions for edit/write tools ── const lastSeenToolCountRef = useRef(0) @@ -1054,7 +1054,7 @@ export function FilesView() { if (!filePath) continue // Determine root and relative path - const gsdPrefix = ".sf/" + const sfPrefix = ".sf/" let root: RootMode = "project" let relativePath = filePath @@ -1064,9 +1064,9 @@ export function FilesView() { if (relativePath.startsWith("/")) relativePath = relativePath.slice(1) } - if (relativePath.startsWith(gsdPrefix)) { + if (relativePath.startsWith(sfPrefix)) { root = "sf" - relativePath = relativePath.slice(gsdPrefix.length) + relativePath = relativePath.slice(sfPrefix.length) } const key = tabKey(root, relativePath) diff --git a/web/components/sf/onboarding/step-project.tsx b/web/components/sf/onboarding/step-project.tsx index 046fe199b..745b3ece4 100644 --- a/web/components/sf/onboarding/step-project.tsx +++ b/web/components/sf/onboarding/step-project.tsx @@ -24,7 +24,7 @@ import { authFetch } from "@/lib/auth" type ProjectDetectionKind = "active-sf" | "empty-sf" | "v1-legacy" | "brownfield" | "blank" interface ProjectDetectionSignals { - hasGsdFolder: boolean + hasSfFolder: boolean hasPlanningFolder: boolean hasGitRepo: 
boolean hasPackageJson: boolean diff --git a/web/components/sf/projects-view.tsx b/web/components/sf/projects-view.tsx index 1314ef4a0..90e563ebf 100644 --- a/web/components/sf/projects-view.tsx +++ b/web/components/sf/projects-view.tsx @@ -56,7 +56,7 @@ import { type ProjectDetectionKind = "active-sf" | "empty-sf" | "v1-legacy" | "brownfield" | "blank" interface ProjectDetectionSignals { - hasGsdFolder: boolean + hasSfFolder: boolean hasPlanningFolder: boolean hasGitRepo: boolean hasPackageJson: boolean diff --git a/web/components/sf/sidebar.tsx b/web/components/sf/sidebar.tsx index 7618e7eaa..93b95138c 100644 --- a/web/components/sf/sidebar.tsx +++ b/web/components/sf/sidebar.tsx @@ -331,9 +331,9 @@ export function MilestoneExplorer({ isConnecting = false, width, onCollapse }: { const openTaskFile = (absolutePath: string | undefined) => { if (!absolutePath || !projectCwd) return - const gsdPrefix = `${projectCwd}/.sf/` - if (!absolutePath.startsWith(gsdPrefix)) return - const relativePath = absolutePath.slice(gsdPrefix.length) + const sfPrefix = `${projectCwd}/.sf/` + if (!absolutePath.startsWith(sfPrefix)) return + const relativePath = absolutePath.slice(sfPrefix.length) window.dispatchEvent(new CustomEvent("sf:open-file", { detail: { root: "sf", path: relativePath } })) } diff --git a/web/lib/browser-slash-command-dispatch.ts b/web/lib/browser-slash-command-dispatch.ts index b5020c027..fca95a8c0 100644 --- a/web/lib/browser-slash-command-dispatch.ts +++ b/web/lib/browser-slash-command-dispatch.ts @@ -159,7 +159,7 @@ Advanced: export · cleanup · run-hook · migrate · remote Type /sf to run. Use /sf help for this message.` -function dispatchGSDSubcommand( +function dispatchSFSubcommand( input: string, args: string, options: BrowserSlashCommandDispatchOptions, @@ -345,7 +345,7 @@ export function dispatchBrowserSlashCommand( // SF subcommand dispatch — must precede SURFACE_COMMANDS to avoid // `/sf export` colliding with the built-in `/export` surface. 
if (parsed.name === "sf") { - return dispatchGSDSubcommand(trimmed, parsed.args, options) + return dispatchSFSubcommand(trimmed, parsed.args, options) } const browserSurface = SURFACE_COMMANDS.get(parsed.name) diff --git a/web/lib/initial-sf-header-filter.ts b/web/lib/initial-sf-header-filter.ts index 7378946e6..6cc3fb440 100644 --- a/web/lib/initial-sf-header-filter.ts +++ b/web/lib/initial-sf-header-filter.ts @@ -1,4 +1,4 @@ -export type InitialGsdHeaderFilterResult = +export type InitialSfHeaderFilterResult = | { status: 'matched'; text: string } | { status: 'needs-more'; text: '' } | { status: 'passthrough'; text: string } @@ -106,7 +106,7 @@ function isLogoLine(line: string | undefined): boolean { * PTY pane often does. This filter removes only the initial branded banner from * the PTY attach stream so both panes start on real terminal content. */ -export function filterInitialSfHeader(raw: string): InitialGsdHeaderFilterResult { +export function filterInitialSfHeader(raw: string): InitialSfHeaderFilterResult { const { plainText, rawOffsetsByPlainIndex } = indexVisibleText(raw) if (!plainText) { return { status: 'needs-more', text: '' } diff --git a/web/lib/pty-manager.ts b/web/lib/pty-manager.ts index 95b1e7009..a91c7c4e2 100644 --- a/web/lib/pty-manager.ts +++ b/web/lib/pty-manager.ts @@ -8,7 +8,7 @@ import { chmodSync, existsSync, statSync } from "node:fs"; import { basename, join, dirname } from "node:path"; import type { IPty } from "node-pty"; -import { resolveGsdCliEntry } from "../../src/web/cli-entry.ts"; +import { resolveSfCliEntry } from "../../src/web/cli-entry.ts"; // Webpack escape hatch — this global exists at runtime in webpack bundles and // forwards to Node's native require(), bypassing webpack's module resolution. 
@@ -144,7 +144,7 @@ function resolveTerminalSpawnSpec(cwd: string, command?: string, commandArgs: st if (command === "sf") { try { - const cliEntry = resolveGsdCliEntry({ + const cliEntry = resolveSfCliEntry({ packageRoot: process.env.SF_WEB_PACKAGE_ROOT || process.cwd(), cwd, execPath: process.execPath, diff --git a/web/lib/sf-workspace-store.tsx b/web/lib/sf-workspace-store.tsx index 20365af14..230b8400e 100644 --- a/web/lib/sf-workspace-store.tsx +++ b/web/lib/sf-workspace-store.tsx @@ -282,7 +282,7 @@ export type ProjectDetectionKind = export interface ProjectDetectionSignals { - hasGsdFolder: boolean + hasSfFolder: boolean hasPlanningFolder: boolean hasGitRepo: boolean hasPackageJson: boolean