diff --git a/.dockerignore b/.dockerignore
index 444ee5c7f..5897ace2a 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -12,7 +12,7 @@ packages/*/node_modules/
.env
.env.*
!.env.example
-.gsd/
+.sf/
# ── IDE & OS ──
.idea/
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f54b9a409..e7a10ea8a 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -6,31 +6,31 @@
# Last matching rule wins.
# Default: maintainers review everything not explicitly matched below
-* @gsd-build/maintainers
+* @sf-build/maintainers
# Core agent orchestration — RFC required, senior review only
-packages/pi-agent-core/ @gsd-build/maintainers
-src/resources/extensions/gsd/ @gsd-build/maintainers
+packages/pi-agent-core/ @sf-build/maintainers
+src/resources/extensions/sf/ @sf-build/maintainers
# AI/LLM provider integrations
-packages/pi-ai/ @gsd-build/maintainers
+packages/pi-ai/ @sf-build/maintainers
# Terminal UI
-packages/pi-tui/ @gsd-build/maintainers
+packages/pi-tui/ @sf-build/maintainers
# Native bindings — platform-specific, needs careful review
-native/ @gsd-build/maintainers
+native/ @sf-build/maintainers
# CI/CD and release pipeline — high blast radius
-.github/ @gsd-build/maintainers
-scripts/ @gsd-build/maintainers
-Dockerfile @gsd-build/maintainers
+.github/ @sf-build/maintainers
+scripts/ @sf-build/maintainers
+Dockerfile @sf-build/maintainers
# Security-sensitive files — always require maintainer sign-off
-.secretscanignore @gsd-build/maintainers
-scripts/secret-scan.sh @gsd-build/maintainers
-scripts/install-hooks.sh @gsd-build/maintainers
+.secretscanignore @sf-build/maintainers
+scripts/secret-scan.sh @sf-build/maintainers
+scripts/install-hooks.sh @sf-build/maintainers
# Contributor-facing docs — keep accurate, maintainers approve
-CONTRIBUTING.md @gsd-build/maintainers
-VISION.md @gsd-build/maintainers
+CONTRIBUTING.md @sf-build/maintainers
+VISION.md @sf-build/maintainers
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 29380c827..874aca82b 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -28,7 +28,7 @@ body:
attributes:
label: Summary
description: One sentence describing what is broken.
- placeholder: Running `/gsd inspect` reports "No SF database available" even though `.gsd/gsd.db` exists.
+ placeholder: Running `/sf inspect` reports "No SF database available" even though `.sf/sf.db` exists.
validations:
required: true
@@ -40,7 +40,7 @@ body:
placeholder: |
1. Run `...`
2. Open `...`
- 3. Execute `/gsd ...`
+ 3. Execute `/sf ...`
4. Observe the failure
validations:
required: true
@@ -64,10 +64,10 @@ body:
required: true
- type: input
- id: gsd_version
+ id: sf_version
attributes:
label: SF version
- description: Run `gsd --version` and paste the exact version.
+ description: Run `sf --version` and paste the exact version.
placeholder: "e.g. 2.33.1"
validations:
required: true
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
index b0ad66770..33c7d44fa 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.yml
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -32,7 +32,7 @@ body:
attributes:
label: Proposed solution
description: Describe the desired behavior, UX, CLI shape, or API as specifically as you can.
- placeholder: Add `/gsd queue list` that renders queued milestones with IDs, status, and created timestamps.
+ placeholder: Add `/sf queue list` that renders queued milestones with IDs, status, and created timestamps.
validations:
required: true
@@ -41,7 +41,7 @@ body:
attributes:
label: Alternatives considered
description: Other approaches considered and why they are weaker.
- placeholder: Reading `.gsd/QUEUE.md` manually works, but it is slower and harder to parse during terminal workflows.
+ placeholder: Reading `.sf/QUEUE.md` manually works, but it is slower and harder to parse during terminal workflows.
- type: textarea
id: use_cases
@@ -80,4 +80,4 @@ body:
attributes:
label: Additional information
description: Extra constraints, compatibility concerns, implementation hints, or references.
- placeholder: Must remain compatible with existing `.gsd/QUEUE.md` structure.
+ placeholder: Must remain compatible with existing `.sf/QUEUE.md` structure.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 22e8ce150..9950ac0ff 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -44,7 +44,7 @@ Closes #
- [ ] `pi-ai` — AI/LLM layer
- [ ] `pi-agent-core` — Agent orchestration
- [ ] `pi-coding-agent` — Coding agent
-- [ ] `gsd extension` — SF workflow
+- [ ] `sf extension` — SF workflow
- [ ] `native` — Native bindings
- [ ] `ci/build` — Workflows, scripts, config
diff --git a/.github/workflows/ai-triage.yml b/.github/workflows/ai-triage.yml
index 38e1496d9..1bce8f481 100644
--- a/.github/workflows/ai-triage.yml
+++ b/.github/workflows/ai-triage.yml
@@ -175,7 +175,7 @@ jobs:
};
const securityNote = result.violation_type === 'security-in-public'
- ? `\n\n**If this is a security vulnerability, please delete this ${type} and use [GitHub\'s private vulnerability reporting](https://github.com/gsd-build/SF/security/advisories/new) instead.** See [CONTRIBUTING.md](https://github.com/gsd-build/SF/blob/main/CONTRIBUTING.md#security) for details.`
+ ? `\n\n**If this is a security vulnerability, please delete this ${type} and use [GitHub\'s private vulnerability reporting](https://github.com/sf-build/SF/security/advisories/new) instead.** See [CONTRIBUTING.md](https://github.com/sf-build/SF/blob/main/CONTRIBUTING.md#security) for details.`
: '';
const comment = `👋 Thanks for opening this ${type}!
@@ -186,7 +186,7 @@ jobs:
${result.explanation}
- Please review our [VISION.md](https://github.com/gsd-build/SF/blob/main/VISION.md) and [CONTRIBUTING.md](https://github.com/gsd-build/SF/blob/main/CONTRIBUTING.md) for project guidelines.${securityNote}
+ Please review our [VISION.md](https://github.com/sf-build/SF/blob/main/VISION.md) and [CONTRIBUTING.md](https://github.com/sf-build/SF/blob/main/CONTRIBUTING.md) for project guidelines.${securityNote}
A maintainer will review this shortly. If you believe this was flagged in error, no action is needed — we'll take a look.
diff --git a/.github/workflows/build-native.yml b/.github/workflows/build-native.yml
index 2fd7d5563..b385e72a7 100644
--- a/.github/workflows/build-native.yml
+++ b/.github/workflows/build-native.yml
@@ -145,7 +145,7 @@ jobs:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
- echo "Publishing @gsd-build/engine-${platform}..."
+ echo "Publishing @sf-build/engine-${platform}..."
cd "native/npm/${platform}"
OUTPUT=$(npm publish --access public ${{ steps.version-check.outputs.tag_flag }} 2>&1) && echo "$OUTPUT" || {
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
@@ -167,7 +167,7 @@ jobs:
for attempt in $(seq 1 5); do
FAILED=0
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
- PKG="@gsd-build/engine-${platform}"
+ PKG="@sf-build/engine-${platform}"
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
if [ "${PUBLISHED}" != "${VERSION}" ]; then
FAILED=1
@@ -181,7 +181,7 @@ jobs:
if [ "$attempt" = "5" ]; then
echo "::error::One or more platform packages not found after 5 attempts. Aborting."
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
- PKG="@gsd-build/engine-${platform}"
+ PKG="@sf-build/engine-${platform}"
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
if [ "${PUBLISHED}" = "${VERSION}" ]; then
echo " ✓ ${PKG}@${VERSION}"
@@ -231,16 +231,16 @@ jobs:
npm init -y > /dev/null 2>&1
# Wait for npm registry with exponential backoff (5s, 10s, 20s, 30s, 30s, 30s, 30s — max ~155s vs fixed 5min)
- echo "Waiting for gsd-pi@${VERSION} to appear on npm..."
+ echo "Waiting for sf-pi@${VERSION} to appear on npm..."
DELAY=5
for attempt in $(seq 1 8); do
- PUBLISHED=$(npm view "gsd-pi@${VERSION}" version 2>/dev/null || echo "")
+ PUBLISHED=$(npm view "sf-pi@${VERSION}" version 2>/dev/null || echo "")
if [ "${PUBLISHED}" = "${VERSION}" ]; then
echo " ✓ Version ${VERSION} visible on npm (attempt ${attempt})"
break
fi
if [ "$attempt" = "8" ]; then
- echo "::warning::gsd-pi@${VERSION} not visible on npm after 8 attempts — skipping smoke test"
+ echo "::warning::sf-pi@${VERSION} not visible on npm after 8 attempts — skipping smoke test"
exit 0
fi
echo " Attempt ${attempt}: not yet visible, retrying in ${DELAY}s..."
@@ -250,15 +250,15 @@ jobs:
done
# Install and verify with backoff (5s, 10s, 20s)
- echo "Installing gsd-pi@${VERSION}..."
+ echo "Installing sf-pi@${VERSION}..."
DELAY=5
for attempt in 1 2 3; do
- if npm install "gsd-pi@${VERSION}" 2>&1 | tee /tmp/install-output.txt; then
+ if npm install "sf-pi@${VERSION}" 2>&1 | tee /tmp/install-output.txt; then
echo " ✓ Install succeeded"
- RAW=$(node node_modules/gsd-pi/dist/loader.js --version 2>&1 || echo "FAILED")
+ RAW=$(node node_modules/sf-pi/dist/loader.js --version 2>&1 || echo "FAILED")
ACTUAL=$(echo "$RAW" | sed 's/\x1b\[[0-9;]*m//g' | grep -oE "^${VERSION}$" | head -1)
if [ "$ACTUAL" = "$VERSION" ]; then
- echo " ✓ gsd --version = ${VERSION}"
+ echo " ✓ sf --version = ${VERSION}"
echo "Published package is functional"
exit 0
else
@@ -272,7 +272,7 @@ jobs:
sleep "$DELAY"
DELAY=$((DELAY * 2))
done
- echo "::error::Smoke test failed — gsd-pi@${VERSION} not installable"
+ echo "::error::Smoke test failed — sf-pi@${VERSION} not installable"
exit 1
- name: Verify dist-tag after publish
@@ -282,7 +282,7 @@ jobs:
echo "Verifying npm dist-tag 'latest' points to ${VERSION}..."
DELAY=5
for attempt in $(seq 1 6); do
- LATEST=$(npm view gsd-pi dist-tags.latest 2>/dev/null || echo "")
+ LATEST=$(npm view sf-pi dist-tags.latest 2>/dev/null || echo "")
if [ "${LATEST}" = "${VERSION}" ]; then
echo " ✓ npm dist-tags.latest = ${VERSION}"
exit 0
@@ -292,5 +292,5 @@ jobs:
DELAY=$((DELAY * 2))
if [ "$DELAY" -gt 30 ]; then DELAY=30; fi
done
- echo "::error::dist-tags.latest is '${LATEST}' but expected '${VERSION}' — run: npm dist-tag add gsd-pi@${VERSION} latest"
+ echo "::error::dist-tags.latest is '${LATEST}' but expected '${VERSION}' — run: npm dist-tag add sf-pi@${VERSION} latest"
exit 1
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b57254207..e305aae48 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -95,10 +95,10 @@ jobs:
- name: Scan for base64-encoded secrets
run: bash scripts/base64-scan.sh --diff origin/main
- - name: Ensure .gsd/ is not checked in
+ - name: Ensure .sf/ is not checked in
run: |
- if [ -d ".gsd" ]; then
- echo "::error::.gsd/ directory must not be checked in"
+          if [ -d ".sf" ] || [ -d ".gsd" ]; then
+            echo "::error::.sf/ (or legacy .gsd/) project-state directory must not be checked in"
exit 1
fi
@@ -242,10 +242,10 @@ jobs:
- name: Run Windows portability tests
run: >-
- node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs
+ node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs
--experimental-strip-types --test
src/tests/windows-portability.test.ts
- src/resources/extensions/gsd/tests/validate-directory.test.ts
+ src/resources/extensions/sf/tests/validate-directory.test.ts
src/tests/integration/web-mode-windows-hide.test.ts
rtk-portability:
@@ -294,14 +294,14 @@ jobs:
- name: Run RTK-focused portability tests
run: >-
- node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs
+ node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs
--experimental-strip-types --experimental-test-isolation=process --test
src/tests/rtk.test.ts
src/tests/rtk-execution-seams.test.ts
src/tests/postinstall.test.ts
src/tests/app-smoke.test.ts
- src/resources/extensions/gsd/tests/custom-verification.test.ts
- src/resources/extensions/gsd/tests/verification-gate.test.ts
+ src/resources/extensions/sf/tests/custom-verification.test.ts
+ src/resources/extensions/sf/tests/verification-gate.test.ts
- name: Generate RTK benchmark evidence
if: matrix.label == 'linux'
diff --git a/.github/workflows/cleanup-dev-versions.yml b/.github/workflows/cleanup-dev-versions.yml
index 7225a22ea..b4447b7a2 100644
--- a/.github/workflows/cleanup-dev-versions.yml
+++ b/.github/workflows/cleanup-dev-versions.yml
@@ -24,7 +24,7 @@ jobs:
run: |
set -euo pipefail
- PACKAGE="gsd-pi"
+ PACKAGE="sf-pi"
MAX_AGE_DAYS=30
CUTOFF=$(date -u -d "-${MAX_AGE_DAYS} days" +%s 2>/dev/null || date -u -v-${MAX_AGE_DAYS}d +%s)
diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml
index 753e67826..4c18f864f 100644
--- a/.github/workflows/pipeline.yml
+++ b/.github/workflows/pipeline.yml
@@ -73,7 +73,7 @@ jobs:
- name: Publish @dev
run: |
VERSION=$(node -e 'process.stdout.write(require("./package.json").version)')
- if npm view "gsd-pi@${VERSION}" version 2>/dev/null; then
+ if npm view "sf-pi@${VERSION}" version 2>/dev/null; then
echo "Version ${VERSION} already published — skipping"
else
npm publish --tag dev
@@ -100,19 +100,19 @@ jobs:
registry-url: https://registry.npmjs.org
cache: 'npm'
- - name: Install gsd-pi@dev globally (with registry propagation retry)
+ - name: Install sf-pi@dev globally (with registry propagation retry)
run: |
for i in 1 2 3 4 5 6; do
- npm install -g gsd-pi@dev && exit 0
+ npm install -g sf-pi@dev && exit 0
echo "Attempt $i failed — waiting 10s for npm registry propagation..."
sleep 10
done
- echo "Failed to install gsd-pi@dev after 6 attempts"
+ echo "Failed to install sf-pi@dev after 6 attempts"
exit 1
- name: Run smoke tests (against installed binary)
run: |
- export SF_SMOKE_BINARY=$(which gsd)
+ export SF_SMOKE_BINARY=$(which sf)
npm run test:smoke
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -125,14 +125,14 @@ jobs:
- name: Run live regression tests (against installed binary)
run: |
- export SF_SMOKE_BINARY=$(which gsd)
+ export SF_SMOKE_BINARY=$(which sf)
npm run test:live-regression
- name: Promote to @next
env:
DEV_VERSION: ${{ needs.dev-publish.outputs.dev-version }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: npm dist-tag add "gsd-pi@${DEV_VERSION}" next
+ run: npm dist-tag add "sf-pi@${DEV_VERSION}" next
- name: Log in to GHCR
uses: docker/login-action@v4
@@ -235,7 +235,7 @@ jobs:
OUTPUT=$(npm publish 2>&1) && echo "$OUTPUT" || {
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
echo "Version already published — promoting to latest"
- npm dist-tag add "gsd-pi@${RELEASE_VERSION}" latest
+ npm dist-tag add "sf-pi@${RELEASE_VERSION}" latest
else
echo "$OUTPUT"
exit 1
@@ -268,7 +268,7 @@ jobs:
NOTES=$(cat /tmp/release-notes.md)
curl -s -X POST "$DISCORD_WEBHOOK" \
-H "Content-Type: application/json" \
- -d "$(jq -n --arg c "**SF v${RELEASE_VERSION} Released**\n\n${NOTES}\n\n\`npm i gsd-pi@${RELEASE_VERSION}\`" '{content:$c}')"
+ -d "$(jq -n --arg c "**SF v${RELEASE_VERSION} Released**\n\n${NOTES}\n\n\`npm i sf-pi@${RELEASE_VERSION}\`" '{content:$c}')"
- name: Log in to GHCR
uses: docker/login-action@v4
diff --git a/.gitignore b/.gitignore
index 1f1e564e1..24ed8b5cd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -72,7 +72,7 @@ docs/coherence-audit/
# ── SF project state (per-worktree, never committed) ──
.sf/
-.gsd/
+# (legacy .gsd/ entry removed — .sf/ is already ignored above)
# ── Stale lock files (npm is canonical) ──
pnpm-lock.yaml
diff --git a/.gsd/CODEBASE.md b/.gsd/CODEBASE.md
new file mode 100644
index 000000000..3925a3150
--- /dev/null
+++ b/.gsd/CODEBASE.md
@@ -0,0 +1,482 @@
+# Codebase Map
+
+Generated: 2026-04-15T12:09:27Z | Files: 500 | Described: 0/500
+
+Note: Truncated to first 500 files. Run with higher --max-files to include all.
+
+### (root)/
+- `.dockerignore`
+- `.gitignore`
+- `.npmignore`
+- `.npmrc`
+- `.prompt-injection-scanignore`
+- `.secretscanignore`
+- `CHANGELOG.md`
+- `CONTRIBUTING.md`
+- `Dockerfile`
+- `flake.nix`
+- `LICENSE`
+- `package-lock.json`
+- `package.json`
+- `README.md`
+- `VISION.md`
+
+### .github/
+- `.github/CODEOWNERS`
+- `.github/FUNDING.yml`
+- `.github/PULL_REQUEST_TEMPLATE.md`
+
+### .github/ISSUE_TEMPLATE/
+- `.github/ISSUE_TEMPLATE/bug_report.yml`
+- `.github/ISSUE_TEMPLATE/config.yml`
+- `.github/ISSUE_TEMPLATE/feature_request.yml`
+
+### .github/workflows/
+- `.github/workflows/ai-triage.yml`
+- `.github/workflows/build-native.yml`
+- `.github/workflows/ci.yml`
+- `.github/workflows/cleanup-dev-versions.yml`
+- `.github/workflows/pipeline.yml`
+- `.github/workflows/pr-risk.yml`
+
+### bin/
+- `bin/gsd-from-source`
+
+### docker/
+- `docker/.env.example`
+- `docker/bootstrap.sh`
+- `docker/docker-compose.full.yaml`
+- `docker/docker-compose.yaml`
+- `docker/Dockerfile.ci-builder`
+- `docker/Dockerfile.sandbox`
+- `docker/entrypoint.sh`
+- `docker/README.md`
+
+### docs/
+- `docs/README.md`
+
+### docs/dev/
+- `docs/dev/ADR-001-branchless-worktree-architecture.md`
+- `docs/dev/ADR-003-pipeline-simplification.md`
+- `docs/dev/ADR-004-capability-aware-model-routing.md`
+- `docs/dev/ADR-005-multi-model-provider-tool-strategy.md`
+- `docs/dev/ADR-007-model-catalog-split.md`
+- `docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md`
+- `docs/dev/ADR-008-IMPLEMENTATION-PLAN.md`
+- `docs/dev/ADR-009-IMPLEMENTATION-PLAN.md`
+- `docs/dev/ADR-009-orchestration-kernel-refactor.md`
+- `docs/dev/ADR-010-pi-clean-seam-architecture.md`
+- `docs/dev/agent-knowledge-index.md`
+- `docs/dev/architecture.md`
+- `docs/dev/ci-cd-pipeline.md`
+- `docs/dev/FILE-SYSTEM-MAP.md`
+- `docs/dev/FRONTIER-TECHNIQUES.md`
+- `docs/dev/pi-context-optimization-opportunities.md`
+- `docs/dev/PRD-branchless-worktree-architecture.md`
+- `docs/dev/PRD-pi-clean-seam-refactor.md`
+
+### docs/dev/building-coding-agents/
+- *(27 files: 27 .md)*
+
+### docs/dev/context-and-hooks/
+- `docs/dev/context-and-hooks/01-the-context-pipeline.md`
+- `docs/dev/context-and-hooks/02-hook-reference.md`
+- `docs/dev/context-and-hooks/03-context-injection-patterns.md`
+- `docs/dev/context-and-hooks/04-message-types-and-llm-visibility.md`
+- `docs/dev/context-and-hooks/05-inter-extension-communication.md`
+- `docs/dev/context-and-hooks/06-advanced-patterns-from-source.md`
+- `docs/dev/context-and-hooks/07-the-system-prompt-anatomy.md`
+- `docs/dev/context-and-hooks/README.md`
+
+### docs/dev/extending-pi/
+- *(26 files: 26 .md)*
+
+### docs/dev/pi-ui-tui/
+- *(24 files: 24 .md)*
+
+### docs/dev/proposals/
+- `docs/dev/proposals/698-browser-tools-feature-additions.md`
+- `docs/dev/proposals/rfc-gitops-branching-strategy.md`
+
+### docs/dev/proposals/workflows/
+- `docs/dev/proposals/workflows/backmerge.yml`
+- `docs/dev/proposals/workflows/create-release.yml`
+- `docs/dev/proposals/workflows/README.md`
+- `docs/dev/proposals/workflows/sync-next.yml`
+
+### docs/dev/superpowers/plans/
+- `docs/dev/superpowers/plans/2026-03-17-cicd-pipeline.md`
+
+### docs/dev/superpowers/specs/
+- `docs/dev/superpowers/specs/2026-03-17-cicd-pipeline-design.md`
+
+### docs/dev/what-is-pi/
+- `docs/dev/what-is-pi/01-what-pi-is.md`
+- `docs/dev/what-is-pi/02-design-philosophy.md`
+- `docs/dev/what-is-pi/03-the-four-modes-of-operation.md`
+- `docs/dev/what-is-pi/04-the-architecture-how-everything-fits-together.md`
+- `docs/dev/what-is-pi/05-the-agent-loop-how-pi-thinks.md`
+- `docs/dev/what-is-pi/06-tools-how-pi-acts-on-the-world.md`
+- `docs/dev/what-is-pi/07-sessions-memory-that-branches.md`
+- `docs/dev/what-is-pi/08-compaction-how-pi-manages-context-limits.md`
+- `docs/dev/what-is-pi/09-the-customization-stack.md`
+- `docs/dev/what-is-pi/10-providers-models-multi-model-by-default.md`
+- `docs/dev/what-is-pi/11-the-interactive-tui.md`
+- `docs/dev/what-is-pi/12-the-message-queue-talking-while-pi-thinks.md`
+- `docs/dev/what-is-pi/13-context-files-project-instructions.md`
+- `docs/dev/what-is-pi/14-the-sdk-rpc-embedding-pi.md`
+- `docs/dev/what-is-pi/15-pi-packages-the-ecosystem.md`
+- `docs/dev/what-is-pi/16-why-pi-matters-what-makes-it-different.md`
+- `docs/dev/what-is-pi/17-file-reference-all-documentation.md`
+- `docs/dev/what-is-pi/18-quick-reference-commands-shortcuts.md`
+- `docs/dev/what-is-pi/19-building-branded-apps-on-top-of-pi.md`
+- `docs/dev/what-is-pi/README.md`
+
+### docs/user-docs/
+- *(21 files: 21 .md)*
+
+### docs/zh-CN/
+- `docs/zh-CN/README.md`
+
+### docs/zh-CN/user-docs/
+- *(21 files: 21 .md)*
+
+### gitbook/
+- `gitbook/README.md`
+- `gitbook/SUMMARY.md`
+
+### gitbook/configuration/
+- `gitbook/configuration/custom-models.md`
+- `gitbook/configuration/git-settings.md`
+- `gitbook/configuration/mcp-servers.md`
+- `gitbook/configuration/notifications.md`
+- `gitbook/configuration/preferences.md`
+- `gitbook/configuration/providers.md`
+
+### gitbook/core-concepts/
+- `gitbook/core-concepts/auto-mode.md`
+- `gitbook/core-concepts/project-structure.md`
+- `gitbook/core-concepts/step-mode.md`
+
+### gitbook/features/
+- `gitbook/features/captures.md`
+- `gitbook/features/cost-management.md`
+- `gitbook/features/dynamic-model-routing.md`
+- `gitbook/features/github-sync.md`
+- `gitbook/features/headless.md`
+- `gitbook/features/parallel.md`
+- `gitbook/features/remote-questions.md`
+- `gitbook/features/skills.md`
+- `gitbook/features/teams.md`
+- `gitbook/features/token-optimization.md`
+- `gitbook/features/visualizer.md`
+- `gitbook/features/web-interface.md`
+- `gitbook/features/workflow-templates.md`
+
+### gitbook/getting-started/
+- `gitbook/getting-started/choosing-a-model.md`
+- `gitbook/getting-started/first-project.md`
+- `gitbook/getting-started/installation.md`
+
+### gitbook/reference/
+- `gitbook/reference/cli-flags.md`
+- `gitbook/reference/commands.md`
+- `gitbook/reference/environment-variables.md`
+- `gitbook/reference/keyboard-shortcuts.md`
+- `gitbook/reference/migration.md`
+- `gitbook/reference/troubleshooting.md`
+
+### sf-orchestrator/
+- `sf-orchestrator/SKILL.md`
+
+### sf-orchestrator/references/
+- `sf-orchestrator/references/answer-injection.md`
+- `sf-orchestrator/references/commands.md`
+- `sf-orchestrator/references/json-result.md`
+
+### sf-orchestrator/templates/
+- `sf-orchestrator/templates/spec.md`
+
+### sf-orchestrator/workflows/
+- `sf-orchestrator/workflows/build-from-spec.md`
+- `sf-orchestrator/workflows/monitor-and-poll.md`
+- `sf-orchestrator/workflows/step-by-step.md`
+
+### mintlify-docs/
+- `mintlify-docs/docs`
+- `mintlify-docs/docs.json`
+- `mintlify-docs/getting-started.mdx`
+- `mintlify-docs/introduction.mdx`
+
+### mintlify-docs/guides/
+- `mintlify-docs/guides/auto-mode.mdx`
+- `mintlify-docs/guides/captures-triage.mdx`
+- `mintlify-docs/guides/change-management.mdx`
+- `mintlify-docs/guides/commands.mdx`
+- `mintlify-docs/guides/configuration.mdx`
+- `mintlify-docs/guides/cost-management.mdx`
+- `mintlify-docs/guides/custom-models.mdx`
+- `mintlify-docs/guides/dynamic-model-routing.mdx`
+- `mintlify-docs/guides/git-strategy.mdx`
+- `mintlify-docs/guides/migration.mdx`
+- `mintlify-docs/guides/parallel-orchestration.mdx`
+- `mintlify-docs/guides/remote-questions.mdx`
+- `mintlify-docs/guides/skills.mdx`
+- `mintlify-docs/guides/token-optimization.mdx`
+- `mintlify-docs/guides/troubleshooting.mdx`
+- `mintlify-docs/guides/visualizer.mdx`
+- `mintlify-docs/guides/web-interface.mdx`
+- `mintlify-docs/guides/working-in-teams.mdx`
+
+### native/
+- `native/.gitignore`
+- `native/.npmignore`
+- `native/Cargo.toml`
+- `native/README.md`
+
+### native/.cargo/
+- `native/.cargo/config.toml`
+
+### native/crates/ast/
+- `native/crates/ast/Cargo.toml`
+
+### native/crates/ast/src/
+- `native/crates/ast/src/ast.rs`
+- `native/crates/ast/src/glob_util.rs`
+- `native/crates/ast/src/lib.rs`
+
+### native/crates/ast/src/language/
+- `native/crates/ast/src/language/mod.rs`
+- `native/crates/ast/src/language/parsers.rs`
+
+### native/crates/engine/
+- `native/crates/engine/build.rs`
+- `native/crates/engine/Cargo.toml`
+
+### native/crates/engine/src/
+- *(22 files: 22 .rs)*
+
+### native/crates/grep/
+- `native/crates/grep/Cargo.toml`
+
+### native/crates/grep/src/
+- `native/crates/grep/src/lib.rs`
+
+### native/npm/darwin-arm64/
+- `native/npm/darwin-arm64/package.json`
+
+### native/npm/darwin-x64/
+- `native/npm/darwin-x64/package.json`
+
+### native/npm/linux-arm64-gnu/
+- `native/npm/linux-arm64-gnu/package.json`
+
+### native/npm/linux-x64-gnu/
+- `native/npm/linux-x64-gnu/package.json`
+
+### native/npm/win32-x64-msvc/
+- `native/npm/win32-x64-msvc/package.json`
+
+### native/scripts/
+- `native/scripts/build.js`
+- `native/scripts/sync-platform-versions.cjs`
+
+### packages/daemon/
+- `packages/daemon/package.json`
+- `packages/daemon/tsconfig.json`
+
+### packages/daemon/src/
+- *(27 files: 27 .ts)*
+
+### packages/mcp-server/
+- `packages/mcp-server/.npmignore`
+- `packages/mcp-server/package.json`
+- `packages/mcp-server/README.md`
+- `packages/mcp-server/tsconfig.json`
+
+### packages/mcp-server/src/
+- `packages/mcp-server/src/cli.ts`
+- `packages/mcp-server/src/env-writer.test.ts`
+- `packages/mcp-server/src/env-writer.ts`
+- `packages/mcp-server/src/import-candidates.test.ts`
+- `packages/mcp-server/src/index.ts`
+- `packages/mcp-server/src/mcp-server.test.ts`
+- `packages/mcp-server/src/secure-env-collect.test.ts`
+- `packages/mcp-server/src/server.ts`
+- `packages/mcp-server/src/session-manager.ts`
+- `packages/mcp-server/src/tool-credentials.test.ts`
+- `packages/mcp-server/src/tool-credentials.ts`
+- `packages/mcp-server/src/types.ts`
+- `packages/mcp-server/src/workflow-tools.test.ts`
+- `packages/mcp-server/src/workflow-tools.ts`
+
+### packages/mcp-server/src/readers/
+- `packages/mcp-server/src/readers/captures.ts`
+- `packages/mcp-server/src/readers/doctor-lite.ts`
+- `packages/mcp-server/src/readers/graph.test.ts`
+- `packages/mcp-server/src/readers/graph.ts`
+- `packages/mcp-server/src/readers/index.ts`
+- `packages/mcp-server/src/readers/knowledge.ts`
+- `packages/mcp-server/src/readers/metrics.ts`
+- `packages/mcp-server/src/readers/paths.ts`
+- `packages/mcp-server/src/readers/readers.test.ts`
+- `packages/mcp-server/src/readers/roadmap.ts`
+- `packages/mcp-server/src/readers/state.ts`
+
+### packages/native/
+- `packages/native/package.json`
+- `packages/native/tsconfig.json`
+
+### packages/native/src/
+- `packages/native/src/index.ts`
+- `packages/native/src/native.ts`
+
+### packages/native/src/__tests__/
+- `packages/native/src/__tests__/clipboard.test.mjs`
+- `packages/native/src/__tests__/diff.test.mjs`
+- `packages/native/src/__tests__/fd.test.mjs`
+- `packages/native/src/__tests__/glob.test.mjs`
+- `packages/native/src/__tests__/grep.test.mjs`
+- `packages/native/src/__tests__/highlight.test.mjs`
+- `packages/native/src/__tests__/html.test.mjs`
+- `packages/native/src/__tests__/image.test.mjs`
+- `packages/native/src/__tests__/json-parse.test.mjs`
+- `packages/native/src/__tests__/module-compat.test.mjs`
+- `packages/native/src/__tests__/ps.test.mjs`
+- `packages/native/src/__tests__/stream-process.test.mjs`
+- `packages/native/src/__tests__/text.test.mjs`
+- `packages/native/src/__tests__/truncate.test.mjs`
+- `packages/native/src/__tests__/ttsr.test.mjs`
+- `packages/native/src/__tests__/xxhash.test.mjs`
+
+### packages/native/src/ast/
+- `packages/native/src/ast/index.ts`
+- `packages/native/src/ast/types.ts`
+
+### packages/native/src/clipboard/
+- `packages/native/src/clipboard/index.ts`
+- `packages/native/src/clipboard/types.ts`
+
+### packages/native/src/diff/
+- `packages/native/src/diff/index.ts`
+- `packages/native/src/diff/types.ts`
+
+### packages/native/src/fd/
+- `packages/native/src/fd/index.ts`
+- `packages/native/src/fd/types.ts`
+
+### packages/native/src/glob/
+- `packages/native/src/glob/index.ts`
+- `packages/native/src/glob/types.ts`
+
+### packages/native/src/grep/
+- `packages/native/src/grep/index.ts`
+- `packages/native/src/grep/types.ts`
+
+### packages/native/src/gsd-parser/
+- `packages/native/src/gsd-parser/index.ts`
+- `packages/native/src/gsd-parser/types.ts`
+
+### packages/native/src/highlight/
+- `packages/native/src/highlight/index.ts`
+- `packages/native/src/highlight/types.ts`
+
+### packages/native/src/html/
+- `packages/native/src/html/index.ts`
+- `packages/native/src/html/types.ts`
+
+### packages/native/src/image/
+- `packages/native/src/image/index.ts`
+- `packages/native/src/image/types.ts`
+
+### packages/native/src/json-parse/
+- `packages/native/src/json-parse/index.ts`
+
+### packages/native/src/ps/
+- `packages/native/src/ps/index.ts`
+- `packages/native/src/ps/types.ts`
+
+### packages/native/src/stream-process/
+- `packages/native/src/stream-process/index.ts`
+
+### packages/native/src/text/
+- `packages/native/src/text/index.ts`
+- `packages/native/src/text/types.ts`
+
+### packages/native/src/truncate/
+- `packages/native/src/truncate/index.ts`
+
+### packages/native/src/ttsr/
+- `packages/native/src/ttsr/index.ts`
+- `packages/native/src/ttsr/types.ts`
+
+### packages/native/src/xxhash/
+- `packages/native/src/xxhash/index.ts`
+
+### packages/pi-agent-core/
+- `packages/pi-agent-core/package.json`
+- `packages/pi-agent-core/tsconfig.json`
+
+### packages/pi-agent-core/src/
+- `packages/pi-agent-core/src/agent-loop.test.ts`
+- `packages/pi-agent-core/src/agent-loop.ts`
+- `packages/pi-agent-core/src/agent.test.ts`
+- `packages/pi-agent-core/src/agent.ts`
+- `packages/pi-agent-core/src/index.ts`
+- `packages/pi-agent-core/src/proxy.ts`
+- `packages/pi-agent-core/src/types.ts`
+
+### packages/pi-ai/
+- `packages/pi-ai/bedrock-provider.d.ts`
+- `packages/pi-ai/bedrock-provider.js`
+- `packages/pi-ai/oauth.d.ts`
+- `packages/pi-ai/oauth.js`
+- `packages/pi-ai/package.json`
+
+### packages/pi-ai/scripts/
+- `packages/pi-ai/scripts/generate-models.ts`
+
+### packages/pi-ai/src/
+- `packages/pi-ai/src/api-registry.ts`
+- `packages/pi-ai/src/bedrock-provider.ts`
+- `packages/pi-ai/src/cli.ts`
+- `packages/pi-ai/src/env-api-keys.ts`
+- `packages/pi-ai/src/index.ts`
+- `packages/pi-ai/src/models.custom.ts`
+- `packages/pi-ai/src/models.generated.test.ts`
+- `packages/pi-ai/src/models.generated.ts`
+- `packages/pi-ai/src/models.test.ts`
+- `packages/pi-ai/src/models.ts`
+- `packages/pi-ai/src/oauth.ts`
+- `packages/pi-ai/src/stream.ts`
+- `packages/pi-ai/src/types.ts`
+- `packages/pi-ai/src/web-runtime-env-api-keys.ts`
+
+### packages/pi-ai/src/providers/
+- *(25 files: 25 .ts)*
+
+### packages/pi-ai/src/utils/
+- `packages/pi-ai/src/utils/event-stream.ts`
+- `packages/pi-ai/src/utils/hash.ts`
+- `packages/pi-ai/src/utils/json-parse.ts`
+- `packages/pi-ai/src/utils/overflow.ts`
+- `packages/pi-ai/src/utils/repair-tool-json.ts`
+- `packages/pi-ai/src/utils/sanitize-unicode.ts`
+- `packages/pi-ai/src/utils/typebox-helpers.ts`
+- `packages/pi-ai/src/utils/validation.ts`
+
+### packages/pi-ai/src/utils/oauth/
+- `packages/pi-ai/src/utils/oauth/github-copilot.test.ts`
+- `packages/pi-ai/src/utils/oauth/github-copilot.ts`
+- `packages/pi-ai/src/utils/oauth/google-antigravity.ts`
+- `packages/pi-ai/src/utils/oauth/google-gemini-cli.ts`
+- `packages/pi-ai/src/utils/oauth/google-oauth-utils.ts`
+- `packages/pi-ai/src/utils/oauth/index.ts`
+- `packages/pi-ai/src/utils/oauth/openai-codex.ts`
+- `packages/pi-ai/src/utils/oauth/pkce.ts`
+- `packages/pi-ai/src/utils/oauth/types.ts`
+
+### packages/pi-ai/src/utils/tests/
+- `packages/pi-ai/src/utils/tests/json-parse.test.ts`
+- `packages/pi-ai/src/utils/tests/overflow.test.ts`
+- `packages/pi-ai/src/utils/tests/repair-tool-json.test.ts`
diff --git a/.gsd/audit/events.jsonl b/.gsd/audit/events.jsonl
new file mode 100644
index 000000000..7f2cf8f30
--- /dev/null
+++ b/.gsd/audit/events.jsonl
@@ -0,0 +1,4 @@
+{"eventId":"9567a0bc-d8a2-410d-83a8-4ea091e095a7","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.561Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
+{"eventId":"d1765e7e-d2dc-4417-9fb8-0bec6e01e9a8","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.563Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}
+{"eventId":"9c2b6de3-b8eb-4a51-af8a-91be51fecfc9","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.516Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
+{"eventId":"8597d568-05b8-43ed-89d7-ca4673079e0f","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.518Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}
diff --git a/.gsd/notifications.jsonl b/.gsd/notifications.jsonl
new file mode 100644
index 000000000..788a40e93
--- /dev/null
+++ b/.gsd/notifications.jsonl
@@ -0,0 +1,10 @@
+{"id":"76bf27b0-01bf-4260-80f6-b7d8249c6875","ts":"2026-04-15T06:32:30.018Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
+{"id":"597c94ae-7c3b-48dd-89b1-be8d0bbd02ee","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
+{"id":"dc176d95-8171-4d15-8c73-97ddb704a786","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
+{"id":"66762fce-d6c6-41db-be03-d34348aaccd9","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
+{"id":"b7e5e997-b98d-4b50-a6f3-017a916dd2ac","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
+{"id":"eccbb677-be17-44b9-a7b6-440ebf777a89","ts":"2026-04-15T06:33:47.202Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
+{"id":"98803c8a-c9f1-43bd-9903-f67fea7a5128","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
+{"id":"a9253906-1990-4957-9c1a-36046b8d3cfa","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
+{"id":"8caa4904-0ce5-46f4-b645-df5077fb229e","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
+{"id":"eb520a00-567d-4c02-bb2e-6111089dc3de","ts":"2026-04-15T09:03:17.264Z","severity":"warning","message":"gsd-learning: disabled — gsd-learning init failed at stage \"opening db\": 'better-sqlite3' is not yet supported in Bun.\nTrack the status in https://github.com/oven-sh/bun/issues/4290\nIn the meantime, you could try bun:sqlite which has a similar API.","source":"notify","read":false}
diff --git a/.prompt-injection-scanignore b/.prompt-injection-scanignore
index c594d4a49..9e12b6fa4 100644
--- a/.prompt-injection-scanignore
+++ b/.prompt-injection-scanignore
@@ -1,2 +1,2 @@
# False positives in SF prompt templates — these are legitimate LLM instructions, not injection
-src/resources/extensions/gsd/prompts/doctor-heal.md:You are now responsible
+src/resources/extensions/sf/prompts/doctor-heal.md:You are now responsible
diff --git a/.secretscanignore b/.secretscanignore
index f81ab4813..b24b8ad14 100644
--- a/.secretscanignore
+++ b/.secretscanignore
@@ -23,7 +23,7 @@ src/tests/integration/web-mode-runtime-fixtures.ts:sk-runtime-recovery-secret
src/tests/web-onboarding-contract.test.ts:sk-test-secret
# Doctor environment tests use dummy localhost DB URLs
-src/resources/extensions/gsd/tests/doctor-environment.test.ts:postgres://localhost
+src/resources/extensions/sf/tests/doctor-environment.test.ts:postgres://localhost
# Documentation examples
diff --git a/Dockerfile b/Dockerfile
index 8db7565b8..82168e867 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
# ──────────────────────────────────────────────
# Runtime
-# Image: ghcr.io/gsd-build/sf-run
+# Image: ghcr.io/sf-build/sf-run
# Used by: end users via docker run
# ──────────────────────────────────────────────
FROM node:24-slim AS runtime
@@ -17,5 +17,5 @@ RUN npm install -g sf-run@${SF_VERSION}
# Default working directory for user projects
WORKDIR /workspace
-ENTRYPOINT ["gsd"]
+ENTRYPOINT ["sf"]
CMD ["--help"]
diff --git a/bin/gsd-from-source b/bin/gsd-from-source
deleted file mode 100755
index 795912b30..000000000
--- a/bin/gsd-from-source
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-#
-# gsd-from-source — run SF directly from this source checkout via bun.
-#
-# Purpose: every local commit in this repo (e.g. the #4251 fix) is live
-# immediately without reinstalling the bun-packaged sf-run. Subagents can
-# spawn gsd by pointing SF_BIN_PATH at this script instead of dist/loader.js.
-#
-# Contract:
-# - Executable shim spawn() / exec() can launch directly.
-# - Exports SF_BIN_PATH before handing off to loader.ts so loader.ts's
-# `SF_BIN_PATH ||= process.argv[1]` branch preserves the shim path
-# instead of clobbering it with the .ts loader path (which is not
-# directly executable by child_process.spawn).
-#
-# Requirements: bun on PATH, node_modules populated (`bun install` once).
-set -euo pipefail
-
-SCRIPT_DIR=$(cd -- "$(dirname -- "$(readlink -f "${BASH_SOURCE[0]}")")" &>/dev/null && pwd)
-SF_SOURCE_ROOT=$(cd -- "$SCRIPT_DIR/.." &>/dev/null && pwd)
-
-export SF_BIN_PATH="$SCRIPT_DIR/gsd-from-source"
-
-exec bun run "$SF_SOURCE_ROOT/src/loader.ts" "$@"
diff --git a/docker/Dockerfile.ci-builder b/docker/Dockerfile.ci-builder
index 822651db4..e4c4454ee 100644
--- a/docker/Dockerfile.ci-builder
+++ b/docker/Dockerfile.ci-builder
@@ -1,6 +1,6 @@
# ──────────────────────────────────────────────
# CI Builder
-# Image: ghcr.io/gsd-build/gsd-ci-builder
+# Image: ghcr.io/sf-build/sf-ci-builder
# Used by: pipeline.yml Dev stage
# ──────────────────────────────────────────────
FROM node:24-bookworm
diff --git a/docker/Dockerfile.sandbox b/docker/Dockerfile.sandbox
index f76faf2c0..cceb07512 100644
--- a/docker/Dockerfile.sandbox
+++ b/docker/Dockerfile.sandbox
@@ -20,17 +20,17 @@ ARG SF_VERSION=latest
RUN npm install -g sf-run@${SF_VERSION}
# Create non-root user for sandbox isolation
-RUN groupadd --gid 1000 gsd \
- && useradd --uid 1000 --gid gsd --shell /bin/bash --create-home gsd
+RUN groupadd --gid 1000 sf \
+ && useradd --uid 1000 --gid sf --shell /bin/bash --create-home sf
# Persistent SF state directory
-RUN mkdir -p /home/gsd/.gsd && chown -R gsd:gsd /home/gsd/.gsd
+RUN mkdir -p /home/sf/.sf && chown -R sf:sf /home/sf/.sf
# Workspace directory — synced from host via Docker sandbox
WORKDIR /workspace
-RUN chown gsd:gsd /workspace
+RUN chown sf:sf /workspace
-# Entrypoint handles UID/GID remapping, bootstrap, and drops to gsd user
+# Entrypoint handles UID/GID remapping, bootstrap, and drops to sf user
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
COPY bootstrap.sh /usr/local/bin/bootstrap.sh
RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/bootstrap.sh
@@ -39,4 +39,4 @@ RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/bootstrap.sh
EXPOSE 3000
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
-CMD ["gsd", "--help"]
+CMD ["sf", "--help"]
diff --git a/docker/bootstrap.sh b/docker/bootstrap.sh
index 21f8d63e9..4b2febf4b 100755
--- a/docker/bootstrap.sh
+++ b/docker/bootstrap.sh
@@ -5,7 +5,7 @@ set -e
# SF First-Boot Bootstrap
#
# Runs once on initial container creation.
-# Called by entrypoint.sh as the gsd user.
+# Called by entrypoint.sh as the sf user.
#
# This script is idempotent — safe to run multiple
# times, but the sentinel in entrypoint.sh ensures
diff --git a/docker/docker-compose.full.yaml b/docker/docker-compose.full.yaml
index 3bc5d2c54..15e5b2f1d 100644
--- a/docker/docker-compose.full.yaml
+++ b/docker/docker-compose.full.yaml
@@ -1,21 +1,21 @@
services:
- gsd:
+ sf:
build:
context: . # Build context is the docker/ directory
dockerfile: Dockerfile.sandbox # Runtime sandbox image with entrypoint
args:
SF_VERSION: latest # Pin a specific version: SF_VERSION=2.51.0
- container_name: gsd-sandbox
+ container_name: sf-sandbox
ports:
- "3000:3000" # SF web UI
volumes:
- ../:/workspace # Project root mounted into the container
- - gsd-state:/home/gsd/.gsd # Persistent SF state across restarts
- # - ~/.ssh:/home/gsd/.ssh:ro # SSH keys for git operations (read-only)
- # - ~/.gitconfig:/home/gsd/.gitconfig:ro # Host git config
+ - sf-state:/home/sf/.sf # Persistent SF state across restarts
+ # - ~/.ssh:/home/sf/.ssh:ro # SSH keys for git operations (read-only)
+ # - ~/.gitconfig:/home/sf/.gitconfig:ro # Host git config
env_file:
- .env # API keys and secrets (see .env.example)
@@ -23,7 +23,7 @@ services:
environment:
- NODE_ENV=development
# UID/GID remapping — match your host user to avoid permission issues
- # on bind-mounted volumes. The entrypoint remaps the container's gsd
+ # on bind-mounted volumes. The entrypoint remaps the container's sf
# user to these IDs at startup. Run `id -u` / `id -g` to find yours.
- PUID=1000
- PGID=1000
@@ -36,7 +36,7 @@ services:
# Health check — verify SF is installed and responsive
healthcheck:
- test: ["CMD", "gsd", "--version"]
+ test: ["CMD", "sf", "--version"]
interval: 30s
timeout: 5s
retries: 3
@@ -57,5 +57,5 @@ services:
# network_mode: bridge # Default Docker bridge (already the default)
volumes:
- gsd-state:
+ sf-state:
driver: local
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 5a14b14d7..d7def970b 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -1,16 +1,16 @@
services:
- gsd:
+ sf:
build:
context: .
dockerfile: Dockerfile.sandbox
args:
SF_VERSION: latest
- container_name: gsd-sandbox
+ container_name: sf-sandbox
ports:
- "3000:3000"
volumes:
- ../:/workspace
- - gsd-state:/home/gsd/.gsd
+ - sf-state:/home/sf/.sf
env_file:
- .env
environment:
@@ -19,5 +19,5 @@ services:
tty: true
volumes:
- gsd-state:
+ sf-state:
driver: local
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
index 673685980..73001bbb6 100755
--- a/docker/entrypoint.sh
+++ b/docker/entrypoint.sh
@@ -12,9 +12,9 @@ set -e
# 4. Signal forwarding — exec into the final process
# ──────────────────────────────────────────────
-SF_USER="gsd"
+SF_USER="sf"
SF_HOME="/home/${SF_USER}"
-SF_DIR="${SF_HOME}/.gsd"
+SF_DIR="${SF_HOME}/.sf"
# ── 1. UID/GID Remapping ────────────────────────────────
# Accept PUID/PGID from the environment so the container
@@ -66,7 +66,7 @@ SENTINEL="${SF_DIR}/.bootstrapped"
if [ ! -f "${SENTINEL}" ]; then
if [ -x /usr/local/bin/bootstrap.sh ]; then
- # Run bootstrap as the gsd user so files get correct ownership
+ # Run bootstrap as the sf user so files get correct ownership
gosu "${SF_USER}" /usr/local/bin/bootstrap.sh
fi
touch "${SENTINEL}"
@@ -75,7 +75,7 @@ fi
# ── 4. Drop Privileges & Exec ──────────────────────────
# Replace this shell process with the final command running
-# as the gsd user. exec + gosu = proper PID 1 = proper
+# as the sf user. exec + gosu = proper PID 1 = proper
# signal forwarding (SIGTERM, SIGINT, etc.).
exec gosu "${SF_USER}" "$@"
diff --git a/docs/dev/FILE-SYSTEM-MAP.md b/docs/dev/FILE-SYSTEM-MAP.md
index 467b09973..5ca78f55d 100644
--- a/docs/dev/FILE-SYSTEM-MAP.md
+++ b/docs/dev/FILE-SYSTEM-MAP.md
@@ -1,4 +1,4 @@
-# GSD2 File System Map
+# SF File System Map
# Maps every source file to its system/subsystem labels
---
diff --git a/docs/user-docs/claude-code-auth-compliance.md b/docs/user-docs/claude-code-auth-compliance.md
index 1707878cb..acaf6f6ee 100644
--- a/docs/user-docs/claude-code-auth-compliance.md
+++ b/docs/user-docs/claude-code-auth-compliance.md
@@ -10,7 +10,7 @@ Anthropic's current public guidance draws a hard line:
- Third-party tools should prefer API key authentication through Claude Console or a supported cloud provider.
- Apps that misrepresent their identity, route third-party traffic against subscription limits, or otherwise violate Anthropic terms are explicitly prohibited.
-For GSD2, the safe path is:
+For SF, the safe path is:
1. Treat local Claude Code as an external authenticated runtime.
2. Never ask SF users to sign into Claude subscriptions through SF-managed Anthropic OAuth.
@@ -23,7 +23,7 @@ For GSD2, the safe path is:
Anthropic's help center says Claude Pro/Max users should install Claude Code, run `claude`, and "log in with the same credentials you use for Claude." It also says this connects the subscription directly to Claude Code, and that `/login` is the way to switch account types. The Team/Enterprise article gives the same flow for org accounts.
-Implication for GSD2:
+Implication for SF:
- Letting users authenticate inside the real `claude` CLI is aligned with Anthropic's documented flow.
- Detecting `claude auth status` and routing work through the local CLI or official Claude Code SDK is the lowest-risk pattern.
@@ -38,18 +38,18 @@ Anthropic's Claude Code docs say supported auth types include Claude.ai credenti
4. `apiKeyHelper`
5. subscription OAuth from `/login`
-Implication for GSD2:
+Implication for SF:
-- If GSD2 shells out to or embeds Claude Code, it should respect Claude Code's own credential selection instead of inventing a parallel Anthropic OAuth flow.
+- If SF shells out to or embeds Claude Code, it should respect Claude Code's own credential selection instead of inventing a parallel Anthropic OAuth flow.
- `apiKeyHelper` is the clean enterprise escape hatch when an org wants dynamic short-lived keys without handing raw API keys to the tool.
### 3. Anthropic commercial usage is available through API keys and supported cloud providers
Anthropic's commercial terms govern API keys and related Anthropic services for customer-built products, including products made available to end users. The authentication docs for teams recommend Claude for Teams/Enterprise, Claude Console, Bedrock, Vertex, or Microsoft Foundry.
-Implication for GSD2:
+Implication for SF:
-- If GSD2 is acting as a product for users, direct Anthropic access should be through commercial auth paths, not subscription-token reuse.
+- If SF is acting as a product for users, direct Anthropic access should be through commercial auth paths, not subscription-token reuse.
## What Anthropic Explicitly Warns Against
@@ -65,13 +65,13 @@ Anthropic's consumer terms add two more constraints:
- Users may not share account login info, API keys, or account credentials with anyone else.
- Except when accessing services via an Anthropic API key or where Anthropic explicitly permits it, users may not access the services through automated or non-human means.
-Implication for GSD2:
+Implication for SF:
- A SF-managed Anthropic OAuth flow for subscription accounts is high risk.
- Reusing user Claude subscription credentials inside SF's own API client is high risk.
- Any flow that makes Anthropic believe requests come from Claude Code when they actually come from SF infrastructure is out of bounds.
-## Current GSD2 Findings
+## Current SF Findings
### Low-risk / aligned pieces
@@ -96,7 +96,7 @@ All Anthropic OAuth code paths have been removed:
- `packages/daemon/src/orchestrator.ts` — **Updated.** OAuth token refresh removed; requires `ANTHROPIC_API_KEY` env var.
- `packages/pi-ai/src/providers/anthropic.ts` — **Updated.** OAuth client branch removed; `isOAuthToken` always returns false.
-## Recommended Policy For GSD2
+## Recommended Policy For SF
Adopt this as the repo rule:
@@ -104,10 +104,10 @@ Adopt this as the repo rule:
- the `claude` CLI
- Claude Code SDK when it is backed by the local authenticated Claude Code install
- other Anthropic-documented native flows
-- GSD2 must not implement its own Anthropic subscription OAuth flow for end users.
-- GSD2 must not persist Anthropic subscription OAuth tokens for later API use.
-- GSD2 must not send Anthropic API traffic using subscription OAuth tokens obtained by SF.
-- GSD2 may support Anthropic direct access only via:
+- SF must not implement its own Anthropic subscription OAuth flow for end users.
+- SF must not persist Anthropic subscription OAuth tokens for later API use.
+- SF must not send Anthropic API traffic using subscription OAuth tokens obtained by SF.
+- SF may support Anthropic direct access only via:
- `ANTHROPIC_API_KEY`
- Claude Console API keys stored in auth storage
- `apiKeyHelper`
@@ -157,7 +157,7 @@ This is the best long-term UX because it separates:
## Decision Rule
-If a proposed GSD2 feature needs Anthropic access, ask one question:
+If a proposed SF feature needs Anthropic access, ask one question:
"Is SF calling Anthropic as SF, or is SF delegating to the user's already-authenticated local Claude Code runtime?"
diff --git a/docs/zh-CN/user-docs/claude-code-auth-compliance.md b/docs/zh-CN/user-docs/claude-code-auth-compliance.md
index a5b2a0c0e..4e0b7dead 100644
--- a/docs/zh-CN/user-docs/claude-code-auth-compliance.md
+++ b/docs/zh-CN/user-docs/claude-code-auth-compliance.md
@@ -10,7 +10,7 @@ Anthropic 当前公开的指导原则边界非常清晰:
- 第三方工具应优先通过 Claude Console 或受支持云 provider 的 API key 进行认证。
- 任何伪装身份、绕过订阅限制转发第三方流量、或以其他方式违反 Anthropic 条款的应用,都被明确禁止。
-对于 GSD2,安全路径应当是:
+对于 SF,安全路径应当是:
1. 把本地 Claude Code 视为一个外部、已认证的运行时。
2. 永远不要让 SF 用户通过 SF 托管的 Anthropic OAuth 去登录 Claude 订阅。
@@ -23,7 +23,7 @@ Anthropic 当前公开的指导原则边界非常清晰:
Anthropic 帮助中心说明:Claude Pro / Max 用户应安装 Claude Code,运行 `claude`,并“使用与你登录 Claude 相同的凭据”完成登录。文档还指出,这样会把订阅直接连接到 Claude Code,并且 `/login` 是切换账户类型的方式。Team / Enterprise 文章对组织账号也给出了同样流程。
-对 GSD2 的含义:
+对 SF 的含义:
- 允许用户在真正的 `claude` CLI 内部完成认证,是符合 Anthropic 文档流程的
- 检测 `claude auth status`,然后通过本地 CLI 或官方 Claude Code SDK 路由工作,是风险最低的方案
@@ -38,18 +38,18 @@ Anthropic 的 Claude Code 文档说明,支持的认证类型包括 Claude.ai
4. `apiKeyHelper`
5. 来自 `/login` 的订阅 OAuth
-对 GSD2 的含义:
+对 SF 的含义:
-- 如果 GSD2 是通过 shell 调用或嵌入 Claude Code,那么它应尊重 Claude Code 自己的凭据选择逻辑,而不是再发明一套平行的 Anthropic OAuth 流程
+- 如果 SF 是通过 shell 调用或嵌入 Claude Code,那么它应尊重 Claude Code 自己的凭据选择逻辑,而不是再发明一套平行的 Anthropic OAuth 流程
- 对需要动态短期凭据、但又不希望把原始 API key 交给工具的组织来说,`apiKeyHelper` 是一个干净的企业级出口
### 3. Anthropic 的商业使用可通过 API keys 和受支持的云 provider 实现
Anthropic 的商业条款约束的是 API keys 及其相关 Anthropic 服务,包括供客户构建给终端用户使用的产品。面向团队的认证文档推荐使用 Claude for Teams / Enterprise、Claude Console、Bedrock、Vertex 或 Microsoft Foundry。
-对 GSD2 的含义:
+对 SF 的含义:
-- 如果 GSD2 作为一个产品面向用户提供 Anthropic 能力,那么任何直接 Anthropic 访问都应走商业认证路径,而不是复用订阅 token
+- 如果 SF 作为一个产品面向用户提供 Anthropic 能力,那么任何直接 Anthropic 访问都应走商业认证路径,而不是复用订阅 token
## Anthropic 明确警告的内容
@@ -65,13 +65,13 @@ Anthropic 的消费条款还额外加入两项限制:
- 用户不得把账户登录信息、API keys 或账户凭据分享给他人
- 除非是通过 Anthropic API key 访问服务,或者 Anthropic 明确允许,否则用户不得通过自动化或非人工方式访问这些服务
-对 GSD2 的含义:
+对 SF 的含义:
- 由 SF 托管的 Anthropic 订阅 OAuth 流程属于高风险
- 在 SF 自己的 API client 中复用用户 Claude 订阅凭据属于高风险
- 任何会让 Anthropic 误以为请求来自 Claude Code、但实际上来自 SF 基础设施的流程,都越界了
-## 当前 GSD2 发现
+## 当前 SF 发现
### 低风险 / 已对齐的部分
@@ -96,7 +96,7 @@ Anthropic 的消费条款还额外加入两项限制:
- `packages/daemon/src/orchestrator.ts` —— **已更新**,去掉 OAuth token refresh,改为要求 `ANTHROPIC_API_KEY` 环境变量
- `packages/pi-ai/src/providers/anthropic.ts` —— **已更新**,移除 OAuth client 分支,`isOAuthToken` 始终返回 false
-## 针对 GSD2 的建议策略
+## 针对 SF 的建议策略
将下面内容作为仓库规则:
@@ -104,10 +104,10 @@ Anthropic 的消费条款还额外加入两项限制:
- `claude` CLI
- 基于本地已认证 Claude Code 安装的 Claude Code SDK
- 其他 Anthropic 文档明确支持的原生流程
-- GSD2 不得为终端用户实现自己的 Anthropic 订阅 OAuth 流程
-- GSD2 不得持久化 Anthropic 订阅 OAuth token,供后续 API 调用使用
-- GSD2 不得使用由 SF 获取的订阅 OAuth tokens 来发送 Anthropic API 流量
-- GSD2 可以支持 Anthropic 直接访问,但仅限以下方式:
+- SF 不得为终端用户实现自己的 Anthropic 订阅 OAuth 流程
+- SF 不得持久化 Anthropic 订阅 OAuth token,供后续 API 调用使用
+- SF 不得使用由 SF 获取的订阅 OAuth tokens 来发送 Anthropic API 流量
+- SF 可以支持 Anthropic 直接访问,但仅限以下方式:
- `ANTHROPIC_API_KEY`
- 保存在 auth storage 中的 Claude Console API keys
- `apiKeyHelper`
@@ -157,7 +157,7 @@ Anthropic 的消费条款还额外加入两项限制:
## 决策规则
-如果某个拟议中的 GSD2 特性需要访问 Anthropic,先问一个问题:
+如果某个拟议中的 SF 特性需要访问 Anthropic,先问一个问题:
“SF 是以 SF 的身份调用 Anthropic,还是 SF 只是把工作委派给用户本地已认证的 Claude Code 运行时?”
diff --git a/mintlify-docs/docs.json b/mintlify-docs/docs.json
index 4974e7b93..3b7094c93 100644
--- a/mintlify-docs/docs.json
+++ b/mintlify-docs/docs.json
@@ -5,7 +5,7 @@
"logo": {
"light": "/images/logo.svg",
"dark": "/images/logo.svg",
- "href": "https://gsd.build"
+ "href": "https://sf.build"
},
"favicon": "/images/favicon.svg",
"colors": {
diff --git a/mintlify-docs/getting-started.mdx b/mintlify-docs/getting-started.mdx
index 8930ced70..4aaba68dc 100644
--- a/mintlify-docs/getting-started.mdx
+++ b/mintlify-docs/getting-started.mdx
@@ -12,15 +12,15 @@ npm install -g sf-run
Requires Node.js 22+ and Git.
-**`command not found: gsd`?** Your shell may not have npm's global bin directory in `$PATH`. Run `npm prefix -g` to find it, then add `$(npm prefix -g)/bin` to your PATH. See [troubleshooting](/guides/troubleshooting) for details.
+**`command not found: sf`?** Your shell may not have npm's global bin directory in `$PATH`. Run `npm prefix -g` to find it, then add `$(npm prefix -g)/bin` to your PATH. See [troubleshooting](/guides/troubleshooting) for details.
-SF checks for updates every 24 hours. Update in-session with `/gsd update`.
+SF checks for updates every 24 hours. Update in-session with `/sf update`.
## First launch
```bash
-gsd
+sf
```
On first launch, a setup wizard walks you through:
@@ -31,16 +31,16 @@ On first launch, a setup wizard walks you through:
Re-run the wizard anytime:
```bash
-gsd config
+sf config
```
### Set up API keys
-For non-Anthropic models, you may need a search API key. Run `/gsd config` to set keys globally — they're saved to `~/.gsd/agent/auth.json` and apply to all projects.
+For non-Anthropic models, you may need a search API key. Run `/sf config` to set keys globally — they're saved to `~/.sf/agent/auth.json` and apply to all projects.
### Set up MCP servers
-To connect SF to local or external MCP servers, add project-local config in `.mcp.json` or `.gsd/mcp.json`. See [configuration](/guides/configuration) for examples. Use `/gsd mcp` to verify connectivity.
+To connect SF to local or external MCP servers, add project-local config in `.mcp.json` or `.sf/mcp.json`. See [configuration](/guides/configuration) for examples. Use `/sf mcp` to verify connectivity.
### Offline mode
@@ -60,18 +60,18 @@ Or configure per-phase models in [preferences](/guides/configuration).
- Type `/gsd` inside a session. SF executes one unit at a time, pausing between each with a wizard showing what completed and what's next.
+ Type `/sf` inside a session. SF executes one unit at a time, pausing between each with a wizard showing what completed and what's next.
- - **No `.gsd/` directory** → starts a discussion to capture your project vision
+ - **No `.sf/` directory** → starts a discussion to capture your project vision
- **Milestone exists, no roadmap** → discuss or research the milestone
- **Roadmap exists, slices pending** → plan the next slice or execute a task
- **Mid-task** → resume where you left off
- Type `/gsd auto` and walk away. SF autonomously researches, plans, executes, verifies, commits, and advances through every slice until the milestone is complete.
+ Type `/sf auto` and walk away. SF autonomously researches, plans, executes, verifies, commits, and advances through every slice until the milestone is complete.
```
- /gsd auto
+ /sf auto
```
See [auto mode](/guides/auto-mode) for the full details.
@@ -85,20 +85,20 @@ The recommended workflow: auto mode in one terminal, steering from another.
**Terminal 1 — let it build:**
```bash
-gsd
-/gsd auto
+sf
+/sf auto
```
**Terminal 2 — steer while it works:**
```bash
-gsd
-/gsd discuss # talk through architecture decisions
-/gsd status # check progress
-/gsd queue # queue the next milestone
+sf
+/sf discuss # talk through architecture decisions
+/sf status # check progress
+/sf queue # queue the next milestone
```
-Both terminals read and write the same `.gsd/` files. Decisions in terminal 2 are picked up at the next phase boundary automatically.
+Both terminals read and write the same `.sf/` files. Decisions in terminal 2 are picked up at the next phase boundary automatically.
## Project structure
@@ -110,11 +110,11 @@ Milestone → a shippable version (4-10 slices)
Task → one context-window-sized unit of work
```
-All state lives on disk in `.gsd/`:
+All state lives on disk in `.sf/`:
```
-.gsd/
+.sf/
PROJECT.md — what the project is right now
REQUIREMENTS.md — requirement contract (active/validated/deferred)
DECISIONS.md — append-only architectural decisions
@@ -139,20 +139,20 @@ All state lives on disk in `.gsd/`:
## Resume a session
```bash
-gsd --continue # or gsd -c
+sf --continue # or sf -c
```
Resumes the most recent session. To pick from all saved sessions:
```bash
-gsd sessions
+sf sessions
```
## VS Code extension
SF is also available as a VS Code extension (publisher: FluxLabs). It provides:
-- **`@gsd` chat participant** — talk to the agent in VS Code Chat
+- **`@sf` chat participant** — talk to the agent in VS Code Chat
- **Sidebar dashboard** — connection status, model info, token usage, quick actions
- **Full command palette** — start/stop agent, switch models, export sessions
@@ -161,27 +161,27 @@ The CLI (`sf-run`) must be installed first — the extension connects to it via
## Web interface
```bash
-gsd --web
+sf --web
```
A browser-based dashboard with real-time progress and multi-project support. See [web interface](/guides/web-interface) for details.
## Troubleshooting
-### `gsd` runs `git svn dcommit` instead of SF
+### `sf` runs a different command instead of SF
-The [oh-my-zsh git plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/git) defines `alias gsd='git svn dcommit'`.
+A shell plugin may already define an `sf` alias — for example, the [oh-my-zsh symfony plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/symfony) defines `alias sf` for the Symfony console. (The old binary name collided with the git plugin's `alias gsd='git svn dcommit'`.)
**Option 1** — Remove the alias in `~/.zshrc` (after the `source $ZSH/oh-my-zsh.sh` line):
```bash
-unalias gsd 2>/dev/null
+unalias sf 2>/dev/null
```
**Option 2** — Use the alternative binary name:
```bash
-gsd-cli
+sf-cli
```
-Both `gsd` and `gsd-cli` point to the same binary.
+Both `sf` and `sf-cli` point to the same binary.
diff --git a/mintlify-docs/guides/auto-mode.mdx b/mintlify-docs/guides/auto-mode.mdx
index 6bc9a9510..7c76a9522 100644
--- a/mintlify-docs/guides/auto-mode.mdx
+++ b/mintlify-docs/guides/auto-mode.mdx
@@ -1,9 +1,9 @@
---
title: "Auto mode"
-description: "SF's autonomous execution engine — run /gsd auto, walk away, come back to built software with clean git history."
+description: "SF's autonomous execution engine — run /sf auto, walk away, come back to built software with clean git history."
---
-Auto mode is a **state machine driven by files on disk**. It reads `.gsd/STATE.md`, determines the next unit of work, creates a fresh agent session with pre-loaded context, and lets the LLM execute. When the LLM finishes, auto mode reads disk state again and dispatches the next unit.
+Auto mode is a **state machine driven by files on disk**. It reads `.sf/STATE.md`, determines the next unit of work, creates a fresh agent session with pre-loaded context, and lets the LLM execute. When the LLM finishes, auto mode reads disk state again and dispatches the next unit.
## The loop
@@ -50,9 +50,9 @@ See [git strategy](/guides/git-strategy) for details.
### Crash recovery
-A lock file tracks the current unit. If the session dies, the next `/gsd auto` synthesizes a recovery briefing from tool calls that made it to disk and resumes with full context.
+A lock file tracks the current unit. If the session dies, the next `/sf auto` synthesizes a recovery briefing from tool calls that made it to disk and resumes with full context.
-**Headless auto-restart:** When running `gsd headless auto`, crashes trigger automatic restart with exponential backoff (5s → 10s → 30s cap, default 3 attempts). Combined with crash recovery, this enables overnight "run until done" execution.
+**Headless auto-restart:** When running `sf headless auto`, crashes trigger automatic restart with exponential backoff (5s → 10s → 30s cap, default 3 attempts). Combined with crash recovery, this enables overnight "run until done" execution.
### Provider error recovery
@@ -107,7 +107,7 @@ After milestone completion, SF auto-generates a self-contained HTML report with
auto_report: true # enabled by default
```
-Generate manually with `/gsd export --html`, or for all milestones with `/gsd export --html --all`.
+Generate manually with `/sf export --html`, or for all milestones with `/sf export --html --all`.
### Reactive task execution
@@ -122,7 +122,7 @@ reactive_execution: true # disabled by default
```
- /gsd auto
+ /sf auto
```
@@ -130,13 +130,13 @@ reactive_execution: true # disabled by default
```
- /gsd auto
+ /sf auto
```
Auto mode reads disk state and picks up where it left off.
```
- /gsd stop
+ /sf stop
```
Stops auto mode gracefully. Can be run from a different terminal.
@@ -145,7 +145,7 @@ reactive_execution: true # disabled by default
### Steer during execution
```
-/gsd steer
+/sf steer
```
Hard-steer plan documents without stopping the pipeline. Changes are picked up at the next phase boundary.
@@ -153,14 +153,14 @@ Hard-steer plan documents without stopping the pipeline. Changes are picked up a
### Capture thoughts
```
-/gsd capture "add rate limiting to API endpoints"
+/sf capture "add rate limiting to API endpoints"
```
Fire-and-forget thought capture. Triaged automatically between tasks. See [captures and triage](/guides/captures-triage).
## Dashboard
-`Ctrl+Alt+G` or `/gsd status` shows real-time progress:
+`Ctrl+Alt+G` or `/sf status` shows real-time progress:
- Current milestone, slice, and task
- Auto mode elapsed time and phase
diff --git a/mintlify-docs/guides/captures-triage.mdx b/mintlify-docs/guides/captures-triage.mdx
index 85b7f5baa..38b0091d6 100644
--- a/mintlify-docs/guides/captures-triage.mdx
+++ b/mintlify-docs/guides/captures-triage.mdx
@@ -10,11 +10,11 @@ Captures let you fire-and-forget thoughts during auto-mode execution. Instead of
While auto-mode is running (or any time):
```
-/gsd capture "add rate limiting to the API endpoints"
-/gsd capture "the auth flow should support OAuth, not just JWT"
+/sf capture "add rate limiting to the API endpoints"
+/sf capture "the auth flow should support OAuth, not just JWT"
```
-Captures are appended to `.gsd/CAPTURES.md` and triaged automatically between tasks.
+Captures are appended to `.sf/CAPTURES.md` and triaged automatically between tasks.
## How it works
@@ -24,7 +24,7 @@ capture → triage → confirm → resolve → resume
- `/gsd capture "thought"` appends to `.gsd/CAPTURES.md` with a timestamp and unique ID.
+ `/sf capture "thought"` appends to `.sf/CAPTURES.md` with a timestamp and unique ID.
At natural seams between tasks, SF classifies each capture.
@@ -55,7 +55,7 @@ capture → triage → confirm → resolve → resume
Trigger triage at any time:
```
-/gsd triage
+/sf triage
```
Useful when you've accumulated several captures and want to process them before the next natural seam.
@@ -72,4 +72,4 @@ Capture context is automatically injected into:
## Worktree awareness
-Captures resolve to the **original project root's** `.gsd/CAPTURES.md`, not the worktree's local copy. Captures from a steering terminal are visible to the auto-mode session running in a worktree.
+Captures resolve to the **original project root's** `.sf/CAPTURES.md`, not the worktree's local copy. Captures from a steering terminal are visible to the auto-mode session running in a worktree.
diff --git a/mintlify-docs/guides/change-management.mdx b/mintlify-docs/guides/change-management.mdx
index 989cd8441..8adc46749 100644
--- a/mintlify-docs/guides/change-management.mdx
+++ b/mintlify-docs/guides/change-management.mdx
@@ -24,10 +24,10 @@ Between milestones you have the most freedom. Inside a running milestone you hav
**A self-contained fix that can be described in a sentence.**
```
-/gsd quick "fix the date formatting bug in the invoice renderer"
+/sf quick "fix the date formatting bug in the invoice renderer"
```
-`/gsd quick` executes immediately with full SF guarantees (atomic commit, state tracking) but skips milestone ceremony. It doesn't touch the milestone pipeline.
+`/sf quick` executes immediately with full SF guarantees (atomic commit, state tracking) but skips milestone ceremony. It doesn't touch the milestone pipeline.
---
@@ -36,16 +36,16 @@ Between milestones you have the most freedom. Inside a running milestone you hav
**You spot something mid-execution but don't want to interrupt the run.**
```
-/gsd capture "the login redirect is broken on mobile viewports"
-/gsd capture "add a loading spinner to the data table"
+/sf capture "the login redirect is broken on mobile viewports"
+/sf capture "add a loading spinner to the data table"
```
-Captures are appended to `.gsd/CAPTURES.md` and triaged automatically at natural seams between tasks. See [captures and triage](/guides/captures-triage) for the full classification system.
+Captures are appended to `.sf/CAPTURES.md` and triaged automatically at natural seams between tasks. See [captures and triage](/guides/captures-triage) for the full classification system.
To force processing immediately:
```
-/gsd triage
+/sf triage
```
---
@@ -55,7 +55,7 @@ To force processing immediately:
**You're mid-slice and the plan no longer makes sense — wrong approach, missing step, or a blocker.**
```
-/gsd steer
+/sf steer
```
This opens an interactive session to hard-edit plan documents. Changes are picked up at the next phase boundary without stopping auto-mode.
@@ -74,28 +74,28 @@ For structural changes (adding tasks, removing tasks), the agent triggers a slic
```
- /gsd new-milestone
+ /sf new-milestone
```
Describe the bugs and features. SF creates a milestone — the title is what matters, not the number.
```
- /gsd queue
+ /sf queue
```
Confirm the new milestone is queued before M003. Reorder if needed.
```
- /gsd park M003
+ /sf park M003
```
Parking skips M003 without deleting it. Unpark when ready:
```
- /gsd unpark M003
+ /sf unpark M003
```
```
- /gsd auto
+ /sf auto
```
Auto-mode dispatches the next active milestone in queue order.
@@ -107,15 +107,15 @@ For structural changes (adding tasks, removing tasks), the agent triggers a slic
**You want to change M003's scope — add slices, remove slices, change the approach — before it starts.**
-Since M003 hasn't started, its plan files can be edited directly. Use `/gsd discuss` to talk through the changes and let SF rewrite the artifacts:
+Since M003 hasn't started, its plan files can be edited directly. Use `/sf discuss` to talk through the changes and let SF rewrite the artifacts:
```
-/gsd discuss
+/sf discuss
```
> "M003 needs to include the new auth flow we discovered. Can we add a slice for that and remove the old token refresh slice?"
-Or use `/gsd steer` to edit plan files directly.
+Or use `/sf steer` to edit plan files directly.
If M003 is partially done (some slices complete), auto-mode calls `reassess-roadmap` automatically after each slice. You can also discuss changes during a pause — SF can add, modify, or remove pending slices without touching the completed ones.
@@ -125,7 +125,7 @@ If M003 is partially done (some slices complete), auto-mode calls `reassess-road
**Your "Milestone 3" is effectively now "Milestone 4" because new work must insert before it.**
-SF milestone numbers are labels, not positions. Execution order is controlled by the queue, not the ID. The procedure is the same as above: create the new milestone, confirm queue order with `/gsd queue`, park M003 if needed.
+SF milestone numbers are labels, not positions. Execution order is controlled by the queue, not the ID. The procedure is the same as above: create the new milestone, confirm queue order with `/sf queue`, park M003 if needed.
The milestone IDs stay as-is — M003 just executes later. No renumbering needed.
@@ -136,7 +136,7 @@ The milestone IDs stay as-is — M003 just executes later. No renumbering needed
**After M002 you have 10+ bugs across multiple systems. Too scattered for individual quick tasks.**
```
-/gsd new-milestone
+/sf new-milestone
```
Describe the full bug list. SF creates a milestone with slices organized by system or severity. Run it in auto-mode like any other milestone. When done, all bugs land as clean commits with a formal milestone summary — readable as a bugfix release.
@@ -148,13 +148,13 @@ Describe the full bug list. SF creates a milestone with slices organized by syst
**Real ideas, but nothing that blocks the current plan.**
```
-/gsd capture "dark mode toggle on the dashboard"
+/sf capture "dark mode toggle on the dashboard"
```
Deferred captures surface during roadmap reassessment. SF can fold them into a later milestone when the timing makes sense. Or queue a dedicated features milestone directly:
```
-/gsd queue
+/sf queue
```
---
@@ -165,9 +165,9 @@ Deferred captures surface during roadmap reassessment. SF can fold them into a l
You cannot un-complete the slice. Options:
-- `/gsd quick` for small fixes
+- `/sf quick` for small fixes
- A new slice in the next milestone that explicitly patches the bug — reference the original slice in the description
-- `/gsd steer` to add a fix task to the current active milestone if you're still inside it
+- `/sf steer` to add a fix task to the current active milestone if you're still inside it
The completed slice record is preserved as-is. The fix lands as new work with its own commit and summary.
@@ -180,7 +180,7 @@ The completed slice record is preserved as-is. The fix lands as new work with it
```
- /gsd discuss
+ /sf discuss
```
Work through what's wrong and what the correction looks like before touching anything.
@@ -198,14 +198,14 @@ The completed slice record is preserved as-is. The fix lands as new work with it
| Situation | Command |
|---|---|
-| Small self-contained fix | `/gsd quick` |
-| Thought during auto-mode | `/gsd capture` |
-| Force-process captures now | `/gsd triage` |
-| Current slice plan is wrong | `/gsd steer` |
-| New work must land before next milestone | `/gsd new-milestone` + `/gsd queue` |
-| Delay a future milestone | `/gsd park ` / `/gsd unpark ` |
-| Modify a not-yet-started milestone | `/gsd discuss` or `/gsd steer` |
-| Many bugs → dedicated milestone | `/gsd new-milestone` (bugfix scope) |
-| Ideas that can wait | `/gsd capture` or `/gsd queue` |
-| Check/reorder pipeline | `/gsd queue` |
-| Architecture discussion | `/gsd discuss` |
+| Small self-contained fix | `/sf quick` |
+| Thought during auto-mode | `/sf capture` |
+| Force-process captures now | `/sf triage` |
+| Current slice plan is wrong | `/sf steer` |
+| New work must land before next milestone | `/sf new-milestone` + `/sf queue` |
+| Delay a future milestone | `/sf park <id>` / `/sf unpark <id>` |
+| Modify a not-yet-started milestone | `/sf discuss` or `/sf steer` |
+| Many bugs → dedicated milestone | `/sf new-milestone` (bugfix scope) |
+| Ideas that can wait | `/sf capture` or `/sf queue` |
+| Check/reorder pipeline | `/sf queue` |
+| Architecture discussion | `/sf discuss` |
diff --git a/mintlify-docs/guides/commands.mdx b/mintlify-docs/guides/commands.mdx
index 41c6fabdc..3129b28e6 100644
--- a/mintlify-docs/guides/commands.mdx
+++ b/mintlify-docs/guides/commands.mdx
@@ -7,105 +7,105 @@ description: "Every SF command, keyboard shortcut, and CLI flag."
| Command | Description |
|---------|-------------|
-| `/gsd` | Step mode — execute one unit at a time, pause between each |
-| `/gsd next` | Explicit step mode (same as `/gsd`) |
-| `/gsd auto` | Autonomous mode — research, plan, execute, commit, repeat |
-| `/gsd quick` | Execute a quick task with SF guarantees without full planning overhead |
-| `/gsd stop` | Stop auto mode gracefully |
-| `/gsd pause` | Pause auto mode (preserves state, `/gsd auto` to resume) |
-| `/gsd steer` | Hard-steer plan documents during execution |
-| `/gsd discuss` | Discuss architecture and decisions (works alongside auto mode) |
-| `/gsd rethink` | Conversational project reorganization |
-| `/gsd mcp` | MCP server status and connectivity |
-| `/gsd status` | Progress dashboard |
-| `/gsd widget` | Cycle dashboard widget: full / small / min / off |
-| `/gsd queue` | Queue and reorder future milestones (safe during auto mode) |
-| `/gsd capture` | Fire-and-forget thought capture (works during auto mode) |
-| `/gsd triage` | Manually trigger triage of pending captures |
-| `/gsd dispatch` | Dispatch a specific phase directly |
-| `/gsd history` | View execution history (supports `--cost`, `--phase`, `--model` filters) |
-| `/gsd forensics` | Full-access debugger for auto-mode failures |
-| `/gsd cleanup` | Clean up SF state files and stale worktrees |
-| `/gsd visualize` | Open workflow visualizer |
-| `/gsd export --html` | Generate self-contained HTML report |
-| `/gsd export --html --all` | Generate reports for all milestones |
-| `/gsd update` | Update SF to the latest version in-session |
-| `/gsd knowledge` | Add persistent project knowledge |
-| `/gsd fast` | Toggle service tier for supported models |
-| `/gsd rate` | Rate last unit's model tier (over/ok/under) |
-| `/gsd changelog` | Show categorized release notes |
-| `/gsd logs` | Browse activity logs, debug logs, and metrics |
-| `/gsd remote` | Control remote auto-mode |
-| `/gsd help` | Categorized command reference |
+| `/sf` | Step mode — execute one unit at a time, pause between each |
+| `/sf next` | Explicit step mode (same as `/sf`) |
+| `/sf auto` | Autonomous mode — research, plan, execute, commit, repeat |
+| `/sf quick` | Execute a quick task with SF guarantees without full planning overhead |
+| `/sf stop` | Stop auto mode gracefully |
+| `/sf pause` | Pause auto mode (preserves state, `/sf auto` to resume) |
+| `/sf steer` | Hard-steer plan documents during execution |
+| `/sf discuss` | Discuss architecture and decisions (works alongside auto mode) |
+| `/sf rethink` | Conversational project reorganization |
+| `/sf mcp` | MCP server status and connectivity |
+| `/sf status` | Progress dashboard |
+| `/sf widget` | Cycle dashboard widget: full / small / min / off |
+| `/sf queue` | Queue and reorder future milestones (safe during auto mode) |
+| `/sf capture` | Fire-and-forget thought capture (works during auto mode) |
+| `/sf triage` | Manually trigger triage of pending captures |
+| `/sf dispatch` | Dispatch a specific phase directly |
+| `/sf history` | View execution history (supports `--cost`, `--phase`, `--model` filters) |
+| `/sf forensics` | Full-access debugger for auto-mode failures |
+| `/sf cleanup` | Clean up SF state files and stale worktrees |
+| `/sf visualize` | Open workflow visualizer |
+| `/sf export --html` | Generate self-contained HTML report |
+| `/sf export --html --all` | Generate reports for all milestones |
+| `/sf update` | Update SF to the latest version in-session |
+| `/sf knowledge` | Add persistent project knowledge |
+| `/sf fast` | Toggle service tier for supported models |
+| `/sf rate` | Rate last unit's model tier (over/ok/under) |
+| `/sf changelog` | Show categorized release notes |
+| `/sf logs` | Browse activity logs, debug logs, and metrics |
+| `/sf remote` | Control remote auto-mode |
+| `/sf help` | Categorized command reference |
## Configuration and diagnostics
| Command | Description |
|---------|-------------|
-| `/gsd prefs` | Model selection, timeouts, budget ceiling |
-| `/gsd mode` | Switch workflow mode (solo/team) |
-| `/gsd config` | Re-run the provider setup wizard |
-| `/gsd keys` | API key manager — list, add, remove, test, rotate |
-| `/gsd doctor` | Runtime health checks with auto-fix |
-| `/gsd inspect` | Show SQLite DB diagnostics |
-| `/gsd init` | Project init wizard |
-| `/gsd setup` | Global setup status and configuration |
-| `/gsd skill-health` | Skill lifecycle dashboard |
-| `/gsd hooks` | Show configured post-unit and pre-dispatch hooks |
-| `/gsd run-hook` | Manually trigger a specific hook |
-| `/gsd migrate` | Migrate a v1 `.planning` directory to `.gsd` format |
+| `/sf prefs` | Model selection, timeouts, budget ceiling |
+| `/sf mode` | Switch workflow mode (solo/team) |
+| `/sf config` | Re-run the provider setup wizard |
+| `/sf keys` | API key manager — list, add, remove, test, rotate |
+| `/sf doctor` | Runtime health checks with auto-fix |
+| `/sf inspect` | Show SQLite DB diagnostics |
+| `/sf init` | Project init wizard |
+| `/sf setup` | Global setup status and configuration |
+| `/sf skill-health` | Skill lifecycle dashboard |
+| `/sf hooks` | Show configured post-unit and pre-dispatch hooks |
+| `/sf run-hook` | Manually trigger a specific hook |
+| `/sf migrate` | Migrate a v1 `.planning` directory to `.sf` format |
## Milestone management
| Command | Description |
|---------|-------------|
-| `/gsd new-milestone` | Create a new milestone |
-| `/gsd skip` | Prevent a unit from auto-mode dispatch |
-| `/gsd undo` | Revert last completed unit |
-| `/gsd undo-task` | Reset a specific task's completion state |
-| `/gsd reset-slice` | Reset a slice and all its tasks |
-| `/gsd park` | Park a milestone — skip without deleting |
-| `/gsd unpark` | Reactivate a parked milestone |
+| `/sf new-milestone` | Create a new milestone |
+| `/sf skip` | Prevent a unit from auto-mode dispatch |
+| `/sf undo` | Revert last completed unit |
+| `/sf undo-task` | Reset a specific task's completion state |
+| `/sf reset-slice` | Reset a slice and all its tasks |
+| `/sf park` | Park a milestone — skip without deleting |
+| `/sf unpark` | Reactivate a parked milestone |
## Parallel orchestration
| Command | Description |
|---------|-------------|
-| `/gsd parallel start` | Analyze eligibility, confirm, and start workers |
-| `/gsd parallel status` | Show all workers with state, progress, and cost |
-| `/gsd parallel stop [MID]` | Stop all workers or a specific one |
-| `/gsd parallel pause [MID]` | Pause all or a specific worker |
-| `/gsd parallel resume [MID]` | Resume paused workers |
-| `/gsd parallel merge [MID]` | Merge completed milestones to main |
+| `/sf parallel start` | Analyze eligibility, confirm, and start workers |
+| `/sf parallel status` | Show all workers with state, progress, and cost |
+| `/sf parallel stop [MID]` | Stop all workers or a specific one |
+| `/sf parallel pause [MID]` | Pause all or a specific worker |
+| `/sf parallel resume [MID]` | Resume paused workers |
+| `/sf parallel merge [MID]` | Merge completed milestones to main |
## Workflow templates
| Command | Description |
|---------|-------------|
-| `/gsd start` | Start a workflow template (bugfix, spike, feature, hotfix, refactor, etc.) |
-| `/gsd start resume` | Resume an in-progress workflow |
-| `/gsd templates` | List available workflow templates |
-| `/gsd templates info ` | Show detailed template info |
+| `/sf start` | Start a workflow template (bugfix, spike, feature, hotfix, refactor, etc.) |
+| `/sf start resume` | Resume an in-progress workflow |
+| `/sf templates` | List available workflow templates |
+| `/sf templates info <template>` | Show detailed template info |
## Custom workflows
| Command | Description |
|---------|-------------|
-| `/gsd workflow new` | Create a new workflow definition |
-| `/gsd workflow run ` | Create a run and start auto-mode |
-| `/gsd workflow list` | List workflow runs |
-| `/gsd workflow validate ` | Validate a workflow definition |
-| `/gsd workflow pause` | Pause custom workflow auto-mode |
-| `/gsd workflow resume` | Resume paused custom workflow auto-mode |
+| `/sf workflow new` | Create a new workflow definition |
+| `/sf workflow run <name>` | Create a run and start auto-mode |
+| `/sf workflow list` | List workflow runs |
+| `/sf workflow validate <name>` | Validate a workflow definition |
+| `/sf workflow pause` | Pause custom workflow auto-mode |
+| `/sf workflow resume` | Resume paused custom workflow auto-mode |
## Extensions
| Command | Description |
|---------|-------------|
-| `/gsd extensions list` | List all extensions and their status |
-| `/gsd extensions enable ` | Enable a disabled extension |
-| `/gsd extensions disable ` | Disable an extension |
-| `/gsd extensions info ` | Show extension details |
+| `/sf extensions list` | List all extensions and their status |
+| `/sf extensions enable <name>` | Enable a disabled extension |
+| `/sf extensions disable <name>` | Disable an extension |
+| `/sf extensions info <name>` | Show extension details |
## Keyboard shortcuts
@@ -125,31 +125,31 @@ In terminals without Kitty keyboard protocol support (macOS Terminal.app, JetBra
| Flag | Description |
|------|-------------|
-| `gsd` | Start a new interactive session |
-| `gsd --continue` (`-c`) | Resume the most recent session |
-| `gsd --model ` | Override the default model |
-| `gsd --print "msg"` (`-p`) | Single-shot prompt mode (no TUI) |
-| `gsd --mode ` | Output mode for non-interactive use |
-| `gsd --list-models [search]` | List available models and exit |
-| `gsd --web [path]` | Start browser-based web interface |
-| `gsd --worktree` (`-w`) `[name]` | Start session in a git worktree |
-| `gsd --no-session` | Disable session persistence |
-| `gsd --extension ` | Load an additional extension |
-| `gsd --version` (`-v`) | Print version and exit |
-| `gsd sessions` | Interactive session picker |
-| `gsd config` | Set up global API keys |
-| `gsd update` | Update SF to the latest version |
+| `sf` | Start a new interactive session |
+| `sf --continue` (`-c`) | Resume the most recent session |
+| `sf --model <model>` | Override the default model |
+| `sf --print "msg"` (`-p`) | Single-shot prompt mode (no TUI) |
+| `sf --mode <mode>` | Output mode for non-interactive use |
+| `sf --list-models [search]` | List available models and exit |
+| `sf --web [path]` | Start browser-based web interface |
+| `sf --worktree` (`-w`) `[name]` | Start session in a git worktree |
+| `sf --no-session` | Disable session persistence |
+| `sf --extension <path>` | Load an additional extension |
+| `sf --version` (`-v`) | Print version and exit |
+| `sf sessions` | Interactive session picker |
+| `sf config` | Set up global API keys |
+| `sf update` | Update SF to the latest version |
## Headless mode
-`gsd headless` runs commands without a TUI — designed for CI, cron jobs, and scripted automation.
+`sf headless` runs commands without a TUI — designed for CI, cron jobs, and scripted automation.
```bash
-gsd headless # run auto mode
-gsd headless next # run a single unit
-gsd headless query # instant JSON snapshot (~50ms, no LLM)
-gsd headless --timeout 600000 auto # with timeout
-gsd headless new-milestone --context brief.md --auto
+sf headless # run auto mode
+sf headless next # run a single unit
+sf headless query # instant JSON snapshot (~50ms, no LLM)
+sf headless --timeout 600000 auto # with timeout
+sf headless new-milestone --context brief.md --auto
```
| Flag | Description |
@@ -163,20 +163,20 @@ gsd headless new-milestone --context brief.md --auto
**Exit codes:** `0` = complete, `1` = error/timeout, `2` = blocked.
-### `gsd headless query`
+### `sf headless query`
Returns a JSON snapshot of the project state — no LLM session, instant response.
```bash
-gsd headless query | jq '.state.phase' # "executing"
-gsd headless query | jq '.next' # next dispatch action
-gsd headless query | jq '.cost.total' # total spend
+sf headless query | jq '.state.phase' # "executing"
+sf headless query | jq '.next' # next dispatch action
+sf headless query | jq '.cost.total' # total spend
```
## MCP server mode
```bash
-gsd --mode mcp
+sf --mode mcp
```
Runs SF as a Model Context Protocol server over stdin/stdout, exposing all tools to external AI clients (Claude Desktop, VS Code Copilot, etc.).
diff --git a/mintlify-docs/guides/configuration.mdx b/mintlify-docs/guides/configuration.mdx
index 49bf208bd..9ebce10c1 100644
--- a/mintlify-docs/guides/configuration.mdx
+++ b/mintlify-docs/guides/configuration.mdx
@@ -3,16 +3,16 @@ title: "Configuration"
description: "Preferences, model selection, MCP servers, hooks, and all settings."
---
-SF preferences live in `~/.gsd/PREFERENCES.md` (global) or `.gsd/PREFERENCES.md` (project-local). Manage interactively with `/gsd prefs`.
+SF preferences live in `~/.sf/PREFERENCES.md` (global) or `.sf/PREFERENCES.md` (project-local). Manage interactively with `/sf prefs`.
## Preferences commands
| Command | Description |
|---------|-------------|
-| `/gsd prefs` | Open the global preferences wizard |
-| `/gsd prefs global` | Global preferences wizard |
-| `/gsd prefs project` | Project preferences wizard |
-| `/gsd prefs status` | Show current files, merged values, and skill status |
+| `/sf prefs` | Open the global preferences wizard |
+| `/sf prefs global` | Global preferences wizard |
+| `/sf prefs project` | Project preferences wizard |
+| `/sf prefs status` | Show current files, merged values, and skill status |
## Preferences file format
@@ -40,8 +40,8 @@ token_profile: balanced
| Scope | Path | Applies to |
|-------|------|-----------|
-| Global | `~/.gsd/PREFERENCES.md` | All projects |
-| Project | `.gsd/PREFERENCES.md` | Current project only |
+| Global | `~/.sf/PREFERENCES.md` | All projects |
+| Project | `.sf/PREFERENCES.md` | Current project only |
**Merge behavior:**
- **Scalar fields** — project wins if defined
@@ -50,7 +50,7 @@ token_profile: balanced
## Global API keys
-Tool API keys are stored globally in `~/.gsd/agent/auth.json`. Set them once with `/gsd config`.
+Tool API keys are stored globally in `~/.sf/agent/auth.json`. Set them once with `/sf config`.
| Tool | Environment variable | Purpose |
|------|---------------------|---------|
@@ -65,7 +65,7 @@ Anthropic models have built-in web search — no extra keys needed.
SF connects to external MCP servers configured in project files:
- `.mcp.json` — repo-shared config
-- `.gsd/mcp.json` — local-only config
+- `.sf/mcp.json` — local-only config
@@ -236,7 +236,7 @@ See [parallel orchestration](/guides/parallel-orchestration).
| Variable | Default | Description |
|----------|---------|-------------|
-| `SF_HOME` | `~/.gsd` | Global SF directory |
+| `SF_HOME` | `~/.sf` | Global SF directory |
| `SF_PROJECT_ID` | (auto-hash) | Override project identity hash |
| `SF_STATE_DIR` | `$SF_HOME` | Per-project state root |
| `SF_CODING_AGENT_DIR` | `$SF_HOME/agent` | Agent directory |
diff --git a/mintlify-docs/guides/cost-management.mdx b/mintlify-docs/guides/cost-management.mdx
index 70bb9aa9b..ea1a0776a 100644
--- a/mintlify-docs/guides/cost-management.mdx
+++ b/mintlify-docs/guides/cost-management.mdx
@@ -15,11 +15,11 @@ Every unit's metrics are captured automatically:
- **Tool calls** — number of tool invocations
- **Message counts** — assistant and user messages
-Data is stored in `.gsd/metrics.json` and survives across sessions.
+Data is stored in `.sf/metrics.json` and survives across sessions.
### Viewing costs
-`Ctrl+Alt+G` or `/gsd status` shows real-time cost breakdown by:
+`Ctrl+Alt+G` or `/sf status` shows real-time cost breakdown by:
- Phase (research, planning, execution, completion, reassessment)
- Slice (M001/S01, M001/S02, ...)
@@ -72,9 +72,9 @@ See [token optimization](/guides/token-optimization) for details.
## Tips
- Start with `balanced` and a generous `budget_ceiling` to establish baseline costs
-- Check `/gsd status` after a few slices to see per-slice averages
+- Check `/sf status` after a few slices to see per-slice averages
- Switch to `budget` for well-understood, repetitive work
- Use `quality` only for architectural decisions
- Per-phase model selection lets you use Opus for planning while keeping execution on Sonnet
- Enable [dynamic routing](/guides/dynamic-model-routing) for automatic downgrading on simple tasks
-- Use `/gsd visualize` → Metrics tab to see where your budget is going
+- Use `/sf visualize` → Metrics tab to see where your budget is going
diff --git a/mintlify-docs/guides/custom-models.mdx b/mintlify-docs/guides/custom-models.mdx
index 02e61ae7d..36ad5ebd8 100644
--- a/mintlify-docs/guides/custom-models.mdx
+++ b/mintlify-docs/guides/custom-models.mdx
@@ -3,7 +3,7 @@ title: "Custom models"
description: "Add custom providers and models (Ollama, vLLM, LM Studio, proxies) via models.json."
---
-Define custom models and providers in `~/.gsd/agent/models.json`. This lets you add models not in the default registry — self-hosted endpoints, fine-tuned models, proxies, or new provider releases.
+Define custom models and providers in `~/.sf/agent/models.json`. This lets you add models not in the default registry — self-hosted endpoints, fine-tuned models, proxies, or new provider releases.
The file reloads each time you open `/model` — no restart needed.
@@ -123,4 +123,4 @@ For providers with partial OpenAI compatibility, use the `compat` field at provi
| Extension | Provider | Models | Install |
|-----------|----------|--------|---------|
-| [`pi-dashscope`](https://www.npmjs.com/package/pi-dashscope) | Alibaba DashScope | Qwen3, GLM-5, MiniMax M2.5, Kimi K2.5 | `gsd install npm:pi-dashscope` |
+| [`pi-dashscope`](https://www.npmjs.com/package/pi-dashscope) | Alibaba DashScope | Qwen3, GLM-5, MiniMax M2.5, Kimi K2.5 | `sf install npm:pi-dashscope` |
diff --git a/mintlify-docs/guides/dynamic-model-routing.mdx b/mintlify-docs/guides/dynamic-model-routing.mdx
index d6cb80ed6..5de9e9afe 100644
--- a/mintlify-docs/guides/dynamic-model-routing.mdx
+++ b/mintlify-docs/guides/dynamic-model-routing.mdx
@@ -69,9 +69,9 @@ For `execute-task` units, the classifier analyzes the task plan:
## Adaptive learning
-The routing history (`.gsd/routing-history.json`) tracks success/failure per tier per unit type. If a tier's failure rate exceeds 20%, future classifications are bumped up.
+The routing history (`.sf/routing-history.json`) tracks success/failure per tier per unit type. If a tier's failure rate exceeds 20%, future classifications are bumped up.
-User feedback (`/gsd rate`) is weighted 2x vs automatic outcomes.
+User feedback (`/sf rate`) is weighted 2x vs automatic outcomes.
## Cost table
diff --git a/mintlify-docs/guides/git-strategy.mdx b/mintlify-docs/guides/git-strategy.mdx
index d0db3cabc..fa53992f5 100644
--- a/mintlify-docs/guides/git-strategy.mdx
+++ b/mintlify-docs/guides/git-strategy.mdx
@@ -12,7 +12,7 @@ Configure via the `git.isolation` preference:
| Mode | Working directory | Branch | Best for |
|------|-------------------|--------|----------|
| `none` (default) | Project root | Current branch | Most projects — no isolation overhead |
-| `worktree` | `.gsd/worktrees//` | `milestone/` | Full file isolation |
+| `worktree` | `.sf/worktrees/<milestone>/` | `milestone/` | Full file isolation |
| `branch` | Project root | `milestone/` | Submodule-heavy repos |
### `none` mode (default)
@@ -125,13 +125,13 @@ Pushes the milestone branch and creates a PR targeting your specified branch. Re
### `commit_docs: false`
-Adds `.gsd/` to `.gitignore` and keeps all planning artifacts local-only. Useful for teams where only some members use SF.
+Adds `.sf/` to `.gitignore` and keeps all planning artifacts local-only. Useful for teams where only some members use SF.
## Worktree management
### Automatic (auto mode)
-1. Milestone starts → worktree created at `.gsd/worktrees//`
+1. Milestone starts → worktree created at `.sf/worktrees/<milestone>/`
2. Planning artifacts copied into the worktree
3. All execution happens inside the worktree
4. Milestone completes → squash-merged to main
@@ -154,4 +154,4 @@ SF includes automatic recovery for common git issues:
- **Stale lock files** — removes `index.lock` files from crashed processes
- **Orphaned worktrees** — detects and offers cleanup
-Run `/gsd doctor` to check git health manually.
+Run `/sf doctor` to check git health manually.
diff --git a/mintlify-docs/guides/migration.mdx b/mintlify-docs/guides/migration.mdx
index 58414da80..2c43cec82 100644
--- a/mintlify-docs/guides/migration.mdx
+++ b/mintlify-docs/guides/migration.mdx
@@ -1,18 +1,18 @@
---
title: "Migration from v1"
-description: "Migrate .planning directories from the original SF to SF's .gsd format."
+description: "Migrate .planning directories from the original Singularity Forge (v1) to SF's .sf format."
---
-If you have projects with `.planning` directories from the original Singularity Forge (v1), you can migrate them to SF's `.gsd` format.
+If you have projects with `.planning` directories from the original Singularity Forge (v1), you can migrate them to SF's `.sf` format.
## Running the migration
```bash
# From within the project directory
-/gsd migrate
+/sf migrate
# Or specify a path
-/gsd migrate ~/projects/my-old-project
+/sf migrate ~/projects/my-old-project
```
## What gets migrated
@@ -41,7 +41,7 @@ The migration handles various v1 format variations:
Verify the output:
```
-/gsd doctor
+/sf doctor
```
-This checks `.gsd/` integrity and flags any structural issues.
+This checks `.sf/` integrity and flags any structural issues.
diff --git a/mintlify-docs/guides/parallel-orchestration.mdx b/mintlify-docs/guides/parallel-orchestration.mdx
index 4c9d2e58c..5466c9c30 100644
--- a/mintlify-docs/guides/parallel-orchestration.mdx
+++ b/mintlify-docs/guides/parallel-orchestration.mdx
@@ -22,13 +22,13 @@ parallel:
2. Start parallel execution:
```
-/gsd parallel start
+/sf parallel start
```
3. Monitor progress:
```
-/gsd parallel status
+/sf parallel status
```
## Architecture
@@ -43,7 +43,7 @@ parallel:
│ └──────────┘ └──────────┘ └──────────┘ │
│ │ │ │ │
│ ▼ ▼ ▼ │
-│ .gsd/worktrees/ .gsd/worktrees/ .gsd/worktrees/ │
+│ .sf/worktrees/ .sf/worktrees/ .sf/worktrees/ │
└─────────────────────────────────────────────────────┘
```
@@ -88,16 +88,16 @@ parallel:
| Command | Description |
|---------|-------------|
-| `/gsd parallel start` | Analyze, confirm, and start workers |
-| `/gsd parallel status` | Show workers with state, progress, cost |
-| `/gsd parallel stop [MID]` | Stop all or a specific worker |
-| `/gsd parallel pause [MID]` | Pause all or a specific worker |
-| `/gsd parallel resume [MID]` | Resume paused workers |
-| `/gsd parallel merge [MID]` | Merge completed milestones to main |
+| `/sf parallel start` | Analyze, confirm, and start workers |
+| `/sf parallel status` | Show workers with state, progress, cost |
+| `/sf parallel stop [MID]` | Stop all or a specific worker |
+| `/sf parallel pause [MID]` | Pause all or a specific worker |
+| `/sf parallel resume [MID]` | Resume paused workers |
+| `/sf parallel merge [MID]` | Merge completed milestones to main |
## Merge reconciliation
-- `.gsd/` state files — auto-resolved (accept milestone branch version)
+- `.sf/` state files — auto-resolved (accept milestone branch version)
- Code conflicts — merge halts, shows conflicting files. Resolve manually and retry.
## Budget management
@@ -108,16 +108,16 @@ When `budget_ceiling` is set, aggregate cost is tracked across all workers. Ceil
### "No milestones are eligible"
-All milestones are complete or blocked by dependencies. Check `/gsd queue`.
+All milestones are complete or blocked by dependencies. Check `/sf queue`.
### Worker crashed
-Workers persist state to disk. On restart, the coordinator detects dead PIDs. Run `/gsd doctor --fix` to clean up, then `/gsd parallel start` to spawn new workers.
+Workers persist state to disk. On restart, the coordinator detects dead PIDs. Run `/sf doctor --fix` to clean up, then `/sf parallel start` to spawn new workers.
### Merge conflicts
```
-/gsd parallel merge # see which milestones conflict
-# resolve in .gsd/worktrees//
-/gsd parallel merge MID # retry
+/sf parallel merge # see which milestones conflict
+# resolve in .sf/worktrees//
+/sf parallel merge MID # retry
```
diff --git a/mintlify-docs/guides/remote-questions.mdx b/mintlify-docs/guides/remote-questions.mdx
index 10b3dce50..02988a389 100644
--- a/mintlify-docs/guides/remote-questions.mdx
+++ b/mintlify-docs/guides/remote-questions.mdx
@@ -10,7 +10,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
```
- /gsd remote discord
+ /sf remote discord
```
The setup wizard validates your bot token, picks a server and channel, sends a test message, and saves the config.
@@ -21,7 +21,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
```
- /gsd remote slack
+ /sf remote slack
```
The setup wizard validates your bot token, picks a channel, sends a test message, and saves the config.
@@ -32,7 +32,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
```
- /gsd remote telegram
+ /sf remote telegram
```
The setup wizard validates your bot token, prompts for a chat ID, sends a test message, and saves the config.
@@ -76,9 +76,9 @@ If no response within `timeout_minutes`, the LLM makes a conservative default ch
| Command | Description |
|---------|-------------|
-| `/gsd remote` | Show menu and current status |
-| `/gsd remote slack` | Set up Slack |
-| `/gsd remote discord` | Set up Discord |
-| `/gsd remote telegram` | Set up Telegram |
-| `/gsd remote status` | Show current config and last prompt status |
-| `/gsd remote disconnect` | Remove configuration |
+| `/sf remote` | Show menu and current status |
+| `/sf remote slack` | Set up Slack |
+| `/sf remote discord` | Set up Discord |
+| `/sf remote telegram` | Set up Telegram |
+| `/sf remote status` | Show current config and last prompt status |
+| `/sf remote disconnect` | Remove configuration |
diff --git a/mintlify-docs/guides/skills.mdx b/mintlify-docs/guides/skills.mdx
index 75ea193ad..ff2fae3c3 100644
--- a/mintlify-docs/guides/skills.mdx
+++ b/mintlify-docs/guides/skills.mdx
@@ -7,7 +7,7 @@ Skills are specialized instruction sets that SF loads when the task matches. The
## Bundled skills
-SF ships with these skills, installed to `~/.gsd/agent/skills/`:
+SF ships with these skills, installed to `~/.sf/agent/skills/`:
| Skill | Trigger | Description |
|-------|---------|-------------|
@@ -51,8 +51,8 @@ skill_rules:
### Resolution order
-1. **Bare name** — e.g., `frontend-design` → scans `~/.gsd/agent/skills/` and project skills
-2. **Absolute path** — e.g., `/Users/you/.gsd/agent/skills/my-skill/SKILL.md`
+1. **Bare name** — e.g., `frontend-design` → scans `~/.sf/agent/skills/` and project skills
+2. **Absolute path** — e.g., `/Users/you/.sf/agent/skills/my-skill/SKILL.md`
3. **Directory path** — looks for `SKILL.md` inside
User skills take precedence over project skills.
@@ -62,7 +62,7 @@ User skills take precedence over project skills.
Create a directory with a `SKILL.md` file:
```
-~/.gsd/agent/skills/my-skill/
+~/.sf/agent/skills/my-skill/
SKILL.md — instructions for the LLM
references/ — optional reference files
```
@@ -70,17 +70,17 @@ Create a directory with a `SKILL.md` file:
### Project-local skills
```
-.gsd/agent/skills/my-project-skill/
+.sf/agent/skills/my-project-skill/
SKILL.md
```
## Skill health dashboard
```
-/gsd skill-health # overview table
-/gsd skill-health rust-core # detailed view
-/gsd skill-health --stale 30 # unused for 30+ days
-/gsd skill-health --declining # falling success rates
+/sf skill-health # overview table
+/sf skill-health rust-core # detailed view
+/sf skill-health --stale 30 # unused for 30+ days
+/sf skill-health --declining # falling success rates
```
The dashboard flags:
diff --git a/mintlify-docs/guides/token-optimization.mdx b/mintlify-docs/guides/token-optimization.mdx
index 50e0732c1..9c2fbab4d 100644
--- a/mintlify-docs/guides/token-optimization.mdx
+++ b/mintlify-docs/guides/token-optimization.mdx
@@ -112,12 +112,12 @@ When approaching the budget ceiling, the classifier automatically downgrades tie
## Adaptive learning
-SF tracks success/failure per tier and adjusts classifications over time. User feedback via `/gsd rate` is weighted 2x:
+SF tracks success/failure per tier and adjusts classifications over time. User feedback via `/sf rate` is weighted 2x:
```
-/gsd rate over # model was overpowered
-/gsd rate ok # appropriate
-/gsd rate under # too weak
+/sf rate over # model was overpowered
+/sf rate ok # appropriate
+/sf rate under # too weak
```
## Configuration examples
diff --git a/mintlify-docs/guides/troubleshooting.mdx b/mintlify-docs/guides/troubleshooting.mdx
index 061a23864..640ec0815 100644
--- a/mintlify-docs/guides/troubleshooting.mdx
+++ b/mintlify-docs/guides/troubleshooting.mdx
@@ -1,14 +1,14 @@
---
title: "Troubleshooting"
-description: "Common issues, /gsd doctor, /gsd forensics, and recovery procedures."
+description: "Common issues, /sf doctor, /sf forensics, and recovery procedures."
---
-## `/gsd doctor`
+## `/sf doctor`
-The built-in diagnostic tool validates `.gsd/` integrity:
+The built-in diagnostic tool validates `.sf/` integrity:
```
-/gsd doctor
+/sf doctor
```
It checks file structure, referential integrity, completion state consistency, git worktree health, and stale lock files.
@@ -19,16 +19,16 @@ It checks file structure, referential integrity, completion state consistency, g
**Cause:** Stale cache after a crash, or the LLM didn't produce the expected artifact.
- **Fix:** Run `/gsd doctor` to repair state, then `/gsd auto`.
+ **Fix:** Run `/sf doctor` to repair state, then `/sf auto`.
**Cause:** A unit failed to produce its expected artifact twice in a row.
- **Fix:** Check the task plan for clarity. Refine it manually, then `/gsd auto`.
+ **Fix:** Check the task plan for clarity. Refine it manually, then `/sf auto`.
-
+
**Cause:** npm's global bin directory isn't in `$PATH`.
**Fix:**
@@ -38,7 +38,7 @@ It checks file structure, referential integrity, completion state consistency, g
source ~/.zshrc
```
- **Workaround:** `npx sf-run` or `$(npm prefix -g)/bin/gsd`
+ **Workaround:** `npx sf-run` or `$(npm prefix -g)/bin/sf`
@@ -59,25 +59,25 @@ It checks file structure, referential integrity, completion state consistency, g
- Increase `budget_ceiling` in preferences, or switch to `budget` token profile. Resume with `/gsd auto`.
+ Increase `budget_ceiling` in preferences, or switch to `budget` token profile. Resume with `/sf auto`.
SF auto-detects stale locks. If automatic recovery fails:
```bash
- rm -f .gsd/auto.lock
- rm -rf "$(dirname .gsd)/.gsd.lock"
+ rm -f .sf/auto.lock
+ rm -rf "$(dirname .sf)/.sf.lock"
```
-
- SF auto-resolves conflicts on `.gsd/` runtime files. For code conflicts, the LLM attempts resolution. If that fails, resolve manually.
+
+ SF auto-resolves conflicts on `.sf/` runtime files. For code conflicts, the LLM attempts resolution. If that fails, resolve manually.
**Cause:** Antivirus, indexers, or editors briefly locking files during atomic rename.
- **Fix:** Re-run the operation. Close tools holding files open if the error persists. Run `/gsd doctor` to verify repo health.
+ **Fix:** Re-run the operation. Close tools holding files open if the error persists. Run `/sf doctor` to verify repo health.
@@ -97,23 +97,23 @@ It checks file structure, referential integrity, completion state consistency, g
-## `/gsd forensics`
+## `/sf forensics`
Full-access debugger for post-mortem analysis:
```
-/gsd forensics [optional problem description]
+/sf forensics [optional problem description]
```
Provides anomaly detection, unit traces, metrics analysis, doctor integration, and LLM-guided investigation.
## MCP client issues
-Use `/gsd mcp` to check MCP server status and connectivity at a glance.
+Use `/sf mcp` to check MCP server status and connectivity at a glance.
- Verify `.mcp.json` or `.gsd/mcp.json` exists and parses as valid JSON.
+ Verify `.mcp.json` or `.sf/mcp.json` exists and parses as valid JSON.
@@ -130,29 +130,29 @@ Use `/gsd mcp` to check MCP server status and connectivity at a glance.
### Reset auto mode state
```bash
-rm .gsd/auto.lock
-rm .gsd/completed-units.json
+rm .sf/auto.lock
+rm .sf/completed-units.json
```
-Then `/gsd auto` to restart from current disk state.
+Then `/sf auto` to restart from current disk state.
### Reset routing history
```bash
-rm .gsd/routing-history.json
+rm .sf/routing-history.json
```
### Full state rebuild
```
-/gsd doctor
+/sf doctor
```
Rebuilds `STATE.md` from plan and roadmap files on disk.
## Getting help
-- **GitHub Issues:** [github.com/gsd-build/SF/issues](https://github.com/gsd-build/gsd-2/issues)
-- **Dashboard:** `Ctrl+Alt+G` or `/gsd status`
-- **Forensics:** `/gsd forensics`
-- **Session logs:** `.gsd/activity/`
+- **GitHub Issues:** [github.com/sf-build/SF/issues](https://github.com/sf-build/sf-2/issues)
+- **Dashboard:** `Ctrl+Alt+G` or `/sf status`
+- **Forensics:** `/sf forensics`
+- **Session logs:** `.sf/activity/`
diff --git a/mintlify-docs/guides/visualizer.mdx b/mintlify-docs/guides/visualizer.mdx
index 5ea199621..8602cc514 100644
--- a/mintlify-docs/guides/visualizer.mdx
+++ b/mintlify-docs/guides/visualizer.mdx
@@ -8,7 +8,7 @@ The workflow visualizer is a full-screen TUI overlay with four tabs showing proj
## Opening
```
-/gsd visualize
+/sf visualize
```
Or configure automatic display after milestone completion:
@@ -70,10 +70,10 @@ The visualizer refreshes from disk every 2 seconds, staying current alongside a
For shareable reports outside the terminal:
```
-/gsd export --html
+/sf export --html
```
-Generates a self-contained HTML file in `.gsd/reports/` with progress tree, dependency graph (SVG), cost/token charts, execution timeline, and changelog. All CSS and JS are inlined — printable to PDF from any browser.
+Generates a self-contained HTML file in `.sf/reports/` with progress tree, dependency graph (SVG), cost/token charts, execution timeline, and changelog. All CSS and JS are inlined — printable to PDF from any browser.
```yaml
auto_report: true # auto-generate after milestone completion (default)
diff --git a/mintlify-docs/guides/web-interface.mdx b/mintlify-docs/guides/web-interface.mdx
index b12b02930..bddbb452d 100644
--- a/mintlify-docs/guides/web-interface.mdx
+++ b/mintlify-docs/guides/web-interface.mdx
@@ -8,13 +8,13 @@ SF includes a browser-based web interface for project management, real-time prog
## Quick start
```bash
-gsd --web
+sf --web
```
### CLI flags
```bash
-gsd --web --host 0.0.0.0 --port 8080 --allowed-origins "https://example.com"
+sf --web --host 0.0.0.0 --port 8080 --allowed-origins "https://example.com"
```
| Flag | Default | Description |
diff --git a/mintlify-docs/guides/working-in-teams.mdx b/mintlify-docs/guides/working-in-teams.mdx
index 645c2bc0b..04ab1ff20 100644
--- a/mintlify-docs/guides/working-in-teams.mdx
+++ b/mintlify-docs/guides/working-in-teams.mdx
@@ -10,7 +10,7 @@ SF supports multi-user workflows where several developers work on the same repos
### 1. Set team mode
```yaml
-# .gsd/PREFERENCES.md (project-level, committed to git)
+# .sf/PREFERENCES.md (project-level, committed to git)
---
version: 1
mode: team
@@ -25,20 +25,20 @@ Share planning artifacts while keeping runtime files local:
```bash
# Runtime / ephemeral (per-developer)
-.gsd/auto.lock
-.gsd/completed-units*.json
-.gsd/state-manifest.json
-.gsd/STATE.md
-.gsd/metrics.json
-.gsd/activity/
-.gsd/runtime/
-.gsd/worktrees/
-.gsd/gsd.db*
-.gsd/journal/
-.gsd/doctor-history.jsonl
-.gsd/event-log.jsonl
-.gsd/milestones/**/continue.md
-.gsd/milestones/**/*-CONTINUE.md
+.sf/auto.lock
+.sf/completed-units*.json
+.sf/state-manifest.json
+.sf/STATE.md
+.sf/metrics.json
+.sf/activity/
+.sf/runtime/
+.sf/worktrees/
+.sf/sf.db*
+.sf/journal/
+.sf/doctor-history.jsonl
+.sf/event-log.jsonl
+.sf/milestones/**/continue.md
+.sf/milestones/**/*-CONTINUE.md
```
**Shared** (committed): preferences, PROJECT.md, REQUIREMENTS.md, DECISIONS.md, milestones.
@@ -48,7 +48,7 @@ Share planning artifacts while keeping runtime files local:
### 3. Commit
```bash
-git add .gsd/PREFERENCES.md
+git add .sf/PREFERENCES.md
git commit -m "chore: enable SF team workflow"
```
@@ -61,7 +61,7 @@ git:
commit_docs: false
```
-Adds `.gsd/` to `.gitignore` entirely. The developer gets structured planning without affecting teammates.
+Adds `.sf/` to `.gitignore` entirely. The developer gets structured planning without affecting teammates.
## Parallel development
diff --git a/mintlify-docs/images/favicon.svg b/mintlify-docs/images/favicon.svg
index d396bf107..b9b38284e 100644
--- a/mintlify-docs/images/favicon.svg
+++ b/mintlify-docs/images/favicon.svg
@@ -54,11 +54,11 @@
development system for Claude Code by TÂCHES.
- ✓ Installed commands/gsd
+ ✓ Installed commands/sf
✓ Installed get-shit-done
- Done! Run /gsd:help to get started.
+ Done! Run /sf:help to get started.
~
diff --git a/mintlify-docs/introduction.mdx b/mintlify-docs/introduction.mdx
index f0f48c6da..a9a4a601f 100644
--- a/mintlify-docs/introduction.mdx
+++ b/mintlify-docs/introduction.mdx
@@ -3,7 +3,7 @@ title: "SF — Singularity Forge"
description: "An autonomous coding agent that researches, plans, executes, and commits code while you focus on what matters."
---
-SF is an autonomous coding agent. Describe what you want built, run `/gsd auto`, and walk away. Come back to working software with clean git history.
+SF is an autonomous coding agent. Describe what you want built, run `/sf auto`, and walk away. Come back to working software with clean git history.
## What SF does
@@ -48,19 +48,19 @@ Every phase gets a fresh context window with pre-loaded context — no accumulat
- Type `/gsd` inside a session. SF executes one unit at a time, pausing between each so you can review.
+ Type `/sf` inside a session. SF executes one unit at a time, pausing between each so you can review.
```bash
- gsd
- /gsd
+ sf
+ /sf
```
- Type `/gsd auto` and walk away. SF autonomously researches, plans, executes, verifies, and commits until the milestone is complete.
+ Type `/sf auto` and walk away. SF autonomously researches, plans, executes, verifies, and commits until the milestone is complete.
```bash
- gsd
- /gsd auto
+ sf
+ /sf auto
```
@@ -70,17 +70,17 @@ The recommended workflow: auto mode in one terminal, steering from another.
**Terminal 1 — let it build:**
```bash
-gsd
-/gsd auto
+sf
+/sf auto
```
**Terminal 2 — steer while it works:**
```bash
-gsd
-/gsd discuss # talk through architecture decisions
-/gsd status # check progress
-/gsd capture # fire-and-forget thoughts
+sf
+/sf discuss # talk through architecture decisions
+/sf status # check progress
+/sf capture # fire-and-forget thoughts
```
## Next steps
diff --git a/native/Cargo.toml b/native/Cargo.toml
index b821847a4..ed6dd95b5 100644
--- a/native/Cargo.toml
+++ b/native/Cargo.toml
@@ -7,7 +7,7 @@ version = "0.1.0"
edition = "2021"
license = "MIT"
authors = ["SF Contributors"]
-repository = "https://github.com/gsd-build/gsd-2"
+repository = "https://github.com/sf-build/sf-2"
[profile.release]
opt-level = 3
diff --git a/native/README.md b/native/README.md
index 54b8ef8bd..81a727b23 100644
--- a/native/README.md
+++ b/native/README.md
@@ -84,7 +84,7 @@ Ripgrep-backed regex search using the `grep-regex`, `grep-searcher`, and `grep-m
**TypeScript usage:**
```typescript
-import { grep, searchContent } from "@gsd/native";
+import { grep, searchContent } from "@sf/native";
// Search files
const result = grep({
@@ -103,9 +103,9 @@ const contentResult = searchContent(Buffer.from(fileContent), {
});
```
-### gsd_parser
+### sf_parser
-SF file parsing and frontmatter extraction. Reads `.gsd` files and extracts structured metadata from YAML frontmatter blocks.
+SF file parsing and frontmatter extraction. Reads `.sf` files and extracts structured metadata from YAML frontmatter blocks.
### highlight
diff --git a/native/crates/engine/src/forge_parser.rs b/native/crates/engine/src/forge_parser.rs
index 342affeb0..4ba0c7ace 100644
--- a/native/crates/engine/src/forge_parser.rs
+++ b/native/crates/engine/src/forge_parser.rs
@@ -1,4 +1,4 @@
-//! SF `.gsd/` directory file parser.
+//! SF `.sf/` directory file parser.
//!
//! Parses markdown files containing YAML-like frontmatter, section headings,
//! and structured content used by SF's planning system (roadmaps, plans,
@@ -7,7 +7,7 @@
//! Key operations:
//! - `parseFrontmatter`: split frontmatter from body, parse YAML-like key-value pairs
//! - `extractSection`: extract content under a specific heading
-//! - `batchParseGsdFiles`: walk a `.gsd/` tree and parse all `.md` files in parallel
+//! - `batchParseSfFiles`: walk a `.sf/` tree and parse all `.md` files in parallel
//! - `parseRoadmapFile`: parse structured roadmap data from content
use std::path::Path;
@@ -38,7 +38,7 @@ pub struct SectionResult {
/// A single parsed SF file from batch parsing.
#[napi(object)]
-pub struct ParsedGsdFile {
+pub struct ParsedSfFile {
/// Relative path from the base directory.
pub path: String,
/// Parsed frontmatter as JSON string.
@@ -56,7 +56,7 @@ pub struct ParsedGsdFile {
#[napi(object)]
pub struct BatchParseResult {
/// All parsed files.
- pub files: Vec<ParsedGsdFile>,
+ pub files: Vec<ParsedSfFile>,
/// Number of files processed.
pub count: u32,
}
@@ -724,13 +724,13 @@ pub fn extract_all_sections(content: String, level: Option) -> String {
sections_to_json(&sections)
}
-/// Batch-parse all `.md` files in a `.gsd/` directory tree.
+/// Batch-parse all `.md` files in a `.sf/` directory tree.
///
/// Reads all markdown files under the given directory, parses frontmatter
/// and extracts all level-2 sections for each file. Returns all results
/// in a single call, avoiding repeated JS<->native boundary crossings.
-#[napi(js_name = "batchParseGsdFiles")]
-pub fn batch_parse_gsd_files(directory: String) -> Result<BatchParseResult> {
+#[napi(js_name = "batchParseSfFiles")]
+pub fn batch_parse_sf_files(directory: String) -> Result<BatchParseResult> {
let dir_path = Path::new(&directory);
if !dir_path.exists() {
return Ok(BatchParseResult {
@@ -767,7 +767,7 @@ pub fn batch_parse_gsd_files(directory: String) -> Result {
let sections = extract_all_sections_internal(body, 2);
let sections_json = sections_to_json(&sections);
- parsed_files.push(ParsedGsdFile {
+ parsed_files.push(ParsedSfFile {
path: path.clone(),
metadata,
body: body.to_string(),
@@ -838,15 +838,15 @@ pub fn parse_roadmap_file(content: String) -> NativeRoadmap {
// ─── SF Tree Scanner ───────────────────────────────────────────────────────
#[napi(object)]
-pub struct GsdTreeEntry {
+pub struct SfTreeEntry {
pub path: String,
pub name: String,
#[napi(js_name = "isDir")]
pub is_dir: bool,
}
-#[napi(js_name = "scanGsdTree")]
-pub fn scan_gsd_tree(directory: String) -> Result<Vec<GsdTreeEntry>> {
+#[napi(js_name = "scanSfTree")]
+pub fn scan_sf_tree(directory: String) -> Result<Vec<SfTreeEntry>> {
let base = Path::new(&directory);
if !base.exists() {
return Ok(Vec::new());
@@ -856,7 +856,7 @@ pub fn scan_gsd_tree(directory: String) -> Result> {
Ok(entries)
}
-fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec<GsdTreeEntry>) -> Result<()> {
+fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec<SfTreeEntry>) -> Result<()> {
let read_dir = match std::fs::read_dir(dir) {
Ok(rd) => rd,
Err(e) => {
@@ -886,7 +886,7 @@ fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec
let name = entry.file_name().to_string_lossy().to_string();
let is_dir = file_type.is_dir();
- entries.push(GsdTreeEntry {
+ entries.push(SfTreeEntry {
path: relative,
name,
is_dir,
diff --git a/native/crates/engine/src/git.rs b/native/crates/engine/src/git.rs
index c32461340..c9fd455ec 100644
--- a/native/crates/engine/src/git.rs
+++ b/native/crates/engine/src/git.rs
@@ -417,7 +417,7 @@ pub fn git_diff_stat(
/// Get name-status diff between two refs with optional pathspec filter.
/// `use_merge_base`: if true, uses three-dot semantics (diff from merge base).
-/// Replaces: `git diff --name-status main...branch -- .gsd/`
+/// Replaces: `git diff --name-status main...branch -- .sf/`
#[napi]
pub fn git_diff_name_status(
repo_path: String,
@@ -523,8 +523,8 @@ pub fn git_diff_numstat(
/// Get unified diff content between two refs with optional pathspec/exclude.
/// `use_merge_base`: if true, uses three-dot semantics.
-/// `exclude`: optional pathspec to exclude (e.g., ".gsd/").
-/// Replaces: `git diff main...branch -- .gsd/` and `-- . :(exclude).gsd/`
+/// `exclude`: optional pathspec to exclude (e.g., ".sf/").
+/// Replaces: `git diff main...branch -- .sf/` and `-- . :(exclude).sf/`
#[napi]
pub fn git_diff_content(
repo_path: String,
@@ -685,7 +685,7 @@ pub fn git_worktree_list(repo_path: String) -> Result> {
}
/// List branches matching an optional glob pattern.
-/// Replaces: `git branch --list milestone/*`, `git branch --list gsd/*`
+/// Replaces: `git branch --list milestone/*`, `git branch --list sf/*`
#[napi]
pub fn git_branch_list(repo_path: String, pattern: Option<String>) -> Result<Vec<String>> {
let repo = open_repo(&repo_path)?;
@@ -711,13 +711,13 @@ pub fn git_branch_list(repo_path: String, pattern: Option) -> Result bool {
// Handle simple prefix/* patterns
if let Some(prefix) = pattern.strip_suffix("/*") {
- // For "gsd/*/*", this becomes "gsd/*" after first strip
+ // For "sf/*/*", this becomes "sf/*" after first strip
if prefix.contains('*') {
- // Recursive: "gsd/*/*" → name must start with "gsd/" and have at least 2 segments after
+ // Recursive: "sf/*/*" → name must start with "sf/" and have at least 2 segments after
if let Some(inner_prefix) = prefix.strip_suffix("/*") {
return name.starts_with(&format!("{inner_prefix}/"))
&& name[inner_prefix.len() + 1..].contains('/');
@@ -730,7 +730,7 @@ fn matches_branch_pattern(name: &str, pattern: &str) -> bool {
}
/// List branches that have been merged into the given target branch.
-/// Replaces: `git branch --merged main --list gsd/*`
+/// Replaces: `git branch --merged main --list sf/*`
#[napi]
pub fn git_branch_list_merged(
repo_path: String,
@@ -792,7 +792,7 @@ pub fn git_ls_files(repo_path: String, pathspec: String) -> Result>
}
/// List references matching a prefix.
-/// Replaces: `git for-each-ref refs/gsd/snapshots/ --format=%(refname)`
+/// Replaces: `git for-each-ref refs/sf/snapshots/ --format=%(refname)`
#[napi]
pub fn git_for_each_ref(repo_path: String, prefix: String) -> Result<Vec<String>> {
let repo = open_repo(&repo_path)?;
diff --git a/native/scripts/build.js b/native/scripts/build.js
index ea399695b..b436871ac 100644
--- a/native/scripts/build.js
+++ b/native/scripts/build.js
@@ -27,26 +27,46 @@ const profile = isDev ? "debug" : "release";
const cargoArgs = ["build"];
if (!isDev) cargoArgs.push("--release");
+function getCargoEnvironment() {
+ return {
+ ...process.env,
+ // Optimize for native CPU when building locally
+ RUSTFLAGS: process.env.RUSTFLAGS || "-C target-cpu=native",
+ };
+}
+
+function getCargoTargetDirectory() {
+ if (process.env.CARGO_TARGET_DIR) {
+ return path.resolve(process.env.CARGO_TARGET_DIR);
+ }
+
+ const metadataRaw = execSync("cargo metadata --format-version 1 --no-deps", {
+ cwd: engineDir,
+ stdio: ["ignore", "pipe", "inherit"],
+ env: getCargoEnvironment(),
+ }).toString();
+ const metadata = JSON.parse(metadataRaw);
+ if (typeof metadata.target_directory !== "string" || metadata.target_directory.length === 0) {
+ throw new Error("cargo metadata did not return a target_directory");
+ }
+ return path.resolve(metadata.target_directory);
+}
+
console.log(`Building forge-engine (${profile})...`);
try {
execSync(`cargo ${cargoArgs.join(" ")}`, {
cwd: engineDir,
stdio: "inherit",
- env: {
- ...process.env,
- // Optimize for native CPU when building locally
- RUSTFLAGS: process.env.RUSTFLAGS || "-C target-cpu=native",
- },
+ env: getCargoEnvironment(),
});
} catch {
process.exit(1);
}
-// Locate the built library
-const cargoTargetRoot = process.env.CARGO_TARGET_DIR
- ? path.resolve(process.env.CARGO_TARGET_DIR)
- : path.join(nativeRoot, "target");
+// Locate the built library using Cargo's actual target directory. Under Nix this
+// is often redirected to a shared cache path rather than native/target.
+const cargoTargetRoot = getCargoTargetDirectory();
const targetDir = path.join(cargoTargetRoot, profile);
const platformTag = `${process.platform}-${process.arch}`;
diff --git a/package-lock.json b/package-lock.json
index 694cb0e3d..3468214fc 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -46,6 +46,7 @@
"proper-lockfile": "^4.1.2",
"proxy-agent": "^6.5.0",
"sharp": "^0.34.5",
+ "shell-quote": "^1.8.3",
"sql.js": "^1.14.1",
"strip-ansi": "^7.1.0",
"undici": "^7.24.2",
@@ -53,12 +54,13 @@
"zod-to-json-schema": "^3.24.6"
},
"bin": {
- "gsd": "dist/loader.js",
- "gsd-cli": "dist/loader.js"
+ "sf": "dist/loader.js",
+ "sf-cli": "dist/loader.js"
},
"devDependencies": {
"@types/node": "^24.12.0",
"@types/picomatch": "^4.0.2",
+ "@types/shell-quote": "^1.7.5",
"c8": "^11.0.0",
"esbuild": "^0.25.12",
"jiti": "^2.6.1",
@@ -69,11 +71,11 @@
},
"optionalDependencies": {
"@anthropic-ai/claude-agent-sdk": "^0.2.83",
- "@gsd-build/engine-darwin-arm64": ">=2.10.2",
- "@gsd-build/engine-darwin-x64": ">=2.10.2",
- "@gsd-build/engine-linux-arm64-gnu": ">=2.10.2",
- "@gsd-build/engine-linux-x64-gnu": ">=2.10.2",
- "@gsd-build/engine-win32-x64-msvc": ">=2.10.2",
+ "@singularity-forge/engine-darwin-arm64": ">=2.10.2",
+ "@singularity-forge/engine-darwin-x64": ">=2.10.2",
+ "@singularity-forge/engine-linux-arm64-gnu": ">=2.10.2",
+ "@singularity-forge/engine-linux-x64-gnu": ">=2.10.2",
+ "@singularity-forge/engine-win32-x64-msvc": ">=2.10.2",
"fsevents": "~2.3.3",
"koffi": "^2.9.0"
}
@@ -898,6 +900,7 @@
"integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@babel/code-frame": "^7.29.0",
"@babel/generator": "^7.29.0",
@@ -1899,107 +1902,6 @@
}
}
},
- "node_modules/@gsd-build/daemon": {
- "resolved": "packages/daemon",
- "link": true
- },
- "node_modules/@gsd-build/engine-darwin-arm64": {
- "version": "2.10.5",
- "resolved": "https://registry.npmjs.org/@gsd-build/engine-darwin-arm64/-/engine-darwin-arm64-2.10.5.tgz",
- "integrity": "sha512-3SUjfDDiCjU64rkoF7pEOonB//dE691ySb18vTQk5PBHKqG+kr859a0ncqVpE8WE4OfuOsSdbTlYd189DP9KSA==",
- "cpu": [
- "arm64"
- ],
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ]
- },
- "node_modules/@gsd-build/engine-darwin-x64": {
- "version": "2.10.5",
- "resolved": "https://registry.npmjs.org/@gsd-build/engine-darwin-x64/-/engine-darwin-x64-2.10.5.tgz",
- "integrity": "sha512-IqW64Ho094N1bVaLmFzaRh6f0nO1XywC8etDnoWmFCTBjrNucJFYug3+ERdAa/4ctyRqzyvRmxnc3WupGKUoNw==",
- "cpu": [
- "x64"
- ],
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ]
- },
- "node_modules/@gsd-build/engine-linux-arm64-gnu": {
- "version": "2.10.5",
- "resolved": "https://registry.npmjs.org/@gsd-build/engine-linux-arm64-gnu/-/engine-linux-arm64-gnu-2.10.5.tgz",
- "integrity": "sha512-4afNWcJ4L7iaPLjBKP/CjMye1+mib5JT9+qKxmOkAycMInGQfJNg8ukZr4V35Eo9rLz4TYFVN9SHjlz/84sAWA==",
- "cpu": [
- "arm64"
- ],
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@gsd-build/engine-linux-x64-gnu": {
- "version": "2.10.5",
- "resolved": "https://registry.npmjs.org/@gsd-build/engine-linux-x64-gnu/-/engine-linux-x64-gnu-2.10.5.tgz",
- "integrity": "sha512-TWWUPTxSEX08AglZUDkxTSa30hV2qWPMmyS1DfmMWNc/toCxHkucWtu5MNAv943ZpRjsuDI+M3kT9JymHiHHJg==",
- "cpu": [
- "x64"
- ],
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@gsd-build/engine-win32-x64-msvc": {
- "version": "2.10.5",
- "resolved": "https://registry.npmjs.org/@gsd-build/engine-win32-x64-msvc/-/engine-win32-x64-msvc-2.10.5.tgz",
- "integrity": "sha512-/S3oKKt6bu/PAXVTDZKgKN021klepfgckDGA4OaAyKdBbv8fwiM/L+HsmV+DMYvvUmyPlsV8dkL8cNFO87jJ+A==",
- "cpu": [
- "x64"
- ],
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ]
- },
- "node_modules/@gsd-build/mcp-server": {
- "resolved": "packages/mcp-server",
- "link": true
- },
- "node_modules/@gsd-build/rpc-client": {
- "resolved": "packages/rpc-client",
- "link": true
- },
- "node_modules/@gsd/native": {
- "resolved": "packages/native",
- "link": true
- },
- "node_modules/@gsd/pi-agent-core": {
- "resolved": "packages/pi-agent-core",
- "link": true
- },
- "node_modules/@gsd/pi-ai": {
- "resolved": "packages/pi-ai",
- "link": true
- },
- "node_modules/@gsd/pi-coding-agent": {
- "resolved": "packages/pi-coding-agent",
- "link": true
- },
- "node_modules/@gsd/pi-tui": {
- "resolved": "packages/pi-tui",
- "link": true
- },
- "node_modules/@gsd/studio": {
- "resolved": "studio",
- "link": true
- },
"node_modules/@hono/node-server": {
"version": "1.19.13",
"resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.13.tgz",
@@ -2614,6 +2516,7 @@
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.6.tgz",
"integrity": "sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"@octokit/auth-token": "^6.0.0",
"@octokit/graphql": "^9.0.3",
@@ -2851,8 +2754,7 @@
"optional": true,
"os": [
"android"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.59.0",
@@ -2866,8 +2768,7 @@
"optional": true,
"os": [
"android"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.59.0",
@@ -2881,8 +2782,7 @@
"optional": true,
"os": [
"darwin"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.59.0",
@@ -2896,8 +2796,7 @@
"optional": true,
"os": [
"darwin"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.59.0",
@@ -2911,8 +2810,7 @@
"optional": true,
"os": [
"freebsd"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.59.0",
@@ -2926,8 +2824,7 @@
"optional": true,
"os": [
"freebsd"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.59.0",
@@ -2941,8 +2838,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.59.0",
@@ -2956,8 +2852,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.59.0",
@@ -2971,8 +2866,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.59.0",
@@ -2986,8 +2880,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-loong64-gnu": {
"version": "4.59.0",
@@ -3001,8 +2894,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-loong64-musl": {
"version": "4.59.0",
@@ -3016,8 +2908,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-ppc64-gnu": {
"version": "4.59.0",
@@ -3031,8 +2922,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-ppc64-musl": {
"version": "4.59.0",
@@ -3046,8 +2936,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.59.0",
@@ -3061,8 +2950,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
"version": "4.59.0",
@@ -3076,8 +2964,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.59.0",
@@ -3091,8 +2978,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.59.0",
@@ -3106,8 +2992,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.59.0",
@@ -3121,8 +3006,7 @@
"optional": true,
"os": [
"linux"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-openbsd-x64": {
"version": "4.59.0",
@@ -3136,8 +3020,7 @@
"optional": true,
"os": [
"openbsd"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-openharmony-arm64": {
"version": "4.59.0",
@@ -3151,8 +3034,7 @@
"optional": true,
"os": [
"openharmony"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.59.0",
@@ -3166,8 +3048,7 @@
"optional": true,
"os": [
"win32"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.59.0",
@@ -3181,8 +3062,7 @@
"optional": true,
"os": [
"win32"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-win32-x64-gnu": {
"version": "4.59.0",
@@ -3196,8 +3076,7 @@
"optional": true,
"os": [
"win32"
- ],
- "peer": true
+ ]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.59.0",
@@ -3211,8 +3090,7 @@
"optional": true,
"os": [
"win32"
- ],
- "peer": true
+ ]
},
"node_modules/@sapphire/async-queue": {
"version": "1.5.5",
@@ -3247,6 +3125,30 @@
"npm": ">=7.0.0"
}
},
+ "node_modules/@sf-run/native": {
+ "resolved": "packages/native",
+ "link": true
+ },
+ "node_modules/@sf-run/pi-agent-core": {
+ "resolved": "packages/pi-agent-core",
+ "link": true
+ },
+ "node_modules/@sf-run/pi-ai": {
+ "resolved": "packages/pi-ai",
+ "link": true
+ },
+ "node_modules/@sf-run/pi-coding-agent": {
+ "resolved": "packages/pi-coding-agent",
+ "link": true
+ },
+ "node_modules/@sf-run/pi-tui": {
+ "resolved": "packages/pi-tui",
+ "link": true
+ },
+ "node_modules/@sf-run/studio": {
+ "resolved": "studio",
+ "link": true
+ },
"node_modules/@silvia-odwyer/photon-node": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@silvia-odwyer/photon-node/-/photon-node-0.3.4.tgz",
@@ -3272,6 +3174,33 @@
"url": "https://github.com/sindresorhus/is?sponsor=1"
}
},
+ "node_modules/@singularity-forge/daemon": {
+ "resolved": "packages/daemon",
+ "link": true
+ },
+ "node_modules/@singularity-forge/engine-darwin-arm64": {
+ "optional": true
+ },
+ "node_modules/@singularity-forge/engine-darwin-x64": {
+ "optional": true
+ },
+ "node_modules/@singularity-forge/engine-linux-arm64-gnu": {
+ "optional": true
+ },
+ "node_modules/@singularity-forge/engine-linux-x64-gnu": {
+ "optional": true
+ },
+ "node_modules/@singularity-forge/engine-win32-x64-msvc": {
+ "optional": true
+ },
+ "node_modules/@singularity-forge/mcp-server": {
+ "resolved": "packages/mcp-server",
+ "link": true
+ },
+ "node_modules/@singularity-forge/rpc-client": {
+ "resolved": "packages/rpc-client",
+ "link": true
+ },
"node_modules/@smithy/abort-controller": {
"version": "4.2.12",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.12.tgz",
@@ -4282,6 +4211,17 @@
"@babel/types": "^7.28.2"
}
},
+ "node_modules/@types/body-parser": {
+ "version": "1.19.6",
+ "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz",
+ "integrity": "sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/connect": "*",
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/cacheable-request": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz",
@@ -4295,6 +4235,16 @@
"@types/responselike": "^1.0.0"
}
},
+ "node_modules/@types/connect": {
+ "version": "3.4.38",
+ "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz",
+ "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
"node_modules/@types/diff": {
"version": "7.0.2",
"resolved": "https://registry.npmjs.org/@types/diff/-/diff-7.0.2.tgz",
@@ -4314,8 +4264,33 @@
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
"integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
"dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/express": {
+ "version": "4.17.25",
+ "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.25.tgz",
+ "integrity": "sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw==",
+ "dev": true,
"license": "MIT",
- "peer": true
+ "dependencies": {
+ "@types/body-parser": "*",
+ "@types/express-serve-static-core": "^4.17.33",
+ "@types/qs": "*",
+ "@types/serve-static": "^1"
+ }
+ },
+ "node_modules/@types/express-serve-static-core": {
+ "version": "4.19.8",
+ "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.19.8.tgz",
+ "integrity": "sha512-02S5fmqeoKzVZCHPZid4b8JH2eM5HzQLZWN2FohQEy/0eXTq8VXZfSN6Pcr3F6N9R/vNrj7cpgbhjie6m/1tCA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*",
+ "@types/qs": "*",
+ "@types/range-parser": "*",
+ "@types/send": "*"
+ }
},
"node_modules/@types/hosted-git-info": {
"version": "3.0.5",
@@ -4331,6 +4306,13 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@types/http-errors": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.5.tgz",
+ "integrity": "sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/istanbul-lib-coverage": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz",
@@ -4348,6 +4330,13 @@
"@types/node": "*"
}
},
+ "node_modules/@types/mime": {
+ "version": "1.3.5",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz",
+ "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/mime-types": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz",
@@ -4380,12 +4369,27 @@
"@types/retry": "*"
}
},
+ "node_modules/@types/qs": {
+ "version": "6.15.0",
+ "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.15.0.tgz",
+ "integrity": "sha512-JawvT8iBVWpzTrz3EGw9BTQFg3BQNmwERdKE22vlTxawwtbyUSlMppvZYKLZzB5zgACXdXxbD3m1bXaMqP/9ow==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@types/range-parser": {
+ "version": "1.2.7",
+ "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz",
+ "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/react": {
"version": "19.2.14",
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz",
"integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==",
"devOptional": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"csstype": "^3.2.2"
}
@@ -4416,6 +4420,46 @@
"integrity": "sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA==",
"license": "MIT"
},
+ "node_modules/@types/send": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/@types/send/-/send-1.2.1.tgz",
+ "integrity": "sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/node": "*"
+ }
+ },
+ "node_modules/@types/serve-static": {
+ "version": "1.15.10",
+ "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.10.tgz",
+ "integrity": "sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/http-errors": "*",
+ "@types/node": "*",
+ "@types/send": "<1"
+ }
+ },
+ "node_modules/@types/serve-static/node_modules/@types/send": {
+ "version": "0.17.6",
+ "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.6.tgz",
+ "integrity": "sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@types/mime": "^1",
+ "@types/node": "*"
+ }
+ },
+ "node_modules/@types/shell-quote": {
+ "version": "1.7.5",
+ "resolved": "https://registry.npmjs.org/@types/shell-quote/-/shell-quote-1.7.5.tgz",
+ "integrity": "sha512-+UE8GAGRPbJVQDdxi16dgadcBfQ+KG2vgZhV1+3A1XmHbmwcdwhCUwIdy+d3pAGrbvgRoVSjeI9vOWyq376Yzw==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@types/sql.js": {
"version": "1.4.9",
"resolved": "https://registry.npmjs.org/@types/sql.js/-/sql.js-1.4.9.tgz",
@@ -4560,6 +4604,12 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
+ "node_modules/array-flatten": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
+ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
+ "license": "MIT"
+ },
"node_modules/ast-types": {
"version": "0.13.4",
"resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz",
@@ -4709,6 +4759,7 @@
}
],
"license": "MIT",
+ "peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
@@ -5179,6 +5230,16 @@
"node": ">= 0.8"
}
},
+ "node_modules/destroy": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
+ "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8",
+ "npm": "1.2.8000 || >= 1.4.16"
+ }
+ },
"node_modules/detect-libc": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
@@ -5581,6 +5642,7 @@
"resolved": "https://registry.npmjs.org/express/-/express-5.2.1.tgz",
"integrity": "sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"accepts": "^2.0.0",
"body-parser": "^2.2.1",
@@ -5751,7 +5813,6 @@
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
"dev": true,
"license": "MIT",
- "peer": true,
"engines": {
"node": ">=12.0.0"
},
@@ -6266,6 +6327,7 @@
"resolved": "https://registry.npmjs.org/hono/-/hono-4.12.12.tgz",
"integrity": "sha512-p1JfQMKaceuCbpJKAPKVqyqviZdS0eUxH9v82oWo1kb9xjQ5wA6iP3FNVAPDFlz5/p7d45lO+BpSk1tuSZMF4Q==",
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=16.9.0"
}
@@ -7047,6 +7109,27 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/methods": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
+ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/mime": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
+ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
+ "license": "MIT",
+ "bin": {
+ "mime": "cli.js"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
"node_modules/mime-db": {
"version": "1.54.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
@@ -7124,7 +7207,6 @@
}
],
"license": "MIT",
- "peer": true,
"bin": {
"nanoid": "bin/nanoid.cjs"
},
@@ -7456,6 +7538,7 @@
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=12"
},
@@ -7536,7 +7619,6 @@
}
],
"license": "MIT",
- "peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -7714,6 +7796,7 @@
"resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz",
"integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==",
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -7723,6 +7806,7 @@
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.4.tgz",
"integrity": "sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"scheduler": "^0.27.0"
},
@@ -7836,7 +7920,6 @@
"integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
"dev": true,
"license": "MIT",
- "peer": true,
"dependencies": {
"@types/estree": "1.0.8"
},
@@ -8077,6 +8160,18 @@
"node": ">=8"
}
},
+ "node_modules/shell-quote": {
+ "version": "1.8.3",
+ "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.3.tgz",
+ "integrity": "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/side-channel": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
@@ -8397,7 +8492,6 @@
"integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
"dev": true,
"license": "MIT",
- "peer": true,
"dependencies": {
"fdir": "^6.5.0",
"picomatch": "^4.0.3"
@@ -8579,6 +8673,15 @@
"browserslist": ">= 4.21.0"
}
},
+ "node_modules/utils-merge": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
+ "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4.0"
+ }
+ },
"node_modules/v8-to-istanbul": {
"version": "9.3.0",
"resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz",
@@ -8692,7 +8795,6 @@
"os": [
"aix"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8710,7 +8812,6 @@
"os": [
"android"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8728,7 +8829,6 @@
"os": [
"android"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8746,7 +8846,6 @@
"os": [
"android"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8764,7 +8863,6 @@
"os": [
"darwin"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8782,7 +8880,6 @@
"os": [
"darwin"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8800,7 +8897,6 @@
"os": [
"freebsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8818,7 +8914,6 @@
"os": [
"freebsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8836,7 +8931,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8854,7 +8948,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8872,7 +8965,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8890,7 +8982,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8908,7 +8999,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8926,7 +9016,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8944,7 +9033,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8962,7 +9050,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8980,7 +9067,6 @@
"os": [
"linux"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -8998,7 +9084,6 @@
"os": [
"netbsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9016,7 +9101,6 @@
"os": [
"netbsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9034,7 +9118,6 @@
"os": [
"openbsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9052,7 +9135,6 @@
"os": [
"openbsd"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9070,7 +9152,6 @@
"os": [
"openharmony"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9088,7 +9169,6 @@
"os": [
"sunos"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9106,7 +9186,6 @@
"os": [
"win32"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9124,7 +9203,6 @@
"os": [
"win32"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9142,7 +9220,6 @@
"os": [
"win32"
],
- "peer": true,
"engines": {
"node": ">=18"
}
@@ -9154,7 +9231,6 @@
"dev": true,
"hasInstallScript": true,
"license": "MIT",
- "peer": true,
"bin": {
"esbuild": "bin/esbuild"
},
@@ -9383,6 +9459,7 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz",
"integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==",
"license": "MIT",
+ "peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
@@ -9426,18 +9503,18 @@
}
},
"packages/daemon": {
- "name": "@gsd-build/daemon",
+ "name": "@singularity-forge/daemon",
"version": "2.74.0",
"license": "MIT",
"dependencies": {
"@anthropic-ai/sdk": "^0.52.0",
- "@gsd-build/rpc-client": "^2.74.0",
+ "@singularity-forge/rpc-client": "^2.74.0",
"discord.js": "^14.25.1",
"yaml": "^2.8.0",
"zod": "^3.24.0"
},
"bin": {
- "gsd-daemon": "dist/cli.js"
+ "sf-daemon": "dist/cli.js"
},
"devDependencies": {
"@types/node": "^24.12.0",
@@ -9466,16 +9543,16 @@
}
},
"packages/mcp-server": {
- "name": "@gsd-build/mcp-server",
+ "name": "@singularity-forge/mcp-server",
"version": "2.74.0",
"license": "MIT",
"dependencies": {
- "@gsd-build/rpc-client": "^2.74.0",
"@modelcontextprotocol/sdk": "^1.27.1",
+ "@singularity-forge/rpc-client": "^2.74.0",
"zod": "^4.0.0"
},
"bin": {
- "gsd-mcp-server": "dist/cli.js"
+ "sf-mcp-server": "dist/cli.js"
},
"devDependencies": {
"@types/node": "^24.12.0",
@@ -9486,16 +9563,16 @@
}
},
"packages/native": {
- "name": "@gsd/native",
+ "name": "@sf-run/native",
"version": "2.74.0",
"license": "MIT"
},
"packages/pi-agent-core": {
- "name": "@gsd/pi-agent-core",
+ "name": "@sf-run/pi-agent-core",
"version": "2.74.0"
},
"packages/pi-ai": {
- "name": "@gsd/pi-ai",
+ "name": "@sf-run/pi-ai",
"version": "2.74.0",
"dependencies": {
"@anthropic-ai/sdk": "^0.73.0",
@@ -9534,13 +9611,14 @@
}
},
"packages/pi-coding-agent": {
- "name": "@gsd/pi-coding-agent",
+ "name": "@sf-run/pi-coding-agent",
"version": "2.74.0",
"dependencies": {
"@mariozechner/jiti": "^2.6.2",
"@silvia-odwyer/photon-node": "^0.3.4",
"chalk": "^5.5.0",
"diff": "^8.0.2",
+ "express": "^4.19.2",
"extract-zip": "^2.0.1",
"file-type": "^21.1.1",
"glob": "^13.0.1",
@@ -9556,13 +9634,305 @@
},
"devDependencies": {
"@types/diff": "^7.0.2",
+ "@types/express": "^4.17.21",
"@types/hosted-git-info": "^3.0.5",
"@types/proper-lockfile": "^4.1.4",
"@types/sql.js": "^1.4.9"
}
},
+ "packages/pi-coding-agent/node_modules/accepts": {
+ "version": "1.3.8",
+ "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
+ "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-types": "~2.1.34",
+ "negotiator": "0.6.3"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/body-parser": {
+ "version": "1.20.4",
+ "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
+ "integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
+ "license": "MIT",
+ "dependencies": {
+ "bytes": "~3.1.2",
+ "content-type": "~1.0.5",
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "destroy": "~1.2.0",
+ "http-errors": "~2.0.1",
+ "iconv-lite": "~0.4.24",
+ "on-finished": "~2.4.1",
+ "qs": "~6.14.0",
+ "raw-body": "~2.5.3",
+ "type-is": "~1.6.18",
+ "unpipe": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8",
+ "npm": "1.2.8000 || >= 1.4.16"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/content-disposition": {
+ "version": "0.5.4",
+ "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
+ "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==",
+ "license": "MIT",
+ "dependencies": {
+ "safe-buffer": "5.2.1"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/cookie-signature": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.7.tgz",
+ "integrity": "sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==",
+ "license": "MIT"
+ },
+ "packages/pi-coding-agent/node_modules/debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "license": "MIT",
+ "dependencies": {
+ "ms": "2.0.0"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/debug/node_modules/ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
+ "license": "MIT"
+ },
+ "packages/pi-coding-agent/node_modules/express": {
+ "version": "4.22.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
+ "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
+ "license": "MIT",
+ "dependencies": {
+ "accepts": "~1.3.8",
+ "array-flatten": "1.1.1",
+ "body-parser": "~1.20.3",
+ "content-disposition": "~0.5.4",
+ "content-type": "~1.0.4",
+ "cookie": "~0.7.1",
+ "cookie-signature": "~1.0.6",
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "encodeurl": "~2.0.0",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "finalhandler": "~1.3.1",
+ "fresh": "~0.5.2",
+ "http-errors": "~2.0.0",
+ "merge-descriptors": "1.0.3",
+ "methods": "~1.1.2",
+ "on-finished": "~2.4.1",
+ "parseurl": "~1.3.3",
+ "path-to-regexp": "~0.1.12",
+ "proxy-addr": "~2.0.7",
+ "qs": "~6.14.0",
+ "range-parser": "~1.2.1",
+ "safe-buffer": "5.2.1",
+ "send": "~0.19.0",
+ "serve-static": "~1.16.2",
+ "setprototypeof": "1.2.0",
+ "statuses": "~2.0.1",
+ "type-is": "~1.6.18",
+ "utils-merge": "1.0.1",
+ "vary": "~1.1.2"
+ },
+ "engines": {
+ "node": ">= 0.10.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/finalhandler": {
+ "version": "1.3.2",
+ "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz",
+ "integrity": "sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "2.6.9",
+ "encodeurl": "~2.0.0",
+ "escape-html": "~1.0.3",
+ "on-finished": "~2.4.1",
+ "parseurl": "~1.3.3",
+ "statuses": "~2.0.2",
+ "unpipe": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/fresh": {
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
+ "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/iconv-lite": {
+ "version": "0.4.24",
+ "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
+ "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
+ "license": "MIT",
+ "dependencies": {
+ "safer-buffer": ">= 2.1.2 < 3"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/media-typer": {
+ "version": "0.3.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
+ "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/merge-descriptors": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
+ "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
+ "license": "MIT",
+ "funding": {
+ "url": "https://github.com/sponsors/sindresorhus"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "license": "MIT",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/negotiator": {
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
+ "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/path-to-regexp": {
+ "version": "0.1.13",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.13.tgz",
+ "integrity": "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA==",
+ "license": "MIT"
+ },
+ "packages/pi-coding-agent/node_modules/qs": {
+ "version": "6.14.2",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz",
+ "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "side-channel": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/raw-body": {
+ "version": "2.5.3",
+ "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
+ "integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
+ "license": "MIT",
+ "dependencies": {
+ "bytes": "~3.1.2",
+ "http-errors": "~2.0.1",
+ "iconv-lite": "~0.4.24",
+ "unpipe": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/send": {
+ "version": "0.19.2",
+ "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz",
+ "integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==",
+ "license": "MIT",
+ "dependencies": {
+ "debug": "2.6.9",
+ "depd": "2.0.0",
+ "destroy": "1.2.0",
+ "encodeurl": "~2.0.0",
+ "escape-html": "~1.0.3",
+ "etag": "~1.8.1",
+ "fresh": "~0.5.2",
+ "http-errors": "~2.0.1",
+ "mime": "1.6.0",
+ "ms": "2.1.3",
+ "on-finished": "~2.4.1",
+ "range-parser": "~1.2.1",
+ "statuses": "~2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/serve-static": {
+ "version": "1.16.3",
+ "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz",
+ "integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==",
+ "license": "MIT",
+ "dependencies": {
+ "encodeurl": "~2.0.0",
+ "escape-html": "~1.0.3",
+ "parseurl": "~1.3.3",
+ "send": "~0.19.1"
+ },
+ "engines": {
+ "node": ">= 0.8.0"
+ }
+ },
+ "packages/pi-coding-agent/node_modules/type-is": {
+ "version": "1.6.18",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
+ "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
+ "license": "MIT",
+ "dependencies": {
+ "media-typer": "0.3.0",
+ "mime-types": "~2.1.24"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"packages/pi-tui": {
- "name": "@gsd/pi-tui",
+ "name": "@sf-run/pi-tui",
"version": "2.74.0",
"dependencies": {
"chalk": "^5.6.2",
@@ -9578,7 +9948,7 @@
}
},
"packages/rpc-client": {
- "name": "@gsd-build/rpc-client",
+ "name": "@singularity-forge/rpc-client",
"version": "2.74.0",
"license": "MIT",
"engines": {
@@ -9586,7 +9956,7 @@
}
},
"studio": {
- "name": "@gsd/studio",
+ "name": "@sf-run/studio",
"version": "0.0.0",
"dependencies": {
"@phosphor-icons/react": "^2.1.10",
diff --git a/package.json b/package.json
index 5b6de7da7..31ebae4cf 100644
--- a/package.json
+++ b/package.json
@@ -57,11 +57,11 @@
"copy-themes": "node scripts/copy-themes.cjs",
"copy-export-html": "node scripts/copy-export-html.cjs",
"test:compile": "node scripts/compile-tests.mjs",
- "test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test \"dist-test/src/tests/*.test.js\" \"dist-test/src/resources/extensions/gsd/tests/*.test.js\" \"dist-test/src/resources/extensions/gsd/tests/*.test.mjs\" \"dist-test/src/resources/extensions/shared/tests/*.test.js\" \"dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js\" \"dist-test/src/resources/extensions/github-sync/tests/*.test.js\" \"dist-test/src/resources/extensions/universal-config/tests/*.test.js\" \"dist-test/src/resources/extensions/voice/tests/*.test.js\" \"dist-test/src/resources/extensions/mcp-client/tests/*.test.js\"",
+ "test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test \"dist-test/src/tests/*.test.js\" \"dist-test/src/resources/extensions/sf/tests/*.test.js\" \"dist-test/src/resources/extensions/sf/tests/*.test.mjs\" \"dist-test/src/resources/extensions/shared/tests/*.test.js\" \"dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js\" \"dist-test/src/resources/extensions/github-sync/tests/*.test.js\" \"dist-test/src/resources/extensions/universal-config/tests/*.test.js\" \"dist-test/src/resources/extensions/voice/tests/*.test.js\" \"dist-test/src/resources/extensions/mcp-client/tests/*.test.js\"",
"test:packages": "node --test packages/pi-coding-agent/dist/core/*.test.js packages/pi-coding-agent/dist/core/tools/spawn-shell-windows.test.js",
- "test:marketplace": "node scripts/with-env.mjs SF_TEST_CLONE_MARKETPLACES=1 -- node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/gsd/tests/claude-import-tui.test.ts src/resources/extensions/gsd/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts",
- "test:coverage": "c8 --reporter=text --reporter=lcov --exclude=\"src/resources/extensions/gsd/tests/**\" --exclude=\"src/tests/**\" --exclude=\"scripts/**\" --exclude=\"native/**\" --exclude=\"node_modules/**\" --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts",
- "test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test \"src/tests/integration/*.test.ts\" \"src/resources/extensions/gsd/tests/integration/*.test.ts\" \"src/resources/extensions/async-jobs/*.test.ts\" \"src/resources/extensions/browser-tools/tests/*.test.mjs\"",
+ "test:marketplace": "node scripts/with-env.mjs SF_TEST_CLONE_MARKETPLACES=1 -- node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/sf/tests/claude-import-tui.test.ts src/resources/extensions/sf/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts",
+ "test:coverage": "c8 --reporter=text --reporter=lcov --exclude=\"src/resources/extensions/sf/tests/**\" --exclude=\"src/tests/**\" --exclude=\"scripts/**\" --exclude=\"native/**\" --exclude=\"node_modules/**\" --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/sf/tests/*.test.ts src/resources/extensions/sf/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts",
+ "test:integration": "node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test \"src/tests/integration/*.test.ts\" \"src/resources/extensions/sf/tests/integration/*.test.ts\" \"src/resources/extensions/async-jobs/*.test.ts\" \"src/resources/extensions/browser-tools/tests/*.test.mjs\"",
"pretest": "npm run typecheck:extensions",
"test": "npm run test:unit && npm run test:integration",
"test:smoke": "node --experimental-strip-types tests/smoke/run.ts",
@@ -70,7 +70,7 @@
"test:live": "node scripts/with-env.mjs SF_LIVE_TESTS=1 -- node --experimental-strip-types tests/live/run.ts",
"test:browser-tools": "node --test src/resources/extensions/browser-tools/tests/browser-tools-unit.test.cjs src/resources/extensions/browser-tools/tests/browser-tools-integration.test.mjs",
"test:native": "node --test packages/native/src/__tests__/grep.test.mjs",
- "test:secret-scan": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/tests/secret-scan.test.ts",
+ "test:secret-scan": "node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test src/tests/secret-scan.test.ts",
"secret-scan": "node scripts/secret-scan.mjs",
"secret-scan:install-hook": "node scripts/install-hooks.mjs",
"build:native": "node native/scripts/build.js",
@@ -129,6 +129,7 @@
"proper-lockfile": "^4.1.2",
"proxy-agent": "^6.5.0",
"sharp": "^0.34.5",
+ "shell-quote": "^1.8.3",
"sql.js": "^1.14.1",
"strip-ansi": "^7.1.0",
"undici": "^7.24.2",
@@ -138,6 +139,7 @@
"devDependencies": {
"@types/node": "^24.12.0",
"@types/picomatch": "^4.0.2",
+ "@types/shell-quote": "^1.7.5",
"c8": "^11.0.0",
"esbuild": "^0.25.12",
"jiti": "^2.6.1",
diff --git a/packages/daemon/src/session-manager.ts b/packages/daemon/src/session-manager.ts
index 40589f5d7..cc1c1be2a 100644
--- a/packages/daemon/src/session-manager.ts
+++ b/packages/daemon/src/session-manager.ts
@@ -285,8 +285,8 @@ export class SessionManager extends EventEmitter {
if (envPath) return resolve(envPath);
try {
- const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim();
- if (gsdBin) return resolve(gsdBin);
+ const sfBin = execSync('which sf', { encoding: 'utf-8' }).trim();
+ if (sfBin) return resolve(sfBin);
} catch {
// which failed
}
diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts
index cb815bf6c..eefa07963 100644
--- a/packages/mcp-server/src/index.ts
+++ b/packages/mcp-server/src/index.ts
@@ -14,7 +14,7 @@ export type {
export { MAX_EVENTS, INIT_TIMEOUT_MS } from './types.js';
// Path resolution utilities
-export { resolveGsdRoot } from './readers/paths.js';
+export { resolveSFRoot } from './readers/paths.js';
// Read-only state readers (usable without a running session)
export { readProgress } from './readers/state.js';
diff --git a/packages/mcp-server/src/readers/captures.ts b/packages/mcp-server/src/readers/captures.ts
index e184d5dee..fb6a94a21 100644
--- a/packages/mcp-server/src/readers/captures.ts
+++ b/packages/mcp-server/src/readers/captures.ts
@@ -2,7 +2,7 @@
// Copyright (c) 2026 Jeremy McSpadden
import { readFileSync, existsSync } from 'node:fs';
-import { resolveGsdRoot, resolveRootFile } from './paths.js';
+import { resolveSFRoot, resolveRootFile } from './paths.js';
// ---------------------------------------------------------------------------
// Types
@@ -86,7 +86,7 @@ export function readCaptures(
projectDir: string,
filter: 'all' | 'pending' | 'actionable' = 'all',
): CapturesResult {
- const sf = resolveGsdRoot(projectDir);
+ const sf = resolveSFRoot(projectDir);
const capturesPath = resolveRootFile(sf, 'CAPTURES.md');
if (!existsSync(capturesPath)) {
diff --git a/packages/mcp-server/src/readers/doctor-lite.ts b/packages/mcp-server/src/readers/doctor-lite.ts
index 5d4560a94..ab85d303e 100644
--- a/packages/mcp-server/src/readers/doctor-lite.ts
+++ b/packages/mcp-server/src/readers/doctor-lite.ts
@@ -3,7 +3,7 @@
import { existsSync, readFileSync } from 'node:fs';
import {
- resolveGsdRoot,
+ resolveSFRoot,
resolveRootFile,
findMilestoneIds,
resolveMilestoneFile,
@@ -38,9 +38,9 @@ export interface DoctorResult {
// Check implementations
// ---------------------------------------------------------------------------
-function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
+function checkProjectLevel(sfRoot: string, issues: DoctorIssue[]): void {
// PROJECT.md should exist
- const projectPath = resolveRootFile(gsdRoot, 'PROJECT.md');
+ const projectPath = resolveRootFile(sfRoot, 'PROJECT.md');
if (!existsSync(projectPath)) {
issues.push({
severity: 'warning',
@@ -53,9 +53,9 @@ function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
}
// STATE.md should exist if milestones exist
- const milestones = findMilestoneIds(gsdRoot);
+ const milestones = findMilestoneIds(sfRoot);
if (milestones.length > 0) {
- const statePath = resolveRootFile(gsdRoot, 'STATE.md');
+ const statePath = resolveRootFile(sfRoot, 'STATE.md');
if (!existsSync(statePath)) {
issues.push({
severity: 'warning',
@@ -69,8 +69,8 @@ function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
}
}
-function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]): void {
- const mDir = resolveMilestoneDir(gsdRoot, mid);
+function checkMilestoneLevel(sfRoot: string, mid: string, issues: DoctorIssue[]): void {
+ const mDir = resolveMilestoneDir(sfRoot, mid);
if (!mDir) {
issues.push({
severity: 'error',
@@ -83,10 +83,10 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
}
// CONTEXT.md should exist
- const ctxPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT');
+ const ctxPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT');
if (!ctxPath || !existsSync(ctxPath)) {
// Check for draft
- const draftPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT-DRAFT');
+ const draftPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT-DRAFT');
if (!draftPath || !existsSync(draftPath)) {
issues.push({
severity: 'warning',
@@ -99,9 +99,9 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
}
// ROADMAP.md should exist if slices exist
- const sliceIds = findSliceIds(gsdRoot, mid);
+ const sliceIds = findSliceIds(sfRoot, mid);
if (sliceIds.length > 0) {
- const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
+ const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
if (!roadmapPath || !existsSync(roadmapPath)) {
issues.push({
severity: 'warning',
@@ -116,10 +116,10 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
// Check if all slices done but no SUMMARY
if (sliceIds.length > 0) {
const allDone = sliceIds.every((sid) => {
- const tasks = findTaskFiles(gsdRoot, mid, sid);
+ const tasks = findTaskFiles(sfRoot, mid, sid);
return tasks.length > 0 && tasks.every((t) => t.hasSummary);
});
- const summaryPath = resolveMilestoneFile(gsdRoot, mid, 'SUMMARY');
+ const summaryPath = resolveMilestoneFile(sfRoot, mid, 'SUMMARY');
if (allDone && (!summaryPath || !existsSync(summaryPath))) {
issues.push({
severity: 'error',
@@ -133,12 +133,12 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
}
function checkSliceLevel(
- gsdRoot: string, mid: string, sid: string, issues: DoctorIssue[],
+ sfRoot: string, mid: string, sid: string, issues: DoctorIssue[],
): void {
const unitId = `${mid}/${sid}`;
// PLAN.md should exist
- const planPath = resolveSliceFile(gsdRoot, mid, sid, 'PLAN');
+ const planPath = resolveSliceFile(sfRoot, mid, sid, 'PLAN');
if (!planPath || !existsSync(planPath)) {
issues.push({
severity: 'error',
@@ -150,7 +150,7 @@ function checkSliceLevel(
}
// Tasks should have plans
- const tasks = findTaskFiles(gsdRoot, mid, sid);
+ const tasks = findTaskFiles(sfRoot, mid, sid);
for (const task of tasks) {
const taskUnitId = `${unitId}/${task.id}`;
if (!task.hasPlan) {
@@ -181,10 +181,10 @@ function checkSliceLevel(
// ---------------------------------------------------------------------------
export function runDoctorLite(projectDir: string, scope?: string): DoctorResult {
- const gsdRoot = resolveGsdRoot(projectDir);
+ const sfRoot = resolveSFRoot(projectDir);
const issues: DoctorIssue[] = [];
- if (!existsSync(gsdRoot)) {
+ if (!existsSync(sfRoot)) {
return {
ok: true,
issues: [{
@@ -199,19 +199,19 @@ export function runDoctorLite(projectDir: string, scope?: string): DoctorResult
}
// Project-level checks
- checkProjectLevel(gsdRoot, issues);
+ checkProjectLevel(sfRoot, issues);
// Milestone + slice checks
const milestoneIds = scope
- ? findMilestoneIds(gsdRoot).filter((id) => id === scope)
- : findMilestoneIds(gsdRoot);
+ ? findMilestoneIds(sfRoot).filter((id) => id === scope)
+ : findMilestoneIds(sfRoot);
for (const mid of milestoneIds) {
- checkMilestoneLevel(gsdRoot, mid, issues);
+ checkMilestoneLevel(sfRoot, mid, issues);
- const sliceIds = findSliceIds(gsdRoot, mid);
+ const sliceIds = findSliceIds(sfRoot, mid);
for (const sid of sliceIds) {
- checkSliceLevel(gsdRoot, mid, sid, issues);
+ checkSliceLevel(sfRoot, mid, sid, issues);
}
}
diff --git a/packages/mcp-server/src/readers/graph.test.ts b/packages/mcp-server/src/readers/graph.test.ts
index 2a6ad70f3..f7c42435c 100644
--- a/packages/mcp-server/src/readers/graph.test.ts
+++ b/packages/mcp-server/src/readers/graph.test.ts
@@ -357,23 +357,23 @@ describe('writeGraph', () => {
after(() => rmSync(projectDir, { recursive: true, force: true }));
it('creates graph.json in .sf/graphs/ after writeGraph()', async () => {
- const gsdRoot = join(projectDir, '.sf');
- await writeGraph(gsdRoot, graph);
- const graphPath = join(gsdRoot, 'graphs', 'graph.json');
+ const sfRoot = join(projectDir, '.sf');
+ await writeGraph(sfRoot, graph);
+ const graphPath = join(sfRoot, 'graphs', 'graph.json');
assert.ok(existsSync(graphPath), `Expected ${graphPath} to exist`);
});
it('write is atomic — no temp file remains after writeGraph()', async () => {
- const gsdRoot = join(projectDir, '.sf');
- await writeGraph(gsdRoot, graph);
- const tmpPath = join(gsdRoot, 'graphs', 'graph.tmp.json');
+ const sfRoot = join(projectDir, '.sf');
+ await writeGraph(sfRoot, graph);
+ const tmpPath = join(sfRoot, 'graphs', 'graph.tmp.json');
assert.ok(!existsSync(tmpPath), 'Temp file should not exist after successful write');
});
it('written graph.json is valid JSON with nodes and edges', async () => {
- const gsdRoot = join(projectDir, '.sf');
- await writeGraph(gsdRoot, graph);
- const raw = readFileSync(join(gsdRoot, 'graphs', 'graph.json'), 'utf-8');
+ const sfRoot = join(projectDir, '.sf');
+ await writeGraph(sfRoot, graph);
+ const raw = readFileSync(join(sfRoot, 'graphs', 'graph.json'), 'utf-8');
const parsed = JSON.parse(raw) as KnowledgeGraph;
assert.ok(Array.isArray(parsed.nodes));
assert.ok(Array.isArray(parsed.edges));
@@ -401,9 +401,9 @@ describe('graphStatus', () => {
it('returns { exists: true, nodeCount, edgeCount, ageHours } when graph exists', async () => {
makeProjectWithArtifacts(projectDir);
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
const graph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, graph);
+ await writeGraph(sfRoot, graph);
const status = await graphStatus(projectDir);
assert.equal(status.exists, true);
@@ -415,9 +415,9 @@ describe('graphStatus', () => {
it('stale = false for a freshly built graph', async () => {
makeProjectWithArtifacts(projectDir);
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
const graph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, graph);
+ await writeGraph(sfRoot, graph);
const status = await graphStatus(projectDir);
assert.equal(status.stale, false);
@@ -425,8 +425,8 @@ describe('graphStatus', () => {
it('stale = true for a graph older than 24h (builtAt backdated)', async () => {
makeProjectWithArtifacts(projectDir);
- const gsdRoot = join(projectDir, '.sf');
- mkdirSync(join(gsdRoot, 'graphs'), { recursive: true });
+ const sfRoot = join(projectDir, '.sf');
+ mkdirSync(join(sfRoot, 'graphs'), { recursive: true });
// Write a graph with a builtAt 25 hours ago
const oldGraph: KnowledgeGraph = {
@@ -435,7 +435,7 @@ describe('graphStatus', () => {
builtAt: new Date(Date.now() - 25 * 60 * 60 * 1000).toISOString(),
};
writeFileSync(
- join(gsdRoot, 'graphs', 'graph.json'),
+ join(sfRoot, 'graphs', 'graph.json'),
JSON.stringify(oldGraph),
'utf-8',
);
@@ -456,9 +456,9 @@ describe('graphQuery', () => {
before(async () => {
projectDir = tmpProject();
makeProjectWithArtifacts(projectDir);
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
const graph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, graph);
+ await writeGraph(sfRoot, graph);
});
after(() => rmSync(projectDir, { recursive: true, force: true }));
@@ -486,7 +486,7 @@ describe('graphQuery', () => {
});
it('budget trims AMBIGUOUS edges first', async () => {
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
// Write a graph with mixed confidence edges
const mixedGraph: KnowledgeGraph = {
builtAt: new Date().toISOString(),
@@ -500,7 +500,7 @@ describe('graphQuery', () => {
{ from: 'n1', to: 'n3', type: 'contains', confidence: 'INFERRED' },
],
};
- await writeGraph(gsdRoot, mixedGraph);
+ await writeGraph(sfRoot, mixedGraph);
// With a very small budget, AMBIGUOUS edges should be trimmed first
const result = await graphQuery(projectDir, 'seed node budget', 10);
@@ -509,7 +509,7 @@ describe('graphQuery', () => {
// Restore the original graph
const originalGraph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, originalGraph);
+ await writeGraph(sfRoot, originalGraph);
});
});
@@ -523,16 +523,16 @@ describe('graphDiff', () => {
beforeEach(async () => {
projectDir = tmpProject();
makeProjectWithArtifacts(projectDir);
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
const graph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, graph);
+ await writeGraph(sfRoot, graph);
});
afterEach(() => rmSync(projectDir, { recursive: true, force: true }));
it('returns empty diff when comparing graph to itself (snapshot = current)', async () => {
- const gsdRoot = join(projectDir, '.sf');
- await writeSnapshot(gsdRoot);
+ const sfRoot = join(projectDir, '.sf');
+ await writeSnapshot(sfRoot);
const diff = await graphDiff(projectDir);
assert.ok(Array.isArray(diff.nodes.added));
assert.ok(Array.isArray(diff.nodes.removed));
@@ -542,9 +542,9 @@ describe('graphDiff', () => {
});
it('returns added nodes when a new node appears after snapshot', async () => {
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
// Take snapshot of the original graph
- await writeSnapshot(gsdRoot);
+ await writeSnapshot(sfRoot);
// Now write a graph with an extra node
const extraGraph: KnowledgeGraph = {
@@ -554,14 +554,14 @@ describe('graphDiff', () => {
],
edges: [],
};
- await writeGraph(gsdRoot, extraGraph);
+ await writeGraph(sfRoot, extraGraph);
const diff = await graphDiff(projectDir);
assert.ok(diff.nodes.added.includes('brand-new-node'), 'new node should be in added');
});
it('returns removed nodes when a node disappears after snapshot', async () => {
- const gsdRoot = join(projectDir, '.sf');
+ const sfRoot = join(projectDir, '.sf');
// Create snapshot with a node that won't exist in current graph
const snapshotGraph: KnowledgeGraph = {
builtAt: new Date().toISOString(),
@@ -571,7 +571,7 @@ describe('graphDiff', () => {
edges: [],
};
writeFileSync(
- join(gsdRoot, 'graphs', '.last-build-snapshot.json'),
+ join(sfRoot, 'graphs', '.last-build-snapshot.json'),
JSON.stringify({ ...snapshotGraph, snapshotAt: new Date().toISOString() }),
'utf-8',
);
@@ -592,9 +592,9 @@ describe('graphDiff', () => {
});
it('writeSnapshot creates .last-build-snapshot.json with snapshotAt', async () => {
- const gsdRoot = join(projectDir, '.sf');
- await writeSnapshot(gsdRoot);
- const snapshotPath = join(gsdRoot, 'graphs', '.last-build-snapshot.json');
+ const sfRoot = join(projectDir, '.sf');
+ await writeSnapshot(sfRoot);
+ const snapshotPath = join(sfRoot, 'graphs', '.last-build-snapshot.json');
assert.ok(existsSync(snapshotPath));
const raw = readFileSync(snapshotPath, 'utf-8');
const parsed = JSON.parse(raw) as KnowledgeGraph & { snapshotAt: string };
diff --git a/packages/mcp-server/src/readers/graph.ts b/packages/mcp-server/src/readers/graph.ts
index 4574a025f..dc4a6751d 100644
--- a/packages/mcp-server/src/readers/graph.ts
+++ b/packages/mcp-server/src/readers/graph.ts
@@ -14,7 +14,7 @@
import { readFileSync, writeFileSync, renameSync, existsSync, mkdirSync } from 'node:fs';
import { join, resolve } from 'node:path';
-import { resolveGsdRoot, findMilestoneIds, resolveMilestoneDir, findSliceIds, resolveSliceDir } from './paths.js';
+import { resolveSFRoot, findMilestoneIds, resolveMilestoneDir, findSliceIds, resolveSliceDir } from './paths.js';
// ---------------------------------------------------------------------------
// Types
@@ -92,20 +92,20 @@ export interface GraphDiffResult {
// Graph file paths
// ---------------------------------------------------------------------------
-function graphsDir(gsdRoot: string): string {
- return join(gsdRoot, 'graphs');
+function graphsDir(sfRoot: string): string {
+ return join(sfRoot, 'graphs');
}
-function graphJsonPath(gsdRoot: string): string {
- return join(graphsDir(gsdRoot), 'graph.json');
+function graphJsonPath(sfRoot: string): string {
+ return join(graphsDir(sfRoot), 'graph.json');
}
-function graphTmpPath(gsdRoot: string): string {
- return join(graphsDir(gsdRoot), 'graph.tmp.json');
+function graphTmpPath(sfRoot: string): string {
+ return join(graphsDir(sfRoot), 'graph.tmp.json');
}
-function snapshotPath(gsdRoot: string): string {
- return join(graphsDir(gsdRoot), '.last-build-snapshot.json');
+function snapshotPath(sfRoot: string): string {
+ return join(graphsDir(sfRoot), '.last-build-snapshot.json');
}
// ---------------------------------------------------------------------------
@@ -115,8 +115,8 @@ function snapshotPath(gsdRoot: string): string {
/**
* Parse STATE.md for active milestone and phase concepts.
*/
-function parseStateFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
- const statePath = join(gsdRoot, 'STATE.md');
+function parseStateFile(sfRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
+ const statePath = join(sfRoot, 'STATE.md');
if (!existsSync(statePath)) return;
let content: string;
@@ -160,8 +160,8 @@ function parseStateFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]
/**
* Parse KNOWLEDGE.md for rules, patterns, and lessons.
*/
-function parseKnowledgeFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
- const knowledgePath = join(gsdRoot, 'KNOWLEDGE.md');
+function parseKnowledgeFile(sfRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
+ const knowledgePath = join(sfRoot, 'KNOWLEDGE.md');
if (!existsSync(knowledgePath)) return;
let content: string;
@@ -239,15 +239,15 @@ function parseKnowledgeFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEd
* Parse milestone ROADMAP.md files for milestones and slices.
*/
function parseMilestoneFiles(
- gsdRoot: string,
+ sfRoot: string,
nodes: GraphNode[],
edges: GraphEdge[],
): void {
- const milestoneIds = findMilestoneIds(gsdRoot);
+ const milestoneIds = findMilestoneIds(sfRoot);
for (const milestoneId of milestoneIds) {
try {
- parseSingleMilestone(gsdRoot, milestoneId, nodes, edges);
+ parseSingleMilestone(sfRoot, milestoneId, nodes, edges);
} catch {
// Skip this milestone on any error
}
@@ -255,12 +255,12 @@ function parseMilestoneFiles(
}
function parseSingleMilestone(
- gsdRoot: string,
+ sfRoot: string,
milestoneId: string,
nodes: GraphNode[],
edges: GraphEdge[],
): void {
- const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
+ const mDir = resolveMilestoneDir(sfRoot, milestoneId);
if (!mDir) return;
const milestoneNodeId = `milestone:${milestoneId}`;
@@ -295,10 +295,10 @@ function parseSingleMilestone(
}
// Parse slices from roadmap table or filesystem
- const sliceIds = findSliceIds(gsdRoot, milestoneId);
+ const sliceIds = findSliceIds(sfRoot, milestoneId);
for (const sliceId of sliceIds) {
try {
- parseSingleSlice(gsdRoot, milestoneId, sliceId, milestoneNodeId, nodes, edges);
+ parseSingleSlice(sfRoot, milestoneId, sliceId, milestoneNodeId, nodes, edges);
} catch {
// Skip this slice on any error
}
@@ -306,14 +306,14 @@ function parseSingleMilestone(
}
function parseSingleSlice(
- gsdRoot: string,
+ sfRoot: string,
milestoneId: string,
sliceId: string,
milestoneNodeId: string,
nodes: GraphNode[],
edges: GraphEdge[],
): void {
- const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
+ const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
if (!sDir) return;
const sliceNodeId = `slice:${milestoneId}:${sliceId}`;
@@ -397,12 +397,12 @@ function parseTasksFromPlan(
* Surprises are mapped to the 'lesson' NodeType (no distinct type exists).
* Parse errors per file are caught — the file is skipped, never rethrows.
*/
-function parseLearningsFiles(gsdRoot: string, nodes: GraphNode[], edges: GraphEdge[]): void {
- const milestoneIds = findMilestoneIds(gsdRoot);
+function parseLearningsFiles(sfRoot: string, nodes: GraphNode[], edges: GraphEdge[]): void {
+ const milestoneIds = findMilestoneIds(sfRoot);
for (const milestoneId of milestoneIds) {
try {
- parseSingleLearningsFile(gsdRoot, milestoneId, nodes, edges);
+ parseSingleLearningsFile(sfRoot, milestoneId, nodes, edges);
} catch {
// Skip this milestone's LEARNINGS.md on any error
}
@@ -410,12 +410,12 @@ function parseLearningsFiles(gsdRoot: string, nodes: GraphNode[], edges: GraphEd
}
function parseSingleLearningsFile(
- gsdRoot: string,
+ sfRoot: string,
milestoneId: string,
nodes: GraphNode[],
edges: GraphEdge[],
): void {
- const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
+ const mDir = resolveMilestoneDir(sfRoot, milestoneId);
if (!mDir) return;
const learningsPath = join(mDir, `${milestoneId}-LEARNINGS.md`);
@@ -543,7 +543,7 @@ function parseLearningsSection(
* and never causes buildGraph() to throw.
*/
export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
- const gsdRoot = resolveGsdRoot(resolve(projectDir));
+ const sfRoot = resolveSFRoot(resolve(projectDir));
const nodes: GraphNode[] = [];
const edges: GraphEdge[] = [];
@@ -558,7 +558,7 @@ export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
for (const parser of parsers) {
try {
- parser(gsdRoot, nodes, edges);
+ parser(sfRoot, nodes, edges);
} catch {
// Parsing error — skip this artifact, mark as ambiguous
nodes.push({
@@ -595,12 +595,12 @@ export async function buildGraph(projectDir: string): Promise {
* Writes to graph.tmp.json first, then renames to graph.json.
* Creates the graphs/ directory if it does not exist.
*/
-export async function writeGraph(gsdRoot: string, graph: KnowledgeGraph): Promise<void> {
- const dir = graphsDir(gsdRoot);
+export async function writeGraph(sfRoot: string, graph: KnowledgeGraph): Promise<void> {
+ const dir = graphsDir(sfRoot);
mkdirSync(dir, { recursive: true });
- const tmp = graphTmpPath(gsdRoot);
- const final = graphJsonPath(gsdRoot);
+ const tmp = graphTmpPath(sfRoot);
+ const final = graphJsonPath(sfRoot);
writeFileSync(tmp, JSON.stringify(graph, null, 2), 'utf-8');
renameSync(tmp, final);
@@ -614,11 +614,11 @@ export async function writeGraph(gsdRoot: string, graph: KnowledgeGraph): Promis
* Copy the current graph.json to .last-build-snapshot.json.
* Adds a snapshotAt timestamp to the copy.
*/
-export async function writeSnapshot(gsdRoot: string): Promise<void> {
- const src = graphJsonPath(gsdRoot);
+export async function writeSnapshot(sfRoot: string): Promise<void> {
+ const src = graphJsonPath(sfRoot);
if (!existsSync(src)) return;
- const dir = graphsDir(gsdRoot);
+ const dir = graphsDir(sfRoot);
mkdirSync(dir, { recursive: true });
const raw = readFileSync(src, 'utf-8');
@@ -630,7 +630,7 @@ export async function writeSnapshot(gsdRoot: string): Promise<void> {
}
const snapshot = { ...graph, snapshotAt: new Date().toISOString() };
- writeFileSync(snapshotPath(gsdRoot), JSON.stringify(snapshot, null, 2), 'utf-8');
+ writeFileSync(snapshotPath(sfRoot), JSON.stringify(snapshot, null, 2), 'utf-8');
}
// ---------------------------------------------------------------------------
@@ -642,8 +642,8 @@ export async function writeSnapshot(gsdRoot: string): Promise {
* Stale means builtAt is older than 24 hours.
*/
export async function graphStatus(projectDir: string): Promise {
- const gsdRoot = resolveGsdRoot(resolve(projectDir));
- const graphPath = graphJsonPath(gsdRoot);
+ const sfRoot = resolveSFRoot(resolve(projectDir));
+ const graphPath = graphJsonPath(sfRoot);
if (!existsSync(graphPath)) {
return { exists: false };
@@ -745,8 +745,8 @@ export async function graphQuery(
term: string,
budget = 4000,
): Promise {
- const gsdRoot = resolveGsdRoot(resolve(projectDir));
- const graphPath = graphJsonPath(gsdRoot);
+ const sfRoot = resolveSFRoot(resolve(projectDir));
+ const graphPath = graphJsonPath(sfRoot);
if (!existsSync(graphPath)) {
return { nodes: [], edges: [], term, budget };
@@ -797,14 +797,14 @@ export async function graphQuery(
* If no snapshot exists, returns empty diff arrays.
*/
export async function graphDiff(projectDir: string): Promise<GraphDiffResult> {
- const gsdRoot = resolveGsdRoot(resolve(projectDir));
+ const sfRoot = resolveSFRoot(resolve(projectDir));
const empty: GraphDiffResult = {
nodes: { added: [], removed: [], changed: [] },
edges: { added: [], removed: [] },
};
- const graphPath = graphJsonPath(gsdRoot);
- const snap = snapshotPath(gsdRoot);
+ const graphPath = graphJsonPath(sfRoot);
+ const snap = snapshotPath(sfRoot);
if (!existsSync(graphPath)) return empty;
if (!existsSync(snap)) return empty;
diff --git a/packages/mcp-server/src/readers/index.ts b/packages/mcp-server/src/readers/index.ts
index 7cdd7b510..d159fff41 100644
--- a/packages/mcp-server/src/readers/index.ts
+++ b/packages/mcp-server/src/readers/index.ts
@@ -1,7 +1,7 @@
// SF MCP Server — readers barrel export
// Copyright (c) 2026 Jeremy McSpadden
-export { resolveGsdRoot, resolveRootFile } from './paths.js';
+export { resolveSFRoot, resolveRootFile } from './paths.js';
export { readProgress } from './state.js';
export type { ProgressResult } from './state.js';
export { readRoadmap } from './roadmap.js';
diff --git a/packages/mcp-server/src/readers/knowledge.ts b/packages/mcp-server/src/readers/knowledge.ts
index 89d127ecc..9ba75cba3 100644
--- a/packages/mcp-server/src/readers/knowledge.ts
+++ b/packages/mcp-server/src/readers/knowledge.ts
@@ -2,7 +2,7 @@
// Copyright (c) 2026 Jeremy McSpadden
import { readFileSync, existsSync } from 'node:fs';
-import { resolveGsdRoot, resolveRootFile } from './paths.js';
+import { resolveSFRoot, resolveRootFile } from './paths.js';
// ---------------------------------------------------------------------------
// Types
@@ -90,7 +90,7 @@ function parseKnowledgeMarkdown(content: string): KnowledgeEntry[] {
// ---------------------------------------------------------------------------
export function readKnowledge(projectDir: string): KnowledgeResult {
- const sf = resolveGsdRoot(projectDir);
+ const sf = resolveSFRoot(projectDir);
const knowledgePath = resolveRootFile(sf, 'KNOWLEDGE.md');
if (!existsSync(knowledgePath)) {
diff --git a/packages/mcp-server/src/readers/metrics.ts b/packages/mcp-server/src/readers/metrics.ts
index 51af8f105..db02c355a 100644
--- a/packages/mcp-server/src/readers/metrics.ts
+++ b/packages/mcp-server/src/readers/metrics.ts
@@ -2,7 +2,7 @@
// Copyright (c) 2026 Jeremy McSpadden
import { readFileSync, existsSync } from 'node:fs';
-import { resolveGsdRoot, resolveRootFile } from './paths.js';
+import { resolveSFRoot, resolveRootFile } from './paths.js';
// ---------------------------------------------------------------------------
// Types
@@ -72,7 +72,7 @@ function parseMetricsJson(content: string): MetricsUnit[] {
// ---------------------------------------------------------------------------
export function readHistory(projectDir: string, limit?: number): HistoryResult {
- const sf = resolveGsdRoot(projectDir);
+ const sf = resolveSFRoot(projectDir);
// metrics.json (primary)
const metricsPath = resolveRootFile(sf, 'metrics.json');
diff --git a/packages/mcp-server/src/readers/paths.ts b/packages/mcp-server/src/readers/paths.ts
index 49fa75580..1c501840e 100644
--- a/packages/mcp-server/src/readers/paths.ts
+++ b/packages/mcp-server/src/readers/paths.ts
@@ -14,7 +14,7 @@ import { execFileSync } from 'node:child_process';
* 3. Walk up from projectDir
* 4. Fallback: projectDir/.sf (even if missing — for init)
*/
-export function resolveGsdRoot(projectDir: string): string {
+export function resolveSFRoot(projectDir: string): string {
const resolved = resolve(projectDir);
// Fast path: .sf/ in the given directory
@@ -53,21 +53,21 @@ export function resolveGsdRoot(projectDir: string): string {
}
/** Resolve path to a .sf/ root file (STATE.md, KNOWLEDGE.md, etc.) */
-export function resolveRootFile(gsdRoot: string, name: string): string {
- return join(gsdRoot, name);
+export function resolveRootFile(sfRoot: string, name: string): string {
+ return join(sfRoot, name);
}
/** Resolve path to milestones directory */
-export function milestonesDir(gsdRoot: string): string {
- return join(gsdRoot, 'milestones');
+export function milestonesDir(sfRoot: string): string {
+ return join(sfRoot, 'milestones');
}
/**
* Find all milestone directory IDs (M001, M002, etc.).
* Handles both bare (M001/) and descriptor (M001-FLIGHT-SIM/) naming.
*/
-export function findMilestoneIds(gsdRoot: string): string[] {
- const dir = milestonesDir(gsdRoot);
+export function findMilestoneIds(sfRoot: string): string[] {
+ const dir = milestonesDir(sfRoot);
if (!existsSync(dir)) return [];
const entries = readdirSync(dir, { withFileTypes: true });
@@ -86,8 +86,8 @@ export function findMilestoneIds(gsdRoot: string): string[] {
* Resolve the actual directory name for a milestone ID.
* M001 might live in M001/ or M001-SOME-DESCRIPTOR/.
*/
-export function resolveMilestoneDir(gsdRoot: string, milestoneId: string): string | null {
- const dir = milestonesDir(gsdRoot);
+export function resolveMilestoneDir(sfRoot: string, milestoneId: string): string | null {
+ const dir = milestonesDir(sfRoot);
if (!existsSync(dir)) return null;
// Fast path: exact match
@@ -109,8 +109,8 @@ export function resolveMilestoneDir(gsdRoot: string, milestoneId: string): strin
* Resolve a milestone-level file (M001-ROADMAP.md, M001-CONTEXT.md, etc.).
* Handles various naming conventions.
*/
-export function resolveMilestoneFile(gsdRoot: string, milestoneId: string, suffix: string): string | null {
- const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
+export function resolveMilestoneFile(sfRoot: string, milestoneId: string, suffix: string): string | null {
+ const mDir = resolveMilestoneDir(sfRoot, milestoneId);
if (!mDir) return null;
const dirName = basename(mDir);
@@ -129,8 +129,8 @@ export function resolveMilestoneFile(gsdRoot: string, milestoneId: string, suffi
}
/** Find all slice IDs within a milestone (S01, S02, etc.) */
-export function findSliceIds(gsdRoot: string, milestoneId: string): string[] {
- const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
+export function findSliceIds(sfRoot: string, milestoneId: string): string[] {
+ const mDir = resolveMilestoneDir(sfRoot, milestoneId);
if (!mDir) return [];
const slicesDir = join(mDir, 'slices');
@@ -149,8 +149,8 @@ export function findSliceIds(gsdRoot: string, milestoneId: string): string[] {
}
/** Resolve the actual directory for a slice */
-export function resolveSliceDir(gsdRoot: string, milestoneId: string, sliceId: string): string | null {
- const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
+export function resolveSliceDir(sfRoot: string, milestoneId: string, sliceId: string): string | null {
+ const mDir = resolveMilestoneDir(sfRoot, milestoneId);
if (!mDir) return null;
const slicesDir = join(mDir, 'slices');
@@ -170,9 +170,9 @@ export function resolveSliceDir(gsdRoot: string, milestoneId: string, sliceId: s
/** Resolve a slice-level file (S01-PLAN.md, etc.) */
export function resolveSliceFile(
- gsdRoot: string, milestoneId: string, sliceId: string, suffix: string,
+ sfRoot: string, milestoneId: string, sliceId: string, suffix: string,
): string | null {
- const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
+ const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
if (!sDir) return null;
const dirName = basename(sDir);
@@ -190,9 +190,9 @@ export function resolveSliceFile(
/** Find all task files in a slice's tasks/ directory */
export function findTaskFiles(
- gsdRoot: string, milestoneId: string, sliceId: string,
+ sfRoot: string, milestoneId: string, sliceId: string,
): Array<{ id: string; hasPlan: boolean; hasSummary: boolean }> {
- const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
+ const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
if (!sDir) return [];
const tasksDir = join(sDir, 'tasks');
diff --git a/packages/mcp-server/src/readers/roadmap.ts b/packages/mcp-server/src/readers/roadmap.ts
index 83ca3f888..167d571ef 100644
--- a/packages/mcp-server/src/readers/roadmap.ts
+++ b/packages/mcp-server/src/readers/roadmap.ts
@@ -3,7 +3,7 @@
import { readFileSync, existsSync } from 'node:fs';
import {
- resolveGsdRoot,
+ resolveSFRoot,
findMilestoneIds,
resolveMilestoneFile,
findSliceIds,
@@ -150,15 +150,15 @@ function parseSlicePlanTasks(content: string): Array<{ id: string; title: string
// Milestone title from CONTEXT.md or ROADMAP.md H1
// ---------------------------------------------------------------------------
-function readMilestoneTitle(gsdRoot: string, mid: string): string {
- const ctxPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT');
+function readMilestoneTitle(sfRoot: string, mid: string): string {
+ const ctxPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT');
if (ctxPath && existsSync(ctxPath)) {
const content = readFileSync(ctxPath, 'utf-8');
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
if (h1) return h1[1].trim();
}
- const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
+ const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
if (roadmapPath && existsSync(roadmapPath)) {
const content = readFileSync(roadmapPath, 'utf-8');
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
@@ -168,8 +168,8 @@ function readMilestoneTitle(gsdRoot: string, mid: string): string {
return mid;
}
-function readVision(gsdRoot: string, mid: string): string {
- const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
+function readVision(sfRoot: string, mid: string): string {
+ const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
if (!roadmapPath || !existsSync(roadmapPath)) return '';
const content = readFileSync(roadmapPath, 'utf-8');
@@ -182,7 +182,7 @@ function readVision(gsdRoot: string, mid: string): string {
// ---------------------------------------------------------------------------
export function readRoadmap(projectDir: string, filterMilestoneId?: string): RoadmapResult {
- const sf = resolveGsdRoot(projectDir);
+ const sf = resolveSFRoot(projectDir);
let milestoneIds = findMilestoneIds(sf);
if (filterMilestoneId) {
diff --git a/packages/mcp-server/src/readers/state.ts b/packages/mcp-server/src/readers/state.ts
index beb2d8df1..cee5e0240 100644
--- a/packages/mcp-server/src/readers/state.ts
+++ b/packages/mcp-server/src/readers/state.ts
@@ -3,7 +3,7 @@
import { readFileSync, existsSync } from 'node:fs';
import {
- resolveGsdRoot,
+ resolveSFRoot,
resolveRootFile,
findMilestoneIds,
resolveMilestoneDir,
@@ -114,7 +114,7 @@ function parseMilestoneRegistry(content: string): RegistryEntry[] {
// Count slices/tasks by walking filesystem
// ---------------------------------------------------------------------------
-function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
+function countSlicesAndTasks(sfRoot: string, milestoneIds: string[]): {
slices: ProgressResult['slices'];
tasks: ProgressResult['tasks'];
} {
@@ -122,11 +122,11 @@ function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
let taskTotal = 0, taskDone = 0;
for (const mid of milestoneIds) {
- const sliceIds = findSliceIds(gsdRoot, mid);
+ const sliceIds = findSliceIds(sfRoot, mid);
sliceTotal += sliceIds.length;
for (const sid of sliceIds) {
- const tasks = findTaskFiles(gsdRoot, mid, sid);
+ const tasks = findTaskFiles(sfRoot, mid, sid);
taskTotal += tasks.length;
const allDone = tasks.length > 0 && tasks.every((t) => t.hasSummary);
@@ -158,7 +158,7 @@ function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
// ---------------------------------------------------------------------------
export function readProgress(projectDir: string): ProgressResult {
- const sf = resolveGsdRoot(projectDir);
+ const sf = resolveSFRoot(projectDir);
const statePath = resolveRootFile(sf, 'STATE.md');
// Defaults
diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts
index b49bb8c5a..e7ba62abc 100644
--- a/packages/mcp-server/src/server.ts
+++ b/packages/mcp-server/src/server.ts
@@ -21,7 +21,7 @@ import { readHistory } from './readers/metrics.js';
import { readCaptures } from './readers/captures.js';
import { readKnowledge } from './readers/knowledge.js';
import { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './readers/graph.js';
-import { resolveGsdRoot } from './readers/paths.js';
+import { resolveSFRoot } from './readers/paths.js';
import { runDoctorLite } from './readers/doctor-lite.js';
import { registerWorkflowTools } from './workflow-tools.js';
import { applySecrets, checkExistingEnvKeys, detectDestination } from './env-writer.js';
@@ -82,7 +82,7 @@ function normalizeQuery(query: string | undefined): QueryCategory {
}
async function readProjectState(projectDir: string, query: string | undefined): Promise> {
- const gsdDir = join(resolve(projectDir), '.sf');
+ const sfDir = join(resolve(projectDir), '.sf');
const category = normalizeQuery(query);
const wanted = new Set(QUERY_FIELDS[category]);
@@ -93,7 +93,7 @@ async function readProjectState(projectDir: string, query: string | undefined):
if (wanted.has('state')) {
try {
- result.state = await readFile(join(gsdDir, 'STATE.md'), 'utf-8');
+ result.state = await readFile(join(sfDir, 'STATE.md'), 'utf-8');
} catch {
result.state = null;
}
@@ -101,7 +101,7 @@ async function readProjectState(projectDir: string, query: string | undefined):
if (wanted.has('project')) {
try {
- result.project = await readFile(join(gsdDir, 'PROJECT.md'), 'utf-8');
+ result.project = await readFile(join(sfDir, 'PROJECT.md'), 'utf-8');
} catch {
result.project = null;
}
@@ -109,14 +109,14 @@ async function readProjectState(projectDir: string, query: string | undefined):
if (wanted.has('requirements')) {
try {
- result.requirements = await readFile(join(gsdDir, 'REQUIREMENTS.md'), 'utf-8');
+ result.requirements = await readFile(join(sfDir, 'REQUIREMENTS.md'), 'utf-8');
} catch {
result.requirements = null;
}
}
if (wanted.has('milestones')) {
- const milestonesDir = join(gsdDir, 'milestones');
+ const milestonesDir = join(sfDir, 'milestones');
try {
const entries = await readdir(milestonesDir, { withFileTypes: true });
const milestones: Array<{ id: string; hasRoadmap: boolean; hasSummary: boolean }> = [];
@@ -845,15 +845,15 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{
};
try {
- const gsdRoot = resolveGsdRoot(projectDir);
+ const sfRoot = resolveSFRoot(projectDir);
switch (mode) {
case 'build': {
if (snapshot) {
- await writeSnapshot(gsdRoot).catch(() => { /* best-effort */ });
+ await writeSnapshot(sfRoot).catch(() => { /* best-effort */ });
}
const graph = await buildGraph(projectDir);
- await writeGraph(gsdRoot, graph);
+ await writeGraph(sfRoot, graph);
return jsonContent({
built: true,
nodeCount: graph.nodes.length,
diff --git a/packages/mcp-server/src/session-manager.ts b/packages/mcp-server/src/session-manager.ts
index 9dd2eb013..029a9d635 100644
--- a/packages/mcp-server/src/session-manager.ts
+++ b/packages/mcp-server/src/session-manager.ts
@@ -249,10 +249,10 @@ export class SessionManager {
// Fallback: locate `sf` via which
try {
- const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim();
- if (gsdBin) {
+ const sfBin = execSync('which sf', { encoding: 'utf-8' }).trim();
+ if (sfBin) {
// sf bin is typically a symlink to dist/loader.js — return the resolved path
- return resolve(gsdBin);
+ return resolve(sfBin);
}
} catch {
// which failed
diff --git a/packages/native/src/__tests__/fd.test.mjs b/packages/native/src/__tests__/fd.test.mjs
index b91af2a14..00f4d1c97 100644
--- a/packages/native/src/__tests__/fd.test.mjs
+++ b/packages/native/src/__tests__/fd.test.mjs
@@ -34,7 +34,7 @@ if (!native) {
describe("native fd: fuzzyFind()", () => {
test("finds files matching a query", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.rs"), "fn main() {}");
@@ -52,7 +52,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("returns empty results for non-matching query", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "hello.txt"), "hello");
@@ -67,7 +67,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("respects maxResults limit", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
@@ -85,7 +85,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("directories have trailing slash and bonus score", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "models"));
@@ -103,7 +103,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("empty query returns all entries", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "a.txt"), "a");
@@ -123,7 +123,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("fuzzy subsequence matching works", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "MyComponentFile.tsx"), "export {}");
@@ -143,7 +143,7 @@ describe("native fd: fuzzyFind()", () => {
const previousTtl = process.env.FS_SCAN_CACHE_TTL_MS;
process.env.FS_SCAN_CACHE_TTL_MS = "10000";
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => {
native.invalidateFsScanCache(tmpDir);
fs.rmSync(tmpDir, { recursive: true, force: true });
@@ -175,7 +175,7 @@ describe("native fd: fuzzyFind()", () => {
});
test("results are sorted by score descending", (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "main.ts"), "");
diff --git a/packages/native/src/__tests__/glob.test.mjs b/packages/native/src/__tests__/glob.test.mjs
index 64719a225..f7129f064 100644
--- a/packages/native/src/__tests__/glob.test.mjs
+++ b/packages/native/src/__tests__/glob.test.mjs
@@ -44,7 +44,7 @@ if (!native) {
describe("native glob: glob()", () => {
test("finds files matching a pattern", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.ts"), "const a = 1;");
@@ -60,7 +60,7 @@ describe("native glob: glob()", () => {
});
test("recursive matching into subdirectories", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "src"));
@@ -79,7 +79,7 @@ describe("native glob: glob()", () => {
});
test("respects maxResults limit", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
@@ -97,7 +97,7 @@ describe("native glob: glob()", () => {
});
test("filters by file type (directories only)", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "dir1"));
@@ -117,7 +117,7 @@ describe("native glob: glob()", () => {
});
test("respects .gitignore", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
// Init a git repo so .gitignore is respected
@@ -137,7 +137,7 @@ describe("native glob: glob()", () => {
});
test("includes gitignored files when gitignore=false", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, ".git"));
@@ -155,7 +155,7 @@ describe("native glob: glob()", () => {
});
test("skips node_modules by default", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.mkdirSync(path.join(tmpDir, "node_modules"));
@@ -173,7 +173,7 @@ describe("native glob: glob()", () => {
});
test("sortByMtime returns most recent first", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "old.txt"), "old");
@@ -209,7 +209,7 @@ describe("native glob: glob()", () => {
});
test("returns mtime for each entry", async (t) => {
- const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
+ const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "test.txt"), "content");
diff --git a/packages/native/src/__tests__/grep.test.mjs b/packages/native/src/__tests__/grep.test.mjs
index ff8a2a828..c26cbef8a 100644
--- a/packages/native/src/__tests__/grep.test.mjs
+++ b/packages/native/src/__tests__/grep.test.mjs
@@ -94,7 +94,7 @@ describe("native grep: grep()", () => {
let tmpDir;
test("returns a promise", async (t) => {
- tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
@@ -111,7 +111,7 @@ describe("native grep: grep()", () => {
});
test("searches files on disk", async (t) => {
- tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\nfoo bar\n");
@@ -133,7 +133,7 @@ describe("native grep: grep()", () => {
});
test("respects glob filter", async (t) => {
- tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
fs.writeFileSync(path.join(tmpDir, "code.ts"), "hello typescript\n");
@@ -151,7 +151,7 @@ describe("native grep: grep()", () => {
});
test("respects maxCount", async (t) => {
- tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
+ tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
for (let i = 0; i < 10; i++) {
diff --git a/packages/native/src/forge-parser/index.ts b/packages/native/src/forge-parser/index.ts
index 28ad68061..613095485 100644
--- a/packages/native/src/forge-parser/index.ts
+++ b/packages/native/src/forge-parser/index.ts
@@ -1,7 +1,7 @@
/**
* SF file parser — native Rust implementation.
*
- * Parses `.gsd/` directory markdown files containing YAML-like frontmatter
+ * Parses `.sf/` directory markdown files containing YAML-like frontmatter
* and structured sections. Replaces the JS regex-based parser for
* performance-critical batch operations.
*/
@@ -72,7 +72,7 @@ export function extractAllSections(
}
/**
- * Batch-parse all `.md` files in a `.gsd/` directory tree.
+ * Batch-parse all `.md` files in a `.sf/` directory tree.
*
* Reads and parses all markdown files under the given directory.
* Each file gets frontmatter parsing and section extraction.
diff --git a/packages/native/src/forge-parser/types.ts b/packages/native/src/forge-parser/types.ts
index f4e510608..da0bc36f5 100644
--- a/packages/native/src/forge-parser/types.ts
+++ b/packages/native/src/forge-parser/types.ts
@@ -1,7 +1,7 @@
/**
* SF file parser type definitions.
*
- * Types for the native Rust parser that handles `.gsd/` directory files
+ * Types for the native Rust parser that handles `.sf/` directory files
* containing YAML-like frontmatter and markdown sections.
*/
diff --git a/packages/pi-coding-agent/package.json b/packages/pi-coding-agent/package.json
index cfcb9a782..3aec07d48 100644
--- a/packages/pi-coding-agent/package.json
+++ b/packages/pi-coding-agent/package.json
@@ -4,8 +4,8 @@
"description": "Coding agent CLI (vendored from pi-mono)",
"type": "module",
"piConfig": {
- "name": "pi",
- "configDir": ".pi"
+ "name": "sf",
+ "configDir": ".sf"
},
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -35,12 +35,14 @@
"strip-ansi": "^7.1.0",
"undici": "^7.24.2",
"sql.js": "^1.14.1",
- "yaml": "^2.8.2"
+ "yaml": "^2.8.2",
+ "express": "^4.19.2"
},
"devDependencies": {
"@types/sql.js": "^1.4.9",
"@types/diff": "^7.0.2",
"@types/hosted-git-info": "^3.0.5",
- "@types/proper-lockfile": "^4.1.4"
+ "@types/proper-lockfile": "^4.1.4",
+ "@types/express": "^4.17.21"
}
}
diff --git a/packages/pi-coding-agent/src/cli/args.test.ts b/packages/pi-coding-agent/src/cli/args.test.ts
new file mode 100644
index 000000000..b2f5eb2d2
--- /dev/null
+++ b/packages/pi-coding-agent/src/cli/args.test.ts
@@ -0,0 +1,21 @@
+import assert from "node:assert/strict";
+import { describe, it } from "node:test";
+import { parseArgs } from "./args.ts";
+
+describe("parseArgs", () => {
+ it("parses optional-value extension flags with implicit and explicit values", () => {
+ const extensionFlags = new Map([
+ ["gemini-cli-proxy", { type: "string" as const, allowNoValue: true }],
+ ]);
+ const defaultFlagArgs = parseArgs(["--gemini-cli-proxy"], extensionFlags);
+ const explicitFlagArgs = parseArgs(["--gemini-cli-proxy=8080"], extensionFlags);
+
+ assert.deepEqual(
+ [
+ defaultFlagArgs.unknownFlags.get("gemini-cli-proxy"),
+ explicitFlagArgs.unknownFlags.get("gemini-cli-proxy"),
+ ],
+ [true, "8080"],
+ );
+ });
+});
diff --git a/packages/pi-coding-agent/src/cli/args.ts b/packages/pi-coding-agent/src/cli/args.ts
index b13330cbb..4d0781266 100644
--- a/packages/pi-coding-agent/src/cli/args.ts
+++ b/packages/pi-coding-agent/src/cli/args.ts
@@ -53,13 +53,18 @@ export interface Args {
bare?: boolean;
}
+export interface ExtensionFlagParseOptions {
+ type: "boolean" | "string";
+ allowNoValue?: boolean;
+}
+
const VALID_THINKING_LEVELS = ["off", "minimal", "low", "medium", "high", "xhigh"] as const;
export function isValidThinkingLevel(level: string): level is ThinkingLevel {
return VALID_THINKING_LEVELS.includes(level as ThinkingLevel);
}
-export function parseArgs(args: string[], extensionFlags?: Map): Args {
+export function parseArgs(args: string[], extensionFlags?: Map): Args {
const result: Args = {
messages: [],
fileArgs: [],
@@ -179,13 +184,18 @@ export function parseArgs(args: string[], extensionFlags?: Map {
- const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
+ const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
// Interactive createAgentSession call lives after the print-mode branch.
- const interactiveCreateIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx + 10);
+ const interactiveCreateIdx = sfCliSource.indexOf("createAgentSession({", printGuardIdx + 10);
// Skip the print-mode createAgentSession (already found by earlier tests);
// walk forward to the next one.
- const nextCreateIdx = gsdCliSource.indexOf("createAgentSession({", interactiveCreateIdx + 10);
+ const nextCreateIdx = sfCliSource.indexOf("createAgentSession({", interactiveCreateIdx + 10);
assert.ok(nextCreateIdx >= 0, "missing interactive createAgentSession call in src/cli.ts");
- const interactiveBlock = gsdCliSource.slice(nextCreateIdx, nextCreateIdx + 800);
+ const interactiveBlock = sfCliSource.slice(nextCreateIdx, nextCreateIdx + 800);
assert.ok(
interactiveBlock.includes("persistModelChanges: true"),
"interactive createAgentSession must explicitly pass persistModelChanges: true so user model picks still persist after the default was inverted to false (#4251)",
@@ -108,11 +108,11 @@ test("CreateAgentSessionOptions forwards persistModelChanges to AgentSession (#4
// moved to the "main.ts sets persistModelChanges = isInteractive" test below.
test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: false (#4251)", () => {
- const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
+ const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
assert.ok(printGuardIdx >= 0, "missing isPrintMode branch in src/cli.ts");
- const createIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx);
+ const createIdx = sfCliSource.indexOf("createAgentSession({", printGuardIdx);
assert.ok(createIdx >= 0, "missing createAgentSession call in print-mode branch");
- const createBlock = gsdCliSource.slice(createIdx, createIdx + 800);
+ const createBlock = sfCliSource.slice(createIdx, createIdx + 800);
assert.ok(
createBlock.includes("persistModelChanges: false"),
"print-mode createAgentSession must pass persistModelChanges: false so --model overrides cannot mutate settings.json",
@@ -120,10 +120,10 @@ test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: fa
});
test("sf src/cli.ts print-mode --model override calls setModel with persist: false (#4251)", () => {
- const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
- const overrideIdx = gsdCliSource.indexOf("if (cliFlags.model)", printGuardIdx);
+ const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
+ const overrideIdx = sfCliSource.indexOf("if (cliFlags.model)", printGuardIdx);
assert.ok(overrideIdx >= 0, "missing --model override block in print-mode branch");
- const overrideBlock = gsdCliSource.slice(overrideIdx, overrideIdx + 500);
+ const overrideBlock = sfCliSource.slice(overrideIdx, overrideIdx + 500);
assert.ok(
overrideBlock.includes("session.setModel(match, { persist: false })"),
"print-mode --model override must pass { persist: false } explicitly so the intent is visible at the call site",
@@ -131,19 +131,19 @@ test("sf src/cli.ts print-mode --model override calls setModel with persist: fal
});
test("sf src/cli.ts print-mode skips validateConfiguredModel when --model is set (#4251)", () => {
- const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
- const validateIdx = gsdCliSource.indexOf("validateConfiguredModel(", printGuardIdx);
+ const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
+ const validateIdx = sfCliSource.indexOf("validateConfiguredModel(", printGuardIdx);
assert.ok(validateIdx >= 0, "missing validateConfiguredModel call in print-mode branch");
// Walk backward to find the nearest enclosing `if (!cliFlags.model)` guard.
- const guardIdx = gsdCliSource.lastIndexOf("if (!cliFlags.model)", validateIdx);
+ const guardIdx = sfCliSource.lastIndexOf("if (!cliFlags.model)", validateIdx);
assert.ok(
guardIdx >= 0 && guardIdx > printGuardIdx,
"validateConfiguredModel must be guarded by `if (!cliFlags.model)` in print mode so a CLI-provided model never triggers fallback repair that overwrites settings.json",
);
// reapplyValidatedModelOnFallback must be inside the same guard block.
- const reapplyIdx = gsdCliSource.indexOf("reapplyValidatedModelOnFallback(", validateIdx);
+ const reapplyIdx = sfCliSource.indexOf("reapplyValidatedModelOnFallback(", validateIdx);
assert.ok(reapplyIdx >= 0, "missing reapplyValidatedModelOnFallback call");
- const blockEnd = gsdCliSource.indexOf("\n }\n", guardIdx);
+ const blockEnd = sfCliSource.indexOf("\n }\n", guardIdx);
assert.ok(
reapplyIdx < blockEnd,
"reapplyValidatedModelOnFallback must be inside the same `if (!cliFlags.model)` block as validateConfiguredModel",
diff --git a/packages/pi-coding-agent/src/core/extensions/index.ts b/packages/pi-coding-agent/src/core/extensions/index.ts
index 0438d364b..d11a101fd 100644
--- a/packages/pi-coding-agent/src/core/extensions/index.ts
+++ b/packages/pi-coding-agent/src/core/extensions/index.ts
@@ -74,6 +74,7 @@ export type {
// Runtime
ExtensionRuntime,
ExtensionShortcut,
+ ExtensionStartupContext,
ExtensionUIContext,
ExtensionUIDialogOptions,
ExtensionWidgetOptions,
diff --git a/packages/pi-coding-agent/src/core/extensions/loader.ts b/packages/pi-coding-agent/src/core/extensions/loader.ts
index ef24bb191..5c7385623 100644
--- a/packages/pi-coding-agent/src/core/extensions/loader.ts
+++ b/packages/pi-coding-agent/src/core/extensions/loader.ts
@@ -500,7 +500,16 @@ function createExtensionAPI(
registerFlag(
name: string,
- options: { description?: string; type: "boolean" | "string"; default?: boolean | string },
+ options: {
+ description?: string;
+ type: "boolean" | "string";
+ default?: boolean | string;
+ allowNoValue?: boolean;
+ onStartup?: (
+ value: boolean | string,
+ context: import("./types.js").ExtensionStartupContext,
+ ) => Promise | void;
+ },
): void {
extension.flags.set(name, { name, extensionPath: extension.path, ...options });
if (options.default !== undefined && !runtime.flagValues.has(name)) {
diff --git a/packages/pi-coding-agent/src/core/extensions/provider-registration.test.ts b/packages/pi-coding-agent/src/core/extensions/provider-registration.test.ts
index 2679feae6..d2376b0f5 100644
--- a/packages/pi-coding-agent/src/core/extensions/provider-registration.test.ts
+++ b/packages/pi-coding-agent/src/core/extensions/provider-registration.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Regression test: pendingProviderRegistrations must be flushed exactly once (#3576)
+// sf — Regression test: pendingProviderRegistrations must be flushed exactly once (#3576)
// Copyright (c) 2026 Jeremy McSpadden
import { describe, it } from "node:test";
diff --git a/packages/pi-coding-agent/src/core/extensions/types.ts b/packages/pi-coding-agent/src/core/extensions/types.ts
index f104aa458..ba46551c8 100644
--- a/packages/pi-coding-agent/src/core/extensions/types.ts
+++ b/packages/pi-coding-agent/src/core/extensions/types.ts
@@ -39,6 +39,7 @@ import type {
} from "@sf-run/pi-tui";
import type { Static, TSchema } from "@sinclair/typebox";
import type { Theme } from "../../modes/interactive/theme/theme.js";
+import type { AuthStorage } from "../auth-storage.js";
import type { BashResult } from "../bash-executor.js";
import type { CompactionPreparation, CompactionResult } from "../compaction/index.js";
import type { EventBus } from "../event-bus.js";
@@ -1164,6 +1165,8 @@ export interface ExtensionAPI {
description?: string;
type: "boolean" | "string";
default?: boolean | string;
+ allowNoValue?: boolean;
+ onStartup?: (value: boolean | string, context: ExtensionStartupContext) => Promise | void;
},
): void;
@@ -1407,9 +1410,18 @@ export interface ExtensionFlag {
description?: string;
type: "boolean" | "string";
default?: boolean | string;
+ allowNoValue?: boolean;
+ onStartup?: (value: boolean | string, context: ExtensionStartupContext) => Promise | void;
extensionPath: string;
}
+export interface ExtensionStartupContext {
+ cwd: string;
+ agentDir: string;
+ authStorage: AuthStorage;
+ modelRegistry: ModelRegistry;
+}
+
export interface ExtensionShortcut {
shortcut: KeyId;
description?: string;
diff --git a/packages/pi-coding-agent/src/core/lsp/lsp-legacy-alias.test.ts b/packages/pi-coding-agent/src/core/lsp/lsp-legacy-alias.test.ts
index c1d4d99ec..b28b7c16e 100644
--- a/packages/pi-coding-agent/src/core/lsp/lsp-legacy-alias.test.ts
+++ b/packages/pi-coding-agent/src/core/lsp/lsp-legacy-alias.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Regression test for LSP legacy server key aliases
+// sf — Regression test for LSP legacy server key aliases
// Copyright (c) 2026 Jeremy McSpadden
/**
diff --git a/packages/pi-coding-agent/src/index.ts b/packages/pi-coding-agent/src/index.ts
index 54a20b846..120d2880a 100644
--- a/packages/pi-coding-agent/src/index.ts
+++ b/packages/pi-coding-agent/src/index.ts
@@ -82,6 +82,7 @@ export type {
ExtensionHandler,
ExtensionRuntime,
ExtensionShortcut,
+ ExtensionStartupContext,
ExtensionUIContext,
ExtensionUIDialogOptions,
ExtensionWidgetOptions,
diff --git a/packages/pi-coding-agent/src/main.ts b/packages/pi-coding-agent/src/main.ts
index 573b0528c..c683031a5 100644
--- a/packages/pi-coding-agent/src/main.ts
+++ b/packages/pi-coding-agent/src/main.ts
@@ -8,7 +8,7 @@
import { type ImageContent, modelsAreEqual, supportsXhigh } from "@sf-run/pi-ai";
import chalk from "chalk";
import { createInterface } from "readline";
-import { type Args, parseArgs, printHelp } from "./cli/args.js";
+import { type Args, type ExtensionFlagParseOptions, parseArgs, printHelp } from "./cli/args.js";
import { selectConfig } from "./cli/config-selector.js";
import { processFileArguments } from "./cli/file-processor.js";
import { discoverAndPrintModels, listModels } from "./cli/list-models.js";
@@ -226,6 +226,33 @@ async function createSessionManager(
return undefined;
}
+async function runStartupFlagHandlers(
+ extensions: LoadExtensionsResult,
+ parsed: Args,
+ context: {
+ cwd: string;
+ agentDir: string;
+ authStorage: AuthStorage;
+ modelRegistry: ModelRegistry;
+ },
+): Promise {
+ let handledStartup = false;
+
+ for (const extension of extensions.extensions) {
+ for (const [flagName, flag] of extension.flags) {
+ const flagValue = parsed.unknownFlags.get(flagName);
+ if (flagValue === undefined || !flag.onStartup) {
+ continue;
+ }
+
+ await flag.onStartup(flagValue, context);
+ handledStartup = true;
+ }
+ }
+
+ return handledStartup;
+}
+
function buildSessionOptions(
parsed: Args,
scopedModels: ScopedModel[],
@@ -442,10 +469,10 @@ export async function main(args: string[]) {
}
extensionsResult.runtime.pendingProviderRegistrations = [];
- const extensionFlags = new Map();
+ const extensionFlags = new Map<string, ExtensionFlagParseOptions>();
for (const ext of extensionsResult.extensions) {
for (const [name, flag] of ext.flags) {
- extensionFlags.set(name, { type: flag.type });
+ extensionFlags.set(name, { type: flag.type, allowNoValue: flag.allowNoValue });
}
}
@@ -490,6 +517,17 @@ export async function main(args: string[]) {
process.exit(0);
}
+ if (
+ await runStartupFlagHandlers(extensionsResult, parsed, {
+ cwd,
+ agentDir,
+ authStorage,
+ modelRegistry,
+ })
+ ) {
+ return;
+ }
+
// Read piped stdin content (if any) - skip for RPC mode which uses stdin for JSON-RPC
if (parsed.mode !== "rpc") {
const stdinContent = await readPipedStdin();
diff --git a/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts b/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts
index 3fe2a260c..826ded1d3 100644
--- a/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts
+++ b/packages/pi-coding-agent/src/modes/rpc/rpc-client.ts
@@ -5,6 +5,8 @@
*/
import { type ChildProcess, spawn } from "node:child_process";
+import { existsSync } from "node:fs";
+import { dirname, join, resolve } from "node:path";
import type { AgentEvent, AgentMessage, ThinkingLevel } from "@sf-run/pi-agent-core";
import type { ImageContent } from "@sf-run/pi-ai";
import type { SessionStats } from "../../core/agent-session.js";
@@ -47,6 +49,49 @@ export interface ModelInfo {
export type RpcEventListener = (event: AgentEvent) => void;
+interface RpcLaunchSpec {
+ command: string;
+ args: string[];
+}
+
+function isTypeScriptEntrypoint(cliPath: string): boolean {
+ return cliPath.endsWith(".ts") || cliPath.endsWith(".tsx");
+}
+
+function findResolveTsLoader(cliPath: string): string | null {
+ let currentDir = resolve(dirname(cliPath));
+ while (true) {
+ const candidate = join(currentDir, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs");
+ if (existsSync(candidate)) {
+ return candidate;
+ }
+ const parentDir = dirname(currentDir);
+ if (parentDir === currentDir) {
+ return null;
+ }
+ currentDir = parentDir;
+ }
+}
+
+export function buildRpcLaunchSpec(cliPath: string): RpcLaunchSpec {
+ if (!isTypeScriptEntrypoint(cliPath)) {
+ return {
+ command: "node",
+ args: [cliPath],
+ };
+ }
+
+ const resolveTsLoader = findResolveTsLoader(cliPath);
+ if (!resolveTsLoader) {
+ throw new Error(`Could not find resolve-ts.mjs for TypeScript CLI path: ${cliPath}`);
+ }
+
+ return {
+ command: "node",
+ args: ["--import", resolveTsLoader, "--experimental-strip-types", cliPath],
+ };
+}
+
// ============================================================================
// RPC Client
// ============================================================================
@@ -84,7 +129,8 @@ export class RpcClient {
args.push(...this.options.args);
}
- this.process = spawn("node", [cliPath, ...args], {
+ const launchSpec = buildRpcLaunchSpec(cliPath);
+ this.process = spawn(launchSpec.command, [...launchSpec.args, ...args], {
cwd: this.options.cwd,
env: { ...process.env, ...this.options.env },
stdio: ["pipe", "pipe", "pipe"],
diff --git a/packages/pi-coding-agent/src/modes/rpc/rpc-protocol-v2.test.ts b/packages/pi-coding-agent/src/modes/rpc/rpc-protocol-v2.test.ts
index e08161186..e05913660 100644
--- a/packages/pi-coding-agent/src/modes/rpc/rpc-protocol-v2.test.ts
+++ b/packages/pi-coding-agent/src/modes/rpc/rpc-protocol-v2.test.ts
@@ -10,6 +10,7 @@ import { describe, it, beforeEach, afterEach, mock } from "node:test";
import assert from "node:assert/strict";
import { PassThrough } from "node:stream";
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
+import { buildRpcLaunchSpec } from "./rpc-client.js";
import type {
RpcCommand,
RpcResponse,
@@ -506,6 +507,23 @@ describe("RpcClient command serialization", () => {
assert.equal(parsed.command, "prompt");
assert.equal(parsed.success, true);
});
+
+ it("typescript cli paths launch through resolve-ts", () => {
+ const repoRoot = new URL("../../../../../", import.meta.url).pathname;
+ const cliPath = `${repoRoot}src/loader.ts`;
+ const launchSpec = buildRpcLaunchSpec(cliPath);
+ assert.equal(launchSpec.command, "node");
+ assert.equal(launchSpec.args[0], "--import");
+ assert.match(launchSpec.args[1], /src\/resources\/extensions\/sf\/tests\/resolve-ts\.mjs$/);
+ assert.equal(launchSpec.args[2], "--experimental-strip-types");
+ assert.equal(launchSpec.args[3], cliPath);
+ });
+
+ it("compiled js cli paths launch directly", () => {
+ const launchSpec = buildRpcLaunchSpec("/tmp/dist/cli.js");
+ assert.equal(launchSpec.command, "node");
+ assert.deepEqual(launchSpec.args, ["/tmp/dist/cli.js"]);
+ });
});
// ============================================================================
diff --git a/packages/rpc-client/src/rpc-client.test.ts b/packages/rpc-client/src/rpc-client.test.ts
index 55f7c53c6..229acbe64 100644
--- a/packages/rpc-client/src/rpc-client.test.ts
+++ b/packages/rpc-client/src/rpc-client.test.ts
@@ -10,7 +10,7 @@ import type {
SessionStats,
RpcV2Event,
} from "./rpc-types.js";
-import { RpcClient } from "./rpc-client.js";
+import { buildRpcLaunchSpec, RpcClient } from "./rpc-client.js";
import type { SdkAgentEvent } from "./rpc-client.js";
// ============================================================================
@@ -271,6 +271,25 @@ describe("RpcClient construction", () => {
});
});
+describe("buildRpcLaunchSpec", () => {
+ it("uses direct node execution for compiled js entrypoints", () => {
+ const launchSpec = buildRpcLaunchSpec("/tmp/dist/cli.js");
+ assert.equal(launchSpec.command, "node");
+ assert.deepEqual(launchSpec.args, ["/tmp/dist/cli.js"]);
+ });
+
+ it("wraps typescript entrypoints with resolve-ts loader", () => {
+ const repoRoot = new URL("../../../", import.meta.url).pathname;
+ const cliPath = `${repoRoot}src/loader.ts`;
+ const launchSpec = buildRpcLaunchSpec(cliPath);
+ assert.equal(launchSpec.command, "node");
+ assert.equal(launchSpec.args[0], "--import");
+ assert.match(launchSpec.args[1], /src\/resources\/extensions\/sf\/tests\/resolve-ts\.mjs$/);
+ assert.equal(launchSpec.args[2], "--experimental-strip-types");
+ assert.equal(launchSpec.args[3], cliPath);
+ });
+});
+
// ============================================================================
// events() Generator Tests
// ============================================================================
diff --git a/packages/rpc-client/src/rpc-client.ts b/packages/rpc-client/src/rpc-client.ts
index 4d5edc53c..afc242dfe 100644
--- a/packages/rpc-client/src/rpc-client.ts
+++ b/packages/rpc-client/src/rpc-client.ts
@@ -7,6 +7,8 @@
*/
import { type ChildProcess, spawn } from "node:child_process";
+import { existsSync } from "node:fs";
+import { dirname, join, resolve } from "node:path";
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
import type {
BashResult,
@@ -55,6 +57,49 @@ export interface RpcClientOptions {
export type RpcEventListener = (event: SdkAgentEvent) => void;
+interface RpcLaunchSpec {
+ command: string;
+ args: string[];
+}
+
+function isTypeScriptEntrypoint(cliPath: string): boolean {
+ return cliPath.endsWith(".ts") || cliPath.endsWith(".tsx");
+}
+
+function findResolveTsLoader(cliPath: string): string | null {
+ let currentDir = resolve(dirname(cliPath));
+ while (true) {
+ const candidate = join(currentDir, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs");
+ if (existsSync(candidate)) {
+ return candidate;
+ }
+ const parentDir = dirname(currentDir);
+ if (parentDir === currentDir) {
+ return null;
+ }
+ currentDir = parentDir;
+ }
+}
+
+export function buildRpcLaunchSpec(cliPath: string): RpcLaunchSpec {
+ if (!isTypeScriptEntrypoint(cliPath)) {
+ return {
+ command: "node",
+ args: [cliPath],
+ };
+ }
+
+ const resolveTsLoader = findResolveTsLoader(cliPath);
+ if (!resolveTsLoader) {
+ throw new Error(`Could not find resolve-ts.mjs for TypeScript CLI path: ${cliPath}`);
+ }
+
+ return {
+ command: "node",
+ args: ["--import", resolveTsLoader, "--experimental-strip-types", cliPath],
+ };
+}
+
// ============================================================================
// RPC Client
// ============================================================================
@@ -95,7 +140,8 @@ export class RpcClient {
args.push(...this.options.args);
}
- this.process = spawn("node", [cliPath, ...args], {
+ const launchSpec = buildRpcLaunchSpec(cliPath);
+ this.process = spawn(launchSpec.command, [...launchSpec.args, ...args], {
cwd: this.options.cwd,
env: { ...process.env, ...this.options.env },
stdio: ["pipe", "pipe", "pipe"],
diff --git a/pkg/package.json b/pkg/package.json
index c9f4c3332..21d1eae6c 100644
--- a/pkg/package.json
+++ b/pkg/package.json
@@ -1,8 +1,8 @@
{
- "name": "@glittercowboy/gsd",
+ "name": "sf",
"version": "2.74.0",
"piConfig": {
- "name": "gsd",
- "configDir": ".gsd"
+ "name": "sf",
+ "configDir": ".sf"
}
}
diff --git a/scripts/pr-risk-check.mjs b/scripts/pr-risk-check.mjs
index 94e662c2f..2b74d6f0f 100644
--- a/scripts/pr-risk-check.mjs
+++ b/scripts/pr-risk-check.mjs
@@ -238,7 +238,7 @@ function renderConsole(report) {
const { changedFiles, systemsPerFile, unmatchedFiles, systemRisks, risk } = report;
console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
- console.log(' GSD2 PR Risk Report');
+ console.log(' sf PR Risk Report');
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
console.log(`Overall Risk: ${TIER_EMOJI[risk]} ${risk.toUpperCase()}`);
diff --git a/src/claude-cli-check.ts b/src/claude-cli-check.ts
index 69a70037a..c4ad61c9d 100644
--- a/src/claude-cli-check.ts
+++ b/src/claude-cli-check.ts
@@ -1,4 +1,4 @@
-// GSD2 — Claude CLI binary detection for onboarding
+// sf — Claude CLI binary detection for onboarding
// Lightweight check used at onboarding time (before extensions load).
// The full readiness check with caching lives in the claude-code-cli extension.
diff --git a/src/cli.ts b/src/cli.ts
index cf7b5911c..0e6627660 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -31,7 +31,7 @@ import {
import { stopWebMode } from './web-mode.js'
import { getProjectSessionsDir } from './project-sessions.js'
import { markStartup, printStartupTimings } from './startup-timings.js'
-import { bootstrapRtk, SF_RTK_DISABLED_ENV, SF_RTK_DISABLED_ENV } from './rtk.js'
+import { bootstrapRtk, SF_RTK_DISABLED_ENV } from './rtk.js'
import { loadEffectiveSFPreferences } from './resources/extensions/sf/preferences.js'
// ---------------------------------------------------------------------------
@@ -145,14 +145,13 @@ if (process.argv.includes('--help') || process.argv.includes('-h')) {
let rtkBootstrapPromise: Promise<void> | undefined
async function doRtkBootstrap(): Promise<void> {
// RTK is opt-in via experimental.rtk preference. Default: disabled.
- // Honor SF_RTK_DISABLED (or SF_RTK_DISABLED) if already explicitly set in the environment
+ // Honor SF_RTK_DISABLED if already explicitly set in the environment
// (env var takes precedence over preferences for manual override).
- if (!process.env[SF_RTK_DISABLED_ENV] && !process.env[SF_RTK_DISABLED_ENV]) {
+ if (!process.env[SF_RTK_DISABLED_ENV]) {
const prefs = loadEffectiveSFPreferences()
const rtkEnabled = prefs?.preferences.experimental?.rtk === true
if (!rtkEnabled) {
process.env[SF_RTK_DISABLED_ENV] = '1'
- process.env[SF_RTK_DISABLED_ENV] = '1'
}
}
@@ -178,10 +177,10 @@ if (cliFlags.messages[0] === 'update') {
// ---------------------------------------------------------------------------
if (cliFlags.messages[0] === 'graph') {
const sub = cliFlags.messages[1]
- const { buildGraph, writeGraph, graphStatus, graphQuery, graphDiff, resolveSfRoot } = await import('@singularity-forge/mcp-server')
+ const { buildGraph, writeGraph, graphStatus, graphQuery, graphDiff, resolveGsdRoot } = await import('@singularity-forge/mcp-server')
const projectDir = process.cwd()
- const sfRoot = resolveSfRoot(projectDir)
+ const sfRoot = resolveGsdRoot(projectDir)
if (!sub || sub === 'build') {
try {
diff --git a/src/headless-context.ts b/src/headless-context.ts
index 75a6702f9..9c1ca52ca 100644
--- a/src/headless-context.ts
+++ b/src/headless-context.ts
@@ -5,7 +5,7 @@
* and bootstraps the .sf/ directory structure when needed.
*/
-import { readFileSync, mkdirSync } from 'node:fs'
+import { readFileSync, mkdirSync, existsSync, renameSync } from 'node:fs'
import { join, resolve } from 'node:path'
// ---------------------------------------------------------------------------
@@ -51,9 +51,17 @@ export async function loadContext(options: ContextOptions): Promise {
/**
* Bootstrap .sf/ directory structure for headless new-milestone.
* Mirrors the bootstrap logic from guided-flow.ts showSmartEntry().
+ * Auto-migrates legacy .gsd/ directories to .sf/ on first encounter.
*/
-export function bootstrapGsdProject(basePath: string): void {
- const gsdDir = join(basePath, '.sf')
- mkdirSync(join(gsdDir, 'milestones'), { recursive: true })
- mkdirSync(join(gsdDir, 'runtime'), { recursive: true })
+export function bootstrapProject(basePath: string): void {
+ const sfDir = join(basePath, '.sf')
+ const legacyDir = join(basePath, '.gsd')
+
+ if (!existsSync(sfDir) && existsSync(legacyDir)) {
+ renameSync(legacyDir, sfDir)
+ process.stderr.write('[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n')
+ }
+
+ mkdirSync(join(sfDir, 'milestones'), { recursive: true })
+ mkdirSync(join(sfDir, 'runtime'), { recursive: true })
}
diff --git a/src/headless-query.ts b/src/headless-query.ts
index 103902579..3e710e664 100644
--- a/src/headless-query.ts
+++ b/src/headless-query.ts
@@ -28,23 +28,23 @@ const jiti = createJiti(fileURLToPath(import.meta.url), { interopDefault: true,
const agentExtensionsDir = join(process.env.SF_AGENT_DIR || join(homedir(), '.sf', 'agent'), 'extensions', 'sf')
const { existsSync } = await import('node:fs')
const useAgentDir = existsSync(join(agentExtensionsDir, 'state.ts'))
-const gsdExtensionPath = (...segments: string[]) =>
+const sfExtensionPath = (...segments: string[]) =>
useAgentDir
? join(agentExtensionsDir, ...segments)
: resolveBundledSourceResource(import.meta.url, 'extensions', 'sf', ...segments)
async function loadExtensionModules() {
- const stateModule = await jiti.import(gsdExtensionPath('state.ts'), {}) as any
- const dispatchModule = await jiti.import(gsdExtensionPath('auto-dispatch.ts'), {}) as any
- const sessionModule = await jiti.import(gsdExtensionPath('session-status-io.ts'), {}) as any
- const prefsModule = await jiti.import(gsdExtensionPath('preferences.ts'), {}) as any
- const autoStartModule = await jiti.import(gsdExtensionPath('auto-start.ts'), {}) as any
+ const stateModule = await jiti.import(sfExtensionPath('state.ts'), {}) as any
+ const dispatchModule = await jiti.import(sfExtensionPath('auto-dispatch.ts'), {}) as any
+ const sessionModule = await jiti.import(sfExtensionPath('session-status-io.ts'), {}) as any
+ const prefsModule = await jiti.import(sfExtensionPath('preferences.ts'), {}) as any
+ const autoStartModule = await jiti.import(sfExtensionPath('auto-start.ts'), {}) as any
return {
openProjectDbIfPresent: autoStartModule.openProjectDbIfPresent as (basePath: string) => Promise,
deriveState: stateModule.deriveState as (basePath: string) => Promise,
resolveDispatch: dispatchModule.resolveDispatch as (opts: any) => Promise,
readAllSessionStatuses: sessionModule.readAllSessionStatuses as (basePath: string) => any[],
- loadEffectiveGSDPreferences: prefsModule.loadEffectiveGSDPreferences as () => any,
+ loadEffectiveSFPreferences: prefsModule.loadEffectiveSFPreferences as () => any,
}
}
@@ -83,7 +83,7 @@ export async function handleQuery(basePath: string): Promise {
deriveState,
resolveDispatch,
readAllSessionStatuses,
- loadEffectiveGSDPreferences,
+ loadEffectiveSFPreferences,
} = await loadExtensionModules()
await openProjectDbIfPresent(basePath)
const state = await deriveState(basePath)
@@ -96,7 +96,7 @@ export async function handleQuery(basePath: string): Promise {
reason: state.phase === 'complete' ? 'All milestones complete.' : state.nextAction,
}
} else {
- const loaded = loadEffectiveGSDPreferences()
+ const loaded = loadEffectiveSFPreferences()
const dispatch = await resolveDispatch({
basePath,
mid: state.activeMilestone.id,
diff --git a/src/headless.ts b/src/headless.ts
index c84418f88..4a2e540ad 100644
--- a/src/headless.ts
+++ b/src/headless.ts
@@ -12,12 +12,13 @@
* 11 — cancelled (SIGINT/SIGTERM received)
*/
-import { existsSync, mkdirSync, writeFileSync } from 'node:fs'
+import { existsSync, mkdirSync, writeFileSync, renameSync } from 'node:fs'
import { join } from 'node:path'
import { resolve } from 'node:path'
import { ChildProcess } from 'node:child_process'
-import { RpcClient, SessionManager } from '@sf-run/pi-coding-agent'
+import { SessionManager } from '@sf-run/pi-coding-agent'
+import { RpcClient } from '@singularity-forge/rpc-client'
import type { SessionInfo } from '@sf-run/pi-coding-agent'
import { getProjectSessionsDir } from './project-sessions.js'
import { loadAndValidateAnswerFile, AnswerInjector } from './headless-answers.js'
@@ -56,7 +57,7 @@ import type { ExtensionUIRequest, ProgressContext } from './headless-ui.js'
import {
loadContext,
- bootstrapGsdProject,
+ bootstrapProject,
} from './headless-context.js'
// ---------------------------------------------------------------------------
@@ -305,26 +306,32 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number):
}
// Bootstrap .sf/ if needed
- const gsdDir = join(process.cwd(), '.sf')
- if (!existsSync(gsdDir)) {
+ const sfDir = join(process.cwd(), '.sf')
+ if (!existsSync(sfDir)) {
if (!options.json) {
process.stderr.write('[headless] Bootstrapping .sf/ project structure...\n')
}
- bootstrapGsdProject(process.cwd())
+ bootstrapProject(process.cwd())
}
// Write context to temp file for the RPC child to read
- const runtimeDir = join(gsdDir, 'runtime')
+ const runtimeDir = join(sfDir, 'runtime')
mkdirSync(runtimeDir, { recursive: true })
writeFileSync(join(runtimeDir, 'headless-context.md'), contextContent, 'utf-8')
}
// Validate .sf/ directory (skip for new-milestone since we just bootstrapped it)
- const gsdDir = join(process.cwd(), '.sf')
- if (!isNewMilestone && !existsSync(gsdDir)) {
- process.stderr.write('[headless] Error: No .sf/ directory found in current directory.\n')
- process.stderr.write("[headless] Run 'sf' interactively first to initialize a project.\n")
- process.exit(1)
+ const sfDir = join(process.cwd(), '.sf')
+ const legacyDir = join(process.cwd(), '.gsd')
+ if (!isNewMilestone && !existsSync(sfDir)) {
+ if (existsSync(legacyDir)) {
+ renameSync(legacyDir, sfDir)
+ process.stderr.write('[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n')
+ } else {
+ process.stderr.write('[headless] Error: No .sf/ directory found in current directory.\n')
+ process.stderr.write("[headless] Run 'sf' interactively first to initialize a project.\n")
+ process.exit(1)
+ }
}
// Query: read-only state snapshot, no RPC child needed
diff --git a/src/loader.ts b/src/loader.ts
index a022daf76..59715918e 100644
--- a/src/loader.ts
+++ b/src/loader.ts
@@ -8,14 +8,14 @@ import { existsSync, readFileSync, mkdirSync, symlinkSync, cpSync } from 'fs'
// Fast-path: handle --version/-v and --help/-h before importing any heavy
// dependencies. This avoids loading the entire pi-coding-agent barrel import
// (~1s) just to print a version string.
-const gsdRoot = resolve(dirname(fileURLToPath(import.meta.url)), '..')
+const sfRootDir = resolve(dirname(fileURLToPath(import.meta.url)), '..')
const args = process.argv.slice(2)
const firstArg = args[0]
// Read package.json once — reused for version, banner, and SF_VERSION below
let sfVersion = '0.0.0'
try {
- const pkg = JSON.parse(readFileSync(join(gsdRoot, 'package.json'), 'utf-8'))
+ const pkg = JSON.parse(readFileSync(join(sfRootDir, 'package.json'), 'utf-8'))
sfVersion = pkg.version || '0.0.0'
} catch { /* ignore */ }
@@ -113,7 +113,7 @@ process.env.SF_CODING_AGENT_DIR = agentDir
// SF_PKG_ROOT — absolute path to sf-run package root. Used by deployed extensions
// (e.g. auto.ts resume path) to import modules like resource-loader.js that live
// in the package tree, not in the deployed ~/.sf/agent/ tree.
-process.env.SF_PKG_ROOT = gsdRoot
+process.env.SF_PKG_ROOT = sfRootDir
// RTK environment — make ~/.sf/agent/bin visible to all child-process paths,
// not just the bash tool, and force-disable RTK telemetry for SF-managed use.
@@ -123,8 +123,8 @@ applyRtkProcessEnv(process.env)
// Without this, extensions (e.g. browser-tools) can't resolve dependencies like
// `playwright` because jiti resolves modules from pi-coding-agent's location, not sf's.
// Prepending sf's node_modules to NODE_PATH fixes this for all extensions.
-const gsdNodeModules = join(gsdRoot, 'node_modules')
-process.env.NODE_PATH = [gsdNodeModules, process.env.NODE_PATH]
+const sfNodeModules = join(sfRootDir, 'node_modules')
+process.env.NODE_PATH = [sfNodeModules, process.env.NODE_PATH]
.filter(Boolean)
.join(delimiter)
// Force Node to re-evaluate module search paths with the updated NODE_PATH.
@@ -145,8 +145,8 @@ process.env.SF_BIN_PATH = process.env.SF_BIN_PATH || process.argv[1]
// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched sf extension
// when dispatching workflow prompts. Prefers dist/resources/ (stable, set at build time)
// over src/resources/ (live working tree) — see resource-loader.ts for rationale.
-const distRes = join(gsdRoot, 'dist', 'resources')
-const srcRes = join(gsdRoot, 'src', 'resources')
+const distRes = join(sfRootDir, 'dist', 'resources')
+const srcRes = join(sfRootDir, 'src', 'resources')
const resourcesDir = existsSync(distRes) ? distRes : srcRes
process.env.SF_WORKFLOW_PATH = join(resourcesDir, 'SF-WORKFLOW.md')
@@ -182,8 +182,8 @@ if (process.env.HTTP_PROXY || process.env.HTTPS_PROXY || process.env.http_proxy
// On Windows without Developer Mode or admin rights, symlinkSync will throw even for
// 'junction' type — so we fall back to cpSync (a full directory copy) which works
// everywhere without elevated permissions.
-const sfRunScopeDir = join(gsdNodeModules, '@sf-run')
-const packagesDir = join(gsdRoot, 'packages')
+const sfRunScopeDir = join(sfNodeModules, '@sf-run')
+const packagesDir = join(sfRootDir, 'packages')
const wsPackages = ['native', 'pi-agent-core', 'pi-ai', 'pi-coding-agent', 'pi-tui']
try {
if (!existsSync(sfRunScopeDir)) mkdirSync(sfRunScopeDir, { recursive: true })
diff --git a/src/resource-loader.ts b/src/resource-loader.ts
index d2944e062..f817ec47d 100644
--- a/src/resource-loader.ts
+++ b/src/resource-loader.ts
@@ -750,14 +750,22 @@ export function buildResourceLoader(
const registry = loadRegistry()
const piAgentDir = join(homedir(), '.pi', 'agent')
const piExtensionsDir = join(piAgentDir, 'extensions')
+ const piLegacyExtensionsDir = join(homedir(), '.pi', 'extensions')
const bundledKeys = getBundledExtensionKeys()
- const piExtensionPaths = discoverExtensionEntryPaths(piExtensionsDir)
- .filter((entryPath) => !bundledKeys.has(getExtensionKey(entryPath, piExtensionsDir)))
- .filter((entryPath) => {
- const manifest = readManifestFromEntryPath(entryPath)
- if (!manifest) return true
- return isExtensionEnabled(registry, manifest.id)
- })
+
+ const discoverPiExtensions = (dir: string): string[] =>
+ discoverExtensionEntryPaths(dir)
+ .filter((entryPath) => !bundledKeys.has(getExtensionKey(entryPath, dir)))
+ .filter((entryPath) => {
+ const manifest = readManifestFromEntryPath(entryPath)
+ if (!manifest) return true
+ return isExtensionEnabled(registry, manifest.id)
+ })
+
+ const piExtensionPaths = [
+ ...discoverPiExtensions(piExtensionsDir),
+ ...discoverPiExtensions(piLegacyExtensionsDir),
+ ]
// Print-mode callers pass their own additional extension paths (e.g. --extension
// flags). Non-print mode uses the implicit pi-extensions discovery above.
diff --git a/src/resources/extensions/genai-proxy/extension-manifest.json b/src/resources/extensions/genai-proxy/extension-manifest.json
index bb207e6b6..9530362a7 100644
--- a/src/resources/extensions/genai-proxy/extension-manifest.json
+++ b/src/resources/extensions/genai-proxy/extension-manifest.json
@@ -2,12 +2,9 @@
"id": "genai-proxy",
"name": "GenAI Proxy",
"version": "1.0.0",
- "description": "Exposes SF's AI engine as a standard Google GenAI / OpenAI compatible endpoint.",
- "tier": "community",
+ "description": "OpenAI-compatible proxy for Gemini CLI and GenAI clients",
+ "tier": "bundled",
"requires": {
- "platform": "all"
- },
- "provides": {
- "commands": ["/genai-proxy"]
+ "platform": ">=2.29.0"
}
}
diff --git a/src/resources/extensions/genai-proxy/index.ts b/src/resources/extensions/genai-proxy/index.ts
index 4eea34403..cdf235de8 100644
--- a/src/resources/extensions/genai-proxy/index.ts
+++ b/src/resources/extensions/genai-proxy/index.ts
@@ -1,14 +1,9 @@
import type { ExtensionAPI } from "@sf-run/pi-coding-agent";
-import { registerProxyCommands } from "./proxy-command.js";
+import { installGenaiProxyExtension } from "./proxy-command.js";
-/**
- * GenAI Proxy Extension
- *
- * Exposes Singularity Forge's AI engine (pi-ai) as a standard Google GenAI
- * compatible endpoint. This allows you to use your OAuth-authenticated
- * Google models with any tool or SDK.
- */
-export default function genaiProxy(pi: ExtensionAPI) {
- // Register /genai-proxy commands
- registerProxyCommands(pi);
+export { installGenaiProxyExtension, resolveProxyPort } from "./proxy-command.js";
+export { ProxyServer, createProxyServer } from "./proxy-server.js";
+
+export default function genaiProxyExtension(api: ExtensionAPI): void {
+ installGenaiProxyExtension(api);
}
diff --git a/src/resources/extensions/genai-proxy/package.json b/src/resources/extensions/genai-proxy/package.json
new file mode 100644
index 000000000..f069501bb
--- /dev/null
+++ b/src/resources/extensions/genai-proxy/package.json
@@ -0,0 +1,11 @@
+{
+ "name": "pi-genai-proxy",
+ "private": true,
+ "version": "1.0.0",
+ "type": "module",
+ "pi": {
+ "extensions": [
+ "./index.ts"
+ ]
+ }
+}
diff --git a/src/resources/extensions/genai-proxy/proxy-command.ts b/src/resources/extensions/genai-proxy/proxy-command.ts
index bac62a433..75e033c2f 100644
--- a/src/resources/extensions/genai-proxy/proxy-command.ts
+++ b/src/resources/extensions/genai-proxy/proxy-command.ts
@@ -1,55 +1,129 @@
-import type { ExtensionAPI } from "@sf-run/pi-coding-agent";
-import * as server from "./proxy-server.js";
+import type { ExtensionAPI, ExtensionCommandContext, ExtensionStartupContext } from "@sf-run/pi-coding-agent";
+import { createProxyServer, type ProxyServer } from "./proxy-server.js";
-export function registerProxyCommands(pi: ExtensionAPI): void {
- pi.registerCommand("genai-proxy", {
- description: "Manage GenAI Proxy server — start | stop | status",
- async handler(args, ctx) {
- const parts = (args ?? "").trim().split(/\s+/);
- const subcommand = parts[0] || "status";
+const PROXY_COMMAND_NAME = "genai-proxy";
+const PROXY_FLAG_NAME = "gemini-cli-proxy";
+const DEFAULT_PROXY_PORT = 3000;
- switch (subcommand) {
- case "start":
- const port = parseInt(parts[1], 10) || 3000;
- if (server.isRunning()) {
- ctx.ui.notify("GenAI Proxy is already running.", "info");
- return;
- }
- try {
- await server.startProxy(port, (msg) => {
- if (ctx.hasUI) {
- ctx.ui.notify(msg, "info");
- } else {
- process.stderr.write(`[genai-proxy] ${msg}\n`);
- }
- });
- ctx.ui.notify(`GenAI Proxy started on port ${port}`, "success");
- } catch (error) {
- const message = error instanceof Error ? error.message : String(error);
- ctx.ui.notify(message, "error");
- }
- break;
-
- case "stop":
- if (!server.isRunning()) {
- ctx.ui.notify("GenAI Proxy is not running.", "warning");
- return;
- }
- server.stopProxy();
- ctx.ui.notify("GenAI Proxy stopped.", "success");
- break;
-
- case "status":
- if (server.isRunning()) {
- ctx.ui.notify("GenAI Proxy is running.", "info");
- } else {
- ctx.ui.notify("GenAI Proxy is not running.", "info");
- }
- break;
-
- default:
- ctx.ui.notify("Usage: /genai-proxy start [port] | stop | status", "warning");
- }
- },
- });
+export interface ProxyCommandDependencies {
+ createProxyServer?: typeof createProxyServer;
+}
+
+export function installGenaiProxyExtension(
+ api: Pick<ExtensionAPI, "registerFlag" | "registerCommand">,
+ dependencies?: ProxyCommandDependencies,
+): void {
+ let proxyServer: ProxyServer | null = null;
+ const buildProxyServer = dependencies?.createProxyServer ?? createProxyServer;
+
+ const ensureProxyServer = (context: ExtensionStartupContext | ExtensionCommandContext, port: number): ProxyServer => {
+ if (proxyServer && proxyServer.getPort() === port) {
+ return proxyServer;
+ }
+ if (proxyServer) {
+ throw new Error(`Proxy already running on port ${proxyServer.getPort()}`);
+ }
+
+ proxyServer = buildProxyServer({
+ port,
+ modelRegistry: context.modelRegistry,
+ onLog: (message) => notifyProxyStatus(context, message, "info"),
+ });
+ return proxyServer;
+ };
+
+ api.registerFlag(PROXY_FLAG_NAME, {
+ description: "Start the Gemini CLI proxy server",
+ type: "string",
+ allowNoValue: true,
+ onStartup: async (value, context) => {
+ const server = ensureProxyServer(context, resolveProxyPort(value));
+ await server.start();
+ },
+ });
+
+ api.registerCommand(PROXY_COMMAND_NAME, {
+ description: "Manage the Gemini CLI proxy server",
+ handler: async (args, context) => {
+ await handleProxyCommand(args ?? "", context, ensureProxyServer, () => proxyServer, () => {
+ proxyServer = null;
+ });
+ },
+ });
+}
+
+export function resolveProxyPort(flagValue: boolean | string | undefined): number {
+ if (flagValue === true || flagValue === false || flagValue === undefined) {
+ return DEFAULT_PROXY_PORT;
+ }
+
+ const port = Number.parseInt(flagValue, 10);
+ if (!Number.isFinite(port) || port <= 0 || port > 65535) {
+ throw new Error(`Invalid proxy port: ${flagValue}`);
+ }
+ return port;
+}
+
+async function handleProxyCommand(
+ rawArgs: string,
+ context: ExtensionCommandContext,
+ ensureProxyServer: (context: ExtensionCommandContext, port: number) => ProxyServer,
+ getProxyServer: () => ProxyServer | null,
+ clearProxyServer: () => void,
+): Promise<void> {
+ const [subcommand = "status", portArg] = rawArgs
+ .trim()
+ .split(/\s+/)
+ .filter((value): value is string => value.length > 0);
+
+ if (subcommand === "start") {
+ const existingServer = getProxyServer();
+ if (existingServer?.isRunning()) {
+ notifyProxyStatus(context, `Proxy already running on port ${existingServer.getPort()}`, "info");
+ return;
+ }
+
+ const server = ensureProxyServer(context, resolveProxyPort(portArg === undefined ? true : portArg));
+ await server.start();
+ return;
+ }
+
+ if (subcommand === "stop") {
+ const server = getProxyServer();
+ if (!server?.isRunning()) {
+ notifyProxyStatus(context, "Proxy is not running", "warning");
+ return;
+ }
+
+ await server.stop();
+ clearProxyServer();
+ notifyProxyStatus(context, "Proxy stopped", "success");
+ return;
+ }
+
+ if (subcommand === "status") {
+ const server = getProxyServer();
+ if (server?.isRunning()) {
+ notifyProxyStatus(context, `Proxy running on port ${server.getPort()}`, "info");
+ return;
+ }
+
+ notifyProxyStatus(context, "Proxy is not running", "info");
+ return;
+ }
+
+ notifyProxyStatus(context, "Usage: /genai-proxy start [port] | stop | status", "warning");
+}
+
+function notifyProxyStatus(
+ context: ExtensionStartupContext | ExtensionCommandContext,
+ message: string,
+ type: Parameters<ExtensionCommandContext["ui"]["notify"]>[1],
+): void {
+ if ("ui" in context) {
+ context.ui.notify(message, type);
+ return;
+ }
+
+ process.stderr.write(`[genai-proxy] ${message}\n`);
}
diff --git a/src/resources/extensions/genai-proxy/proxy-server.ts b/src/resources/extensions/genai-proxy/proxy-server.ts
index 93358a54d..5f6ff7776 100644
--- a/src/resources/extensions/genai-proxy/proxy-server.ts
+++ b/src/resources/extensions/genai-proxy/proxy-server.ts
@@ -1,16 +1,429 @@
-const GENAI_PROXY_DISABLED_ERROR_MESSAGE =
- "GenAI proxy is disabled at startup because no supported auth bootstrap is available from this package boundary.";
+import express from "express";
+import type { Server } from "node:http";
+import {
+ stream,
+ type AssistantMessage,
+ type AssistantMessageEventStream,
+ type Api,
+ type Context,
+ type Model,
+ type ProviderStreamOptions,
+} from "@sf-run/pi-ai";
+import type { ModelRegistry } from "@sf-run/pi-coding-agent";
-let running = false;
+const LISTEN_ADDRESS = "127.0.0.1";
+const OPENAI_CREATED_TIMESTAMP = 1_677_610_602;
+const SSE_CONTENT_TYPE = "text/event-stream";
+const NDJSON_CONTENT_TYPE = "application/x-ndjson";
-export function isRunning(): boolean {
- return running;
+type ProxyStreamFn = (
+ model: Model,
+ context: Context,
+ options?: ProviderStreamOptions,
+) => AssistantMessageEventStream;
+
+export interface ProxyServerOptions {
+ port: number;
+ modelRegistry: Pick<ModelRegistry, "find" | "getAll" | "getApiKey">;
+ onLog?: (message: string) => void;
+ streamModel?: ProxyStreamFn;
}
-export async function startProxy(_port: number, _onLog: (msg: string) => void): Promise<void> {
- throw new Error(GENAI_PROXY_DISABLED_ERROR_MESSAGE);
+interface OpenAiMessage {
+ role?: string;
+ content?: string | Array<{ type?: string; text?: string }>;
}
-export function stopProxy(): void {
- running = false;
+interface OpenAiChatBody {
+ model?: string;
+ messages?: OpenAiMessage[];
+ stream?: boolean;
+ temperature?: number;
+ max_tokens?: number;
+}
+
+interface GoogleStreamBody {
+ model?: string;
+ contents?: Array<{
+ role?: string;
+ parts?: Array<{ text?: string }>;
+ }>;
+ systemInstruction?: {
+ parts?: Array<{ text?: string }>;
+ };
+ stream?: boolean;
+ temperature?: number;
+ generationConfig?: {
+ maxOutputTokens?: number;
+ };
+}
+
+type RouteKind = "openai" | "google";
+
+export class ProxyServer {
+ private server: Server | null = null;
+ private boundPort: number | null = null;
+ private readonly options: ProxyServerOptions;
+ private readonly streamModel: ProxyStreamFn;
+
+ constructor(options: ProxyServerOptions) {
+ this.options = options;
+ this.streamModel = options.streamModel ?? stream;
+ }
+
+ isRunning(): boolean {
+ return this.server !== null;
+ }
+
+ getPort(): number | null {
+ return this.boundPort;
+ }
+
+ async start(): Promise<void> {
+ if (this.server) {
+ return;
+ }
+
+ const app = express();
+ app.use(express.json({ limit: "2mb" }));
+
+ app.get(["/v1/models", "/v1beta/models"], (_req, res) => {
+ const models = this.options.modelRegistry.getAll().map((model) => ({
+ id: model.id,
+ object: "model",
+ created: OPENAI_CREATED_TIMESTAMP,
+ owned_by: model.provider,
+ name: model.name,
+ capabilities: model.capabilities,
+ }));
+
+ if (_req.path.startsWith("/v1beta")) {
+ res.json({ models });
+ return;
+ }
+
+ res.json({ object: "list", data: models });
+ });
+
+ app.post("/v1/chat/completions", async (req, res) => {
+ await this.handleCompletionRequest(req, res, "openai");
+ });
+
+ app.post("/v1beta/models/:modelId\\:streamGenerateContent", async (req, res) => {
+ await this.handleCompletionRequest(req, res, "google");
+ });
+
+ await new Promise<void>((resolve, reject) => {
+ const server = app.listen(this.options.port, LISTEN_ADDRESS, () => {
+ this.server = server;
+ const address = server.address();
+ if (typeof address === "object" && address) {
+ this.boundPort = address.port;
+ } else {
+ this.boundPort = this.options.port;
+ }
+ this.options.onLog?.(`Proxy Server running on http://${LISTEN_ADDRESS}:${this.boundPort}`);
+ resolve();
+ });
+
+ server.once("error", reject);
+ });
+ }
+
+ async stop(): Promise<void> {
+ if (!this.server) {
+ return;
+ }
+
+ const server = this.server;
+ this.server = null;
+ this.boundPort = null;
+
+ await new Promise<void>((resolve, reject) => {
+ server.close((error) => {
+ if (error) {
+ reject(error);
+ return;
+ }
+ resolve();
+ });
+ });
+ }
+
+ private async handleCompletionRequest(req: express.Request, res: express.Response, routeKind: RouteKind): Promise<void> {
+ const body = req.body as OpenAiChatBody | GoogleStreamBody;
+ const modelReference = this.resolveModelReference(body.model, req.params.modelId);
+
+ if (!modelReference) {
+ res.status(400).json({ error: "Model ID is required" });
+ return;
+ }
+
+ const model = this.resolveModel(modelReference);
+ if (!model) {
+ res.status(404).json({ error: `Model ${modelReference} not found` });
+ return;
+ }
+
+ const apiKey = await this.options.modelRegistry.getApiKey(model);
+ if (!apiKey) {
+ res.status(401).json({ error: `No credentials for provider ${model.provider}` });
+ return;
+ }
+
+ const abortController = new AbortController();
+ req.once("close", () => abortController.abort());
+
+ const maxTokens =
+ routeKind === "openai"
+ ? (body as OpenAiChatBody).max_tokens
+ : (body as GoogleStreamBody).generationConfig?.maxOutputTokens;
+
+ const context = this.normalizeContext(body, routeKind);
+ const options: ProviderStreamOptions = {
+ apiKey,
+ temperature: body.temperature,
+ maxTokens,
+ signal: abortController.signal,
+ };
+
+ const eventStream = this.streamModel(model, context, options);
+ const shouldStream = routeKind === "google" ? (body as GoogleStreamBody).stream !== false : (body as OpenAiChatBody).stream === true;
+
+ if (shouldStream) {
+ await this.sendStreamingResponse(eventStream, res, routeKind, model);
+ return;
+ }
+
+ await this.sendBufferedResponse(eventStream, res, routeKind, model);
+ }
+
+ private resolveModelReference(bodyModel: string | undefined, pathModelId: string | undefined): string | undefined {
+ return bodyModel ?? pathModelId;
+ }
+
+ private resolveModel(modelReference: string): Model | undefined {
+ const normalizedReference = modelReference.toLowerCase();
+ const exact = this.options.modelRegistry.getAll().find(
+ (model) =>
+ `${model.provider}/${model.id}`.toLowerCase() === normalizedReference || model.id.toLowerCase() === normalizedReference,
+ );
+ if (exact) {
+ return exact;
+ }
+
+ const slashIndex = modelReference.indexOf("/");
+ if (slashIndex === -1) {
+ return undefined;
+ }
+
+ const provider = modelReference.slice(0, slashIndex);
+ const modelId = modelReference.slice(slashIndex + 1);
+ return this.options.modelRegistry.find(provider, modelId);
+ }
+
+ private normalizeContext(body: OpenAiChatBody | GoogleStreamBody, routeKind: RouteKind): Context {
+ if (routeKind === "google") {
+ return this.normalizeGoogleContext(body as GoogleStreamBody);
+ }
+
+ return this.normalizeOpenAiContext(body as OpenAiChatBody);
+ }
+
+ private normalizeOpenAiContext(body: OpenAiChatBody): Context {
+ const messages = body.messages ?? [];
+ const systemPrompt = messages.find((message) => message.role === "system")?.content;
+ const normalizedMessages = messages
+ .filter((message) => message.role !== "system")
+ .map((message) => this.normalizeOpenAiMessage(message));
+
+ return {
+ systemPrompt: typeof systemPrompt === "string" ? systemPrompt : undefined,
+ messages: normalizedMessages,
+ };
+ }
+
+ private normalizeGoogleContext(body: GoogleStreamBody): Context {
+ const systemPrompt = body.systemInstruction?.parts?.map((part) => part.text ?? "").join("") || undefined;
+ const normalizedMessages = (body.contents ?? [])
+ .map((content) => {
+ const textContent = (content.parts ?? [])
+ .filter((part) => typeof part.text === "string")
+ .map((part) => ({ type: "text" as const, text: part.text ?? "" }));
+
+ if (content.role === "user") {
+ return this.createUserMessage(textContent);
+ }
+
+ return this.createAssistantMessage(textContent);
+ })
+ .filter((message) => message.content.length > 0);
+
+ return {
+ systemPrompt,
+ messages: normalizedMessages,
+ };
+ }
+
+ private normalizeOpenAiMessage(message: OpenAiMessage): Context["messages"][number] {
+ if (message.role === "assistant") {
+ return this.createAssistantMessage(this.normalizeContent(message.content));
+ }
+
+ return this.createUserMessage(this.normalizeContent(message.content));
+ }
+
+ private createUserMessage(content: string | { type: "text"; text: string }[]): Context["messages"][number] {
+ return {
+ role: "user",
+ content,
+ timestamp: Date.now(),
+ };
+ }
+
+ private createAssistantMessage(content: string | { type: "text"; text: string }[]): AssistantMessage {
+ const normalizedContent =
+ typeof content === "string" ? [{ type: "text" as const, text: content }] : content;
+
+ return {
+ role: "assistant",
+ content: normalizedContent,
+ api: "google-gemini-cli" as Api,
+ provider: "google-gemini-cli",
+ model: "proxy",
+ usage: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ totalTokens: 0,
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
+ },
+ stopReason: "stop",
+ timestamp: Date.now(),
+ };
+ }
+
+ private normalizeContent(content: string | Array<{ type?: string; text?: string }> | undefined): string | { type: "text"; text: string }[] {
+ if (typeof content === "string") {
+ return content;
+ }
+
+ return (content ?? [])
+ .filter((part) => typeof part.text === "string")
+ .map((part) => ({ type: "text" as const, text: part.text ?? "" }));
+ }
+
+ private async sendStreamingResponse(
+ eventStream: AssistantMessageEventStream,
+ res: express.Response,
+ routeKind: RouteKind,
+ model: Model,
+ ): Promise<void> {
+ res.status(200);
+ res.setHeader("Content-Type", routeKind === "openai" ? SSE_CONTENT_TYPE : NDJSON_CONTENT_TYPE);
+ res.setHeader("Cache-Control", "no-cache");
+ res.setHeader("Connection", "keep-alive");
+
+ for await (const event of eventStream) {
+ if (event.type === "text_delta") {
+ if (routeKind === "openai") {
+ res.write(`data: ${JSON.stringify(this.buildOpenAiChunk(model, event.delta))}\n\n`);
+ } else {
+ res.write(`${JSON.stringify(this.buildGoogleChunk(event.delta))}\n`);
+ }
+ }
+
+ if (event.type === "done") {
+ if (routeKind === "openai") {
+ res.write("data: [DONE]\n\n");
+ }
+ res.end();
+ return;
+ }
+
+ if (event.type === "error") {
+ if (!res.headersSent) {
+ res.status(500).json({ error: event.error.errorMessage ?? "Proxy stream failed" });
+ } else {
+ res.end();
+ }
+ return;
+ }
+ }
+
+ res.end();
+ }
+
+ private async sendBufferedResponse(
+ eventStream: AssistantMessageEventStream,
+ res: express.Response,
+ routeKind: RouteKind,
+ model: Model,
+ ): Promise<void> {
+ const assistantMessage = await eventStream.result();
+ const text = this.extractText(assistantMessage);
+
+ if (routeKind === "openai") {
+ res.json({
+ id: `chatcmpl-${Date.now()}`,
+ object: "chat.completion",
+ created: Math.floor(Date.now() / 1000),
+ model: model.id,
+ choices: [
+ {
+ index: 0,
+ message: { role: "assistant", content: text },
+ finish_reason: "stop",
+ },
+ ],
+ usage: assistantMessage.usage,
+ });
+ return;
+ }
+
+ res.json({
+ candidates: [
+ {
+ content: {
+ parts: [{ text }],
+ },
+ },
+ ],
+ usageMetadata: assistantMessage.usage,
+ });
+ }
+
+ private extractText(message: AssistantMessage): string {
+ return message.content
+ .filter((content): content is Extract<AssistantMessage["content"][number], { type: "text" }> => content.type === "text")
+ .map((content) => content.text)
+ .join("");
+ }
+
+ private buildOpenAiChunk(model: Model, delta: string): Record<string, unknown> {
+ return {
+ id: `chatcmpl-${Date.now()}`,
+ object: "chat.completion.chunk",
+ created: Math.floor(Date.now() / 1000),
+ model: model.id,
+ choices: [{ index: 0, delta: { content: delta }, finish_reason: null }],
+ };
+ }
+
+ private buildGoogleChunk(delta: string): Record<string, unknown> {
+ return {
+ candidates: [
+ {
+ content: {
+ parts: [{ text: delta }],
+ },
+ },
+ ],
+ };
+ }
+}
+
+export function createProxyServer(options: ProxyServerOptions): ProxyServer {
+ return new ProxyServer(options);
}
diff --git a/src/resources/extensions/genai-proxy/tests/package-manifest.test.ts b/src/resources/extensions/genai-proxy/tests/package-manifest.test.ts
new file mode 100644
index 000000000..fafa58fa7
--- /dev/null
+++ b/src/resources/extensions/genai-proxy/tests/package-manifest.test.ts
@@ -0,0 +1,26 @@
+import assert from "node:assert/strict";
+import { readFileSync } from "node:fs";
+import { join } from "node:path";
+import { describe, it } from "node:test";
+
+const extensionDir = join("src", "resources", "extensions", "genai-proxy");
+
+describe("genai-proxy package metadata", () => {
+ it("declares the index.ts extension entrypoint", () => {
+ const packageJson = JSON.parse(readFileSync(join(extensionDir, "package.json"), "utf-8")) as {
+ pi?: { extensions?: string[] };
+ };
+
+ assert.deepEqual(packageJson.pi?.extensions, ["./index.ts"]);
+ });
+
+ it("declares a bundled extension manifest", () => {
+ const manifest = JSON.parse(readFileSync(join(extensionDir, "extension-manifest.json"), "utf-8")) as {
+ id: string;
+ tier: string;
+ };
+
+ assert.deepEqual({ id: manifest.id, tier: manifest.tier }, { id: "genai-proxy", tier: "bundled" });
+ });
+});
+
diff --git a/src/resources/extensions/genai-proxy/tests/proxy-command.test.ts b/src/resources/extensions/genai-proxy/tests/proxy-command.test.ts
new file mode 100644
index 000000000..bf5ccd264
--- /dev/null
+++ b/src/resources/extensions/genai-proxy/tests/proxy-command.test.ts
@@ -0,0 +1,45 @@
+import assert from "node:assert/strict";
+import { describe, it } from "node:test";
+import { installGenaiProxyExtension, resolveProxyPort } from "../proxy-command.ts";
+
+describe("genai-proxy command boundary", () => {
+ it("resolves default and explicit proxy ports from flag values", () => {
+ const result = [resolveProxyPort(true), resolveProxyPort("8080")];
+
+ assert.deepEqual(result, [3000, 8080]);
+ });
+
+ it("registers the startup flag and slash command", () => {
+ const registeredFlags: Array<{ name: string; type: string; allowNoValue: boolean; hasStartupHandler: boolean }> = [];
+ const registeredCommands: string[] = [];
+
+ installGenaiProxyExtension({
+ registerCommand: (name) => {
+ registeredCommands.push(name);
+ },
+ registerFlag: (name, options) => {
+ registeredFlags.push({
+ name,
+ type: options.type,
+ allowNoValue: options.allowNoValue ?? false,
+ hasStartupHandler: typeof options.onStartup === "function",
+ });
+ },
+ });
+
+ assert.deepEqual(
+ { flags: registeredFlags, commands: registeredCommands },
+ {
+ flags: [
+ {
+ name: "gemini-cli-proxy",
+ type: "string",
+ allowNoValue: true,
+ hasStartupHandler: true,
+ },
+ ],
+ commands: ["genai-proxy"],
+ },
+ );
+ });
+});
diff --git a/src/resources/extensions/genai-proxy/tests/proxy-server.test.ts b/src/resources/extensions/genai-proxy/tests/proxy-server.test.ts
new file mode 100644
index 000000000..2f7a49b06
--- /dev/null
+++ b/src/resources/extensions/genai-proxy/tests/proxy-server.test.ts
@@ -0,0 +1,215 @@
+import assert from "node:assert/strict";
+import { afterEach, describe, it } from "node:test";
+import { AuthStorage, ModelRegistry } from "@sf-run/pi-coding-agent";
+import type { AssistantMessageEventStream, Api, Model } from "@sf-run/pi-ai";
+import { createProxyServer, type ProxyServer } from "../proxy-server.ts";
+
+let serverCleanup: ProxyServer | undefined;
+
+afterEach(async () => {
+ if (serverCleanup) {
+ await serverCleanup.stop();
+ serverCleanup = undefined;
+ }
+});
+
+function createFakeStream(): AssistantMessageEventStream {
+ const events: Array<
+ | { type: "start"; partial: ReturnType<typeof buildAssistantMessage> }
+ | { type: "text_delta"; contentIndex: number; delta: string; partial: ReturnType<typeof buildAssistantMessage> }
+ | { type: "done"; reason: "stop"; message: ReturnType<typeof buildAssistantMessage> }
+ > = [];
+ let finalResult: ReturnType<typeof buildAssistantMessage> | undefined;
+ let completed = false;
+
+ const stream = {
+ push(event: (typeof events)[number]) {
+ events.push(event);
+ if (event.type === "done") {
+ completed = true;
+ finalResult = event.message;
+ }
+ },
+ end(): void {
+ completed = true;
+ finalResult = buildAssistantMessage([]);
+ },
+ result(): Promise<ReturnType<typeof buildAssistantMessage>> {
+ if (finalResult) {
+ return Promise.resolve(finalResult);
+ }
+ return new Promise((resolve) => {
+ const interval = setInterval(() => {
+ if (finalResult) {
+ clearInterval(interval);
+ resolve(finalResult);
+ }
+ }, 0);
+ });
+ },
+ async *[Symbol.asyncIterator](): AsyncIterator<(typeof events)[number]> {
+ let cursor = 0;
+ while (!completed || cursor < events.length) {
+ const event = events[cursor];
+ if (event) {
+ cursor++;
+ yield event;
+ continue;
+ }
+
+ await new Promise((resolve) => setTimeout(resolve, 0));
+ }
+ },
+ } as unknown as AssistantMessageEventStream;
+
+ queueMicrotask(() => {
+ stream.push({
+ type: "start",
+ partial: buildAssistantMessage([]),
+ });
+ stream.push({
+ type: "text_delta",
+ contentIndex: 0,
+ delta: "hello",
+ partial: buildAssistantMessage([]),
+ });
+ stream.push({
+ type: "done",
+ reason: "stop",
+ message: buildAssistantMessage([{ type: "text", text: "hello" }]),
+ });
+ });
+ return stream;
+}
+
+function buildAssistantMessage(content: { type: "text"; text: string }[]) {
+ return {
+ role: "assistant" as const,
+ content,
+ api: "google-gemini-cli" as Api,
+ provider: "google-gemini-cli",
+ model: "proxy",
+ usage: {
+ input: 0,
+ output: 0,
+ cacheRead: 0,
+ cacheWrite: 0,
+ totalTokens: 0,
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
+ },
+ stopReason: "stop" as const,
+ timestamp: Date.now(),
+ };
+}
+
+function createRegistry() {
+ const authStorage = AuthStorage.inMemory({
+ openai: { type: "api_key", key: "sk-test" },
+ });
+ const modelRegistry = new ModelRegistry(authStorage, undefined);
+ return { modelRegistry };
+}
+
+function createRegistryWithoutCredentials() {
+ return { modelRegistry: new ModelRegistry(AuthStorage.inMemory({}), undefined) };
+}
+
+function createProxyServerForTests(modelRegistry: ModelRegistry): ProxyServer {
+ return createProxyServer({
+ port: 0,
+ modelRegistry: {
+ find: (provider, modelId) => modelRegistry.find(provider, modelId),
+ getAll: () => modelRegistry.getAll(),
+ getApiKey: (model) => modelRegistry.getApiKey(model),
+ },
+ streamModel: () => createFakeStream(),
+ });
+}
+
+function findOpenAiModel(modelRegistry: ModelRegistry): Model {
+ const model = modelRegistry.getAll().find((candidate) => candidate.provider === "openai");
+ if (!model) {
+ throw new Error("Expected at least one openai model in the registry");
+ }
+ return model;
+}
+
+describe("ProxyServer", () => {
+ it("serves model listings on /v1/models", async () => {
+ const { modelRegistry } = createRegistry();
+ const server = createProxyServerForTests(modelRegistry);
+ serverCleanup = server;
+ await server.start();
+
+ const response = await fetch(`http://127.0.0.1:${server.getPort()}/v1/models`);
+ const data = (await response.json()) as { object: string; data: Array<{ object: string }> };
+
+ assert.deepEqual({ ok: response.ok, object: data.object, hasModels: data.data.length > 0 }, { ok: true, object: "list", hasModels: true });
+ });
+
+ it("serves OpenAI completions on /v1/chat/completions", async () => {
+ const { modelRegistry } = createRegistry();
+ const model = findOpenAiModel(modelRegistry);
+ const server = createProxyServerForTests(modelRegistry);
+ serverCleanup = server;
+ await server.start();
+
+ const response = await fetch(`http://127.0.0.1:${server.getPort()}/v1/chat/completions`, {
+ method: "POST",
+ headers: { "content-type": "application/json" },
+ body: JSON.stringify({
+ model: `${model.provider}/${model.id}`,
+ messages: [{ role: "user", content: "hello" }],
+ }),
+ });
+ const data = (await response.json()) as {
+ choices: Array<{ message: { content: string } }>;
+ };
+
+ assert.deepEqual(data.choices[0].message.content, "hello");
+ });
+
+ it("streams Google content on /v1beta/models/:modelId:streamGenerateContent", async () => {
+ const { modelRegistry } = createRegistry();
+ const model = findOpenAiModel(modelRegistry);
+ const server = createProxyServerForTests(modelRegistry);
+ serverCleanup = server;
+ await server.start();
+
+ const response = await fetch(
+ `http://127.0.0.1:${server.getPort()}/v1beta/models/${encodeURIComponent(`${model.provider}/${model.id}`)}:streamGenerateContent`,
+ {
+ method: "POST",
+ headers: { "content-type": "application/json" },
+ body: JSON.stringify({
+ contents: [{ role: "user", parts: [{ text: "hello" }] }],
+ }),
+ },
+ );
+ const text = await response.text();
+
+ assert.deepEqual(text.includes("hello"), true);
+ });
+
+ it("returns 401 when credentials are absent", async () => {
+ const { modelRegistry } = createRegistryWithoutCredentials();
+ const model = modelRegistry.getAll().find((candidate) => candidate.provider === "openai");
+ if (!model) {
+ throw new Error("Expected at least one openai model in the registry");
+ }
+ const server = createProxyServerForTests(modelRegistry);
+ serverCleanup = server;
+ await server.start();
+
+ const response = await fetch(`http://127.0.0.1:${server.getPort()}/v1/chat/completions`, {
+ method: "POST",
+ headers: { "content-type": "application/json" },
+ body: JSON.stringify({
+ model: `${model.provider}/${model.id}`,
+ messages: [{ role: "user", content: "hello" }],
+ }),
+ });
+
+ assert.deepEqual(response.status, 401);
+ });
+});
diff --git a/src/resources/extensions/guardrails/index.ts b/src/resources/extensions/guardrails/index.ts
new file mode 100644
index 000000000..aa7b27405
--- /dev/null
+++ b/src/resources/extensions/guardrails/index.ts
@@ -0,0 +1,457 @@
+/**
+ * Guardrails Extension — Security & Redaction
+ *
+ * Ported from the pi community "agents" extension pack.
+ *
+ * Features:
+ * - Redacts secrets from tool results before the LLM sees them
+ * - Blocks dangerous bash commands (rm -rf, sudo, mkfs, etc.)
+ * - Blocks writes to protected paths (.env, .git, .ssh, etc.)
+ */
+
+import type { ExtensionAPI, ExtensionContext } from "@sf-run/pi-coding-agent";
+import * as path from "node:path";
+
+// ============================================================================
+// Secret Redaction
+// ============================================================================
+
+interface RedactionRule {
+ pattern: RegExp;
+ replacement: string;
+}
+
+const SENSITIVE_PATTERNS: RedactionRule[] = [
+ { pattern: /\b(sk-[a-zA-Z0-9]{20,})\b/g, replacement: "[OPENAI_KEY_REDACTED]" },
+ { pattern: /\b(ghp_[a-zA-Z0-9]{36,})\b/g, replacement: "[GITHUB_TOKEN_REDACTED]" },
+ { pattern: /\b(gho_[a-zA-Z0-9]{36,})\b/g, replacement: "[GITHUB_OAUTH_REDACTED]" },
+ { pattern: /\b(xox[baprs]-[a-zA-Z0-9-]{10,})\b/g, replacement: "[SLACK_TOKEN_REDACTED]" },
+ { pattern: /\b(AKIA[A-Z0-9]{16})\b/g, replacement: "[AWS_KEY_REDACTED]" },
+ {
+ pattern: /\b(api[_-]?key|apikey)\s*[=:]\s*['"]?([a-zA-Z0-9_-]{20,})['"]?/gi,
+ replacement: "$1=[REDACTED]",
+ },
+ {
+ pattern: /\b(secret|token|password|passwd|pwd)\s*[=:]\s*['"]?([^\s'"]{8,})['"]?/gi,
+ replacement: "$1=[REDACTED]",
+ },
+ { pattern: /\b(bearer)\s+([a-zA-Z0-9._-]{20,})\b/gi, replacement: "Bearer [REDACTED]" },
+ { pattern: /(mongodb(\+srv)?:\/\/[^:]+:)[^@]+(@)/gi, replacement: "$1[REDACTED]$3" },
+ { pattern: /(postgres(ql)?:\/\/[^:]+:)[^@]+(@)/gi, replacement: "$1[REDACTED]$3" },
+ { pattern: /(mysql:\/\/[^:]+:)[^@]+(@)/gi, replacement: "$1[REDACTED]$3" },
+ { pattern: /(redis:\/\/[^:]+:)[^@]+(@)/gi, replacement: "$1[REDACTED]$3" },
+ {
+ pattern: /-----BEGIN (RSA |EC |OPENSSH |)PRIVATE KEY-----[\s\S]*?-----END \1PRIVATE KEY-----/g,
+ replacement: "[PRIVATE_KEY_REDACTED]",
+ },
+];
+
+const SENSITIVE_FILES: { pattern: RegExp; desc: string }[] = [
+ { pattern: /\.env$/, desc: ".env" },
+ { pattern: /\.env\.(?!example$)[^/]+$/, desc: ".env local/override" },
+ { pattern: /\.dev\.vars($|\.[^/]+$)/, desc: ".dev.vars" },
+ { pattern: /secrets?\.(json|ya?ml|toml)$/i, desc: "secrets file" },
+ { pattern: /credentials/i, desc: "credentials file" },
+];
+
+function redactToolResult(toolName: string, filePath: string | undefined, text: string, ctx: ExtensionContext): { content: [{ type: "text"; text: string }] } | undefined {
+ if (toolName === "read" && filePath) {
+ if (/(^|\/)\.env\.example$/i.test(filePath)) {
+ return undefined;
+ }
+ for (const { pattern, desc } of SENSITIVE_FILES) {
+ if (pattern.test(filePath)) {
+ ctx.ui.notify(`🔒 Redacted contents of sensitive file: ${filePath}`, "info");
+ return {
+ content: [{ type: "text", text: `[Contents of ${desc} (${filePath}) redacted for security]` }],
+ };
+ }
+ }
+ }
+
+ let result = text;
+ let modified = false;
+ for (const { pattern, replacement } of SENSITIVE_PATTERNS) {
+ const next = result.replace(pattern, replacement);
+ if (next !== result) {
+ modified = true;
+ result = next;
+ }
+ }
+
+ if (modified) {
+ ctx.ui.notify("🔒 Sensitive data redacted from output", "info");
+ return { content: [{ type: "text", text: result }] };
+ }
+
+ return undefined;
+}
+
+// ============================================================================
+// Command & Path Security
+// ============================================================================
+
+interface DangerousCommand {
+ pattern: RegExp;
+ desc: string;
+}
+
+const DANGEROUS_COMMANDS: DangerousCommand[] = [
+ { pattern: /\brm\s+(-[^\s]*r|--recursive)/, desc: "recursive delete" },
+ { pattern: /\bsudo\b/, desc: "sudo command" },
+ { pattern: /\b(chmod|chown)\b.*777/, desc: "dangerous permissions" },
+ { pattern: /\bmkfs\b/, desc: "filesystem format" },
+ { pattern: /\bdd\b.*\bof=\/dev\//, desc: "raw device write" },
+ { pattern: />\s*\/dev\/sd[a-z]/, desc: "raw device overwrite" },
+ { pattern: /\bkill\s+-9\s+-1\b/, desc: "kill all processes" },
+ { pattern: /:\(\)\s*\{\s*:\s*\|\s*:\s*&\s*\}\s*;/, desc: "fork bomb" },
+];
+
+const PROTECTED_PATHS: { pattern: RegExp; desc: string }[] = [
+ { pattern: /\.env($|\.(?!example))/, desc: "environment file" },
+ { pattern: /\.dev\.vars($|\.[^/]+$)/, desc: "dev vars file" },
+ { pattern: /node_modules\//, desc: "node_modules" },
+ { pattern: /^\.git\/|\/\.git\//, desc: "git directory" },
+ { pattern: /\.pem$|\.key$/, desc: "private key file" },
+ { pattern: /id_rsa|id_ed25519|id_ecdsa/, desc: "SSH key" },
+ { pattern: /\.ssh\//, desc: ".ssh directory" },
+ { pattern: /secrets?\.(json|ya?ml|toml)$/i, desc: "secrets file" },
+ { pattern: /credentials/i, desc: "credentials file" },
+];
+
+const SOFT_PROTECTED_PATHS: { pattern: RegExp; desc: string }[] = [
+ { pattern: /package-lock\.json$/, desc: "package-lock.json" },
+ { pattern: /yarn\.lock$/, desc: "yarn.lock" },
+ { pattern: /pnpm-lock\.yaml$/, desc: "pnpm-lock.yaml" },
+];
+
+const DANGEROUS_BASH_WRITES: RegExp[] = [
+ />\s*\.env(?!\.example)(\b|$)/,
+ />\s*\.dev\.vars/,
+ />\s*.*\.pem/,
+ />\s*.*\.key/,
+ /tee\s+.*\.env(?!\.example)(\b|$)/,
+ /tee\s+.*\.dev\.vars/,
+ /cp\s+.*\s+\.env(?!\.example)(\b|$)/,
+ /mv\s+.*\s+\.env(?!\.example)(\b|$)/,
+];
+
+async function checkBashCommand(command: string, ctx: ExtensionContext): Promise<{ block: true; reason: string } | undefined> {
+ for (const { pattern, desc } of DANGEROUS_COMMANDS) {
+ if (pattern.test(command)) {
+ if (!ctx.hasUI) {
+ return { block: true, reason: `Blocked ${desc} (no UI to confirm)` };
+ }
+ const ok = await ctx.ui.confirm(`⚠️ Dangerous command: ${desc}`, command);
+ if (!ok) {
+ return { block: true, reason: `Blocked ${desc} by user` };
+ }
+ break;
+ }
+ }
+
+ for (const pattern of DANGEROUS_BASH_WRITES) {
+ if (pattern.test(command)) {
+ ctx.ui.notify("🛡️ Blocked bash write to protected path", "warning");
+ return { block: true, reason: "Bash command writes to protected path" };
+ }
+ }
+
+ return undefined;
+}
+
+async function checkWritePath(filePath: string, ctx: ExtensionContext): Promise<{ block: true; reason: string } | undefined> {
+ const normalized = path.normalize(filePath);
+
+ for (const { pattern, desc } of PROTECTED_PATHS) {
+ if (pattern.test(normalized)) {
+ ctx.ui.notify(`🛡️ Blocked write to ${desc}: ${filePath}`, "warning");
+ return { block: true, reason: `Protected path: ${desc}` };
+ }
+ }
+
+ for (const { pattern, desc } of SOFT_PROTECTED_PATHS) {
+ if (pattern.test(normalized)) {
+ if (!ctx.hasUI) {
+ return { block: true, reason: `Protected path (no UI): ${desc}` };
+ }
+ const ok = await ctx.ui.confirm(`⚠️ Modifying ${desc}`, `Are you sure you want to modify ${filePath}?`);
+ if (!ok) {
+ return { block: true, reason: `User blocked write to ${desc}` };
+ }
+ break;
+ }
+ }
+
+ return undefined;
+}
+
+// ============================================================================
+// Safe Git
+// ============================================================================
+
+type PromptLevel = "high" | "medium" | "none";
+type Severity = "high" | "medium";
+
+interface SafeGitConfig {
+ promptLevel?: PromptLevel;
+ enabledByDefault?: boolean;
+}
+
+const SAFE_GIT_DEFAULTS: Required = {
+ promptLevel: "medium",
+ enabledByDefault: true,
+};
+
+const GIT_PATTERNS: { pattern: RegExp; action: string; severity: Severity }[] = [
+ // High risk
+ { pattern: /\bgit\s+push\s+.*--force(-with-lease)?\b/i, action: "force push", severity: "high" },
+ { pattern: /\bgit\s+reset\s+--hard\b/i, action: "hard reset", severity: "high" },
+ { pattern: /\bgit\s+clean\s+-[a-z]*f/i, action: "clean (remove untracked files)", severity: "high" },
+ { pattern: /\bgit\s+stash\s+(drop|clear)\b/i, action: "drop/clear stash", severity: "high" },
+ { pattern: /\bgit\s+branch\s+-[dD]\b/i, action: "delete branch", severity: "high" },
+ { pattern: /\bgit\s+reflog\s+expire\b/i, action: "expire reflog", severity: "high" },
+ // Medium risk
+ { pattern: /\bgit\s+push\b/i, action: "push", severity: "medium" },
+ { pattern: /\bgit\s+commit\b/i, action: "commit", severity: "medium" },
+ { pattern: /\bgit\s+rebase\b/i, action: "rebase", severity: "medium" },
+ { pattern: /\bgit\s+merge\b/i, action: "merge", severity: "medium" },
+ { pattern: /\bgit\s+tag\b/i, action: "create/modify tag", severity: "medium" },
+ { pattern: /\bgit\s+cherry-pick\b/i, action: "cherry-pick", severity: "medium" },
+ { pattern: /\bgit\s+revert\b/i, action: "revert", severity: "medium" },
+ { pattern: /\bgit\s+am\b/i, action: "apply patches", severity: "medium" },
+ // GitHub CLI
+ { pattern: /\bgh\s+\S+/i, action: "GitHub CLI", severity: "medium" },
+];
+
+const severityIcons: Record = {
+ high: "🔴",
+ medium: "🟡",
+};
+
+function getSafeGitConfig(
+ ctx: ExtensionContext,
+ enabledOverride?: boolean | null,
+ promptLevelOverride?: PromptLevel | null,
+): { enabled: boolean; promptLevel: PromptLevel } {
+ const settings = (ctx as any).settingsManager?.getSettings() ?? {};
+ const config: Required = { ...SAFE_GIT_DEFAULTS, ...(settings.safeGit ?? {}) };
+ return {
+ enabled: enabledOverride !== null && enabledOverride !== undefined ? enabledOverride : config.enabledByDefault,
+ promptLevel: promptLevelOverride !== null && promptLevelOverride !== undefined ? promptLevelOverride : config.promptLevel,
+ };
+}
+
+function shouldPrompt(severity: Severity, promptLevel: PromptLevel): boolean {
+ if (promptLevel === "none") return false;
+ if (promptLevel === "high") return severity === "high";
+ return true;
+}
+
+/**
+ * Scans a shell command for risky git operations and, when one matches,
+ * asks the user for approval (or honors a session-wide allow/block choice).
+ *
+ * Returns a `{ block, reason }` result when the command must not run, or
+ * `undefined` to let it proceed. Overrides take precedence over persisted
+ * settings. Only the first matching pattern is acted on — every branch
+ * inside the loop returns.
+ */
+async function checkGitCommand(
+  command: string,
+  ctx: ExtensionContext,
+  sessionApprovedActions: Set<string>,
+  sessionBlockedActions: Set<string>,
+  enabledOverride?: boolean | null,
+  promptLevelOverride?: PromptLevel | null,
+): Promise<{ block: true; reason: string } | undefined> {
+  const { enabled, promptLevel } = getSafeGitConfig(ctx, enabledOverride, promptLevelOverride);
+  if (!enabled || promptLevel === "none") return undefined;
+
+  for (const { pattern, action, severity } of GIT_PATTERNS) {
+    if (pattern.test(command)) {
+      // Session-wide decisions short-circuit the interactive prompt.
+      if (sessionBlockedActions.has(action)) {
+        ctx.ui.notify(`🚫 Git ${action} auto-blocked (session setting)`, "warning");
+        return { block: true, reason: `Git ${action} blocked by user (session setting)` };
+      }
+      if (sessionApprovedActions.has(action)) {
+        ctx.ui.notify(`✅ Git ${action} auto-approved (session setting)`, "info");
+        return undefined;
+      }
+      if (!shouldPrompt(severity, promptLevel)) {
+        return undefined;
+      }
+
+      const icon = severityIcons[severity];
+      // Without a UI we cannot ask the user, so fail closed.
+      if (!ctx.hasUI) {
+        return { block: true, reason: `Git ${action} blocked: requires explicit user approval (no UI available)` };
+      }
+
+      const title =
+        severity === "high"
+          ? `${icon} ⚠️ HIGH RISK: Git ${action} requires approval`
+          : `${icon} Git ${action} requires approval`;
+
+      const choice = await ctx.ui.select(title, [
+        "✅ Allow this command once",
+        "⏭️ Decline this time (ask again later)",
+        `✅✅ Auto-approve all "git ${action}" for this session only`,
+        `🚫 Auto-block all "git ${action}" for this session only`,
+      ]);
+      // A non-string result means the selector was dismissed — treat as decline.
+      if (typeof choice !== "string") return { block: true, reason: `Git ${action} declined by user` };
+
+      if (!choice || choice.startsWith("⏭️")) {
+        ctx.ui.notify(`Git ${action} declined`, "info");
+        return { block: true, reason: `Git ${action} declined by user` };
+      }
+      if (choice.startsWith("🚫")) {
+        sessionBlockedActions.add(action);
+        ctx.ui.notify(`🚫 All "git ${action}" commands auto-blocked for this session`, "warning");
+        return { block: true, reason: `Git ${action} blocked by user (session setting)` };
+      }
+      // "✅✅" must be tested before the single-approval fallthrough since
+      // both labels start with "✅".
+      if (choice.startsWith("✅✅")) {
+        sessionApprovedActions.add(action);
+        ctx.ui.notify(`✅ All "git ${action}" commands auto-approved for this session`, "info");
+      } else {
+        ctx.ui.notify(`Git ${action} approved once`, "info");
+      }
+      return undefined;
+    }
+  }
+
+  return undefined;
+}
+
+/**
+ * Registers the /safegit family of slash commands. The session override
+ * boxes are shared with the tool_call hook, so changes made here take
+ * effect immediately and last only for the current session.
+ */
+function registerSafeGitCommands(
+  pi: ExtensionAPI,
+  sessionEnabledOverride: { value: boolean | null },
+  sessionPromptLevelOverride: { value: PromptLevel | null },
+) {
+  pi.registerCommand("safegit", {
+    description: "Toggle safe-git protection on/off for this session",
+    handler: async (_, ctx) => {
+      // Toggle relative to the *effective* state so the first use flips
+      // whatever the global default currently is.
+      const { enabled } = getSafeGitConfig(ctx, sessionEnabledOverride.value, sessionPromptLevelOverride.value);
+      sessionEnabledOverride.value = !enabled;
+      ctx.ui.notify(
+        sessionEnabledOverride.value ? "🔒 Safe-git protection ON" : "🔓 Safe-git protection OFF",
+        "info",
+      );
+      ctx.ui.notify("(Temporary for this session)", "info");
+    },
+  });
+
+  pi.registerCommand("safegit-level", {
+    description: "Set prompt level: high, medium, or none",
+    handler: async (args, ctx) => {
+      // Direct form: "/safegit-level high" skips the selector.
+      const arg = typeof args === "string" ? args.trim().toLowerCase() : "";
+      if (arg === "high" || arg === "medium" || arg === "none") {
+        sessionPromptLevelOverride.value = arg;
+        const desc = {
+          high: "🔴 Only high-risk operations require approval",
+          medium: "🟡 Medium and high-risk operations require approval",
+          none: "⚠️ No approval required (protection disabled)",
+        };
+        ctx.ui.notify(`Prompt level: ${arg}`, "info");
+        ctx.ui.notify(desc[arg], "info");
+        ctx.ui.notify("(Temporary for this session)", "info");
+        return;
+      }
+
+      const { promptLevel } = getSafeGitConfig(ctx, sessionEnabledOverride.value, sessionPromptLevelOverride.value);
+      const options = [
+        `🔴 high - Only high-risk (force push, hard reset, etc.)`,
+        `🟡 medium - Medium and high-risk (push, commit, etc.)`,
+        `⚠️ none - No prompts (disable protection)`,
+        `❌ Cancel`,
+      ];
+
+      ctx.ui.notify(`Current level: ${promptLevel}\n`, "info");
+      const choice = await ctx.ui.select("Set prompt level:", options);
+      const selectedChoice = typeof choice === "string" ? choice : undefined;
+      if (!selectedChoice || selectedChoice.startsWith("❌")) {
+        ctx.ui.notify("Cancelled.", "info");
+        return;
+      }
+      // NOTE(review): relies on every option label being "<icon> <level> - …"
+      // so the second space-separated token is the level — keep the option
+      // labels above in that shape.
+      const level = selectedChoice.split(" ")[1] as PromptLevel;
+      sessionPromptLevelOverride.value = level;
+      ctx.ui.notify(`Prompt level set to: ${selectedChoice}`, "info");
+      ctx.ui.notify("(Temporary for this session)", "info");
+    },
+  });
+
+  pi.registerCommand("safegit-status", {
+    description: "Show safe-git status and settings",
+    handler: async (_, ctx) => {
+      const settings = (ctx as any).settingsManager?.getSettings() ?? {};
+      // Merged view of built-in defaults and persisted settings; the type is
+      // inferred from the spread of SAFE_GIT_DEFAULTS.
+      const globalConfig = {
+        ...SAFE_GIT_DEFAULTS,
+        ...(settings.safeGit ?? {}),
+      };
+      const { enabled, promptLevel } = getSafeGitConfig(ctx, sessionEnabledOverride.value, sessionPromptLevelOverride.value);
+
+      const lines = [
+        "─── Safe Git Status ───",
+        "",
+        "Session State:",
+        `  Enabled: ${enabled ? "🔒 ON" : "🔓 OFF"}${sessionEnabledOverride.value !== null ? " (session override)" : ""}`,
+        `  Prompt Level: ${promptLevel}${sessionPromptLevelOverride.value !== null ? " (session override)" : ""}`,
+        "",
+        "Global Defaults:",
+        `  Enabled: ${globalConfig.enabledByDefault ? "ON" : "OFF"}`,
+        `  Prompt Level: ${globalConfig.promptLevel}`,
+        "",
+        "Prompt Levels:",
+        `  🔴 high - force push, hard reset, clean, delete branch`,
+        `  🟡 medium - push, commit, rebase, merge, tag, gh CLI`,
+        "",
+        "Commands: /safegit /safegit-level /safegit-status",
+        "───────────────────────",
+      ];
+      ctx.ui.notify(lines.join("\n"), "info");
+    },
+  });
+}
+
+// ============================================================================
+// Entry Point
+// ============================================================================
+
+/**
+ * Guardrails extension entry point: wires safe-git approval prompts into
+ * bash tool calls, path checks into write/edit calls, and secret redaction
+ * into tool results.
+ */
+export default function guardrails(pi: ExtensionAPI): void {
+  // Git action names (e.g. "push", "reset") the user approved or blocked
+  // for the current session.
+  const sessionApprovedActions = new Set<string>();
+  const sessionBlockedActions = new Set<string>();
+  // Boxed so registerSafeGitCommands can mutate them and the hooks below
+  // observe the change; null means "no session override".
+  const sessionEnabledOverride: { value: boolean | null } = { value: null };
+  const sessionPromptLevelOverride: { value: PromptLevel | null } = { value: null };
+
+  registerSafeGitCommands(pi, sessionEnabledOverride, sessionPromptLevelOverride);
+
+  pi.on("session_start", async (_, ctx) => {
+    // A fresh session starts from the persisted defaults.
+    sessionEnabledOverride.value = null;
+    sessionPromptLevelOverride.value = null;
+    sessionApprovedActions.clear();
+    sessionBlockedActions.clear();
+
+    const { enabled, promptLevel } = getSafeGitConfig(ctx, sessionEnabledOverride.value, sessionPromptLevelOverride.value);
+    if (ctx.hasUI && enabled && promptLevel !== "none") {
+      const promptDesc = promptLevel === "high" ? "🔴 high-risk only" : "🟡 medium+high";
+      ctx.ui.notify(`Safe-git: Protection ${promptDesc}`, "info");
+    }
+  });
+
+  pi.on("tool_call", async (event, ctx) => {
+    if (event.toolName === "bash") {
+      const command = event.input.command as string;
+      // Git checks run first; a block result short-circuits the generic check.
+      const gitResult = await checkGitCommand(command, ctx, sessionApprovedActions, sessionBlockedActions, sessionEnabledOverride.value, sessionPromptLevelOverride.value);
+      if (gitResult) return gitResult;
+      return checkBashCommand(command, ctx);
+    }
+
+    if (event.toolName === "write" || event.toolName === "edit") {
+      const filePath = event.input.path as string;
+      return checkWritePath(filePath, ctx);
+    }
+
+    return undefined;
+  });
+
+  pi.on("tool_result", async (event, ctx) => {
+    if (event.isError) return undefined;
+
+    // Only the first text part is considered for redaction.
+    const textContent = event.content.find(
+      (c): c is { type: "text"; text: string } => c.type === "text",
+    );
+    if (!textContent) return undefined;
+
+    return redactToolResult(event.toolName, event.input.path as string | undefined, textContent.text, ctx);
+  });
+}
diff --git a/src/resources/extensions/ollama/index.ts b/src/resources/extensions/ollama/index.ts
index c0f7e32ce..f2af330c8 100644
--- a/src/resources/extensions/ollama/index.ts
+++ b/src/resources/extensions/ollama/index.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama Extension: First-class local LLM support
+// sf — Ollama Extension: First-class local LLM support
/**
* Ollama Extension
*
diff --git a/src/resources/extensions/ollama/model-capabilities.ts b/src/resources/extensions/ollama/model-capabilities.ts
index f44506fbf..50eac0927 100644
--- a/src/resources/extensions/ollama/model-capabilities.ts
+++ b/src/resources/extensions/ollama/model-capabilities.ts
@@ -1,4 +1,4 @@
-// GSD2 — Known model capability table for Ollama models
+// sf — Known model capability table for Ollama models
/**
* Maps well-known Ollama model families to their capabilities.
diff --git a/src/resources/extensions/ollama/ndjson-stream.ts b/src/resources/extensions/ollama/ndjson-stream.ts
index 32065aa4e..dd3ac4aba 100644
--- a/src/resources/extensions/ollama/ndjson-stream.ts
+++ b/src/resources/extensions/ollama/ndjson-stream.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama Extension: NDJSON streaming parser
+// sf — Ollama Extension: NDJSON streaming parser
/**
* Parses a streaming NDJSON (newline-delimited JSON) response body into
diff --git a/src/resources/extensions/ollama/ollama-chat-provider.ts b/src/resources/extensions/ollama/ollama-chat-provider.ts
index c6847b30b..ee16e217d 100644
--- a/src/resources/extensions/ollama/ollama-chat-provider.ts
+++ b/src/resources/extensions/ollama/ollama-chat-provider.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama Extension: Native /api/chat stream provider
+// sf — Ollama Extension: Native /api/chat stream provider
/**
* Implements the "ollama-chat" API provider, streaming responses directly
diff --git a/src/resources/extensions/ollama/ollama-client.ts b/src/resources/extensions/ollama/ollama-client.ts
index 2408215fd..3c453360b 100644
--- a/src/resources/extensions/ollama/ollama-client.ts
+++ b/src/resources/extensions/ollama/ollama-client.ts
@@ -1,4 +1,4 @@
-// GSD2 — HTTP client for Ollama REST API
+// sf — HTTP client for Ollama REST API
/**
* Low-level HTTP client for the Ollama REST API.
diff --git a/src/resources/extensions/ollama/ollama-commands.ts b/src/resources/extensions/ollama/ollama-commands.ts
index 684117962..89ae7386c 100644
--- a/src/resources/extensions/ollama/ollama-commands.ts
+++ b/src/resources/extensions/ollama/ollama-commands.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama slash commands
+// sf — Ollama slash commands
/**
* Registers /ollama slash commands for managing local Ollama models.
diff --git a/src/resources/extensions/ollama/ollama-discovery.ts b/src/resources/extensions/ollama/ollama-discovery.ts
index 11648f210..67521c4f1 100644
--- a/src/resources/extensions/ollama/ollama-discovery.ts
+++ b/src/resources/extensions/ollama/ollama-discovery.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama model discovery and capability detection
+// sf — Ollama model discovery and capability detection
/**
* Discovers locally available Ollama models and enriches them with
diff --git a/src/resources/extensions/ollama/ollama-tool.ts b/src/resources/extensions/ollama/ollama-tool.ts
index 0559a4118..b8fa2d60c 100644
--- a/src/resources/extensions/ollama/ollama-tool.ts
+++ b/src/resources/extensions/ollama/ollama-tool.ts
@@ -1,4 +1,4 @@
-// GSD2 — LLM-callable Ollama management tool
+// sf — LLM-callable Ollama management tool
/**
* Registers an ollama_manage tool that the LLM can call to interact
* with the local Ollama instance — list models, pull new ones, check status.
diff --git a/src/resources/extensions/ollama/tests/model-capabilities.test.ts b/src/resources/extensions/ollama/tests/model-capabilities.test.ts
index 61af68e9b..a7f9a191d 100644
--- a/src/resources/extensions/ollama/tests/model-capabilities.test.ts
+++ b/src/resources/extensions/ollama/tests/model-capabilities.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Tests for Ollama model capability detection
+// sf — Tests for Ollama model capability detection
import { describe, it } from "node:test";
import assert from "node:assert/strict";
import {
diff --git a/src/resources/extensions/ollama/tests/ollama-chat-provider-stream.test.ts b/src/resources/extensions/ollama/tests/ollama-chat-provider-stream.test.ts
index bc3982c6e..a667afb82 100644
--- a/src/resources/extensions/ollama/tests/ollama-chat-provider-stream.test.ts
+++ b/src/resources/extensions/ollama/tests/ollama-chat-provider-stream.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Regression test: Ollama streaming must not drop content on done:true chunks (#3576)
+// sf — Regression test: Ollama streaming must not drop content on done:true chunks (#3576)
// Copyright (c) 2026 Jeremy McSpadden
import { describe, it } from "node:test";
diff --git a/src/resources/extensions/ollama/tests/ollama-client.test.ts b/src/resources/extensions/ollama/tests/ollama-client.test.ts
index 0deae397a..d30acd5ce 100644
--- a/src/resources/extensions/ollama/tests/ollama-client.test.ts
+++ b/src/resources/extensions/ollama/tests/ollama-client.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Tests for Ollama HTTP client
+// sf — Tests for Ollama HTTP client
import { describe, it, beforeEach, afterEach } from "node:test";
import assert from "node:assert/strict";
import { getOllamaHost } from "../ollama-client.js";
diff --git a/src/resources/extensions/ollama/tests/ollama-discovery.test.ts b/src/resources/extensions/ollama/tests/ollama-discovery.test.ts
index 02d582d19..ed8f18471 100644
--- a/src/resources/extensions/ollama/tests/ollama-discovery.test.ts
+++ b/src/resources/extensions/ollama/tests/ollama-discovery.test.ts
@@ -1,4 +1,4 @@
-// GSD2 — Tests for Ollama model discovery and enrichment
+// sf — Tests for Ollama model discovery and enrichment
import { describe, it } from "node:test";
import assert from "node:assert/strict";
import { discoverModels } from "../ollama-discovery.js";
diff --git a/src/resources/extensions/ollama/thinking-parser.ts b/src/resources/extensions/ollama/thinking-parser.ts
index 9c060761c..3e041897b 100644
--- a/src/resources/extensions/ollama/thinking-parser.ts
+++ b/src/resources/extensions/ollama/thinking-parser.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama Extension: Stateful tag stream parser
+// sf — Ollama Extension: Stateful tag stream parser
/**
 * Extracts <think>...</think> thinking blocks from a streaming text response.
diff --git a/src/resources/extensions/ollama/types.ts b/src/resources/extensions/ollama/types.ts
index 51e9beb01..af93607e1 100644
--- a/src/resources/extensions/ollama/types.ts
+++ b/src/resources/extensions/ollama/types.ts
@@ -1,4 +1,4 @@
-// GSD2 — Ollama API response types
+// sf — Ollama API response types
/**
* Type definitions for the Ollama REST API.
diff --git a/src/resources/extensions/search-the-web/provider.ts b/src/resources/extensions/search-the-web/provider.ts
index f1d0416a3..418ea2038 100644
--- a/src/resources/extensions/search-the-web/provider.ts
+++ b/src/resources/extensions/search-the-web/provider.ts
@@ -18,8 +18,8 @@ import { resolveSearchProviderFromPreferences } from '../sf/preferences.js'
// Compute authFilePath locally instead of importing from app-paths.ts,
// because extensions are copied to ~/.sf/agent/extensions/ at runtime
// where the relative import '../../../app-paths.ts' doesn't resolve.
-const gsdHome = process.env.SF_HOME || join(homedir(), '.sf')
-const authFilePath = join(gsdHome, 'agent', 'auth.json')
+const sfHome = process.env.SF_HOME || join(homedir(), '.sf')
+const authFilePath = join(sfHome, 'agent', 'auth.json')
export type SearchProvider = 'tavily' | 'brave' | 'ollama' | 'combosearch'
export type SearchProviderPreference = SearchProvider | 'auto'
diff --git a/src/resources/extensions/sf-notify/index.ts b/src/resources/extensions/sf-notify/index.ts
new file mode 100644
index 000000000..2cb1ca133
--- /dev/null
+++ b/src/resources/extensions/sf-notify/index.ts
@@ -0,0 +1,428 @@
+/**
+ * SF-Notify — Background task completion notifications
+ *
+ * Detects long-running tasks and notifies you when they complete
+ * while the terminal is backgrounded.
+ */
+
+import type { ExtensionAPI, ExtensionContext } from "@sf-run/pi-coding-agent";
+import * as fs from "node:fs/promises";
+import * as os from "node:os";
+import * as path from "node:path";
+import {
+ type BackgroundNotifyConfig,
+ type TerminalInfo,
+ playBeep,
+ displayOSXNotification,
+ speakMessage,
+ bringTerminalToFront,
+ detectTerminalInfo,
+ isTerminalInBackground,
+ checkSayAvailable,
+ checkTerminalNotifierAvailable,
+ isTerminalNotifierAvailable,
+ BEEP_SOUNDS,
+ SAY_MESSAGES,
+ getCurrentDirName,
+ replaceMessageTemplates,
+} from "../shared/notify.js";
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Types & Constants
+// ─────────────────────────────────────────────────────────────────────────────
+
+// Mutable per-session state for the notify extension.
+interface SessionState {
+  // Session-only overrides; null means "use the configured default".
+  beepOverride: boolean | null;
+  beepSoundOverride: string | null;
+  focusOverride: boolean | null;
+  sayOverride: boolean | null;
+  sayMessageOverride: string | null;
+  // Terminal identity detected at session start; used to decide whether
+  // the terminal is backgrounded and which app to bring to front.
+  terminalInfo: TerminalInfo;
+  // Timestamp (ms) of the last tool event; undefined when no agent run is active.
+  lastToolTime: number | undefined;
+  // Accumulated active time (ms) for the current agent run.
+  totalActiveTime: number;
+}
+
+// Built-in defaults, used when no backgroundNotify settings are present
+// in either the settings manager or the settings files on disk.
+const DEFAULT_CONFIG: BackgroundNotifyConfig = {
+  thresholdMs: 2000, // minimum task duration (ms) before any notification fires
+  beep: true,
+  beepSound: "Funk",
+  bringToFront: false,
+  say: false,
+  sayMessage: "Done in {dirname}", // {dirname} is expanded at notify time
+};
+
+// Human-readable labels reported back to the user describing which
+// notification channels fired for a completed task.
+enum NotificationAction {
+  Beeped = "beeped",
+  Spoke = "spoke",
+  BroughtToFront = "brought to front",
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Settings Loader
+// ─────────────────────────────────────────────────────────────────────────────
+
+/**
+ * Reads the first settings file found, checking ~/.sf then the legacy
+ * ~/.pi location. Returns the parsed JSON, or {} when neither file exists
+ * or parses. The settings JSON is free-form, hence `any`.
+ */
+async function readSettingsFile(): Promise<any> {
+  const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json");
+  const piPath = path.join(os.homedir(), ".pi", "agent", "settings.json");
+  for (const p of [sfPath, piPath]) {
+    try {
+      const content = await fs.readFile(p, "utf8");
+      return JSON.parse(content);
+    } catch {
+      // Missing or malformed file — fall through to the next candidate.
+    }
+  }
+  return {};
+}
+
+async function getBackgroundNotifyConfig(
+ ctx: ExtensionContext,
+ overrides?: Partial
+): Promise {
+ const settings = (ctx as any).settingsManager?.getSettings() ?? {};
+ let config: BackgroundNotifyConfig;
+ if (settings.backgroundNotify) {
+ config = { ...DEFAULT_CONFIG, ...settings.backgroundNotify };
+ } else {
+ const fileSettings = await readSettingsFile();
+ config = { ...DEFAULT_CONFIG, ...fileSettings.backgroundNotify };
+ }
+ if (overrides) config = { ...config, ...overrides };
+ return config;
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Helpers
+// ─────────────────────────────────────────────────────────────────────────────
+
+function resetSessionState(state: SessionState): void {
+ state.beepOverride = null;
+ state.beepSoundOverride = null;
+ state.focusOverride = null;
+ state.sayOverride = null;
+ state.sayMessageOverride = null;
+ state.lastToolTime = undefined;
+ state.totalActiveTime = 0;
+}
+
+/** Computes the effective settings: session overrides win over persisted config. */
+function getEffective(state: SessionState, config: BackgroundNotifyConfig) {
+  const beep = state.beepOverride ?? config.beep;
+  const focus = state.focusOverride ?? config.bringToFront;
+  const say = state.sayOverride ?? config.say;
+  const sound = state.beepSoundOverride ?? config.beepSound;
+  const sayMessage = state.sayMessageOverride ?? config.sayMessage;
+  return { beep, focus, say, sound, sayMessage };
+}
+
+/**
+ * Pulls the payload text out of a menu option label: strips the icon
+ * prefix, a trailing " ✓" marker, and surrounding double quotes. Returns
+ * null for separators, the cancel entry, empty input, or labels that do
+ * not carry the expected prefix.
+ */
+function extractOptionText(action: string, iconPrefix: string): string | null {
+  const isNoise = !action || action === "❌ Cancel" || action === "───";
+  if (isNoise || !action.startsWith(iconPrefix)) return null;
+  return action
+    .replace(iconPrefix, "")
+    .replace(" ✓", "")
+    .replace(/^"|"$/g, "");
+}
+
+/**
+ * Merges the given updates into the backgroundNotify section of
+ * ~/.sf/agent/settings.json, creating the file and directories if needed.
+ * Rethrows on write failure after logging. `ctx` is currently unused but
+ * kept for signature stability with the other helpers.
+ */
+async function saveGlobalSettings(ctx: ExtensionContext, updates: Partial<BackgroundNotifyConfig>): Promise<void> {
+  try {
+    const sfPath = path.join(os.homedir(), ".sf", "agent", "settings.json");
+    let fileSettings: any = {};
+    try {
+      const content = await fs.readFile(sfPath, "utf8");
+      fileSettings = JSON.parse(content);
+    } catch {
+      // no file yet
+    }
+    fileSettings.backgroundNotify = { ...(fileSettings.backgroundNotify ?? {}), ...updates };
+    await fs.mkdir(path.dirname(sfPath), { recursive: true });
+    await fs.writeFile(sfPath, JSON.stringify(fileSettings, null, 2), "utf8");
+  } catch (err) {
+    console.error("Failed to save settings:", err);
+    throw err;
+  }
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Main Extension
+// ─────────────────────────────────────────────────────────────────────────────
+
+/**
+ * SF-Notify entry point. Tracks how long each agent run stays active and,
+ * when the run ends while the terminal is backgrounded and the duration
+ * exceeds the configured threshold, fires the enabled notification
+ * channels (beep/notification, bring-to-front, speech).
+ */
+export default function sfNotify(pi: ExtensionAPI) {
+  const state: SessionState = {
+    beepOverride: null,
+    beepSoundOverride: null,
+    focusOverride: null,
+    sayOverride: null,
+    sayMessageOverride: null,
+    terminalInfo: {},
+    lastToolTime: undefined,
+    totalActiveTime: 0,
+  };
+
+  registerCommands(pi, state);
+
+  pi.on("session_start", async (_, ctx) => {
+    resetSessionState(state);
+    // Detect the terminal and probe optional helpers up front so the
+    // agent_end path stays cheap.
+    state.terminalInfo = await detectTerminalInfo();
+    await checkSayAvailable();
+    await checkTerminalNotifierAvailable();
+
+    if (ctx.hasUI && (await isTerminalNotifierAvailable())) {
+      ctx.ui.notify("📢 Using terminal-notifier for notifications (clicking will activate Terminal)", "info");
+    }
+  });
+
+  pi.on("agent_start", () => {
+    state.lastToolTime = Date.now();
+    state.totalActiveTime = 0;
+  });
+
+  pi.on("tool_result", () => {
+    // Accumulate the active interval since the previous tool event.
+    if (state.lastToolTime) {
+      state.totalActiveTime += Date.now() - state.lastToolTime;
+    }
+    state.lastToolTime = Date.now();
+  });
+
+  pi.on("agent_end", async (_, ctx) => {
+    if (!state.lastToolTime) return;
+    state.totalActiveTime += Date.now() - state.lastToolTime;
+    const duration = state.totalActiveTime;
+    // Reset timers immediately so a re-entrant agent_start sees clean state.
+    state.lastToolTime = undefined;
+    state.totalActiveTime = 0;
+
+    const config = await getBackgroundNotifyConfig(ctx);
+    const eff = getEffective(state, config);
+
+    if (!eff.beep && !eff.focus && !eff.say) return;
+    if (duration < config.thresholdMs) return;
+
+    // Only notify when the terminal is not the foreground app.
+    const isBackground = await isTerminalInBackground(state.terminalInfo);
+    if (!isBackground) return;
+
+    const tasks: Promise<unknown>[] = [];
+    const actions: NotificationAction[] = [];
+
+    if (eff.beep) {
+      // Fire-and-forget: the OS notification carries the (templated) message.
+      const notificationMessage = replaceMessageTemplates(eff.sayMessage);
+      displayOSXNotification(notificationMessage, eff.sound, state.terminalInfo);
+      actions.push(NotificationAction.Beeped);
+    }
+    if (eff.focus) {
+      tasks.push(bringTerminalToFront(state.terminalInfo));
+      actions.push(NotificationAction.BroughtToFront);
+    }
+    if (eff.say) {
+      speakMessage(eff.sayMessage);
+      actions.push(NotificationAction.Spoke);
+    }
+
+    await Promise.all(tasks);
+
+    if (ctx.hasUI) {
+      ctx.ui.notify(`Task completed in ${(duration / 1000).toFixed(1)}s (${actions.join(", ")})`, "info");
+    }
+  });
+}
+
+// ─────────────────────────────────────────────────────────────────────────────
+// Commands
+// ─────────────────────────────────────────────────────────────────────────────
+
+/**
+ * Registers the /notify-* slash commands. Toggles write session overrides
+ * into `state`; only /notify-threshold and /notify-save-global persist
+ * anything to disk.
+ */
+function registerCommands(pi: ExtensionAPI, state: SessionState) {
+  pi.registerCommand("notify-beep", {
+    description: "Toggle beep notification",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const current = state.beepOverride ?? config.beep;
+
+      if (current) {
+        state.beepOverride = false;
+        ctx.ui.notify("🔇 Beep OFF", "warning");
+      } else {
+        // Turning beep on also lets the user pick a sound.
+        const currentSound = state.beepSoundOverride ?? config.beepSound;
+        const options = [
+          "🔊 Use current sound",
+          "───",
+          ...BEEP_SOUNDS.map((s) => `🎵 ${s}${s === currentSound ? " ✓" : ""}`),
+          "───",
+          "❌ Cancel",
+        ];
+        const action = await ctx.ui.select(`Turn beep ON - Select sound (current: ${currentSound})`, options);
+        const selectedAction = typeof action === "string" ? action : undefined;
+        if (!selectedAction || selectedAction === "❌ Cancel" || selectedAction === "───") return;
+        if (selectedAction === "🔊 Use current sound") {
+          state.beepOverride = true;
+          ctx.ui.notify(`🔊 Beep ON (${currentSound})`, "info");
+          playBeep(currentSound);
+        } else {
+          const sound = extractOptionText(selectedAction, "🎵 ");
+          if (sound) {
+            state.beepOverride = true;
+            state.beepSoundOverride = sound;
+            ctx.ui.notify(`🔊 Beep ON (${sound})`, "info");
+            playBeep(sound);
+          }
+        }
+      }
+    },
+  });
+
+  pi.registerCommand("notify-focus", {
+    description: "Toggle bring-to-front",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const current = state.focusOverride ?? config.bringToFront;
+      state.focusOverride = !current;
+      ctx.ui.notify(state.focusOverride ? "🪟 Focus ON" : "⬜ Focus OFF", state.focusOverride ? "info" : "warning");
+    },
+  });
+
+  pi.registerCommand("notify-say", {
+    description: "Toggle speech notification",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const current = state.sayOverride ?? config.say;
+
+      if (current) {
+        state.sayOverride = false;
+        ctx.ui.notify("🔇 Speech OFF", "warning");
+      } else {
+        const currentMessage = state.sayMessageOverride ?? config.sayMessage;
+        const options = [
+          "🗣️ Use current message",
+          "───",
+          ...SAY_MESSAGES.map((m) => `💬 "${m}"${m === currentMessage ? " ✓" : ""}`),
+          "───",
+          "✏️ Enter custom message...",
+          "───",
+          "❌ Cancel",
+        ];
+        const action = await ctx.ui.select(`Turn speech ON - Select message (current: "${currentMessage}")`, options);
+        const selectedAction = typeof action === "string" ? action : undefined;
+        if (!selectedAction || selectedAction === "❌ Cancel" || selectedAction === "───") return;
+        if (selectedAction === "🗣️ Use current message") {
+          state.sayOverride = true;
+          ctx.ui.notify(`🗣️ Speech ON ("${currentMessage}")`, "info");
+          speakMessage(currentMessage);
+        } else if (selectedAction.startsWith("💬 ")) {
+          // Reuse extractOptionText so the trailing " ✓" marker and the
+          // surrounding quotes are stripped in the right order (the previous
+          // ad-hoc replace chain dropped the wrong quote for messages that
+          // themselves contain one).
+          const message = extractOptionText(selectedAction, "💬 ");
+          if (message) {
+            state.sayOverride = true;
+            state.sayMessageOverride = message;
+            ctx.ui.notify(`🗣️ Speech ON ("${message}")`, "info");
+            speakMessage(message);
+          }
+        } else if (selectedAction === "✏️ Enter custom message...") {
+          const customMessage = await ctx.ui.input("Enter message to speak");
+          if (customMessage && customMessage.trim()) {
+            state.sayOverride = true;
+            state.sayMessageOverride = customMessage.trim();
+            ctx.ui.notify(`🗣️ Speech ON ("${customMessage.trim()}")`, "info");
+            speakMessage(customMessage.trim());
+          }
+        }
+      }
+    },
+  });
+
+  pi.registerCommand("notify-threshold", {
+    description: "Set notification threshold (minimum task duration)",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const options = [
+        `1000ms (1s)${config.thresholdMs === 1000 ? " ✓" : ""}`,
+        `2000ms (2s)${config.thresholdMs === 2000 ? " ✓" : ""}`,
+        `3000ms (3s)${config.thresholdMs === 3000 ? " ✓" : ""}`,
+        `5000ms (5s)${config.thresholdMs === 5000 ? " ✓" : ""}`,
+        `10000ms (10s)${config.thresholdMs === 10000 ? " ✓" : ""}`,
+        "───",
+        "❌ Cancel",
+      ];
+      const action = await ctx.ui.select(`Threshold (current: ${config.thresholdMs}ms)`, options);
+      const selectedAction = typeof action === "string" ? action : undefined;
+      if (!selectedAction || selectedAction === "❌ Cancel" || selectedAction === "───") return;
+      // Parse the numeric prefix back out of the chosen label.
+      const match = selectedAction.match(/^(\d+)ms/);
+      if (match) {
+        const newThreshold = parseInt(match[1], 10);
+        // Threshold has no session override — it is always persisted globally.
+        await saveGlobalSettings(ctx, { thresholdMs: newThreshold });
+        ctx.ui.notify(`⏱️ Threshold set to ${newThreshold}ms`, "info");
+      }
+    },
+  });
+
+  pi.registerCommand("notify-status", {
+    description: "Show notification settings",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const eff = getEffective(state, config);
+      const beepIcon = eff.beep ? "🔊" : "🔇";
+      const focusIcon = eff.focus ? "🪟" : "⬜";
+      const sayIcon = eff.say ? "🗣️" : "🔇";
+      const globalBeepIcon = config.beep ? "🔊" : "🔇";
+      const globalFocusIcon = config.bringToFront ? "🪟" : "⬜";
+      const globalSayIcon = config.say ? "🗣️" : "🔇";
+      const hasOverrides = state.beepOverride !== null || state.focusOverride !== null || state.beepSoundOverride !== null || state.sayOverride !== null || state.sayMessageOverride !== null;
+
+      const lines = [
+        "╭─ Background Notify Status ─╮",
+        "",
+        "Current (Effective):",
+        `  ${beepIcon} Beep: ${eff.beep ? "ON" : "OFF"}`,
+        `  ${focusIcon} Focus: ${eff.focus ? "ON" : "OFF"}`,
+        `  ${sayIcon} Speech: ${eff.say ? "ON" : "OFF"}`,
+        `  💬 Message: "${eff.sayMessage}"`,
+        eff.sayMessage.includes("{dirname}") ? `     → Spoken: "${replaceMessageTemplates(eff.sayMessage)}"` : "",
+        `  🎵 Sound: ${eff.sound}`,
+        `  ⏱️ Threshold: ${config.thresholdMs}ms`,
+        "",
+        "Global Defaults:",
+        `  ${globalBeepIcon} Beep: ${config.beep ? "ON" : "OFF"}`,
+        `  ${globalFocusIcon} Focus: ${config.bringToFront ? "ON" : "OFF"}`,
+        `  ${globalSayIcon} Speech: ${config.say ? "ON" : "OFF"}`,
+        `  💬 Message: "${config.sayMessage}"`,
+        config.sayMessage.includes("{dirname}") ? `     → Spoken: "${replaceMessageTemplates(config.sayMessage)}"` : "",
+        `  🎵 Sound: ${config.beepSound}`,
+        `  ⏱️ Threshold: ${config.thresholdMs}ms`,
+      ];
+
+      if (hasOverrides) {
+        lines.push("", "Session Overrides:");
+        if (state.beepOverride !== null) lines.push(`  ${state.beepOverride ? "🔊" : "🔇"} Beep: ${state.beepOverride ? "ON" : "OFF"}`);
+        if (state.focusOverride !== null) lines.push(`  ${state.focusOverride ? "🪟" : "⬜"} Focus: ${state.focusOverride ? "ON" : "OFF"}`);
+        if (state.beepSoundOverride !== null) lines.push(`  🎵 Sound: ${state.beepSoundOverride}`);
+        if (state.sayOverride !== null) lines.push(`  ${state.sayOverride ? "🗣️" : "🔇"} Speech: ${state.sayOverride ? "ON" : "OFF"}`);
+        if (state.sayMessageOverride !== null) {
+          lines.push(`  💬 Message: "${state.sayMessageOverride}"`);
+          if (state.sayMessageOverride.includes("{dirname}")) {
+            lines.push(`     → Spoken: "${replaceMessageTemplates(state.sayMessageOverride)}"`);
+          }
+        }
+      }
+
+      lines.push("", `💻 Terminal: ${state.terminalInfo.terminalApp ?? "(unknown)"}`, "╰────────────────────────────╯");
+      // filter(Boolean) drops the empty strings produced by the conditional rows.
+      ctx.ui.notify(lines.filter(Boolean).join("\n"), "info");
+    },
+  });
+
+  pi.registerCommand("notify-save-global", {
+    description: "Save current settings as global defaults",
+    handler: async (_, ctx) => {
+      const config = await getBackgroundNotifyConfig(ctx);
+      const eff = getEffective(state, config);
+      await saveGlobalSettings(ctx, {
+        beep: eff.beep,
+        bringToFront: eff.focus,
+        beepSound: eff.sound,
+        say: eff.say,
+        sayMessage: eff.sayMessage,
+        thresholdMs: config.thresholdMs,
+      });
+      ctx.ui.notify("✅ Settings saved to ~/.sf/agent/settings.json", "info");
+      const status = [
+        `  ${eff.beep ? "🔊" : "🔇"} Beep: ${eff.beep ? "ON" : "OFF"}`,
+        `  ${eff.focus ? "🪟" : "⬜"} Focus: ${eff.focus ? "ON" : "OFF"}`,
+        `  ${eff.say ? "🗣️" : "🔇"} Speech: ${eff.say ? "ON" : "OFF"}`,
+        `  💬 Message: "${eff.sayMessage}"`,
+        `  🎵 Sound: ${eff.sound}`,
+        `  ⏱️ Threshold: ${config.thresholdMs}ms`,
+      ].filter(Boolean).join("\n");
+      ctx.ui.notify(status, "info");
+    },
+  });
+}
diff --git a/src/resources/extensions/sf-permissions/index.ts b/src/resources/extensions/sf-permissions/index.ts
new file mode 100644
index 000000000..ecf9fd8a7
--- /dev/null
+++ b/src/resources/extensions/sf-permissions/index.ts
@@ -0,0 +1,610 @@
+/**
+ * Permission Extension for Singularity Forge
+ *
+ * Implements layered permission control.
+ *
+ * Interactive mode:
+ * Use `/permission` command to view or change the level.
+ * Use `/permission-mode` to switch between ask vs block.
+ * When changing via command, you'll be asked: session-only or global?
+ *
+ * Print mode (sf -p):
+ * Set SF_PERMISSION_LEVEL env var: SF_PERMISSION_LEVEL=medium sf -p "task"
+ * Operations beyond level will exit with helpful error message.
+ * Use SF_PERMISSION_LEVEL=bypassed for CI/containers (dangerous!)
+ *
+ * Levels:
+ * minimal - Read-only mode (default)
+ * ✅ Read files, ls, grep, git status/log/diff
+ * ❌ No file modifications, no commands with side effects
+ *
+ * low - File operations only
+ * ✅ Create/edit files in project directory
+ * ❌ No package installs, no git commits, no builds
+ *
+ * medium - Development operations
+ * ✅ npm/pip install, git commit/pull, make/build
+ * ❌ No git push, no sudo, no production changes
+ *
+ * high - Full operations
+ * ✅ git push, deployments, scripts
+ * ⚠️ Still prompts for destructive commands (rm -rf, etc.)
+ *
+ * Usage:
+ * sf --extension ./index.ts
+ *
+ * Or add to ~/.sf/agent/extensions/ or .sf/extensions/ for automatic loading.
+ */
+
+import { exec } from "node:child_process";
+import fs from "node:fs";
+import os from "node:os";
+import path from "node:path";
+import type { ExtensionAPI } from "@sf-run/pi-coding-agent";
+import {
+ type PermissionLevel,
+ type PermissionMode,
+ LEVELS,
+ LEVEL_INDEX,
+ LEVEL_INFO,
+ LEVEL_ALLOWED_DESC,
+ PERMISSION_MODES,
+ PERMISSION_MODE_INFO,
+ loadGlobalPermission,
+ saveGlobalPermission,
+ loadGlobalPermissionMode,
+ saveGlobalPermissionMode,
+ classifyCommand,
+ loadPermissionConfig,
+ savePermissionConfig,
+ invalidateConfigCache,
+ type PermissionConfig,
+} from "./permission-core.js";
+
+// Re-export types and constants needed by the hook
+export {
+ type PermissionLevel,
+ type PermissionMode,
+ LEVELS,
+ LEVEL_INFO,
+ PERMISSION_MODES,
+ PERMISSION_MODE_INFO,
+};
+
+// ============================================================================
+// SOUND NOTIFICATION
+// ============================================================================
+
+function playPermissionSound(): void {
+ const isMac = process.platform === "darwin";
+
+ if (isMac) {
+ exec('afplay /System/Library/Sounds/Funk.aiff 2>/dev/null', (err) => {
+ if (err) process.stdout.write("\x07");
+ });
+ } else {
+ process.stdout.write("\x07");
+ }
+}
+
+// ============================================================================
+// STATUS TEXT
+// ============================================================================
+
+const BOLD = "\x1b[1m";
+const RESET = "\x1b[0m";
+const RED = "\x1b[31m";
+const YELLOW = "\x1b[33m";
+const GREEN = "\x1b[32m";
+const CYAN = "\x1b[36m";
+const DIM = "\x1b[2m";
+
+const LEVEL_COLORS: Record = {
+ minimal: RED,
+ low: YELLOW,
+ medium: CYAN,
+ high: GREEN,
+ bypassed: DIM,
+};
+
+function getStatusText(level: PermissionLevel): string {
+ const info = LEVEL_INFO[level];
+ const color = LEVEL_COLORS[level];
+ return `${BOLD}${color}${info.label}${RESET} ${DIM}- ${info.desc}${RESET}`;
+}
+
+// ============================================================================
+// MODE DETECTION
+// ============================================================================
+
+function getPiModeFromArgv(argv: string[] = process.argv): string | undefined {
+ // Support both: --mode rpc and --mode=rpc
+ const eq = argv.find((a) => a.startsWith("--mode="));
+ if (eq) return eq.slice("--mode=".length);
+
+ const idx = argv.indexOf("--mode");
+ if (idx !== -1 && idx + 1 < argv.length) return argv[idx + 1];
+
+ return undefined;
+}
+
+function hasInteractiveUI(ctx: any): boolean {
+ if (!ctx?.hasUI) return false;
+
+ // In non-interactive modes (rpc/json/print), UI prompts are not desired.
+ // We still allow notifications, but block instead of asking.
+ const mode = getPiModeFromArgv()?.toLowerCase();
+ if (mode && mode !== "interactive") return false;
+
+ return true;
+}
+
+function isQuietMode(ctx: any): boolean {
+ if (ctx?.quiet || ctx?.isQuiet) return true;
+ if (ctx?.ui?.quiet || ctx?.ui?.isQuiet) return true;
+ if (ctx?.settings?.quietStartup || ctx?.settings?.quiet) return true;
+
+ const envQuiet = process.env.SF_QUIET?.toLowerCase() || process.env.PI_QUIET?.toLowerCase();
+ if (envQuiet && ["1", "true", "yes"].includes(envQuiet)) return true;
+
+ if (process.argv.includes("--quiet") || process.argv.includes("-q")) return true;
+
+ return isQuietStartupFromSettings();
+}
+
+function isQuietStartupFromSettings(): boolean {
+ const sfSettingsPath = path.join(os.homedir(), ".sf", "agent", "settings.json");
+ try {
+ if (fs.existsSync(sfSettingsPath)) {
+ const raw = fs.readFileSync(sfSettingsPath, "utf-8");
+ const settings = JSON.parse(raw) as { quietStartup?: boolean };
+ return settings.quietStartup === true;
+ }
+ } catch {}
+
+ const piSettingsPath = path.join(os.homedir(), ".pi", "agent", "settings.json");
+ try {
+ if (fs.existsSync(piSettingsPath)) {
+ const raw = fs.readFileSync(piSettingsPath, "utf-8");
+ const settings = JSON.parse(raw) as { quietStartup?: boolean };
+ return settings.quietStartup === true;
+ }
+ } catch {}
+
+ return false;
+}
+
+// ============================================================================
+// STATE MANAGEMENT
+// ============================================================================
+
+export interface PermissionState {
+ currentLevel: PermissionLevel;
+ isSessionOnly: boolean;
+ permissionMode: PermissionMode;
+ isModeSessionOnly: boolean;
+}
+
+export function createInitialState(): PermissionState {
+ return {
+ currentLevel: "minimal",
+ isSessionOnly: false,
+ permissionMode: "ask",
+ isModeSessionOnly: false,
+ };
+}
+
+function setLevel(
+ state: PermissionState,
+ level: PermissionLevel,
+ saveGlobally: boolean,
+ ctx: any
+): void {
+ state.currentLevel = level;
+ state.isSessionOnly = !saveGlobally;
+ if (saveGlobally) {
+ saveGlobalPermission(level);
+ }
+ if (ctx.ui?.setStatus) {
+ ctx.ui.setStatus("authority", getStatusText(level));
+ }
+}
+
+function setMode(
+ state: PermissionState,
+ mode: PermissionMode,
+ saveGlobally: boolean,
+ ctx: any
+): void {
+ state.permissionMode = mode;
+ state.isModeSessionOnly = !saveGlobally;
+ if (saveGlobally) {
+ saveGlobalPermissionMode(mode);
+ }
+}
+
+// ============================================================================
+// HANDLERS
+// ============================================================================
+
+/** Handle /permission config subcommand */
+async function handleConfigSubcommand(
+ state: PermissionState,
+ args: string,
+ ctx: any
+): Promise {
+ const parts = args.trim().split(/\s+/);
+ const action = parts[0];
+
+ if (action === "show") {
+ const config = loadPermissionConfig();
+ const configStr = JSON.stringify(config, null, 2);
+ ctx.ui.notify(`Permission Config:\n${configStr}`, "info");
+ return;
+ }
+
+ if (action === "reset") {
+ savePermissionConfig({});
+ invalidateConfigCache();
+ ctx.ui.notify("Permission config reset to defaults", "info");
+ return;
+ }
+
+ // Show help
+ const help = `Usage: /permission config
+
+Actions:
+ show - Display current configuration
+ reset - Reset to default configuration
+
+Edit ~/.sf/agent/settings.json directly for full control:
+
+{
+ "permissionConfig": {
+ "overrides": {
+ "minimal": ["tmux list-*", "tmux show-*"],
+ "medium": ["tmux *", "screen *"],
+ "high": ["rm -rf *"],
+ "dangerous": ["dd if=* of=/dev/*"]
+ },
+ "prefixMappings": [
+ { "from": "fvm flutter", "to": "flutter" },
+ { "from": "nvm exec", "to": "" }
+ ]
+ }
+}`;
+
+ ctx.ui.notify(help, "info");
+}
+
+/** Handle /permission command */
+export async function handlePermissionCommand(
+ state: PermissionState,
+ args: string,
+ ctx: any
+): Promise {
+ const arg = args.trim().toLowerCase();
+
+ // Handle config subcommand
+ if (arg === "config" || arg.startsWith("config ")) {
+ const configArgs = arg.replace(/^config\s*/, '');
+ await handleConfigSubcommand(state, configArgs, ctx);
+ return;
+ }
+
+ // Direct level set: /permission medium
+ if (arg && LEVELS.includes(arg as PermissionLevel)) {
+ const newLevel = arg as PermissionLevel;
+
+ if (hasInteractiveUI(ctx)) {
+ const scope = await ctx.ui.select("Save permission level to:", [
+ "Session only",
+ "Global (persists)",
+ ]);
+ if (!scope) return;
+
+ setLevel(state, newLevel, scope === "Global (persists)", ctx);
+ const saveMsg = scope === "Global (persists)" ? " (saved globally)" : " (session only)";
+ ctx.ui.notify(`Permission: ${LEVEL_INFO[newLevel].label}${saveMsg}`, "info");
+ } else {
+ setLevel(state, newLevel, false, ctx);
+ ctx.ui.notify(`Permission: ${LEVEL_INFO[newLevel].label}`, "info");
+ }
+ return;
+ }
+
+ // Show current level (no UI)
+ if (!hasInteractiveUI(ctx)) {
+ ctx.ui.notify(
+ `Current permission: ${LEVEL_INFO[state.currentLevel].label} (${LEVEL_INFO[state.currentLevel].desc})`,
+ "info"
+ );
+ return;
+ }
+
+ // Show selector
+ const options = LEVELS.map((level) => {
+ const info = LEVEL_INFO[level];
+ const marker = level === state.currentLevel ? " ← current" : "";
+ return `${info.label}: ${info.desc}${marker}`;
+ });
+
+ const choice = await ctx.ui.select("Select permission level", options);
+ if (!choice) return;
+
+ const selectedLabel = choice.split(":")[0].trim();
+ const newLevel = LEVELS.find((l) => LEVEL_INFO[l].label === selectedLabel);
+ if (!newLevel || newLevel === state.currentLevel) return;
+
+ const scope = await ctx.ui.select("Save to:", ["Session only", "Global (persits)"]);
+ if (!scope) return;
+
+ setLevel(state, newLevel, scope === "Global (persits)", ctx);
+ const saveMsg = scope === "Global (persits)" ? " (saved globally)" : " (session only)";
+ ctx.ui.notify(`Permission: ${LEVEL_INFO[newLevel].label}${saveMsg}`, "info");
+}
+
+/** Handle /permission-mode command */
+export async function handlePermissionModeCommand(
+ state: PermissionState,
+ args: string,
+ ctx: any
+): Promise {
+ const arg = args.trim().toLowerCase();
+
+ if (arg && PERMISSION_MODES.includes(arg as PermissionMode)) {
+ const newMode = arg as PermissionMode;
+
+ if (hasInteractiveUI(ctx)) {
+ const scope = await ctx.ui.select("Save permission mode to:", [
+ "Session only",
+ "Global (persists)",
+ ]);
+ if (!scope) return;
+
+ setMode(state, newMode, scope === "Global (persists)", ctx);
+ const saveMsg = scope === "Global (persists)" ? " (saved globally)" : " (session only)";
+ ctx.ui.notify(`Permission mode: ${PERMISSION_MODE_INFO[newMode].label}${saveMsg}`, "info");
+ } else {
+ setMode(state, newMode, false, ctx);
+ ctx.ui.notify(`Permission mode: ${PERMISSION_MODE_INFO[newMode].label}`, "info");
+ }
+ return;
+ }
+
+ if (!hasInteractiveUI(ctx)) {
+ ctx.ui.notify(
+ `Current permission mode: ${PERMISSION_MODE_INFO[state.permissionMode].label} (${PERMISSION_MODE_INFO[state.permissionMode].desc})`,
+ "info"
+ );
+ return;
+ }
+
+ const options = PERMISSION_MODES.map((mode) => {
+ const info = PERMISSION_MODE_INFO[mode];
+ const marker = mode === state.permissionMode ? " ← current" : "";
+ return `${info.label}: ${info.desc}${marker}`;
+ });
+
+ const choice = await ctx.ui.select("Select permission mode", options);
+ if (!choice) return;
+
+ const selectedLabel = choice.split(":")[0].trim();
+ const newMode = PERMISSION_MODES.find((m) => PERMISSION_MODE_INFO[m].label === selectedLabel);
+ if (!newMode || newMode === state.permissionMode) return;
+
+ const scope = await ctx.ui.select("Save to:", ["Session only", "Global (persists)"]);
+ if (!scope) return;
+
+ setMode(state, newMode, scope === "Global (persists)", ctx);
+ const saveMsg = scope === "Global (persists)" ? " (saved globally)" : " (session only)";
+ ctx.ui.notify(`Permission mode: ${PERMISSION_MODE_INFO[newMode].label}${saveMsg}`, "info");
+}
+
+/** Handle session_start - initialize level and show status */
+export function handleSessionStart(state: PermissionState, ctx: any): void {
+ // Check env var first (for print mode)
+ const envLevel = process.env.SF_PERMISSION_LEVEL?.toLowerCase() || process.env.PI_PERMISSION_LEVEL?.toLowerCase();
+ if (envLevel && LEVELS.includes(envLevel as PermissionLevel)) {
+ state.currentLevel = envLevel as PermissionLevel;
+ } else {
+ const globalLevel = loadGlobalPermission();
+ if (globalLevel) {
+ state.currentLevel = globalLevel;
+ }
+ }
+
+ if (ctx.hasUI) {
+ const globalMode = loadGlobalPermissionMode();
+ if (globalMode) {
+ state.permissionMode = globalMode;
+ }
+ }
+
+ if (ctx.hasUI) {
+ if (ctx.ui?.setStatus) {
+ ctx.ui.setStatus("authority", getStatusText(state.currentLevel));
+ }
+ if (state.currentLevel === "bypassed") {
+ ctx.ui.notify("⚠️ Permission bypassed - all checks disabled!", "warning");
+ } else if (!isQuietMode(ctx)) {
+ ctx.ui.notify(`Permission: ${LEVEL_INFO[state.currentLevel].label} (use /permission to change)`, "info");
+ }
+ if (state.permissionMode === "block") {
+ ctx.ui.notify("Permission mode: Block (use /permission-mode to change)", "info");
+ }
+ }
+}
+
/**
 * Gate a bash tool call against the current permission level.
 *
 * Returns undefined to allow the command, or { block, reason } to stop it.
 * Decision order matters:
 *   1. "bypassed" level short-circuits every check.
 *   2. Commands classified as dangerous always require confirmation,
 *      regardless of level (blocked outright without a UI or in block mode).
 *   3. Otherwise the classified level is compared with the session level;
 *      insufficient level either blocks (print/block mode) or prompts,
 *      where "Allow all" raises the level globally.
 */
export async function handleBashToolCall(
  state: PermissionState,
  command: string,
  ctx: any
): Promise<{ block: true; reason: string } | undefined> {
  if (state.currentLevel === "bypassed") return undefined;

  const classification = classifyCommand(command);

  // Dangerous commands - always prompt unless in block mode
  if (classification.dangerous) {
    // No prompt-capable UI (print/rpc mode): refuse, with a hint on how
    // the user can deliberately bypass.
    if (!hasInteractiveUI(ctx)) {
      return {
        block: true,
        reason: `Dangerous command requires confirmation: ${command}\nUser can re-run with: SF_PERMISSION_LEVEL=bypassed sf -p "..."`
      };
    }

    if (state.permissionMode === "block") {
      return {
        block: true,
        reason: `Blocked by permission mode (block). Dangerous command: ${command}\nUse /permission-mode ask to enable confirmations.`
      };
    }

    // Audible cue, then a deliberately minimal Allow-once/Cancel prompt —
    // there is no "allow all" for dangerous commands.
    playPermissionSound();
    const choice = await ctx.ui.select(
      `⚠️ Dangerous command`,
      ["Allow once", "Cancel"]
    );

    if (choice !== "Allow once") {
      return { block: true, reason: "Cancelled" };
    }
    return undefined;
  }

  // Check level
  const requiredIndex = LEVEL_INDEX[classification.level];
  const currentIndex = LEVEL_INDEX[state.currentLevel];

  // Session level already covers the requirement: allow silently.
  if (requiredIndex <= currentIndex) return undefined;

  const requiredLevel = classification.level;
  const requiredInfo = LEVEL_INFO[requiredLevel];

  // Print mode: block
  if (!hasInteractiveUI(ctx)) {
    return {
      block: true,
      reason: `Blocked by permission (${state.currentLevel}). Command: ${command}\nAllowed at this level: ${LEVEL_ALLOWED_DESC[state.currentLevel]}\nUser can re-run with: SF_PERMISSION_LEVEL=${requiredLevel} sf -p "..."`
    };
  }

  if (state.permissionMode === "block") {
    return {
      block: true,
      reason: `Blocked by permission (${state.currentLevel}, mode: block). Command: ${command}\nRequires ${requiredInfo.label}. Allowed at this level: ${LEVEL_ALLOWED_DESC[state.currentLevel]}\nUse /permission ${requiredLevel} or /permission-mode ask to enable prompts.`
    };
  }

  // Interactive mode: prompt
  playPermissionSound();
  const choice = await ctx.ui.select(
    `Requires ${requiredInfo.label}`,
    ["Allow once", `Allow all (${requiredInfo.label})`, "Cancel"]
  );

  if (choice === "Allow once") return undefined;

  // "Allow all" raises the session level to the requirement and persists
  // it globally, so this class of command stops prompting.
  if (choice === `Allow all (${requiredInfo.label})`) {
    setLevel(state, requiredLevel, true, ctx);
    ctx.ui.notify(`Permission → ${requiredInfo.label} (saved globally)`, "info");
    return undefined;
  }

  return { block: true, reason: "Cancelled" };
}
+
/** Options for handleWriteToolCall */
export interface WriteToolCallOptions {
  state: PermissionState;
  // Tool being invoked; compared against "write" below (anything else is
  // labeled "Edit" in messages).
  toolName: string;
  // Target file path, as passed to the tool.
  filePath: string;
  ctx: any;
}

/**
 * Gate a write/edit tool call: file modifications require at least the
 * "low" permission level. Returns undefined to allow the call, or
 * { block, reason } to stop it. Mirrors handleBashToolCall's decision
 * order: bypassed → level sufficient → non-interactive block →
 * block mode → interactive prompt.
 */
export async function handleWriteToolCall(
  opts: WriteToolCallOptions
): Promise<{ block: true; reason: string } | undefined> {
  const { state, toolName, filePath, ctx } = opts;

  if (state.currentLevel === "bypassed") return undefined;

  // "low" or above already permits file writes: allow silently.
  if (LEVEL_INDEX[state.currentLevel] >= LEVEL_INDEX["low"]) return undefined;

  const action = toolName === "write" ? "Write" : "Edit";
  const message = `Requires Low: ${action} ${filePath}`;

  // Print mode: block
  if (!hasInteractiveUI(ctx)) {
    return {
      block: true,
      reason: `Blocked by permission (${state.currentLevel}). ${action}: ${filePath}\nAllowed at this level: ${LEVEL_ALLOWED_DESC[state.currentLevel]}\nUser can re-run with: SF_PERMISSION_LEVEL=low sf -p "..."`
    };
  }

  if (state.permissionMode === "block") {
    return {
      block: true,
      reason: `Blocked by permission (${state.currentLevel}, mode: block). ${action}: ${filePath}\nRequires Low. Allowed at this level: ${LEVEL_ALLOWED_DESC[state.currentLevel]}\nUse /permission low or /permission-mode ask to enable prompts.`
    };
  }

  // Interactive mode: prompt
  playPermissionSound();
  const choice = await ctx.ui.select(
    message,
    ["Allow once", "Allow all (Low)", "Cancel"]
  );

  if (choice === "Allow once") return undefined;

  // "Allow all (Low)" persists the raised level globally.
  if (choice === "Allow all (Low)") {
    setLevel(state, "low", true, ctx);
    ctx.ui.notify(`Permission → Low (saved globally)`, "info");
    return undefined;
  }

  return { block: true, reason: "Cancelled" };
}
+
+// ============================================================================
+// Extension entry point
+// ============================================================================
+
/**
 * Extension entry point: registers the /permission and /permission-mode
 * commands, initializes state on session_start, and routes bash and
 * write/edit tool calls through the permission handlers.
 */
export default function (pi: ExtensionAPI) {
  // Shared mutable per-session state; all handlers close over it.
  const state = createInitialState();

  pi.registerCommand("permission", {
    description: "View or change permission level",
    handler: (args, ctx) => handlePermissionCommand(state, args, ctx),
  });

  pi.registerCommand("permission-mode", {
    description: "Set permission prompt mode (ask or block)",
    handler: (args, ctx) => handlePermissionModeCommand(state, args, ctx),
  });

  pi.on("session_start", async (_event, ctx) => {
    handleSessionStart(state, ctx);
  });

  // NOTE(review): presumably a { block, reason } return value vetoes the
  // tool call — confirm against the ExtensionAPI documentation.
  pi.on("tool_call", async (event, ctx) => {
    if (event.toolName === "bash") {
      return handleBashToolCall(state, event.input.command as string, ctx);
    }

    if (event.toolName === "write" || event.toolName === "edit") {
      return handleWriteToolCall({
        state,
        toolName: event.toolName,
        filePath: event.input.path as string,
        ctx,
      });
    }

    return undefined;
  });
}
diff --git a/src/resources/extensions/sf-permissions/package.json b/src/resources/extensions/sf-permissions/package.json
new file mode 100644
index 000000000..e15991c89
--- /dev/null
+++ b/src/resources/extensions/sf-permissions/package.json
@@ -0,0 +1,11 @@
+{
+ "name": "pi-extension-sf-permissions",
+ "private": true,
+ "version": "1.0.0",
+ "type": "module",
+ "pi": {
+ "extensions": [
+ "./index.ts"
+ ]
+ }
+}
diff --git a/src/resources/extensions/sf-permissions/permission-core.ts b/src/resources/extensions/sf-permissions/permission-core.ts
new file mode 100644
index 000000000..c6c844551
--- /dev/null
+++ b/src/resources/extensions/sf-permissions/permission-core.ts
@@ -0,0 +1,1220 @@
+/**
+ * Core permission logic - command classification and settings
+ *
+ * This module contains pure functions for:
+ * - Parsing shell commands
+ * - Classifying commands by required permission level
+ * - Detecting dangerous commands
+ * - Managing settings persistence
+ */
+
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { parse } from "shell-quote";
+
+// ============================================================================
+// TYPES
+// ============================================================================
+
+export type PermissionLevel = "minimal" | "low" | "medium" | "high" | "bypassed";
+
+export type PermissionMode = "ask" | "block";
+
+export const LEVELS: PermissionLevel[] = ["minimal", "low", "medium", "high", "bypassed"];
+export const PERMISSION_MODES: PermissionMode[] = ["ask", "block"];
+
+export const LEVEL_INDEX: Record = {
+ minimal: 0,
+ low: 1,
+ medium: 2,
+ high: 3,
+ bypassed: 4,
+};
+
+export const LEVEL_INFO: Record = {
+ minimal: { label: "Minimal", desc: "Read-only" },
+ low: { label: "Low", desc: "File ops only" },
+ medium: { label: "Medium", desc: "Dev operations" },
+ high: { label: "High", desc: "Full operations" },
+ bypassed: { label: "Bypassed", desc: "All checks disabled" },
+};
+
+export const PERMISSION_MODE_INFO: Record = {
+ ask: { label: "Ask", desc: "Prompt when permission is required" },
+ block: { label: "Block", desc: "Block instead of prompting" },
+};
+
+export const LEVEL_ALLOWED_DESC: Record = {
+ minimal: "read-only (cat, ls, grep, git status/diff/log, npm list, version checks)",
+ low: "read-only + file write/edit",
+ medium: "dev ops (install packages, build, test, git commit/pull, file operations)",
+ high: "full operations except dangerous commands",
+ bypassed: "all operations",
+};
+
+export interface Classification {
+ level: PermissionLevel;
+ dangerous: boolean;
+}
+
+// ============================================================================
+// CONFIGURATION TYPES
+// ============================================================================
+
/** User-tunable classification config, stored under settings.permissionConfig. */
export interface PermissionConfig {
  /** Override patterns to force specific permission levels */
  overrides?: {
    minimal?: string[];
    low?: string[];
    medium?: string[];
    high?: string[];
    dangerous?: string[];
  };
  /** Prefix mappings to normalize commands before classification */
  prefixMappings?: Array<{
    // Wrapper prefix to match, e.g. "fvm flutter".
    from: string;
    // Replacement; an empty string removes the prefix entirely.
    to: string;
  }>;
}
+
+// ============================================================================
+// CONFIGURATION CACHING
+// ============================================================================
+
+let configCache: PermissionConfig | null = null;
+let configCacheTime = 0;
+/** Cache TTL in milliseconds - balance between responsiveness and performance */
+const CONFIG_CACHE_TTL = 5000; // 5 seconds
+
+let regexCache: Map = new Map();
+/** Maximum cached regex patterns to prevent memory exhaustion */
+const MAX_REGEX_CACHE_SIZE = 500;
+
+function getCachedConfig(): PermissionConfig {
+ const now = Date.now();
+ if (!configCache || now - configCacheTime > CONFIG_CACHE_TTL) {
+ configCache = loadPermissionConfig();
+ configCacheTime = now;
+ }
+ return configCache;
+}
+
+function getCachedRegex(pattern: string): RegExp {
+ let regex = regexCache.get(pattern);
+ if (!regex) {
+ // Evict oldest entries if cache is full (simple FIFO eviction)
+ if (regexCache.size >= MAX_REGEX_CACHE_SIZE) {
+ const firstKey = regexCache.keys().next().value;
+ if (firstKey) regexCache.delete(firstKey);
+ }
+ regex = globToRegex(pattern);
+ regexCache.set(pattern, regex);
+ }
+ return regex;
+}
+
+export function invalidateConfigCache(): void {
+ configCache = null;
+ regexCache.clear();
+}
+
+/**
+ * Validate and sanitize permission config
+ * Returns a safe config object with invalid entries removed
+ */
+function validateConfig(config: unknown): PermissionConfig {
+ if (!config || typeof config !== 'object') {
+ return {};
+ }
+
+ const result: PermissionConfig = {};
+ const raw = config as Record;
+
+ // Validate overrides
+ if (raw.overrides && typeof raw.overrides === 'object') {
+ const overrides = raw.overrides as Record;
+ result.overrides = {};
+
+ const levels = ['minimal', 'low', 'medium', 'high', 'dangerous'] as const;
+ for (const level of levels) {
+ const patterns = overrides[level];
+ if (Array.isArray(patterns)) {
+ // Filter to only valid string patterns, limit count
+ const validPatterns = patterns
+ .filter((p): p is string => typeof p === 'string' && p.length > 0)
+ .slice(0, 100); // Max 100 patterns per level
+ if (validPatterns.length > 0) {
+ result.overrides[level] = validPatterns;
+ }
+ }
+ }
+ }
+
+ // Validate prefix mappings
+ if (Array.isArray(raw.prefixMappings)) {
+ const validMappings = raw.prefixMappings
+ .filter((m): m is { from: string; to: string } =>
+ m && typeof m === 'object' &&
+ typeof (m as any).from === 'string' && (m as any).from.length > 0 &&
+ typeof (m as any).to === 'string'
+ )
+ .slice(0, 50); // Max 50 prefix mappings
+ if (validMappings.length > 0) {
+ result.prefixMappings = validMappings;
+ }
+ }
+
+ return result;
+}
+
+// ============================================================================
+// PATTERN MATCHING
+// ============================================================================
+
+/**
+ * Convert a glob-like pattern to a RegExp
+ * Supports: * (any chars), ? (single char)
+ * Patterns are matched against the full command string
+ */
+function globToRegex(pattern: string): RegExp {
+ try {
+ // Limit pattern complexity to prevent ReDoS
+ // Reject patterns with too many consecutive * (creates .*.*.*... patterns)
+ if (/\*{5,}/.test(pattern)) {
+ // More than 4 consecutive * - reject to prevent exponential backtracking
+ return /(?!)/;
+ }
+
+ // Escape regex special chars first (except * and ? which we handle specially)
+ // Note: - is not special outside character classes, so we don't need to escape it
+ let regex = pattern
+ .replace(/[.+^${}()|[\]\\]/g, '\\$&')
+ .replace(/\*/g, '.*') // * -> match any characters
+ .replace(/\?/g, '.'); // ? -> match single character
+
+ return new RegExp(`^${regex}$`, 'i');
+ } catch {
+ // Return a pattern that never matches on invalid input
+ return /(?!)/;
+ }
+}
+
+/**
+ * Check if a command matches any pattern in the list
+ */
+function matchesAnyPattern(command: string, patterns: string[] | undefined | null): boolean {
+ if (!patterns || !Array.isArray(patterns) || patterns.length === 0) {
+ return false;
+ }
+ return patterns.some(pattern =>
+ typeof pattern === 'string' && getCachedRegex(pattern).test(command)
+ );
+}
+
+/**
+ * Apply prefix mappings to normalize command before classification
+ * e.g., "fvm flutter build" → "flutter build"
+ */
+function applyPrefixMappings(
+ command: string,
+ mappings: PermissionConfig['prefixMappings']
+): string {
+ if (!mappings || !Array.isArray(mappings) || mappings.length === 0) return command;
+
+ const trimmed = command.trim();
+ const trimmedLower = trimmed.toLowerCase();
+
+ for (const mapping of mappings) {
+ // Validate mapping structure
+ if (!mapping || typeof mapping.from !== 'string' || typeof mapping.to !== 'string') {
+ continue;
+ }
+
+ const { from, to } = mapping;
+ const fromLower = from.toLowerCase();
+
+ if (trimmedLower.startsWith(fromLower)) {
+ // Check for word boundary (whitespace or end of string after prefix)
+ const afterPrefix = trimmed.substring(fromLower.length);
+ // Use regex to check for whitespace boundary (handles tabs, multiple spaces)
+ if (afterPrefix === '' || /^\s/.test(afterPrefix)) {
+ // Replace prefix with mapped value, preserve rest with trimmed leading space
+ const remainder = afterPrefix.replace(/^\s+/, '');
+ if (to === '') {
+ return remainder;
+ }
+ return remainder ? `${to} ${remainder}` : to;
+ }
+ }
+ }
+
+ return command;
+}
+
+/**
+ * Check if command matches any configured override
+ * Returns the override classification or null if no match
+ */
+function checkOverrides(
+ command: string,
+ overrides: PermissionConfig['overrides']
+): Classification | null {
+ if (!overrides) return null;
+
+ const trimmed = command.trim();
+
+ // Check dangerous first (highest priority)
+ if (overrides.dangerous && matchesAnyPattern(trimmed, overrides.dangerous)) {
+ return { level: 'high', dangerous: true };
+ }
+
+ // Check levels in order of specificity (high to low)
+ if (overrides.high && matchesAnyPattern(trimmed, overrides.high)) {
+ return { level: 'high', dangerous: false };
+ }
+
+ if (overrides.medium && matchesAnyPattern(trimmed, overrides.medium)) {
+ return { level: 'medium', dangerous: false };
+ }
+
+ if (overrides.low && matchesAnyPattern(trimmed, overrides.low)) {
+ return { level: 'low', dangerous: false };
+ }
+
+ if (overrides.minimal && matchesAnyPattern(trimmed, overrides.minimal)) {
+ return { level: 'minimal', dangerous: false };
+ }
+
+ return null; // No override matched
+}
+
+// ============================================================================
+// SETTINGS PERSISTENCE
+// ============================================================================
+
+function getSfSettingsPath(): string {
+ return path.join(process.env.HOME || "", ".sf", "agent", "settings.json");
+}
+
+function getPiSettingsPath(): string {
+ return path.join(process.env.HOME || "", ".pi", "agent", "settings.json");
+}
+
+function getSettingsPath(): string {
+ const sfPath = getSfSettingsPath();
+ if (fs.existsSync(sfPath)) {
+ return sfPath;
+ }
+ const piPath = getPiSettingsPath();
+ if (fs.existsSync(piPath)) {
+ return piPath;
+ }
+ return sfPath;
+}
+
+function loadSettings(): Record {
+ const sfPath = getSfSettingsPath();
+ try {
+ if (fs.existsSync(sfPath)) {
+ return JSON.parse(fs.readFileSync(sfPath, "utf-8"));
+ }
+ } catch {}
+
+ const piPath = getPiSettingsPath();
+ try {
+ if (fs.existsSync(piPath)) {
+ return JSON.parse(fs.readFileSync(piPath, "utf-8"));
+ }
+ } catch {}
+
+ return {};
+}
+
+function saveSettings(settings: Record): void {
+ const settingsPath = getSfSettingsPath();
+ const dir = path.dirname(settingsPath);
+ const tempPath = `${settingsPath}.tmp`;
+
+ try {
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+ // Atomic write: write to temp file first, then rename
+ fs.writeFileSync(tempPath, JSON.stringify(settings, null, 2) + "\n");
+ fs.renameSync(tempPath, settingsPath); // Atomic on POSIX systems
+ } catch (e) {
+ // Clean up temp file on error
+ try {
+ if (fs.existsSync(tempPath)) {
+ fs.unlinkSync(tempPath);
+ }
+ } catch {}
+ throw e;
+ }
+}
+
+export function loadGlobalPermission(): PermissionLevel | null {
+ const settings = loadSettings();
+ const level = (settings.permissionLevel as string)?.toLowerCase();
+ if (level && LEVELS.includes(level as PermissionLevel)) {
+ return level as PermissionLevel;
+ }
+ return null;
+}
+
+export function saveGlobalPermission(level: PermissionLevel): void {
+ const settings = loadSettings();
+ settings.permissionLevel = level;
+ saveSettings(settings);
+}
+
+export function loadGlobalPermissionMode(): PermissionMode | null {
+ const settings = loadSettings();
+ const mode = (settings.permissionMode as string)?.toLowerCase();
+ if (mode && PERMISSION_MODES.includes(mode as PermissionMode)) {
+ return mode as PermissionMode;
+ }
+ return null;
+}
+
+export function saveGlobalPermissionMode(mode: PermissionMode): void {
+ const settings = loadSettings();
+ settings.permissionMode = mode;
+ saveSettings(settings);
+}
+
+export function loadPermissionConfig(): PermissionConfig {
+ const settings = loadSettings();
+ return validateConfig(settings.permissionConfig);
+}
+
+export function savePermissionConfig(config: PermissionConfig): void {
+ const settings = loadSettings();
+ settings.permissionConfig = config;
+ saveSettings(settings);
+}
+
+// ============================================================================
+// COMMAND PARSING
+// ============================================================================
+
/** Structured result of parsing a raw shell command. */
interface ParsedCommand {
  segments: string[][]; // Commands split by operators
  operators: string[]; // |, &&, ||, ;
  raw: string;
  // True when the raw string contains command/process substitution or a
  // dangerous ${...} expansion (see detectShellTricks).
  hasShellTricks?: boolean;
  /** Output redirections to non-special files (>, >>) */
  writesFiles?: boolean;
}

// Shell execution commands that can run arbitrary code
const SHELL_EXECUTION_COMMANDS = new Set([
  "eval", "exec", "source", ".", // shell builtins
  "env", // can execute commands: env rm -rf /
  "command", // bypasses aliases, can execute arbitrary commands
  "builtin", // uses shell builtins directly
  // Wrapper commands that can execute arbitrary commands
  "time", "nice", "nohup", "timeout", "watch", "strace",
  // Note: xargs is handled in CONDITIONAL_WRITE_COMMANDS with smart logic
]);

// Patterns that indicate command substitution or shell tricks in raw command
// Only patterns that can actually execute arbitrary code
const SHELL_TRICK_PATTERNS = [
  /\$\((?!\()[^)]+\)/, // $(command) - command substitution (exclude $(( for arithmetic)
  /`[^`]+`/, // `command` - backtick substitution
  /<\([^)]+\)/, // <(command) - process substitution (input)
  />\([^)]+\)/, // >(command) - process substitution (output)
];
+
+// Check if ${...} contains nested command substitution
+// Simple ${VAR} is safe, but ${VAR:-$(cmd)} or ${VAR:-`cmd`} is dangerous
+function hasDangerousExpansion(command: string): boolean {
+ const braceExpansions = command.match(/\$\{[^}]+\}/g) || [];
+ for (const expansion of braceExpansions) {
+ // Check for nested $() or backticks inside ${...}
+ if (/\$\(|\`/.test(expansion)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+function detectShellTricks(command: string): boolean {
+ // Check basic patterns first
+ if (SHELL_TRICK_PATTERNS.some(pattern => pattern.test(command))) {
+ return true;
+ }
+ // Check for dangerous ${...} expansions with nested command substitution
+ if (hasDangerousExpansion(command)) {
+ return true;
+ }
+ return false;
+}
+
+/**
+ * Check if a command contains arithmetic expansion $((..))
+ * Used to avoid false positives from shell-quote parsing
+ */
+function hasArithmeticExpansion(command: string): boolean {
+ return /\$\(\(/.test(command);
+}
+
// Output redirection operators that write to files
// (>| forces clobbering; &> and &>> redirect both stdout and stderr)
const OUTPUT_REDIRECTION_OPS = new Set([">", ">>", ">|", "&>", "&>>"]);

// Safe redirection targets (not actual file writes) — special device
// files rather than regular files; /dev/fd/1 and /dev/fd/2 alias
// stdout/stderr respectively.
const SAFE_REDIRECTION_TARGETS = new Set([
  "/dev/null", "/dev/stdout", "/dev/stderr",
  "/dev/fd/1", "/dev/fd/2",
]);
+
+function parseCommand(command: string): ParsedCommand {
+ const hasShellTricks = detectShellTricks(command);
+
+ // shell-quote can throw on complex patterns it doesn't understand
+ // In that case, treat the command as having shell tricks (require high permission)
+ let tokens: ReturnType;
+ try {
+ tokens = parse(command);
+ } catch {
+ // Parse failed - treat as dangerous
+ return {
+ segments: [],
+ operators: [],
+ raw: command,
+ hasShellTricks: true
+ };
+ }
+
+ const segments: string[][] = [];
+ const operators: string[] = [];
+ let currentSegment: string[] = [];
+ let foundCommandSubstitution = false;
+ let writesFiles = false;
+
+ // Redirection operators - these don't start new command segments
+ const REDIRECTION_OPS = new Set([">", "<", ">>", ">&", "<&", ">|", "<>", "&>", "&>>"]);
+ let pendingOutputRedirect = false;
+
+ for (let i = 0; i < tokens.length; i++) {
+ const token = tokens[i];
+
+ if (pendingOutputRedirect) {
+ // This token is a redirection target
+ pendingOutputRedirect = false;
+ if (typeof token === "string") {
+ // Check if this is writing to a real file (not /dev/null etc.)
+ if (!SAFE_REDIRECTION_TARGETS.has(token) && !token.startsWith("/dev/fd/")) {
+ writesFiles = true;
+ }
+ }
+ continue;
+ }
+
+ if (typeof token === "string") {
+ currentSegment.push(token);
+ } else if (token && typeof token === "object") {
+ if ("op" in token) {
+ const op = token.op as string;
+ if (REDIRECTION_OPS.has(op)) {
+ // Check if this is an output redirection
+ if (OUTPUT_REDIRECTION_OPS.has(op)) {
+ pendingOutputRedirect = true;
+ } else {
+ // Input redirection or fd duplication - skip next token
+ // For >&, <& we need to check if it's fd duplication (2>&1) or file redirect
+ if (op === ">&" || op === "<&") {
+ const nextToken = tokens[i + 1];
+ if (typeof nextToken === "string" && /^\d+$/.test(nextToken)) {
+ // fd duplication like 2>&1, skip it
+ i++;
+ } else {
+ // File redirect like >&file
+ pendingOutputRedirect = true;
+ }
+ }
+ }
+ } else {
+ // Only treat actual command separators as segment boundaries
+ // ( and ) are grouping/subshell/arithmetic operators, not separators
+ const COMMAND_SEPARATORS = new Set(["|", "&&", "||", ";", "&"]);
+ if (COMMAND_SEPARATORS.has(op)) {
+ if (currentSegment.length > 0) {
+ segments.push(currentSegment);
+ currentSegment = [];
+ }
+ operators.push(op);
+ }
+ // Ignore ( and ) - they don't create new command segments
+ }
+ } else if ("comment" in token) {
+ // Comment - ignore
+ } else {
+ // shell-quote returns special objects for:
+ // - { op: 'glob', pattern: '*.js' } - globs
+ // - { op: string } - operators
+ // Any other object type indicates shell parsing complexity
+ // that we should treat as potentially dangerous
+ foundCommandSubstitution = true;
+ }
+ }
+ }
+
+ if (currentSegment.length > 0) {
+ segments.push(currentSegment);
+ }
+
+ return {
+ segments,
+ operators,
+ raw: command,
+ hasShellTricks: hasShellTricks || foundCommandSubstitution,
+ writesFiles
+ };
+}
+
+function getCommandName(tokens: string[]): string {
+ if (tokens.length === 0) return "";
+
+ let cmd = tokens[0];
+
+ // Strip path prefix
+ if (cmd.includes("/")) {
+ cmd = cmd.split("/").pop() || cmd;
+ }
+
+ // Strip leading backslash (alias bypass)
+ if (cmd.startsWith("\\")) {
+ cmd = cmd.slice(1);
+ }
+
+ return cmd.toLowerCase();
+}
+
+// ============================================================================
+// DANGEROUS COMMAND DETECTION
+// ============================================================================
+
+function isDangerousCommand(tokens: string[]): boolean {
+ if (tokens.length === 0) return false;
+
+ const cmd = getCommandName(tokens);
+ const args = tokens.slice(1);
+ const argsStr = args.join(" ");
+
+ // sudo - always dangerous
+ if (cmd === "sudo") return true;
+
+ // rm with recursive + force
+ if (cmd === "rm") {
+ let hasRecursive = false;
+ let hasForce = false;
+
+ for (const arg of args) {
+ if (arg === "--recursive") hasRecursive = true;
+ if (arg === "--force") hasForce = true;
+ if (arg.startsWith("-") && !arg.startsWith("--")) {
+ if (arg.includes("r") || arg.includes("R")) hasRecursive = true;
+ if (arg.includes("f")) hasForce = true;
+ }
+ }
+
+ if (hasRecursive && hasForce) return true;
+ }
+
+ // chmod 777 or a+rwx
+ if (cmd === "chmod") {
+ if (argsStr.includes("777") || argsStr.includes("a+rwx")) return true;
+ }
+
+ // dd to device
+ if (cmd === "dd") {
+ if (argsStr.match(/of=\/dev\//)) return true;
+ }
+
+ // Dangerous system commands
+ if (["fdisk", "parted", "format"].includes(cmd)) return true;
+ if (cmd.startsWith("mkfs")) return true; // mkfs, mkfs.ext4, mkfs.xfs, etc.
+
+ // Shutdown/reboot
+ if (["shutdown", "reboot", "halt", "poweroff", "init"].includes(cmd)) return true;
+
+ // Fork bomb pattern
+ if (tokens.join("").includes(":(){ :|:& };:")) return true;
+
+ return false;
+}
+
+// ============================================================================
+// LEVEL CLASSIFICATION
+// ============================================================================
+
// Common redirection targets (treated as read-only).
// These are special device files, not regular files, so a command segment
// consisting of one of them (left over from redirection parsing) is harmless.
const REDIRECTION_TARGETS = new Set([
  "/dev/null", "/dev/stdin", "/dev/stdout", "/dev/stderr",
  "/dev/zero", "/dev/full", "/dev/random", "/dev/urandom",
  "/dev/fd", "/dev/tty", "/dev/ptmx",
]);

// File descriptor numbers used in redirections (e.g., 2>&1); a lone
// digit token is an artifact of fd-duplication parsing, not a command.
const FD_NUMBERS = new Set(["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]);
+
// MINIMAL level - read-only commands
const MINIMAL_COMMANDS = new Set([
  // File reading
  "cat", "less", "more", "head", "tail", "bat", "tac",
  // Directory listing/navigation
  "ls", "tree", "pwd", "dir", "vdir", "cd", "pushd", "popd", "dirs",
  // Search (note: find handled specially due to -exec/-delete)
  "grep", "egrep", "fgrep", "rg", "ag", "ack", "fd", "locate", "which", "whereis",
  // Info
  "echo", "printf", "whoami", "id", "date", "cal", "uname", "hostname", "uptime",
  "type", "file", "stat", "wc", "du", "df", "free",
  "ps", "top", "htop", "pgrep", "sleep",
  // Man/help
  "man", "help", "info",
  // Pipeline utilities (note: xargs, tee handled specially - they can write/execute)
  // NOTE(review): sed -i and patch modify files in place; they are listed
  // as read-only here — confirm that is intended.
  "sort", "uniq", "cut", "awk", "sed", "tr", "column", "paste", "join",
  "comm", "diff", "cmp", "patch",
  // Shell test commands (read-only conditionals)
  "test", "[", "[[", "true", "false",
]);
+
+// Commands that can write files based on arguments
+// find: -exec, -execdir, -ok, -okdir, -delete can modify filesystem
+// xargs: executes commands with input as arguments (but safe if running read-only commands)
+// tee: writes to files (but read-only when used with /dev/null or --)
+
+/**
+ * Extract the command that xargs will execute.
+ * Parses xargs options to find the first non-option argument.
+ * Returns null if no command specified (xargs defaults to /bin/echo).
+ */
+function extractXargsCommand(tokens: string[]): string | null {
+ const args = tokens.slice(1); // Skip 'xargs' itself
+
+ // xargs options that consume the next argument
+ const OPTIONS_WITH_ARG = new Set(["-I", "-d", "-E", "-L", "-n", "-P", "-s", "-a"]);
+
+ let i = 0;
+ while (i < args.length) {
+ const arg = args[i];
+
+ // End of options marker
+ if (arg === "--") {
+ i++;
+ break;
+ }
+
+ // Not an option - this is the command
+ if (!arg.startsWith("-")) {
+ break;
+ }
+
+ // Long options (--null, --max-args=5, etc.)
+ if (arg.startsWith("--")) {
+ // Long options either are flags or use = for values, so just skip
+ i++;
+ continue;
+ }
+
+ // Short option that takes a required argument
+ // Could be: -I {} (separate) or -I{} (attached)
+ const optLetter = arg.substring(0, 2); // e.g., "-I"
+ if (OPTIONS_WITH_ARG.has(optLetter)) {
+ if (arg.length > 2) {
+ // Argument attached: -I{} or -n10
+ i++;
+ } else {
+ // Argument is next token: -I {}
+ i += 2;
+ }
+ continue;
+ }
+
+ // -i and -e can have optional attached argument (deprecated forms)
+ // -i[replstr], -e[eof-str]
+ if (arg.startsWith("-i") || arg.startsWith("-e")) {
+ i++;
+ continue;
+ }
+
+ // Other short options are flags (can be combined): -0, -t, -p, -r, -x
+ // e.g., -0tr means -0 -t -r
+ i++;
+ }
+
+ // Return the command if found
+ if (i < args.length) {
+ const cmd = args[i];
+ // Strip path prefix (e.g., /usr/bin/cat -> cat)
+ if (cmd.includes("/")) {
+ return cmd.split("/").pop()?.toLowerCase() || null;
+ }
+ return cmd.toLowerCase();
+ }
+
+ // No command found - xargs defaults to /bin/echo (safe)
+ return null;
+}
+
+const CONDITIONAL_WRITE_COMMANDS: Record boolean> = {
+ find: (tokens) => {
+ const dangerousFlags = ["-exec", "-execdir", "-ok", "-okdir", "-delete"];
+ return tokens.some(t => dangerousFlags.includes(t.toLowerCase()));
+ },
+ xargs: (tokens) => {
+ // xargs executes commands with input as arguments
+ // Safe if running a read-only command from MINIMAL_COMMANDS
+ const xargsCmd = extractXargsCommand(tokens);
+
+ // No command = defaults to /bin/echo (safe, just prints)
+ if (xargsCmd === null) return false;
+
+ // Check if the command xargs will run is read-only
+ if (MINIMAL_COMMANDS.has(xargsCmd)) return false;
+
+ // Unknown or non-minimal command - not safe
+ return true;
+ },
+ tee: (tokens) => {
+ // tee writes to files unless only used with /dev/null or --
+ const args = tokens.slice(1).filter(t => !t.startsWith("-"));
+ if (args.length === 0) return false; // tee with no file args writes to stdout only
+ // Check if all file args are /dev/null
+ return !args.every(a => a === "/dev/null");
+ },
+};
+
// Git subcommands that never modify the working tree or history.
// branch/tag/remote are only read-only when given no positional args;
// isMinimalLevel enforces that distinction.
const MINIMAL_GIT_SUBCOMMANDS = new Set([
  "status", "log", "diff", "show", "branch", "remote", "tag",
  "ls-files", "ls-tree", "cat-file", "rev-parse", "describe",
  "shortlog", "blame", "annotate", "whatchanged", "reflog",
  "fetch", // read-only: just downloads refs, doesn't change working tree
]);
+
+const MINIMAL_PACKAGE_SUBCOMMANDS: Record> = {
+ npm: new Set(["list", "ls", "info", "view", "outdated", "audit", "explain", "why", "search"]),
+ yarn: new Set(["list", "info", "why", "outdated", "audit"]),
+ pnpm: new Set(["list", "ls", "outdated", "audit", "why"]),
+ bun: new Set(["pm", "ls"]),
+ pip: new Set(["list", "show", "freeze", "check"]),
+ pip3: new Set(["list", "show", "freeze", "check"]),
+ cargo: new Set(["tree", "metadata", "search", "info"]),
+ go: new Set(["list", "version", "env"]),
+ gem: new Set(["list", "info", "search", "query"]),
+ composer: new Set(["show", "info", "search", "outdated", "audit"]),
+ dotnet: new Set(["list", "nuget"]),
+ flutter: new Set(["doctor", "devices", "config"]),
+ dart: new Set(["info"]),
+};
+
+function isMinimalLevel(tokens: string[]): boolean {
+ if (tokens.length === 0) return true;
+
+ const cmd = getCommandName(tokens);
+ const fullCmd = tokens[0]; // Keep full path for checking redirection targets
+ const subCmd = tokens.length > 1 ? tokens[1].toLowerCase() : "";
+
+ // Check if this is a file descriptor number from redirection parsing (e.g., "1" from 2>&1)
+ if (tokens.length === 1 && FD_NUMBERS.has(fullCmd)) return true;
+
+ // Check if this is a common redirection target (e.g., /dev/null)
+ if (REDIRECTION_TARGETS.has(fullCmd)) return true;
+
+ // Check conditional write commands (find with -exec, xargs, tee with files)
+ const conditionalCheck = CONDITIONAL_WRITE_COMMANDS[cmd];
+ if (conditionalCheck) {
+ // If the command would write/execute, it's not minimal level
+ if (conditionalCheck(tokens)) {
+ return false;
+ }
+ // Otherwise it's safe (e.g., find without -exec, tee to /dev/null)
+ return true;
+ }
+
+ // Basic read-only commands
+ if (MINIMAL_COMMANDS.has(cmd)) return true;
+
+ // Version checks
+ if (tokens.includes("--version") || tokens.includes("-v") || tokens.includes("-V")) {
+ return true;
+ }
+
+ // Git read operations
+ if (cmd === "git" && subCmd && MINIMAL_GIT_SUBCOMMANDS.has(subCmd)) {
+ // Some git commands are only read-only without additional args
+ // e.g., "git branch" lists branches (minimal), "git branch new" creates (medium)
+ // e.g., "git tag" lists tags (minimal), "git tag v1.0" creates (medium)
+ const READ_ONLY_WITHOUT_ARGS = new Set(["branch", "tag", "remote"]);
+ if (READ_ONLY_WITHOUT_ARGS.has(subCmd)) {
+ // Check if there are args beyond flags (starting with -)
+ const nonFlagArgs = tokens.slice(2).filter(t => !t.startsWith("-"));
+ if (nonFlagArgs.length > 0) {
+ return false; // Has args, not read-only
+ }
+ }
+ return true;
+ }
+
+ // Package manager read operations
+ if (MINIMAL_PACKAGE_SUBCOMMANDS[cmd]?.has(subCmd)) {
+ return true;
+ }
+
+ return false;
+}
+
// MEDIUM level - build/install/test operations only (NOT running code)
// Each entry pairs a command name with a pattern; the pattern appears to be
// applied to the subcommand (the consumer is outside this excerpt — verify).
// NOTE(review): /./ requires a NON-EMPTY subcommand, so a bare invocation
// with no arguments (e.g. plain "make") may not match — verify in the consumer.
const MEDIUM_PACKAGE_PATTERNS: Array<[string, RegExp]> = [
  // Node.js - install, build, test only (NOT run/start/exec which execute arbitrary code)
  ["npm", /^(install|ci|add|remove|uninstall|update|rebuild|dedupe|prune|link|pack|test|build)$/],
  ["yarn", /^(install|add|remove|upgrade|import|link|pack|test|build)$/],
  ["pnpm", /^(install|add|remove|update|link|pack|test|build)$/],
  ["bun", /^(install|add|remove|update|link|test|build)$/],
  // npx/bunx/pnpx run arbitrary packages - HIGH (not included here)

  // Python - install/build only (NOT running scripts)
  ["pip", /^install$/],
  ["pip3", /^install$/],
  ["pipenv", /^(install|update|sync|lock|uninstall)$/],
  ["poetry", /^(install|add|remove|update|lock|build)$/],
  ["conda", /^(install|update|remove|create)$/],
  ["uv", /^(pip|sync|lock)$/],
  // python/python3 run arbitrary code - HIGH (not included here)
  ["pytest", /./], // test runner is safe

  // Rust - build/test/lint only (NOT cargo run)
  ["cargo", /^(install|add|remove|fetch|update|build|test|check|clippy|fmt|doc|bench|clean)$/],
  ["rustfmt", /./],
  // rustc compiles but doesn't run - medium
  ["rustc", /./],

  // Go - build/test only (NOT go run)
  ["go", /^(get|mod|build|test|generate|fmt|vet|clean|install)$/],

  // Ruby - install/build only
  ["gem", /^install$/],
  ["bundle", /^(install|update|add|remove|binstubs)$/],
  ["bundler", /^(install|update|add|remove)$/],
  // CocoaPods - dependency management only
  ["pod", /^(install|update|repo)$/],
  // rake/rails can run arbitrary code - HIGH (not included here)
  ["rspec", /./], // test runner

  // PHP - install only
  ["composer", /^(install|require|remove|update|dump-autoload)$/],
  // php runs code - HIGH (not included here)
  ["phpunit", /./], // test runner

  // Java/Kotlin - compile/test only (NOT run)
  ["mvn", /^(install|compile|test|package|clean|dependency|verify)$/],
  ["gradle", /^(build|test|clean|assemble|dependencies|check)$/],
  // gradlew can run arbitrary tasks - HIGH (not included here)

  // .NET - build/test only (NOT run/watch)
  ["dotnet", /^(restore|add|build|test|clean|publish|pack|new)$/],
  ["nuget", /^install$/],

  // Dart/Flutter - build/test only (NOT run)
  ["dart", /^(pub|compile|test|analyze|format|fix)$/],
  ["flutter", /^(pub|build|test|analyze|clean|create|doctor)$/],
  ["pub", /^(get|upgrade|downgrade|cache|deps)$/],

  // Swift - build/test only (NOT run)
  ["swift", /^(package|build|test)$/],
  ["swiftc", /./],

  // Elixir - build/test only (NOT run)
  ["mix", /^(deps|compile|test|ecto|phx\.gen)$/],
  // elixir runs code - HIGH (not included here)

  // Haskell - build/test only (NOT run)
  ["cabal", /^(install|build|test|update)$/],
  ["stack", /^(install|build|test|setup)$/],
  // ghc compiles but doesn't run - medium
  ["ghc", /./],

  // Others
  ["nimble", /^install$/],
  ["zig", /^(build|test|fetch)$/],
  ["cmake", /./],
  ["make", /./],
  ["ninja", /./],
  ["meson", /./],

  // Linters/formatters - static analysis only (MEDIUM)
  ["eslint", /./],
  ["prettier", /./],
  ["black", /./],
  ["flake8", /./],
  ["pylint", /./],
  ["ruff", /./],
  ["pyflakes", /./],
  ["bandit", /./],
  ["mypy", /./],
  ["pyright", /./],
  ["tsc", /./],
  ["tslint", /./],
  ["standard", /./],
  ["xo", /./],
  ["rubocop", /./],
  ["standardrb", /./],
  ["reek", /./],
  ["brakeman", /./],
  ["golangci-lint", /./],
  ["gofmt", /./],
  // NOTE(review): "go vet" contains a space; if lookup is by single command
  // token (as getCommandName produces) this entry can never match — and
  // "vet" is already covered by the ["go", ...] entry above. TODO confirm.
  ["go vet", /./],
  ["golint", /./],
  ["staticcheck", /./],
  ["errcheck", /./],
  ["misspell", /./],
  ["swiftlint", /./],
  ["swiftformat", /./],
  ["ktlint", /./],
  ["detekt", /./],
  ["dartanalyzer", /./], // dart analyze alternative name
  ["dartfmt", /./],
  ["clang-tidy", /./],
  ["clang-format", /./],
  ["cppcheck", /./],
  ["checkstyle", /./],
  ["pmd", /./],
  ["spotbugs", /./],
  ["sonarqube", /./],
  ["phpcs", /./],
  ["phpmd", /./],
  ["phpstan", /./],
  ["psalm", /./],
  ["php-cs-fixer", /./],
  ["luacheck", /./],
  ["shellcheck", /./],
  ["checkov", /./],
  ["tflint", /./],
  ["buf", /./], // protobuf linter
  ["sqlfluff", /./],
  ["yamllint", /./],
  ["markdownlint", /./],
  ["djlint", /./],
  ["djhtml", /./],
  ["commitlint", /./],

  // Test runners
  ["jest", /./],
  ["mocha", /./],
  ["vitest", /./],

  // File ops
  ["mkdir", /./],
  ["touch", /./],
  ["cp", /./],
  ["mv", /./],
  ["ln", /./],

  // Database (local dev)
  ["prisma", /^(generate|migrate|db|studio)$/],
  ["sequelize", /^(db|migration)$/],
  ["typeorm", /^(migration)$/],
];
+
// Git subcommands allowed at MEDIUM: local, largely reversible operations.
// Note: "push" and "reset --hard" are screened out in isMediumLevel before
// this set is consulted.
const MEDIUM_GIT_SUBCOMMANDS = new Set([
  "add", "commit", "pull", "checkout", "switch", "branch",
  "merge", "rebase", "cherry-pick", "stash", "revert", "tag",
  "rm", "mv", "reset", "clone", // reset without --hard, clone is reversible
  // NOT included (irreversible):
  // - clean: permanently deletes untracked files
  // - restore: can discard uncommitted changes permanently
]);
+
+// Safe npm/yarn/pnpm/bun run scripts (build, test, lint - not dev, start, serve)
+const SAFE_RUN_SCRIPTS = new Set([
+ "build", "compile", "test", "lint", "format", "fmt", "check", "typecheck",
+ "type-check", "types", "validate", "verify", "prepare", "prepublish",
+ "prepublishOnly", "prepack", "postpack", "clean", "lint:fix", "format:check",
+ "build:prod", "build:dev", "build:production", "build:development",
+ "test:unit", "test:integration", "test:e2e", "test:coverage",
+]);
+
+// Scripts that run servers or arbitrary code
+const UNSAFE_RUN_SCRIPTS = new Set([
+ "start", "dev", "develop", "serve", "server", "watch", "preview",
+ "start:dev", "start:prod", "dev:server",
+]);
+
+function isSafeRunScript(script: string): boolean {
+ const s = script.toLowerCase();
+ // Check explicit safe list
+ if (SAFE_RUN_SCRIPTS.has(s)) return true;
+ // Check if starts with safe prefix
+ if (s.startsWith("build") || s.startsWith("test") || s.startsWith("lint") ||
+ s.startsWith("format") || s.startsWith("check") || s.startsWith("type")) {
+ return true;
+ }
+ // Check explicit unsafe list
+ if (UNSAFE_RUN_SCRIPTS.has(s)) return false;
+ // Check unsafe prefixes
+ if (s.startsWith("start") || s.startsWith("dev") || s.startsWith("serve") ||
+ s.startsWith("watch")) {
+ return false;
+ }
+ // Default: unknown scripts are unsafe
+ return false;
+}
+
+function isMediumLevel(tokens: string[]): boolean {
+ if (tokens.length === 0) return false;
+
+ const cmd = getCommandName(tokens);
+ const subCmd = tokens.length > 1 ? tokens[1].toLowerCase() : "";
+ const thirdArg = tokens.length > 2 ? tokens[2] : "";
+
+ // Git local operations (not push)
+ if (cmd === "git") {
+ if (subCmd === "push") return false; // push is HIGH
+ if (subCmd === "reset" && tokens.includes("--hard")) return false; // hard reset is HIGH
+ if (MEDIUM_GIT_SUBCOMMANDS.has(subCmd)) return true;
+ }
+
+ // Handle npm/yarn/pnpm/bun run