Rename GSD→SF: complete rebrand from fork origin
- All gsdDir/gsdRoot/gsdHome → sfDir/sfRootDir/sfHome - GSDWorkspace* → SFWorkspace* interfaces - bootstrapGsdProject → bootstrapProject - runGSDDoctor → runSFDoctor - GsdClient → SfClient, gsd-client.ts → sf-client.ts - .gsd/ → .sf/ in all tests, docs, docker, native, vscode - Auto-migration: headless detects .gsd/ → renames to .sf/ - Deleted gsd-phase-state.ts backward-compat re-export - Renamed bin/gsd-from-source → bin/sf-from-source - Updated mintlify docs, github workflows, docker configs
This commit is contained in:
parent
6e10d93d0d
commit
9d739dfa5d
190 changed files with 8950 additions and 2283 deletions
|
|
@ -12,7 +12,7 @@ packages/*/node_modules/
|
||||||
.env
|
.env
|
||||||
.env.*
|
.env.*
|
||||||
!.env.example
|
!.env.example
|
||||||
.gsd/
|
.sf/
|
||||||
|
|
||||||
# ── IDE & OS ──
|
# ── IDE & OS ──
|
||||||
.idea/
|
.idea/
|
||||||
|
|
|
||||||
28
.github/CODEOWNERS
vendored
28
.github/CODEOWNERS
vendored
|
|
@ -6,31 +6,31 @@
|
||||||
# Last matching rule wins.
|
# Last matching rule wins.
|
||||||
|
|
||||||
# Default: maintainers review everything not explicitly matched below
|
# Default: maintainers review everything not explicitly matched below
|
||||||
* @gsd-build/maintainers
|
* @sf-build/maintainers
|
||||||
|
|
||||||
# Core agent orchestration — RFC required, senior review only
|
# Core agent orchestration — RFC required, senior review only
|
||||||
packages/pi-agent-core/ @gsd-build/maintainers
|
packages/pi-agent-core/ @sf-build/maintainers
|
||||||
src/resources/extensions/gsd/ @gsd-build/maintainers
|
src/resources/extensions/sf/ @sf-build/maintainers
|
||||||
|
|
||||||
# AI/LLM provider integrations
|
# AI/LLM provider integrations
|
||||||
packages/pi-ai/ @gsd-build/maintainers
|
packages/pi-ai/ @sf-build/maintainers
|
||||||
|
|
||||||
# Terminal UI
|
# Terminal UI
|
||||||
packages/pi-tui/ @gsd-build/maintainers
|
packages/pi-tui/ @sf-build/maintainers
|
||||||
|
|
||||||
# Native bindings — platform-specific, needs careful review
|
# Native bindings — platform-specific, needs careful review
|
||||||
native/ @gsd-build/maintainers
|
native/ @sf-build/maintainers
|
||||||
|
|
||||||
# CI/CD and release pipeline — high blast radius
|
# CI/CD and release pipeline — high blast radius
|
||||||
.github/ @gsd-build/maintainers
|
.github/ @sf-build/maintainers
|
||||||
scripts/ @gsd-build/maintainers
|
scripts/ @sf-build/maintainers
|
||||||
Dockerfile @gsd-build/maintainers
|
Dockerfile @sf-build/maintainers
|
||||||
|
|
||||||
# Security-sensitive files — always require maintainer sign-off
|
# Security-sensitive files — always require maintainer sign-off
|
||||||
.secretscanignore @gsd-build/maintainers
|
.secretscanignore @sf-build/maintainers
|
||||||
scripts/secret-scan.sh @gsd-build/maintainers
|
scripts/secret-scan.sh @sf-build/maintainers
|
||||||
scripts/install-hooks.sh @gsd-build/maintainers
|
scripts/install-hooks.sh @sf-build/maintainers
|
||||||
|
|
||||||
# Contributor-facing docs — keep accurate, maintainers approve
|
# Contributor-facing docs — keep accurate, maintainers approve
|
||||||
CONTRIBUTING.md @gsd-build/maintainers
|
CONTRIBUTING.md @sf-build/maintainers
|
||||||
VISION.md @gsd-build/maintainers
|
VISION.md @sf-build/maintainers
|
||||||
|
|
|
||||||
8
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
8
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -28,7 +28,7 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: Summary
|
label: Summary
|
||||||
description: One sentence describing what is broken.
|
description: One sentence describing what is broken.
|
||||||
placeholder: Running `/gsd inspect` reports "No SF database available" even though `.gsd/gsd.db` exists.
|
placeholder: Running `/sf inspect` reports "No SF database available" even though `.sf/sf.db` exists.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
|
@ -40,7 +40,7 @@ body:
|
||||||
placeholder: |
|
placeholder: |
|
||||||
1. Run `...`
|
1. Run `...`
|
||||||
2. Open `...`
|
2. Open `...`
|
||||||
3. Execute `/gsd ...`
|
3. Execute `/sf ...`
|
||||||
4. Observe the failure
|
4. Observe the failure
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
@ -64,10 +64,10 @@ body:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
- type: input
|
- type: input
|
||||||
id: gsd_version
|
id: sf_version
|
||||||
attributes:
|
attributes:
|
||||||
label: SF version
|
label: SF version
|
||||||
description: Run `gsd --version` and paste the exact version.
|
description: Run `sf --version` and paste the exact version.
|
||||||
placeholder: "e.g. 2.33.1"
|
placeholder: "e.g. 2.33.1"
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
|
||||||
6
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
6
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
|
|
@ -32,7 +32,7 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: Proposed solution
|
label: Proposed solution
|
||||||
description: Describe the desired behavior, UX, CLI shape, or API as specifically as you can.
|
description: Describe the desired behavior, UX, CLI shape, or API as specifically as you can.
|
||||||
placeholder: Add `/gsd queue list` that renders queued milestones with IDs, status, and created timestamps.
|
placeholder: Add `/sf queue list` that renders queued milestones with IDs, status, and created timestamps.
|
||||||
validations:
|
validations:
|
||||||
required: true
|
required: true
|
||||||
|
|
||||||
|
|
@ -41,7 +41,7 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: Alternatives considered
|
label: Alternatives considered
|
||||||
description: Other approaches considered and why they are weaker.
|
description: Other approaches considered and why they are weaker.
|
||||||
placeholder: Reading `.gsd/QUEUE.md` manually works, but it is slower and harder to parse during terminal workflows.
|
placeholder: Reading `.sf/QUEUE.md` manually works, but it is slower and harder to parse during terminal workflows.
|
||||||
|
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: use_cases
|
id: use_cases
|
||||||
|
|
@ -80,4 +80,4 @@ body:
|
||||||
attributes:
|
attributes:
|
||||||
label: Additional information
|
label: Additional information
|
||||||
description: Extra constraints, compatibility concerns, implementation hints, or references.
|
description: Extra constraints, compatibility concerns, implementation hints, or references.
|
||||||
placeholder: Must remain compatible with existing `.gsd/QUEUE.md` structure.
|
placeholder: Must remain compatible with existing `.sf/QUEUE.md` structure.
|
||||||
|
|
|
||||||
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
2
.github/PULL_REQUEST_TEMPLATE.md
vendored
|
|
@ -44,7 +44,7 @@ Closes #<!-- issue number — required -->
|
||||||
- [ ] `pi-ai` — AI/LLM layer
|
- [ ] `pi-ai` — AI/LLM layer
|
||||||
- [ ] `pi-agent-core` — Agent orchestration
|
- [ ] `pi-agent-core` — Agent orchestration
|
||||||
- [ ] `pi-coding-agent` — Coding agent
|
- [ ] `pi-coding-agent` — Coding agent
|
||||||
- [ ] `gsd extension` — SF workflow
|
- [ ] `sf extension` — SF workflow
|
||||||
- [ ] `native` — Native bindings
|
- [ ] `native` — Native bindings
|
||||||
- [ ] `ci/build` — Workflows, scripts, config
|
- [ ] `ci/build` — Workflows, scripts, config
|
||||||
|
|
||||||
|
|
|
||||||
4
.github/workflows/ai-triage.yml
vendored
4
.github/workflows/ai-triage.yml
vendored
|
|
@ -175,7 +175,7 @@ jobs:
|
||||||
};
|
};
|
||||||
|
|
||||||
const securityNote = result.violation_type === 'security-in-public'
|
const securityNote = result.violation_type === 'security-in-public'
|
||||||
? `\n\n**If this is a security vulnerability, please delete this ${type} and use [GitHub\'s private vulnerability reporting](https://github.com/gsd-build/SF/security/advisories/new) instead.** See [CONTRIBUTING.md](https://github.com/gsd-build/SF/blob/main/CONTRIBUTING.md#security) for details.`
|
? `\n\n**If this is a security vulnerability, please delete this ${type} and use [GitHub\'s private vulnerability reporting](https://github.com/sf-build/SF/security/advisories/new) instead.** See [CONTRIBUTING.md](https://github.com/sf-build/SF/blob/main/CONTRIBUTING.md#security) for details.`
|
||||||
: '';
|
: '';
|
||||||
|
|
||||||
const comment = `👋 Thanks for opening this ${type}!
|
const comment = `👋 Thanks for opening this ${type}!
|
||||||
|
|
@ -186,7 +186,7 @@ jobs:
|
||||||
|
|
||||||
${result.explanation}
|
${result.explanation}
|
||||||
|
|
||||||
Please review our [VISION.md](https://github.com/gsd-build/SF/blob/main/VISION.md) and [CONTRIBUTING.md](https://github.com/gsd-build/SF/blob/main/CONTRIBUTING.md) for project guidelines.${securityNote}
|
Please review our [VISION.md](https://github.com/sf-build/SF/blob/main/VISION.md) and [CONTRIBUTING.md](https://github.com/sf-build/SF/blob/main/CONTRIBUTING.md) for project guidelines.${securityNote}
|
||||||
|
|
||||||
A maintainer will review this shortly. If you believe this was flagged in error, no action is needed — we'll take a look.
|
A maintainer will review this shortly. If you believe this was flagged in error, no action is needed — we'll take a look.
|
||||||
|
|
||||||
|
|
|
||||||
26
.github/workflows/build-native.yml
vendored
26
.github/workflows/build-native.yml
vendored
|
|
@ -145,7 +145,7 @@ jobs:
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
||||||
echo "Publishing @gsd-build/engine-${platform}..."
|
echo "Publishing @sf-build/engine-${platform}..."
|
||||||
cd "native/npm/${platform}"
|
cd "native/npm/${platform}"
|
||||||
OUTPUT=$(npm publish --access public ${{ steps.version-check.outputs.tag_flag }} 2>&1) && echo "$OUTPUT" || {
|
OUTPUT=$(npm publish --access public ${{ steps.version-check.outputs.tag_flag }} 2>&1) && echo "$OUTPUT" || {
|
||||||
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
|
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
|
||||||
|
|
@ -167,7 +167,7 @@ jobs:
|
||||||
for attempt in $(seq 1 5); do
|
for attempt in $(seq 1 5); do
|
||||||
FAILED=0
|
FAILED=0
|
||||||
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
||||||
PKG="@gsd-build/engine-${platform}"
|
PKG="@sf-build/engine-${platform}"
|
||||||
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
|
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
|
||||||
if [ "${PUBLISHED}" != "${VERSION}" ]; then
|
if [ "${PUBLISHED}" != "${VERSION}" ]; then
|
||||||
FAILED=1
|
FAILED=1
|
||||||
|
|
@ -181,7 +181,7 @@ jobs:
|
||||||
if [ "$attempt" = "5" ]; then
|
if [ "$attempt" = "5" ]; then
|
||||||
echo "::error::One or more platform packages not found after 5 attempts. Aborting."
|
echo "::error::One or more platform packages not found after 5 attempts. Aborting."
|
||||||
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
for platform in darwin-arm64 darwin-x64 linux-x64-gnu linux-arm64-gnu win32-x64-msvc; do
|
||||||
PKG="@gsd-build/engine-${platform}"
|
PKG="@sf-build/engine-${platform}"
|
||||||
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
|
PUBLISHED=$(npm view "${PKG}@${VERSION}" version 2>/dev/null || echo "")
|
||||||
if [ "${PUBLISHED}" = "${VERSION}" ]; then
|
if [ "${PUBLISHED}" = "${VERSION}" ]; then
|
||||||
echo " ✓ ${PKG}@${VERSION}"
|
echo " ✓ ${PKG}@${VERSION}"
|
||||||
|
|
@ -231,16 +231,16 @@ jobs:
|
||||||
npm init -y > /dev/null 2>&1
|
npm init -y > /dev/null 2>&1
|
||||||
|
|
||||||
# Wait for npm registry with exponential backoff (5s, 10s, 20s, 30s, 30s, 30s, 30s — max ~155s vs fixed 5min)
|
# Wait for npm registry with exponential backoff (5s, 10s, 20s, 30s, 30s, 30s, 30s — max ~155s vs fixed 5min)
|
||||||
echo "Waiting for gsd-pi@${VERSION} to appear on npm..."
|
echo "Waiting for sf-pi@${VERSION} to appear on npm..."
|
||||||
DELAY=5
|
DELAY=5
|
||||||
for attempt in $(seq 1 8); do
|
for attempt in $(seq 1 8); do
|
||||||
PUBLISHED=$(npm view "gsd-pi@${VERSION}" version 2>/dev/null || echo "")
|
PUBLISHED=$(npm view "sf-pi@${VERSION}" version 2>/dev/null || echo "")
|
||||||
if [ "${PUBLISHED}" = "${VERSION}" ]; then
|
if [ "${PUBLISHED}" = "${VERSION}" ]; then
|
||||||
echo " ✓ Version ${VERSION} visible on npm (attempt ${attempt})"
|
echo " ✓ Version ${VERSION} visible on npm (attempt ${attempt})"
|
||||||
break
|
break
|
||||||
fi
|
fi
|
||||||
if [ "$attempt" = "8" ]; then
|
if [ "$attempt" = "8" ]; then
|
||||||
echo "::warning::gsd-pi@${VERSION} not visible on npm after 8 attempts — skipping smoke test"
|
echo "::warning::sf-pi@${VERSION} not visible on npm after 8 attempts — skipping smoke test"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
echo " Attempt ${attempt}: not yet visible, retrying in ${DELAY}s..."
|
echo " Attempt ${attempt}: not yet visible, retrying in ${DELAY}s..."
|
||||||
|
|
@ -250,15 +250,15 @@ jobs:
|
||||||
done
|
done
|
||||||
|
|
||||||
# Install and verify with backoff (5s, 10s, 20s)
|
# Install and verify with backoff (5s, 10s, 20s)
|
||||||
echo "Installing gsd-pi@${VERSION}..."
|
echo "Installing sf-pi@${VERSION}..."
|
||||||
DELAY=5
|
DELAY=5
|
||||||
for attempt in 1 2 3; do
|
for attempt in 1 2 3; do
|
||||||
if npm install "gsd-pi@${VERSION}" 2>&1 | tee /tmp/install-output.txt; then
|
if npm install "sf-pi@${VERSION}" 2>&1 | tee /tmp/install-output.txt; then
|
||||||
echo " ✓ Install succeeded"
|
echo " ✓ Install succeeded"
|
||||||
RAW=$(node node_modules/gsd-pi/dist/loader.js --version 2>&1 || echo "FAILED")
|
RAW=$(node node_modules/sf-pi/dist/loader.js --version 2>&1 || echo "FAILED")
|
||||||
ACTUAL=$(echo "$RAW" | sed 's/\x1b\[[0-9;]*m//g' | grep -oE "^${VERSION}$" | head -1)
|
ACTUAL=$(echo "$RAW" | sed 's/\x1b\[[0-9;]*m//g' | grep -oE "^${VERSION}$" | head -1)
|
||||||
if [ "$ACTUAL" = "$VERSION" ]; then
|
if [ "$ACTUAL" = "$VERSION" ]; then
|
||||||
echo " ✓ gsd --version = ${VERSION}"
|
echo " ✓ sf --version = ${VERSION}"
|
||||||
echo "Published package is functional"
|
echo "Published package is functional"
|
||||||
exit 0
|
exit 0
|
||||||
else
|
else
|
||||||
|
|
@ -272,7 +272,7 @@ jobs:
|
||||||
sleep "$DELAY"
|
sleep "$DELAY"
|
||||||
DELAY=$((DELAY * 2))
|
DELAY=$((DELAY * 2))
|
||||||
done
|
done
|
||||||
echo "::error::Smoke test failed — gsd-pi@${VERSION} not installable"
|
echo "::error::Smoke test failed — sf-pi@${VERSION} not installable"
|
||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
- name: Verify dist-tag after publish
|
- name: Verify dist-tag after publish
|
||||||
|
|
@ -282,7 +282,7 @@ jobs:
|
||||||
echo "Verifying npm dist-tag 'latest' points to ${VERSION}..."
|
echo "Verifying npm dist-tag 'latest' points to ${VERSION}..."
|
||||||
DELAY=5
|
DELAY=5
|
||||||
for attempt in $(seq 1 6); do
|
for attempt in $(seq 1 6); do
|
||||||
LATEST=$(npm view gsd-pi dist-tags.latest 2>/dev/null || echo "")
|
LATEST=$(npm view sf-pi dist-tags.latest 2>/dev/null || echo "")
|
||||||
if [ "${LATEST}" = "${VERSION}" ]; then
|
if [ "${LATEST}" = "${VERSION}" ]; then
|
||||||
echo " ✓ npm dist-tags.latest = ${VERSION}"
|
echo " ✓ npm dist-tags.latest = ${VERSION}"
|
||||||
exit 0
|
exit 0
|
||||||
|
|
@ -292,5 +292,5 @@ jobs:
|
||||||
DELAY=$((DELAY * 2))
|
DELAY=$((DELAY * 2))
|
||||||
if [ "$DELAY" -gt 30 ]; then DELAY=30; fi
|
if [ "$DELAY" -gt 30 ]; then DELAY=30; fi
|
||||||
done
|
done
|
||||||
echo "::error::dist-tags.latest is '${LATEST}' but expected '${VERSION}' — run: npm dist-tag add gsd-pi@${VERSION} latest"
|
echo "::error::dist-tags.latest is '${LATEST}' but expected '${VERSION}' — run: npm dist-tag add sf-pi@${VERSION} latest"
|
||||||
exit 1
|
exit 1
|
||||||
|
|
|
||||||
16
.github/workflows/ci.yml
vendored
16
.github/workflows/ci.yml
vendored
|
|
@ -95,10 +95,10 @@ jobs:
|
||||||
- name: Scan for base64-encoded secrets
|
- name: Scan for base64-encoded secrets
|
||||||
run: bash scripts/base64-scan.sh --diff origin/main
|
run: bash scripts/base64-scan.sh --diff origin/main
|
||||||
|
|
||||||
- name: Ensure .gsd/ is not checked in
|
- name: Ensure .sf/ is not checked in
|
||||||
run: |
|
run: |
|
||||||
if [ -d ".gsd" ]; then
|
if [ -d ".sf" ]; then
|
||||||
echo "::error::.gsd/ directory must not be checked in"
|
echo "::error::.sf/ directory must not be checked in"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
@ -242,10 +242,10 @@ jobs:
|
||||||
|
|
||||||
- name: Run Windows portability tests
|
- name: Run Windows portability tests
|
||||||
run: >-
|
run: >-
|
||||||
node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs
|
node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs
|
||||||
--experimental-strip-types --test
|
--experimental-strip-types --test
|
||||||
src/tests/windows-portability.test.ts
|
src/tests/windows-portability.test.ts
|
||||||
src/resources/extensions/gsd/tests/validate-directory.test.ts
|
src/resources/extensions/sf/tests/validate-directory.test.ts
|
||||||
src/tests/integration/web-mode-windows-hide.test.ts
|
src/tests/integration/web-mode-windows-hide.test.ts
|
||||||
|
|
||||||
rtk-portability:
|
rtk-portability:
|
||||||
|
|
@ -294,14 +294,14 @@ jobs:
|
||||||
|
|
||||||
- name: Run RTK-focused portability tests
|
- name: Run RTK-focused portability tests
|
||||||
run: >-
|
run: >-
|
||||||
node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs
|
node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs
|
||||||
--experimental-strip-types --experimental-test-isolation=process --test
|
--experimental-strip-types --experimental-test-isolation=process --test
|
||||||
src/tests/rtk.test.ts
|
src/tests/rtk.test.ts
|
||||||
src/tests/rtk-execution-seams.test.ts
|
src/tests/rtk-execution-seams.test.ts
|
||||||
src/tests/postinstall.test.ts
|
src/tests/postinstall.test.ts
|
||||||
src/tests/app-smoke.test.ts
|
src/tests/app-smoke.test.ts
|
||||||
src/resources/extensions/gsd/tests/custom-verification.test.ts
|
src/resources/extensions/sf/tests/custom-verification.test.ts
|
||||||
src/resources/extensions/gsd/tests/verification-gate.test.ts
|
src/resources/extensions/sf/tests/verification-gate.test.ts
|
||||||
|
|
||||||
- name: Generate RTK benchmark evidence
|
- name: Generate RTK benchmark evidence
|
||||||
if: matrix.label == 'linux'
|
if: matrix.label == 'linux'
|
||||||
|
|
|
||||||
2
.github/workflows/cleanup-dev-versions.yml
vendored
2
.github/workflows/cleanup-dev-versions.yml
vendored
|
|
@ -24,7 +24,7 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
PACKAGE="gsd-pi"
|
PACKAGE="sf-pi"
|
||||||
MAX_AGE_DAYS=30
|
MAX_AGE_DAYS=30
|
||||||
CUTOFF=$(date -u -d "-${MAX_AGE_DAYS} days" +%s 2>/dev/null || date -u -v-${MAX_AGE_DAYS}d +%s)
|
CUTOFF=$(date -u -d "-${MAX_AGE_DAYS} days" +%s 2>/dev/null || date -u -v-${MAX_AGE_DAYS}d +%s)
|
||||||
|
|
||||||
|
|
|
||||||
18
.github/workflows/pipeline.yml
vendored
18
.github/workflows/pipeline.yml
vendored
|
|
@ -73,7 +73,7 @@ jobs:
|
||||||
- name: Publish @dev
|
- name: Publish @dev
|
||||||
run: |
|
run: |
|
||||||
VERSION=$(node -e 'process.stdout.write(require("./package.json").version)')
|
VERSION=$(node -e 'process.stdout.write(require("./package.json").version)')
|
||||||
if npm view "gsd-pi@${VERSION}" version 2>/dev/null; then
|
if npm view "sf-pi@${VERSION}" version 2>/dev/null; then
|
||||||
echo "Version ${VERSION} already published — skipping"
|
echo "Version ${VERSION} already published — skipping"
|
||||||
else
|
else
|
||||||
npm publish --tag dev
|
npm publish --tag dev
|
||||||
|
|
@ -100,19 +100,19 @@ jobs:
|
||||||
registry-url: https://registry.npmjs.org
|
registry-url: https://registry.npmjs.org
|
||||||
cache: 'npm'
|
cache: 'npm'
|
||||||
|
|
||||||
- name: Install gsd-pi@dev globally (with registry propagation retry)
|
- name: Install sf-pi@dev globally (with registry propagation retry)
|
||||||
run: |
|
run: |
|
||||||
for i in 1 2 3 4 5 6; do
|
for i in 1 2 3 4 5 6; do
|
||||||
npm install -g gsd-pi@dev && exit 0
|
npm install -g sf-pi@dev && exit 0
|
||||||
echo "Attempt $i failed — waiting 10s for npm registry propagation..."
|
echo "Attempt $i failed — waiting 10s for npm registry propagation..."
|
||||||
sleep 10
|
sleep 10
|
||||||
done
|
done
|
||||||
echo "Failed to install gsd-pi@dev after 6 attempts"
|
echo "Failed to install sf-pi@dev after 6 attempts"
|
||||||
exit 1
|
exit 1
|
||||||
|
|
||||||
- name: Run smoke tests (against installed binary)
|
- name: Run smoke tests (against installed binary)
|
||||||
run: |
|
run: |
|
||||||
export SF_SMOKE_BINARY=$(which gsd)
|
export SF_SMOKE_BINARY=$(which sf)
|
||||||
npm run test:smoke
|
npm run test:smoke
|
||||||
env:
|
env:
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
|
|
@ -125,14 +125,14 @@ jobs:
|
||||||
|
|
||||||
- name: Run live regression tests (against installed binary)
|
- name: Run live regression tests (against installed binary)
|
||||||
run: |
|
run: |
|
||||||
export SF_SMOKE_BINARY=$(which gsd)
|
export SF_SMOKE_BINARY=$(which sf)
|
||||||
npm run test:live-regression
|
npm run test:live-regression
|
||||||
|
|
||||||
- name: Promote to @next
|
- name: Promote to @next
|
||||||
env:
|
env:
|
||||||
DEV_VERSION: ${{ needs.dev-publish.outputs.dev-version }}
|
DEV_VERSION: ${{ needs.dev-publish.outputs.dev-version }}
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
run: npm dist-tag add "gsd-pi@${DEV_VERSION}" next
|
run: npm dist-tag add "sf-pi@${DEV_VERSION}" next
|
||||||
|
|
||||||
- name: Log in to GHCR
|
- name: Log in to GHCR
|
||||||
uses: docker/login-action@v4
|
uses: docker/login-action@v4
|
||||||
|
|
@ -235,7 +235,7 @@ jobs:
|
||||||
OUTPUT=$(npm publish 2>&1) && echo "$OUTPUT" || {
|
OUTPUT=$(npm publish 2>&1) && echo "$OUTPUT" || {
|
||||||
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
|
if echo "$OUTPUT" | grep -q "cannot publish over the previously published"; then
|
||||||
echo "Version already published — promoting to latest"
|
echo "Version already published — promoting to latest"
|
||||||
npm dist-tag add "gsd-pi@${RELEASE_VERSION}" latest
|
npm dist-tag add "sf-pi@${RELEASE_VERSION}" latest
|
||||||
else
|
else
|
||||||
echo "$OUTPUT"
|
echo "$OUTPUT"
|
||||||
exit 1
|
exit 1
|
||||||
|
|
@ -268,7 +268,7 @@ jobs:
|
||||||
NOTES=$(cat /tmp/release-notes.md)
|
NOTES=$(cat /tmp/release-notes.md)
|
||||||
curl -s -X POST "$DISCORD_WEBHOOK" \
|
curl -s -X POST "$DISCORD_WEBHOOK" \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
-d "$(jq -n --arg c "**SF v${RELEASE_VERSION} Released**\n\n${NOTES}\n\n\`npm i gsd-pi@${RELEASE_VERSION}\`" '{content:$c}')"
|
-d "$(jq -n --arg c "**SF v${RELEASE_VERSION} Released**\n\n${NOTES}\n\n\`npm i sf-pi@${RELEASE_VERSION}\`" '{content:$c}')"
|
||||||
|
|
||||||
- name: Log in to GHCR
|
- name: Log in to GHCR
|
||||||
uses: docker/login-action@v4
|
uses: docker/login-action@v4
|
||||||
|
|
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
|
|
@ -72,7 +72,7 @@ docs/coherence-audit/
|
||||||
|
|
||||||
# ── SF project state (per-worktree, never committed) ──
|
# ── SF project state (per-worktree, never committed) ──
|
||||||
.sf/
|
.sf/
|
||||||
.gsd/
|
.sf/
|
||||||
|
|
||||||
# ── Stale lock files (npm is canonical) ──
|
# ── Stale lock files (npm is canonical) ──
|
||||||
pnpm-lock.yaml
|
pnpm-lock.yaml
|
||||||
|
|
|
||||||
482
.gsd/CODEBASE.md
Normal file
482
.gsd/CODEBASE.md
Normal file
|
|
@ -0,0 +1,482 @@
|
||||||
|
# Codebase Map
|
||||||
|
|
||||||
|
Generated: 2026-04-15T12:09:27Z | Files: 500 | Described: 0/500
|
||||||
|
<!-- gsd:codebase-meta {"generatedAt":"2026-04-15T12:09:27Z","fingerprint":"447265c2205a9bc92066b5de4a0866717d17b961","fileCount":500,"truncated":true} -->
|
||||||
|
Note: Truncated to first 500 files. Run with higher --max-files to include all.
|
||||||
|
|
||||||
|
### (root)/
|
||||||
|
- `.dockerignore`
|
||||||
|
- `.gitignore`
|
||||||
|
- `.npmignore`
|
||||||
|
- `.npmrc`
|
||||||
|
- `.prompt-injection-scanignore`
|
||||||
|
- `.secretscanignore`
|
||||||
|
- `CHANGELOG.md`
|
||||||
|
- `CONTRIBUTING.md`
|
||||||
|
- `Dockerfile`
|
||||||
|
- `flake.nix`
|
||||||
|
- `LICENSE`
|
||||||
|
- `package-lock.json`
|
||||||
|
- `package.json`
|
||||||
|
- `README.md`
|
||||||
|
- `VISION.md`
|
||||||
|
|
||||||
|
### .github/
|
||||||
|
- `.github/CODEOWNERS`
|
||||||
|
- `.github/FUNDING.yml`
|
||||||
|
- `.github/PULL_REQUEST_TEMPLATE.md`
|
||||||
|
|
||||||
|
### .github/ISSUE_TEMPLATE/
|
||||||
|
- `.github/ISSUE_TEMPLATE/bug_report.yml`
|
||||||
|
- `.github/ISSUE_TEMPLATE/config.yml`
|
||||||
|
- `.github/ISSUE_TEMPLATE/feature_request.yml`
|
||||||
|
|
||||||
|
### .github/workflows/
|
||||||
|
- `.github/workflows/ai-triage.yml`
|
||||||
|
- `.github/workflows/build-native.yml`
|
||||||
|
- `.github/workflows/ci.yml`
|
||||||
|
- `.github/workflows/cleanup-dev-versions.yml`
|
||||||
|
- `.github/workflows/pipeline.yml`
|
||||||
|
- `.github/workflows/pr-risk.yml`
|
||||||
|
|
||||||
|
### bin/
|
||||||
|
- `bin/gsd-from-source`
|
||||||
|
|
||||||
|
### docker/
|
||||||
|
- `docker/.env.example`
|
||||||
|
- `docker/bootstrap.sh`
|
||||||
|
- `docker/docker-compose.full.yaml`
|
||||||
|
- `docker/docker-compose.yaml`
|
||||||
|
- `docker/Dockerfile.ci-builder`
|
||||||
|
- `docker/Dockerfile.sandbox`
|
||||||
|
- `docker/entrypoint.sh`
|
||||||
|
- `docker/README.md`
|
||||||
|
|
||||||
|
### docs/
|
||||||
|
- `docs/README.md`
|
||||||
|
|
||||||
|
### docs/dev/
|
||||||
|
- `docs/dev/ADR-001-branchless-worktree-architecture.md`
|
||||||
|
- `docs/dev/ADR-003-pipeline-simplification.md`
|
||||||
|
- `docs/dev/ADR-004-capability-aware-model-routing.md`
|
||||||
|
- `docs/dev/ADR-005-multi-model-provider-tool-strategy.md`
|
||||||
|
- `docs/dev/ADR-007-model-catalog-split.md`
|
||||||
|
- `docs/dev/ADR-008-gsd-tools-over-mcp-for-provider-parity.md`
|
||||||
|
- `docs/dev/ADR-008-IMPLEMENTATION-PLAN.md`
|
||||||
|
- `docs/dev/ADR-009-IMPLEMENTATION-PLAN.md`
|
||||||
|
- `docs/dev/ADR-009-orchestration-kernel-refactor.md`
|
||||||
|
- `docs/dev/ADR-010-pi-clean-seam-architecture.md`
|
||||||
|
- `docs/dev/agent-knowledge-index.md`
|
||||||
|
- `docs/dev/architecture.md`
|
||||||
|
- `docs/dev/ci-cd-pipeline.md`
|
||||||
|
- `docs/dev/FILE-SYSTEM-MAP.md`
|
||||||
|
- `docs/dev/FRONTIER-TECHNIQUES.md`
|
||||||
|
- `docs/dev/pi-context-optimization-opportunities.md`
|
||||||
|
- `docs/dev/PRD-branchless-worktree-architecture.md`
|
||||||
|
- `docs/dev/PRD-pi-clean-seam-refactor.md`
|
||||||
|
|
||||||
|
### docs/dev/building-coding-agents/
|
||||||
|
- *(27 files: 27 .md)*
|
||||||
|
|
||||||
|
### docs/dev/context-and-hooks/
|
||||||
|
- `docs/dev/context-and-hooks/01-the-context-pipeline.md`
|
||||||
|
- `docs/dev/context-and-hooks/02-hook-reference.md`
|
||||||
|
- `docs/dev/context-and-hooks/03-context-injection-patterns.md`
|
||||||
|
- `docs/dev/context-and-hooks/04-message-types-and-llm-visibility.md`
|
||||||
|
- `docs/dev/context-and-hooks/05-inter-extension-communication.md`
|
||||||
|
- `docs/dev/context-and-hooks/06-advanced-patterns-from-source.md`
|
||||||
|
- `docs/dev/context-and-hooks/07-the-system-prompt-anatomy.md`
|
||||||
|
- `docs/dev/context-and-hooks/README.md`
|
||||||
|
|
||||||
|
### docs/dev/extending-pi/
|
||||||
|
- *(26 files: 26 .md)*
|
||||||
|
|
||||||
|
### docs/dev/pi-ui-tui/
|
||||||
|
- *(24 files: 24 .md)*
|
||||||
|
|
||||||
|
### docs/dev/proposals/
|
||||||
|
- `docs/dev/proposals/698-browser-tools-feature-additions.md`
|
||||||
|
- `docs/dev/proposals/rfc-gitops-branching-strategy.md`
|
||||||
|
|
||||||
|
### docs/dev/proposals/workflows/
|
||||||
|
- `docs/dev/proposals/workflows/backmerge.yml`
|
||||||
|
- `docs/dev/proposals/workflows/create-release.yml`
|
||||||
|
- `docs/dev/proposals/workflows/README.md`
|
||||||
|
- `docs/dev/proposals/workflows/sync-next.yml`
|
||||||
|
|
||||||
|
### docs/dev/superpowers/plans/
|
||||||
|
- `docs/dev/superpowers/plans/2026-03-17-cicd-pipeline.md`
|
||||||
|
|
||||||
|
### docs/dev/superpowers/specs/
|
||||||
|
- `docs/dev/superpowers/specs/2026-03-17-cicd-pipeline-design.md`
|
||||||
|
|
||||||
|
### docs/dev/what-is-pi/
|
||||||
|
- `docs/dev/what-is-pi/01-what-pi-is.md`
|
||||||
|
- `docs/dev/what-is-pi/02-design-philosophy.md`
|
||||||
|
- `docs/dev/what-is-pi/03-the-four-modes-of-operation.md`
|
||||||
|
- `docs/dev/what-is-pi/04-the-architecture-how-everything-fits-together.md`
|
||||||
|
- `docs/dev/what-is-pi/05-the-agent-loop-how-pi-thinks.md`
|
||||||
|
- `docs/dev/what-is-pi/06-tools-how-pi-acts-on-the-world.md`
|
||||||
|
- `docs/dev/what-is-pi/07-sessions-memory-that-branches.md`
|
||||||
|
- `docs/dev/what-is-pi/08-compaction-how-pi-manages-context-limits.md`
|
||||||
|
- `docs/dev/what-is-pi/09-the-customization-stack.md`
|
||||||
|
- `docs/dev/what-is-pi/10-providers-models-multi-model-by-default.md`
|
||||||
|
- `docs/dev/what-is-pi/11-the-interactive-tui.md`
|
||||||
|
- `docs/dev/what-is-pi/12-the-message-queue-talking-while-pi-thinks.md`
|
||||||
|
- `docs/dev/what-is-pi/13-context-files-project-instructions.md`
|
||||||
|
- `docs/dev/what-is-pi/14-the-sdk-rpc-embedding-pi.md`
|
||||||
|
- `docs/dev/what-is-pi/15-pi-packages-the-ecosystem.md`
|
||||||
|
- `docs/dev/what-is-pi/16-why-pi-matters-what-makes-it-different.md`
|
||||||
|
- `docs/dev/what-is-pi/17-file-reference-all-documentation.md`
|
||||||
|
- `docs/dev/what-is-pi/18-quick-reference-commands-shortcuts.md`
|
||||||
|
- `docs/dev/what-is-pi/19-building-branded-apps-on-top-of-pi.md`
|
||||||
|
- `docs/dev/what-is-pi/README.md`
|
||||||
|
|
||||||
|
### docs/user-docs/
|
||||||
|
- *(21 files: 21 .md)*
|
||||||
|
|
||||||
|
### docs/zh-CN/
|
||||||
|
- `docs/zh-CN/README.md`
|
||||||
|
|
||||||
|
### docs/zh-CN/user-docs/
|
||||||
|
- *(21 files: 21 .md)*
|
||||||
|
|
||||||
|
### gitbook/
|
||||||
|
- `gitbook/README.md`
|
||||||
|
- `gitbook/SUMMARY.md`
|
||||||
|
|
||||||
|
### gitbook/configuration/
|
||||||
|
- `gitbook/configuration/custom-models.md`
|
||||||
|
- `gitbook/configuration/git-settings.md`
|
||||||
|
- `gitbook/configuration/mcp-servers.md`
|
||||||
|
- `gitbook/configuration/notifications.md`
|
||||||
|
- `gitbook/configuration/preferences.md`
|
||||||
|
- `gitbook/configuration/providers.md`
|
||||||
|
|
||||||
|
### gitbook/core-concepts/
|
||||||
|
- `gitbook/core-concepts/auto-mode.md`
|
||||||
|
- `gitbook/core-concepts/project-structure.md`
|
||||||
|
- `gitbook/core-concepts/step-mode.md`
|
||||||
|
|
||||||
|
### gitbook/features/
|
||||||
|
- `gitbook/features/captures.md`
|
||||||
|
- `gitbook/features/cost-management.md`
|
||||||
|
- `gitbook/features/dynamic-model-routing.md`
|
||||||
|
- `gitbook/features/github-sync.md`
|
||||||
|
- `gitbook/features/headless.md`
|
||||||
|
- `gitbook/features/parallel.md`
|
||||||
|
- `gitbook/features/remote-questions.md`
|
||||||
|
- `gitbook/features/skills.md`
|
||||||
|
- `gitbook/features/teams.md`
|
||||||
|
- `gitbook/features/token-optimization.md`
|
||||||
|
- `gitbook/features/visualizer.md`
|
||||||
|
- `gitbook/features/web-interface.md`
|
||||||
|
- `gitbook/features/workflow-templates.md`
|
||||||
|
|
||||||
|
### gitbook/getting-started/
|
||||||
|
- `gitbook/getting-started/choosing-a-model.md`
|
||||||
|
- `gitbook/getting-started/first-project.md`
|
||||||
|
- `gitbook/getting-started/installation.md`
|
||||||
|
|
||||||
|
### gitbook/reference/
|
||||||
|
- `gitbook/reference/cli-flags.md`
|
||||||
|
- `gitbook/reference/commands.md`
|
||||||
|
- `gitbook/reference/environment-variables.md`
|
||||||
|
- `gitbook/reference/keyboard-shortcuts.md`
|
||||||
|
- `gitbook/reference/migration.md`
|
||||||
|
- `gitbook/reference/troubleshooting.md`
|
||||||
|
|
||||||
|
### sf-orchestrator/
|
||||||
|
- `sf-orchestrator/SKILL.md`
|
||||||
|
|
||||||
|
### sf-orchestrator/references/
|
||||||
|
- `sf-orchestrator/references/answer-injection.md`
|
||||||
|
- `sf-orchestrator/references/commands.md`
|
||||||
|
- `sf-orchestrator/references/json-result.md`
|
||||||
|
|
||||||
|
### sf-orchestrator/templates/
|
||||||
|
- `sf-orchestrator/templates/spec.md`
|
||||||
|
|
||||||
|
### sf-orchestrator/workflows/
|
||||||
|
- `sf-orchestrator/workflows/build-from-spec.md`
|
||||||
|
- `sf-orchestrator/workflows/monitor-and-poll.md`
|
||||||
|
- `sf-orchestrator/workflows/step-by-step.md`
|
||||||
|
|
||||||
|
### mintlify-docs/
|
||||||
|
- `mintlify-docs/docs`
|
||||||
|
- `mintlify-docs/docs.json`
|
||||||
|
- `mintlify-docs/getting-started.mdx`
|
||||||
|
- `mintlify-docs/introduction.mdx`
|
||||||
|
|
||||||
|
### mintlify-docs/guides/
|
||||||
|
- `mintlify-docs/guides/auto-mode.mdx`
|
||||||
|
- `mintlify-docs/guides/captures-triage.mdx`
|
||||||
|
- `mintlify-docs/guides/change-management.mdx`
|
||||||
|
- `mintlify-docs/guides/commands.mdx`
|
||||||
|
- `mintlify-docs/guides/configuration.mdx`
|
||||||
|
- `mintlify-docs/guides/cost-management.mdx`
|
||||||
|
- `mintlify-docs/guides/custom-models.mdx`
|
||||||
|
- `mintlify-docs/guides/dynamic-model-routing.mdx`
|
||||||
|
- `mintlify-docs/guides/git-strategy.mdx`
|
||||||
|
- `mintlify-docs/guides/migration.mdx`
|
||||||
|
- `mintlify-docs/guides/parallel-orchestration.mdx`
|
||||||
|
- `mintlify-docs/guides/remote-questions.mdx`
|
||||||
|
- `mintlify-docs/guides/skills.mdx`
|
||||||
|
- `mintlify-docs/guides/token-optimization.mdx`
|
||||||
|
- `mintlify-docs/guides/troubleshooting.mdx`
|
||||||
|
- `mintlify-docs/guides/visualizer.mdx`
|
||||||
|
- `mintlify-docs/guides/web-interface.mdx`
|
||||||
|
- `mintlify-docs/guides/working-in-teams.mdx`
|
||||||
|
|
||||||
|
### native/
|
||||||
|
- `native/.gitignore`
|
||||||
|
- `native/.npmignore`
|
||||||
|
- `native/Cargo.toml`
|
||||||
|
- `native/README.md`
|
||||||
|
|
||||||
|
### native/.cargo/
|
||||||
|
- `native/.cargo/config.toml`
|
||||||
|
|
||||||
|
### native/crates/ast/
|
||||||
|
- `native/crates/ast/Cargo.toml`
|
||||||
|
|
||||||
|
### native/crates/ast/src/
|
||||||
|
- `native/crates/ast/src/ast.rs`
|
||||||
|
- `native/crates/ast/src/glob_util.rs`
|
||||||
|
- `native/crates/ast/src/lib.rs`
|
||||||
|
|
||||||
|
### native/crates/ast/src/language/
|
||||||
|
- `native/crates/ast/src/language/mod.rs`
|
||||||
|
- `native/crates/ast/src/language/parsers.rs`
|
||||||
|
|
||||||
|
### native/crates/engine/
|
||||||
|
- `native/crates/engine/build.rs`
|
||||||
|
- `native/crates/engine/Cargo.toml`
|
||||||
|
|
||||||
|
### native/crates/engine/src/
|
||||||
|
- *(22 files: 22 .rs)*
|
||||||
|
|
||||||
|
### native/crates/grep/
|
||||||
|
- `native/crates/grep/Cargo.toml`
|
||||||
|
|
||||||
|
### native/crates/grep/src/
|
||||||
|
- `native/crates/grep/src/lib.rs`
|
||||||
|
|
||||||
|
### native/npm/darwin-arm64/
|
||||||
|
- `native/npm/darwin-arm64/package.json`
|
||||||
|
|
||||||
|
### native/npm/darwin-x64/
|
||||||
|
- `native/npm/darwin-x64/package.json`
|
||||||
|
|
||||||
|
### native/npm/linux-arm64-gnu/
|
||||||
|
- `native/npm/linux-arm64-gnu/package.json`
|
||||||
|
|
||||||
|
### native/npm/linux-x64-gnu/
|
||||||
|
- `native/npm/linux-x64-gnu/package.json`
|
||||||
|
|
||||||
|
### native/npm/win32-x64-msvc/
|
||||||
|
- `native/npm/win32-x64-msvc/package.json`
|
||||||
|
|
||||||
|
### native/scripts/
|
||||||
|
- `native/scripts/build.js`
|
||||||
|
- `native/scripts/sync-platform-versions.cjs`
|
||||||
|
|
||||||
|
### packages/daemon/
|
||||||
|
- `packages/daemon/package.json`
|
||||||
|
- `packages/daemon/tsconfig.json`
|
||||||
|
|
||||||
|
### packages/daemon/src/
|
||||||
|
- *(27 files: 27 .ts)*
|
||||||
|
|
||||||
|
### packages/mcp-server/
|
||||||
|
- `packages/mcp-server/.npmignore`
|
||||||
|
- `packages/mcp-server/package.json`
|
||||||
|
- `packages/mcp-server/README.md`
|
||||||
|
- `packages/mcp-server/tsconfig.json`
|
||||||
|
|
||||||
|
### packages/mcp-server/src/
|
||||||
|
- `packages/mcp-server/src/cli.ts`
|
||||||
|
- `packages/mcp-server/src/env-writer.test.ts`
|
||||||
|
- `packages/mcp-server/src/env-writer.ts`
|
||||||
|
- `packages/mcp-server/src/import-candidates.test.ts`
|
||||||
|
- `packages/mcp-server/src/index.ts`
|
||||||
|
- `packages/mcp-server/src/mcp-server.test.ts`
|
||||||
|
- `packages/mcp-server/src/secure-env-collect.test.ts`
|
||||||
|
- `packages/mcp-server/src/server.ts`
|
||||||
|
- `packages/mcp-server/src/session-manager.ts`
|
||||||
|
- `packages/mcp-server/src/tool-credentials.test.ts`
|
||||||
|
- `packages/mcp-server/src/tool-credentials.ts`
|
||||||
|
- `packages/mcp-server/src/types.ts`
|
||||||
|
- `packages/mcp-server/src/workflow-tools.test.ts`
|
||||||
|
- `packages/mcp-server/src/workflow-tools.ts`
|
||||||
|
|
||||||
|
### packages/mcp-server/src/readers/
|
||||||
|
- `packages/mcp-server/src/readers/captures.ts`
|
||||||
|
- `packages/mcp-server/src/readers/doctor-lite.ts`
|
||||||
|
- `packages/mcp-server/src/readers/graph.test.ts`
|
||||||
|
- `packages/mcp-server/src/readers/graph.ts`
|
||||||
|
- `packages/mcp-server/src/readers/index.ts`
|
||||||
|
- `packages/mcp-server/src/readers/knowledge.ts`
|
||||||
|
- `packages/mcp-server/src/readers/metrics.ts`
|
||||||
|
- `packages/mcp-server/src/readers/paths.ts`
|
||||||
|
- `packages/mcp-server/src/readers/readers.test.ts`
|
||||||
|
- `packages/mcp-server/src/readers/roadmap.ts`
|
||||||
|
- `packages/mcp-server/src/readers/state.ts`
|
||||||
|
|
||||||
|
### packages/native/
|
||||||
|
- `packages/native/package.json`
|
||||||
|
- `packages/native/tsconfig.json`
|
||||||
|
|
||||||
|
### packages/native/src/
|
||||||
|
- `packages/native/src/index.ts`
|
||||||
|
- `packages/native/src/native.ts`
|
||||||
|
|
||||||
|
### packages/native/src/__tests__/
|
||||||
|
- `packages/native/src/__tests__/clipboard.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/diff.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/fd.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/glob.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/grep.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/highlight.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/html.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/image.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/json-parse.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/module-compat.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/ps.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/stream-process.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/text.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/truncate.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/ttsr.test.mjs`
|
||||||
|
- `packages/native/src/__tests__/xxhash.test.mjs`
|
||||||
|
|
||||||
|
### packages/native/src/ast/
|
||||||
|
- `packages/native/src/ast/index.ts`
|
||||||
|
- `packages/native/src/ast/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/clipboard/
|
||||||
|
- `packages/native/src/clipboard/index.ts`
|
||||||
|
- `packages/native/src/clipboard/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/diff/
|
||||||
|
- `packages/native/src/diff/index.ts`
|
||||||
|
- `packages/native/src/diff/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/fd/
|
||||||
|
- `packages/native/src/fd/index.ts`
|
||||||
|
- `packages/native/src/fd/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/glob/
|
||||||
|
- `packages/native/src/glob/index.ts`
|
||||||
|
- `packages/native/src/glob/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/grep/
|
||||||
|
- `packages/native/src/grep/index.ts`
|
||||||
|
- `packages/native/src/grep/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/gsd-parser/
|
||||||
|
- `packages/native/src/gsd-parser/index.ts`
|
||||||
|
- `packages/native/src/gsd-parser/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/highlight/
|
||||||
|
- `packages/native/src/highlight/index.ts`
|
||||||
|
- `packages/native/src/highlight/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/html/
|
||||||
|
- `packages/native/src/html/index.ts`
|
||||||
|
- `packages/native/src/html/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/image/
|
||||||
|
- `packages/native/src/image/index.ts`
|
||||||
|
- `packages/native/src/image/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/json-parse/
|
||||||
|
- `packages/native/src/json-parse/index.ts`
|
||||||
|
|
||||||
|
### packages/native/src/ps/
|
||||||
|
- `packages/native/src/ps/index.ts`
|
||||||
|
- `packages/native/src/ps/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/stream-process/
|
||||||
|
- `packages/native/src/stream-process/index.ts`
|
||||||
|
|
||||||
|
### packages/native/src/text/
|
||||||
|
- `packages/native/src/text/index.ts`
|
||||||
|
- `packages/native/src/text/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/truncate/
|
||||||
|
- `packages/native/src/truncate/index.ts`
|
||||||
|
|
||||||
|
### packages/native/src/ttsr/
|
||||||
|
- `packages/native/src/ttsr/index.ts`
|
||||||
|
- `packages/native/src/ttsr/types.ts`
|
||||||
|
|
||||||
|
### packages/native/src/xxhash/
|
||||||
|
- `packages/native/src/xxhash/index.ts`
|
||||||
|
|
||||||
|
### packages/pi-agent-core/
|
||||||
|
- `packages/pi-agent-core/package.json`
|
||||||
|
- `packages/pi-agent-core/tsconfig.json`
|
||||||
|
|
||||||
|
### packages/pi-agent-core/src/
|
||||||
|
- `packages/pi-agent-core/src/agent-loop.test.ts`
|
||||||
|
- `packages/pi-agent-core/src/agent-loop.ts`
|
||||||
|
- `packages/pi-agent-core/src/agent.test.ts`
|
||||||
|
- `packages/pi-agent-core/src/agent.ts`
|
||||||
|
- `packages/pi-agent-core/src/index.ts`
|
||||||
|
- `packages/pi-agent-core/src/proxy.ts`
|
||||||
|
- `packages/pi-agent-core/src/types.ts`
|
||||||
|
|
||||||
|
### packages/pi-ai/
|
||||||
|
- `packages/pi-ai/bedrock-provider.d.ts`
|
||||||
|
- `packages/pi-ai/bedrock-provider.js`
|
||||||
|
- `packages/pi-ai/oauth.d.ts`
|
||||||
|
- `packages/pi-ai/oauth.js`
|
||||||
|
- `packages/pi-ai/package.json`
|
||||||
|
|
||||||
|
### packages/pi-ai/scripts/
|
||||||
|
- `packages/pi-ai/scripts/generate-models.ts`
|
||||||
|
|
||||||
|
### packages/pi-ai/src/
|
||||||
|
- `packages/pi-ai/src/api-registry.ts`
|
||||||
|
- `packages/pi-ai/src/bedrock-provider.ts`
|
||||||
|
- `packages/pi-ai/src/cli.ts`
|
||||||
|
- `packages/pi-ai/src/env-api-keys.ts`
|
||||||
|
- `packages/pi-ai/src/index.ts`
|
||||||
|
- `packages/pi-ai/src/models.custom.ts`
|
||||||
|
- `packages/pi-ai/src/models.generated.test.ts`
|
||||||
|
- `packages/pi-ai/src/models.generated.ts`
|
||||||
|
- `packages/pi-ai/src/models.test.ts`
|
||||||
|
- `packages/pi-ai/src/models.ts`
|
||||||
|
- `packages/pi-ai/src/oauth.ts`
|
||||||
|
- `packages/pi-ai/src/stream.ts`
|
||||||
|
- `packages/pi-ai/src/types.ts`
|
||||||
|
- `packages/pi-ai/src/web-runtime-env-api-keys.ts`
|
||||||
|
|
||||||
|
### packages/pi-ai/src/providers/
|
||||||
|
- *(25 files: 25 .ts)*
|
||||||
|
|
||||||
|
### packages/pi-ai/src/utils/
|
||||||
|
- `packages/pi-ai/src/utils/event-stream.ts`
|
||||||
|
- `packages/pi-ai/src/utils/hash.ts`
|
||||||
|
- `packages/pi-ai/src/utils/json-parse.ts`
|
||||||
|
- `packages/pi-ai/src/utils/overflow.ts`
|
||||||
|
- `packages/pi-ai/src/utils/repair-tool-json.ts`
|
||||||
|
- `packages/pi-ai/src/utils/sanitize-unicode.ts`
|
||||||
|
- `packages/pi-ai/src/utils/typebox-helpers.ts`
|
||||||
|
- `packages/pi-ai/src/utils/validation.ts`
|
||||||
|
|
||||||
|
### packages/pi-ai/src/utils/oauth/
|
||||||
|
- `packages/pi-ai/src/utils/oauth/github-copilot.test.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/github-copilot.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/google-antigravity.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/google-gemini-cli.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/google-oauth-utils.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/index.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/openai-codex.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/pkce.ts`
|
||||||
|
- `packages/pi-ai/src/utils/oauth/types.ts`
|
||||||
|
|
||||||
|
### packages/pi-ai/src/utils/tests/
|
||||||
|
- `packages/pi-ai/src/utils/tests/json-parse.test.ts`
|
||||||
|
- `packages/pi-ai/src/utils/tests/overflow.test.ts`
|
||||||
|
- `packages/pi-ai/src/utils/tests/repair-tool-json.test.ts`
|
||||||
4
.gsd/audit/events.jsonl
Normal file
4
.gsd/audit/events.jsonl
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
{"eventId":"9567a0bc-d8a2-410d-83a8-4ea091e095a7","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.561Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
|
||||||
|
{"eventId":"d1765e7e-d2dc-4417-9fb8-0bec6e01e9a8","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T10:50:29.563Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}
|
||||||
|
{"eventId":"9c2b6de3-b8eb-4a51-af8a-91be51fecfc9","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.516Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"retry","failureClass":"timeout","attempt":1,"maxAttempts":2,"retryable":true}}
|
||||||
|
{"eventId":"8597d568-05b8-43ed-89d7-ca4673079e0f","traceId":"trace-a","turnId":"turn-a","category":"gate","type":"gate-run","ts":"2026-04-15T13:00:19.518Z","payload":{"gateId":"timeout-gate","gateType":"verification","outcome":"pass","failureClass":"none","attempt":2,"maxAttempts":1,"retryable":false}}
|
||||||
10
.gsd/notifications.jsonl
Normal file
10
.gsd/notifications.jsonl
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
{"id":"76bf27b0-01bf-4260-80f6-b7d8249c6875","ts":"2026-04-15T06:32:30.018Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
|
||||||
|
{"id":"597c94ae-7c3b-48dd-89b1-be8d0bbd02ee","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
|
||||||
|
{"id":"dc176d95-8171-4d15-8c73-97ddb704a786","ts":"2026-04-15T06:32:30.019Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
|
||||||
|
{"id":"66762fce-d6c6-41db-be03-d34348aaccd9","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
|
||||||
|
{"id":"b7e5e997-b98d-4b50-a6f3-017a916dd2ac","ts":"2026-04-15T06:33:47.201Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
|
||||||
|
{"id":"eccbb677-be17-44b9-a7b6-440ebf777a89","ts":"2026-04-15T06:33:47.202Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
|
||||||
|
{"id":"98803c8a-c9f1-43bd-9903-f67fea7a5128","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"[gsd-learning] wrote 0 fallback chain(s) (0 total entries) to /home/mhugo/.gsd/agent/settings.json","source":"notify","read":false}
|
||||||
|
{"id":"a9253906-1990-4957-9c1a-36046b8d3cfa","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"gsd-learning: active — 40 models with priors, db at /home/mhugo/.gsd/gsd-learning.db","source":"notify","read":false}
|
||||||
|
{"id":"8caa4904-0ce5-46f4-b645-df5077fb229e","ts":"2026-04-15T06:36:16.506Z","severity":"info","message":"MCP client ready — 7 server(s) configured","source":"notify","read":false}
|
||||||
|
{"id":"eb520a00-567d-4c02-bb2e-6111089dc3de","ts":"2026-04-15T09:03:17.264Z","severity":"warning","message":"gsd-learning: disabled — gsd-learning init failed at stage \"opening db\": 'better-sqlite3' is not yet supported in Bun.\nTrack the status in https://github.com/oven-sh/bun/issues/4290\nIn the meantime, you could try bun:sqlite which has a similar API.","source":"notify","read":false}
|
||||||
|
|
@ -1,2 +1,2 @@
|
||||||
# False positives in SF prompt templates — these are legitimate LLM instructions, not injection
|
# False positives in SF prompt templates — these are legitimate LLM instructions, not injection
|
||||||
src/resources/extensions/gsd/prompts/doctor-heal.md:You are now responsible
|
src/resources/extensions/sf/prompts/doctor-heal.md:You are now responsible
|
||||||
|
|
|
||||||
|
|
@ -23,7 +23,7 @@ src/tests/integration/web-mode-runtime-fixtures.ts:sk-runtime-recovery-secret
|
||||||
src/tests/web-onboarding-contract.test.ts:sk-test-secret
|
src/tests/web-onboarding-contract.test.ts:sk-test-secret
|
||||||
|
|
||||||
# Doctor environment tests use dummy localhost DB URLs
|
# Doctor environment tests use dummy localhost DB URLs
|
||||||
src/resources/extensions/gsd/tests/doctor-environment.test.ts:postgres://localhost
|
src/resources/extensions/sf/tests/doctor-environment.test.ts:postgres://localhost
|
||||||
|
|
||||||
|
|
||||||
# Documentation examples
|
# Documentation examples
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
# ──────────────────────────────────────────────
|
# ──────────────────────────────────────────────
|
||||||
# Runtime
|
# Runtime
|
||||||
# Image: ghcr.io/gsd-build/sf-run
|
# Image: ghcr.io/sf-build/sf-run
|
||||||
# Used by: end users via docker run
|
# Used by: end users via docker run
|
||||||
# ──────────────────────────────────────────────
|
# ──────────────────────────────────────────────
|
||||||
FROM node:24-slim AS runtime
|
FROM node:24-slim AS runtime
|
||||||
|
|
@ -17,5 +17,5 @@ RUN npm install -g sf-run@${SF_VERSION}
|
||||||
# Default working directory for user projects
|
# Default working directory for user projects
|
||||||
WORKDIR /workspace
|
WORKDIR /workspace
|
||||||
|
|
||||||
ENTRYPOINT ["gsd"]
|
ENTRYPOINT ["sf"]
|
||||||
CMD ["--help"]
|
CMD ["--help"]
|
||||||
|
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
#
|
|
||||||
# gsd-from-source — run SF directly from this source checkout via bun.
|
|
||||||
#
|
|
||||||
# Purpose: every local commit in this repo (e.g. the #4251 fix) is live
|
|
||||||
# immediately without reinstalling the bun-packaged sf-run. Subagents can
|
|
||||||
# spawn gsd by pointing SF_BIN_PATH at this script instead of dist/loader.js.
|
|
||||||
#
|
|
||||||
# Contract:
|
|
||||||
# - Executable shim spawn() / exec() can launch directly.
|
|
||||||
# - Exports SF_BIN_PATH before handing off to loader.ts so loader.ts's
|
|
||||||
# `SF_BIN_PATH ||= process.argv[1]` branch preserves the shim path
|
|
||||||
# instead of clobbering it with the .ts loader path (which is not
|
|
||||||
# directly executable by child_process.spawn).
|
|
||||||
#
|
|
||||||
# Requirements: bun on PATH, node_modules populated (`bun install` once).
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
SCRIPT_DIR=$(cd -- "$(dirname -- "$(readlink -f "${BASH_SOURCE[0]}")")" &>/dev/null && pwd)
|
|
||||||
SF_SOURCE_ROOT=$(cd -- "$SCRIPT_DIR/.." &>/dev/null && pwd)
|
|
||||||
|
|
||||||
export SF_BIN_PATH="$SCRIPT_DIR/gsd-from-source"
|
|
||||||
|
|
||||||
exec bun run "$SF_SOURCE_ROOT/src/loader.ts" "$@"
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
# ──────────────────────────────────────────────
|
# ──────────────────────────────────────────────
|
||||||
# CI Builder
|
# CI Builder
|
||||||
# Image: ghcr.io/gsd-build/gsd-ci-builder
|
# Image: ghcr.io/sf-build/sf-ci-builder
|
||||||
# Used by: pipeline.yml Dev stage
|
# Used by: pipeline.yml Dev stage
|
||||||
# ──────────────────────────────────────────────
|
# ──────────────────────────────────────────────
|
||||||
FROM node:24-bookworm
|
FROM node:24-bookworm
|
||||||
|
|
|
||||||
|
|
@ -20,17 +20,17 @@ ARG SF_VERSION=latest
|
||||||
RUN npm install -g sf-run@${SF_VERSION}
|
RUN npm install -g sf-run@${SF_VERSION}
|
||||||
|
|
||||||
# Create non-root user for sandbox isolation
|
# Create non-root user for sandbox isolation
|
||||||
RUN groupadd --gid 1000 gsd \
|
RUN groupadd --gid 1000 sf \
|
||||||
&& useradd --uid 1000 --gid gsd --shell /bin/bash --create-home gsd
|
&& useradd --uid 1000 --gid sf --shell /bin/bash --create-home sf
|
||||||
|
|
||||||
# Persistent SF state directory
|
# Persistent SF state directory
|
||||||
RUN mkdir -p /home/gsd/.gsd && chown -R gsd:gsd /home/gsd/.gsd
|
RUN mkdir -p /home/sf/.sf && chown -R sf:sf /home/sf/.sf
|
||||||
|
|
||||||
# Workspace directory — synced from host via Docker sandbox
|
# Workspace directory — synced from host via Docker sandbox
|
||||||
WORKDIR /workspace
|
WORKDIR /workspace
|
||||||
RUN chown gsd:gsd /workspace
|
RUN chown sf:sf /workspace
|
||||||
|
|
||||||
# Entrypoint handles UID/GID remapping, bootstrap, and drops to gsd user
|
# Entrypoint handles UID/GID remapping, bootstrap, and drops to sf user
|
||||||
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
|
COPY entrypoint.sh /usr/local/bin/entrypoint.sh
|
||||||
COPY bootstrap.sh /usr/local/bin/bootstrap.sh
|
COPY bootstrap.sh /usr/local/bin/bootstrap.sh
|
||||||
RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/bootstrap.sh
|
RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/bootstrap.sh
|
||||||
|
|
@ -39,4 +39,4 @@ RUN chmod +x /usr/local/bin/entrypoint.sh /usr/local/bin/bootstrap.sh
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|
||||||
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
|
||||||
CMD ["gsd", "--help"]
|
CMD ["sf", "--help"]
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ set -e
|
||||||
# SF First-Boot Bootstrap
|
# SF First-Boot Bootstrap
|
||||||
#
|
#
|
||||||
# Runs once on initial container creation.
|
# Runs once on initial container creation.
|
||||||
# Called by entrypoint.sh as the gsd user.
|
# Called by entrypoint.sh as the sf user.
|
||||||
#
|
#
|
||||||
# This script is idempotent — safe to run multiple
|
# This script is idempotent — safe to run multiple
|
||||||
# times, but the sentinel in entrypoint.sh ensures
|
# times, but the sentinel in entrypoint.sh ensures
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,21 @@
|
||||||
services:
|
services:
|
||||||
gsd:
|
sf:
|
||||||
build:
|
build:
|
||||||
context: . # Build context is the docker/ directory
|
context: . # Build context is the docker/ directory
|
||||||
dockerfile: Dockerfile.sandbox # Runtime sandbox image with entrypoint
|
dockerfile: Dockerfile.sandbox # Runtime sandbox image with entrypoint
|
||||||
args:
|
args:
|
||||||
SF_VERSION: latest # Pin a specific version: SF_VERSION=2.51.0
|
SF_VERSION: latest # Pin a specific version: SF_VERSION=2.51.0
|
||||||
|
|
||||||
container_name: gsd-sandbox
|
container_name: sf-sandbox
|
||||||
|
|
||||||
ports:
|
ports:
|
||||||
- "3000:3000" # SF web UI
|
- "3000:3000" # SF web UI
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
- ../:/workspace # Project root mounted into the container
|
- ../:/workspace # Project root mounted into the container
|
||||||
- gsd-state:/home/gsd/.gsd # Persistent SF state across restarts
|
- sf-state:/home/sf/.sf # Persistent SF state across restarts
|
||||||
# - ~/.ssh:/home/gsd/.ssh:ro # SSH keys for git operations (read-only)
|
# - ~/.ssh:/home/sf/.ssh:ro # SSH keys for git operations (read-only)
|
||||||
# - ~/.gitconfig:/home/gsd/.gitconfig:ro # Host git config
|
# - ~/.gitconfig:/home/sf/.gitconfig:ro # Host git config
|
||||||
|
|
||||||
env_file:
|
env_file:
|
||||||
- .env # API keys and secrets (see .env.example)
|
- .env # API keys and secrets (see .env.example)
|
||||||
|
|
@ -23,7 +23,7 @@ services:
|
||||||
environment:
|
environment:
|
||||||
- NODE_ENV=development
|
- NODE_ENV=development
|
||||||
# UID/GID remapping — match your host user to avoid permission issues
|
# UID/GID remapping — match your host user to avoid permission issues
|
||||||
# on bind-mounted volumes. The entrypoint remaps the container's gsd
|
# on bind-mounted volumes. The entrypoint remaps the container's sf
|
||||||
# user to these IDs at startup. Run `id -u` / `id -g` to find yours.
|
# user to these IDs at startup. Run `id -u` / `id -g` to find yours.
|
||||||
- PUID=1000
|
- PUID=1000
|
||||||
- PGID=1000
|
- PGID=1000
|
||||||
|
|
@ -36,7 +36,7 @@ services:
|
||||||
|
|
||||||
# Health check — verify SF is installed and responsive
|
# Health check — verify SF is installed and responsive
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "gsd", "--version"]
|
test: ["CMD", "sf", "--version"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|
@ -57,5 +57,5 @@ services:
|
||||||
# network_mode: bridge # Default Docker bridge (already the default)
|
# network_mode: bridge # Default Docker bridge (already the default)
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
gsd-state:
|
sf-state:
|
||||||
driver: local
|
driver: local
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,16 @@
|
||||||
services:
|
services:
|
||||||
gsd:
|
sf:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: Dockerfile.sandbox
|
dockerfile: Dockerfile.sandbox
|
||||||
args:
|
args:
|
||||||
SF_VERSION: latest
|
SF_VERSION: latest
|
||||||
container_name: gsd-sandbox
|
container_name: sf-sandbox
|
||||||
ports:
|
ports:
|
||||||
- "3000:3000"
|
- "3000:3000"
|
||||||
volumes:
|
volumes:
|
||||||
- ../:/workspace
|
- ../:/workspace
|
||||||
- gsd-state:/home/gsd/.gsd
|
- sf-state:/home/sf/.sf
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- .env
|
||||||
environment:
|
environment:
|
||||||
|
|
@ -19,5 +19,5 @@ services:
|
||||||
tty: true
|
tty: true
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
gsd-state:
|
sf-state:
|
||||||
driver: local
|
driver: local
|
||||||
|
|
|
||||||
|
|
@ -12,9 +12,9 @@ set -e
|
||||||
# 4. Signal forwarding — exec into the final process
|
# 4. Signal forwarding — exec into the final process
|
||||||
# ──────────────────────────────────────────────
|
# ──────────────────────────────────────────────
|
||||||
|
|
||||||
SF_USER="gsd"
|
SF_USER="sf"
|
||||||
SF_HOME="/home/${SF_USER}"
|
SF_HOME="/home/${SF_USER}"
|
||||||
SF_DIR="${SF_HOME}/.gsd"
|
SF_DIR="${SF_HOME}/.sf"
|
||||||
|
|
||||||
# ── 1. UID/GID Remapping ────────────────────────────────
|
# ── 1. UID/GID Remapping ────────────────────────────────
|
||||||
# Accept PUID/PGID from the environment so the container
|
# Accept PUID/PGID from the environment so the container
|
||||||
|
|
@ -66,7 +66,7 @@ SENTINEL="${SF_DIR}/.bootstrapped"
|
||||||
|
|
||||||
if [ ! -f "${SENTINEL}" ]; then
|
if [ ! -f "${SENTINEL}" ]; then
|
||||||
if [ -x /usr/local/bin/bootstrap.sh ]; then
|
if [ -x /usr/local/bin/bootstrap.sh ]; then
|
||||||
# Run bootstrap as the gsd user so files get correct ownership
|
# Run bootstrap as the sf user so files get correct ownership
|
||||||
gosu "${SF_USER}" /usr/local/bin/bootstrap.sh
|
gosu "${SF_USER}" /usr/local/bin/bootstrap.sh
|
||||||
fi
|
fi
|
||||||
touch "${SENTINEL}"
|
touch "${SENTINEL}"
|
||||||
|
|
@ -75,7 +75,7 @@ fi
|
||||||
|
|
||||||
# ── 4. Drop Privileges & Exec ──────────────────────────
|
# ── 4. Drop Privileges & Exec ──────────────────────────
|
||||||
# Replace this shell process with the final command running
|
# Replace this shell process with the final command running
|
||||||
# as the gsd user. exec + gosu = proper PID 1 = proper
|
# as the sf user. exec + gosu = proper PID 1 = proper
|
||||||
# signal forwarding (SIGTERM, SIGINT, etc.).
|
# signal forwarding (SIGTERM, SIGINT, etc.).
|
||||||
|
|
||||||
exec gosu "${SF_USER}" "$@"
|
exec gosu "${SF_USER}" "$@"
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
# GSD2 File System Map
|
# SF File System Map
|
||||||
# Maps every source file to its system/subsystem labels
|
# Maps every source file to its system/subsystem labels
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ Anthropic's current public guidance draws a hard line:
|
||||||
- Third-party tools should prefer API key authentication through Claude Console or a supported cloud provider.
|
- Third-party tools should prefer API key authentication through Claude Console or a supported cloud provider.
|
||||||
- Apps that misrepresent their identity, route third-party traffic against subscription limits, or otherwise violate Anthropic terms are explicitly prohibited.
|
- Apps that misrepresent their identity, route third-party traffic against subscription limits, or otherwise violate Anthropic terms are explicitly prohibited.
|
||||||
|
|
||||||
For GSD2, the safe path is:
|
For SF, the safe path is:
|
||||||
|
|
||||||
1. Treat local Claude Code as an external authenticated runtime.
|
1. Treat local Claude Code as an external authenticated runtime.
|
||||||
2. Never ask SF users to sign into Claude subscriptions through SF-managed Anthropic OAuth.
|
2. Never ask SF users to sign into Claude subscriptions through SF-managed Anthropic OAuth.
|
||||||
|
|
@ -23,7 +23,7 @@ For GSD2, the safe path is:
|
||||||
|
|
||||||
Anthropic's help center says Claude Pro/Max users should install Claude Code, run `claude`, and "log in with the same credentials you use for Claude." It also says this connects the subscription directly to Claude Code, and that `/login` is the way to switch account types. The Team/Enterprise article gives the same flow for org accounts.
|
Anthropic's help center says Claude Pro/Max users should install Claude Code, run `claude`, and "log in with the same credentials you use for Claude." It also says this connects the subscription directly to Claude Code, and that `/login` is the way to switch account types. The Team/Enterprise article gives the same flow for org accounts.
|
||||||
|
|
||||||
Implication for GSD2:
|
Implication for SF:
|
||||||
|
|
||||||
- Letting users authenticate inside the real `claude` CLI is aligned with Anthropic's documented flow.
|
- Letting users authenticate inside the real `claude` CLI is aligned with Anthropic's documented flow.
|
||||||
- Detecting `claude auth status` and routing work through the local CLI or official Claude Code SDK is the lowest-risk pattern.
|
- Detecting `claude auth status` and routing work through the local CLI or official Claude Code SDK is the lowest-risk pattern.
|
||||||
|
|
@ -38,18 +38,18 @@ Anthropic's Claude Code docs say supported auth types include Claude.ai credenti
|
||||||
4. `apiKeyHelper`
|
4. `apiKeyHelper`
|
||||||
5. subscription OAuth from `/login`
|
5. subscription OAuth from `/login`
|
||||||
|
|
||||||
Implication for GSD2:
|
Implication for SF:
|
||||||
|
|
||||||
- If GSD2 shells out to or embeds Claude Code, it should respect Claude Code's own credential selection instead of inventing a parallel Anthropic OAuth flow.
|
- If SF shells out to or embeds Claude Code, it should respect Claude Code's own credential selection instead of inventing a parallel Anthropic OAuth flow.
|
||||||
- `apiKeyHelper` is the clean enterprise escape hatch when an org wants dynamic short-lived keys without handing raw API keys to the tool.
|
- `apiKeyHelper` is the clean enterprise escape hatch when an org wants dynamic short-lived keys without handing raw API keys to the tool.
|
||||||
|
|
||||||
### 3. Anthropic commercial usage is available through API keys and supported cloud providers
|
### 3. Anthropic commercial usage is available through API keys and supported cloud providers
|
||||||
|
|
||||||
Anthropic's commercial terms govern API keys and related Anthropic services for customer-built products, including products made available to end users. The authentication docs for teams recommend Claude for Teams/Enterprise, Claude Console, Bedrock, Vertex, or Microsoft Foundry.
|
Anthropic's commercial terms govern API keys and related Anthropic services for customer-built products, including products made available to end users. The authentication docs for teams recommend Claude for Teams/Enterprise, Claude Console, Bedrock, Vertex, or Microsoft Foundry.
|
||||||
|
|
||||||
Implication for GSD2:
|
Implication for SF:
|
||||||
|
|
||||||
- If GSD2 is acting as a product for users, direct Anthropic access should be through commercial auth paths, not subscription-token reuse.
|
- If SF is acting as a product for users, direct Anthropic access should be through commercial auth paths, not subscription-token reuse.
|
||||||
|
|
||||||
## What Anthropic Explicitly Warns Against
|
## What Anthropic Explicitly Warns Against
|
||||||
|
|
||||||
|
|
@ -65,13 +65,13 @@ Anthropic's consumer terms add two more constraints:
|
||||||
- Users may not share account login info, API keys, or account credentials with anyone else.
|
- Users may not share account login info, API keys, or account credentials with anyone else.
|
||||||
- Except when accessing services via an Anthropic API key or where Anthropic explicitly permits it, users may not access the services through automated or non-human means.
|
- Except when accessing services via an Anthropic API key or where Anthropic explicitly permits it, users may not access the services through automated or non-human means.
|
||||||
|
|
||||||
Implication for GSD2:
|
Implication for SF:
|
||||||
|
|
||||||
- A SF-managed Anthropic OAuth flow for subscription accounts is high risk.
|
- A SF-managed Anthropic OAuth flow for subscription accounts is high risk.
|
||||||
- Reusing user Claude subscription credentials inside SF's own API client is high risk.
|
- Reusing user Claude subscription credentials inside SF's own API client is high risk.
|
||||||
- Any flow that makes Anthropic believe requests come from Claude Code when they actually come from SF infrastructure is out of bounds.
|
- Any flow that makes Anthropic believe requests come from Claude Code when they actually come from SF infrastructure is out of bounds.
|
||||||
|
|
||||||
## Current GSD2 Findings
|
## Current SF Findings
|
||||||
|
|
||||||
### Low-risk / aligned pieces
|
### Low-risk / aligned pieces
|
||||||
|
|
||||||
|
|
@ -96,7 +96,7 @@ All Anthropic OAuth code paths have been removed:
|
||||||
- `packages/daemon/src/orchestrator.ts` — **Updated.** OAuth token refresh removed; requires `ANTHROPIC_API_KEY` env var.
|
- `packages/daemon/src/orchestrator.ts` — **Updated.** OAuth token refresh removed; requires `ANTHROPIC_API_KEY` env var.
|
||||||
- `packages/pi-ai/src/providers/anthropic.ts` — **Updated.** OAuth client branch removed; `isOAuthToken` always returns false.
|
- `packages/pi-ai/src/providers/anthropic.ts` — **Updated.** OAuth client branch removed; `isOAuthToken` always returns false.
|
||||||
|
|
||||||
## Recommended Policy For GSD2
|
## Recommended Policy For SF
|
||||||
|
|
||||||
Adopt this as the repo rule:
|
Adopt this as the repo rule:
|
||||||
|
|
||||||
|
|
@ -104,10 +104,10 @@ Adopt this as the repo rule:
|
||||||
- the `claude` CLI
|
- the `claude` CLI
|
||||||
- Claude Code SDK when it is backed by the local authenticated Claude Code install
|
- Claude Code SDK when it is backed by the local authenticated Claude Code install
|
||||||
- other Anthropic-documented native flows
|
- other Anthropic-documented native flows
|
||||||
- GSD2 must not implement its own Anthropic subscription OAuth flow for end users.
|
- SF must not implement its own Anthropic subscription OAuth flow for end users.
|
||||||
- GSD2 must not persist Anthropic subscription OAuth tokens for later API use.
|
- SF must not persist Anthropic subscription OAuth tokens for later API use.
|
||||||
- GSD2 must not send Anthropic API traffic using subscription OAuth tokens obtained by SF.
|
- SF must not send Anthropic API traffic using subscription OAuth tokens obtained by SF.
|
||||||
- GSD2 may support Anthropic direct access only via:
|
- SF may support Anthropic direct access only via:
|
||||||
- `ANTHROPIC_API_KEY`
|
- `ANTHROPIC_API_KEY`
|
||||||
- Claude Console API keys stored in auth storage
|
- Claude Console API keys stored in auth storage
|
||||||
- `apiKeyHelper`
|
- `apiKeyHelper`
|
||||||
|
|
@ -157,7 +157,7 @@ This is the best long-term UX because it separates:
|
||||||
|
|
||||||
## Decision Rule
|
## Decision Rule
|
||||||
|
|
||||||
If a proposed GSD2 feature needs Anthropic access, ask one question:
|
If a proposed SF feature needs Anthropic access, ask one question:
|
||||||
|
|
||||||
"Is SF calling Anthropic as SF, or is SF delegating to the user's already-authenticated local Claude Code runtime?"
|
"Is SF calling Anthropic as SF, or is SF delegating to the user's already-authenticated local Claude Code runtime?"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ Anthropic 当前公开的指导原则边界非常清晰:
|
||||||
- 第三方工具应优先通过 Claude Console 或受支持云 provider 的 API key 进行认证。
|
- 第三方工具应优先通过 Claude Console 或受支持云 provider 的 API key 进行认证。
|
||||||
- 任何伪装身份、绕过订阅限制转发第三方流量、或以其他方式违反 Anthropic 条款的应用,都被明确禁止。
|
- 任何伪装身份、绕过订阅限制转发第三方流量、或以其他方式违反 Anthropic 条款的应用,都被明确禁止。
|
||||||
|
|
||||||
对于 GSD2,安全路径应当是:
|
对于 SF,安全路径应当是:
|
||||||
|
|
||||||
1. 把本地 Claude Code 视为一个外部、已认证的运行时。
|
1. 把本地 Claude Code 视为一个外部、已认证的运行时。
|
||||||
2. 永远不要让 SF 用户通过 SF 托管的 Anthropic OAuth 去登录 Claude 订阅。
|
2. 永远不要让 SF 用户通过 SF 托管的 Anthropic OAuth 去登录 Claude 订阅。
|
||||||
|
|
@ -23,7 +23,7 @@ Anthropic 当前公开的指导原则边界非常清晰:
|
||||||
|
|
||||||
Anthropic 帮助中心说明:Claude Pro / Max 用户应安装 Claude Code,运行 `claude`,并“使用与你登录 Claude 相同的凭据”完成登录。文档还指出,这样会把订阅直接连接到 Claude Code,并且 `/login` 是切换账户类型的方式。Team / Enterprise 文章对组织账号也给出了同样流程。
|
Anthropic 帮助中心说明:Claude Pro / Max 用户应安装 Claude Code,运行 `claude`,并“使用与你登录 Claude 相同的凭据”完成登录。文档还指出,这样会把订阅直接连接到 Claude Code,并且 `/login` 是切换账户类型的方式。Team / Enterprise 文章对组织账号也给出了同样流程。
|
||||||
|
|
||||||
对 GSD2 的含义:
|
对 SF 的含义:
|
||||||
|
|
||||||
- 允许用户在真正的 `claude` CLI 内部完成认证,是符合 Anthropic 文档流程的
|
- 允许用户在真正的 `claude` CLI 内部完成认证,是符合 Anthropic 文档流程的
|
||||||
- 检测 `claude auth status`,然后通过本地 CLI 或官方 Claude Code SDK 路由工作,是风险最低的方案
|
- 检测 `claude auth status`,然后通过本地 CLI 或官方 Claude Code SDK 路由工作,是风险最低的方案
|
||||||
|
|
@ -38,18 +38,18 @@ Anthropic 的 Claude Code 文档说明,支持的认证类型包括 Claude.ai
|
||||||
4. `apiKeyHelper`
|
4. `apiKeyHelper`
|
||||||
5. 来自 `/login` 的订阅 OAuth
|
5. 来自 `/login` 的订阅 OAuth
|
||||||
|
|
||||||
对 GSD2 的含义:
|
对 SF 的含义:
|
||||||
|
|
||||||
- 如果 GSD2 是通过 shell 调用或嵌入 Claude Code,那么它应尊重 Claude Code 自己的凭据选择逻辑,而不是再发明一套平行的 Anthropic OAuth 流程
|
- 如果 SF 是通过 shell 调用或嵌入 Claude Code,那么它应尊重 Claude Code 自己的凭据选择逻辑,而不是再发明一套平行的 Anthropic OAuth 流程
|
||||||
- 对需要动态短期凭据、但又不希望把原始 API key 交给工具的组织来说,`apiKeyHelper` 是一个干净的企业级出口
|
- 对需要动态短期凭据、但又不希望把原始 API key 交给工具的组织来说,`apiKeyHelper` 是一个干净的企业级出口
|
||||||
|
|
||||||
### 3. Anthropic 的商业使用可通过 API keys 和受支持的云 provider 实现
|
### 3. Anthropic 的商业使用可通过 API keys 和受支持的云 provider 实现
|
||||||
|
|
||||||
Anthropic 的商业条款约束的是 API keys 及其相关 Anthropic 服务,包括供客户构建给终端用户使用的产品。面向团队的认证文档推荐使用 Claude for Teams / Enterprise、Claude Console、Bedrock、Vertex 或 Microsoft Foundry。
|
Anthropic 的商业条款约束的是 API keys 及其相关 Anthropic 服务,包括供客户构建给终端用户使用的产品。面向团队的认证文档推荐使用 Claude for Teams / Enterprise、Claude Console、Bedrock、Vertex 或 Microsoft Foundry。
|
||||||
|
|
||||||
对 GSD2 的含义:
|
对 SF 的含义:
|
||||||
|
|
||||||
- 如果 GSD2 作为一个产品面向用户提供 Anthropic 能力,那么任何直接 Anthropic 访问都应走商业认证路径,而不是复用订阅 token
|
- 如果 SF 作为一个产品面向用户提供 Anthropic 能力,那么任何直接 Anthropic 访问都应走商业认证路径,而不是复用订阅 token
|
||||||
|
|
||||||
## Anthropic 明确警告的内容
|
## Anthropic 明确警告的内容
|
||||||
|
|
||||||
|
|
@ -65,13 +65,13 @@ Anthropic 的消费条款还额外加入两项限制:
|
||||||
- 用户不得把账户登录信息、API keys 或账户凭据分享给他人
|
- 用户不得把账户登录信息、API keys 或账户凭据分享给他人
|
||||||
- 除非是通过 Anthropic API key 访问服务,或者 Anthropic 明确允许,否则用户不得通过自动化或非人工方式访问这些服务
|
- 除非是通过 Anthropic API key 访问服务,或者 Anthropic 明确允许,否则用户不得通过自动化或非人工方式访问这些服务
|
||||||
|
|
||||||
对 GSD2 的含义:
|
对 SF 的含义:
|
||||||
|
|
||||||
- 由 SF 托管的 Anthropic 订阅 OAuth 流程属于高风险
|
- 由 SF 托管的 Anthropic 订阅 OAuth 流程属于高风险
|
||||||
- 在 SF 自己的 API client 中复用用户 Claude 订阅凭据属于高风险
|
- 在 SF 自己的 API client 中复用用户 Claude 订阅凭据属于高风险
|
||||||
- 任何会让 Anthropic 误以为请求来自 Claude Code、但实际上来自 SF 基础设施的流程,都越界了
|
- 任何会让 Anthropic 误以为请求来自 Claude Code、但实际上来自 SF 基础设施的流程,都越界了
|
||||||
|
|
||||||
## 当前 GSD2 发现
|
## 当前 SF 发现
|
||||||
|
|
||||||
### 低风险 / 已对齐的部分
|
### 低风险 / 已对齐的部分
|
||||||
|
|
||||||
|
|
@ -96,7 +96,7 @@ Anthropic 的消费条款还额外加入两项限制:
|
||||||
- `packages/daemon/src/orchestrator.ts` —— **已更新**,去掉 OAuth token refresh,改为要求 `ANTHROPIC_API_KEY` 环境变量
|
- `packages/daemon/src/orchestrator.ts` —— **已更新**,去掉 OAuth token refresh,改为要求 `ANTHROPIC_API_KEY` 环境变量
|
||||||
- `packages/pi-ai/src/providers/anthropic.ts` —— **已更新**,移除 OAuth client 分支,`isOAuthToken` 始终返回 false
|
- `packages/pi-ai/src/providers/anthropic.ts` —— **已更新**,移除 OAuth client 分支,`isOAuthToken` 始终返回 false
|
||||||
|
|
||||||
## 针对 GSD2 的建议策略
|
## 针对 SF 的建议策略
|
||||||
|
|
||||||
将下面内容作为仓库规则:
|
将下面内容作为仓库规则:
|
||||||
|
|
||||||
|
|
@ -104,10 +104,10 @@ Anthropic 的消费条款还额外加入两项限制:
|
||||||
- `claude` CLI
|
- `claude` CLI
|
||||||
- 基于本地已认证 Claude Code 安装的 Claude Code SDK
|
- 基于本地已认证 Claude Code 安装的 Claude Code SDK
|
||||||
- 其他 Anthropic 文档明确支持的原生流程
|
- 其他 Anthropic 文档明确支持的原生流程
|
||||||
- GSD2 不得为终端用户实现自己的 Anthropic 订阅 OAuth 流程
|
- SF 不得为终端用户实现自己的 Anthropic 订阅 OAuth 流程
|
||||||
- GSD2 不得持久化 Anthropic 订阅 OAuth token,供后续 API 调用使用
|
- SF 不得持久化 Anthropic 订阅 OAuth token,供后续 API 调用使用
|
||||||
- GSD2 不得使用由 SF 获取的订阅 OAuth tokens 来发送 Anthropic API 流量
|
- SF 不得使用由 SF 获取的订阅 OAuth tokens 来发送 Anthropic API 流量
|
||||||
- GSD2 可以支持 Anthropic 直接访问,但仅限以下方式:
|
- SF 可以支持 Anthropic 直接访问,但仅限以下方式:
|
||||||
- `ANTHROPIC_API_KEY`
|
- `ANTHROPIC_API_KEY`
|
||||||
- 保存在 auth storage 中的 Claude Console API keys
|
- 保存在 auth storage 中的 Claude Console API keys
|
||||||
- `apiKeyHelper`
|
- `apiKeyHelper`
|
||||||
|
|
@ -157,7 +157,7 @@ Anthropic 的消费条款还额外加入两项限制:
|
||||||
|
|
||||||
## 决策规则
|
## 决策规则
|
||||||
|
|
||||||
如果某个拟议中的 GSD2 特性需要访问 Anthropic,先问一个问题:
|
如果某个拟议中的 SF 特性需要访问 Anthropic,先问一个问题:
|
||||||
|
|
||||||
“SF 是以 SF 的身份调用 Anthropic,还是 SF 只是把工作委派给用户本地已认证的 Claude Code 运行时?”
|
“SF 是以 SF 的身份调用 Anthropic,还是 SF 只是把工作委派给用户本地已认证的 Claude Code 运行时?”
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@
|
||||||
"logo": {
|
"logo": {
|
||||||
"light": "/images/logo.svg",
|
"light": "/images/logo.svg",
|
||||||
"dark": "/images/logo.svg",
|
"dark": "/images/logo.svg",
|
||||||
"href": "https://gsd.build"
|
"href": "https://sf.build"
|
||||||
},
|
},
|
||||||
"favicon": "/images/favicon.svg",
|
"favicon": "/images/favicon.svg",
|
||||||
"colors": {
|
"colors": {
|
||||||
|
|
|
||||||
|
|
@ -12,15 +12,15 @@ npm install -g sf-run
|
||||||
Requires Node.js 22+ and Git.
|
Requires Node.js 22+ and Git.
|
||||||
|
|
||||||
<Note>
|
<Note>
|
||||||
**`command not found: gsd`?** Your shell may not have npm's global bin directory in `$PATH`. Run `npm prefix -g` to find it, then add `$(npm prefix -g)/bin` to your PATH. See [troubleshooting](/guides/troubleshooting) for details.
|
**`command not found: sf`?** Your shell may not have npm's global bin directory in `$PATH`. Run `npm prefix -g` to find it, then add `$(npm prefix -g)/bin` to your PATH. See [troubleshooting](/guides/troubleshooting) for details.
|
||||||
</Note>
|
</Note>
|
||||||
|
|
||||||
SF checks for updates every 24 hours. Update in-session with `/gsd update`.
|
SF checks for updates every 24 hours. Update in-session with `/sf update`.
|
||||||
|
|
||||||
## First launch
|
## First launch
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
```
|
```
|
||||||
|
|
||||||
On first launch, a setup wizard walks you through:
|
On first launch, a setup wizard walks you through:
|
||||||
|
|
@ -31,16 +31,16 @@ On first launch, a setup wizard walks you through:
|
||||||
Re-run the wizard anytime:
|
Re-run the wizard anytime:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd config
|
sf config
|
||||||
```
|
```
|
||||||
|
|
||||||
### Set up API keys
|
### Set up API keys
|
||||||
|
|
||||||
For non-Anthropic models, you may need a search API key. Run `/gsd config` to set keys globally — they're saved to `~/.gsd/agent/auth.json` and apply to all projects.
|
For non-Anthropic models, you may need a search API key. Run `/sf config` to set keys globally — they're saved to `~/.sf/agent/auth.json` and apply to all projects.
|
||||||
|
|
||||||
### Set up MCP servers
|
### Set up MCP servers
|
||||||
|
|
||||||
To connect SF to local or external MCP servers, add project-local config in `.mcp.json` or `.gsd/mcp.json`. See [configuration](/guides/configuration) for examples. Use `/gsd mcp` to verify connectivity.
|
To connect SF to local or external MCP servers, add project-local config in `.mcp.json` or `.sf/mcp.json`. See [configuration](/guides/configuration) for examples. Use `/sf mcp` to verify connectivity.
|
||||||
|
|
||||||
### Offline mode
|
### Offline mode
|
||||||
|
|
||||||
|
|
@ -60,18 +60,18 @@ Or configure per-phase models in [preferences](/guides/configuration).
|
||||||
|
|
||||||
<Tabs>
|
<Tabs>
|
||||||
<Tab title="Step mode">
|
<Tab title="Step mode">
|
||||||
Type `/gsd` inside a session. SF executes one unit at a time, pausing between each with a wizard showing what completed and what's next.
|
Type `/sf` inside a session. SF executes one unit at a time, pausing between each with a wizard showing what completed and what's next.
|
||||||
|
|
||||||
- **No `.gsd/` directory** → starts a discussion to capture your project vision
|
- **No `.sf/` directory** → starts a discussion to capture your project vision
|
||||||
- **Milestone exists, no roadmap** → discuss or research the milestone
|
- **Milestone exists, no roadmap** → discuss or research the milestone
|
||||||
- **Roadmap exists, slices pending** → plan the next slice or execute a task
|
- **Roadmap exists, slices pending** → plan the next slice or execute a task
|
||||||
- **Mid-task** → resume where you left off
|
- **Mid-task** → resume where you left off
|
||||||
</Tab>
|
</Tab>
|
||||||
<Tab title="Auto mode">
|
<Tab title="Auto mode">
|
||||||
Type `/gsd auto` and walk away. SF autonomously researches, plans, executes, verifies, commits, and advances through every slice until the milestone is complete.
|
Type `/sf auto` and walk away. SF autonomously researches, plans, executes, verifies, commits, and advances through every slice until the milestone is complete.
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
|
|
||||||
See [auto mode](/guides/auto-mode) for the full details.
|
See [auto mode](/guides/auto-mode) for the full details.
|
||||||
|
|
@ -85,20 +85,20 @@ The recommended workflow: auto mode in one terminal, steering from another.
|
||||||
**Terminal 1 — let it build:**
|
**Terminal 1 — let it build:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
|
|
||||||
**Terminal 2 — steer while it works:**
|
**Terminal 2 — steer while it works:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd discuss # talk through architecture decisions
|
/sf discuss # talk through architecture decisions
|
||||||
/gsd status # check progress
|
/sf status # check progress
|
||||||
/gsd queue # queue the next milestone
|
/sf queue # queue the next milestone
|
||||||
```
|
```
|
||||||
|
|
||||||
Both terminals read and write the same `.gsd/` files. Decisions in terminal 2 are picked up at the next phase boundary automatically.
|
Both terminals read and write the same `.sf/` files. Decisions in terminal 2 are picked up at the next phase boundary automatically.
|
||||||
|
|
||||||
## Project structure
|
## Project structure
|
||||||
|
|
||||||
|
|
@ -110,11 +110,11 @@ Milestone → a shippable version (4-10 slices)
|
||||||
Task → one context-window-sized unit of work
|
Task → one context-window-sized unit of work
|
||||||
```
|
```
|
||||||
|
|
||||||
All state lives on disk in `.gsd/`:
|
All state lives on disk in `.sf/`:
|
||||||
|
|
||||||
<Accordion title="Directory structure">
|
<Accordion title="Directory structure">
|
||||||
```
|
```
|
||||||
.gsd/
|
.sf/
|
||||||
PROJECT.md — what the project is right now
|
PROJECT.md — what the project is right now
|
||||||
REQUIREMENTS.md — requirement contract (active/validated/deferred)
|
REQUIREMENTS.md — requirement contract (active/validated/deferred)
|
||||||
DECISIONS.md — append-only architectural decisions
|
DECISIONS.md — append-only architectural decisions
|
||||||
|
|
@ -139,20 +139,20 @@ All state lives on disk in `.gsd/`:
|
||||||
## Resume a session
|
## Resume a session
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd --continue # or gsd -c
|
sf --continue # or sf -c
|
||||||
```
|
```
|
||||||
|
|
||||||
Resumes the most recent session. To pick from all saved sessions:
|
Resumes the most recent session. To pick from all saved sessions:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd sessions
|
sf sessions
|
||||||
```
|
```
|
||||||
|
|
||||||
## VS Code extension
|
## VS Code extension
|
||||||
|
|
||||||
SF is also available as a VS Code extension (publisher: FluxLabs). It provides:
|
SF is also available as a VS Code extension (publisher: FluxLabs). It provides:
|
||||||
|
|
||||||
- **`@gsd` chat participant** — talk to the agent in VS Code Chat
|
- **`@sf` chat participant** — talk to the agent in VS Code Chat
|
||||||
- **Sidebar dashboard** — connection status, model info, token usage, quick actions
|
- **Sidebar dashboard** — connection status, model info, token usage, quick actions
|
||||||
- **Full command palette** — start/stop agent, switch models, export sessions
|
- **Full command palette** — start/stop agent, switch models, export sessions
|
||||||
|
|
||||||
|
|
@ -161,27 +161,27 @@ The CLI (`sf-run`) must be installed first — the extension connects to it via
|
||||||
## Web interface
|
## Web interface
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd --web
|
sf --web
|
||||||
```
|
```
|
||||||
|
|
||||||
A browser-based dashboard with real-time progress and multi-project support. See [web interface](/guides/web-interface) for details.
|
A browser-based dashboard with real-time progress and multi-project support. See [web interface](/guides/web-interface) for details.
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
### `gsd` runs `git svn dcommit` instead of SF
|
### `gsd` runs `git svn dcommit` instead of SF
|
||||||
|
|
||||||
The [oh-my-zsh git plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/git) defines `alias gsd='git svn dcommit'`.
|
The [oh-my-zsh git plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/git) defines `alias gsd='git svn dcommit'` — the alias is named `gsd` in oh-my-zsh, so it shadows the legacy `gsd` command name, not `sf`.
|
||||||
|
|
||||||
**Option 1** — Remove the alias in `~/.zshrc` (after the `source $ZSH/oh-my-zsh.sh` line):
|
**Option 1** — Remove the alias in `~/.zshrc` (after the `source $ZSH/oh-my-zsh.sh` line):
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
unalias gsd 2>/dev/null
|
unalias gsd 2>/dev/null
|
||||||
```
|
```
|
||||||
|
|
||||||
**Option 2** — Use the alternative binary name:
|
**Option 2** — Use the alternative binary name:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd-cli
|
sf-cli
|
||||||
```
|
```
|
||||||
|
|
||||||
Both `gsd` and `gsd-cli` point to the same binary.
|
Both `sf` and `sf-cli` point to the same binary.
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,9 @@
|
||||||
---
|
---
|
||||||
title: "Auto mode"
|
title: "Auto mode"
|
||||||
description: "SF's autonomous execution engine — run /gsd auto, walk away, come back to built software with clean git history."
|
description: "SF's autonomous execution engine — run /sf auto, walk away, come back to built software with clean git history."
|
||||||
---
|
---
|
||||||
|
|
||||||
Auto mode is a **state machine driven by files on disk**. It reads `.gsd/STATE.md`, determines the next unit of work, creates a fresh agent session with pre-loaded context, and lets the LLM execute. When the LLM finishes, auto mode reads disk state again and dispatches the next unit.
|
Auto mode is a **state machine driven by files on disk**. It reads `.sf/STATE.md`, determines the next unit of work, creates a fresh agent session with pre-loaded context, and lets the LLM execute. When the LLM finishes, auto mode reads disk state again and dispatches the next unit.
|
||||||
|
|
||||||
## The loop
|
## The loop
|
||||||
|
|
||||||
|
|
@ -50,9 +50,9 @@ See [git strategy](/guides/git-strategy) for details.
|
||||||
|
|
||||||
### Crash recovery
|
### Crash recovery
|
||||||
|
|
||||||
A lock file tracks the current unit. If the session dies, the next `/gsd auto` synthesizes a recovery briefing from tool calls that made it to disk and resumes with full context.
|
A lock file tracks the current unit. If the session dies, the next `/sf auto` synthesizes a recovery briefing from tool calls that made it to disk and resumes with full context.
|
||||||
|
|
||||||
**Headless auto-restart:** When running `gsd headless auto`, crashes trigger automatic restart with exponential backoff (5s → 10s → 30s cap, default 3 attempts). Combined with crash recovery, this enables overnight "run until done" execution.
|
**Headless auto-restart:** When running `sf headless auto`, crashes trigger automatic restart with exponential backoff (5s → 10s → 30s cap, default 3 attempts). Combined with crash recovery, this enables overnight "run until done" execution.
|
||||||
|
|
||||||
### Provider error recovery
|
### Provider error recovery
|
||||||
|
|
||||||
|
|
@ -107,7 +107,7 @@ After milestone completion, SF auto-generates a self-contained HTML report with
|
||||||
auto_report: true # enabled by default
|
auto_report: true # enabled by default
|
||||||
```
|
```
|
||||||
|
|
||||||
Generate manually with `/gsd export --html`, or for all milestones with `/gsd export --html --all`.
|
Generate manually with `/sf export --html`, or for all milestones with `/sf export --html --all`.
|
||||||
|
|
||||||
### Reactive task execution
|
### Reactive task execution
|
||||||
|
|
||||||
|
|
@ -122,7 +122,7 @@ reactive_execution: true # disabled by default
|
||||||
<Steps>
|
<Steps>
|
||||||
<Step title="Start">
|
<Step title="Start">
|
||||||
```
|
```
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Pause">
|
<Step title="Pause">
|
||||||
|
|
@ -130,13 +130,13 @@ reactive_execution: true # disabled by default
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Resume">
|
<Step title="Resume">
|
||||||
```
|
```
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
Auto mode reads disk state and picks up where it left off.
|
Auto mode reads disk state and picks up where it left off.
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Stop">
|
<Step title="Stop">
|
||||||
```
|
```
|
||||||
/gsd stop
|
/sf stop
|
||||||
```
|
```
|
||||||
Stops auto mode gracefully. Can be run from a different terminal.
|
Stops auto mode gracefully. Can be run from a different terminal.
|
||||||
</Step>
|
</Step>
|
||||||
|
|
@ -145,7 +145,7 @@ reactive_execution: true # disabled by default
|
||||||
### Steer during execution
|
### Steer during execution
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd steer
|
/sf steer
|
||||||
```
|
```
|
||||||
|
|
||||||
Hard-steer plan documents without stopping the pipeline. Changes are picked up at the next phase boundary.
|
Hard-steer plan documents without stopping the pipeline. Changes are picked up at the next phase boundary.
|
||||||
|
|
@ -153,14 +153,14 @@ Hard-steer plan documents without stopping the pipeline. Changes are picked up a
|
||||||
### Capture thoughts
|
### Capture thoughts
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd capture "add rate limiting to API endpoints"
|
/sf capture "add rate limiting to API endpoints"
|
||||||
```
|
```
|
||||||
|
|
||||||
Fire-and-forget thought capture. Triaged automatically between tasks. See [captures and triage](/guides/captures-triage).
|
Fire-and-forget thought capture. Triaged automatically between tasks. See [captures and triage](/guides/captures-triage).
|
||||||
|
|
||||||
## Dashboard
|
## Dashboard
|
||||||
|
|
||||||
`Ctrl+Alt+G` or `/gsd status` shows real-time progress:
|
`Ctrl+Alt+G` or `/sf status` shows real-time progress:
|
||||||
|
|
||||||
- Current milestone, slice, and task
|
- Current milestone, slice, and task
|
||||||
- Auto mode elapsed time and phase
|
- Auto mode elapsed time and phase
|
||||||
|
|
|
||||||
|
|
@ -10,11 +10,11 @@ Captures let you fire-and-forget thoughts during auto-mode execution. Instead of
|
||||||
While auto-mode is running (or any time):
|
While auto-mode is running (or any time):
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd capture "add rate limiting to the API endpoints"
|
/sf capture "add rate limiting to the API endpoints"
|
||||||
/gsd capture "the auth flow should support OAuth, not just JWT"
|
/sf capture "the auth flow should support OAuth, not just JWT"
|
||||||
```
|
```
|
||||||
|
|
||||||
Captures are appended to `.gsd/CAPTURES.md` and triaged automatically between tasks.
|
Captures are appended to `.sf/CAPTURES.md` and triaged automatically between tasks.
|
||||||
|
|
||||||
## How it works
|
## How it works
|
||||||
|
|
||||||
|
|
@ -24,7 +24,7 @@ capture → triage → confirm → resolve → resume
|
||||||
|
|
||||||
<Steps>
|
<Steps>
|
||||||
<Step title="Capture">
|
<Step title="Capture">
|
||||||
`/gsd capture "thought"` appends to `.gsd/CAPTURES.md` with a timestamp and unique ID.
|
`/sf capture "thought"` appends to `.sf/CAPTURES.md` with a timestamp and unique ID.
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Triage">
|
<Step title="Triage">
|
||||||
At natural seams between tasks, SF classifies each capture.
|
At natural seams between tasks, SF classifies each capture.
|
||||||
|
|
@ -55,7 +55,7 @@ capture → triage → confirm → resolve → resume
|
||||||
Trigger triage at any time:
|
Trigger triage at any time:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd triage
|
/sf triage
|
||||||
```
|
```
|
||||||
|
|
||||||
Useful when you've accumulated several captures and want to process them before the next natural seam.
|
Useful when you've accumulated several captures and want to process them before the next natural seam.
|
||||||
|
|
@ -72,4 +72,4 @@ Capture context is automatically injected into:
|
||||||
|
|
||||||
## Worktree awareness
|
## Worktree awareness
|
||||||
|
|
||||||
Captures resolve to the **original project root's** `.gsd/CAPTURES.md`, not the worktree's local copy. Captures from a steering terminal are visible to the auto-mode session running in a worktree.
|
Captures resolve to the **original project root's** `.sf/CAPTURES.md`, not the worktree's local copy. Captures from a steering terminal are visible to the auto-mode session running in a worktree.
|
||||||
|
|
|
||||||
|
|
@ -24,10 +24,10 @@ Between milestones you have the most freedom. Inside a running milestone you hav
|
||||||
**A self-contained fix that can be described in a sentence.**
|
**A self-contained fix that can be described in a sentence.**
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd quick "fix the date formatting bug in the invoice renderer"
|
/sf quick "fix the date formatting bug in the invoice renderer"
|
||||||
```
|
```
|
||||||
|
|
||||||
`/gsd quick` executes immediately with full SF guarantees (atomic commit, state tracking) but skips milestone ceremony. It doesn't touch the milestone pipeline.
|
`/sf quick` executes immediately with full SF guarantees (atomic commit, state tracking) but skips milestone ceremony. It doesn't touch the milestone pipeline.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
@ -36,16 +36,16 @@ Between milestones you have the most freedom. Inside a running milestone you hav
|
||||||
**You spot something mid-execution but don't want to interrupt the run.**
|
**You spot something mid-execution but don't want to interrupt the run.**
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd capture "the login redirect is broken on mobile viewports"
|
/sf capture "the login redirect is broken on mobile viewports"
|
||||||
/gsd capture "add a loading spinner to the data table"
|
/sf capture "add a loading spinner to the data table"
|
||||||
```
|
```
|
||||||
|
|
||||||
Captures are appended to `.gsd/CAPTURES.md` and triaged automatically at natural seams between tasks. See [captures and triage](/guides/captures-triage) for the full classification system.
|
Captures are appended to `.sf/CAPTURES.md` and triaged automatically at natural seams between tasks. See [captures and triage](/guides/captures-triage) for the full classification system.
|
||||||
|
|
||||||
To force processing immediately:
|
To force processing immediately:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd triage
|
/sf triage
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
@ -55,7 +55,7 @@ To force processing immediately:
|
||||||
**You're mid-slice and the plan no longer makes sense — wrong approach, missing step, or a blocker.**
|
**You're mid-slice and the plan no longer makes sense — wrong approach, missing step, or a blocker.**
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd steer
|
/sf steer
|
||||||
```
|
```
|
||||||
|
|
||||||
This opens an interactive session to hard-edit plan documents. Changes are picked up at the next phase boundary without stopping auto-mode.
|
This opens an interactive session to hard-edit plan documents. Changes are picked up at the next phase boundary without stopping auto-mode.
|
||||||
|
|
@ -74,28 +74,28 @@ For structural changes (adding tasks, removing tasks), the agent triggers a slic
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Create a new milestone for the pre-M003 work">
|
<Step title="Create a new milestone for the pre-M003 work">
|
||||||
```
|
```
|
||||||
/gsd new-milestone
|
/sf new-milestone
|
||||||
```
|
```
|
||||||
Describe the bugs and features. SF creates a milestone — the title is what matters, not the number.
|
Describe the bugs and features. SF creates a milestone — the title is what matters, not the number.
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Check and reorder the queue">
|
<Step title="Check and reorder the queue">
|
||||||
```
|
```
|
||||||
/gsd queue
|
/sf queue
|
||||||
```
|
```
|
||||||
Confirm the new milestone is queued before M003. Reorder if needed.
|
Confirm the new milestone is queued before M003. Reorder if needed.
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Park M003 if it shouldn't run yet">
|
<Step title="Park M003 if it shouldn't run yet">
|
||||||
```
|
```
|
||||||
/gsd park M003
|
/sf park M003
|
||||||
```
|
```
|
||||||
Parking skips M003 without deleting it. Unpark when ready:
|
Parking skips M003 without deleting it. Unpark when ready:
|
||||||
```
|
```
|
||||||
/gsd unpark M003
|
/sf unpark M003
|
||||||
```
|
```
|
||||||
</Step>
|
</Step>
|
||||||
<Step title="Run auto-mode">
|
<Step title="Run auto-mode">
|
||||||
```
|
```
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
Auto-mode dispatches the next active milestone in queue order.
|
Auto-mode dispatches the next active milestone in queue order.
|
||||||
</Step>
|
</Step>
|
||||||
|
|
@ -107,15 +107,15 @@ For structural changes (adding tasks, removing tasks), the agent triggers a slic
|
||||||
|
|
||||||
**You want to change M003's scope — add slices, remove slices, change the approach — before it starts.**
|
**You want to change M003's scope — add slices, remove slices, change the approach — before it starts.**
|
||||||
|
|
||||||
Since M003 hasn't started, its plan files can be edited directly. Use `/gsd discuss` to talk through the changes and let SF rewrite the artifacts:
|
Since M003 hasn't started, its plan files can be edited directly. Use `/sf discuss` to talk through the changes and let SF rewrite the artifacts:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd discuss
|
/sf discuss
|
||||||
```
|
```
|
||||||
|
|
||||||
> "M003 needs to include the new auth flow we discovered. Can we add a slice for that and remove the old token refresh slice?"
|
> "M003 needs to include the new auth flow we discovered. Can we add a slice for that and remove the old token refresh slice?"
|
||||||
|
|
||||||
Or use `/gsd steer` to edit plan files directly.
|
Or use `/sf steer` to edit plan files directly.
|
||||||
|
|
||||||
If M003 is partially done (some slices complete), auto-mode calls `reassess-roadmap` automatically after each slice. You can also discuss changes during a pause — SF can add, modify, or remove pending slices without touching the completed ones.
|
If M003 is partially done (some slices complete), auto-mode calls `reassess-roadmap` automatically after each slice. You can also discuss changes during a pause — SF can add, modify, or remove pending slices without touching the completed ones.
|
||||||
|
|
||||||
|
|
@ -125,7 +125,7 @@ If M003 is partially done (some slices complete), auto-mode calls `reassess-road
|
||||||
|
|
||||||
**Your "Milestone 3" is effectively now "Milestone 4" because new work must insert before it.**
|
**Your "Milestone 3" is effectively now "Milestone 4" because new work must insert before it.**
|
||||||
|
|
||||||
SF milestone numbers are labels, not positions. Execution order is controlled by the queue, not the ID. The procedure is the same as above: create the new milestone, confirm queue order with `/gsd queue`, park M003 if needed.
|
SF milestone numbers are labels, not positions. Execution order is controlled by the queue, not the ID. The procedure is the same as above: create the new milestone, confirm queue order with `/sf queue`, park M003 if needed.
|
||||||
|
|
||||||
The milestone IDs stay as-is — M003 just executes later. No renumbering needed.
|
The milestone IDs stay as-is — M003 just executes later. No renumbering needed.
|
||||||
|
|
||||||
|
|
@ -136,7 +136,7 @@ The milestone IDs stay as-is — M003 just executes later. No renumbering needed
|
||||||
**After M002 you have 10+ bugs across multiple systems. Too scattered for individual quick tasks.**
|
**After M002 you have 10+ bugs across multiple systems. Too scattered for individual quick tasks.**
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd new-milestone
|
/sf new-milestone
|
||||||
```
|
```
|
||||||
|
|
||||||
Describe the full bug list. SF creates a milestone with slices organized by system or severity. Run it in auto-mode like any other milestone. When done, all bugs land as clean commits with a formal milestone summary — readable as a bugfix release.
|
Describe the full bug list. SF creates a milestone with slices organized by system or severity. Run it in auto-mode like any other milestone. When done, all bugs land as clean commits with a formal milestone summary — readable as a bugfix release.
|
||||||
|
|
@ -148,13 +148,13 @@ Describe the full bug list. SF creates a milestone with slices organized by syst
|
||||||
**Real ideas, but nothing that blocks the current plan.**
|
**Real ideas, but nothing that blocks the current plan.**
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd capture "dark mode toggle on the dashboard"
|
/sf capture "dark mode toggle on the dashboard"
|
||||||
```
|
```
|
||||||
|
|
||||||
Deferred captures surface during roadmap reassessment. SF can fold them into a later milestone when the timing makes sense. Or queue a dedicated features milestone directly:
|
Deferred captures surface during roadmap reassessment. SF can fold them into a later milestone when the timing makes sense. Or queue a dedicated features milestone directly:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd queue
|
/sf queue
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
@ -165,9 +165,9 @@ Deferred captures surface during roadmap reassessment. SF can fold them into a l
|
||||||
|
|
||||||
You cannot un-complete the slice. Options:
|
You cannot un-complete the slice. Options:
|
||||||
|
|
||||||
- `/gsd quick` for small fixes
|
- `/sf quick` for small fixes
|
||||||
- A new slice in the next milestone that explicitly patches the bug — reference the original slice in the description
|
- A new slice in the next milestone that explicitly patches the bug — reference the original slice in the description
|
||||||
- `/gsd steer` to add a fix task to the current active milestone if you're still inside it
|
- `/sf steer` to add a fix task to the current active milestone if you're still inside it
|
||||||
|
|
||||||
The completed slice record is preserved as-is. The fix lands as new work with its own commit and summary.
|
The completed slice record is preserved as-is. The fix lands as new work with its own commit and summary.
|
||||||
|
|
||||||
|
|
@ -180,7 +180,7 @@ The completed slice record is preserved as-is. The fix lands as new work with it
|
||||||
<Steps>
|
<Steps>
|
||||||
<Step title="Discuss the situation">
|
<Step title="Discuss the situation">
|
||||||
```
|
```
|
||||||
/gsd discuss
|
/sf discuss
|
||||||
```
|
```
|
||||||
Work through what's wrong and what the correction looks like before touching anything.
|
Work through what's wrong and what the correction looks like before touching anything.
|
||||||
</Step>
|
</Step>
|
||||||
|
|
@ -198,14 +198,14 @@ The completed slice record is preserved as-is. The fix lands as new work with it
|
||||||
|
|
||||||
| Situation | Command |
|
| Situation | Command |
|
||||||
|---|---|
|
|---|---|
|
||||||
| Small self-contained fix | `/gsd quick` |
|
| Small self-contained fix | `/sf quick` |
|
||||||
| Thought during auto-mode | `/gsd capture` |
|
| Thought during auto-mode | `/sf capture` |
|
||||||
| Force-process captures now | `/gsd triage` |
|
| Force-process captures now | `/sf triage` |
|
||||||
| Current slice plan is wrong | `/gsd steer` |
|
| Current slice plan is wrong | `/sf steer` |
|
||||||
| New work must land before next milestone | `/gsd new-milestone` + `/gsd queue` |
|
| New work must land before next milestone | `/sf new-milestone` + `/sf queue` |
|
||||||
| Delay a future milestone | `/gsd park <MID>` / `/gsd unpark <MID>` |
|
| Delay a future milestone | `/sf park <MID>` / `/sf unpark <MID>` |
|
||||||
| Modify a not-yet-started milestone | `/gsd discuss` or `/gsd steer` |
|
| Modify a not-yet-started milestone | `/sf discuss` or `/sf steer` |
|
||||||
| Many bugs → dedicated milestone | `/gsd new-milestone` (bugfix scope) |
|
| Many bugs → dedicated milestone | `/sf new-milestone` (bugfix scope) |
|
||||||
| Ideas that can wait | `/gsd capture` or `/gsd queue` |
|
| Ideas that can wait | `/sf capture` or `/sf queue` |
|
||||||
| Check/reorder pipeline | `/gsd queue` |
|
| Check/reorder pipeline | `/sf queue` |
|
||||||
| Architecture discussion | `/gsd discuss` |
|
| Architecture discussion | `/sf discuss` |
|
||||||
|
|
|
||||||
|
|
@ -7,105 +7,105 @@ description: "Every SF command, keyboard shortcut, and CLI flag."
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd` | Step mode — execute one unit at a time, pause between each |
|
| `/sf` | Step mode — execute one unit at a time, pause between each |
|
||||||
| `/gsd next` | Explicit step mode (same as `/gsd`) |
|
| `/sf next` | Explicit step mode (same as `/sf`) |
|
||||||
| `/gsd auto` | Autonomous mode — research, plan, execute, commit, repeat |
|
| `/sf auto` | Autonomous mode — research, plan, execute, commit, repeat |
|
||||||
| `/gsd quick` | Execute a quick task with SF guarantees without full planning overhead |
|
| `/sf quick` | Execute a quick task with SF guarantees without full planning overhead |
|
||||||
| `/gsd stop` | Stop auto mode gracefully |
|
| `/sf stop` | Stop auto mode gracefully |
|
||||||
| `/gsd pause` | Pause auto mode (preserves state, `/gsd auto` to resume) |
|
| `/sf pause` | Pause auto mode (preserves state, `/sf auto` to resume) |
|
||||||
| `/gsd steer` | Hard-steer plan documents during execution |
|
| `/sf steer` | Hard-steer plan documents during execution |
|
||||||
| `/gsd discuss` | Discuss architecture and decisions (works alongside auto mode) |
|
| `/sf discuss` | Discuss architecture and decisions (works alongside auto mode) |
|
||||||
| `/gsd rethink` | Conversational project reorganization |
|
| `/sf rethink` | Conversational project reorganization |
|
||||||
| `/gsd mcp` | MCP server status and connectivity |
|
| `/sf mcp` | MCP server status and connectivity |
|
||||||
| `/gsd status` | Progress dashboard |
|
| `/sf status` | Progress dashboard |
|
||||||
| `/gsd widget` | Cycle dashboard widget: full / small / min / off |
|
| `/sf widget` | Cycle dashboard widget: full / small / min / off |
|
||||||
| `/gsd queue` | Queue and reorder future milestones (safe during auto mode) |
|
| `/sf queue` | Queue and reorder future milestones (safe during auto mode) |
|
||||||
| `/gsd capture` | Fire-and-forget thought capture (works during auto mode) |
|
| `/sf capture` | Fire-and-forget thought capture (works during auto mode) |
|
||||||
| `/gsd triage` | Manually trigger triage of pending captures |
|
| `/sf triage` | Manually trigger triage of pending captures |
|
||||||
| `/gsd dispatch` | Dispatch a specific phase directly |
|
| `/sf dispatch` | Dispatch a specific phase directly |
|
||||||
| `/gsd history` | View execution history (supports `--cost`, `--phase`, `--model` filters) |
|
| `/sf history` | View execution history (supports `--cost`, `--phase`, `--model` filters) |
|
||||||
| `/gsd forensics` | Full-access debugger for auto-mode failures |
|
| `/sf forensics` | Full-access debugger for auto-mode failures |
|
||||||
| `/gsd cleanup` | Clean up SF state files and stale worktrees |
|
| `/sf cleanup` | Clean up SF state files and stale worktrees |
|
||||||
| `/gsd visualize` | Open workflow visualizer |
|
| `/sf visualize` | Open workflow visualizer |
|
||||||
| `/gsd export --html` | Generate self-contained HTML report |
|
| `/sf export --html` | Generate self-contained HTML report |
|
||||||
| `/gsd export --html --all` | Generate reports for all milestones |
|
| `/sf export --html --all` | Generate reports for all milestones |
|
||||||
| `/gsd update` | Update SF to the latest version in-session |
|
| `/sf update` | Update SF to the latest version in-session |
|
||||||
| `/gsd knowledge` | Add persistent project knowledge |
|
| `/sf knowledge` | Add persistent project knowledge |
|
||||||
| `/gsd fast` | Toggle service tier for supported models |
|
| `/sf fast` | Toggle service tier for supported models |
|
||||||
| `/gsd rate` | Rate last unit's model tier (over/ok/under) |
|
| `/sf rate` | Rate last unit's model tier (over/ok/under) |
|
||||||
| `/gsd changelog` | Show categorized release notes |
|
| `/sf changelog` | Show categorized release notes |
|
||||||
| `/gsd logs` | Browse activity logs, debug logs, and metrics |
|
| `/sf logs` | Browse activity logs, debug logs, and metrics |
|
||||||
| `/gsd remote` | Control remote auto-mode |
|
| `/sf remote` | Control remote auto-mode |
|
||||||
| `/gsd help` | Categorized command reference |
|
| `/sf help` | Categorized command reference |
|
||||||
|
|
||||||
## Configuration and diagnostics
|
## Configuration and diagnostics
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd prefs` | Model selection, timeouts, budget ceiling |
|
| `/sf prefs` | Model selection, timeouts, budget ceiling |
|
||||||
| `/gsd mode` | Switch workflow mode (solo/team) |
|
| `/sf mode` | Switch workflow mode (solo/team) |
|
||||||
| `/gsd config` | Re-run the provider setup wizard |
|
| `/sf config` | Re-run the provider setup wizard |
|
||||||
| `/gsd keys` | API key manager — list, add, remove, test, rotate |
|
| `/sf keys` | API key manager — list, add, remove, test, rotate |
|
||||||
| `/gsd doctor` | Runtime health checks with auto-fix |
|
| `/sf doctor` | Runtime health checks with auto-fix |
|
||||||
| `/gsd inspect` | Show SQLite DB diagnostics |
|
| `/sf inspect` | Show SQLite DB diagnostics |
|
||||||
| `/gsd init` | Project init wizard |
|
| `/sf init` | Project init wizard |
|
||||||
| `/gsd setup` | Global setup status and configuration |
|
| `/sf setup` | Global setup status and configuration |
|
||||||
| `/gsd skill-health` | Skill lifecycle dashboard |
|
| `/sf skill-health` | Skill lifecycle dashboard |
|
||||||
| `/gsd hooks` | Show configured post-unit and pre-dispatch hooks |
|
| `/sf hooks` | Show configured post-unit and pre-dispatch hooks |
|
||||||
| `/gsd run-hook` | Manually trigger a specific hook |
|
| `/sf run-hook` | Manually trigger a specific hook |
|
||||||
| `/gsd migrate` | Migrate a v1 `.planning` directory to `.gsd` format |
|
| `/sf migrate` | Migrate a v1 `.planning` directory to `.sf` format |
|
||||||
|
|
||||||
## Milestone management
|
## Milestone management
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd new-milestone` | Create a new milestone |
|
| `/sf new-milestone` | Create a new milestone |
|
||||||
| `/gsd skip` | Prevent a unit from auto-mode dispatch |
|
| `/sf skip` | Prevent a unit from auto-mode dispatch |
|
||||||
| `/gsd undo` | Revert last completed unit |
|
| `/sf undo` | Revert last completed unit |
|
||||||
| `/gsd undo-task` | Reset a specific task's completion state |
|
| `/sf undo-task` | Reset a specific task's completion state |
|
||||||
| `/gsd reset-slice` | Reset a slice and all its tasks |
|
| `/sf reset-slice` | Reset a slice and all its tasks |
|
||||||
| `/gsd park` | Park a milestone — skip without deleting |
|
| `/sf park` | Park a milestone — skip without deleting |
|
||||||
| `/gsd unpark` | Reactivate a parked milestone |
|
| `/sf unpark` | Reactivate a parked milestone |
|
||||||
|
|
||||||
## Parallel orchestration
|
## Parallel orchestration
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd parallel start` | Analyze eligibility, confirm, and start workers |
|
| `/sf parallel start` | Analyze eligibility, confirm, and start workers |
|
||||||
| `/gsd parallel status` | Show all workers with state, progress, and cost |
|
| `/sf parallel status` | Show all workers with state, progress, and cost |
|
||||||
| `/gsd parallel stop [MID]` | Stop all workers or a specific one |
|
| `/sf parallel stop [MID]` | Stop all workers or a specific one |
|
||||||
| `/gsd parallel pause [MID]` | Pause all or a specific worker |
|
| `/sf parallel pause [MID]` | Pause all or a specific worker |
|
||||||
| `/gsd parallel resume [MID]` | Resume paused workers |
|
| `/sf parallel resume [MID]` | Resume paused workers |
|
||||||
| `/gsd parallel merge [MID]` | Merge completed milestones to main |
|
| `/sf parallel merge [MID]` | Merge completed milestones to main |
|
||||||
|
|
||||||
## Workflow templates
|
## Workflow templates
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd start` | Start a workflow template (bugfix, spike, feature, hotfix, refactor, etc.) |
|
| `/sf start` | Start a workflow template (bugfix, spike, feature, hotfix, refactor, etc.) |
|
||||||
| `/gsd start resume` | Resume an in-progress workflow |
|
| `/sf start resume` | Resume an in-progress workflow |
|
||||||
| `/gsd templates` | List available workflow templates |
|
| `/sf templates` | List available workflow templates |
|
||||||
| `/gsd templates info <name>` | Show detailed template info |
|
| `/sf templates info <name>` | Show detailed template info |
|
||||||
|
|
||||||
## Custom workflows
|
## Custom workflows
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd workflow new` | Create a new workflow definition |
|
| `/sf workflow new` | Create a new workflow definition |
|
||||||
| `/gsd workflow run <name>` | Create a run and start auto-mode |
|
| `/sf workflow run <name>` | Create a run and start auto-mode |
|
||||||
| `/gsd workflow list` | List workflow runs |
|
| `/sf workflow list` | List workflow runs |
|
||||||
| `/gsd workflow validate <name>` | Validate a workflow definition |
|
| `/sf workflow validate <name>` | Validate a workflow definition |
|
||||||
| `/gsd workflow pause` | Pause custom workflow auto-mode |
|
| `/sf workflow pause` | Pause custom workflow auto-mode |
|
||||||
| `/gsd workflow resume` | Resume paused custom workflow auto-mode |
|
| `/sf workflow resume` | Resume paused custom workflow auto-mode |
|
||||||
|
|
||||||
## Extensions
|
## Extensions
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd extensions list` | List all extensions and their status |
|
| `/sf extensions list` | List all extensions and their status |
|
||||||
| `/gsd extensions enable <id>` | Enable a disabled extension |
|
| `/sf extensions enable <id>` | Enable a disabled extension |
|
||||||
| `/gsd extensions disable <id>` | Disable an extension |
|
| `/sf extensions disable <id>` | Disable an extension |
|
||||||
| `/gsd extensions info <id>` | Show extension details |
|
| `/sf extensions info <id>` | Show extension details |
|
||||||
|
|
||||||
## Keyboard shortcuts
|
## Keyboard shortcuts
|
||||||
|
|
||||||
|
|
@ -125,31 +125,31 @@ In terminals without Kitty keyboard protocol support (macOS Terminal.app, JetBra
|
||||||
|
|
||||||
| Flag | Description |
|
| Flag | Description |
|
||||||
|------|-------------|
|
|------|-------------|
|
||||||
| `gsd` | Start a new interactive session |
|
| `sf` | Start a new interactive session |
|
||||||
| `gsd --continue` (`-c`) | Resume the most recent session |
|
| `sf --continue` (`-c`) | Resume the most recent session |
|
||||||
| `gsd --model <id>` | Override the default model |
|
| `sf --model <id>` | Override the default model |
|
||||||
| `gsd --print "msg"` (`-p`) | Single-shot prompt mode (no TUI) |
|
| `sf --print "msg"` (`-p`) | Single-shot prompt mode (no TUI) |
|
||||||
| `gsd --mode <text\|json\|rpc\|mcp>` | Output mode for non-interactive use |
|
| `sf --mode <text\|json\|rpc\|mcp>` | Output mode for non-interactive use |
|
||||||
| `gsd --list-models [search]` | List available models and exit |
|
| `sf --list-models [search]` | List available models and exit |
|
||||||
| `gsd --web [path]` | Start browser-based web interface |
|
| `sf --web [path]` | Start browser-based web interface |
|
||||||
| `gsd --worktree` (`-w`) `[name]` | Start session in a git worktree |
|
| `sf --worktree` (`-w`) `[name]` | Start session in a git worktree |
|
||||||
| `gsd --no-session` | Disable session persistence |
|
| `sf --no-session` | Disable session persistence |
|
||||||
| `gsd --extension <path>` | Load an additional extension |
|
| `sf --extension <path>` | Load an additional extension |
|
||||||
| `gsd --version` (`-v`) | Print version and exit |
|
| `sf --version` (`-v`) | Print version and exit |
|
||||||
| `gsd sessions` | Interactive session picker |
|
| `sf sessions` | Interactive session picker |
|
||||||
| `gsd config` | Set up global API keys |
|
| `sf config` | Set up global API keys |
|
||||||
| `gsd update` | Update SF to the latest version |
|
| `sf update` | Update SF to the latest version |
|
||||||
|
|
||||||
## Headless mode
|
## Headless mode
|
||||||
|
|
||||||
`gsd headless` runs commands without a TUI — designed for CI, cron jobs, and scripted automation.
|
`sf headless` runs commands without a TUI — designed for CI, cron jobs, and scripted automation.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd headless # run auto mode
|
sf headless # run auto mode
|
||||||
gsd headless next # run a single unit
|
sf headless next # run a single unit
|
||||||
gsd headless query # instant JSON snapshot (~50ms, no LLM)
|
sf headless query # instant JSON snapshot (~50ms, no LLM)
|
||||||
gsd headless --timeout 600000 auto # with timeout
|
sf headless --timeout 600000 auto # with timeout
|
||||||
gsd headless new-milestone --context brief.md --auto
|
sf headless new-milestone --context brief.md --auto
|
||||||
```
|
```
|
||||||
|
|
||||||
| Flag | Description |
|
| Flag | Description |
|
||||||
|
|
@ -163,20 +163,20 @@ gsd headless new-milestone --context brief.md --auto
|
||||||
|
|
||||||
**Exit codes:** `0` = complete, `1` = error/timeout, `2` = blocked.
|
**Exit codes:** `0` = complete, `1` = error/timeout, `2` = blocked.
|
||||||
|
|
||||||
### `gsd headless query`
|
### `sf headless query`
|
||||||
|
|
||||||
Returns a JSON snapshot of the project state — no LLM session, instant response.
|
Returns a JSON snapshot of the project state — no LLM session, instant response.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd headless query | jq '.state.phase' # "executing"
|
sf headless query | jq '.state.phase' # "executing"
|
||||||
gsd headless query | jq '.next' # next dispatch action
|
sf headless query | jq '.next' # next dispatch action
|
||||||
gsd headless query | jq '.cost.total' # total spend
|
sf headless query | jq '.cost.total' # total spend
|
||||||
```
|
```
|
||||||
|
|
||||||
## MCP server mode
|
## MCP server mode
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd --mode mcp
|
sf --mode mcp
|
||||||
```
|
```
|
||||||
|
|
||||||
Runs SF as a Model Context Protocol server over stdin/stdout, exposing all tools to external AI clients (Claude Desktop, VS Code Copilot, etc.).
|
Runs SF as a Model Context Protocol server over stdin/stdout, exposing all tools to external AI clients (Claude Desktop, VS Code Copilot, etc.).
|
||||||
|
|
|
||||||
|
|
@ -3,16 +3,16 @@ title: "Configuration"
|
||||||
description: "Preferences, model selection, MCP servers, hooks, and all settings."
|
description: "Preferences, model selection, MCP servers, hooks, and all settings."
|
||||||
---
|
---
|
||||||
|
|
||||||
SF preferences live in `~/.gsd/PREFERENCES.md` (global) or `.gsd/PREFERENCES.md` (project-local). Manage interactively with `/gsd prefs`.
|
SF preferences live in `~/.sf/PREFERENCES.md` (global) or `.sf/PREFERENCES.md` (project-local). Manage interactively with `/sf prefs`.
|
||||||
|
|
||||||
## Preferences commands
|
## Preferences commands
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd prefs` | Open the global preferences wizard |
|
| `/sf prefs` | Open the global preferences wizard |
|
||||||
| `/gsd prefs global` | Global preferences wizard |
|
| `/sf prefs global` | Global preferences wizard |
|
||||||
| `/gsd prefs project` | Project preferences wizard |
|
| `/sf prefs project` | Project preferences wizard |
|
||||||
| `/gsd prefs status` | Show current files, merged values, and skill status |
|
| `/sf prefs status` | Show current files, merged values, and skill status |
|
||||||
|
|
||||||
## Preferences file format
|
## Preferences file format
|
||||||
|
|
||||||
|
|
@ -40,8 +40,8 @@ token_profile: balanced
|
||||||
|
|
||||||
| Scope | Path | Applies to |
|
| Scope | Path | Applies to |
|
||||||
|-------|------|-----------|
|
|-------|------|-----------|
|
||||||
| Global | `~/.gsd/PREFERENCES.md` | All projects |
|
| Global | `~/.sf/PREFERENCES.md` | All projects |
|
||||||
| Project | `.gsd/PREFERENCES.md` | Current project only |
|
| Project | `.sf/PREFERENCES.md` | Current project only |
|
||||||
|
|
||||||
**Merge behavior:**
|
**Merge behavior:**
|
||||||
- **Scalar fields** — project wins if defined
|
- **Scalar fields** — project wins if defined
|
||||||
|
|
@ -50,7 +50,7 @@ token_profile: balanced
|
||||||
|
|
||||||
## Global API keys
|
## Global API keys
|
||||||
|
|
||||||
Tool API keys are stored globally in `~/.gsd/agent/auth.json`. Set them once with `/gsd config`.
|
Tool API keys are stored globally in `~/.sf/agent/auth.json`. Set them once with `/sf config`.
|
||||||
|
|
||||||
| Tool | Environment variable | Purpose |
|
| Tool | Environment variable | Purpose |
|
||||||
|------|---------------------|---------|
|
|------|---------------------|---------|
|
||||||
|
|
@ -65,7 +65,7 @@ Anthropic models have built-in web search — no extra keys needed.
|
||||||
SF connects to external MCP servers configured in project files:
|
SF connects to external MCP servers configured in project files:
|
||||||
|
|
||||||
- `.mcp.json` — repo-shared config
|
- `.mcp.json` — repo-shared config
|
||||||
- `.gsd/mcp.json` — local-only config
|
- `.sf/mcp.json` — local-only config
|
||||||
|
|
||||||
<Tabs>
|
<Tabs>
|
||||||
<Tab title="stdio server">
|
<Tab title="stdio server">
|
||||||
|
|
@ -236,7 +236,7 @@ See [parallel orchestration](/guides/parallel-orchestration).
|
||||||
|
|
||||||
| Variable | Default | Description |
|
| Variable | Default | Description |
|
||||||
|----------|---------|-------------|
|
|----------|---------|-------------|
|
||||||
| `SF_HOME` | `~/.gsd` | Global SF directory |
|
| `SF_HOME` | `~/.sf` | Global SF directory |
|
||||||
| `SF_PROJECT_ID` | (auto-hash) | Override project identity hash |
|
| `SF_PROJECT_ID` | (auto-hash) | Override project identity hash |
|
||||||
| `SF_STATE_DIR` | `$SF_HOME` | Per-project state root |
|
| `SF_STATE_DIR` | `$SF_HOME` | Per-project state root |
|
||||||
| `SF_CODING_AGENT_DIR` | `$SF_HOME/agent` | Agent directory |
|
| `SF_CODING_AGENT_DIR` | `$SF_HOME/agent` | Agent directory |
|
||||||
|
|
|
||||||
|
|
@ -15,11 +15,11 @@ Every unit's metrics are captured automatically:
|
||||||
- **Tool calls** — number of tool invocations
|
- **Tool calls** — number of tool invocations
|
||||||
- **Message counts** — assistant and user messages
|
- **Message counts** — assistant and user messages
|
||||||
|
|
||||||
Data is stored in `.gsd/metrics.json` and survives across sessions.
|
Data is stored in `.sf/metrics.json` and survives across sessions.
|
||||||
|
|
||||||
### Viewing costs
|
### Viewing costs
|
||||||
|
|
||||||
`Ctrl+Alt+G` or `/gsd status` shows real-time cost breakdown by:
|
`Ctrl+Alt+G` or `/sf status` shows real-time cost breakdown by:
|
||||||
|
|
||||||
- Phase (research, planning, execution, completion, reassessment)
|
- Phase (research, planning, execution, completion, reassessment)
|
||||||
- Slice (M001/S01, M001/S02, ...)
|
- Slice (M001/S01, M001/S02, ...)
|
||||||
|
|
@ -72,9 +72,9 @@ See [token optimization](/guides/token-optimization) for details.
|
||||||
## Tips
|
## Tips
|
||||||
|
|
||||||
- Start with `balanced` and a generous `budget_ceiling` to establish baseline costs
|
- Start with `balanced` and a generous `budget_ceiling` to establish baseline costs
|
||||||
- Check `/gsd status` after a few slices to see per-slice averages
|
- Check `/sf status` after a few slices to see per-slice averages
|
||||||
- Switch to `budget` for well-understood, repetitive work
|
- Switch to `budget` for well-understood, repetitive work
|
||||||
- Use `quality` only for architectural decisions
|
- Use `quality` only for architectural decisions
|
||||||
- Per-phase model selection lets you use Opus for planning while keeping execution on Sonnet
|
- Per-phase model selection lets you use Opus for planning while keeping execution on Sonnet
|
||||||
- Enable [dynamic routing](/guides/dynamic-model-routing) for automatic downgrading on simple tasks
|
- Enable [dynamic routing](/guides/dynamic-model-routing) for automatic downgrading on simple tasks
|
||||||
- Use `/gsd visualize` → Metrics tab to see where your budget is going
|
- Use `/sf visualize` → Metrics tab to see where your budget is going
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ title: "Custom models"
|
||||||
description: "Add custom providers and models (Ollama, vLLM, LM Studio, proxies) via models.json."
|
description: "Add custom providers and models (Ollama, vLLM, LM Studio, proxies) via models.json."
|
||||||
---
|
---
|
||||||
|
|
||||||
Define custom models and providers in `~/.gsd/agent/models.json`. This lets you add models not in the default registry — self-hosted endpoints, fine-tuned models, proxies, or new provider releases.
|
Define custom models and providers in `~/.sf/agent/models.json`. This lets you add models not in the default registry — self-hosted endpoints, fine-tuned models, proxies, or new provider releases.
|
||||||
|
|
||||||
The file reloads each time you open `/model` — no restart needed.
|
The file reloads each time you open `/model` — no restart needed.
|
||||||
|
|
||||||
|
|
@ -123,4 +123,4 @@ For providers with partial OpenAI compatibility, use the `compat` field at provi
|
||||||
|
|
||||||
| Extension | Provider | Models | Install |
|
| Extension | Provider | Models | Install |
|
||||||
|-----------|----------|--------|---------|
|
|-----------|----------|--------|---------|
|
||||||
| [`pi-dashscope`](https://www.npmjs.com/package/pi-dashscope) | Alibaba DashScope | Qwen3, GLM-5, MiniMax M2.5, Kimi K2.5 | `gsd install npm:pi-dashscope` |
|
| [`pi-dashscope`](https://www.npmjs.com/package/pi-dashscope) | Alibaba DashScope | Qwen3, GLM-5, MiniMax M2.5, Kimi K2.5 | `sf install npm:pi-dashscope` |
|
||||||
|
|
|
||||||
|
|
@ -69,9 +69,9 @@ For `execute-task` units, the classifier analyzes the task plan:
|
||||||
|
|
||||||
## Adaptive learning
|
## Adaptive learning
|
||||||
|
|
||||||
The routing history (`.gsd/routing-history.json`) tracks success/failure per tier per unit type. If a tier's failure rate exceeds 20%, future classifications are bumped up.
|
The routing history (`.sf/routing-history.json`) tracks success/failure per tier per unit type. If a tier's failure rate exceeds 20%, future classifications are bumped up.
|
||||||
|
|
||||||
User feedback (`/gsd rate`) is weighted 2x vs automatic outcomes.
|
User feedback (`/sf rate`) is weighted 2x vs automatic outcomes.
|
||||||
|
|
||||||
## Cost table
|
## Cost table
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ Configure via the `git.isolation` preference:
|
||||||
| Mode | Working directory | Branch | Best for |
|
| Mode | Working directory | Branch | Best for |
|
||||||
|------|-------------------|--------|----------|
|
|------|-------------------|--------|----------|
|
||||||
| `none` (default) | Project root | Current branch | Most projects — no isolation overhead |
|
| `none` (default) | Project root | Current branch | Most projects — no isolation overhead |
|
||||||
| `worktree` | `.gsd/worktrees/<MID>/` | `milestone/<MID>` | Full file isolation |
|
| `worktree` | `.sf/worktrees/<MID>/` | `milestone/<MID>` | Full file isolation |
|
||||||
| `branch` | Project root | `milestone/<MID>` | Submodule-heavy repos |
|
| `branch` | Project root | `milestone/<MID>` | Submodule-heavy repos |
|
||||||
|
|
||||||
### `none` mode (default)
|
### `none` mode (default)
|
||||||
|
|
@ -125,13 +125,13 @@ Pushes the milestone branch and creates a PR targeting your specified branch. Re
|
||||||
|
|
||||||
### `commit_docs: false`
|
### `commit_docs: false`
|
||||||
|
|
||||||
Adds `.gsd/` to `.gitignore` and keeps all planning artifacts local-only. Useful for teams where only some members use SF.
|
Adds `.sf/` to `.gitignore` and keeps all planning artifacts local-only. Useful for teams where only some members use SF.
|
||||||
|
|
||||||
## Worktree management
|
## Worktree management
|
||||||
|
|
||||||
### Automatic (auto mode)
|
### Automatic (auto mode)
|
||||||
|
|
||||||
1. Milestone starts → worktree created at `.gsd/worktrees/<MID>/`
|
1. Milestone starts → worktree created at `.sf/worktrees/<MID>/`
|
||||||
2. Planning artifacts copied into the worktree
|
2. Planning artifacts copied into the worktree
|
||||||
3. All execution happens inside the worktree
|
3. All execution happens inside the worktree
|
||||||
4. Milestone completes → squash-merged to main
|
4. Milestone completes → squash-merged to main
|
||||||
|
|
@ -154,4 +154,4 @@ SF includes automatic recovery for common git issues:
|
||||||
- **Stale lock files** — removes `index.lock` files from crashed processes
|
- **Stale lock files** — removes `index.lock` files from crashed processes
|
||||||
- **Orphaned worktrees** — detects and offers cleanup
|
- **Orphaned worktrees** — detects and offers cleanup
|
||||||
|
|
||||||
Run `/gsd doctor` to check git health manually.
|
Run `/sf doctor` to check git health manually.
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,18 @@
|
||||||
---
|
---
|
||||||
title: "Migration from v1"
|
title: "Migration from v1"
|
||||||
description: "Migrate .planning directories from the original SF to SF's .gsd format."
|
description: "Migrate .planning directories from the original SF to SF's .sf format."
|
||||||
---
|
---
|
||||||
|
|
||||||
If you have projects with `.planning` directories from the original Singularity Forge (v1), you can migrate them to SF's `.gsd` format.
|
If you have projects with `.planning` directories from the original Singularity Forge (v1), you can migrate them to SF's `.sf` format.
|
||||||
|
|
||||||
## Running the migration
|
## Running the migration
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# From within the project directory
|
# From within the project directory
|
||||||
/gsd migrate
|
/sf migrate
|
||||||
|
|
||||||
# Or specify a path
|
# Or specify a path
|
||||||
/gsd migrate ~/projects/my-old-project
|
/sf migrate ~/projects/my-old-project
|
||||||
```
|
```
|
||||||
|
|
||||||
## What gets migrated
|
## What gets migrated
|
||||||
|
|
@ -41,7 +41,7 @@ The migration handles various v1 format variations:
|
||||||
Verify the output:
|
Verify the output:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd doctor
|
/sf doctor
|
||||||
```
|
```
|
||||||
|
|
||||||
This checks `.gsd/` integrity and flags any structural issues.
|
This checks `.sf/` integrity and flags any structural issues.
|
||||||
|
|
|
||||||
|
|
@ -22,13 +22,13 @@ parallel:
|
||||||
2. Start parallel execution:
|
2. Start parallel execution:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd parallel start
|
/sf parallel start
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Monitor progress:
|
3. Monitor progress:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd parallel status
|
/sf parallel status
|
||||||
```
|
```
|
||||||
|
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
@ -43,7 +43,7 @@ parallel:
|
||||||
│ └──────────┘ └──────────┘ └──────────┘ │
|
│ └──────────┘ └──────────┘ └──────────┘ │
|
||||||
│ │ │ │ │
|
│ │ │ │ │
|
||||||
│ ▼ ▼ ▼ │
|
│ ▼ ▼ ▼ │
|
||||||
│ .gsd/worktrees/ .gsd/worktrees/ .gsd/worktrees/ │
|
│ .sf/worktrees/ .sf/worktrees/ .sf/worktrees/ │
|
||||||
└─────────────────────────────────────────────────────┘
|
└─────────────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -88,16 +88,16 @@ parallel:
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd parallel start` | Analyze, confirm, and start workers |
|
| `/sf parallel start` | Analyze, confirm, and start workers |
|
||||||
| `/gsd parallel status` | Show workers with state, progress, cost |
|
| `/sf parallel status` | Show workers with state, progress, cost |
|
||||||
| `/gsd parallel stop [MID]` | Stop all or a specific worker |
|
| `/sf parallel stop [MID]` | Stop all or a specific worker |
|
||||||
| `/gsd parallel pause [MID]` | Pause all or a specific worker |
|
| `/sf parallel pause [MID]` | Pause all or a specific worker |
|
||||||
| `/gsd parallel resume [MID]` | Resume paused workers |
|
| `/sf parallel resume [MID]` | Resume paused workers |
|
||||||
| `/gsd parallel merge [MID]` | Merge completed milestones to main |
|
| `/sf parallel merge [MID]` | Merge completed milestones to main |
|
||||||
|
|
||||||
## Merge reconciliation
|
## Merge reconciliation
|
||||||
|
|
||||||
- `.gsd/` state files — auto-resolved (accept milestone branch version)
|
- `.sf/` state files — auto-resolved (accept milestone branch version)
|
||||||
- Code conflicts — merge halts, shows conflicting files. Resolve manually and retry.
|
- Code conflicts — merge halts, shows conflicting files. Resolve manually and retry.
|
||||||
|
|
||||||
## Budget management
|
## Budget management
|
||||||
|
|
@ -108,16 +108,16 @@ When `budget_ceiling` is set, aggregate cost is tracked across all workers. Ceil
|
||||||
|
|
||||||
### "No milestones are eligible"
|
### "No milestones are eligible"
|
||||||
|
|
||||||
All milestones are complete or blocked by dependencies. Check `/gsd queue`.
|
All milestones are complete or blocked by dependencies. Check `/sf queue`.
|
||||||
|
|
||||||
### Worker crashed
|
### Worker crashed
|
||||||
|
|
||||||
Workers persist state to disk. On restart, the coordinator detects dead PIDs. Run `/gsd doctor --fix` to clean up, then `/gsd parallel start` to spawn new workers.
|
Workers persist state to disk. On restart, the coordinator detects dead PIDs. Run `/sf doctor --fix` to clean up, then `/sf parallel start` to spawn new workers.
|
||||||
|
|
||||||
### Merge conflicts
|
### Merge conflicts
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd parallel merge # see which milestones conflict
|
/sf parallel merge # see which milestones conflict
|
||||||
# resolve in .gsd/worktrees/<MID>/
|
# resolve in .sf/worktrees/<MID>/
|
||||||
/gsd parallel merge MID # retry
|
/sf parallel merge MID # retry
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
|
||||||
<Tabs>
|
<Tabs>
|
||||||
<Tab title="Discord">
|
<Tab title="Discord">
|
||||||
```
|
```
|
||||||
/gsd remote discord
|
/sf remote discord
|
||||||
```
|
```
|
||||||
|
|
||||||
The setup wizard validates your bot token, picks a server and channel, sends a test message, and saves the config.
|
The setup wizard validates your bot token, picks a server and channel, sends a test message, and saves the config.
|
||||||
|
|
@ -21,7 +21,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
|
||||||
</Tab>
|
</Tab>
|
||||||
<Tab title="Slack">
|
<Tab title="Slack">
|
||||||
```
|
```
|
||||||
/gsd remote slack
|
/sf remote slack
|
||||||
```
|
```
|
||||||
|
|
||||||
The setup wizard validates your bot token, picks a channel, sends a test message, and saves the config.
|
The setup wizard validates your bot token, picks a channel, sends a test message, and saves the config.
|
||||||
|
|
@ -32,7 +32,7 @@ Remote questions allow SF to ask for user input via Slack, Discord, or Telegram
|
||||||
</Tab>
|
</Tab>
|
||||||
<Tab title="Telegram">
|
<Tab title="Telegram">
|
||||||
```
|
```
|
||||||
/gsd remote telegram
|
/sf remote telegram
|
||||||
```
|
```
|
||||||
|
|
||||||
The setup wizard validates your bot token, prompts for a chat ID, sends a test message, and saves the config.
|
The setup wizard validates your bot token, prompts for a chat ID, sends a test message, and saves the config.
|
||||||
|
|
@ -76,9 +76,9 @@ If no response within `timeout_minutes`, the LLM makes a conservative default ch
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `/gsd remote` | Show menu and current status |
|
| `/sf remote` | Show menu and current status |
|
||||||
| `/gsd remote slack` | Set up Slack |
|
| `/sf remote slack` | Set up Slack |
|
||||||
| `/gsd remote discord` | Set up Discord |
|
| `/sf remote discord` | Set up Discord |
|
||||||
| `/gsd remote telegram` | Set up Telegram |
|
| `/sf remote telegram` | Set up Telegram |
|
||||||
| `/gsd remote status` | Show current config and last prompt status |
|
| `/sf remote status` | Show current config and last prompt status |
|
||||||
| `/gsd remote disconnect` | Remove configuration |
|
| `/sf remote disconnect` | Remove configuration |
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ Skills are specialized instruction sets that SF loads when the task matches. The
|
||||||
|
|
||||||
## Bundled skills
|
## Bundled skills
|
||||||
|
|
||||||
SF ships with these skills, installed to `~/.gsd/agent/skills/`:
|
SF ships with these skills, installed to `~/.sf/agent/skills/`:
|
||||||
|
|
||||||
| Skill | Trigger | Description |
|
| Skill | Trigger | Description |
|
||||||
|-------|---------|-------------|
|
|-------|---------|-------------|
|
||||||
|
|
@ -51,8 +51,8 @@ skill_rules:
|
||||||
|
|
||||||
### Resolution order
|
### Resolution order
|
||||||
|
|
||||||
1. **Bare name** — e.g., `frontend-design` → scans `~/.gsd/agent/skills/` and project skills
|
1. **Bare name** — e.g., `frontend-design` → scans `~/.sf/agent/skills/` and project skills
|
||||||
2. **Absolute path** — e.g., `/Users/you/.gsd/agent/skills/my-skill/SKILL.md`
|
2. **Absolute path** — e.g., `/Users/you/.sf/agent/skills/my-skill/SKILL.md`
|
||||||
3. **Directory path** — looks for `SKILL.md` inside
|
3. **Directory path** — looks for `SKILL.md` inside
|
||||||
|
|
||||||
User skills take precedence over project skills.
|
User skills take precedence over project skills.
|
||||||
|
|
@ -62,7 +62,7 @@ User skills take precedence over project skills.
|
||||||
Create a directory with a `SKILL.md` file:
|
Create a directory with a `SKILL.md` file:
|
||||||
|
|
||||||
```
|
```
|
||||||
~/.gsd/agent/skills/my-skill/
|
~/.sf/agent/skills/my-skill/
|
||||||
SKILL.md — instructions for the LLM
|
SKILL.md — instructions for the LLM
|
||||||
references/ — optional reference files
|
references/ — optional reference files
|
||||||
```
|
```
|
||||||
|
|
@ -70,17 +70,17 @@ Create a directory with a `SKILL.md` file:
|
||||||
### Project-local skills
|
### Project-local skills
|
||||||
|
|
||||||
```
|
```
|
||||||
.gsd/agent/skills/my-project-skill/
|
.sf/agent/skills/my-project-skill/
|
||||||
SKILL.md
|
SKILL.md
|
||||||
```
|
```
|
||||||
|
|
||||||
## Skill health dashboard
|
## Skill health dashboard
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd skill-health # overview table
|
/sf skill-health # overview table
|
||||||
/gsd skill-health rust-core # detailed view
|
/sf skill-health rust-core # detailed view
|
||||||
/gsd skill-health --stale 30 # unused for 30+ days
|
/sf skill-health --stale 30 # unused for 30+ days
|
||||||
/gsd skill-health --declining # falling success rates
|
/sf skill-health --declining # falling success rates
|
||||||
```
|
```
|
||||||
|
|
||||||
The dashboard flags:
|
The dashboard flags:
|
||||||
|
|
|
||||||
|
|
@ -112,12 +112,12 @@ When approaching the budget ceiling, the classifier automatically downgrades tie
|
||||||
|
|
||||||
## Adaptive learning
|
## Adaptive learning
|
||||||
|
|
||||||
SF tracks success/failure per tier and adjusts classifications over time. User feedback via `/gsd rate` is weighted 2x:
|
SF tracks success/failure per tier and adjusts classifications over time. User feedback via `/sf rate` is weighted 2x:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd rate over # model was overpowered
|
/sf rate over # model was overpowered
|
||||||
/gsd rate ok # appropriate
|
/sf rate ok # appropriate
|
||||||
/gsd rate under # too weak
|
/sf rate under # too weak
|
||||||
```
|
```
|
||||||
|
|
||||||
## Configuration examples
|
## Configuration examples
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,14 @@
|
||||||
---
|
---
|
||||||
title: "Troubleshooting"
|
title: "Troubleshooting"
|
||||||
description: "Common issues, /gsd doctor, /gsd forensics, and recovery procedures."
|
description: "Common issues, /sf doctor, /sf forensics, and recovery procedures."
|
||||||
---
|
---
|
||||||
|
|
||||||
## `/gsd doctor`
|
## `/sf doctor`
|
||||||
|
|
||||||
The built-in diagnostic tool validates `.gsd/` integrity:
|
The built-in diagnostic tool validates `.sf/` integrity:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd doctor
|
/sf doctor
|
||||||
```
|
```
|
||||||
|
|
||||||
It checks file structure, referential integrity, completion state consistency, git worktree health, and stale lock files.
|
It checks file structure, referential integrity, completion state consistency, git worktree health, and stale lock files.
|
||||||
|
|
@ -19,16 +19,16 @@ It checks file structure, referential integrity, completion state consistency, g
|
||||||
<Accordion title="Auto mode loops on the same unit">
|
<Accordion title="Auto mode loops on the same unit">
|
||||||
**Cause:** Stale cache after a crash, or the LLM didn't produce the expected artifact.
|
**Cause:** Stale cache after a crash, or the LLM didn't produce the expected artifact.
|
||||||
|
|
||||||
**Fix:** Run `/gsd doctor` to repair state, then `/gsd auto`.
|
**Fix:** Run `/sf doctor` to repair state, then `/sf auto`.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Auto mode stops with 'Loop detected'">
|
<Accordion title="Auto mode stops with 'Loop detected'">
|
||||||
**Cause:** A unit failed to produce its expected artifact twice in a row.
|
**Cause:** A unit failed to produce its expected artifact twice in a row.
|
||||||
|
|
||||||
**Fix:** Check the task plan for clarity. Refine it manually, then `/gsd auto`.
|
**Fix:** Check the task plan for clarity. Refine it manually, then `/sf auto`.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="command not found: gsd">
|
<Accordion title="command not found: sf">
|
||||||
**Cause:** npm's global bin directory isn't in `$PATH`.
|
**Cause:** npm's global bin directory isn't in `$PATH`.
|
||||||
|
|
||||||
**Fix:**
|
**Fix:**
|
||||||
|
|
@ -38,7 +38,7 @@ It checks file structure, referential integrity, completion state consistency, g
|
||||||
source ~/.zshrc
|
source ~/.zshrc
|
||||||
```
|
```
|
||||||
|
|
||||||
**Workaround:** `npx sf-run` or `$(npm prefix -g)/bin/gsd`
|
**Workaround:** `npx sf-run` or `$(npm prefix -g)/bin/sf`
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Provider errors during auto mode">
|
<Accordion title="Provider errors during auto mode">
|
||||||
|
|
@ -59,25 +59,25 @@ It checks file structure, referential integrity, completion state consistency, g
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Budget ceiling reached">
|
<Accordion title="Budget ceiling reached">
|
||||||
Increase `budget_ceiling` in preferences, or switch to `budget` token profile. Resume with `/gsd auto`.
|
Increase `budget_ceiling` in preferences, or switch to `budget` token profile. Resume with `/sf auto`.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Stale lock file">
|
<Accordion title="Stale lock file">
|
||||||
SF auto-detects stale locks. If automatic recovery fails:
|
SF auto-detects stale locks. If automatic recovery fails:
|
||||||
```bash
|
```bash
|
||||||
rm -f .gsd/auto.lock
|
rm -f .sf/auto.lock
|
||||||
rm -rf "$(dirname .gsd)/.gsd.lock"
|
rm -rf "$(dirname .sf)/.sf.lock"
|
||||||
```
|
```
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Git merge conflicts on .gsd/ files">
|
<Accordion title="Git merge conflicts on .sf/ files">
|
||||||
SF auto-resolves conflicts on `.gsd/` runtime files. For code conflicts, the LLM attempts resolution. If that fails, resolve manually.
|
SF auto-resolves conflicts on `.sf/` runtime files. For code conflicts, the LLM attempts resolution. If that fails, resolve manually.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="EBUSY / EPERM / EACCES on Windows">
|
<Accordion title="EBUSY / EPERM / EACCES on Windows">
|
||||||
**Cause:** Antivirus, indexers, or editors briefly locking files during atomic rename.
|
**Cause:** Antivirus, indexers, or editors briefly locking files during atomic rename.
|
||||||
|
|
||||||
**Fix:** Re-run the operation. Close tools holding files open if the error persists. Run `/gsd doctor` to verify repo health.
|
**Fix:** Re-run the operation. Close tools holding files open if the error persists. Run `/sf doctor` to verify repo health.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="Worktree isolation stopped working after upgrade to v2.45+">
|
<Accordion title="Worktree isolation stopped working after upgrade to v2.45+">
|
||||||
|
|
@ -97,23 +97,23 @@ It checks file structure, referential integrity, completion state consistency, g
|
||||||
</Accordion>
|
</Accordion>
|
||||||
</AccordionGroup>
|
</AccordionGroup>
|
||||||
|
|
||||||
## `/gsd forensics`
|
## `/sf forensics`
|
||||||
|
|
||||||
Full-access debugger for post-mortem analysis:
|
Full-access debugger for post-mortem analysis:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd forensics [optional problem description]
|
/sf forensics [optional problem description]
|
||||||
```
|
```
|
||||||
|
|
||||||
Provides anomaly detection, unit traces, metrics analysis, doctor integration, and LLM-guided investigation.
|
Provides anomaly detection, unit traces, metrics analysis, doctor integration, and LLM-guided investigation.
|
||||||
|
|
||||||
## MCP client issues
|
## MCP client issues
|
||||||
|
|
||||||
Use `/gsd mcp` to check MCP server status and connectivity at a glance.
|
Use `/sf mcp` to check MCP server status and connectivity at a glance.
|
||||||
|
|
||||||
<AccordionGroup>
|
<AccordionGroup>
|
||||||
<Accordion title="No configured servers">
|
<Accordion title="No configured servers">
|
||||||
Verify `.mcp.json` or `.gsd/mcp.json` exists and parses as valid JSON.
|
Verify `.mcp.json` or `.sf/mcp.json` exists and parses as valid JSON.
|
||||||
</Accordion>
|
</Accordion>
|
||||||
|
|
||||||
<Accordion title="mcp_discover times out">
|
<Accordion title="mcp_discover times out">
|
||||||
|
|
@ -130,29 +130,29 @@ Use `/gsd mcp` to check MCP server status and connectivity at a glance.
|
||||||
### Reset auto mode state
|
### Reset auto mode state
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
rm .gsd/auto.lock
|
rm .sf/auto.lock
|
||||||
rm .gsd/completed-units.json
|
rm .sf/completed-units.json
|
||||||
```
|
```
|
||||||
|
|
||||||
Then `/gsd auto` to restart from current disk state.
|
Then `/sf auto` to restart from current disk state.
|
||||||
|
|
||||||
### Reset routing history
|
### Reset routing history
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
rm .gsd/routing-history.json
|
rm .sf/routing-history.json
|
||||||
```
|
```
|
||||||
|
|
||||||
### Full state rebuild
|
### Full state rebuild
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd doctor
|
/sf doctor
|
||||||
```
|
```
|
||||||
|
|
||||||
Rebuilds `STATE.md` from plan and roadmap files on disk.
|
Rebuilds `STATE.md` from plan and roadmap files on disk.
|
||||||
|
|
||||||
## Getting help
|
## Getting help
|
||||||
|
|
||||||
- **GitHub Issues:** [github.com/gsd-build/SF/issues](https://github.com/gsd-build/gsd-2/issues)
|
- **GitHub Issues:** [github.com/sf-build/sf-2/issues](https://github.com/sf-build/sf-2/issues)
|
||||||
- **Dashboard:** `Ctrl+Alt+G` or `/gsd status`
|
- **Dashboard:** `Ctrl+Alt+G` or `/sf status`
|
||||||
- **Forensics:** `/gsd forensics`
|
- **Forensics:** `/sf forensics`
|
||||||
- **Session logs:** `.gsd/activity/`
|
- **Session logs:** `.sf/activity/`
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ The workflow visualizer is a full-screen TUI overlay with four tabs showing proj
|
||||||
## Opening
|
## Opening
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd visualize
|
/sf visualize
|
||||||
```
|
```
|
||||||
|
|
||||||
Or configure automatic display after milestone completion:
|
Or configure automatic display after milestone completion:
|
||||||
|
|
@ -70,10 +70,10 @@ The visualizer refreshes from disk every 2 seconds, staying current alongside a
|
||||||
For shareable reports outside the terminal:
|
For shareable reports outside the terminal:
|
||||||
|
|
||||||
```
|
```
|
||||||
/gsd export --html
|
/sf export --html
|
||||||
```
|
```
|
||||||
|
|
||||||
Generates a self-contained HTML file in `.gsd/reports/` with progress tree, dependency graph (SVG), cost/token charts, execution timeline, and changelog. All CSS and JS are inlined — printable to PDF from any browser.
|
Generates a self-contained HTML file in `.sf/reports/` with progress tree, dependency graph (SVG), cost/token charts, execution timeline, and changelog. All CSS and JS are inlined — printable to PDF from any browser.
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
auto_report: true # auto-generate after milestone completion (default)
|
auto_report: true # auto-generate after milestone completion (default)
|
||||||
|
|
|
||||||
|
|
@ -8,13 +8,13 @@ SF includes a browser-based web interface for project management, real-time prog
|
||||||
## Quick start
|
## Quick start
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd --web
|
sf --web
|
||||||
```
|
```
|
||||||
|
|
||||||
### CLI flags
|
### CLI flags
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd --web --host 0.0.0.0 --port 8080 --allowed-origins "https://example.com"
|
sf --web --host 0.0.0.0 --port 8080 --allowed-origins "https://example.com"
|
||||||
```
|
```
|
||||||
|
|
||||||
| Flag | Default | Description |
|
| Flag | Default | Description |
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ SF supports multi-user workflows where several developers work on the same repos
|
||||||
### 1. Set team mode
|
### 1. Set team mode
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
# .gsd/PREFERENCES.md (project-level, committed to git)
|
# .sf/PREFERENCES.md (project-level, committed to git)
|
||||||
---
|
---
|
||||||
version: 1
|
version: 1
|
||||||
mode: team
|
mode: team
|
||||||
|
|
@ -25,20 +25,20 @@ Share planning artifacts while keeping runtime files local:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Runtime / ephemeral (per-developer)
|
# Runtime / ephemeral (per-developer)
|
||||||
.gsd/auto.lock
|
.sf/auto.lock
|
||||||
.gsd/completed-units*.json
|
.sf/completed-units*.json
|
||||||
.gsd/state-manifest.json
|
.sf/state-manifest.json
|
||||||
.gsd/STATE.md
|
.sf/STATE.md
|
||||||
.gsd/metrics.json
|
.sf/metrics.json
|
||||||
.gsd/activity/
|
.sf/activity/
|
||||||
.gsd/runtime/
|
.sf/runtime/
|
||||||
.gsd/worktrees/
|
.sf/worktrees/
|
||||||
.gsd/gsd.db*
|
.sf/sf.db*
|
||||||
.gsd/journal/
|
.sf/journal/
|
||||||
.gsd/doctor-history.jsonl
|
.sf/doctor-history.jsonl
|
||||||
.gsd/event-log.jsonl
|
.sf/event-log.jsonl
|
||||||
.gsd/milestones/**/continue.md
|
.sf/milestones/**/continue.md
|
||||||
.gsd/milestones/**/*-CONTINUE.md
|
.sf/milestones/**/*-CONTINUE.md
|
||||||
```
|
```
|
||||||
|
|
||||||
**Shared** (committed): preferences, PROJECT.md, REQUIREMENTS.md, DECISIONS.md, milestones.
|
**Shared** (committed): preferences, PROJECT.md, REQUIREMENTS.md, DECISIONS.md, milestones.
|
||||||
|
|
@ -48,7 +48,7 @@ Share planning artifacts while keeping runtime files local:
|
||||||
### 3. Commit
|
### 3. Commit
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git add .gsd/PREFERENCES.md
|
git add .sf/PREFERENCES.md
|
||||||
git commit -m "chore: enable SF team workflow"
|
git commit -m "chore: enable SF team workflow"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -61,7 +61,7 @@ git:
|
||||||
commit_docs: false
|
commit_docs: false
|
||||||
```
|
```
|
||||||
|
|
||||||
Adds `.gsd/` to `.gitignore` entirely. The developer gets structured planning without affecting teammates.
|
Adds `.sf/` to `.gitignore` entirely. The developer gets structured planning without affecting teammates.
|
||||||
|
|
||||||
## Parallel development
|
## Parallel development
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -54,11 +54,11 @@
|
||||||
<text class="text white" font-size="15" y="232"> development system for Claude Code by TÂCHES.</text>
|
<text class="text white" font-size="15" y="232"> development system for Claude Code by TÂCHES.</text>
|
||||||
|
|
||||||
<!-- Install output -->
|
<!-- Install output -->
|
||||||
<text class="text" font-size="15" y="280"><tspan class="green"> ✓</tspan><tspan class="white"> Installed commands/gsd</tspan></text>
|
<text class="text" font-size="15" y="280"><tspan class="green"> ✓</tspan><tspan class="white"> Installed commands/sf</tspan></text>
|
||||||
<text class="text" font-size="15" y="304"><tspan class="green"> ✓</tspan><tspan class="white"> Installed get-shit-done</tspan></text>
|
<text class="text" font-size="15" y="304"><tspan class="green"> ✓</tspan><tspan class="white"> Installed get-shit-done</tspan></text>
|
||||||
|
|
||||||
<!-- Done message -->
|
<!-- Done message -->
|
||||||
<text class="text" font-size="15" y="352"><tspan class="green"> Done!</tspan><tspan class="white"> Run </tspan><tspan class="cyan">/gsd:help</tspan><tspan class="white"> to get started.</tspan></text>
|
<text class="text" font-size="15" y="352"><tspan class="green"> Done!</tspan><tspan class="white"> Run </tspan><tspan class="cyan">/sf:help</tspan><tspan class="white"> to get started.</tspan></text>
|
||||||
|
|
||||||
<!-- New prompt -->
|
<!-- New prompt -->
|
||||||
<text class="text prompt" font-size="15" y="400">~</text>
|
<text class="text prompt" font-size="15" y="400">~</text>
|
||||||
|
|
|
||||||
|
Before Width: | Height: | Size: 3.5 KiB After Width: | Height: | Size: 3.5 KiB |
|
|
@ -3,7 +3,7 @@ title: "SF — Singularity Forge"
|
||||||
description: "An autonomous coding agent that researches, plans, executes, and commits code while you focus on what matters."
|
description: "An autonomous coding agent that researches, plans, executes, and commits code while you focus on what matters."
|
||||||
---
|
---
|
||||||
|
|
||||||
SF is an autonomous coding agent. Describe what you want built, run `/gsd auto`, and walk away. Come back to working software with clean git history.
|
SF is an autonomous coding agent. Describe what you want built, run `/sf auto`, and walk away. Come back to working software with clean git history.
|
||||||
|
|
||||||
## What SF does
|
## What SF does
|
||||||
|
|
||||||
|
|
@ -48,19 +48,19 @@ Every phase gets a fresh context window with pre-loaded context — no accumulat
|
||||||
|
|
||||||
<Tabs>
|
<Tabs>
|
||||||
<Tab title="Step mode">
|
<Tab title="Step mode">
|
||||||
Type `/gsd` inside a session. SF executes one unit at a time, pausing between each so you can review.
|
Type `/sf` inside a session. SF executes one unit at a time, pausing between each so you can review.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd
|
/sf
|
||||||
```
|
```
|
||||||
</Tab>
|
</Tab>
|
||||||
<Tab title="Auto mode">
|
<Tab title="Auto mode">
|
||||||
Type `/gsd auto` and walk away. SF autonomously researches, plans, executes, verifies, and commits until the milestone is complete.
|
Type `/sf auto` and walk away. SF autonomously researches, plans, executes, verifies, and commits until the milestone is complete.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
</Tab>
|
</Tab>
|
||||||
</Tabs>
|
</Tabs>
|
||||||
|
|
@ -70,17 +70,17 @@ The recommended workflow: auto mode in one terminal, steering from another.
|
||||||
**Terminal 1 — let it build:**
|
**Terminal 1 — let it build:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd auto
|
/sf auto
|
||||||
```
|
```
|
||||||
|
|
||||||
**Terminal 2 — steer while it works:**
|
**Terminal 2 — steer while it works:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
gsd
|
sf
|
||||||
/gsd discuss # talk through architecture decisions
|
/sf discuss # talk through architecture decisions
|
||||||
/gsd status # check progress
|
/sf status # check progress
|
||||||
/gsd capture # fire-and-forget thoughts
|
/sf capture # fire-and-forget thoughts
|
||||||
```
|
```
|
||||||
|
|
||||||
## Next steps
|
## Next steps
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ version = "0.1.0"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
authors = ["SF Contributors"]
|
authors = ["SF Contributors"]
|
||||||
repository = "https://github.com/gsd-build/gsd-2"
|
repository = "https://github.com/sf-build/sf-2"
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
opt-level = 3
|
opt-level = 3
|
||||||
|
|
|
||||||
|
|
@ -84,7 +84,7 @@ Ripgrep-backed regex search using the `grep-regex`, `grep-searcher`, and `grep-m
|
||||||
**TypeScript usage:**
|
**TypeScript usage:**
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { grep, searchContent } from "@gsd/native";
|
import { grep, searchContent } from "@sf/native";
|
||||||
|
|
||||||
// Search files
|
// Search files
|
||||||
const result = grep({
|
const result = grep({
|
||||||
|
|
@ -103,9 +103,9 @@ const contentResult = searchContent(Buffer.from(fileContent), {
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
### gsd_parser
|
### sf_parser
|
||||||
|
|
||||||
SF file parsing and frontmatter extraction. Reads `.gsd` files and extracts structured metadata from YAML frontmatter blocks.
|
SF file parsing and frontmatter extraction. Reads `.sf` files and extracts structured metadata from YAML frontmatter blocks.
|
||||||
|
|
||||||
### highlight
|
### highlight
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
//! SF `.gsd/` directory file parser.
|
//! SF `.sf/` directory file parser.
|
||||||
//!
|
//!
|
||||||
//! Parses markdown files containing YAML-like frontmatter, section headings,
|
//! Parses markdown files containing YAML-like frontmatter, section headings,
|
||||||
//! and structured content used by SF's planning system (roadmaps, plans,
|
//! and structured content used by SF's planning system (roadmaps, plans,
|
||||||
|
|
@ -7,7 +7,7 @@
|
||||||
//! Key operations:
|
//! Key operations:
|
||||||
//! - `parseFrontmatter`: split frontmatter from body, parse YAML-like key-value pairs
|
//! - `parseFrontmatter`: split frontmatter from body, parse YAML-like key-value pairs
|
||||||
//! - `extractSection`: extract content under a specific heading
|
//! - `extractSection`: extract content under a specific heading
|
||||||
//! - `batchParseGsdFiles`: walk a `.gsd/` tree and parse all `.md` files in parallel
|
//! - `batchParseSfFiles`: walk a `.sf/` tree and parse all `.md` files in parallel
|
||||||
//! - `parseRoadmapFile`: parse structured roadmap data from content
|
//! - `parseRoadmapFile`: parse structured roadmap data from content
|
||||||
|
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
@ -38,7 +38,7 @@ pub struct SectionResult {
|
||||||
|
|
||||||
/// A single parsed SF file from batch parsing.
|
/// A single parsed SF file from batch parsing.
|
||||||
#[napi(object)]
|
#[napi(object)]
|
||||||
pub struct ParsedGsdFile {
|
pub struct ParsedSfFile {
|
||||||
/// Relative path from the base directory.
|
/// Relative path from the base directory.
|
||||||
pub path: String,
|
pub path: String,
|
||||||
/// Parsed frontmatter as JSON string.
|
/// Parsed frontmatter as JSON string.
|
||||||
|
|
@ -56,7 +56,7 @@ pub struct ParsedGsdFile {
|
||||||
#[napi(object)]
|
#[napi(object)]
|
||||||
pub struct BatchParseResult {
|
pub struct BatchParseResult {
|
||||||
/// All parsed files.
|
/// All parsed files.
|
||||||
pub files: Vec<ParsedGsdFile>,
|
pub files: Vec<ParsedSfFile>,
|
||||||
/// Number of files processed.
|
/// Number of files processed.
|
||||||
pub count: u32,
|
pub count: u32,
|
||||||
}
|
}
|
||||||
|
|
@ -724,13 +724,13 @@ pub fn extract_all_sections(content: String, level: Option<u32>) -> String {
|
||||||
sections_to_json(§ions)
|
sections_to_json(§ions)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Batch-parse all `.md` files in a `.gsd/` directory tree.
|
/// Batch-parse all `.md` files in a `.sf/` directory tree.
|
||||||
///
|
///
|
||||||
/// Reads all markdown files under the given directory, parses frontmatter
|
/// Reads all markdown files under the given directory, parses frontmatter
|
||||||
/// and extracts all level-2 sections for each file. Returns all results
|
/// and extracts all level-2 sections for each file. Returns all results
|
||||||
/// in a single call, avoiding repeated JS<->native boundary crossings.
|
/// in a single call, avoiding repeated JS<->native boundary crossings.
|
||||||
#[napi(js_name = "batchParseGsdFiles")]
|
#[napi(js_name = "batchParseSfFiles")]
|
||||||
pub fn batch_parse_gsd_files(directory: String) -> Result<BatchParseResult> {
|
pub fn batch_parse_sf_files(directory: String) -> Result<BatchParseResult> {
|
||||||
let dir_path = Path::new(&directory);
|
let dir_path = Path::new(&directory);
|
||||||
if !dir_path.exists() {
|
if !dir_path.exists() {
|
||||||
return Ok(BatchParseResult {
|
return Ok(BatchParseResult {
|
||||||
|
|
@ -767,7 +767,7 @@ pub fn batch_parse_gsd_files(directory: String) -> Result<BatchParseResult> {
|
||||||
let sections = extract_all_sections_internal(body, 2);
|
let sections = extract_all_sections_internal(body, 2);
|
||||||
let sections_json = sections_to_json(§ions);
|
let sections_json = sections_to_json(§ions);
|
||||||
|
|
||||||
parsed_files.push(ParsedGsdFile {
|
parsed_files.push(ParsedSfFile {
|
||||||
path: path.clone(),
|
path: path.clone(),
|
||||||
metadata,
|
metadata,
|
||||||
body: body.to_string(),
|
body: body.to_string(),
|
||||||
|
|
@ -838,15 +838,15 @@ pub fn parse_roadmap_file(content: String) -> NativeRoadmap {
|
||||||
// ─── SF Tree Scanner ───────────────────────────────────────────────────────
|
// ─── SF Tree Scanner ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
#[napi(object)]
|
#[napi(object)]
|
||||||
pub struct GsdTreeEntry {
|
pub struct SfTreeEntry {
|
||||||
pub path: String,
|
pub path: String,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
#[napi(js_name = "isDir")]
|
#[napi(js_name = "isDir")]
|
||||||
pub is_dir: bool,
|
pub is_dir: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[napi(js_name = "scanGsdTree")]
|
#[napi(js_name = "scanSfTree")]
|
||||||
pub fn scan_gsd_tree(directory: String) -> Result<Vec<GsdTreeEntry>> {
|
pub fn scan_sf_tree(directory: String) -> Result<Vec<SfTreeEntry>> {
|
||||||
let base = Path::new(&directory);
|
let base = Path::new(&directory);
|
||||||
if !base.exists() {
|
if !base.exists() {
|
||||||
return Ok(Vec::new());
|
return Ok(Vec::new());
|
||||||
|
|
@ -856,7 +856,7 @@ pub fn scan_gsd_tree(directory: String) -> Result<Vec<GsdTreeEntry>> {
|
||||||
Ok(entries)
|
Ok(entries)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec<GsdTreeEntry>) -> Result<()> {
|
fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec<SfTreeEntry>) -> Result<()> {
|
||||||
let read_dir = match std::fs::read_dir(dir) {
|
let read_dir = match std::fs::read_dir(dir) {
|
||||||
Ok(rd) => rd,
|
Ok(rd) => rd,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
|
|
@ -886,7 +886,7 @@ fn collect_tree_entries(base: &Path, dir: &Path, entries: &mut Vec<GsdTreeEntry>
|
||||||
let name = entry.file_name().to_string_lossy().to_string();
|
let name = entry.file_name().to_string_lossy().to_string();
|
||||||
let is_dir = file_type.is_dir();
|
let is_dir = file_type.is_dir();
|
||||||
|
|
||||||
entries.push(GsdTreeEntry {
|
entries.push(SfTreeEntry {
|
||||||
path: relative,
|
path: relative,
|
||||||
name,
|
name,
|
||||||
is_dir,
|
is_dir,
|
||||||
|
|
|
||||||
|
|
@ -417,7 +417,7 @@ pub fn git_diff_stat(
|
||||||
|
|
||||||
/// Get name-status diff between two refs with optional pathspec filter.
|
/// Get name-status diff between two refs with optional pathspec filter.
|
||||||
/// `use_merge_base`: if true, uses three-dot semantics (diff from merge base).
|
/// `use_merge_base`: if true, uses three-dot semantics (diff from merge base).
|
||||||
/// Replaces: `git diff --name-status main...branch -- .gsd/`
|
/// Replaces: `git diff --name-status main...branch -- .sf/`
|
||||||
#[napi]
|
#[napi]
|
||||||
pub fn git_diff_name_status(
|
pub fn git_diff_name_status(
|
||||||
repo_path: String,
|
repo_path: String,
|
||||||
|
|
@ -523,8 +523,8 @@ pub fn git_diff_numstat(
|
||||||
|
|
||||||
/// Get unified diff content between two refs with optional pathspec/exclude.
|
/// Get unified diff content between two refs with optional pathspec/exclude.
|
||||||
/// `use_merge_base`: if true, uses three-dot semantics.
|
/// `use_merge_base`: if true, uses three-dot semantics.
|
||||||
/// `exclude`: optional pathspec to exclude (e.g., ".gsd/").
|
/// `exclude`: optional pathspec to exclude (e.g., ".sf/").
|
||||||
/// Replaces: `git diff main...branch -- .gsd/` and `-- . :(exclude).gsd/`
|
/// Replaces: `git diff main...branch -- .sf/` and `-- . :(exclude).sf/`
|
||||||
#[napi]
|
#[napi]
|
||||||
pub fn git_diff_content(
|
pub fn git_diff_content(
|
||||||
repo_path: String,
|
repo_path: String,
|
||||||
|
|
@ -685,7 +685,7 @@ pub fn git_worktree_list(repo_path: String) -> Result<Vec<GitWorktreeEntry>> {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List branches matching an optional glob pattern.
|
/// List branches matching an optional glob pattern.
|
||||||
/// Replaces: `git branch --list milestone/*`, `git branch --list gsd/*`
|
/// Replaces: `git branch --list milestone/*`, `git branch --list sf/*`
|
||||||
#[napi]
|
#[napi]
|
||||||
pub fn git_branch_list(repo_path: String, pattern: Option<String>) -> Result<Vec<String>> {
|
pub fn git_branch_list(repo_path: String, pattern: Option<String>) -> Result<Vec<String>> {
|
||||||
let repo = open_repo(&repo_path)?;
|
let repo = open_repo(&repo_path)?;
|
||||||
|
|
@ -711,13 +711,13 @@ pub fn git_branch_list(repo_path: String, pattern: Option<String>) -> Result<Vec
|
||||||
Ok(names)
|
Ok(names)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Simple branch pattern matching for patterns like "milestone/*", "gsd/*/*"
|
/// Simple branch pattern matching for patterns like "milestone/*", "sf/*/*"
|
||||||
fn matches_branch_pattern(name: &str, pattern: &str) -> bool {
|
fn matches_branch_pattern(name: &str, pattern: &str) -> bool {
|
||||||
// Handle simple prefix/* patterns
|
// Handle simple prefix/* patterns
|
||||||
if let Some(prefix) = pattern.strip_suffix("/*") {
|
if let Some(prefix) = pattern.strip_suffix("/*") {
|
||||||
// For "gsd/*/*", this becomes "gsd/*" after first strip
|
// For "sf/*/*", this becomes "sf/*" after first strip
|
||||||
if prefix.contains('*') {
|
if prefix.contains('*') {
|
||||||
// Recursive: "gsd/*/*" → name must start with "gsd/" and have at least 2 segments after
|
// Recursive: "sf/*/*" → name must start with "sf/" and have at least 2 segments after
|
||||||
if let Some(inner_prefix) = prefix.strip_suffix("/*") {
|
if let Some(inner_prefix) = prefix.strip_suffix("/*") {
|
||||||
return name.starts_with(&format!("{inner_prefix}/"))
|
return name.starts_with(&format!("{inner_prefix}/"))
|
||||||
&& name[inner_prefix.len() + 1..].contains('/');
|
&& name[inner_prefix.len() + 1..].contains('/');
|
||||||
|
|
@ -730,7 +730,7 @@ fn matches_branch_pattern(name: &str, pattern: &str) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List branches that have been merged into the given target branch.
|
/// List branches that have been merged into the given target branch.
|
||||||
/// Replaces: `git branch --merged main --list gsd/*`
|
/// Replaces: `git branch --merged main --list sf/*`
|
||||||
#[napi]
|
#[napi]
|
||||||
pub fn git_branch_list_merged(
|
pub fn git_branch_list_merged(
|
||||||
repo_path: String,
|
repo_path: String,
|
||||||
|
|
@ -792,7 +792,7 @@ pub fn git_ls_files(repo_path: String, pathspec: String) -> Result<Vec<String>>
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List references matching a prefix.
|
/// List references matching a prefix.
|
||||||
/// Replaces: `git for-each-ref refs/gsd/snapshots/ --format=%(refname)`
|
/// Replaces: `git for-each-ref refs/sf/snapshots/ --format=%(refname)`
|
||||||
#[napi]
|
#[napi]
|
||||||
pub fn git_for_each_ref(repo_path: String, prefix: String) -> Result<Vec<String>> {
|
pub fn git_for_each_ref(repo_path: String, prefix: String) -> Result<Vec<String>> {
|
||||||
let repo = open_repo(&repo_path)?;
|
let repo = open_repo(&repo_path)?;
|
||||||
|
|
|
||||||
|
|
@ -27,26 +27,46 @@ const profile = isDev ? "debug" : "release";
|
||||||
const cargoArgs = ["build"];
|
const cargoArgs = ["build"];
|
||||||
if (!isDev) cargoArgs.push("--release");
|
if (!isDev) cargoArgs.push("--release");
|
||||||
|
|
||||||
|
function getCargoEnvironment() {
|
||||||
|
return {
|
||||||
|
...process.env,
|
||||||
|
// Optimize for native CPU when building locally
|
||||||
|
RUSTFLAGS: process.env.RUSTFLAGS || "-C target-cpu=native",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getCargoTargetDirectory() {
|
||||||
|
if (process.env.CARGO_TARGET_DIR) {
|
||||||
|
return path.resolve(process.env.CARGO_TARGET_DIR);
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadataRaw = execSync("cargo metadata --format-version 1 --no-deps", {
|
||||||
|
cwd: engineDir,
|
||||||
|
stdio: ["ignore", "pipe", "inherit"],
|
||||||
|
env: getCargoEnvironment(),
|
||||||
|
}).toString();
|
||||||
|
const metadata = JSON.parse(metadataRaw);
|
||||||
|
if (typeof metadata.target_directory !== "string" || metadata.target_directory.length === 0) {
|
||||||
|
throw new Error("cargo metadata did not return a target_directory");
|
||||||
|
}
|
||||||
|
return path.resolve(metadata.target_directory);
|
||||||
|
}
|
||||||
|
|
||||||
console.log(`Building forge-engine (${profile})...`);
|
console.log(`Building forge-engine (${profile})...`);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
execSync(`cargo ${cargoArgs.join(" ")}`, {
|
execSync(`cargo ${cargoArgs.join(" ")}`, {
|
||||||
cwd: engineDir,
|
cwd: engineDir,
|
||||||
stdio: "inherit",
|
stdio: "inherit",
|
||||||
env: {
|
env: getCargoEnvironment(),
|
||||||
...process.env,
|
|
||||||
// Optimize for native CPU when building locally
|
|
||||||
RUSTFLAGS: process.env.RUSTFLAGS || "-C target-cpu=native",
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
} catch {
|
} catch {
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Locate the built library
|
// Locate the built library using Cargo's actual target directory. Under Nix this
|
||||||
const cargoTargetRoot = process.env.CARGO_TARGET_DIR
|
// is often redirected to a shared cache path rather than native/target.
|
||||||
? path.resolve(process.env.CARGO_TARGET_DIR)
|
const cargoTargetRoot = getCargoTargetDirectory();
|
||||||
: path.join(nativeRoot, "target");
|
|
||||||
|
|
||||||
const targetDir = path.join(cargoTargetRoot, profile);
|
const targetDir = path.join(cargoTargetRoot, profile);
|
||||||
const platformTag = `${process.platform}-${process.arch}`;
|
const platformTag = `${process.platform}-${process.arch}`;
|
||||||
|
|
|
||||||
778
package-lock.json
generated
778
package-lock.json
generated
File diff suppressed because it is too large
Load diff
12
package.json
12
package.json
|
|
@ -57,11 +57,11 @@
|
||||||
"copy-themes": "node scripts/copy-themes.cjs",
|
"copy-themes": "node scripts/copy-themes.cjs",
|
||||||
"copy-export-html": "node scripts/copy-export-html.cjs",
|
"copy-export-html": "node scripts/copy-export-html.cjs",
|
||||||
"test:compile": "node scripts/compile-tests.mjs",
|
"test:compile": "node scripts/compile-tests.mjs",
|
||||||
"test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test \"dist-test/src/tests/*.test.js\" \"dist-test/src/resources/extensions/gsd/tests/*.test.js\" \"dist-test/src/resources/extensions/gsd/tests/*.test.mjs\" \"dist-test/src/resources/extensions/shared/tests/*.test.js\" \"dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js\" \"dist-test/src/resources/extensions/github-sync/tests/*.test.js\" \"dist-test/src/resources/extensions/universal-config/tests/*.test.js\" \"dist-test/src/resources/extensions/voice/tests/*.test.js\" \"dist-test/src/resources/extensions/mcp-client/tests/*.test.js\"",
|
"test:unit": "npm run test:compile && node --import ./scripts/dist-test-resolve.mjs --experimental-test-isolation=process --test-reporter=./scripts/test-reporter-compact.mjs --test \"dist-test/src/tests/*.test.js\" \"dist-test/src/resources/extensions/sf/tests/*.test.js\" \"dist-test/src/resources/extensions/sf/tests/*.test.mjs\" \"dist-test/src/resources/extensions/shared/tests/*.test.js\" \"dist-test/src/resources/extensions/claude-code-cli/tests/*.test.js\" \"dist-test/src/resources/extensions/github-sync/tests/*.test.js\" \"dist-test/src/resources/extensions/universal-config/tests/*.test.js\" \"dist-test/src/resources/extensions/voice/tests/*.test.js\" \"dist-test/src/resources/extensions/mcp-client/tests/*.test.js\"",
|
||||||
"test:packages": "node --test packages/pi-coding-agent/dist/core/*.test.js packages/pi-coding-agent/dist/core/tools/spawn-shell-windows.test.js",
|
"test:packages": "node --test packages/pi-coding-agent/dist/core/*.test.js packages/pi-coding-agent/dist/core/tools/spawn-shell-windows.test.js",
|
||||||
"test:marketplace": "node scripts/with-env.mjs SF_TEST_CLONE_MARKETPLACES=1 -- node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/gsd/tests/claude-import-tui.test.ts src/resources/extensions/gsd/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts",
|
"test:marketplace": "node scripts/with-env.mjs SF_TEST_CLONE_MARKETPLACES=1 -- node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test src/resources/extensions/sf/tests/claude-import-tui.test.ts src/resources/extensions/sf/tests/plugin-importer-live.test.ts src/tests/marketplace-discovery.test.ts",
|
||||||
"test:coverage": "c8 --reporter=text --reporter=lcov --exclude=\"src/resources/extensions/gsd/tests/**\" --exclude=\"src/tests/**\" --exclude=\"scripts/**\" --exclude=\"native/**\" --exclude=\"node_modules/**\" --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/gsd/tests/*.test.ts src/resources/extensions/gsd/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts",
|
"test:coverage": "c8 --reporter=text --reporter=lcov --exclude=\"src/resources/extensions/sf/tests/**\" --exclude=\"src/tests/**\" --exclude=\"scripts/**\" --exclude=\"native/**\" --exclude=\"node_modules/**\" --check-coverage --statements=40 --lines=40 --branches=20 --functions=20 node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --experimental-test-isolation=process --test src/resources/extensions/sf/tests/*.test.ts src/resources/extensions/sf/tests/*.test.mjs src/tests/*.test.ts src/resources/extensions/shared/tests/*.test.ts",
|
||||||
"test:integration": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test \"src/tests/integration/*.test.ts\" \"src/resources/extensions/gsd/tests/integration/*.test.ts\" \"src/resources/extensions/async-jobs/*.test.ts\" \"src/resources/extensions/browser-tools/tests/*.test.mjs\"",
|
"test:integration": "node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test \"src/tests/integration/*.test.ts\" \"src/resources/extensions/sf/tests/integration/*.test.ts\" \"src/resources/extensions/async-jobs/*.test.ts\" \"src/resources/extensions/browser-tools/tests/*.test.mjs\"",
|
||||||
"pretest": "npm run typecheck:extensions",
|
"pretest": "npm run typecheck:extensions",
|
||||||
"test": "npm run test:unit && npm run test:integration",
|
"test": "npm run test:unit && npm run test:integration",
|
||||||
"test:smoke": "node --experimental-strip-types tests/smoke/run.ts",
|
"test:smoke": "node --experimental-strip-types tests/smoke/run.ts",
|
||||||
|
|
@ -70,7 +70,7 @@
|
||||||
"test:live": "node scripts/with-env.mjs SF_LIVE_TESTS=1 -- node --experimental-strip-types tests/live/run.ts",
|
"test:live": "node scripts/with-env.mjs SF_LIVE_TESTS=1 -- node --experimental-strip-types tests/live/run.ts",
|
||||||
"test:browser-tools": "node --test src/resources/extensions/browser-tools/tests/browser-tools-unit.test.cjs src/resources/extensions/browser-tools/tests/browser-tools-integration.test.mjs",
|
"test:browser-tools": "node --test src/resources/extensions/browser-tools/tests/browser-tools-unit.test.cjs src/resources/extensions/browser-tools/tests/browser-tools-integration.test.mjs",
|
||||||
"test:native": "node --test packages/native/src/__tests__/grep.test.mjs",
|
"test:native": "node --test packages/native/src/__tests__/grep.test.mjs",
|
||||||
"test:secret-scan": "node --import ./src/resources/extensions/gsd/tests/resolve-ts.mjs --experimental-strip-types --test src/tests/secret-scan.test.ts",
|
"test:secret-scan": "node --import ./src/resources/extensions/sf/tests/resolve-ts.mjs --experimental-strip-types --test src/tests/secret-scan.test.ts",
|
||||||
"secret-scan": "node scripts/secret-scan.mjs",
|
"secret-scan": "node scripts/secret-scan.mjs",
|
||||||
"secret-scan:install-hook": "node scripts/install-hooks.mjs",
|
"secret-scan:install-hook": "node scripts/install-hooks.mjs",
|
||||||
"build:native": "node native/scripts/build.js",
|
"build:native": "node native/scripts/build.js",
|
||||||
|
|
@ -129,6 +129,7 @@
|
||||||
"proper-lockfile": "^4.1.2",
|
"proper-lockfile": "^4.1.2",
|
||||||
"proxy-agent": "^6.5.0",
|
"proxy-agent": "^6.5.0",
|
||||||
"sharp": "^0.34.5",
|
"sharp": "^0.34.5",
|
||||||
|
"shell-quote": "^1.8.3",
|
||||||
"sql.js": "^1.14.1",
|
"sql.js": "^1.14.1",
|
||||||
"strip-ansi": "^7.1.0",
|
"strip-ansi": "^7.1.0",
|
||||||
"undici": "^7.24.2",
|
"undici": "^7.24.2",
|
||||||
|
|
@ -138,6 +139,7 @@
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^24.12.0",
|
"@types/node": "^24.12.0",
|
||||||
"@types/picomatch": "^4.0.2",
|
"@types/picomatch": "^4.0.2",
|
||||||
|
"@types/shell-quote": "^1.7.5",
|
||||||
"c8": "^11.0.0",
|
"c8": "^11.0.0",
|
||||||
"esbuild": "^0.25.12",
|
"esbuild": "^0.25.12",
|
||||||
"jiti": "^2.6.1",
|
"jiti": "^2.6.1",
|
||||||
|
|
|
||||||
|
|
@ -285,8 +285,8 @@ export class SessionManager extends EventEmitter {
|
||||||
if (envPath) return resolve(envPath);
|
if (envPath) return resolve(envPath);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim();
|
const sfBin = execSync('which sf', { encoding: 'utf-8' }).trim();
|
||||||
if (gsdBin) return resolve(gsdBin);
|
if (sfBin) return resolve(sfBin);
|
||||||
} catch {
|
} catch {
|
||||||
// which failed
|
// which failed
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@ export type {
|
||||||
export { MAX_EVENTS, INIT_TIMEOUT_MS } from './types.js';
|
export { MAX_EVENTS, INIT_TIMEOUT_MS } from './types.js';
|
||||||
|
|
||||||
// Path resolution utilities
|
// Path resolution utilities
|
||||||
export { resolveGsdRoot } from './readers/paths.js';
|
export { resolveSFRoot } from './readers/paths.js';
|
||||||
|
|
||||||
// Read-only state readers (usable without a running session)
|
// Read-only state readers (usable without a running session)
|
||||||
export { readProgress } from './readers/state.js';
|
export { readProgress } from './readers/state.js';
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
import { resolveGsdRoot, resolveRootFile } from './paths.js';
|
import { resolveSFRoot, resolveRootFile } from './paths.js';
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Types
|
// Types
|
||||||
|
|
@ -86,7 +86,7 @@ export function readCaptures(
|
||||||
projectDir: string,
|
projectDir: string,
|
||||||
filter: 'all' | 'pending' | 'actionable' = 'all',
|
filter: 'all' | 'pending' | 'actionable' = 'all',
|
||||||
): CapturesResult {
|
): CapturesResult {
|
||||||
const sf = resolveGsdRoot(projectDir);
|
const sf = resolveSFRoot(projectDir);
|
||||||
const capturesPath = resolveRootFile(sf, 'CAPTURES.md');
|
const capturesPath = resolveRootFile(sf, 'CAPTURES.md');
|
||||||
|
|
||||||
if (!existsSync(capturesPath)) {
|
if (!existsSync(capturesPath)) {
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
import { existsSync, readFileSync } from 'node:fs';
|
import { existsSync, readFileSync } from 'node:fs';
|
||||||
import {
|
import {
|
||||||
resolveGsdRoot,
|
resolveSFRoot,
|
||||||
resolveRootFile,
|
resolveRootFile,
|
||||||
findMilestoneIds,
|
findMilestoneIds,
|
||||||
resolveMilestoneFile,
|
resolveMilestoneFile,
|
||||||
|
|
@ -38,9 +38,9 @@ export interface DoctorResult {
|
||||||
// Check implementations
|
// Check implementations
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
|
function checkProjectLevel(sfRoot: string, issues: DoctorIssue[]): void {
|
||||||
// PROJECT.md should exist
|
// PROJECT.md should exist
|
||||||
const projectPath = resolveRootFile(gsdRoot, 'PROJECT.md');
|
const projectPath = resolveRootFile(sfRoot, 'PROJECT.md');
|
||||||
if (!existsSync(projectPath)) {
|
if (!existsSync(projectPath)) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'warning',
|
severity: 'warning',
|
||||||
|
|
@ -53,9 +53,9 @@ function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
|
||||||
}
|
}
|
||||||
|
|
||||||
// STATE.md should exist if milestones exist
|
// STATE.md should exist if milestones exist
|
||||||
const milestones = findMilestoneIds(gsdRoot);
|
const milestones = findMilestoneIds(sfRoot);
|
||||||
if (milestones.length > 0) {
|
if (milestones.length > 0) {
|
||||||
const statePath = resolveRootFile(gsdRoot, 'STATE.md');
|
const statePath = resolveRootFile(sfRoot, 'STATE.md');
|
||||||
if (!existsSync(statePath)) {
|
if (!existsSync(statePath)) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'warning',
|
severity: 'warning',
|
||||||
|
|
@ -69,8 +69,8 @@ function checkProjectLevel(gsdRoot: string, issues: DoctorIssue[]): void {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]): void {
|
function checkMilestoneLevel(sfRoot: string, mid: string, issues: DoctorIssue[]): void {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, mid);
|
const mDir = resolveMilestoneDir(sfRoot, mid);
|
||||||
if (!mDir) {
|
if (!mDir) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'error',
|
severity: 'error',
|
||||||
|
|
@ -83,10 +83,10 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
|
||||||
}
|
}
|
||||||
|
|
||||||
// CONTEXT.md should exist
|
// CONTEXT.md should exist
|
||||||
const ctxPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT');
|
const ctxPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT');
|
||||||
if (!ctxPath || !existsSync(ctxPath)) {
|
if (!ctxPath || !existsSync(ctxPath)) {
|
||||||
// Check for draft
|
// Check for draft
|
||||||
const draftPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT-DRAFT');
|
const draftPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT-DRAFT');
|
||||||
if (!draftPath || !existsSync(draftPath)) {
|
if (!draftPath || !existsSync(draftPath)) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'warning',
|
severity: 'warning',
|
||||||
|
|
@ -99,9 +99,9 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
|
||||||
}
|
}
|
||||||
|
|
||||||
// ROADMAP.md should exist if slices exist
|
// ROADMAP.md should exist if slices exist
|
||||||
const sliceIds = findSliceIds(gsdRoot, mid);
|
const sliceIds = findSliceIds(sfRoot, mid);
|
||||||
if (sliceIds.length > 0) {
|
if (sliceIds.length > 0) {
|
||||||
const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
|
const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
|
||||||
if (!roadmapPath || !existsSync(roadmapPath)) {
|
if (!roadmapPath || !existsSync(roadmapPath)) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'warning',
|
severity: 'warning',
|
||||||
|
|
@ -116,10 +116,10 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
|
||||||
// Check if all slices done but no SUMMARY
|
// Check if all slices done but no SUMMARY
|
||||||
if (sliceIds.length > 0) {
|
if (sliceIds.length > 0) {
|
||||||
const allDone = sliceIds.every((sid) => {
|
const allDone = sliceIds.every((sid) => {
|
||||||
const tasks = findTaskFiles(gsdRoot, mid, sid);
|
const tasks = findTaskFiles(sfRoot, mid, sid);
|
||||||
return tasks.length > 0 && tasks.every((t) => t.hasSummary);
|
return tasks.length > 0 && tasks.every((t) => t.hasSummary);
|
||||||
});
|
});
|
||||||
const summaryPath = resolveMilestoneFile(gsdRoot, mid, 'SUMMARY');
|
const summaryPath = resolveMilestoneFile(sfRoot, mid, 'SUMMARY');
|
||||||
if (allDone && (!summaryPath || !existsSync(summaryPath))) {
|
if (allDone && (!summaryPath || !existsSync(summaryPath))) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'error',
|
severity: 'error',
|
||||||
|
|
@ -133,12 +133,12 @@ function checkMilestoneLevel(gsdRoot: string, mid: string, issues: DoctorIssue[]
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkSliceLevel(
|
function checkSliceLevel(
|
||||||
gsdRoot: string, mid: string, sid: string, issues: DoctorIssue[],
|
sfRoot: string, mid: string, sid: string, issues: DoctorIssue[],
|
||||||
): void {
|
): void {
|
||||||
const unitId = `${mid}/${sid}`;
|
const unitId = `${mid}/${sid}`;
|
||||||
|
|
||||||
// PLAN.md should exist
|
// PLAN.md should exist
|
||||||
const planPath = resolveSliceFile(gsdRoot, mid, sid, 'PLAN');
|
const planPath = resolveSliceFile(sfRoot, mid, sid, 'PLAN');
|
||||||
if (!planPath || !existsSync(planPath)) {
|
if (!planPath || !existsSync(planPath)) {
|
||||||
issues.push({
|
issues.push({
|
||||||
severity: 'error',
|
severity: 'error',
|
||||||
|
|
@ -150,7 +150,7 @@ function checkSliceLevel(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Tasks should have plans
|
// Tasks should have plans
|
||||||
const tasks = findTaskFiles(gsdRoot, mid, sid);
|
const tasks = findTaskFiles(sfRoot, mid, sid);
|
||||||
for (const task of tasks) {
|
for (const task of tasks) {
|
||||||
const taskUnitId = `${unitId}/${task.id}`;
|
const taskUnitId = `${unitId}/${task.id}`;
|
||||||
if (!task.hasPlan) {
|
if (!task.hasPlan) {
|
||||||
|
|
@ -181,10 +181,10 @@ function checkSliceLevel(
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function runDoctorLite(projectDir: string, scope?: string): DoctorResult {
|
export function runDoctorLite(projectDir: string, scope?: string): DoctorResult {
|
||||||
const gsdRoot = resolveGsdRoot(projectDir);
|
const sfRoot = resolveSFRoot(projectDir);
|
||||||
const issues: DoctorIssue[] = [];
|
const issues: DoctorIssue[] = [];
|
||||||
|
|
||||||
if (!existsSync(gsdRoot)) {
|
if (!existsSync(sfRoot)) {
|
||||||
return {
|
return {
|
||||||
ok: true,
|
ok: true,
|
||||||
issues: [{
|
issues: [{
|
||||||
|
|
@ -199,19 +199,19 @@ export function runDoctorLite(projectDir: string, scope?: string): DoctorResult
|
||||||
}
|
}
|
||||||
|
|
||||||
// Project-level checks
|
// Project-level checks
|
||||||
checkProjectLevel(gsdRoot, issues);
|
checkProjectLevel(sfRoot, issues);
|
||||||
|
|
||||||
// Milestone + slice checks
|
// Milestone + slice checks
|
||||||
const milestoneIds = scope
|
const milestoneIds = scope
|
||||||
? findMilestoneIds(gsdRoot).filter((id) => id === scope)
|
? findMilestoneIds(sfRoot).filter((id) => id === scope)
|
||||||
: findMilestoneIds(gsdRoot);
|
: findMilestoneIds(sfRoot);
|
||||||
|
|
||||||
for (const mid of milestoneIds) {
|
for (const mid of milestoneIds) {
|
||||||
checkMilestoneLevel(gsdRoot, mid, issues);
|
checkMilestoneLevel(sfRoot, mid, issues);
|
||||||
|
|
||||||
const sliceIds = findSliceIds(gsdRoot, mid);
|
const sliceIds = findSliceIds(sfRoot, mid);
|
||||||
for (const sid of sliceIds) {
|
for (const sid of sliceIds) {
|
||||||
checkSliceLevel(gsdRoot, mid, sid, issues);
|
checkSliceLevel(sfRoot, mid, sid, issues);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -357,23 +357,23 @@ describe('writeGraph', () => {
|
||||||
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
it('creates graph.json in .sf/graphs/ after writeGraph()', async () => {
|
it('creates graph.json in .sf/graphs/ after writeGraph()', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
const graphPath = join(gsdRoot, 'graphs', 'graph.json');
|
const graphPath = join(sfRoot, 'graphs', 'graph.json');
|
||||||
assert.ok(existsSync(graphPath), `Expected ${graphPath} to exist`);
|
assert.ok(existsSync(graphPath), `Expected ${graphPath} to exist`);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('write is atomic — no temp file remains after writeGraph()', async () => {
|
it('write is atomic — no temp file remains after writeGraph()', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
const tmpPath = join(gsdRoot, 'graphs', 'graph.tmp.json');
|
const tmpPath = join(sfRoot, 'graphs', 'graph.tmp.json');
|
||||||
assert.ok(!existsSync(tmpPath), 'Temp file should not exist after successful write');
|
assert.ok(!existsSync(tmpPath), 'Temp file should not exist after successful write');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('written graph.json is valid JSON with nodes and edges', async () => {
|
it('written graph.json is valid JSON with nodes and edges', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
const raw = readFileSync(join(gsdRoot, 'graphs', 'graph.json'), 'utf-8');
|
const raw = readFileSync(join(sfRoot, 'graphs', 'graph.json'), 'utf-8');
|
||||||
const parsed = JSON.parse(raw) as KnowledgeGraph;
|
const parsed = JSON.parse(raw) as KnowledgeGraph;
|
||||||
assert.ok(Array.isArray(parsed.nodes));
|
assert.ok(Array.isArray(parsed.nodes));
|
||||||
assert.ok(Array.isArray(parsed.edges));
|
assert.ok(Array.isArray(parsed.edges));
|
||||||
|
|
@ -401,9 +401,9 @@ describe('graphStatus', () => {
|
||||||
|
|
||||||
it('returns { exists: true, nodeCount, edgeCount, ageHours } when graph exists', async () => {
|
it('returns { exists: true, nodeCount, edgeCount, ageHours } when graph exists', async () => {
|
||||||
makeProjectWithArtifacts(projectDir);
|
makeProjectWithArtifacts(projectDir);
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
const graph = await buildGraph(projectDir);
|
const graph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
|
|
||||||
const status = await graphStatus(projectDir);
|
const status = await graphStatus(projectDir);
|
||||||
assert.equal(status.exists, true);
|
assert.equal(status.exists, true);
|
||||||
|
|
@ -415,9 +415,9 @@ describe('graphStatus', () => {
|
||||||
|
|
||||||
it('stale = false for a freshly built graph', async () => {
|
it('stale = false for a freshly built graph', async () => {
|
||||||
makeProjectWithArtifacts(projectDir);
|
makeProjectWithArtifacts(projectDir);
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
const graph = await buildGraph(projectDir);
|
const graph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
|
|
||||||
const status = await graphStatus(projectDir);
|
const status = await graphStatus(projectDir);
|
||||||
assert.equal(status.stale, false);
|
assert.equal(status.stale, false);
|
||||||
|
|
@ -425,8 +425,8 @@ describe('graphStatus', () => {
|
||||||
|
|
||||||
it('stale = true for a graph older than 24h (builtAt backdated)', async () => {
|
it('stale = true for a graph older than 24h (builtAt backdated)', async () => {
|
||||||
makeProjectWithArtifacts(projectDir);
|
makeProjectWithArtifacts(projectDir);
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
mkdirSync(join(gsdRoot, 'graphs'), { recursive: true });
|
mkdirSync(join(sfRoot, 'graphs'), { recursive: true });
|
||||||
|
|
||||||
// Write a graph with a builtAt 25 hours ago
|
// Write a graph with a builtAt 25 hours ago
|
||||||
const oldGraph: KnowledgeGraph = {
|
const oldGraph: KnowledgeGraph = {
|
||||||
|
|
@ -435,7 +435,7 @@ describe('graphStatus', () => {
|
||||||
builtAt: new Date(Date.now() - 25 * 60 * 60 * 1000).toISOString(),
|
builtAt: new Date(Date.now() - 25 * 60 * 60 * 1000).toISOString(),
|
||||||
};
|
};
|
||||||
writeFileSync(
|
writeFileSync(
|
||||||
join(gsdRoot, 'graphs', 'graph.json'),
|
join(sfRoot, 'graphs', 'graph.json'),
|
||||||
JSON.stringify(oldGraph),
|
JSON.stringify(oldGraph),
|
||||||
'utf-8',
|
'utf-8',
|
||||||
);
|
);
|
||||||
|
|
@ -456,9 +456,9 @@ describe('graphQuery', () => {
|
||||||
before(async () => {
|
before(async () => {
|
||||||
projectDir = tmpProject();
|
projectDir = tmpProject();
|
||||||
makeProjectWithArtifacts(projectDir);
|
makeProjectWithArtifacts(projectDir);
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
const graph = await buildGraph(projectDir);
|
const graph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
});
|
});
|
||||||
|
|
||||||
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
after(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||||
|
|
@ -486,7 +486,7 @@ describe('graphQuery', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it('budget trims AMBIGUOUS edges first', async () => {
|
it('budget trims AMBIGUOUS edges first', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
// Write a graph with mixed confidence edges
|
// Write a graph with mixed confidence edges
|
||||||
const mixedGraph: KnowledgeGraph = {
|
const mixedGraph: KnowledgeGraph = {
|
||||||
builtAt: new Date().toISOString(),
|
builtAt: new Date().toISOString(),
|
||||||
|
|
@ -500,7 +500,7 @@ describe('graphQuery', () => {
|
||||||
{ from: 'n1', to: 'n3', type: 'contains', confidence: 'INFERRED' },
|
{ from: 'n1', to: 'n3', type: 'contains', confidence: 'INFERRED' },
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
await writeGraph(gsdRoot, mixedGraph);
|
await writeGraph(sfRoot, mixedGraph);
|
||||||
|
|
||||||
// With a very small budget, AMBIGUOUS edges should be trimmed first
|
// With a very small budget, AMBIGUOUS edges should be trimmed first
|
||||||
const result = await graphQuery(projectDir, 'seed node budget', 10);
|
const result = await graphQuery(projectDir, 'seed node budget', 10);
|
||||||
|
|
@ -509,7 +509,7 @@ describe('graphQuery', () => {
|
||||||
|
|
||||||
// Restore the original graph
|
// Restore the original graph
|
||||||
const originalGraph = await buildGraph(projectDir);
|
const originalGraph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, originalGraph);
|
await writeGraph(sfRoot, originalGraph);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -523,16 +523,16 @@ describe('graphDiff', () => {
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
projectDir = tmpProject();
|
projectDir = tmpProject();
|
||||||
makeProjectWithArtifacts(projectDir);
|
makeProjectWithArtifacts(projectDir);
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
const graph = await buildGraph(projectDir);
|
const graph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => rmSync(projectDir, { recursive: true, force: true }));
|
afterEach(() => rmSync(projectDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
it('returns empty diff when comparing graph to itself (snapshot = current)', async () => {
|
it('returns empty diff when comparing graph to itself (snapshot = current)', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
await writeSnapshot(gsdRoot);
|
await writeSnapshot(sfRoot);
|
||||||
const diff = await graphDiff(projectDir);
|
const diff = await graphDiff(projectDir);
|
||||||
assert.ok(Array.isArray(diff.nodes.added));
|
assert.ok(Array.isArray(diff.nodes.added));
|
||||||
assert.ok(Array.isArray(diff.nodes.removed));
|
assert.ok(Array.isArray(diff.nodes.removed));
|
||||||
|
|
@ -542,9 +542,9 @@ describe('graphDiff', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it('returns added nodes when a new node appears after snapshot', async () => {
|
it('returns added nodes when a new node appears after snapshot', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
// Take snapshot of the original graph
|
// Take snapshot of the original graph
|
||||||
await writeSnapshot(gsdRoot);
|
await writeSnapshot(sfRoot);
|
||||||
|
|
||||||
// Now write a graph with an extra node
|
// Now write a graph with an extra node
|
||||||
const extraGraph: KnowledgeGraph = {
|
const extraGraph: KnowledgeGraph = {
|
||||||
|
|
@ -554,14 +554,14 @@ describe('graphDiff', () => {
|
||||||
],
|
],
|
||||||
edges: [],
|
edges: [],
|
||||||
};
|
};
|
||||||
await writeGraph(gsdRoot, extraGraph);
|
await writeGraph(sfRoot, extraGraph);
|
||||||
|
|
||||||
const diff = await graphDiff(projectDir);
|
const diff = await graphDiff(projectDir);
|
||||||
assert.ok(diff.nodes.added.includes('brand-new-node'), 'new node should be in added');
|
assert.ok(diff.nodes.added.includes('brand-new-node'), 'new node should be in added');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('returns removed nodes when a node disappears after snapshot', async () => {
|
it('returns removed nodes when a node disappears after snapshot', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
// Create snapshot with a node that won't exist in current graph
|
// Create snapshot with a node that won't exist in current graph
|
||||||
const snapshotGraph: KnowledgeGraph = {
|
const snapshotGraph: KnowledgeGraph = {
|
||||||
builtAt: new Date().toISOString(),
|
builtAt: new Date().toISOString(),
|
||||||
|
|
@ -571,7 +571,7 @@ describe('graphDiff', () => {
|
||||||
edges: [],
|
edges: [],
|
||||||
};
|
};
|
||||||
writeFileSync(
|
writeFileSync(
|
||||||
join(gsdRoot, 'graphs', '.last-build-snapshot.json'),
|
join(sfRoot, 'graphs', '.last-build-snapshot.json'),
|
||||||
JSON.stringify({ ...snapshotGraph, snapshotAt: new Date().toISOString() }),
|
JSON.stringify({ ...snapshotGraph, snapshotAt: new Date().toISOString() }),
|
||||||
'utf-8',
|
'utf-8',
|
||||||
);
|
);
|
||||||
|
|
@ -592,9 +592,9 @@ describe('graphDiff', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it('writeSnapshot creates .last-build-snapshot.json with snapshotAt', async () => {
|
it('writeSnapshot creates .last-build-snapshot.json with snapshotAt', async () => {
|
||||||
const gsdRoot = join(projectDir, '.sf');
|
const sfRoot = join(projectDir, '.sf');
|
||||||
await writeSnapshot(gsdRoot);
|
await writeSnapshot(sfRoot);
|
||||||
const snapshotPath = join(gsdRoot, 'graphs', '.last-build-snapshot.json');
|
const snapshotPath = join(sfRoot, 'graphs', '.last-build-snapshot.json');
|
||||||
assert.ok(existsSync(snapshotPath));
|
assert.ok(existsSync(snapshotPath));
|
||||||
const raw = readFileSync(snapshotPath, 'utf-8');
|
const raw = readFileSync(snapshotPath, 'utf-8');
|
||||||
const parsed = JSON.parse(raw) as KnowledgeGraph & { snapshotAt: string };
|
const parsed = JSON.parse(raw) as KnowledgeGraph & { snapshotAt: string };
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@
|
||||||
|
|
||||||
import { readFileSync, writeFileSync, renameSync, existsSync, mkdirSync } from 'node:fs';
|
import { readFileSync, writeFileSync, renameSync, existsSync, mkdirSync } from 'node:fs';
|
||||||
import { join, resolve } from 'node:path';
|
import { join, resolve } from 'node:path';
|
||||||
import { resolveGsdRoot, findMilestoneIds, resolveMilestoneDir, findSliceIds, resolveSliceDir } from './paths.js';
|
import { resolveSFRoot, findMilestoneIds, resolveMilestoneDir, findSliceIds, resolveSliceDir } from './paths.js';
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Types
|
// Types
|
||||||
|
|
@ -92,20 +92,20 @@ export interface GraphDiffResult {
|
||||||
// Graph file paths
|
// Graph file paths
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
function graphsDir(gsdRoot: string): string {
|
function graphsDir(sfRoot: string): string {
|
||||||
return join(gsdRoot, 'graphs');
|
return join(sfRoot, 'graphs');
|
||||||
}
|
}
|
||||||
|
|
||||||
function graphJsonPath(gsdRoot: string): string {
|
function graphJsonPath(sfRoot: string): string {
|
||||||
return join(graphsDir(gsdRoot), 'graph.json');
|
return join(graphsDir(sfRoot), 'graph.json');
|
||||||
}
|
}
|
||||||
|
|
||||||
function graphTmpPath(gsdRoot: string): string {
|
function graphTmpPath(sfRoot: string): string {
|
||||||
return join(graphsDir(gsdRoot), 'graph.tmp.json');
|
return join(graphsDir(sfRoot), 'graph.tmp.json');
|
||||||
}
|
}
|
||||||
|
|
||||||
function snapshotPath(gsdRoot: string): string {
|
function snapshotPath(sfRoot: string): string {
|
||||||
return join(graphsDir(gsdRoot), '.last-build-snapshot.json');
|
return join(graphsDir(sfRoot), '.last-build-snapshot.json');
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
@ -115,8 +115,8 @@ function snapshotPath(gsdRoot: string): string {
|
||||||
/**
|
/**
|
||||||
* Parse STATE.md for active milestone and phase concepts.
|
* Parse STATE.md for active milestone and phase concepts.
|
||||||
*/
|
*/
|
||||||
function parseStateFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
function parseStateFile(sfRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
||||||
const statePath = join(gsdRoot, 'STATE.md');
|
const statePath = join(sfRoot, 'STATE.md');
|
||||||
if (!existsSync(statePath)) return;
|
if (!existsSync(statePath)) return;
|
||||||
|
|
||||||
let content: string;
|
let content: string;
|
||||||
|
|
@ -160,8 +160,8 @@ function parseStateFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]
|
||||||
/**
|
/**
|
||||||
* Parse KNOWLEDGE.md for rules, patterns, and lessons.
|
* Parse KNOWLEDGE.md for rules, patterns, and lessons.
|
||||||
*/
|
*/
|
||||||
function parseKnowledgeFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
function parseKnowledgeFile(sfRoot: string, nodes: GraphNode[], _edges: GraphEdge[]): void {
|
||||||
const knowledgePath = join(gsdRoot, 'KNOWLEDGE.md');
|
const knowledgePath = join(sfRoot, 'KNOWLEDGE.md');
|
||||||
if (!existsSync(knowledgePath)) return;
|
if (!existsSync(knowledgePath)) return;
|
||||||
|
|
||||||
let content: string;
|
let content: string;
|
||||||
|
|
@ -239,15 +239,15 @@ function parseKnowledgeFile(gsdRoot: string, nodes: GraphNode[], _edges: GraphEd
|
||||||
* Parse milestone ROADMAP.md files for milestones and slices.
|
* Parse milestone ROADMAP.md files for milestones and slices.
|
||||||
*/
|
*/
|
||||||
function parseMilestoneFiles(
|
function parseMilestoneFiles(
|
||||||
gsdRoot: string,
|
sfRoot: string,
|
||||||
nodes: GraphNode[],
|
nodes: GraphNode[],
|
||||||
edges: GraphEdge[],
|
edges: GraphEdge[],
|
||||||
): void {
|
): void {
|
||||||
const milestoneIds = findMilestoneIds(gsdRoot);
|
const milestoneIds = findMilestoneIds(sfRoot);
|
||||||
|
|
||||||
for (const milestoneId of milestoneIds) {
|
for (const milestoneId of milestoneIds) {
|
||||||
try {
|
try {
|
||||||
parseSingleMilestone(gsdRoot, milestoneId, nodes, edges);
|
parseSingleMilestone(sfRoot, milestoneId, nodes, edges);
|
||||||
} catch {
|
} catch {
|
||||||
// Skip this milestone on any error
|
// Skip this milestone on any error
|
||||||
}
|
}
|
||||||
|
|
@ -255,12 +255,12 @@ function parseMilestoneFiles(
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseSingleMilestone(
|
function parseSingleMilestone(
|
||||||
gsdRoot: string,
|
sfRoot: string,
|
||||||
milestoneId: string,
|
milestoneId: string,
|
||||||
nodes: GraphNode[],
|
nodes: GraphNode[],
|
||||||
edges: GraphEdge[],
|
edges: GraphEdge[],
|
||||||
): void {
|
): void {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
const mDir = resolveMilestoneDir(sfRoot, milestoneId);
|
||||||
if (!mDir) return;
|
if (!mDir) return;
|
||||||
|
|
||||||
const milestoneNodeId = `milestone:${milestoneId}`;
|
const milestoneNodeId = `milestone:${milestoneId}`;
|
||||||
|
|
@ -295,10 +295,10 @@ function parseSingleMilestone(
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse slices from roadmap table or filesystem
|
// Parse slices from roadmap table or filesystem
|
||||||
const sliceIds = findSliceIds(gsdRoot, milestoneId);
|
const sliceIds = findSliceIds(sfRoot, milestoneId);
|
||||||
for (const sliceId of sliceIds) {
|
for (const sliceId of sliceIds) {
|
||||||
try {
|
try {
|
||||||
parseSingleSlice(gsdRoot, milestoneId, sliceId, milestoneNodeId, nodes, edges);
|
parseSingleSlice(sfRoot, milestoneId, sliceId, milestoneNodeId, nodes, edges);
|
||||||
} catch {
|
} catch {
|
||||||
// Skip this slice on any error
|
// Skip this slice on any error
|
||||||
}
|
}
|
||||||
|
|
@ -306,14 +306,14 @@ function parseSingleMilestone(
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseSingleSlice(
|
function parseSingleSlice(
|
||||||
gsdRoot: string,
|
sfRoot: string,
|
||||||
milestoneId: string,
|
milestoneId: string,
|
||||||
sliceId: string,
|
sliceId: string,
|
||||||
milestoneNodeId: string,
|
milestoneNodeId: string,
|
||||||
nodes: GraphNode[],
|
nodes: GraphNode[],
|
||||||
edges: GraphEdge[],
|
edges: GraphEdge[],
|
||||||
): void {
|
): void {
|
||||||
const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
|
const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
|
||||||
if (!sDir) return;
|
if (!sDir) return;
|
||||||
|
|
||||||
const sliceNodeId = `slice:${milestoneId}:${sliceId}`;
|
const sliceNodeId = `slice:${milestoneId}:${sliceId}`;
|
||||||
|
|
@ -397,12 +397,12 @@ function parseTasksFromPlan(
|
||||||
* Surprises are mapped to the 'lesson' NodeType (no distinct type exists).
|
* Surprises are mapped to the 'lesson' NodeType (no distinct type exists).
|
||||||
* Parse errors per file are caught — the file is skipped, never rethrows.
|
* Parse errors per file are caught — the file is skipped, never rethrows.
|
||||||
*/
|
*/
|
||||||
function parseLearningsFiles(gsdRoot: string, nodes: GraphNode[], edges: GraphEdge[]): void {
|
function parseLearningsFiles(sfRoot: string, nodes: GraphNode[], edges: GraphEdge[]): void {
|
||||||
const milestoneIds = findMilestoneIds(gsdRoot);
|
const milestoneIds = findMilestoneIds(sfRoot);
|
||||||
|
|
||||||
for (const milestoneId of milestoneIds) {
|
for (const milestoneId of milestoneIds) {
|
||||||
try {
|
try {
|
||||||
parseSingleLearningsFile(gsdRoot, milestoneId, nodes, edges);
|
parseSingleLearningsFile(sfRoot, milestoneId, nodes, edges);
|
||||||
} catch {
|
} catch {
|
||||||
// Skip this milestone's LEARNINGS.md on any error
|
// Skip this milestone's LEARNINGS.md on any error
|
||||||
}
|
}
|
||||||
|
|
@ -410,12 +410,12 @@ function parseLearningsFiles(gsdRoot: string, nodes: GraphNode[], edges: GraphEd
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseSingleLearningsFile(
|
function parseSingleLearningsFile(
|
||||||
gsdRoot: string,
|
sfRoot: string,
|
||||||
milestoneId: string,
|
milestoneId: string,
|
||||||
nodes: GraphNode[],
|
nodes: GraphNode[],
|
||||||
edges: GraphEdge[],
|
edges: GraphEdge[],
|
||||||
): void {
|
): void {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
const mDir = resolveMilestoneDir(sfRoot, milestoneId);
|
||||||
if (!mDir) return;
|
if (!mDir) return;
|
||||||
|
|
||||||
const learningsPath = join(mDir, `${milestoneId}-LEARNINGS.md`);
|
const learningsPath = join(mDir, `${milestoneId}-LEARNINGS.md`);
|
||||||
|
|
@ -543,7 +543,7 @@ function parseLearningsSection(
|
||||||
* and never causes buildGraph() to throw.
|
* and never causes buildGraph() to throw.
|
||||||
*/
|
*/
|
||||||
export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
|
export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
|
||||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
const sfRoot = resolveSFRoot(resolve(projectDir));
|
||||||
|
|
||||||
const nodes: GraphNode[] = [];
|
const nodes: GraphNode[] = [];
|
||||||
const edges: GraphEdge[] = [];
|
const edges: GraphEdge[] = [];
|
||||||
|
|
@ -558,7 +558,7 @@ export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
|
||||||
|
|
||||||
for (const parser of parsers) {
|
for (const parser of parsers) {
|
||||||
try {
|
try {
|
||||||
parser(gsdRoot, nodes, edges);
|
parser(sfRoot, nodes, edges);
|
||||||
} catch {
|
} catch {
|
||||||
// Parsing error — skip this artifact, mark as ambiguous
|
// Parsing error — skip this artifact, mark as ambiguous
|
||||||
nodes.push({
|
nodes.push({
|
||||||
|
|
@ -595,12 +595,12 @@ export async function buildGraph(projectDir: string): Promise<KnowledgeGraph> {
|
||||||
* Writes to graph.tmp.json first, then renames to graph.json.
|
* Writes to graph.tmp.json first, then renames to graph.json.
|
||||||
* Creates the graphs/ directory if it does not exist.
|
* Creates the graphs/ directory if it does not exist.
|
||||||
*/
|
*/
|
||||||
export async function writeGraph(gsdRoot: string, graph: KnowledgeGraph): Promise<void> {
|
export async function writeGraph(sfRoot: string, graph: KnowledgeGraph): Promise<void> {
|
||||||
const dir = graphsDir(gsdRoot);
|
const dir = graphsDir(sfRoot);
|
||||||
mkdirSync(dir, { recursive: true });
|
mkdirSync(dir, { recursive: true });
|
||||||
|
|
||||||
const tmp = graphTmpPath(gsdRoot);
|
const tmp = graphTmpPath(sfRoot);
|
||||||
const final = graphJsonPath(gsdRoot);
|
const final = graphJsonPath(sfRoot);
|
||||||
|
|
||||||
writeFileSync(tmp, JSON.stringify(graph, null, 2), 'utf-8');
|
writeFileSync(tmp, JSON.stringify(graph, null, 2), 'utf-8');
|
||||||
renameSync(tmp, final);
|
renameSync(tmp, final);
|
||||||
|
|
@ -614,11 +614,11 @@ export async function writeGraph(gsdRoot: string, graph: KnowledgeGraph): Promis
|
||||||
* Copy the current graph.json to .last-build-snapshot.json.
|
* Copy the current graph.json to .last-build-snapshot.json.
|
||||||
* Adds a snapshotAt timestamp to the copy.
|
* Adds a snapshotAt timestamp to the copy.
|
||||||
*/
|
*/
|
||||||
export async function writeSnapshot(gsdRoot: string): Promise<void> {
|
export async function writeSnapshot(sfRoot: string): Promise<void> {
|
||||||
const src = graphJsonPath(gsdRoot);
|
const src = graphJsonPath(sfRoot);
|
||||||
if (!existsSync(src)) return;
|
if (!existsSync(src)) return;
|
||||||
|
|
||||||
const dir = graphsDir(gsdRoot);
|
const dir = graphsDir(sfRoot);
|
||||||
mkdirSync(dir, { recursive: true });
|
mkdirSync(dir, { recursive: true });
|
||||||
|
|
||||||
const raw = readFileSync(src, 'utf-8');
|
const raw = readFileSync(src, 'utf-8');
|
||||||
|
|
@ -630,7 +630,7 @@ export async function writeSnapshot(gsdRoot: string): Promise<void> {
|
||||||
}
|
}
|
||||||
const snapshot = { ...graph, snapshotAt: new Date().toISOString() };
|
const snapshot = { ...graph, snapshotAt: new Date().toISOString() };
|
||||||
|
|
||||||
writeFileSync(snapshotPath(gsdRoot), JSON.stringify(snapshot, null, 2), 'utf-8');
|
writeFileSync(snapshotPath(sfRoot), JSON.stringify(snapshot, null, 2), 'utf-8');
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
@ -642,8 +642,8 @@ export async function writeSnapshot(gsdRoot: string): Promise<void> {
|
||||||
* Stale means builtAt is older than 24 hours.
|
* Stale means builtAt is older than 24 hours.
|
||||||
*/
|
*/
|
||||||
export async function graphStatus(projectDir: string): Promise<GraphStatusResult> {
|
export async function graphStatus(projectDir: string): Promise<GraphStatusResult> {
|
||||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
const sfRoot = resolveSFRoot(resolve(projectDir));
|
||||||
const graphPath = graphJsonPath(gsdRoot);
|
const graphPath = graphJsonPath(sfRoot);
|
||||||
|
|
||||||
if (!existsSync(graphPath)) {
|
if (!existsSync(graphPath)) {
|
||||||
return { exists: false };
|
return { exists: false };
|
||||||
|
|
@ -745,8 +745,8 @@ export async function graphQuery(
|
||||||
term: string,
|
term: string,
|
||||||
budget = 4000,
|
budget = 4000,
|
||||||
): Promise<GraphQueryResult> {
|
): Promise<GraphQueryResult> {
|
||||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
const sfRoot = resolveSFRoot(resolve(projectDir));
|
||||||
const graphPath = graphJsonPath(gsdRoot);
|
const graphPath = graphJsonPath(sfRoot);
|
||||||
|
|
||||||
if (!existsSync(graphPath)) {
|
if (!existsSync(graphPath)) {
|
||||||
return { nodes: [], edges: [], term, budget };
|
return { nodes: [], edges: [], term, budget };
|
||||||
|
|
@ -797,14 +797,14 @@ export async function graphQuery(
|
||||||
* If no snapshot exists, returns empty diff arrays.
|
* If no snapshot exists, returns empty diff arrays.
|
||||||
*/
|
*/
|
||||||
export async function graphDiff(projectDir: string): Promise<GraphDiffResult> {
|
export async function graphDiff(projectDir: string): Promise<GraphDiffResult> {
|
||||||
const gsdRoot = resolveGsdRoot(resolve(projectDir));
|
const sfRoot = resolveSFRoot(resolve(projectDir));
|
||||||
const empty: GraphDiffResult = {
|
const empty: GraphDiffResult = {
|
||||||
nodes: { added: [], removed: [], changed: [] },
|
nodes: { added: [], removed: [], changed: [] },
|
||||||
edges: { added: [], removed: [] },
|
edges: { added: [], removed: [] },
|
||||||
};
|
};
|
||||||
|
|
||||||
const graphPath = graphJsonPath(gsdRoot);
|
const graphPath = graphJsonPath(sfRoot);
|
||||||
const snap = snapshotPath(gsdRoot);
|
const snap = snapshotPath(sfRoot);
|
||||||
|
|
||||||
if (!existsSync(graphPath)) return empty;
|
if (!existsSync(graphPath)) return empty;
|
||||||
if (!existsSync(snap)) return empty;
|
if (!existsSync(snap)) return empty;
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
// SF MCP Server — readers barrel export
|
// SF MCP Server — readers barrel export
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
export { resolveGsdRoot, resolveRootFile } from './paths.js';
|
export { resolveSFRoot, resolveRootFile } from './paths.js';
|
||||||
export { readProgress } from './state.js';
|
export { readProgress } from './state.js';
|
||||||
export type { ProgressResult } from './state.js';
|
export type { ProgressResult } from './state.js';
|
||||||
export { readRoadmap } from './roadmap.js';
|
export { readRoadmap } from './roadmap.js';
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
import { resolveGsdRoot, resolveRootFile } from './paths.js';
|
import { resolveSFRoot, resolveRootFile } from './paths.js';
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Types
|
// Types
|
||||||
|
|
@ -90,7 +90,7 @@ function parseKnowledgeMarkdown(content: string): KnowledgeEntry[] {
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function readKnowledge(projectDir: string): KnowledgeResult {
|
export function readKnowledge(projectDir: string): KnowledgeResult {
|
||||||
const sf = resolveGsdRoot(projectDir);
|
const sf = resolveSFRoot(projectDir);
|
||||||
const knowledgePath = resolveRootFile(sf, 'KNOWLEDGE.md');
|
const knowledgePath = resolveRootFile(sf, 'KNOWLEDGE.md');
|
||||||
|
|
||||||
if (!existsSync(knowledgePath)) {
|
if (!existsSync(knowledgePath)) {
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
import { resolveGsdRoot, resolveRootFile } from './paths.js';
|
import { resolveSFRoot, resolveRootFile } from './paths.js';
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Types
|
// Types
|
||||||
|
|
@ -72,7 +72,7 @@ function parseMetricsJson(content: string): MetricsUnit[] {
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function readHistory(projectDir: string, limit?: number): HistoryResult {
|
export function readHistory(projectDir: string, limit?: number): HistoryResult {
|
||||||
const sf = resolveGsdRoot(projectDir);
|
const sf = resolveSFRoot(projectDir);
|
||||||
|
|
||||||
// metrics.json (primary)
|
// metrics.json (primary)
|
||||||
const metricsPath = resolveRootFile(sf, 'metrics.json');
|
const metricsPath = resolveRootFile(sf, 'metrics.json');
|
||||||
|
|
|
||||||
|
|
@ -14,7 +14,7 @@ import { execFileSync } from 'node:child_process';
|
||||||
* 3. Walk up from projectDir
|
* 3. Walk up from projectDir
|
||||||
* 4. Fallback: projectDir/.sf (even if missing — for init)
|
* 4. Fallback: projectDir/.sf (even if missing — for init)
|
||||||
*/
|
*/
|
||||||
export function resolveGsdRoot(projectDir: string): string {
|
export function resolveSFRoot(projectDir: string): string {
|
||||||
const resolved = resolve(projectDir);
|
const resolved = resolve(projectDir);
|
||||||
|
|
||||||
// Fast path: .sf/ in the given directory
|
// Fast path: .sf/ in the given directory
|
||||||
|
|
@ -53,21 +53,21 @@ export function resolveGsdRoot(projectDir: string): string {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Resolve path to a .sf/ root file (STATE.md, KNOWLEDGE.md, etc.) */
|
/** Resolve path to a .sf/ root file (STATE.md, KNOWLEDGE.md, etc.) */
|
||||||
export function resolveRootFile(gsdRoot: string, name: string): string {
|
export function resolveRootFile(sfRoot: string, name: string): string {
|
||||||
return join(gsdRoot, name);
|
return join(sfRoot, name);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Resolve path to milestones directory */
|
/** Resolve path to milestones directory */
|
||||||
export function milestonesDir(gsdRoot: string): string {
|
export function milestonesDir(sfRoot: string): string {
|
||||||
return join(gsdRoot, 'milestones');
|
return join(sfRoot, 'milestones');
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find all milestone directory IDs (M001, M002, etc.).
|
* Find all milestone directory IDs (M001, M002, etc.).
|
||||||
* Handles both bare (M001/) and descriptor (M001-FLIGHT-SIM/) naming.
|
* Handles both bare (M001/) and descriptor (M001-FLIGHT-SIM/) naming.
|
||||||
*/
|
*/
|
||||||
export function findMilestoneIds(gsdRoot: string): string[] {
|
export function findMilestoneIds(sfRoot: string): string[] {
|
||||||
const dir = milestonesDir(gsdRoot);
|
const dir = milestonesDir(sfRoot);
|
||||||
if (!existsSync(dir)) return [];
|
if (!existsSync(dir)) return [];
|
||||||
|
|
||||||
const entries = readdirSync(dir, { withFileTypes: true });
|
const entries = readdirSync(dir, { withFileTypes: true });
|
||||||
|
|
@ -86,8 +86,8 @@ export function findMilestoneIds(gsdRoot: string): string[] {
|
||||||
* Resolve the actual directory name for a milestone ID.
|
* Resolve the actual directory name for a milestone ID.
|
||||||
* M001 might live in M001/ or M001-SOME-DESCRIPTOR/.
|
* M001 might live in M001/ or M001-SOME-DESCRIPTOR/.
|
||||||
*/
|
*/
|
||||||
export function resolveMilestoneDir(gsdRoot: string, milestoneId: string): string | null {
|
export function resolveMilestoneDir(sfRoot: string, milestoneId: string): string | null {
|
||||||
const dir = milestonesDir(gsdRoot);
|
const dir = milestonesDir(sfRoot);
|
||||||
if (!existsSync(dir)) return null;
|
if (!existsSync(dir)) return null;
|
||||||
|
|
||||||
// Fast path: exact match
|
// Fast path: exact match
|
||||||
|
|
@ -109,8 +109,8 @@ export function resolveMilestoneDir(gsdRoot: string, milestoneId: string): strin
|
||||||
* Resolve a milestone-level file (M001-ROADMAP.md, M001-CONTEXT.md, etc.).
|
* Resolve a milestone-level file (M001-ROADMAP.md, M001-CONTEXT.md, etc.).
|
||||||
* Handles various naming conventions.
|
* Handles various naming conventions.
|
||||||
*/
|
*/
|
||||||
export function resolveMilestoneFile(gsdRoot: string, milestoneId: string, suffix: string): string | null {
|
export function resolveMilestoneFile(sfRoot: string, milestoneId: string, suffix: string): string | null {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
const mDir = resolveMilestoneDir(sfRoot, milestoneId);
|
||||||
if (!mDir) return null;
|
if (!mDir) return null;
|
||||||
|
|
||||||
const dirName = basename(mDir);
|
const dirName = basename(mDir);
|
||||||
|
|
@ -129,8 +129,8 @@ export function resolveMilestoneFile(gsdRoot: string, milestoneId: string, suffi
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Find all slice IDs within a milestone (S01, S02, etc.) */
|
/** Find all slice IDs within a milestone (S01, S02, etc.) */
|
||||||
export function findSliceIds(gsdRoot: string, milestoneId: string): string[] {
|
export function findSliceIds(sfRoot: string, milestoneId: string): string[] {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
const mDir = resolveMilestoneDir(sfRoot, milestoneId);
|
||||||
if (!mDir) return [];
|
if (!mDir) return [];
|
||||||
|
|
||||||
const slicesDir = join(mDir, 'slices');
|
const slicesDir = join(mDir, 'slices');
|
||||||
|
|
@ -149,8 +149,8 @@ export function findSliceIds(gsdRoot: string, milestoneId: string): string[] {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Resolve the actual directory for a slice */
|
/** Resolve the actual directory for a slice */
|
||||||
export function resolveSliceDir(gsdRoot: string, milestoneId: string, sliceId: string): string | null {
|
export function resolveSliceDir(sfRoot: string, milestoneId: string, sliceId: string): string | null {
|
||||||
const mDir = resolveMilestoneDir(gsdRoot, milestoneId);
|
const mDir = resolveMilestoneDir(sfRoot, milestoneId);
|
||||||
if (!mDir) return null;
|
if (!mDir) return null;
|
||||||
|
|
||||||
const slicesDir = join(mDir, 'slices');
|
const slicesDir = join(mDir, 'slices');
|
||||||
|
|
@ -170,9 +170,9 @@ export function resolveSliceDir(gsdRoot: string, milestoneId: string, sliceId: s
|
||||||
|
|
||||||
/** Resolve a slice-level file (S01-PLAN.md, etc.) */
|
/** Resolve a slice-level file (S01-PLAN.md, etc.) */
|
||||||
export function resolveSliceFile(
|
export function resolveSliceFile(
|
||||||
gsdRoot: string, milestoneId: string, sliceId: string, suffix: string,
|
sfRoot: string, milestoneId: string, sliceId: string, suffix: string,
|
||||||
): string | null {
|
): string | null {
|
||||||
const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
|
const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
|
||||||
if (!sDir) return null;
|
if (!sDir) return null;
|
||||||
|
|
||||||
const dirName = basename(sDir);
|
const dirName = basename(sDir);
|
||||||
|
|
@ -190,9 +190,9 @@ export function resolveSliceFile(
|
||||||
|
|
||||||
/** Find all task files in a slice's tasks/ directory */
|
/** Find all task files in a slice's tasks/ directory */
|
||||||
export function findTaskFiles(
|
export function findTaskFiles(
|
||||||
gsdRoot: string, milestoneId: string, sliceId: string,
|
sfRoot: string, milestoneId: string, sliceId: string,
|
||||||
): Array<{ id: string; hasPlan: boolean; hasSummary: boolean }> {
|
): Array<{ id: string; hasPlan: boolean; hasSummary: boolean }> {
|
||||||
const sDir = resolveSliceDir(gsdRoot, milestoneId, sliceId);
|
const sDir = resolveSliceDir(sfRoot, milestoneId, sliceId);
|
||||||
if (!sDir) return [];
|
if (!sDir) return [];
|
||||||
|
|
||||||
const tasksDir = join(sDir, 'tasks');
|
const tasksDir = join(sDir, 'tasks');
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
import {
|
import {
|
||||||
resolveGsdRoot,
|
resolveSFRoot,
|
||||||
findMilestoneIds,
|
findMilestoneIds,
|
||||||
resolveMilestoneFile,
|
resolveMilestoneFile,
|
||||||
findSliceIds,
|
findSliceIds,
|
||||||
|
|
@ -150,15 +150,15 @@ function parseSlicePlanTasks(content: string): Array<{ id: string; title: string
|
||||||
// Milestone title from CONTEXT.md or ROADMAP.md H1
|
// Milestone title from CONTEXT.md or ROADMAP.md H1
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
function readMilestoneTitle(gsdRoot: string, mid: string): string {
|
function readMilestoneTitle(sfRoot: string, mid: string): string {
|
||||||
const ctxPath = resolveMilestoneFile(gsdRoot, mid, 'CONTEXT');
|
const ctxPath = resolveMilestoneFile(sfRoot, mid, 'CONTEXT');
|
||||||
if (ctxPath && existsSync(ctxPath)) {
|
if (ctxPath && existsSync(ctxPath)) {
|
||||||
const content = readFileSync(ctxPath, 'utf-8');
|
const content = readFileSync(ctxPath, 'utf-8');
|
||||||
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
|
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
|
||||||
if (h1) return h1[1].trim();
|
if (h1) return h1[1].trim();
|
||||||
}
|
}
|
||||||
|
|
||||||
const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
|
const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
|
||||||
if (roadmapPath && existsSync(roadmapPath)) {
|
if (roadmapPath && existsSync(roadmapPath)) {
|
||||||
const content = readFileSync(roadmapPath, 'utf-8');
|
const content = readFileSync(roadmapPath, 'utf-8');
|
||||||
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
|
const h1 = content.match(/^#\s+(?:M\d+:?\s*)?(.+)/m);
|
||||||
|
|
@ -168,8 +168,8 @@ function readMilestoneTitle(gsdRoot: string, mid: string): string {
|
||||||
return mid;
|
return mid;
|
||||||
}
|
}
|
||||||
|
|
||||||
function readVision(gsdRoot: string, mid: string): string {
|
function readVision(sfRoot: string, mid: string): string {
|
||||||
const roadmapPath = resolveMilestoneFile(gsdRoot, mid, 'ROADMAP');
|
const roadmapPath = resolveMilestoneFile(sfRoot, mid, 'ROADMAP');
|
||||||
if (!roadmapPath || !existsSync(roadmapPath)) return '';
|
if (!roadmapPath || !existsSync(roadmapPath)) return '';
|
||||||
|
|
||||||
const content = readFileSync(roadmapPath, 'utf-8');
|
const content = readFileSync(roadmapPath, 'utf-8');
|
||||||
|
|
@ -182,7 +182,7 @@ function readVision(gsdRoot: string, mid: string): string {
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function readRoadmap(projectDir: string, filterMilestoneId?: string): RoadmapResult {
|
export function readRoadmap(projectDir: string, filterMilestoneId?: string): RoadmapResult {
|
||||||
const sf = resolveGsdRoot(projectDir);
|
const sf = resolveSFRoot(projectDir);
|
||||||
let milestoneIds = findMilestoneIds(sf);
|
let milestoneIds = findMilestoneIds(sf);
|
||||||
|
|
||||||
if (filterMilestoneId) {
|
if (filterMilestoneId) {
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
|
|
||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
import {
|
import {
|
||||||
resolveGsdRoot,
|
resolveSFRoot,
|
||||||
resolveRootFile,
|
resolveRootFile,
|
||||||
findMilestoneIds,
|
findMilestoneIds,
|
||||||
resolveMilestoneDir,
|
resolveMilestoneDir,
|
||||||
|
|
@ -114,7 +114,7 @@ function parseMilestoneRegistry(content: string): RegistryEntry[] {
|
||||||
// Count slices/tasks by walking filesystem
|
// Count slices/tasks by walking filesystem
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
|
function countSlicesAndTasks(sfRoot: string, milestoneIds: string[]): {
|
||||||
slices: ProgressResult['slices'];
|
slices: ProgressResult['slices'];
|
||||||
tasks: ProgressResult['tasks'];
|
tasks: ProgressResult['tasks'];
|
||||||
} {
|
} {
|
||||||
|
|
@ -122,11 +122,11 @@ function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
|
||||||
let taskTotal = 0, taskDone = 0;
|
let taskTotal = 0, taskDone = 0;
|
||||||
|
|
||||||
for (const mid of milestoneIds) {
|
for (const mid of milestoneIds) {
|
||||||
const sliceIds = findSliceIds(gsdRoot, mid);
|
const sliceIds = findSliceIds(sfRoot, mid);
|
||||||
sliceTotal += sliceIds.length;
|
sliceTotal += sliceIds.length;
|
||||||
|
|
||||||
for (const sid of sliceIds) {
|
for (const sid of sliceIds) {
|
||||||
const tasks = findTaskFiles(gsdRoot, mid, sid);
|
const tasks = findTaskFiles(sfRoot, mid, sid);
|
||||||
taskTotal += tasks.length;
|
taskTotal += tasks.length;
|
||||||
|
|
||||||
const allDone = tasks.length > 0 && tasks.every((t) => t.hasSummary);
|
const allDone = tasks.length > 0 && tasks.every((t) => t.hasSummary);
|
||||||
|
|
@ -158,7 +158,7 @@ function countSlicesAndTasks(gsdRoot: string, milestoneIds: string[]): {
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export function readProgress(projectDir: string): ProgressResult {
|
export function readProgress(projectDir: string): ProgressResult {
|
||||||
const sf = resolveGsdRoot(projectDir);
|
const sf = resolveSFRoot(projectDir);
|
||||||
const statePath = resolveRootFile(sf, 'STATE.md');
|
const statePath = resolveRootFile(sf, 'STATE.md');
|
||||||
|
|
||||||
// Defaults
|
// Defaults
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,7 @@ import { readHistory } from './readers/metrics.js';
|
||||||
import { readCaptures } from './readers/captures.js';
|
import { readCaptures } from './readers/captures.js';
|
||||||
import { readKnowledge } from './readers/knowledge.js';
|
import { readKnowledge } from './readers/knowledge.js';
|
||||||
import { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './readers/graph.js';
|
import { buildGraph, writeGraph, writeSnapshot, graphStatus, graphQuery, graphDiff } from './readers/graph.js';
|
||||||
import { resolveGsdRoot } from './readers/paths.js';
|
import { resolveSFRoot } from './readers/paths.js';
|
||||||
import { runDoctorLite } from './readers/doctor-lite.js';
|
import { runDoctorLite } from './readers/doctor-lite.js';
|
||||||
import { registerWorkflowTools } from './workflow-tools.js';
|
import { registerWorkflowTools } from './workflow-tools.js';
|
||||||
import { applySecrets, checkExistingEnvKeys, detectDestination } from './env-writer.js';
|
import { applySecrets, checkExistingEnvKeys, detectDestination } from './env-writer.js';
|
||||||
|
|
@ -82,7 +82,7 @@ function normalizeQuery(query: string | undefined): QueryCategory {
|
||||||
}
|
}
|
||||||
|
|
||||||
async function readProjectState(projectDir: string, query: string | undefined): Promise<Record<string, unknown>> {
|
async function readProjectState(projectDir: string, query: string | undefined): Promise<Record<string, unknown>> {
|
||||||
const gsdDir = join(resolve(projectDir), '.sf');
|
const sfDir = join(resolve(projectDir), '.sf');
|
||||||
const category = normalizeQuery(query);
|
const category = normalizeQuery(query);
|
||||||
const wanted = new Set<ProjectStateField>(QUERY_FIELDS[category]);
|
const wanted = new Set<ProjectStateField>(QUERY_FIELDS[category]);
|
||||||
|
|
||||||
|
|
@ -93,7 +93,7 @@ async function readProjectState(projectDir: string, query: string | undefined):
|
||||||
|
|
||||||
if (wanted.has('state')) {
|
if (wanted.has('state')) {
|
||||||
try {
|
try {
|
||||||
result.state = await readFile(join(gsdDir, 'STATE.md'), 'utf-8');
|
result.state = await readFile(join(sfDir, 'STATE.md'), 'utf-8');
|
||||||
} catch {
|
} catch {
|
||||||
result.state = null;
|
result.state = null;
|
||||||
}
|
}
|
||||||
|
|
@ -101,7 +101,7 @@ async function readProjectState(projectDir: string, query: string | undefined):
|
||||||
|
|
||||||
if (wanted.has('project')) {
|
if (wanted.has('project')) {
|
||||||
try {
|
try {
|
||||||
result.project = await readFile(join(gsdDir, 'PROJECT.md'), 'utf-8');
|
result.project = await readFile(join(sfDir, 'PROJECT.md'), 'utf-8');
|
||||||
} catch {
|
} catch {
|
||||||
result.project = null;
|
result.project = null;
|
||||||
}
|
}
|
||||||
|
|
@ -109,14 +109,14 @@ async function readProjectState(projectDir: string, query: string | undefined):
|
||||||
|
|
||||||
if (wanted.has('requirements')) {
|
if (wanted.has('requirements')) {
|
||||||
try {
|
try {
|
||||||
result.requirements = await readFile(join(gsdDir, 'REQUIREMENTS.md'), 'utf-8');
|
result.requirements = await readFile(join(sfDir, 'REQUIREMENTS.md'), 'utf-8');
|
||||||
} catch {
|
} catch {
|
||||||
result.requirements = null;
|
result.requirements = null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (wanted.has('milestones')) {
|
if (wanted.has('milestones')) {
|
||||||
const milestonesDir = join(gsdDir, 'milestones');
|
const milestonesDir = join(sfDir, 'milestones');
|
||||||
try {
|
try {
|
||||||
const entries = await readdir(milestonesDir, { withFileTypes: true });
|
const entries = await readdir(milestonesDir, { withFileTypes: true });
|
||||||
const milestones: Array<{ id: string; hasRoadmap: boolean; hasSummary: boolean }> = [];
|
const milestones: Array<{ id: string; hasRoadmap: boolean; hasSummary: boolean }> = [];
|
||||||
|
|
@ -845,15 +845,15 @@ export async function createMcpServer(sessionManager: SessionManager): Promise<{
|
||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const gsdRoot = resolveGsdRoot(projectDir);
|
const sfRoot = resolveSFRoot(projectDir);
|
||||||
|
|
||||||
switch (mode) {
|
switch (mode) {
|
||||||
case 'build': {
|
case 'build': {
|
||||||
if (snapshot) {
|
if (snapshot) {
|
||||||
await writeSnapshot(gsdRoot).catch(() => { /* best-effort */ });
|
await writeSnapshot(sfRoot).catch(() => { /* best-effort */ });
|
||||||
}
|
}
|
||||||
const graph = await buildGraph(projectDir);
|
const graph = await buildGraph(projectDir);
|
||||||
await writeGraph(gsdRoot, graph);
|
await writeGraph(sfRoot, graph);
|
||||||
return jsonContent({
|
return jsonContent({
|
||||||
built: true,
|
built: true,
|
||||||
nodeCount: graph.nodes.length,
|
nodeCount: graph.nodes.length,
|
||||||
|
|
|
||||||
|
|
@ -249,10 +249,10 @@ export class SessionManager {
|
||||||
|
|
||||||
// Fallback: locate `sf` via which
|
// Fallback: locate `sf` via which
|
||||||
try {
|
try {
|
||||||
const gsdBin = execSync('which sf', { encoding: 'utf-8' }).trim();
|
const sfBin = execSync('which sf', { encoding: 'utf-8' }).trim();
|
||||||
if (gsdBin) {
|
if (sfBin) {
|
||||||
// sf bin is typically a symlink to dist/loader.js — return the resolved path
|
// sf bin is typically a symlink to dist/loader.js — return the resolved path
|
||||||
return resolve(gsdBin);
|
return resolve(sfBin);
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
// which failed
|
// which failed
|
||||||
|
|
|
||||||
|
|
@ -34,7 +34,7 @@ if (!native) {
|
||||||
|
|
||||||
describe("native fd: fuzzyFind()", () => {
|
describe("native fd: fuzzyFind()", () => {
|
||||||
test("finds files matching a query", (t) => {
|
test("finds files matching a query", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "main.rs"), "fn main() {}");
|
fs.writeFileSync(path.join(tmpDir, "main.rs"), "fn main() {}");
|
||||||
|
|
@ -52,7 +52,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("returns empty results for non-matching query", (t) => {
|
test("returns empty results for non-matching query", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "hello.txt"), "hello");
|
fs.writeFileSync(path.join(tmpDir, "hello.txt"), "hello");
|
||||||
|
|
@ -67,7 +67,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("respects maxResults limit", (t) => {
|
test("respects maxResults limit", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
for (let i = 0; i < 10; i++) {
|
for (let i = 0; i < 10; i++) {
|
||||||
|
|
@ -85,7 +85,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("directories have trailing slash and bonus score", (t) => {
|
test("directories have trailing slash and bonus score", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "models"));
|
fs.mkdirSync(path.join(tmpDir, "models"));
|
||||||
|
|
@ -103,7 +103,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("empty query returns all entries", (t) => {
|
test("empty query returns all entries", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "a.txt"), "a");
|
fs.writeFileSync(path.join(tmpDir, "a.txt"), "a");
|
||||||
|
|
@ -123,7 +123,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("fuzzy subsequence matching works", (t) => {
|
test("fuzzy subsequence matching works", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "MyComponentFile.tsx"), "export {}");
|
fs.writeFileSync(path.join(tmpDir, "MyComponentFile.tsx"), "export {}");
|
||||||
|
|
@ -143,7 +143,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
const previousTtl = process.env.FS_SCAN_CACHE_TTL_MS;
|
const previousTtl = process.env.FS_SCAN_CACHE_TTL_MS;
|
||||||
process.env.FS_SCAN_CACHE_TTL_MS = "10000";
|
process.env.FS_SCAN_CACHE_TTL_MS = "10000";
|
||||||
|
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => {
|
t.after(() => {
|
||||||
native.invalidateFsScanCache(tmpDir);
|
native.invalidateFsScanCache(tmpDir);
|
||||||
fs.rmSync(tmpDir, { recursive: true, force: true });
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
|
@ -175,7 +175,7 @@ describe("native fd: fuzzyFind()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("results are sorted by score descending", (t) => {
|
test("results are sorted by score descending", (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-fd-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-fd-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "main.ts"), "");
|
fs.writeFileSync(path.join(tmpDir, "main.ts"), "");
|
||||||
|
|
|
||||||
|
|
@ -44,7 +44,7 @@ if (!native) {
|
||||||
|
|
||||||
describe("native glob: glob()", () => {
|
describe("native glob: glob()", () => {
|
||||||
test("finds files matching a pattern", async (t) => {
|
test("finds files matching a pattern", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "file1.ts"), "const a = 1;");
|
fs.writeFileSync(path.join(tmpDir, "file1.ts"), "const a = 1;");
|
||||||
|
|
@ -60,7 +60,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("recursive matching into subdirectories", async (t) => {
|
test("recursive matching into subdirectories", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "src"));
|
fs.mkdirSync(path.join(tmpDir, "src"));
|
||||||
|
|
@ -79,7 +79,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("respects maxResults limit", async (t) => {
|
test("respects maxResults limit", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
for (let i = 0; i < 10; i++) {
|
for (let i = 0; i < 10; i++) {
|
||||||
|
|
@ -97,7 +97,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("filters by file type (directories only)", async (t) => {
|
test("filters by file type (directories only)", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "dir1"));
|
fs.mkdirSync(path.join(tmpDir, "dir1"));
|
||||||
|
|
@ -117,7 +117,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("respects .gitignore", async (t) => {
|
test("respects .gitignore", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
// Init a git repo so .gitignore is respected
|
// Init a git repo so .gitignore is respected
|
||||||
|
|
@ -137,7 +137,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("includes gitignored files when gitignore=false", async (t) => {
|
test("includes gitignored files when gitignore=false", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.mkdirSync(path.join(tmpDir, ".git"));
|
fs.mkdirSync(path.join(tmpDir, ".git"));
|
||||||
|
|
@ -155,7 +155,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("skips node_modules by default", async (t) => {
|
test("skips node_modules by default", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.mkdirSync(path.join(tmpDir, "node_modules"));
|
fs.mkdirSync(path.join(tmpDir, "node_modules"));
|
||||||
|
|
@ -173,7 +173,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("sortByMtime returns most recent first", async (t) => {
|
test("sortByMtime returns most recent first", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "old.txt"), "old");
|
fs.writeFileSync(path.join(tmpDir, "old.txt"), "old");
|
||||||
|
|
@ -209,7 +209,7 @@ describe("native glob: glob()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("returns mtime for each entry", async (t) => {
|
test("returns mtime for each entry", async (t) => {
|
||||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-glob-test-"));
|
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-glob-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "test.txt"), "content");
|
fs.writeFileSync(path.join(tmpDir, "test.txt"), "content");
|
||||||
|
|
|
||||||
|
|
@ -94,7 +94,7 @@ describe("native grep: grep()", () => {
|
||||||
let tmpDir;
|
let tmpDir;
|
||||||
|
|
||||||
test("returns a promise", async (t) => {
|
test("returns a promise", async (t) => {
|
||||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
|
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\n");
|
||||||
|
|
@ -111,7 +111,7 @@ describe("native grep: grep()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("searches files on disk", async (t) => {
|
test("searches files on disk", async (t) => {
|
||||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\nfoo bar\n");
|
fs.writeFileSync(path.join(tmpDir, "file1.txt"), "hello world\nfoo bar\n");
|
||||||
|
|
@ -133,7 +133,7 @@ describe("native grep: grep()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("respects glob filter", async (t) => {
|
test("respects glob filter", async (t) => {
|
||||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
fs.writeFileSync(path.join(tmpDir, "code.ts"), "hello typescript\n");
|
fs.writeFileSync(path.join(tmpDir, "code.ts"), "hello typescript\n");
|
||||||
|
|
@ -151,7 +151,7 @@ describe("native grep: grep()", () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
test("respects maxCount", async (t) => {
|
test("respects maxCount", async (t) => {
|
||||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "gsd-grep-test-"));
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "sf-grep-test-"));
|
||||||
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
t.after(() => fs.rmSync(tmpDir, { recursive: true, force: true }));
|
||||||
|
|
||||||
for (let i = 0; i < 10; i++) {
|
for (let i = 0; i < 10; i++) {
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
/**
|
/**
|
||||||
* SF file parser — native Rust implementation.
|
* SF file parser — native Rust implementation.
|
||||||
*
|
*
|
||||||
* Parses `.gsd/` directory markdown files containing YAML-like frontmatter
|
* Parses `.sf/` directory markdown files containing YAML-like frontmatter
|
||||||
* and structured sections. Replaces the JS regex-based parser for
|
* and structured sections. Replaces the JS regex-based parser for
|
||||||
* performance-critical batch operations.
|
* performance-critical batch operations.
|
||||||
*/
|
*/
|
||||||
|
|
@ -72,7 +72,7 @@ export function extractAllSections(
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Batch-parse all `.md` files in a `.gsd/` directory tree.
|
* Batch-parse all `.md` files in a `.sf/` directory tree.
|
||||||
*
|
*
|
||||||
* Reads and parses all markdown files under the given directory.
|
* Reads and parses all markdown files under the given directory.
|
||||||
* Each file gets frontmatter parsing and section extraction.
|
* Each file gets frontmatter parsing and section extraction.
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
/**
|
/**
|
||||||
* SF file parser type definitions.
|
* SF file parser type definitions.
|
||||||
*
|
*
|
||||||
* Types for the native Rust parser that handles `.gsd/` directory files
|
* Types for the native Rust parser that handles `.sf/` directory files
|
||||||
* containing YAML-like frontmatter and markdown sections.
|
* containing YAML-like frontmatter and markdown sections.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -4,8 +4,8 @@
|
||||||
"description": "Coding agent CLI (vendored from pi-mono)",
|
"description": "Coding agent CLI (vendored from pi-mono)",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"piConfig": {
|
"piConfig": {
|
||||||
"name": "pi",
|
"name": "sf",
|
||||||
"configDir": ".pi"
|
"configDir": ".sf"
|
||||||
},
|
},
|
||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"types": "./dist/index.d.ts",
|
"types": "./dist/index.d.ts",
|
||||||
|
|
@ -35,12 +35,14 @@
|
||||||
"strip-ansi": "^7.1.0",
|
"strip-ansi": "^7.1.0",
|
||||||
"undici": "^7.24.2",
|
"undici": "^7.24.2",
|
||||||
"sql.js": "^1.14.1",
|
"sql.js": "^1.14.1",
|
||||||
"yaml": "^2.8.2"
|
"yaml": "^2.8.2",
|
||||||
|
"express": "^4.19.2"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/sql.js": "^1.4.9",
|
"@types/sql.js": "^1.4.9",
|
||||||
"@types/diff": "^7.0.2",
|
"@types/diff": "^7.0.2",
|
||||||
"@types/hosted-git-info": "^3.0.5",
|
"@types/hosted-git-info": "^3.0.5",
|
||||||
"@types/proper-lockfile": "^4.1.4"
|
"@types/proper-lockfile": "^4.1.4",
|
||||||
|
"@types/express": "^4.17.21"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
21
packages/pi-coding-agent/src/cli/args.test.ts
Normal file
21
packages/pi-coding-agent/src/cli/args.test.ts
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
import assert from "node:assert/strict";
|
||||||
|
import { describe, it } from "node:test";
|
||||||
|
import { parseArgs } from "./args.ts";
|
||||||
|
|
||||||
|
describe("parseArgs", () => {
|
||||||
|
it("parses optional-value extension flags with implicit and explicit values", () => {
|
||||||
|
const extensionFlags = new Map([
|
||||||
|
["gemini-cli-proxy", { type: "string" as const, allowNoValue: true }],
|
||||||
|
]);
|
||||||
|
const defaultFlagArgs = parseArgs(["--gemini-cli-proxy"], extensionFlags);
|
||||||
|
const explicitFlagArgs = parseArgs(["--gemini-cli-proxy=8080"], extensionFlags);
|
||||||
|
|
||||||
|
assert.deepEqual(
|
||||||
|
[
|
||||||
|
defaultFlagArgs.unknownFlags.get("gemini-cli-proxy"),
|
||||||
|
explicitFlagArgs.unknownFlags.get("gemini-cli-proxy"),
|
||||||
|
],
|
||||||
|
[true, "8080"],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -53,13 +53,18 @@ export interface Args {
|
||||||
bare?: boolean;
|
bare?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface ExtensionFlagParseOptions {
|
||||||
|
type: "boolean" | "string";
|
||||||
|
allowNoValue?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
const VALID_THINKING_LEVELS = ["off", "minimal", "low", "medium", "high", "xhigh"] as const;
|
const VALID_THINKING_LEVELS = ["off", "minimal", "low", "medium", "high", "xhigh"] as const;
|
||||||
|
|
||||||
export function isValidThinkingLevel(level: string): level is ThinkingLevel {
|
export function isValidThinkingLevel(level: string): level is ThinkingLevel {
|
||||||
return VALID_THINKING_LEVELS.includes(level as ThinkingLevel);
|
return VALID_THINKING_LEVELS.includes(level as ThinkingLevel);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function parseArgs(args: string[], extensionFlags?: Map<string, { type: "boolean" | "string" }>): Args {
|
export function parseArgs(args: string[], extensionFlags?: Map<string, ExtensionFlagParseOptions>): Args {
|
||||||
const result: Args = {
|
const result: Args = {
|
||||||
messages: [],
|
messages: [],
|
||||||
fileArgs: [],
|
fileArgs: [],
|
||||||
|
|
@ -179,13 +184,18 @@ export function parseArgs(args: string[], extensionFlags?: Map<string, { type: "
|
||||||
result.fileArgs.push(arg.slice(1)); // Remove @ prefix
|
result.fileArgs.push(arg.slice(1)); // Remove @ prefix
|
||||||
} else if (arg.startsWith("--") && extensionFlags) {
|
} else if (arg.startsWith("--") && extensionFlags) {
|
||||||
// Check if it's an extension-registered flag
|
// Check if it's an extension-registered flag
|
||||||
const flagName = arg.slice(2);
|
const equalsIndex = arg.indexOf("=");
|
||||||
|
const flagName = arg.slice(2, equalsIndex === -1 ? undefined : equalsIndex);
|
||||||
const extFlag = extensionFlags.get(flagName);
|
const extFlag = extensionFlags.get(flagName);
|
||||||
if (extFlag) {
|
if (extFlag) {
|
||||||
if (extFlag.type === "boolean") {
|
if (extFlag.type === "boolean") {
|
||||||
result.unknownFlags.set(flagName, true);
|
result.unknownFlags.set(flagName, true);
|
||||||
} else if (extFlag.type === "string" && i + 1 < args.length) {
|
} else if (equalsIndex !== -1) {
|
||||||
|
result.unknownFlags.set(flagName, arg.slice(equalsIndex + 1));
|
||||||
|
} else if (i + 1 < args.length && !args[i + 1].startsWith("-") && !args[i + 1].startsWith("@")) {
|
||||||
result.unknownFlags.set(flagName, args[++i]);
|
result.unknownFlags.set(flagName, args[++i]);
|
||||||
|
} else if (extFlag.allowNoValue) {
|
||||||
|
result.unknownFlags.set(flagName, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Unknown flags without extensionFlags are silently ignored (first pass)
|
// Unknown flags without extensionFlags are silently ignored (first pass)
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ const mainSource = readFileSync(
|
||||||
join(repoRoot, "packages/pi-coding-agent/src/main.ts"),
|
join(repoRoot, "packages/pi-coding-agent/src/main.ts"),
|
||||||
"utf-8",
|
"utf-8",
|
||||||
);
|
);
|
||||||
const gsdCliSource = readFileSync(
|
const sfCliSource = readFileSync(
|
||||||
join(repoRoot, "src/cli.ts"),
|
join(repoRoot, "src/cli.ts"),
|
||||||
"utf-8",
|
"utf-8",
|
||||||
);
|
);
|
||||||
|
|
@ -56,14 +56,14 @@ test("AgentSession stores persistModelChanges and defaults it to false (#4251)",
|
||||||
});
|
});
|
||||||
|
|
||||||
test("sf src/cli.ts interactive branch opts into persistence (#4251)", () => {
|
test("sf src/cli.ts interactive branch opts into persistence (#4251)", () => {
|
||||||
const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
|
const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
|
||||||
// Interactive createAgentSession call lives after the print-mode branch.
|
// Interactive createAgentSession call lives after the print-mode branch.
|
||||||
const interactiveCreateIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx + 10);
|
const interactiveCreateIdx = sfCliSource.indexOf("createAgentSession({", printGuardIdx + 10);
|
||||||
// Skip the print-mode createAgentSession (already found by earlier tests);
|
// Skip the print-mode createAgentSession (already found by earlier tests);
|
||||||
// walk forward to the next one.
|
// walk forward to the next one.
|
||||||
const nextCreateIdx = gsdCliSource.indexOf("createAgentSession({", interactiveCreateIdx + 10);
|
const nextCreateIdx = sfCliSource.indexOf("createAgentSession({", interactiveCreateIdx + 10);
|
||||||
assert.ok(nextCreateIdx >= 0, "missing interactive createAgentSession call in src/cli.ts");
|
assert.ok(nextCreateIdx >= 0, "missing interactive createAgentSession call in src/cli.ts");
|
||||||
const interactiveBlock = gsdCliSource.slice(nextCreateIdx, nextCreateIdx + 800);
|
const interactiveBlock = sfCliSource.slice(nextCreateIdx, nextCreateIdx + 800);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
interactiveBlock.includes("persistModelChanges: true"),
|
interactiveBlock.includes("persistModelChanges: true"),
|
||||||
"interactive createAgentSession must explicitly pass persistModelChanges: true so user model picks still persist after the default was inverted to false (#4251)",
|
"interactive createAgentSession must explicitly pass persistModelChanges: true so user model picks still persist after the default was inverted to false (#4251)",
|
||||||
|
|
@ -108,11 +108,11 @@ test("CreateAgentSessionOptions forwards persistModelChanges to AgentSession (#4
|
||||||
// moved to the "main.ts sets persistModelChanges = isInteractive" test below.
|
// moved to the "main.ts sets persistModelChanges = isInteractive" test below.
|
||||||
|
|
||||||
test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: false (#4251)", () => {
|
test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: false (#4251)", () => {
|
||||||
const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
|
const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
|
||||||
assert.ok(printGuardIdx >= 0, "missing isPrintMode branch in src/cli.ts");
|
assert.ok(printGuardIdx >= 0, "missing isPrintMode branch in src/cli.ts");
|
||||||
const createIdx = gsdCliSource.indexOf("createAgentSession({", printGuardIdx);
|
const createIdx = sfCliSource.indexOf("createAgentSession({", printGuardIdx);
|
||||||
assert.ok(createIdx >= 0, "missing createAgentSession call in print-mode branch");
|
assert.ok(createIdx >= 0, "missing createAgentSession call in print-mode branch");
|
||||||
const createBlock = gsdCliSource.slice(createIdx, createIdx + 800);
|
const createBlock = sfCliSource.slice(createIdx, createIdx + 800);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
createBlock.includes("persistModelChanges: false"),
|
createBlock.includes("persistModelChanges: false"),
|
||||||
"print-mode createAgentSession must pass persistModelChanges: false so --model overrides cannot mutate settings.json",
|
"print-mode createAgentSession must pass persistModelChanges: false so --model overrides cannot mutate settings.json",
|
||||||
|
|
@ -120,10 +120,10 @@ test("sf src/cli.ts print-mode createAgentSession passes persistModelChanges: fa
|
||||||
});
|
});
|
||||||
|
|
||||||
test("sf src/cli.ts print-mode --model override calls setModel with persist: false (#4251)", () => {
|
test("sf src/cli.ts print-mode --model override calls setModel with persist: false (#4251)", () => {
|
||||||
const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
|
const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
|
||||||
const overrideIdx = gsdCliSource.indexOf("if (cliFlags.model)", printGuardIdx);
|
const overrideIdx = sfCliSource.indexOf("if (cliFlags.model)", printGuardIdx);
|
||||||
assert.ok(overrideIdx >= 0, "missing --model override block in print-mode branch");
|
assert.ok(overrideIdx >= 0, "missing --model override block in print-mode branch");
|
||||||
const overrideBlock = gsdCliSource.slice(overrideIdx, overrideIdx + 500);
|
const overrideBlock = sfCliSource.slice(overrideIdx, overrideIdx + 500);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
overrideBlock.includes("session.setModel(match, { persist: false })"),
|
overrideBlock.includes("session.setModel(match, { persist: false })"),
|
||||||
"print-mode --model override must pass { persist: false } explicitly so the intent is visible at the call site",
|
"print-mode --model override must pass { persist: false } explicitly so the intent is visible at the call site",
|
||||||
|
|
@ -131,19 +131,19 @@ test("sf src/cli.ts print-mode --model override calls setModel with persist: fal
|
||||||
});
|
});
|
||||||
|
|
||||||
test("sf src/cli.ts print-mode skips validateConfiguredModel when --model is set (#4251)", () => {
|
test("sf src/cli.ts print-mode skips validateConfiguredModel when --model is set (#4251)", () => {
|
||||||
const printGuardIdx = gsdCliSource.indexOf("if (isPrintMode)");
|
const printGuardIdx = sfCliSource.indexOf("if (isPrintMode)");
|
||||||
const validateIdx = gsdCliSource.indexOf("validateConfiguredModel(", printGuardIdx);
|
const validateIdx = sfCliSource.indexOf("validateConfiguredModel(", printGuardIdx);
|
||||||
assert.ok(validateIdx >= 0, "missing validateConfiguredModel call in print-mode branch");
|
assert.ok(validateIdx >= 0, "missing validateConfiguredModel call in print-mode branch");
|
||||||
// Walk backward to find the nearest enclosing `if (!cliFlags.model)` guard.
|
// Walk backward to find the nearest enclosing `if (!cliFlags.model)` guard.
|
||||||
const guardIdx = gsdCliSource.lastIndexOf("if (!cliFlags.model)", validateIdx);
|
const guardIdx = sfCliSource.lastIndexOf("if (!cliFlags.model)", validateIdx);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
guardIdx >= 0 && guardIdx > printGuardIdx,
|
guardIdx >= 0 && guardIdx > printGuardIdx,
|
||||||
"validateConfiguredModel must be guarded by `if (!cliFlags.model)` in print mode so a CLI-provided model never triggers fallback repair that overwrites settings.json",
|
"validateConfiguredModel must be guarded by `if (!cliFlags.model)` in print mode so a CLI-provided model never triggers fallback repair that overwrites settings.json",
|
||||||
);
|
);
|
||||||
// reapplyValidatedModelOnFallback must be inside the same guard block.
|
// reapplyValidatedModelOnFallback must be inside the same guard block.
|
||||||
const reapplyIdx = gsdCliSource.indexOf("reapplyValidatedModelOnFallback(", validateIdx);
|
const reapplyIdx = sfCliSource.indexOf("reapplyValidatedModelOnFallback(", validateIdx);
|
||||||
assert.ok(reapplyIdx >= 0, "missing reapplyValidatedModelOnFallback call");
|
assert.ok(reapplyIdx >= 0, "missing reapplyValidatedModelOnFallback call");
|
||||||
const blockEnd = gsdCliSource.indexOf("\n }\n", guardIdx);
|
const blockEnd = sfCliSource.indexOf("\n }\n", guardIdx);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
reapplyIdx < blockEnd,
|
reapplyIdx < blockEnd,
|
||||||
"reapplyValidatedModelOnFallback must be inside the same `if (!cliFlags.model)` block as validateConfiguredModel",
|
"reapplyValidatedModelOnFallback must be inside the same `if (!cliFlags.model)` block as validateConfiguredModel",
|
||||||
|
|
|
||||||
|
|
@ -74,6 +74,7 @@ export type {
|
||||||
// Runtime
|
// Runtime
|
||||||
ExtensionRuntime,
|
ExtensionRuntime,
|
||||||
ExtensionShortcut,
|
ExtensionShortcut,
|
||||||
|
ExtensionStartupContext,
|
||||||
ExtensionUIContext,
|
ExtensionUIContext,
|
||||||
ExtensionUIDialogOptions,
|
ExtensionUIDialogOptions,
|
||||||
ExtensionWidgetOptions,
|
ExtensionWidgetOptions,
|
||||||
|
|
|
||||||
|
|
@ -500,7 +500,16 @@ function createExtensionAPI(
|
||||||
|
|
||||||
registerFlag(
|
registerFlag(
|
||||||
name: string,
|
name: string,
|
||||||
options: { description?: string; type: "boolean" | "string"; default?: boolean | string },
|
options: {
|
||||||
|
description?: string;
|
||||||
|
type: "boolean" | "string";
|
||||||
|
default?: boolean | string;
|
||||||
|
allowNoValue?: boolean;
|
||||||
|
onStartup?: (
|
||||||
|
value: boolean | string,
|
||||||
|
context: import("./types.js").ExtensionStartupContext,
|
||||||
|
) => Promise<void> | void;
|
||||||
|
},
|
||||||
): void {
|
): void {
|
||||||
extension.flags.set(name, { name, extensionPath: extension.path, ...options });
|
extension.flags.set(name, { name, extensionPath: extension.path, ...options });
|
||||||
if (options.default !== undefined && !runtime.flagValues.has(name)) {
|
if (options.default !== undefined && !runtime.flagValues.has(name)) {
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
// GSD2 — Regression test: pendingProviderRegistrations must be flushed exactly once (#3576)
|
// sf — Regression test: pendingProviderRegistrations must be flushed exactly once (#3576)
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
import { describe, it } from "node:test";
|
import { describe, it } from "node:test";
|
||||||
|
|
|
||||||
|
|
@ -39,6 +39,7 @@ import type {
|
||||||
} from "@sf-run/pi-tui";
|
} from "@sf-run/pi-tui";
|
||||||
import type { Static, TSchema } from "@sinclair/typebox";
|
import type { Static, TSchema } from "@sinclair/typebox";
|
||||||
import type { Theme } from "../../modes/interactive/theme/theme.js";
|
import type { Theme } from "../../modes/interactive/theme/theme.js";
|
||||||
|
import type { AuthStorage } from "../auth-storage.js";
|
||||||
import type { BashResult } from "../bash-executor.js";
|
import type { BashResult } from "../bash-executor.js";
|
||||||
import type { CompactionPreparation, CompactionResult } from "../compaction/index.js";
|
import type { CompactionPreparation, CompactionResult } from "../compaction/index.js";
|
||||||
import type { EventBus } from "../event-bus.js";
|
import type { EventBus } from "../event-bus.js";
|
||||||
|
|
@ -1164,6 +1165,8 @@ export interface ExtensionAPI {
|
||||||
description?: string;
|
description?: string;
|
||||||
type: "boolean" | "string";
|
type: "boolean" | "string";
|
||||||
default?: boolean | string;
|
default?: boolean | string;
|
||||||
|
allowNoValue?: boolean;
|
||||||
|
onStartup?: (value: boolean | string, context: ExtensionStartupContext) => Promise<void> | void;
|
||||||
},
|
},
|
||||||
): void;
|
): void;
|
||||||
|
|
||||||
|
|
@ -1407,9 +1410,18 @@ export interface ExtensionFlag {
|
||||||
description?: string;
|
description?: string;
|
||||||
type: "boolean" | "string";
|
type: "boolean" | "string";
|
||||||
default?: boolean | string;
|
default?: boolean | string;
|
||||||
|
allowNoValue?: boolean;
|
||||||
|
onStartup?: (value: boolean | string, context: ExtensionStartupContext) => Promise<void> | void;
|
||||||
extensionPath: string;
|
extensionPath: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface ExtensionStartupContext {
|
||||||
|
cwd: string;
|
||||||
|
agentDir: string;
|
||||||
|
authStorage: AuthStorage;
|
||||||
|
modelRegistry: ModelRegistry;
|
||||||
|
}
|
||||||
|
|
||||||
export interface ExtensionShortcut {
|
export interface ExtensionShortcut {
|
||||||
shortcut: KeyId;
|
shortcut: KeyId;
|
||||||
description?: string;
|
description?: string;
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
// GSD2 — Regression test for LSP legacy server key aliases
|
// sf — Regression test for LSP legacy server key aliases
|
||||||
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
// Copyright (c) 2026 Jeremy McSpadden <jeremy@fluxlabs.net>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -82,6 +82,7 @@ export type {
|
||||||
ExtensionHandler,
|
ExtensionHandler,
|
||||||
ExtensionRuntime,
|
ExtensionRuntime,
|
||||||
ExtensionShortcut,
|
ExtensionShortcut,
|
||||||
|
ExtensionStartupContext,
|
||||||
ExtensionUIContext,
|
ExtensionUIContext,
|
||||||
ExtensionUIDialogOptions,
|
ExtensionUIDialogOptions,
|
||||||
ExtensionWidgetOptions,
|
ExtensionWidgetOptions,
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@
|
||||||
import { type ImageContent, modelsAreEqual, supportsXhigh } from "@sf-run/pi-ai";
|
import { type ImageContent, modelsAreEqual, supportsXhigh } from "@sf-run/pi-ai";
|
||||||
import chalk from "chalk";
|
import chalk from "chalk";
|
||||||
import { createInterface } from "readline";
|
import { createInterface } from "readline";
|
||||||
import { type Args, parseArgs, printHelp } from "./cli/args.js";
|
import { type Args, type ExtensionFlagParseOptions, parseArgs, printHelp } from "./cli/args.js";
|
||||||
import { selectConfig } from "./cli/config-selector.js";
|
import { selectConfig } from "./cli/config-selector.js";
|
||||||
import { processFileArguments } from "./cli/file-processor.js";
|
import { processFileArguments } from "./cli/file-processor.js";
|
||||||
import { discoverAndPrintModels, listModels } from "./cli/list-models.js";
|
import { discoverAndPrintModels, listModels } from "./cli/list-models.js";
|
||||||
|
|
@ -226,6 +226,33 @@ async function createSessionManager(
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function runStartupFlagHandlers(
|
||||||
|
extensions: LoadExtensionsResult,
|
||||||
|
parsed: Args,
|
||||||
|
context: {
|
||||||
|
cwd: string;
|
||||||
|
agentDir: string;
|
||||||
|
authStorage: AuthStorage;
|
||||||
|
modelRegistry: ModelRegistry;
|
||||||
|
},
|
||||||
|
): Promise<boolean> {
|
||||||
|
let handledStartup = false;
|
||||||
|
|
||||||
|
for (const extension of extensions.extensions) {
|
||||||
|
for (const [flagName, flag] of extension.flags) {
|
||||||
|
const flagValue = parsed.unknownFlags.get(flagName);
|
||||||
|
if (flagValue === undefined || !flag.onStartup) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
await flag.onStartup(flagValue, context);
|
||||||
|
handledStartup = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return handledStartup;
|
||||||
|
}
|
||||||
|
|
||||||
function buildSessionOptions(
|
function buildSessionOptions(
|
||||||
parsed: Args,
|
parsed: Args,
|
||||||
scopedModels: ScopedModel[],
|
scopedModels: ScopedModel[],
|
||||||
|
|
@ -442,10 +469,10 @@ export async function main(args: string[]) {
|
||||||
}
|
}
|
||||||
extensionsResult.runtime.pendingProviderRegistrations = [];
|
extensionsResult.runtime.pendingProviderRegistrations = [];
|
||||||
|
|
||||||
const extensionFlags = new Map<string, { type: "boolean" | "string" }>();
|
const extensionFlags = new Map<string, ExtensionFlagParseOptions>();
|
||||||
for (const ext of extensionsResult.extensions) {
|
for (const ext of extensionsResult.extensions) {
|
||||||
for (const [name, flag] of ext.flags) {
|
for (const [name, flag] of ext.flags) {
|
||||||
extensionFlags.set(name, { type: flag.type });
|
extensionFlags.set(name, { type: flag.type, allowNoValue: flag.allowNoValue });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -490,6 +517,17 @@ export async function main(args: string[]) {
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
await runStartupFlagHandlers(extensionsResult, parsed, {
|
||||||
|
cwd,
|
||||||
|
agentDir,
|
||||||
|
authStorage,
|
||||||
|
modelRegistry,
|
||||||
|
})
|
||||||
|
) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// Read piped stdin content (if any) - skip for RPC mode which uses stdin for JSON-RPC
|
// Read piped stdin content (if any) - skip for RPC mode which uses stdin for JSON-RPC
|
||||||
if (parsed.mode !== "rpc") {
|
if (parsed.mode !== "rpc") {
|
||||||
const stdinContent = await readPipedStdin();
|
const stdinContent = await readPipedStdin();
|
||||||
|
|
|
||||||
|
|
@ -5,6 +5,8 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { type ChildProcess, spawn } from "node:child_process";
|
import { type ChildProcess, spawn } from "node:child_process";
|
||||||
|
import { existsSync } from "node:fs";
|
||||||
|
import { dirname, join, resolve } from "node:path";
|
||||||
import type { AgentEvent, AgentMessage, ThinkingLevel } from "@sf-run/pi-agent-core";
|
import type { AgentEvent, AgentMessage, ThinkingLevel } from "@sf-run/pi-agent-core";
|
||||||
import type { ImageContent } from "@sf-run/pi-ai";
|
import type { ImageContent } from "@sf-run/pi-ai";
|
||||||
import type { SessionStats } from "../../core/agent-session.js";
|
import type { SessionStats } from "../../core/agent-session.js";
|
||||||
|
|
@ -47,6 +49,49 @@ export interface ModelInfo {
|
||||||
|
|
||||||
export type RpcEventListener = (event: AgentEvent) => void;
|
export type RpcEventListener = (event: AgentEvent) => void;
|
||||||
|
|
||||||
|
interface RpcLaunchSpec {
|
||||||
|
command: string;
|
||||||
|
args: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
function isTypeScriptEntrypoint(cliPath: string): boolean {
|
||||||
|
return cliPath.endsWith(".ts") || cliPath.endsWith(".tsx");
|
||||||
|
}
|
||||||
|
|
||||||
|
function findResolveTsLoader(cliPath: string): string | null {
|
||||||
|
let currentDir = resolve(dirname(cliPath));
|
||||||
|
while (true) {
|
||||||
|
const candidate = join(currentDir, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs");
|
||||||
|
if (existsSync(candidate)) {
|
||||||
|
return candidate;
|
||||||
|
}
|
||||||
|
const parentDir = dirname(currentDir);
|
||||||
|
if (parentDir === currentDir) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
currentDir = parentDir;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildRpcLaunchSpec(cliPath: string): RpcLaunchSpec {
|
||||||
|
if (!isTypeScriptEntrypoint(cliPath)) {
|
||||||
|
return {
|
||||||
|
command: "node",
|
||||||
|
args: [cliPath],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveTsLoader = findResolveTsLoader(cliPath);
|
||||||
|
if (!resolveTsLoader) {
|
||||||
|
throw new Error(`Could not find resolve-ts.mjs for TypeScript CLI path: ${cliPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command: "node",
|
||||||
|
args: ["--import", resolveTsLoader, "--experimental-strip-types", cliPath],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// RPC Client
|
// RPC Client
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
@ -84,7 +129,8 @@ export class RpcClient {
|
||||||
args.push(...this.options.args);
|
args.push(...this.options.args);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.process = spawn("node", [cliPath, ...args], {
|
const launchSpec = buildRpcLaunchSpec(cliPath);
|
||||||
|
this.process = spawn(launchSpec.command, [...launchSpec.args, ...args], {
|
||||||
cwd: this.options.cwd,
|
cwd: this.options.cwd,
|
||||||
env: { ...process.env, ...this.options.env },
|
env: { ...process.env, ...this.options.env },
|
||||||
stdio: ["pipe", "pipe", "pipe"],
|
stdio: ["pipe", "pipe", "pipe"],
|
||||||
|
|
|
||||||
|
|
@ -10,6 +10,7 @@ import { describe, it, beforeEach, afterEach, mock } from "node:test";
|
||||||
import assert from "node:assert/strict";
|
import assert from "node:assert/strict";
|
||||||
import { PassThrough } from "node:stream";
|
import { PassThrough } from "node:stream";
|
||||||
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
|
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
|
||||||
|
import { buildRpcLaunchSpec } from "./rpc-client.js";
|
||||||
import type {
|
import type {
|
||||||
RpcCommand,
|
RpcCommand,
|
||||||
RpcResponse,
|
RpcResponse,
|
||||||
|
|
@ -506,6 +507,23 @@ describe("RpcClient command serialization", () => {
|
||||||
assert.equal(parsed.command, "prompt");
|
assert.equal(parsed.command, "prompt");
|
||||||
assert.equal(parsed.success, true);
|
assert.equal(parsed.success, true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("typescript cli paths launch through resolve-ts", () => {
|
||||||
|
const repoRoot = new URL("../../../../../", import.meta.url).pathname;
|
||||||
|
const cliPath = `${repoRoot}src/loader.ts`;
|
||||||
|
const launchSpec = buildRpcLaunchSpec(cliPath);
|
||||||
|
assert.equal(launchSpec.command, "node");
|
||||||
|
assert.equal(launchSpec.args[0], "--import");
|
||||||
|
assert.match(launchSpec.args[1], /src\/resources\/extensions\/sf\/tests\/resolve-ts\.mjs$/);
|
||||||
|
assert.equal(launchSpec.args[2], "--experimental-strip-types");
|
||||||
|
assert.equal(launchSpec.args[3], cliPath);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("compiled js cli paths launch directly", () => {
|
||||||
|
const launchSpec = buildRpcLaunchSpec("/tmp/dist/cli.js");
|
||||||
|
assert.equal(launchSpec.command, "node");
|
||||||
|
assert.deepEqual(launchSpec.args, ["/tmp/dist/cli.js"]);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ import type {
|
||||||
SessionStats,
|
SessionStats,
|
||||||
RpcV2Event,
|
RpcV2Event,
|
||||||
} from "./rpc-types.js";
|
} from "./rpc-types.js";
|
||||||
import { RpcClient } from "./rpc-client.js";
|
import { buildRpcLaunchSpec, RpcClient } from "./rpc-client.js";
|
||||||
import type { SdkAgentEvent } from "./rpc-client.js";
|
import type { SdkAgentEvent } from "./rpc-client.js";
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
@ -271,6 +271,25 @@ describe("RpcClient construction", () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("buildRpcLaunchSpec", () => {
|
||||||
|
it("uses direct node execution for compiled js entrypoints", () => {
|
||||||
|
const launchSpec = buildRpcLaunchSpec("/tmp/dist/cli.js");
|
||||||
|
assert.equal(launchSpec.command, "node");
|
||||||
|
assert.deepEqual(launchSpec.args, ["/tmp/dist/cli.js"]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("wraps typescript entrypoints with resolve-ts loader", () => {
|
||||||
|
const repoRoot = new URL("../../..", import.meta.url).pathname;
|
||||||
|
const cliPath = `${repoRoot}src/loader.ts`;
|
||||||
|
const launchSpec = buildRpcLaunchSpec(cliPath);
|
||||||
|
assert.equal(launchSpec.command, "node");
|
||||||
|
assert.equal(launchSpec.args[0], "--import");
|
||||||
|
assert.match(launchSpec.args[1], /src\/resources\/extensions\/sf\/tests\/resolve-ts\.mjs$/);
|
||||||
|
assert.equal(launchSpec.args[2], "--experimental-strip-types");
|
||||||
|
assert.equal(launchSpec.args[3], cliPath);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// events() Generator Tests
|
// events() Generator Tests
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
|
||||||
|
|
@ -7,6 +7,8 @@
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { type ChildProcess, spawn } from "node:child_process";
|
import { type ChildProcess, spawn } from "node:child_process";
|
||||||
|
import { existsSync } from "node:fs";
|
||||||
|
import { dirname, join, resolve } from "node:path";
|
||||||
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
|
import { attachJsonlLineReader, serializeJsonLine } from "./jsonl.js";
|
||||||
import type {
|
import type {
|
||||||
BashResult,
|
BashResult,
|
||||||
|
|
@ -55,6 +57,49 @@ export interface RpcClientOptions {
|
||||||
|
|
||||||
export type RpcEventListener = (event: SdkAgentEvent) => void;
|
export type RpcEventListener = (event: SdkAgentEvent) => void;
|
||||||
|
|
||||||
|
interface RpcLaunchSpec {
|
||||||
|
command: string;
|
||||||
|
args: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
function isTypeScriptEntrypoint(cliPath: string): boolean {
|
||||||
|
return cliPath.endsWith(".ts") || cliPath.endsWith(".tsx");
|
||||||
|
}
|
||||||
|
|
||||||
|
function findResolveTsLoader(cliPath: string): string | null {
|
||||||
|
let currentDir = resolve(dirname(cliPath));
|
||||||
|
while (true) {
|
||||||
|
const candidate = join(currentDir, "src", "resources", "extensions", "sf", "tests", "resolve-ts.mjs");
|
||||||
|
if (existsSync(candidate)) {
|
||||||
|
return candidate;
|
||||||
|
}
|
||||||
|
const parentDir = dirname(currentDir);
|
||||||
|
if (parentDir === currentDir) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
currentDir = parentDir;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildRpcLaunchSpec(cliPath: string): RpcLaunchSpec {
|
||||||
|
if (!isTypeScriptEntrypoint(cliPath)) {
|
||||||
|
return {
|
||||||
|
command: "node",
|
||||||
|
args: [cliPath],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveTsLoader = findResolveTsLoader(cliPath);
|
||||||
|
if (!resolveTsLoader) {
|
||||||
|
throw new Error(`Could not find resolve-ts.mjs for TypeScript CLI path: ${cliPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command: "node",
|
||||||
|
args: ["--import", resolveTsLoader, "--experimental-strip-types", cliPath],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// RPC Client
|
// RPC Client
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
|
|
@ -95,7 +140,8 @@ export class RpcClient {
|
||||||
args.push(...this.options.args);
|
args.push(...this.options.args);
|
||||||
}
|
}
|
||||||
|
|
||||||
this.process = spawn("node", [cliPath, ...args], {
|
const launchSpec = buildRpcLaunchSpec(cliPath);
|
||||||
|
this.process = spawn(launchSpec.command, [...launchSpec.args, ...args], {
|
||||||
cwd: this.options.cwd,
|
cwd: this.options.cwd,
|
||||||
env: { ...process.env, ...this.options.env },
|
env: { ...process.env, ...this.options.env },
|
||||||
stdio: ["pipe", "pipe", "pipe"],
|
stdio: ["pipe", "pipe", "pipe"],
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
{
|
{
|
||||||
"name": "@glittercowboy/gsd",
|
"name": "sf",
|
||||||
"version": "2.74.0",
|
"version": "2.74.0",
|
||||||
"piConfig": {
|
"piConfig": {
|
||||||
"name": "gsd",
|
"name": "sf",
|
||||||
"configDir": ".gsd"
|
"configDir": ".sf"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -238,7 +238,7 @@ function renderConsole(report) {
|
||||||
const { changedFiles, systemsPerFile, unmatchedFiles, systemRisks, risk } = report;
|
const { changedFiles, systemsPerFile, unmatchedFiles, systemRisks, risk } = report;
|
||||||
|
|
||||||
console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||||
console.log(' GSD2 PR Risk Report');
|
console.log(' sf PR Risk Report');
|
||||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
||||||
|
|
||||||
console.log(`Overall Risk: ${TIER_EMOJI[risk]} ${risk.toUpperCase()}`);
|
console.log(`Overall Risk: ${TIER_EMOJI[risk]} ${risk.toUpperCase()}`);
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
// GSD2 — Claude CLI binary detection for onboarding
|
// sf — Claude CLI binary detection for onboarding
|
||||||
// Lightweight check used at onboarding time (before extensions load).
|
// Lightweight check used at onboarding time (before extensions load).
|
||||||
// The full readiness check with caching lives in the claude-code-cli extension.
|
// The full readiness check with caching lives in the claude-code-cli extension.
|
||||||
|
|
||||||
|
|
|
||||||
11
src/cli.ts
11
src/cli.ts
|
|
@ -31,7 +31,7 @@ import {
|
||||||
import { stopWebMode } from './web-mode.js'
|
import { stopWebMode } from './web-mode.js'
|
||||||
import { getProjectSessionsDir } from './project-sessions.js'
|
import { getProjectSessionsDir } from './project-sessions.js'
|
||||||
import { markStartup, printStartupTimings } from './startup-timings.js'
|
import { markStartup, printStartupTimings } from './startup-timings.js'
|
||||||
import { bootstrapRtk, SF_RTK_DISABLED_ENV, SF_RTK_DISABLED_ENV } from './rtk.js'
|
import { bootstrapRtk, SF_RTK_DISABLED_ENV } from './rtk.js'
|
||||||
import { loadEffectiveSFPreferences } from './resources/extensions/sf/preferences.js'
|
import { loadEffectiveSFPreferences } from './resources/extensions/sf/preferences.js'
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
@ -145,14 +145,13 @@ if (process.argv.includes('--help') || process.argv.includes('-h')) {
|
||||||
let rtkBootstrapPromise: Promise<void> | undefined
|
let rtkBootstrapPromise: Promise<void> | undefined
|
||||||
async function doRtkBootstrap(): Promise<void> {
|
async function doRtkBootstrap(): Promise<void> {
|
||||||
// RTK is opt-in via experimental.rtk preference. Default: disabled.
|
// RTK is opt-in via experimental.rtk preference. Default: disabled.
|
||||||
// Honor SF_RTK_DISABLED (or SF_RTK_DISABLED) if already explicitly set in the environment
|
// Honor SF_RTK_DISABLED if already explicitly set in the environment
|
||||||
// (env var takes precedence over preferences for manual override).
|
// (env var takes precedence over preferences for manual override).
|
||||||
if (!process.env[SF_RTK_DISABLED_ENV] && !process.env[SF_RTK_DISABLED_ENV]) {
|
if (!process.env[SF_RTK_DISABLED_ENV]) {
|
||||||
const prefs = loadEffectiveSFPreferences()
|
const prefs = loadEffectiveSFPreferences()
|
||||||
const rtkEnabled = prefs?.preferences.experimental?.rtk === true
|
const rtkEnabled = prefs?.preferences.experimental?.rtk === true
|
||||||
if (!rtkEnabled) {
|
if (!rtkEnabled) {
|
||||||
process.env[SF_RTK_DISABLED_ENV] = '1'
|
process.env[SF_RTK_DISABLED_ENV] = '1'
|
||||||
process.env[SF_RTK_DISABLED_ENV] = '1'
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -178,10 +177,10 @@ if (cliFlags.messages[0] === 'update') {
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
if (cliFlags.messages[0] === 'graph') {
|
if (cliFlags.messages[0] === 'graph') {
|
||||||
const sub = cliFlags.messages[1]
|
const sub = cliFlags.messages[1]
|
||||||
const { buildGraph, writeGraph, graphStatus, graphQuery, graphDiff, resolveSfRoot } = await import('@singularity-forge/mcp-server')
|
const { buildGraph, writeGraph, graphStatus, graphQuery, graphDiff, resolveGsdRoot } = await import('@singularity-forge/mcp-server')
|
||||||
|
|
||||||
const projectDir = process.cwd()
|
const projectDir = process.cwd()
|
||||||
const sfRoot = resolveSfRoot(projectDir)
|
const sfRoot = resolveGsdRoot(projectDir)
|
||||||
|
|
||||||
if (!sub || sub === 'build') {
|
if (!sub || sub === 'build') {
|
||||||
try {
|
try {
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@
|
||||||
* and bootstraps the .sf/ directory structure when needed.
|
* and bootstraps the .sf/ directory structure when needed.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { readFileSync, mkdirSync } from 'node:fs'
|
import { readFileSync, mkdirSync, existsSync, renameSync } from 'node:fs'
|
||||||
import { join, resolve } from 'node:path'
|
import { join, resolve } from 'node:path'
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
@ -51,9 +51,17 @@ export async function loadContext(options: ContextOptions): Promise<string> {
|
||||||
/**
|
/**
|
||||||
* Bootstrap .sf/ directory structure for headless new-milestone.
|
* Bootstrap .sf/ directory structure for headless new-milestone.
|
||||||
* Mirrors the bootstrap logic from guided-flow.ts showSmartEntry().
|
* Mirrors the bootstrap logic from guided-flow.ts showSmartEntry().
|
||||||
|
* Auto-migrates legacy .gsd/ directories to .sf/ on first encounter.
|
||||||
*/
|
*/
|
||||||
export function bootstrapGsdProject(basePath: string): void {
|
export function bootstrapProject(basePath: string): void {
|
||||||
const gsdDir = join(basePath, '.sf')
|
const sfDir = join(basePath, '.sf')
|
||||||
mkdirSync(join(gsdDir, 'milestones'), { recursive: true })
|
const legacyDir = join(basePath, '.gsd')
|
||||||
mkdirSync(join(gsdDir, 'runtime'), { recursive: true })
|
|
||||||
|
if (!existsSync(sfDir) && existsSync(legacyDir)) {
|
||||||
|
renameSync(legacyDir, sfDir)
|
||||||
|
process.stderr.write('[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
mkdirSync(join(sfDir, 'milestones'), { recursive: true })
|
||||||
|
mkdirSync(join(sfDir, 'runtime'), { recursive: true })
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -28,23 +28,23 @@ const jiti = createJiti(fileURLToPath(import.meta.url), { interopDefault: true,
|
||||||
const agentExtensionsDir = join(process.env.SF_AGENT_DIR || join(homedir(), '.sf', 'agent'), 'extensions', 'sf')
|
const agentExtensionsDir = join(process.env.SF_AGENT_DIR || join(homedir(), '.sf', 'agent'), 'extensions', 'sf')
|
||||||
const { existsSync } = await import('node:fs')
|
const { existsSync } = await import('node:fs')
|
||||||
const useAgentDir = existsSync(join(agentExtensionsDir, 'state.ts'))
|
const useAgentDir = existsSync(join(agentExtensionsDir, 'state.ts'))
|
||||||
const gsdExtensionPath = (...segments: string[]) =>
|
const sfExtensionPath = (...segments: string[]) =>
|
||||||
useAgentDir
|
useAgentDir
|
||||||
? join(agentExtensionsDir, ...segments)
|
? join(agentExtensionsDir, ...segments)
|
||||||
: resolveBundledSourceResource(import.meta.url, 'extensions', 'sf', ...segments)
|
: resolveBundledSourceResource(import.meta.url, 'extensions', 'sf', ...segments)
|
||||||
|
|
||||||
async function loadExtensionModules() {
|
async function loadExtensionModules() {
|
||||||
const stateModule = await jiti.import(gsdExtensionPath('state.ts'), {}) as any
|
const stateModule = await jiti.import(sfExtensionPath('state.ts'), {}) as any
|
||||||
const dispatchModule = await jiti.import(gsdExtensionPath('auto-dispatch.ts'), {}) as any
|
const dispatchModule = await jiti.import(sfExtensionPath('auto-dispatch.ts'), {}) as any
|
||||||
const sessionModule = await jiti.import(gsdExtensionPath('session-status-io.ts'), {}) as any
|
const sessionModule = await jiti.import(sfExtensionPath('session-status-io.ts'), {}) as any
|
||||||
const prefsModule = await jiti.import(gsdExtensionPath('preferences.ts'), {}) as any
|
const prefsModule = await jiti.import(sfExtensionPath('preferences.ts'), {}) as any
|
||||||
const autoStartModule = await jiti.import(gsdExtensionPath('auto-start.ts'), {}) as any
|
const autoStartModule = await jiti.import(sfExtensionPath('auto-start.ts'), {}) as any
|
||||||
return {
|
return {
|
||||||
openProjectDbIfPresent: autoStartModule.openProjectDbIfPresent as (basePath: string) => Promise<void>,
|
openProjectDbIfPresent: autoStartModule.openProjectDbIfPresent as (basePath: string) => Promise<void>,
|
||||||
deriveState: stateModule.deriveState as (basePath: string) => Promise<SFState>,
|
deriveState: stateModule.deriveState as (basePath: string) => Promise<SFState>,
|
||||||
resolveDispatch: dispatchModule.resolveDispatch as (opts: any) => Promise<any>,
|
resolveDispatch: dispatchModule.resolveDispatch as (opts: any) => Promise<any>,
|
||||||
readAllSessionStatuses: sessionModule.readAllSessionStatuses as (basePath: string) => any[],
|
readAllSessionStatuses: sessionModule.readAllSessionStatuses as (basePath: string) => any[],
|
||||||
loadEffectiveGSDPreferences: prefsModule.loadEffectiveGSDPreferences as () => any,
|
loadEffectiveSFPreferences: prefsModule.loadEffectiveSFPreferences as () => any,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -83,7 +83,7 @@ export async function handleQuery(basePath: string): Promise<QueryResult> {
|
||||||
deriveState,
|
deriveState,
|
||||||
resolveDispatch,
|
resolveDispatch,
|
||||||
readAllSessionStatuses,
|
readAllSessionStatuses,
|
||||||
loadEffectiveGSDPreferences,
|
loadEffectiveSFPreferences,
|
||||||
} = await loadExtensionModules()
|
} = await loadExtensionModules()
|
||||||
await openProjectDbIfPresent(basePath)
|
await openProjectDbIfPresent(basePath)
|
||||||
const state = await deriveState(basePath)
|
const state = await deriveState(basePath)
|
||||||
|
|
@ -96,7 +96,7 @@ export async function handleQuery(basePath: string): Promise<QueryResult> {
|
||||||
reason: state.phase === 'complete' ? 'All milestones complete.' : state.nextAction,
|
reason: state.phase === 'complete' ? 'All milestones complete.' : state.nextAction,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const loaded = loadEffectiveGSDPreferences()
|
const loaded = loadEffectiveSFPreferences()
|
||||||
const dispatch = await resolveDispatch({
|
const dispatch = await resolveDispatch({
|
||||||
basePath,
|
basePath,
|
||||||
mid: state.activeMilestone.id,
|
mid: state.activeMilestone.id,
|
||||||
|
|
|
||||||
|
|
@ -12,12 +12,13 @@
|
||||||
* 11 — cancelled (SIGINT/SIGTERM received)
|
* 11 — cancelled (SIGINT/SIGTERM received)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { existsSync, mkdirSync, writeFileSync } from 'node:fs'
|
import { existsSync, mkdirSync, writeFileSync, renameSync } from 'node:fs'
|
||||||
import { join } from 'node:path'
|
import { join } from 'node:path'
|
||||||
import { resolve } from 'node:path'
|
import { resolve } from 'node:path'
|
||||||
import { ChildProcess } from 'node:child_process'
|
import { ChildProcess } from 'node:child_process'
|
||||||
|
|
||||||
import { RpcClient, SessionManager } from '@sf-run/pi-coding-agent'
|
import { SessionManager } from '@sf-run/pi-coding-agent'
|
||||||
|
import { RpcClient } from '@singularity-forge/rpc-client'
|
||||||
import type { SessionInfo } from '@sf-run/pi-coding-agent'
|
import type { SessionInfo } from '@sf-run/pi-coding-agent'
|
||||||
import { getProjectSessionsDir } from './project-sessions.js'
|
import { getProjectSessionsDir } from './project-sessions.js'
|
||||||
import { loadAndValidateAnswerFile, AnswerInjector } from './headless-answers.js'
|
import { loadAndValidateAnswerFile, AnswerInjector } from './headless-answers.js'
|
||||||
|
|
@ -56,7 +57,7 @@ import type { ExtensionUIRequest, ProgressContext } from './headless-ui.js'
|
||||||
|
|
||||||
import {
|
import {
|
||||||
loadContext,
|
loadContext,
|
||||||
bootstrapGsdProject,
|
bootstrapProject,
|
||||||
} from './headless-context.js'
|
} from './headless-context.js'
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
@ -305,26 +306,32 @@ async function runHeadlessOnce(options: HeadlessOptions, restartCount: number):
|
||||||
}
|
}
|
||||||
|
|
||||||
// Bootstrap .sf/ if needed
|
// Bootstrap .sf/ if needed
|
||||||
const gsdDir = join(process.cwd(), '.sf')
|
const sfDir = join(process.cwd(), '.sf')
|
||||||
if (!existsSync(gsdDir)) {
|
if (!existsSync(sfDir)) {
|
||||||
if (!options.json) {
|
if (!options.json) {
|
||||||
process.stderr.write('[headless] Bootstrapping .sf/ project structure...\n')
|
process.stderr.write('[headless] Bootstrapping .sf/ project structure...\n')
|
||||||
}
|
}
|
||||||
bootstrapGsdProject(process.cwd())
|
bootstrapProject(process.cwd())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write context to temp file for the RPC child to read
|
// Write context to temp file for the RPC child to read
|
||||||
const runtimeDir = join(gsdDir, 'runtime')
|
const runtimeDir = join(sfDir, 'runtime')
|
||||||
mkdirSync(runtimeDir, { recursive: true })
|
mkdirSync(runtimeDir, { recursive: true })
|
||||||
writeFileSync(join(runtimeDir, 'headless-context.md'), contextContent, 'utf-8')
|
writeFileSync(join(runtimeDir, 'headless-context.md'), contextContent, 'utf-8')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate .sf/ directory (skip for new-milestone since we just bootstrapped it)
|
// Validate .sf/ directory (skip for new-milestone since we just bootstrapped it)
|
||||||
const gsdDir = join(process.cwd(), '.sf')
|
const sfDir = join(process.cwd(), '.sf')
|
||||||
if (!isNewMilestone && !existsSync(gsdDir)) {
|
const legacyDir = join(process.cwd(), '.gsd')
|
||||||
process.stderr.write('[headless] Error: No .sf/ directory found in current directory.\n')
|
if (!isNewMilestone && !existsSync(sfDir)) {
|
||||||
process.stderr.write("[headless] Run 'sf' interactively first to initialize a project.\n")
|
if (existsSync(legacyDir)) {
|
||||||
process.exit(1)
|
renameSync(legacyDir, sfDir)
|
||||||
|
process.stderr.write('[headless] Migrated .gsd/ → .sf/ (legacy GSD2 project detected)\n')
|
||||||
|
} else {
|
||||||
|
process.stderr.write('[headless] Error: No .sf/ directory found in current directory.\n')
|
||||||
|
process.stderr.write("[headless] Run 'sf' interactively first to initialize a project.\n")
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Query: read-only state snapshot, no RPC child needed
|
// Query: read-only state snapshot, no RPC child needed
|
||||||
|
|
|
||||||
|
|
@ -8,14 +8,14 @@ import { existsSync, readFileSync, mkdirSync, symlinkSync, cpSync } from 'fs'
|
||||||
// Fast-path: handle --version/-v and --help/-h before importing any heavy
|
// Fast-path: handle --version/-v and --help/-h before importing any heavy
|
||||||
// dependencies. This avoids loading the entire pi-coding-agent barrel import
|
// dependencies. This avoids loading the entire pi-coding-agent barrel import
|
||||||
// (~1s) just to print a version string.
|
// (~1s) just to print a version string.
|
||||||
const gsdRoot = resolve(dirname(fileURLToPath(import.meta.url)), '..')
|
const sfRootDir = resolve(dirname(fileURLToPath(import.meta.url)), '..')
|
||||||
const args = process.argv.slice(2)
|
const args = process.argv.slice(2)
|
||||||
const firstArg = args[0]
|
const firstArg = args[0]
|
||||||
|
|
||||||
// Read package.json once — reused for version, banner, and SF_VERSION below
|
// Read package.json once — reused for version, banner, and SF_VERSION below
|
||||||
let sfVersion = '0.0.0'
|
let sfVersion = '0.0.0'
|
||||||
try {
|
try {
|
||||||
const pkg = JSON.parse(readFileSync(join(gsdRoot, 'package.json'), 'utf-8'))
|
const pkg = JSON.parse(readFileSync(join(sfRootDir, 'package.json'), 'utf-8'))
|
||||||
sfVersion = pkg.version || '0.0.0'
|
sfVersion = pkg.version || '0.0.0'
|
||||||
} catch { /* ignore */ }
|
} catch { /* ignore */ }
|
||||||
|
|
||||||
|
|
@ -113,7 +113,7 @@ process.env.SF_CODING_AGENT_DIR = agentDir
|
||||||
// SF_PKG_ROOT — absolute path to sf-run package root. Used by deployed extensions
|
// SF_PKG_ROOT — absolute path to sf-run package root. Used by deployed extensions
|
||||||
// (e.g. auto.ts resume path) to import modules like resource-loader.js that live
|
// (e.g. auto.ts resume path) to import modules like resource-loader.js that live
|
||||||
// in the package tree, not in the deployed ~/.sf/agent/ tree.
|
// in the package tree, not in the deployed ~/.sf/agent/ tree.
|
||||||
process.env.SF_PKG_ROOT = gsdRoot
|
process.env.SF_PKG_ROOT = sfRootDir
|
||||||
|
|
||||||
// RTK environment — make ~/.sf/agent/bin visible to all child-process paths,
|
// RTK environment — make ~/.sf/agent/bin visible to all child-process paths,
|
||||||
// not just the bash tool, and force-disable RTK telemetry for SF-managed use.
|
// not just the bash tool, and force-disable RTK telemetry for SF-managed use.
|
||||||
|
|
@ -123,8 +123,8 @@ applyRtkProcessEnv(process.env)
|
||||||
// Without this, extensions (e.g. browser-tools) can't resolve dependencies like
|
// Without this, extensions (e.g. browser-tools) can't resolve dependencies like
|
||||||
// `playwright` because jiti resolves modules from pi-coding-agent's location, not sf's.
|
// `playwright` because jiti resolves modules from pi-coding-agent's location, not sf's.
|
||||||
// Prepending sf's node_modules to NODE_PATH fixes this for all extensions.
|
// Prepending sf's node_modules to NODE_PATH fixes this for all extensions.
|
||||||
const gsdNodeModules = join(gsdRoot, 'node_modules')
|
const sfNodeModules = join(sfRootDir, 'node_modules')
|
||||||
process.env.NODE_PATH = [gsdNodeModules, process.env.NODE_PATH]
|
process.env.NODE_PATH = [sfNodeModules, process.env.NODE_PATH]
|
||||||
.filter(Boolean)
|
.filter(Boolean)
|
||||||
.join(delimiter)
|
.join(delimiter)
|
||||||
// Force Node to re-evaluate module search paths with the updated NODE_PATH.
|
// Force Node to re-evaluate module search paths with the updated NODE_PATH.
|
||||||
|
|
@ -145,8 +145,8 @@ process.env.SF_BIN_PATH = process.env.SF_BIN_PATH || process.argv[1]
|
||||||
// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched sf extension
|
// SF_WORKFLOW_PATH — absolute path to bundled SF-WORKFLOW.md, used by patched sf extension
|
||||||
// when dispatching workflow prompts. Prefers dist/resources/ (stable, set at build time)
|
// when dispatching workflow prompts. Prefers dist/resources/ (stable, set at build time)
|
||||||
// over src/resources/ (live working tree) — see resource-loader.ts for rationale.
|
// over src/resources/ (live working tree) — see resource-loader.ts for rationale.
|
||||||
const distRes = join(gsdRoot, 'dist', 'resources')
|
const distRes = join(sfRootDir, 'dist', 'resources')
|
||||||
const srcRes = join(gsdRoot, 'src', 'resources')
|
const srcRes = join(sfRootDir, 'src', 'resources')
|
||||||
const resourcesDir = existsSync(distRes) ? distRes : srcRes
|
const resourcesDir = existsSync(distRes) ? distRes : srcRes
|
||||||
process.env.SF_WORKFLOW_PATH = join(resourcesDir, 'SF-WORKFLOW.md')
|
process.env.SF_WORKFLOW_PATH = join(resourcesDir, 'SF-WORKFLOW.md')
|
||||||
|
|
||||||
|
|
@ -182,8 +182,8 @@ if (process.env.HTTP_PROXY || process.env.HTTPS_PROXY || process.env.http_proxy
|
||||||
// On Windows without Developer Mode or admin rights, symlinkSync will throw even for
|
// On Windows without Developer Mode or admin rights, symlinkSync will throw even for
|
||||||
// 'junction' type — so we fall back to cpSync (a full directory copy) which works
|
// 'junction' type — so we fall back to cpSync (a full directory copy) which works
|
||||||
// everywhere without elevated permissions.
|
// everywhere without elevated permissions.
|
||||||
const sfRunScopeDir = join(gsdNodeModules, '@sf-run')
|
const sfRunScopeDir = join(sfNodeModules, '@sf-run')
|
||||||
const packagesDir = join(gsdRoot, 'packages')
|
const packagesDir = join(sfRootDir, 'packages')
|
||||||
const wsPackages = ['native', 'pi-agent-core', 'pi-ai', 'pi-coding-agent', 'pi-tui']
|
const wsPackages = ['native', 'pi-agent-core', 'pi-ai', 'pi-coding-agent', 'pi-tui']
|
||||||
try {
|
try {
|
||||||
if (!existsSync(sfRunScopeDir)) mkdirSync(sfRunScopeDir, { recursive: true })
|
if (!existsSync(sfRunScopeDir)) mkdirSync(sfRunScopeDir, { recursive: true })
|
||||||
|
|
|
||||||
|
|
@ -750,14 +750,22 @@ export function buildResourceLoader(
|
||||||
const registry = loadRegistry()
|
const registry = loadRegistry()
|
||||||
const piAgentDir = join(homedir(), '.pi', 'agent')
|
const piAgentDir = join(homedir(), '.pi', 'agent')
|
||||||
const piExtensionsDir = join(piAgentDir, 'extensions')
|
const piExtensionsDir = join(piAgentDir, 'extensions')
|
||||||
|
const piLegacyExtensionsDir = join(homedir(), '.pi', 'extensions')
|
||||||
const bundledKeys = getBundledExtensionKeys()
|
const bundledKeys = getBundledExtensionKeys()
|
||||||
const piExtensionPaths = discoverExtensionEntryPaths(piExtensionsDir)
|
|
||||||
.filter((entryPath) => !bundledKeys.has(getExtensionKey(entryPath, piExtensionsDir)))
|
const discoverPiExtensions = (dir: string): string[] =>
|
||||||
.filter((entryPath) => {
|
discoverExtensionEntryPaths(dir)
|
||||||
const manifest = readManifestFromEntryPath(entryPath)
|
.filter((entryPath) => !bundledKeys.has(getExtensionKey(entryPath, dir)))
|
||||||
if (!manifest) return true
|
.filter((entryPath) => {
|
||||||
return isExtensionEnabled(registry, manifest.id)
|
const manifest = readManifestFromEntryPath(entryPath)
|
||||||
})
|
if (!manifest) return true
|
||||||
|
return isExtensionEnabled(registry, manifest.id)
|
||||||
|
})
|
||||||
|
|
||||||
|
const piExtensionPaths = [
|
||||||
|
...discoverPiExtensions(piExtensionsDir),
|
||||||
|
...discoverPiExtensions(piLegacyExtensionsDir),
|
||||||
|
]
|
||||||
|
|
||||||
// Print-mode callers pass their own additional extension paths (e.g. --extension
|
// Print-mode callers pass their own additional extension paths (e.g. --extension
|
||||||
// flags). Non-print mode uses the implicit pi-extensions discovery above.
|
// flags). Non-print mode uses the implicit pi-extensions discovery above.
|
||||||
|
|
|
||||||
|
|
@ -2,12 +2,9 @@
|
||||||
"id": "genai-proxy",
|
"id": "genai-proxy",
|
||||||
"name": "GenAI Proxy",
|
"name": "GenAI Proxy",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"description": "Exposes SF's AI engine as a standard Google GenAI / OpenAI compatible endpoint.",
|
"description": "OpenAI-compatible proxy for Gemini CLI and GenAI clients",
|
||||||
"tier": "community",
|
"tier": "bundled",
|
||||||
"requires": {
|
"requires": {
|
||||||
"platform": "all"
|
"platform": ">=2.29.0"
|
||||||
},
|
|
||||||
"provides": {
|
|
||||||
"commands": ["/genai-proxy"]
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue