chore: node 24 native APIs, import.meta.dirname, parsers rename, dep updates

- Replace fileURLToPath(import.meta.url) with import.meta.dirname across
  scripts and extensions
- Rename parsers-legacy.ts → parsers.ts
- Remove deleted plan/spec docs (cicd-pipeline)
- Update package.json engines and deps across workspace packages
- Update web/package-lock.json

💘 Generated with Crush

Assisted-by: GLM-5.1 via Crush <crush@charm.land>
This commit is contained in:
Mikael Hugo 2026-05-02 06:18:25 +02:00
parent 980772cc90
commit d73a73d7f3
123 changed files with 947 additions and 2756 deletions

View file

@ -40,7 +40,7 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 22
node-version: 24
registry-url: https://registry.npmjs.org
cache: 'npm'
@ -111,7 +111,7 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 22
node-version: 24
registry-url: https://registry.npmjs.org
cache: 'npm'

View file

@ -39,7 +39,7 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 22
node-version: 24
registry-url: https://registry.npmjs.org
cache: 'npm'
@ -102,7 +102,7 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 22
node-version: 24
registry-url: https://registry.npmjs.org
cache: 'npm'

View file

@ -30,7 +30,7 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 22
node-version: 24
registry-url: https://registry.npmjs.org
cache: 'npm'

1
.node-version Normal file
View file

@ -0,0 +1 @@
24

1
.nvmrc Normal file
View file

@ -0,0 +1 @@
24

View file

@ -711,7 +711,7 @@ sf (CLI binary)
## Requirements
- **Node.js** ≥ 22.0.0 (24 LTS recommended)
- **Node.js** ≥ 24.0.0 (24 LTS recommended)
- **An LLM provider** — any of the 20+ supported providers (see [Use Any Model](#use-any-model))
- **Git** — initialized automatically if missing

View file

@ -16,7 +16,7 @@
# derivation degraded mode under bun).
# - bun's native-addon loader doesn't inherit the system library
# search path under Nix (libz.so.1 not found for forge_engine.node).
# - node 22.5+ has node:sqlite built-in; node 24 supports
# - node 24+ has node:sqlite built-in and supports
# --experimental-strip-types so .ts runs directly.
# - The src/resources/extensions/sf/tests/resolve-ts.mjs loader hook
# already handles .js → .ts import-specifier remapping for runtime
@ -27,7 +27,7 @@
# - Exports SF_BIN_PATH=dist/loader.js so all child processes (including
# subagent pi instances) use the Node.js entry point directly.
#
# Requirements: node >= 22.5 on PATH (24+ recommended for strip-types),
# Requirements: node >= 24 on PATH,
# node_modules populated.
set -euo pipefail

View file

@ -69,4 +69,3 @@ Guides for the underlying Pi SDK that SF is built on. Located in [`dev/`](./dev/
|-------|-------------|
| [Building Coding Agents](./dev/building-coding-agents/README.md) | Research notes on agent design — decomposition, context engineering, cost/quality tradeoffs |
| [Proposals](./dev/proposals/) | Feature proposals and workflow definitions |
| [Superpowers](./dev/superpowers/) | Plans and specs for superpower features |

File diff suppressed because it is too large Load diff

View file

@ -1,357 +0,0 @@
# CI/CD Pipeline Design — SF
## Overview
A three-stage promotion pipeline for SF that moves merged PRs through Dev → Test → Prod using npm dist-tags as environment markers, GitHub Environments for approval gates, and Docker images for both CI acceleration and end-user distribution.
## Goals
1. Every merged PR is immediately installable via `npx sf-run@dev`
2. Verified builds auto-promote to `@next` for early adopters
3. Production releases require manual approval and optional live-LLM validation
4. CI builds are fast and reproducible via pre-built Docker builder image
5. End users can run SF via Docker as an alternative to npm
6. LLM-dependent behavior is testable without API calls via recorded fixtures
## Non-Goals
- Replacing the existing PR gate workflow (`ci.yml`)
- Replacing the native binary cross-compilation workflow (`build-native.yml`)
- Cross-platform native binary builds (macOS/Windows remain on `build-native.yml`)
- Hosting SF as a web service
- Automated prompt regression testing (future work)
## Pipeline Architecture
```
┌─────────────────────────────────────────────────────────────┐
│ PR Merged to main │
│ ci.yml runs (build, test, typecheck) │
└──────────────────────────┬──────────────────────────────────┘
▼ (workflow_run: ci.yml success)
┌──────────────────────────────────────────────────────────────┐
│ STAGE: DEV Environment: dev │
│ │
│ 1. Version stamp: <current>-dev.<short-sha>
│ 2. npm publish sf-run@<version>-dev.<sha> --tag dev │
│ 3. Smoke test: npx sf-run@dev --version │
│ │
│ Note: Build/test/typecheck already ran in ci.yml │
│ Docker: Build CI builder image (only if Dockerfile changed) │
└──────────────────────────┬──────────────────────────────────┘
▼ (auto-promote if all green)
┌──────────────────────────────────────────────────────────────┐
│ STAGE: TEST Environment: test │
│ │
│ 1. Install sf-run@dev from registry │
│ 2. CLI smoke tests (--version, init, help, config) │
│ 3. Dry-run fixture suite (recorded LLM conversations) │
│ - Agent session replay with fixture provider │
│ - Tool use round-trips verified │
│ - Extension loading validated │
│ 4. npm dist-tag add sf-run@<version> next │
│ │
│ Docker: Build + push runtime image to GHCR as :next │
└──────────────────────────┬──────────────────────────────────┘
▼ (manual approval required)
┌──────────────────────────────────────────────────────────────┐
│ STAGE: PROD Environment: prod │
│ │
│ 1. (Optional) Real LLM integration tests │
│ - Gated behind workflow input flag │
│ - Uses ANTHROPIC_API_KEY / OPENAI_API_KEY secrets │
│ - Budget-capped: small models, short conversations │
│ 2. npm dist-tag add sf-run@<version> latest │
│ 3. GitHub Release created with changelog │
│ 4. Docker: tag runtime image as :latest + :v<version>
│ 5. Post-publish smoke test against @latest
└──────────────────────────────────────────────────────────────┘
```
### Version Strategy
| Dist-tag | When published | Version format | Risk level |
|----------|---------------|----------------|------------|
| `@dev` | Every merged PR | `2.27.0-dev.a3f2c1b` | Bleeding edge |
| `@next` | Auto-promoted from Dev | Same version, new tag | Candidate |
| `@latest` | Manually approved from Test | Same version, new tag | Production |
The `-dev.` prerelease identifier is distinct from the existing `-next.` convention used in `build-native.yml`. The two pipelines do not overlap — `build-native.yml` only triggers on `v*` tags and checks for `-next.` to determine npm dist-tag. The `-dev.` versions are published exclusively by `pipeline.yml`.
### Native Binary Strategy for Dev Publishes
Dev versions (`@dev` tag) use the native binaries from the most recent stable `build-native.yml` release. The `optionalDependencies` in `package.json` use `>=` ranges, so a `-dev.` version of `sf-run` resolves the latest stable `@sf-build/engine-*` packages from the registry.
If a PR modifies Rust native crate code (`rust-engine/` directory), the dev publish will bundle stale native binaries. This is acceptable because:
- Native crate changes are infrequent and always accompanied by a `v*` tag release
- The Test stage validates the installed package works end-to-end
- Full native binary validation happens via `build-native.yml` on the version tag
### Concurrency Control
```yaml
concurrency:
group: pipeline-${{ github.sha }}
cancel-in-progress: false
```
Policy:
- Each pipeline run is keyed to its commit SHA — no two runs for the same commit race
- Newer merges do NOT cancel in-progress promotions — a version already in the Test stage completes its promotion
- If Run A is promoting version X to `@next` while Run B publishes version Y to `@dev`, they operate independently — `@next` and `@dev` point to different versions, which is correct
- The Prod stage always promotes whatever version is currently at `@next`, so approving promotion after a newer version has already moved to `@next` promotes the newer one (last-writer-wins, which is the desired behavior)
### Failure Modes & Recovery
| Failure | Impact | Recovery |
|---------|--------|----------|
| Dev publish succeeds, smoke test fails | Broken version on `@dev` tag | Next successful merge overwrites `@dev`. Manual fix: `npm dist-tag add sf-run@<last-good> dev` |
| Test stage fails after promoting to `@next` | Broken version on `@next` tag | Manual: `npm dist-tag add sf-run@<last-good> next`. `@latest` is never affected. |
| Prod promotion publishes `@latest` then found broken | Broken production release | Manual: `npm dist-tag add sf-run@<previous-stable> latest` and `docker tag ghcr.io/singularity-forge/sf-run:<previous> latest && docker push`. Post-mortem required. |
| Docker push succeeds, npm dist-tag fails | Images and npm out of sync | Re-run the failed job (GitHub Actions retry). Images are tagged by version so stale tags are harmless. |
| GHCR push fails | No Docker image for this version | Non-blocking — npm publish is the primary distribution. Docker image can be rebuilt manually. |
Rollback responsibility: any maintainer with npm publish rights and GHCR push access. The Prod environment's required-reviewers list doubles as the rollback-authorized list.
### Relationship to Existing Workflows
| File | Trigger | Purpose | Status |
|------|---------|---------|--------|
| `ci.yml` | PR opened/updated, push to main | Pre-merge gate: build, test, typecheck | **Unchanged** |
| `build-native.yml` | `v*` tag or manual dispatch | Cross-compile native binaries for 5 platforms | **Unchanged** |
| `pipeline.yml` | `workflow_run` (after ci.yml succeeds on main) | Post-merge promotion: Dev → Test → Prod | **New** |
The pipeline triggers via `workflow_run` after `ci.yml` completes successfully on `main`, avoiding duplicate build/test work. The Dev stage only performs version stamping, publishing, and smoke testing.
## Docker Images
### Multi-Stage Dockerfile
Two images from a single `Dockerfile` at the repo root.
#### CI Builder Image
- **Name:** `ghcr.io/singularity-forge/sf-ci-builder`
- **Base:** `node:22-bookworm`
- **Contains:** Node 22, Rust stable toolchain, `aarch64-linux-gnu` cross-compiler
- **Size:** ~2 GB
- **Tags:** `:latest`, `:<YYYY-MM-DD>` (date-stamped for rollback)
- **Rebuilt:** Only when `Dockerfile` changes
- **Used by:** `pipeline.yml` Dev stage, optionally `ci.yml`
- **Purpose:** Eliminates 3-5 min toolchain install on every CI run
The builder image does NOT include Playwright system deps (not needed for current CI jobs). If browser-based E2E tests are added later, Playwright deps can be added at that point.
#### Builder Image Versioning
Builder images are tagged with both `:latest` and a date stamp (e.g., `:2026-03-17`). The `pipeline.yml` workflow pins to a specific date-stamped tag. When the Dockerfile is updated, the PR that changes it also updates the tag reference in `pipeline.yml`. This prevents a broken Dockerfile change from silently breaking all subsequent runs.
#### Runtime Image
- **Name:** `ghcr.io/singularity-forge/sf-run`
- **Base:** `node:22-slim`
- **Contains:** Node 22, git, `sf-run` installed globally
- **Size:** ~250 MB
- **Tags:** `:latest`, `:next`, `:v2.27.0`
- **Published:** On every Prod promotion
- **Purpose:** `docker run ghcr.io/singularity-forge/sf-run` as alternative to `npx`
### Why These Base Images
- **Bookworm for CI:** The Rust native crates depend on vendored `libgit2`, image processing, and cross-compilation to ARM64. Debian Bookworm provides the full toolchain via apt. Alpine breaks due to musl vs glibc incompatibilities with N-API bindings.
- **Slim for runtime:** Only needs Node + git. Native `.node` binaries are prebuilt and bundled in the npm package — no Rust toolchain needed at runtime.
## LLM Fixture Recording & Replay System
### Architecture
The fixture system hooks into the `pi-ai` provider abstraction layer to capture and replay LLM conversations without hitting real APIs.
```
Agent Session
pi-ai provider abstraction
FixtureProvider (intercept layer)
├── record mode → Real API + save to fixture JSON
└── replay mode → Load fixture JSON (no API call)
```
### Integration Design
The `FixtureProvider` implements the `Provider` interface from `@sf/pi-ai` (the same interface all 20+ built-in providers implement). It registers itself via environment variable detection at provider initialization:
```typescript
// Pseudocode — actual implementation will follow pi-ai patterns
import type { Provider, StreamingResponse } from "@sf/pi-ai";
class FixtureProvider implements Provider {
// In record mode: wraps the real provider, saves responses
// In replay mode: returns saved responses directly
async *stream(request: ProviderRequest): AsyncGenerator<StreamingResponse> {
if (this.mode === "replay") {
// Yield fixture response chunks (simulated streaming)
yield* this.replayTurn(this.turnIndex++);
} else {
// Proxy to real provider, capture response
const chunks = [];
for await (const chunk of this.realProvider.stream(request)) {
chunks.push(chunk);
yield chunk;
}
this.saveTurn(request, chunks);
}
}
}
```
Key integration details:
- **Streaming:** Fixture replay simulates streaming by yielding saved response chunks with minimal delay. This exercises the same consumer code paths as real streaming.
- **Registration:** When `SF_FIXTURE_MODE` is set, the fixture provider wraps the configured real provider. No changes to provider selection logic needed.
- **Provider-agnostic:** Fixtures are captured at the `Provider` interface level (above HTTP transport), so they work regardless of which underlying provider was used during recording.
### Modes
| Mode | Trigger | Behavior |
|------|---------|----------|
| **Record** | `SF_FIXTURE_MODE=record SF_FIXTURE_DIR=./fixtures` | Wraps real provider, saves request/response pairs |
| **Replay** | `SF_FIXTURE_MODE=replay SF_FIXTURE_DIR=./fixtures` | Returns saved responses, zero API calls |
| **Off** | Default (no env vars) | Normal operation, no interception |
### Fixture Format
One JSON file per recorded session:
```json
{
"name": "agent-creates-file",
"recorded": "2026-03-17T00:00:00Z",
"provider": "anthropic",
"model": "claude-sonnet-4-6",
"turns": [
{
"request": {
"messages": [{ "role": "user", "content": "Create hello.ts" }],
"tools": ["Write", "Read"],
"model": "claude-sonnet-4-6"
},
"response": {
"content": [
{ "type": "text", "text": "I'll create hello.ts for you." },
{ "type": "tool_use", "name": "Write", "input": { "file_path": "hello.ts", "content": "console.log('hello')" } }
],
"stopReason": "toolUse",
"usage": { "input": 150, "output": 45 }
}
}
]
}
```
### Matching Strategy
Turn-index based. Response N is served for request N in sequence. If the conversation diverges from the fixture (e.g., unexpected turn count), the test fails explicitly with a descriptive error rather than silently producing wrong results.
Why not request-body hashing: request bodies contain timestamps, random IDs, and system prompt variations that cause brittle mismatches.
Why not a generic HTTP VCR: The `pi-ai` layer abstracts 20+ providers with different wire formats. Intercepting above the transport means fixtures are provider-agnostic.
### What Gets Tested via Fixtures
- Agent session lifecycle (start → tool calls → completion)
- Tool dispatch and response handling
- Multi-turn conversation flow
- Extension loading and routing
- Error handling paths (fixtures can include error responses)
### What Does NOT Get Tested (Deferred to Live Gate)
- Model output quality
- Prompt regression
- New tool compatibility with live APIs
### Fixture Storage
Committed to repo under `tests/fixtures/recordings/`. Each fixture is 5-50KB of JSON. Recording is a manual developer action, not automated in CI.
### Dev Version Cleanup
Old `-dev.` versions accumulate on npm with every merged PR. A scheduled workflow (`cleanup-dev-versions.yml`) runs weekly and unpublishes dev versions older than 30 days via `npm unpublish sf-run@<old-dev-version>`. This prevents registry bloat while keeping recent dev versions available.
## New Files & Scripts
### Directory Structure
```
tests/
├── smoke/ # CLI smoke tests (Stage: Test)
│ ├── run.ts
│ ├── test-version.ts
│ ├── test-help.ts
│ └── test-init.ts
├── fixtures/ # Recorded LLM replay tests (Stage: Test)
│ ├── run.ts # Test runner
│ ├── record.ts # Recording helper
│ ├── provider.ts # FixtureProvider intercept layer
│ └── recordings/
│ ├── agent-creates-file.json
│ ├── agent-reads-and-edits.json
│ ├── agent-handles-error.json
│ └── agent-multi-turn-tools.json
├── live/ # Real LLM tests (Stage: Prod, optional)
│ ├── run.ts
│ ├── test-anthropic-roundtrip.ts
│ └── test-openai-roundtrip.ts
scripts/
├── version-stamp.mjs # Stamps <version>-dev.<sha>
Dockerfile # Multi-stage: builder + runtime
.github/workflows/pipeline.yml # Promotion pipeline
.github/workflows/cleanup-dev-versions.yml # Weekly dev version pruning
```
All test files use `.ts` with `--experimental-strip-types` for consistency with the existing test convention in the project.
### New npm Scripts
```json
{
"test:smoke": "node --experimental-strip-types tests/smoke/run.ts",
"test:fixtures": "node --experimental-strip-types tests/fixtures/run.ts",
"test:fixtures:record": "SF_FIXTURE_MODE=record node --experimental-strip-types tests/fixtures/record.ts",
"test:live": "SF_LIVE_TESTS=1 node --experimental-strip-types tests/live/run.ts",
"pipeline:version-stamp": "node scripts/version-stamp.mjs",
"docker:build-runtime": "docker build --target runtime -t ghcr.io/singularity-forge/sf-run .",
"docker:build-builder": "docker build --target builder -t ghcr.io/singularity-forge/sf-ci-builder ."
}
```
## GitHub Configuration
| Setting | Value |
|---------|-------|
| Environment: `dev` | No protection rules |
| Environment: `test` | No protection rules (auto-promote) |
| Environment: `prod` | Required reviewers: maintainers |
| Secret: `NPM_TOKEN` | All environments |
| Secret: `ANTHROPIC_API_KEY` | Prod only |
| Secret: `OPENAI_API_KEY` | Prod only |
| GHCR | Enabled for org |
## Success Criteria
1. A merged PR is installable via `npx sf-run@dev` within 15 minutes (assumes warm CI builder image cache)
2. Fixture replay tests complete in under 60 seconds with zero API calls
3. The full Dev → Test promotion completes without human intervention
4. Prod promotion is blocked until a maintainer explicitly approves
5. `docker run ghcr.io/singularity-forge/sf-run --version` returns the correct version
6. Existing `ci.yml` and `build-native.yml` workflows continue to work unchanged
7. CI builder image reduces toolchain setup from ~3-5 min to ~30s pull

View file

@ -8,7 +8,7 @@ SF is an AI coding agent that handles planning, execution, verification, and shi
| Requirement | Minimum | Recommended |
|-------------|---------|-------------|
| **[Node.js](https://nodejs.org/)** | 22.0.0 | 24 LTS |
| **[Node.js](https://nodejs.org/)** | 24.0.0 | 24 LTS |
| **[Git](https://git-scm.com/)** | 2.20+ | Latest |
| **LLM API key** | Any supported provider | Anthropic (Claude) |

View file

@ -2,7 +2,7 @@
If you installed Node.js via Homebrew (`brew install node`), you're tracking the **latest current release** — which can include odd-numbered development versions (e.g. 23.x, 25.x). These aren't LTS and may have breaking changes or instability.
SF requires Node.js **v22 or later** and works best on an **LTS (even-numbered) release**. This guide shows how to pin Node 24 LTS using Homebrew.
SF requires Node.js **v24 or later** and works best on an **LTS (even-numbered) release**. This guide shows how to pin Node 24 LTS using Homebrew.
## Check your current version

View file

@ -71,7 +71,7 @@ source ~/.zshrc
**Common causes:**
- Missing workspace packages — fixed in v2.10.4+
- `postinstall` hangs on Linux (Playwright `--with-deps` triggering sudo) — fixed in v2.3.6+
- Node.js version too old — requires ≥ 22.0.0
- Node.js version too old — requires ≥ 24.0.0
### Provider errors during auto mode

View file

@ -8,7 +8,7 @@ SF 是一个 AI 编程代理,负责规划、执行、验证和交付,让你
| 要求 | 最低版本 | 推荐版本 |
|------|----------|----------|
| **[Node.js](https://nodejs.org/)** | 22.0.0 | 24 LTS |
| **[Node.js](https://nodejs.org/)** | 24.0.0 | 24 LTS |
| **[Git](https://git-scm.com/)** | 2.20+ | 最新版 |
| **LLM API key** | 任意受支持提供商 | Anthropic (Claude) |

View file

@ -2,7 +2,7 @@
如果你是通过 Homebrew 安装 Node.js(`brew install node`),那你跟踪的是**当前最新正式版本**,其中可能包含奇数版本的开发分支(例如 23.x、25.x)。这些版本并不是 LTS,可能带来破坏性变更或稳定性问题。
SF 要求 Node.js **v22 或更高版本**,并且在 **LTS(偶数版本)** 上运行效果最好。本指南展示如何用 Homebrew 固定到 Node 24 LTS。
SF 要求 Node.js **v24 或更高版本**,并且在 **LTS(偶数版本)** 上运行效果最好。本指南展示如何用 Homebrew 固定到 Node 24 LTS。
## 检查当前版本

View file

@ -75,7 +75,7 @@ source ~/.zshrc
- 缺少 workspace packages(已在 v2.10.4+ 修复)
- Linux 上 `postinstall` 卡住(Playwright `--with-deps` 触发 sudo,已在 v2.3.6+ 修复)
- Node.js 版本过低:要求 ≥ 22.0.0
- Node.js 版本过低:要求 ≥ 24.0.0
### 自动模式中的 provider 错误

View file

@ -60,6 +60,6 @@ The recommended workflow: run auto mode in one terminal, steer from another. See
## Requirements
- **Node.js** 22.0.0 or later (24 LTS recommended)
- **Node.js** 24.0.0 or later (24 LTS recommended)
- **Git** installed and configured
- An API key for at least one LLM provider (or use browser sign-in for Anthropic/GitHub Copilot)

View file

@ -6,7 +6,7 @@
npm install -g sf-run
```
Requires **Node.js 22.0.0 or later** (24 LTS recommended) and **Git**.
Requires **Node.js 24.0.0 or later** (24 LTS recommended) and **Git**.
{% hint style="info" %}
**`command not found: sf`?** Your shell may not have npm's global bin directory in `$PATH`. Run `npm prefix -g` to find it, then add `$(npm prefix -g)/bin` to your PATH. See [Troubleshooting](../reference/troubleshooting.md) for details.

View file

@ -91,9 +91,9 @@ It checks file structure, referential integrity, completion state consistency, g
</Accordion>
<Accordion title="Node.js version or git not found at startup">
**Cause:** SF v2.45+ checks for Node.js >= 22 and git availability at startup.
**Cause:** SF v2.45+ checks for Node.js >= 24 and git availability at startup.
**Fix:** Install Node.js 22+ (24 LTS recommended) and ensure `git` is in your PATH.
**Fix:** Install Node.js 24+ (24 LTS recommended) and ensure `git` is in your PATH.
</Accordion>
</AccordionGroup>

45
package-lock.json generated
View file

@ -77,7 +77,7 @@
"vitest": "^4.1.5"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
},
"optionalDependencies": {
"@anthropic-ai/claude-agent-sdk": "^0.2.83",
@ -16279,7 +16279,7 @@
"typescript": "^5.4.0"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
}
},
"packages/daemon/node_modules/@anthropic-ai/sdk": {
@ -16317,13 +16317,16 @@
"typescript": "^5.4.0"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
}
},
"packages/native": {
"name": "@singularity-forge/native",
"version": "2.75.0",
"license": "MIT",
"engines": {
"node": ">=24.0.0"
},
"optionalDependencies": {
"@singularity-forge/engine-darwin-arm64": ">=2.75.0",
"@singularity-forge/engine-darwin-x64": ">=2.75.0",
@ -16334,7 +16337,10 @@
},
"packages/pi-agent-core": {
"name": "@singularity-forge/pi-agent-core",
"version": "2.75.0"
"version": "2.75.0",
"engines": {
"node": ">=24.0.0"
}
},
"packages/pi-ai": {
"name": "@singularity-forge/pi-ai",
@ -16360,6 +16366,9 @@
},
"devDependencies": {
"@smithy/node-http-handler": "^4.5.0"
},
"engines": {
"node": ">=24.0.0"
}
},
"packages/pi-ai/node_modules/@smithy/node-http-handler": {
@ -16407,6 +16416,9 @@
"@types/hosted-git-info": "^3.0.5",
"@types/proper-lockfile": "^4.1.4",
"@types/sql.js": "^1.4.9"
},
"engines": {
"node": ">=24.0.0"
}
},
"packages/pi-coding-agent/node_modules/accepts": {
@ -16712,6 +16724,9 @@
"devDependencies": {
"@types/mime-types": "^2.1.4"
},
"engines": {
"node": ">=24.0.0"
},
"optionalDependencies": {
"koffi": "^2.9.0"
}
@ -16721,7 +16736,7 @@
"version": "2.75.0",
"license": "MIT",
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
}
},
"studio": {
@ -16736,7 +16751,7 @@
},
"devDependencies": {
"@tailwindcss/vite": "^4.2.1",
"@types/node": "^22.18.6",
"@types/node": "^24.12.2",
"@types/react": "^19.2.2",
"@types/react-dom": "^19.2.2",
"@vitejs/plugin-react": "^5.1.0",
@ -16744,24 +16759,20 @@
"electron-vite": "^5.0.0",
"tailwindcss": "^4.2.1",
"typescript": "^5.9.3"
},
"engines": {
"node": ">=24.0.0"
}
},
"studio/node_modules/@types/node": {
"version": "22.19.15",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.15.tgz",
"integrity": "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==",
"version": "24.12.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.12.2.tgz",
"integrity": "sha512-A1sre26ke7HDIuY/M23nd9gfB+nrmhtYyMINbjI1zHJxYteKR6qSMX56FsmjMcDb3SMcjJg5BiRRgOCC/yBD0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
"undici-types": "~7.16.0"
}
},
"studio/node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
}
}
}

View file

@ -37,7 +37,7 @@
"configDir": ".sf"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
},
"packageManager": "npm@10.9.3",
"scripts": {

View file

@ -39,7 +39,7 @@
"typescript": "^5.4.0"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
},
"files": [
"dist",

View file

@ -1,7 +1,6 @@
#!/usr/bin/env node
import { parseArgs } from 'node:util';
import { fileURLToPath } from 'node:url';
import { resolve, dirname } from 'node:path';
import { resolve } from 'node:path';
import { resolveConfigPath, loadConfig } from './config.js';
import { Logger } from './logger.js';
import { Daemon } from './daemon.js';
@ -40,8 +39,7 @@ async function main(): Promise<void> {
if (values.install) {
const configPath = resolveConfigPath(values.config);
const thisFile = fileURLToPath(import.meta.url);
const scriptPath = resolve(dirname(thisFile), 'cli.js');
const scriptPath = resolve(import.meta.dirname, 'cli.js');
install({
nodePath: process.execPath,

View file

@ -75,15 +75,15 @@ describe('generatePlist', () => {
});
it('uses the absolute node path from opts', () => {
const opts = basePlistOpts({ nodePath: '/home/user/.nvm/versions/node/v22.0.0/bin/node' });
const opts = basePlistOpts({ nodePath: '/home/user/.nvm/versions/node/v24.0.0/bin/node' });
const xml = generatePlist(opts);
assert.ok(xml.includes('<string>/home/user/.nvm/versions/node/v22.0.0/bin/node</string>'));
assert.ok(xml.includes('<string>/home/user/.nvm/versions/node/v24.0.0/bin/node</string>'));
});
it('includes NVM bin directory in PATH', () => {
const opts = basePlistOpts({ nodePath: '/home/user/.nvm/versions/node/v22.0.0/bin/node' });
const opts = basePlistOpts({ nodePath: '/home/user/.nvm/versions/node/v24.0.0/bin/node' });
const xml = generatePlist(opts);
assert.ok(xml.includes('/home/user/.nvm/versions/node/v22.0.0/bin'));
assert.ok(xml.includes('/home/user/.nvm/versions/node/v24.0.0/bin'));
});
it('sets KeepAlive with SuccessfulExit false', () => {

View file

@ -37,7 +37,7 @@
"typescript": "^5.4.0"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
},
"files": [
"dist",

View file

@ -88,6 +88,9 @@
"files": [
"dist"
],
"engines": {
"node": ">=24.0.0"
},
"optionalDependencies": {
"@singularity-forge/engine-darwin-arm64": ">=2.75.0",
"@singularity-forge/engine-darwin-x64": ">=2.75.0",

View file

@ -14,5 +14,8 @@
"scripts": {
"build": "tsc -p tsconfig.json"
},
"dependencies": {}
"dependencies": {},
"engines": {
"node": ">=24.0.0"
}
}

View file

@ -43,5 +43,8 @@
},
"devDependencies": {
"@smithy/node-http-handler": "^4.5.0"
},
"engines": {
"node": ">=24.0.0"
}
}

View file

@ -1,13 +1,10 @@
#!/usr/bin/env tsx
import { writeFileSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";
import { join } from "path";
import { Api, KnownProvider, Model } from "../src/types.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = join(__dirname, "..");
const packageRoot = join(import.meta.dirname, "..");
interface ModelsDevModel {
id: string;

View file

@ -44,5 +44,8 @@
"@types/hosted-git-info": "^3.0.5",
"@types/proper-lockfile": "^4.1.4",
"@types/express": "^4.17.21"
},
"engines": {
"node": ">=24.0.0"
}
}

View file

@ -1,14 +1,12 @@
import { existsSync, readFileSync } from "fs";
import { homedir } from "os";
import { dirname, join, resolve } from "path";
import { fileURLToPath } from "url";
// =============================================================================
// Package Detection
// =============================================================================
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const __dirname = import.meta.dirname;
/**
* Detect if we're running as a Bun compiled binary.

View file

@ -293,8 +293,7 @@ let _aliases: Record<string, string> | null = null;
function getAliases(): Record<string, string> {
if (_aliases) return _aliases;
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const packageIndex = path.resolve(__dirname, "../..", "index.js");
const packageIndex = path.resolve(import.meta.dirname, "../..", "index.js");
const typeboxEntry = require.resolve("@sinclair/typebox");
const typeboxRoot = typeboxEntry.replace(/[\\/]build[\\/]cjs[\\/]index\.js$/, "");

View file

@ -87,6 +87,9 @@ export function collectRuntimeDependencies(installedPath: string, entryPaths: st
export function verifyRuntimeDependencies(runtimeDeps: string[], source: string, appName: string): void {
const missing: string[] = [];
for (const dep of runtimeDeps) {
if (dep === "node") {
continue;
}
const result = spawnSync(dep, ["--version"], { encoding: "utf-8", timeout: 5000 });
if (result.error || result.status !== 0) {
missing.push(dep);

View file

@ -2,7 +2,6 @@ import * as fs from "node:fs";
import * as fsSync from "node:fs";
import * as path from "node:path";
import { spawn } from "node:child_process";
import { fileURLToPath } from "node:url";
import type { AgentTool, AgentToolResult, AgentToolUpdateCallback } from "@singularity-forge/pi-agent-core";
import {
ensureFileOpen,
@ -69,8 +68,7 @@ export type { LspServerStatus } from "./client.js";
export type { LspToolDetails } from "./types.js";
export { lspSchema } from "./types.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const lspDescription = fsSync.readFileSync(path.join(__dirname, "lsp.md"), "utf-8");
const lspDescription = fsSync.readFileSync(path.join(import.meta.dirname, "lsp.md"), "utf-8");
// =============================================================================
// Warmup API

View file

@ -26,5 +26,8 @@
},
"optionalDependencies": {
"koffi": "^2.9.0"
},
"engines": {
"node": ">=24.0.0"
}
}

View file

@ -29,6 +29,6 @@
"test": "node --test dist/rpc-client.test.js"
},
"engines": {
"node": ">=22.0.0"
"node": ">=24.0.0"
}
}

View file

@ -1,6 +1,9 @@
{
"name": "sf",
"version": "2.75.0",
"engines": {
"node": ">=24.0.0"
},
"piConfig": {
"name": "sf",
"configDir": ".sf"

View file

@ -18,7 +18,7 @@ Inspired by [Oh My Pi's pi-natives](https://github.com/can1357/oh-my-pi), adapte
## Prerequisites
- **Rust** (stable, 1.70+): https://rustup.rs
- **Node.js** (22.0.0+)
- **Node.js** (24.0.0+)
## Build

View file

@ -105,7 +105,7 @@ async function main() {
bundle: false,
format: 'esm',
platform: 'node',
target: 'node22',
target: 'node24',
sourcemap: 'inline',
packages: 'external',
logLevel: 'warning',

View file

@ -1,10 +1,9 @@
#!/usr/bin/env node
import { spawn, spawnSync } from "node:child_process";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { resolve } from "node:path";
const __dirname = dirname(fileURLToPath(import.meta.url));
const __dirname = import.meta.dirname;
const root = resolve(__dirname, "..");
const sourceBinPath = resolve(root, "bin", "sf-from-source");
const ensureResourcesPath = resolve(

View file

@ -9,11 +9,10 @@
*/
import { spawn } from 'node:child_process'
import { resolve, dirname } from 'node:path'
import { fileURLToPath } from 'node:url'
import { resolve } from 'node:path'
import { createRequire } from 'node:module'
const __dirname = dirname(fileURLToPath(import.meta.url))
const __dirname = import.meta.dirname
const root = resolve(__dirname, '..')
const require = createRequire(import.meta.url)
const tscBin = require.resolve('typescript/bin/tsc')

View file

@ -1,10 +1,9 @@
#!/usr/bin/env node
import { cpSync, existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs'
import os from 'node:os'
import { dirname, join, resolve } from 'node:path'
import { fileURLToPath } from 'node:url'
import { join, resolve } from 'node:path'
const __dirname = dirname(fileURLToPath(import.meta.url))
const __dirname = import.meta.dirname
const resourcesDir = resolve(__dirname, '..', 'src', 'resources')
const piRoot = join(os.homedir(), '.pi')
const piAgentDir = join(piRoot, 'agent')

View file

@ -4,13 +4,12 @@ import { exec as execCb, spawnSync } from 'child_process'
import { createHash, randomUUID } from 'crypto'
import { chmodSync, copyFileSync, createWriteStream, existsSync, mkdirSync, readFileSync, readdirSync, rmSync } from 'fs'
import { arch, homedir, platform } from 'os'
import { dirname, resolve, join } from 'path'
import { resolve, join } from 'path'
import { Readable } from 'stream'
import { finished } from 'stream/promises'
import extractZip from 'extract-zip'
import { fileURLToPath } from 'url'
const __dirname = dirname(fileURLToPath(import.meta.url))
const __dirname = import.meta.dirname
const cwd = resolve(__dirname, '..')
const PLAYWRIGHT_SKIP =
process.env.PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD === '1' ||

View file

@ -1,10 +1,9 @@
#!/usr/bin/env node
import { existsSync, readFileSync, readdirSync, rmSync, rmdirSync } from 'node:fs'
import os from 'node:os'
import { dirname, join, resolve } from 'node:path'
import { fileURLToPath } from 'node:url'
import { join, resolve } from 'node:path'
const __dirname = dirname(fileURLToPath(import.meta.url))
const __dirname = import.meta.dirname
const resourcesDir = resolve(__dirname, '..', 'src', 'resources')
const piRoot = join(os.homedir(), '.pi')
const piAgentDir = join(piRoot, 'agent')

View file

@ -14,10 +14,9 @@
import { watch } from 'node:fs'
import { cpSync, mkdirSync, rmSync } from 'node:fs'
import { resolve, dirname } from 'node:path'
import { fileURLToPath } from 'node:url'
import { resolve } from 'node:path'
const __dirname = dirname(fileURLToPath(import.meta.url))
const __dirname = import.meta.dirname
const src = resolve(__dirname, '..', 'src', 'resources')
const dest = resolve(__dirname, '..', 'dist', 'resources')

View file

@ -13,7 +13,7 @@
- Language: [Node.js / Python / Go / Rust / etc.]
- Framework: [Express / FastAPI / none / etc.]
- External dependencies: [list APIs, databases, services]
- Environment: [Node >= 22 / Python 3.12+ / etc.]
- Environment: [Node >= 24 / Python 3.12+ / etc.]
## Out of Scope
- [Explicit exclusion 1 — prevents scope creep]

View file

@ -41,7 +41,7 @@ import { stopWebMode } from "./web-mode.js";
import { loadStoredEnvKeys } from "./wizard.js";
// ---------------------------------------------------------------------------
// V8 compile cache — Node 22+ can cache compiled bytecode across runs,
// V8 compile cache — Node 24+ can cache compiled bytecode across runs,
// eliminating repeated parse/compile overhead for unchanged modules.
// Must be set early so dynamic imports (extensions, lazy subcommands) benefit.
// ---------------------------------------------------------------------------

View file

@ -6,15 +6,14 @@ import {
readFileSync,
symlinkSync,
} from "node:fs";
import { delimiter, dirname, join, relative, resolve } from "node:path";
import { delimiter, join, relative, resolve } from "node:path";
// SF Startup Loader
// Copyright (c) 2026 Singularity Forge
import { fileURLToPath } from "node:url";
// Fast-path: handle --version/-v and --help/-h before importing any heavy
// dependencies. This avoids loading the entire pi-coding-agent barrel import
// (~1s) just to print a version string.
const sfRootDir = resolve(dirname(fileURLToPath(import.meta.url)), "..");
const sfRootDir = resolve(import.meta.dirname, "..");
const args = process.argv.slice(2);
const firstArg = args[0];
@ -46,7 +45,7 @@ if (firstArg === "--help" || firstArg === "-h") {
// package.json (already parsed above) and verifies git is available.
// ---------------------------------------------------------------------------
{
const MIN_NODE_MAJOR = 22;
const MIN_NODE_MAJOR = 24;
const red = "\x1b[31m";
const bold = "\x1b[1m";
const dim = "\x1b[2m";
@ -96,7 +95,7 @@ import { applyRtkProcessEnv } from "./rtk.js";
// This allows config.js to:
// 1. Read piConfig.name → "sf" (branding)
// 2. Resolve themes via dist/ (no src/ present → uses dist path)
const pkgDir = resolve(dirname(fileURLToPath(import.meta.url)), "..", "pkg");
const pkgDir = resolve(import.meta.dirname, "..", "pkg");
// MUST be set before any dynamic import of pi SDK fires — this is what config.js
// reads to determine APP_NAME and CONFIG_DIR_NAME

View file

@ -27,7 +27,6 @@ import {
} from "node:fs";
import { homedir } from "node:os";
import { basename, dirname, join, relative, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { discoverExtensionEntryPaths } from "./extension-discovery.js";
import {
ensureRegistryEntries,
@ -45,7 +44,7 @@ import { compareSemver } from "./update-check.js";
// that use sf — causing stale/broken extensions to be synced to ~/.sf/agent/.
// dist/resources/ is populated by the build step (`npm run copy-resources`) and
// reflects the built state, not the currently checked-out branch.
const packageRoot = resolve(dirname(fileURLToPath(import.meta.url)), "..");
const packageRoot = resolve(import.meta.dirname, "..");
const distResources = join(packageRoot, "dist", "resources");
const srcResources = join(packageRoot, "src", "resources");
// Use dist/resources only if it has the full expected structure.

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"scripts": {
"test": "node --test tests/*.test.mjs"
},

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -3,5 +3,8 @@
"private": true,
"type": "module",
"description": "cmux integration library — used by other extensions, not an extension itself",
"engines": {
"node": ">=24.0.0"
},
"pi": {}
}

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -12,7 +12,7 @@ import { existsSync, readdirSync } from "node:fs";
import { join } from "node:path";
import { debugLog } from "../sf/debug-logger.js";
import { loadFile, parseSummary } from "../sf/files.js";
import { parsePlan, parseRoadmap } from "../sf/parsers-legacy.js";
import { parsePlan, parseRoadmap } from "../sf/parsers.js";
import {
resolveMilestoneFile,
resolveSliceFile,

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -1,3 +1,6 @@
{
"type": "module"
"type": "module",
"engines": {
"node": ">=24.0.0"
}
}

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -21,7 +21,7 @@ import {
buildRunUatPrompt,
} from "./auto-prompts.js";
import { loadFile } from "./files.js";
import { parseRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import {
relSliceFile,
resolveMilestoneFile,

View file

@ -44,7 +44,7 @@ import {
resolveAllOverrides,
} from "./files.js";
import { getMilestonePipelineVariant } from "./milestone-scope-classifier.js";
import { parseRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import {
buildMilestoneFileName,
relSliceFile,

View file

@ -38,7 +38,7 @@ import {
formatMemoriesForPrompt,
getActiveMemoriesRanked,
} from "./memory-store.js";
import { parseRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import {
relMilestoneFile,
relMilestonePath,

View file

@ -32,10 +32,7 @@ import {
nativeMergeAbort,
nativeResetHard,
} from "./native-git-bridge.js";
import {
parsePlan as parseLegacyPlan,
parseRoadmap as parseLegacyRoadmap,
} from "./parsers-legacy.js";
import { parsePlan, parseRoadmap } from "./parsers.js";
import {
buildSliceFileName,
clearPathCache,
@ -325,7 +322,7 @@ export function verifyExpectedArtifact(
return false;
}
try {
const roadmap = parseLegacyRoadmap(readFileSync(roadmapFile, "utf-8"));
const roadmap = parseRoadmap(readFileSync(roadmapFile, "utf-8"));
const milestoneResearchFile = resolveMilestoneFile(base, mid, "RESEARCH");
for (const slice of roadmap.slices) {
if (slice.done) continue;
@ -367,7 +364,7 @@ export function verifyExpectedArtifact(
try {
const roadmapContent = readFileSync(absPath, "utf-8");
if (getMilestonePlanBlockingIssue(roadmapContent)) return false;
const roadmap = parseLegacyRoadmap(roadmapContent);
const roadmap = parseRoadmap(roadmapContent);
if (roadmap.slices.length === 0) return false;
} catch (err) {
logWarning(
@ -441,7 +438,7 @@ export function verifyExpectedArtifact(
if (!taskIds) {
// LEGACY: DB unavailable or no tasks in DB — parse plan file for task IDs
const planContent = readFileSync(absPath, "utf-8");
const plan = parseLegacyPlan(planContent);
const plan = parsePlan(planContent);
if (plan.tasks.length > 0)
taskIds = plan.tasks.map((t: { id: string }) => t.id);
}
@ -482,12 +479,12 @@ export function verifyExpectedArtifact(
if (dbSlice.status !== "complete") return false;
} else if (!isDbAvailable()) {
// LEGACY: Pre-migration fallback for projects without DB.
// Fall back to roadmap checkbox check via parsers-legacy
// Fall back to roadmap checkbox check via parsers
const roadmapFile = resolveMilestoneFile(base, mid, "ROADMAP");
if (roadmapFile && existsSync(roadmapFile)) {
try {
const roadmapContent = readFileSync(roadmapFile, "utf-8");
const roadmap = parseLegacyRoadmap(roadmapContent);
const roadmap = parseRoadmap(roadmapContent);
const slice = roadmap.slices.find((s) => s.id === sid);
if (slice && !slice.done) return false;
} catch (e) {

View file

@ -17,7 +17,7 @@ import {
verifyExpectedArtifact,
writeBlockerPlaceholder,
} from "./auto-recovery.js";
import { parseRoadmap as parseLegacyRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import {
relMilestoneFile,
relSliceFile,
@ -93,7 +93,7 @@ export function inspectUnitRecoveryStatus(
);
} else {
try {
const roadmap = parseLegacyRoadmap(
const roadmap = parseRoadmap(
readFileSync(roadmapPath, "utf-8"),
);
const slice = roadmap.slices.find((s) => s.id === sid);

View file

@ -18,7 +18,7 @@ import type {
} from "@singularity-forge/pi-coding-agent";
import type { AutoSession } from "./auto/session.js";
import { loadFile } from "./files.js";
import { parseRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import { resolveMilestoneFile, resolveSlicePath } from "./paths.js";
import {
type PostExecutionResult,

View file

@ -22,8 +22,7 @@
*/
import { existsSync, readFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { join } from "node:path";
import { tierOrdinal, type ComplexityTier } from "./complexity-classifier.js";
import { getModelTier } from "./model-router.js";
@ -107,8 +106,7 @@ let _benchmarksCache: BenchmarkData | null = null;
function loadBenchmarks(): BenchmarkData {
if (_benchmarksCache) return _benchmarksCache;
const __filename = fileURLToPath(import.meta.url);
const here = dirname(__filename);
const here = import.meta.dirname;
// Works for both .ts (dev) and .js (dist) since we copy the data file 1:1.
const path = join(here, "learning", "data", "model-benchmarks.json");
if (!existsSync(path)) {

View file

@ -64,7 +64,7 @@ export async function handleCleanupBranches(
const { listWorktrees } = await import("./worktree-manager.js");
const { resolveMilestoneFile } = await import("./paths.js");
const { loadFile } = await import("./files.js");
const { parseRoadmap } = await import("./parsers-legacy.js");
const { parseRoadmap } = await import("./parsers.js");
const { isMilestoneComplete } = await import("./state.js");
const { isDbAvailable, getMilestone } = await import("./sf-db.js");

View file

@ -1094,7 +1094,7 @@ export async function ensurePreferencesFile(
if (!existsSync(path)) {
const template = await loadFile(
join(
dirname(fileURLToPath(import.meta.url)),
import.meta.dirname,
"templates",
"PREFERENCES.md",
),

View file

@ -2,7 +2,7 @@
import { readFileSync } from "node:fs";
import { findMilestoneIds } from "./guided-flow.js";
import { parseRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import { resolveMilestoneFile } from "./paths.js";
import { getMilestoneSlices, isDbAvailable } from "./sf-db.js";
import { isClosedStatus } from "./status-guards.js";

View file

@ -29,7 +29,7 @@ import {
nativeWorktreeList,
nativeWorktreeRemove,
} from "./native-git-bridge.js";
import { parseRoadmap as parseLegacyRoadmap } from "./parsers-legacy.js";
import { parseRoadmap } from "./parsers.js";
import { resolveMilestoneFile } from "./paths.js";
import { loadEffectiveSFPreferences } from "./preferences.js";
import { getMilestoneSlices, isDbAvailable } from "./sf-db.js";
@ -116,7 +116,7 @@ export async function checkGitHealth(
? await loadFile(roadmapPath)
: null;
if (roadmapContent) {
const roadmap = parseLegacyRoadmap(roadmapContent);
const roadmap = parseRoadmap(roadmapContent);
isComplete = isMilestoneComplete(roadmap);
}
}
@ -188,7 +188,7 @@ export async function checkGitHealth(
? await loadFile(roadmapPath)
: null;
if (!roadmapContent) continue;
const roadmap = parseLegacyRoadmap(roadmapContent);
const roadmap = parseRoadmap(roadmapContent);
branchMilestoneComplete = isMilestoneComplete(roadmap);
}
if (branchMilestoneComplete) {

View file

@ -28,10 +28,7 @@ import {
parseTaskPlanMustHaves,
saveFile,
} from "./files.js";
import {
parsePlan as parseLegacyPlan,
parseRoadmap as parseLegacyRoadmap,
} from "./parsers-legacy.js";
import { parsePlan, parseRoadmap } from "./parsers.js";
import {
milestonesDir,
relMilestoneFile,
@ -354,7 +351,7 @@ export async function selectDoctorScope(
dbSlices.length > 0 && dbSlices.every((s) => s.status === "complete");
if (!allDone) return milestone.id;
} else {
const roadmap = parseLegacyRoadmap(roadmapContent);
const roadmap = parseRoadmap(roadmapContent);
if (!isMilestoneComplete(roadmap)) return milestone.id;
}
}
@ -745,7 +742,7 @@ export async function runSFDoctor(
} else {
const activeMilestoneId = state.activeMilestone?.id;
const activeSliceId = state.activeSlice?.id;
slices = parseLegacyRoadmap(roadmapContent).slices.map((s) => ({
slices = parseRoadmap(roadmapContent).slices.map((s) => ({
...s,
// Legacy roadmaps only encode done vs not-done. For doctor's
// missing-directory checks, treat every undone slice except the
@ -896,7 +893,7 @@ export async function runSFDoctor(
"PLAN",
);
const planContent = planPath ? await loadFile(planPath) : null;
// Normalize plan tasks: prefer DB, fall back to parsers-legacy
// Normalize plan tasks: prefer DB, fall back to parsers
let plan: {
tasks: Array<{
id: string;
@ -919,7 +916,7 @@ export async function runSFDoctor(
}
}
if (!plan && planContent) {
plan = parseLegacyPlan(planContent);
plan = parsePlan(planContent);
}
if (!plan) {
if (!slice.done) {

View file

@ -16,7 +16,6 @@ import { findMilestoneIds } from "./milestone-ids.js";
import {
NATIVE_UNAVAILABLE,
nativeExtractSection,
nativeParseSummaryFile,
} from "./native-parser-bridge.js";
import {
relMilestoneFile,
@ -72,18 +71,18 @@ function cachedParse<T>(
}
// ─── Cross-module cache clear registry ────────────────────────────────────
// parsers-legacy.ts registers its cache-clear callback here at module init
// parsers.ts registers its cache-clear callback here at module init
// to avoid circular imports. clearParseCache() calls all registered callbacks.
const _cacheClearCallbacks: (() => void)[] = [];
/** Register a callback to be invoked when clearParseCache() is called.
* Used by parsers-legacy.ts to synchronously clear its own cache. */
* Used by parsers.ts to synchronously clear its own cache. */
export function registerCacheClearCallback(cb: () => void): void {
_cacheClearCallbacks.push(cb);
}
/** Clear the module-scoped parse cache. Call when files change on disk.
* Also clears any registered external caches (e.g. parsers-legacy.ts). */
* Also clears any registered external caches (e.g. parsers.ts). */
export function clearParseCache(): void {
_parseCache.clear();
for (const cb of _cacheClearCallbacks) cb();
@ -330,38 +329,6 @@ export function parseSummary(content: string): Summary {
}
function _parseSummaryImpl(content: string): Summary {
// Try native parser first for better performance
const nativeResult = nativeParseSummaryFile(content);
if (nativeResult) {
const nfm = nativeResult.frontmatter;
return {
frontmatter: {
id: nfm.id,
parent: nfm.parent,
milestone: nfm.milestone,
provides: nfm.provides,
requires: nfm.requires,
affects: nfm.affects,
key_files: nfm.keyFiles,
key_decisions: nfm.keyDecisions,
patterns_established: nfm.patternsEstablished,
drill_down_paths: nfm.drillDownPaths,
observability_surfaces: nfm.observabilitySurfaces,
duration: nfm.duration,
verification_result: nfm.verificationResult,
completed_at: nfm.completedAt,
blocker_discovered: nfm.blockerDiscovered,
},
title: nativeResult.title,
oneLiner: nativeResult.oneLiner,
whatHappened: nativeResult.whatHappened,
deviations: nativeResult.deviations,
filesModified: nativeResult.filesModified,
followUps: extractSection(content, "Follow-ups") ?? "",
knownLimitations: extractSection(content, "Known Limitations") ?? "",
};
}
const [fmLines, body] = splitFrontmatter(content);
const fm = fmLines ? parseFrontmatterMap(fmLines) : {};

View file

@ -17,8 +17,7 @@ import {
writeFileSync,
} from "node:fs";
import { homedir } from "node:os";
import { dirname, join, relative } from "node:path";
import { fileURLToPath } from "node:url";
import { join, relative } from "node:path";
import type {
ExtensionAPI,
ExtensionCommandContext,
@ -338,7 +337,7 @@ export async function handleForensics(
// Derive SF source dir for prompt — fall back to ~/.sf/agent/extensions/sf/
// when import.meta.url resolves to the npm-global install path (Windows).
let sfSourceDir = dirname(fileURLToPath(import.meta.url));
let sfSourceDir = import.meta.dirname;
if (!existsSync(join(sfSourceDir, "prompts"))) {
const sfHome = process.env.SF_HOME || join(homedir(), ".sf");
const fallback = join(sfHome, "agent", "extensions", "sf");

View file

@ -41,8 +41,7 @@
import { readFileSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { resolve } from "node:path";
import { writeFallbackChains } from "./fallback-chain-writer.mjs";
import {
createBeforeModelSelectHandler,
@ -52,7 +51,7 @@ import { loadCapabilityOverrides } from "./loadCapabilityOverrides.mjs";
import { aggregateAllForUnitType } from "./outcome-aggregator.mjs";
import { ensureSchema, recordOutcome } from "./outcome-recorder.mjs";
const MODULE_DIRECTORY = dirname(fileURLToPath(import.meta.url));
const MODULE_DIRECTORY = import.meta.dirname;
const SCHEMA_PATH = resolve(MODULE_DIRECTORY, "outcome-schema.sql");
const DEFAULT_DB_PATH = "~/.sf/sf-learning.db";
const DEFAULT_N_PRIOR = 10;

View file

@ -42,10 +42,9 @@
*/
import { readFile } from "node:fs/promises";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { resolve } from "node:path";
const MODULE_DIRECTORY = dirname(fileURLToPath(import.meta.url));
const MODULE_DIRECTORY = import.meta.dirname;
const DEFAULT_BENCHMARKS_PATH = resolve(
MODULE_DIRECTORY,
"data/model-benchmarks.json",

View file

@ -9,7 +9,6 @@
// parseRoadmap(), parsePlan(), parseSummary() in files.ts.
import { existsSync, mkdirSync, readFileSync } from "node:fs";
import { createRequire } from "node:module";
import { join, relative } from "node:path";
import { clearParseCache, saveFile } from "./files.js";
import {
@ -38,6 +37,8 @@ import { invalidateStateCache } from "./state.js";
import { isClosedStatus } from "./status-guards.js";
import type { GateRow } from "./types.js";
import { logWarning } from "./workflow-logger.js";
import { parseRoadmap, parsePlan } from "./parsers.js";
const parsers = { parseRoadmap, parsePlan };
// ─── Helpers ──────────────────────────────────────────────────────────────
@ -1019,22 +1020,10 @@ export interface StaleEntry {
* Logs to stderr when stale files are detected.
*/
export function detectStaleRenders(basePath: string): StaleEntry[] {
// Lazy-load parsers — intentional disk-vs-DB comparison requires parsers
const _require = createRequire(import.meta.url);
let parseRoadmap: (...args: any[]) => any, parsePlan: (...args: any[]) => any;
try {
const m = _require("./parsers-legacy.ts");
parseRoadmap = m.parseRoadmap;
parsePlan = m.parsePlan;
} catch (e) {
logWarning(
"renderer",
`parsers-legacy.ts require failed, falling back to .js: ${(e as Error).message}`,
);
const m = _require("./parsers-legacy.js");
parseRoadmap = m.parseRoadmap;
parsePlan = m.parsePlan;
}
// Parsers are statically imported at module level; they were previously
// lazy-loaded via require() but vitest/Vite doesn't resolve .ts through
// Node's require() pipeline.
const { parseRoadmap, parsePlan } = parsers;
const stale: StaleEntry[] = [];
const milestones = getAllMilestones();

View file

@ -8,7 +8,7 @@ import { existsSync, readdirSync, readFileSync } from "node:fs";
import { join } from "node:path";
import { parseContextDependsOn } from "./files.js";
import { findMilestoneIds } from "./guided-flow.js";
import { parsePlan, parseRoadmap } from "./parsers-legacy.js";
import { parsePlan, parseRoadmap } from "./parsers.js";
import {
milestonesDir,
resolveMilestoneFile,

View file

@ -10,8 +10,7 @@
*/
import { existsSync, readFileSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { join, resolve } from "node:path";
import type {
ExtensionAPI,
ExtensionCommandContext,
@ -50,7 +49,7 @@ function buildReviewPrompt(
preview: MigrationPreview,
): string {
const promptsDir = join(
dirname(fileURLToPath(import.meta.url)),
import.meta.dirname,
"..",
"prompts",
);

View file

@ -3,6 +3,9 @@
"private": true,
"version": "1.0.0",
"type": "module",
"engines": {
"node": ">=24.0.0"
},
"pi": {
"extensions": [
"./index.ts"

View file

@ -34,7 +34,7 @@ export type MergeOrder = "sequential" | "by-completion";
/**
* Check whether a milestone is complete by querying its worktree SQLite DB.
* Uses a subprocess to avoid disrupting the global DB singleton.
* Uses Node's built-in SQLite provider to avoid disrupting the global DB singleton.
* Returns true when milestones.status = 'complete' in the worktree's sf.db.
*/
/**

View file

@ -826,7 +826,7 @@ function resolveSfBin(): string | null {
// loader.js is at dist/loader.js
let thisDir: string;
try {
thisDir = dirname(fileURLToPath(import.meta.url));
thisDir = import.meta.dirname;
} catch (e) {
logWarning(
"parallel",
@ -1120,9 +1120,11 @@ export function refreshWorkerStatuses(
// If all workers are in a terminal state (error/stopped), the orchestration
// is finished — deactivate and clean up so zombie workers don't persist.
const allDead = [...state.workers.values()].every(
(w) => w.state === "error" || w.state === "stopped",
);
const allDead =
state.workers.size > 0 &&
[...state.workers.values()].every(
(w) => w.state === "error" || w.state === "stopped",
);
if (allDead) {
state.active = false;
removeStateFile(basePath);

View file

@ -1,4 +1,4 @@
// SF Extension - Legacy Parsers
// SF Extension - Parsers
// parseRoadmap() and parsePlan() extracted from files.ts.
// Used only by: md-importer.ts (migration), state.ts (pre-migration fallback),
// markdown-renderer.ts (detectStaleRenders disk-vs-DB comparison),
@ -17,10 +17,9 @@ import {
registerCacheClearCallback,
} from "./files.js";
import {
nativeParsePlanFile,
nativeParseRoadmap,
} from "./native-parser-bridge.js";
// Re-export parseRoadmapSlices so callers can import all legacy parsers from one module
// Re-export parseRoadmapSlices so callers can import all parsers from one module
import { parseRoadmapSlices } from "./roadmap-slices.js";
import type {
BoundaryMapEntry,
@ -60,13 +59,13 @@ function cachedParse<T>(
return result;
}
/** Clear the legacy parser cache. Called by clearParseCache() in files.ts. */
export function clearLegacyParseCache(): void {
/** Clear the parser cache. Called by clearParseCache() in files.ts. */
export function clearParserCache(): void {
_parseCache.clear();
}
// Register with files.ts so clearParseCache() also clears our cache
registerCacheClearCallback(clearLegacyParseCache);
registerCacheClearCallback(clearParserCache);
// ─── Roadmap Parser ────────────────────────────────────────────────────────
@ -76,7 +75,7 @@ export function parseRoadmap(content: string): Roadmap {
function _parseRoadmapImpl(content: string): Roadmap {
const stopTimer = debugTime("parse-roadmap");
// Try native parser first for better performance. Fall back to legacy if
// Try native parser first for better performance. Fall back to the TypeScript parser if
// native finds zero slices (e.g. table-style roadmaps not yet supported).
const nativeResult = nativeParseRoadmap(content);
if (nativeResult && nativeResult.slices.length > 0) {
@ -179,30 +178,6 @@ export function parsePlan(content: string): SlicePlan {
function _parsePlanImpl(content: string): SlicePlan {
const stopTimer = debugTime("parse-plan");
const [, body] = splitFrontmatter(content);
// Try native parser first for better performance. Fall back to legacy if
// native finds zero tasks (e.g. heading-style tasks not yet supported).
const nativeResult = nativeParsePlanFile(body);
if (nativeResult && nativeResult.tasks.length > 0) {
stopTimer({ native: true });
return {
id: nativeResult.id,
title: nativeResult.title,
goal: nativeResult.goal,
demo: nativeResult.demo,
mustHaves: nativeResult.mustHaves,
tasks: nativeResult.tasks.map((t) => ({
id: t.id,
title: t.title,
description: t.description,
done: t.done,
estimate: t.estimate,
...(t.files.length > 0 ? { files: t.files } : {}),
...(t.verify ? { verify: t.verify } : {}),
})),
filesLikelyTouched: nativeResult.filesLikelyTouched,
};
}
const lines = body.split("\n");
const h1 = lines.find((l) => l.startsWith("# "));

View file

@ -19,8 +19,7 @@
import { existsSync, readdirSync, readFileSync } from "node:fs";
import { homedir } from "node:os";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { join } from "node:path";
import { SF_PARSE_ERROR, SFError } from "./errors.js";
import { logWarning } from "./workflow-logger.js";
@ -35,7 +34,7 @@ import { logWarning } from "./workflow-logger.js";
* the user-local agent directory.
*/
function resolveExtensionDir(): string {
const moduleDir = dirname(fileURLToPath(import.meta.url));
const moduleDir = import.meta.dirname;
if (existsSync(join(moduleDir, "prompts"))) return moduleDir;
// Fallback: user-local agent directory

View file

@ -13,7 +13,7 @@ import { existsSync, unlinkSync } from "node:fs";
import { join } from "node:path";
import { loadFile, parseTaskPlanIO } from "./files.js";
import { loadJsonFileOrNull, saveJsonFile } from "./json-persistence.js";
import { parsePlan } from "./parsers-legacy.js";
import { parsePlan } from "./parsers.js";
import { resolveTaskFiles, resolveTasksDir } from "./paths.js";
import { getSliceTasks, isDbAvailable } from "./sf-db.js";
import type {

View file

@ -15,8 +15,7 @@
import { type ChildProcess, spawn } from "node:child_process";
import { appendFileSync, existsSync, mkdirSync } from "node:fs";
import { dirname, join } from "node:path";
import { fileURLToPath } from "node:url";
import { join } from "node:path";
import { runWorktreePostCreateHook } from "./auto-worktree.js";
import { getErrorMessage } from "./error-utils.js";
import { sfRoot } from "./paths.js";
@ -340,7 +339,7 @@ function resolveSfBin(): string | null {
let thisDir: string;
try {
thisDir = dirname(fileURLToPath(import.meta.url));
thisDir = import.meta.dirname;
} catch {
thisDir = process.cwd();
}

View file

@ -16,7 +16,7 @@ import { findMilestoneIds } from "./milestone-ids.js";
import { getVisionAlignmentBlockingIssue } from "./milestone-quality.js";
import { isTerminalMilestoneSummaryContent } from "./milestone-summary-classifier.js";
import { nativeBatchParseSfFiles } from "./native-parser-bridge.js";
import { parsePlan, parseRoadmap } from "./parsers-legacy.js";
import { parsePlan, parseRoadmap } from "./parsers.js";
import {
clearPathCache,
resolveMilestoneFile,

View file

@ -14,7 +14,7 @@ import {
} from "../auto-recovery.ts";
import { invalidateAllCaches } from "../cache.ts";
import { clearParseCache } from "../files.ts";
import { parseRoadmap } from "../parsers-legacy.ts";
import { parseRoadmap } from "../parsers.ts";
import {
closeDatabase,
insertGateRow,

View file

@ -627,7 +627,7 @@ describe("complete-milestone", () => {
const { invalidateAllCaches: invalidateAllCachesDynamic } = await import(
"../cache.ts"
);
const { parseRoadmap } = await import("../parsers-legacy.ts");
const { parseRoadmap } = await import("../parsers.ts");
const base = createFixtureBase();
try {

View file

@ -24,7 +24,7 @@ import {
import { invalidateAllCaches } from "../../cache.ts";
import { clearParseCache, parseTaskPlanFile } from "../../files.ts";
import { renderPlanFromDb } from "../../markdown-renderer.ts";
import { parsePlan, parseRoadmap } from "../../parsers-legacy.ts";
import { parsePlan, parseRoadmap } from "../../parsers.ts";
import {
closeDatabase,
insertMilestone,

View file

@ -389,7 +389,7 @@ describe("doctor-environment", async () => {
test("env: docker check with Dockerfile", () => {
const dir = createProjectDir({
"package.json": JSON.stringify({ name: "test" }),
Dockerfile: "FROM node:22\n",
Dockerfile: "FROM node:24\n",
});
mkdirSync(join(dir, "node_modules"), { recursive: true });
cleanups.push(dir);
@ -405,7 +405,7 @@ describe("doctor-environment", async () => {
// ── Doctor Issue Conversion ────────────────────────────────────────
test("env: converts results to doctor issues", () => {
const results: EnvironmentCheckResult[] = [
{ name: "node_version", status: "ok", message: "Node.js v22.0.0" },
{ name: "node_version", status: "ok", message: "Node.js v24.0.0" },
{
name: "dependencies",
status: "error",
@ -461,7 +461,7 @@ describe("doctor-environment", async () => {
// ── Report Formatting ──────────────────────────────────────────────
test("env: formatEnvironmentReport", () => {
const results: EnvironmentCheckResult[] = [
{ name: "node_version", status: "ok", message: "Node.js v22.0.0" },
{ name: "node_version", status: "ok", message: "Node.js v24.0.0" },
{
name: "dependencies",
status: "error",
@ -473,7 +473,7 @@ describe("doctor-environment", async () => {
const report = formatEnvironmentReport(results);
assert.ok(report.includes("Environment Health:"), "has header");
assert.ok(report.includes("Node.js v22.0.0"), "includes ok result");
assert.ok(report.includes("Node.js v24.0.0"), "includes ok result");
assert.ok(
report.includes("node_modules missing"),
"includes error result",

View file

@ -5,7 +5,7 @@ import { join } from "node:path";
import { describe, test } from 'vitest';
import { runSFDoctor } from "../../doctor.js";
import { parsePlan } from "../../parsers-legacy.js";
import { parsePlan } from "../../parsers.js";
// ── Helpers ─────────────────────────────────────────────────────────────────

View file

@ -170,7 +170,7 @@ test("legacy roadmap fallback: future slices are treated as pending, active slic
rmSync(tmp, { recursive: true, force: true });
});
// Force the legacy parser branch.
// Force the parser branch.
try {
closeDatabase();
} catch {

View file

@ -344,8 +344,8 @@ None
);
assert.doesNotThrow(
() => run("git check-ignore -q .sf", dir),
"git now ignores .sf after fix",
() => run("git check-ignore -q .sf-id", dir),
"git now ignores SF runtime patterns after fix",
);
});
} else {

View file

@ -125,7 +125,7 @@ test("ensureGitignore does NOT add .sf when .sf/ has tracked files (#1364)", (_t
}
});
test("ensureGitignore excludes .sf when .sf/ has NO tracked files", (_t) => {
test("ensureGitignore excludes SF runtime patterns when .sf/ has NO tracked files", (_t) => {
const dir = makeTempRepo();
try {
// Run ensureGitignore (no .sf/ at all)
@ -133,11 +133,13 @@ test("ensureGitignore excludes .sf when .sf/ has NO tracked files", (_t) => {
const exclude = readFileSync(join(dir, ".git", "info", "exclude"), "utf-8");
const lines = exclude.split("\n").map((l) => l.trim());
// Per ADR-001, local directories use granular runtime exclusions,
// not the blanket .sf pattern (which is reserved for symlink mode).
assert.ok(
lines.includes(".sf"),
`Expected .sf in .git/info/exclude, but it's missing:\n${exclude}`,
lines.includes(".sf-id"),
`Expected .sf-id in .git/info/exclude, but it's missing:\n${exclude}`,
);
assert.doesNotThrow(() => git(dir, "check-ignore", "-q", ".sf"));
assert.doesNotThrow(() => git(dir, "check-ignore", "-q", ".sf-id"));
} finally {
cleanup(dir);
}

View file

@ -15,7 +15,7 @@ import {
renderTaskSummary,
repairStaleRenders,
} from "../markdown-renderer.ts";
import { parsePlan, parseRoadmap } from "../parsers-legacy.ts";
import { parsePlan, parseRoadmap } from "../parsers.ts";
import { _clearSfRootCache, clearPathCache } from "../paths.ts";
import {
_getAdapter,

View file

@ -19,7 +19,7 @@ import type {
SFTask,
} from "../migrate/types.ts";
import { writeSFDirectory } from "../migrate/writer.ts";
import { parsePlan, parseRoadmap } from "../parsers-legacy.ts";
import { parsePlan, parseRoadmap } from "../parsers.ts";
import { deriveState } from "../state.ts";
// ─── Fixture Builders ──────────────────────────────────────────────────────

View file

@ -25,7 +25,7 @@ import {
formatState,
formatTaskSummary,
} from "../migrate/writer.ts";
import { parsePlan, parseRoadmap } from "../parsers-legacy.ts";
import { parsePlan, parseRoadmap } from "../parsers.ts";
// ─── Test Data Builders ────────────────────────────────────────────────────

View file

@ -8,7 +8,7 @@ import {
parseSummary,
parseTaskPlanFile,
} from "../files.ts";
import { parsePlan, parseRoadmap } from "../parsers-legacy.ts";
import { parsePlan, parseRoadmap } from "../parsers.ts";
// ═══════════════════════════════════════════════════════════════════════════
// parseRoadmap tests
@ -1639,17 +1639,12 @@ Do things.
`;
const c = parseContinue(content);
// parseInt("abc") returns NaN; the parser || 0 fallback should give 0
// Actually, looking at parser: typeof fm.step === 'string' ? parseInt(fm.step) : ...
// parseInt("abc") = NaN, and NaN || 0 doesn't work because NaN is falsy only in boolean context
// But the parser uses: typeof fm.step === 'string' ? parseInt(fm.step) : (fm.step as number) || 0
// parseInt returns NaN which is a number, not 0 — let's verify
const stepIsNaN = Number.isNaN(c.frontmatter.step);
const totalIsNaN = Number.isNaN(c.frontmatter.totalSteps);
// The parser does parseInt which returns NaN for non-numeric strings
// There's no || 0 fallback on the parseInt path, so NaN is expected
assert.ok(stepIsNaN, "NaN step when non-numeric string");
assert.ok(totalIsNaN, "NaN totalSteps when non-numeric string");
assert.deepStrictEqual(c.frontmatter.step, 0, "invalid step defaults to 0");
assert.deepStrictEqual(
c.frontmatter.totalSteps,
0,
"invalid totalSteps defaults to 0",
);
});
test("parseContinue: all three status variants", () => {
@ -1766,7 +1761,7 @@ Decided to use approach A over approach B because of performance.
## Context
Running in worktree. Node 22 required. TypeScript strict mode.
Running in worktree. Node 24 required. TypeScript strict mode.
## Next Action
@ -1790,7 +1785,7 @@ Pick up at step 3: run the integration tests.
c.decisions.includes("approach A over approach B"),
"decisions detail",
);
assert.ok(c.context.includes("Node 22 required"), "context detail");
assert.ok(c.context.includes("Node 24 required"), "context detail");
assert.ok(
c.nextAction.includes("step 3: run the integration tests"),
"nextAction detail",

View file

@ -10,7 +10,7 @@ import {
import { tmpdir } from "node:os";
import { join } from "node:path";
import { test } from 'vitest';
import { parseRoadmap } from "../parsers-legacy.ts";
import { parseRoadmap } from "../parsers.ts";
import {
closeDatabase,
getMilestone,

View file

@ -11,7 +11,7 @@ import { tmpdir } from "node:os";
import { join } from "node:path";
import { test } from 'vitest';
import { parseTaskPlanFile } from "../files.ts";
import { parsePlan } from "../parsers-legacy.ts";
import { parsePlan } from "../parsers.ts";
import {
closeDatabase,
getSlice,

View file

@ -7,7 +7,7 @@ import { mkdirSync, mkdtempSync, readFileSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { renderPlanFromDb, renderRoadmapFromDb } from "../markdown-renderer.ts";
import { parsePlan } from "../parsers-legacy.ts";
import { parsePlan } from "../parsers.ts";
import { parseRoadmapSlices } from "../roadmap-slices.ts";
import {
closeDatabase,

Some files were not shown because too many files have changed in this diff Show more