sf snapshot: uncommitted changes after 39m inactivity
This commit is contained in:
parent
8f6dbb30ff
commit
95726c1789
25 changed files with 1172 additions and 117 deletions
17
.vtcode/history/session-singularity-forge-202605.memory.json
Normal file
17
.vtcode/history/session-singularity-forge-202605.memory.json
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"session_id": "session-singularity-forge-20260506T065721Z_482345-1471402",
|
||||
"schema_version": 2,
|
||||
"summary": "Recent session context: user: ping",
|
||||
"objective": null,
|
||||
"task_summary": null,
|
||||
"spec_summary": null,
|
||||
"evaluation_summary": null,
|
||||
"constraints": [],
|
||||
"grounded_facts": [],
|
||||
"touched_files": [],
|
||||
"open_questions": [],
|
||||
"verification_todo": [],
|
||||
"delegation_notes": [],
|
||||
"history_artifact_path": null,
|
||||
"generated_at": "2026-05-06T06:57:26.256268403+00:00"
|
||||
}
|
||||
2
.vtcode/logs/trajectory-20260506T065806Z.jsonl
Normal file
2
.vtcode/logs/trajectory-20260506T065806Z.jsonl
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
{"kind":"tool_catalog_cache_metrics","turn":1,"model":"gpt-5.4","cache_hit":false,"plan_mode":false,"request_user_input_enabled":true,"available_tools":26,"stable_prefix_hash":17263435382582515430,"tool_catalog_hash":15853729145015341833,"prefix_change_reason":"model","ts":1778050645}
|
||||
{"kind":"llm_retry_metrics","turn":1,"model":"gpt-5.4","plan_mode":false,"attempts_made":1,"retries_used":0,"max_retries":3,"success":false,"exhausted_retry_budget":false,"stream_fallback_used":false,"last_error_retryable":false,"last_error":"Provider error: \u001b[31mOpenAI\u001b[0m \u001b[31mChat Completions error (status 401 Unauthorized) [request_id=req_14bf8819376a41c185ec1799f424636d client_request_id=vtcode-72a3c09e-1130-4f86-9... [truncated]","ts":1778050646}
|
||||
0
.vtcode/logs/trajectory.jsonl
Normal file
0
.vtcode/logs/trajectory.jsonl
Normal file
3
.vtcode/state/background_subagents.json
Normal file
3
.vtcode/state/background_subagents.json
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"records": []
|
||||
}
|
||||
9
.vtcode/terminals/INDEX.md
Normal file
9
.vtcode/terminals/INDEX.md
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
# Terminal Sessions Index
|
||||
|
||||
This file lists all active terminal sessions for dynamic discovery.
|
||||
Use `unified_file` (action='read') on individual session files for full output.
|
||||
|
||||
*No active terminal sessions.*
|
||||
|
||||
---
|
||||
*Generated automatically. Do not edit manually.*
|
||||
210
.vtcode/tool-policy.json
Normal file
210
.vtcode/tool-policy.json
Normal file
|
|
@ -0,0 +1,210 @@
|
|||
{
|
||||
"version": 1,
|
||||
"available_tools": [
|
||||
"apply_patch",
|
||||
"close_agent",
|
||||
"cron_create",
|
||||
"cron_delete",
|
||||
"cron_list",
|
||||
"enter_plan_mode",
|
||||
"exit_plan_mode",
|
||||
"list_skills",
|
||||
"load_skill",
|
||||
"load_skill_resource",
|
||||
"mcp_connect_server",
|
||||
"mcp_disconnect_server",
|
||||
"mcp_get_tool_details",
|
||||
"mcp_list_servers",
|
||||
"mcp_search_tools",
|
||||
"plan_task_tracker",
|
||||
"request_user_input",
|
||||
"resume_agent",
|
||||
"send_input",
|
||||
"spawn_agent",
|
||||
"spawn_background_subprocess",
|
||||
"task_tracker",
|
||||
"unified_exec",
|
||||
"unified_file",
|
||||
"unified_search",
|
||||
"wait_agent"
|
||||
],
|
||||
"policies": {
|
||||
"unified_search": "allow",
|
||||
"apply_patch": "prompt",
|
||||
"cron_create": "prompt",
|
||||
"cron_delete": "prompt",
|
||||
"cron_list": "prompt",
|
||||
"enter_plan_mode": "prompt",
|
||||
"exit_plan_mode": "prompt",
|
||||
"mcp_connect_server": "prompt",
|
||||
"mcp_disconnect_server": "prompt",
|
||||
"mcp_get_tool_details": "allow",
|
||||
"mcp_list_servers": "allow",
|
||||
"mcp_search_tools": "allow",
|
||||
"plan_task_tracker": "prompt",
|
||||
"request_user_input": "allow",
|
||||
"task_tracker": "prompt",
|
||||
"unified_exec": "prompt",
|
||||
"unified_file": "allow",
|
||||
"close_agent": "prompt",
|
||||
"list_skills": "allow",
|
||||
"resume_agent": "prompt",
|
||||
"send_input": "prompt",
|
||||
"spawn_agent": "prompt",
|
||||
"spawn_background_subprocess": "prompt",
|
||||
"wait_agent": "prompt",
|
||||
"load_skill_resource": "allow",
|
||||
"load_skill": "allow",
|
||||
"list_files": "allow",
|
||||
"read_file": "allow",
|
||||
"memory": "allow"
|
||||
},
|
||||
"constraints": {},
|
||||
"mcp": {
|
||||
"allowlist": {
|
||||
"enforce": true,
|
||||
"default": {
|
||||
"tools": null,
|
||||
"resources": null,
|
||||
"prompts": null,
|
||||
"logging": [
|
||||
"mcp.provider_initialized",
|
||||
"mcp.provider_initialization_failed",
|
||||
"mcp.tool_filtered",
|
||||
"mcp.tool_execution",
|
||||
"mcp.tool_failed",
|
||||
"mcp.tool_denied"
|
||||
],
|
||||
"configuration": {
|
||||
"client": [
|
||||
"max_concurrent_connections",
|
||||
"request_timeout_seconds",
|
||||
"retry_attempts",
|
||||
"startup_timeout_seconds",
|
||||
"tool_timeout_seconds",
|
||||
"experimental_use_rmcp_client"
|
||||
],
|
||||
"server": [
|
||||
"enabled",
|
||||
"bind_address",
|
||||
"port",
|
||||
"transport",
|
||||
"name",
|
||||
"version"
|
||||
],
|
||||
"ui": [
|
||||
"mode",
|
||||
"max_events",
|
||||
"show_provider_names"
|
||||
]
|
||||
}
|
||||
},
|
||||
"providers": {
|
||||
"context7": {
|
||||
"tools": [
|
||||
"search_*",
|
||||
"fetch_*",
|
||||
"list_*",
|
||||
"context7_*",
|
||||
"get_*"
|
||||
],
|
||||
"resources": [
|
||||
"docs::*",
|
||||
"snippets::*",
|
||||
"repositories::*",
|
||||
"context7::*"
|
||||
],
|
||||
"prompts": [
|
||||
"context7::*",
|
||||
"support::*",
|
||||
"docs::*"
|
||||
],
|
||||
"logging": [
|
||||
"mcp.tool_execution",
|
||||
"mcp.tool_failed",
|
||||
"mcp.tool_denied",
|
||||
"mcp.tool_filtered",
|
||||
"mcp.provider_initialized"
|
||||
],
|
||||
"configuration": {
|
||||
"context7": [
|
||||
"workspace",
|
||||
"search_scope",
|
||||
"max_results"
|
||||
],
|
||||
"provider": [
|
||||
"max_concurrent_requests"
|
||||
]
|
||||
}
|
||||
},
|
||||
"sequential-thinking": {
|
||||
"tools": [
|
||||
"plan",
|
||||
"critique",
|
||||
"reflect",
|
||||
"decompose",
|
||||
"sequential_*"
|
||||
],
|
||||
"resources": null,
|
||||
"prompts": [
|
||||
"sequential-thinking::*",
|
||||
"plan",
|
||||
"reflect",
|
||||
"critique"
|
||||
],
|
||||
"logging": [
|
||||
"mcp.tool_execution",
|
||||
"mcp.tool_failed",
|
||||
"mcp.tool_denied",
|
||||
"mcp.tool_filtered",
|
||||
"mcp.provider_initialized"
|
||||
],
|
||||
"configuration": {
|
||||
"provider": [
|
||||
"max_concurrent_requests"
|
||||
],
|
||||
"sequencing": [
|
||||
"max_depth",
|
||||
"max_branches"
|
||||
]
|
||||
}
|
||||
},
|
||||
"time": {
|
||||
"tools": [
|
||||
"get_*",
|
||||
"list_*",
|
||||
"convert_timezone",
|
||||
"describe_timezone",
|
||||
"time_*"
|
||||
],
|
||||
"resources": [
|
||||
"timezone:*",
|
||||
"location:*"
|
||||
],
|
||||
"prompts": null,
|
||||
"logging": [
|
||||
"mcp.tool_execution",
|
||||
"mcp.tool_failed",
|
||||
"mcp.tool_denied",
|
||||
"mcp.tool_filtered",
|
||||
"mcp.provider_initialized"
|
||||
],
|
||||
"configuration": {
|
||||
"provider": [
|
||||
"max_concurrent_requests"
|
||||
],
|
||||
"time": [
|
||||
"local_timezone_override"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"providers": {}
|
||||
},
|
||||
"approval_cache": {
|
||||
"allowed": [],
|
||||
"prefixes": [],
|
||||
"regexes": []
|
||||
}
|
||||
}
|
||||
0
=
Normal file
0
=
Normal file
|
|
@ -602,7 +602,7 @@ if (cliFlags.messages[0] === "autonomous") {
|
|||
if (cliFlags.messages[0] === "schedule") {
|
||||
const scheduleModulePath = "./resources/extensions/sf/commands-schedule.js";
|
||||
const { handleSchedule } = await import(scheduleModulePath);
|
||||
const rawScheduleArgs = process.argv.slice(3).join(" ");
|
||||
const rawScheduleArgs = process.argv.slice(3);
|
||||
const output = (message: string, level = "info") => {
|
||||
const stream =
|
||||
level === "warning" || level === "error"
|
||||
|
|
|
|||
|
|
@ -107,33 +107,66 @@ if (
|
|||
firstArg !== "--version" &&
|
||||
firstArg !== "-v" &&
|
||||
firstArg !== "--help" &&
|
||||
firstArg !== "-h"
|
||||
firstArg !== "-h" &&
|
||||
firstArg !== "schedule"
|
||||
) {
|
||||
try {
|
||||
const now = Date.now();
|
||||
let dueCount = 0;
|
||||
let passiveDueCount = 0;
|
||||
let projectAutoDispatchDueCount = 0;
|
||||
const schedulePaths = [
|
||||
join(process.cwd(), ".sf", "schedule.jsonl"),
|
||||
join(homedir(), ".sf", "schedule.jsonl"),
|
||||
{ path: join(process.cwd(), ".sf", "schedule.jsonl"), scope: "project" },
|
||||
{
|
||||
path: join(process.cwd(), ".sf", "runtime", "schedule.jsonl"),
|
||||
scope: "project",
|
||||
},
|
||||
{ path: join(homedir(), ".sf", "schedule.jsonl"), scope: "global" },
|
||||
];
|
||||
for (const schedulePath of schedulePaths) {
|
||||
for (const { path: schedulePath, scope } of schedulePaths) {
|
||||
if (!existsSync(schedulePath)) continue;
|
||||
const content = readFileSync(schedulePath, "utf-8");
|
||||
const latestById = new Map<string, Record<string, unknown>>();
|
||||
for (const line of content.split("\n")) {
|
||||
if (!line.trim()) continue;
|
||||
try {
|
||||
const entry = JSON.parse(line);
|
||||
if (entry.status === "pending" && Date.parse(entry.due_at) <= now) {
|
||||
dueCount++;
|
||||
if (!entry?.id || typeof entry.id !== "string") continue;
|
||||
const existing = latestById.get(entry.id);
|
||||
if (
|
||||
!existing ||
|
||||
String(entry.created_at ?? "") > String(existing.created_at ?? "")
|
||||
) {
|
||||
latestById.set(entry.id, entry);
|
||||
}
|
||||
} catch {
|
||||
// skip corrupt lines
|
||||
}
|
||||
}
|
||||
for (const entry of latestById.values()) {
|
||||
if (
|
||||
entry.status === "pending" &&
|
||||
Date.parse(String(entry.due_at)) <= now
|
||||
) {
|
||||
if (
|
||||
scope === "project" &&
|
||||
entry.auto_dispatch === true &&
|
||||
(entry.kind === "command" || entry.kind === "prompt")
|
||||
) {
|
||||
projectAutoDispatchDueCount++;
|
||||
} else {
|
||||
passiveDueCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (dueCount > 0) {
|
||||
if (passiveDueCount > 0) {
|
||||
process.stderr.write(
|
||||
`[forge] ${dueCount} scheduled item${dueCount === 1 ? "" : "s"} due now. Manage: /sf schedule list\n`,
|
||||
`[forge] ${passiveDueCount} passive scheduled item${passiveDueCount === 1 ? "" : "s"} due now. Manage: /sf schedule list\n`,
|
||||
);
|
||||
}
|
||||
if (projectAutoDispatchDueCount > 0) {
|
||||
process.stderr.write(
|
||||
`[forge] ${projectAutoDispatchDueCount} scheduled auto-dispatch item${projectAutoDispatchDueCount === 1 ? "" : "s"} due now; autonomous mode will consume project entries.\n`,
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
|
|
|
|||
|
|
@ -62,6 +62,12 @@ import {
|
|||
sfRoot,
|
||||
} from "./paths.js";
|
||||
import { resolveModelWithFallbacksForUnit } from "./preferences-models.js";
|
||||
import {
|
||||
buildScheduledPrompt,
|
||||
executeProjectScheduleCommand,
|
||||
isAutoDispatchScheduleEntry,
|
||||
markProjectScheduleDone,
|
||||
} from "./schedule/schedule-auto-dispatch.js";
|
||||
import { createScheduleStore } from "./schedule/schedule-store.js";
|
||||
import {
|
||||
getMilestone,
|
||||
|
|
@ -420,28 +426,38 @@ When done, say: "Validation attention remediated; ready for revalidation."`;
|
|||
// ─── Rules ────────────────────────────────────────────────────────────────
|
||||
export const DISPATCH_RULES = [
|
||||
{
|
||||
name: "schedule (auto_dispatch=true) → notify",
|
||||
match: async ({ state, basePath }) => {
|
||||
// Only fire when no active milestone — never pre-empt real work
|
||||
if (state.activeMilestone?.id) return null;
|
||||
|
||||
name: "schedule auto-dispatch",
|
||||
match: async ({ basePath }) => {
|
||||
try {
|
||||
const store = createScheduleStore(basePath);
|
||||
const due = store.findDue("project", new Date());
|
||||
// Find entries that want auto-dispatch
|
||||
const autoDispatch = due.filter(
|
||||
(e) => e.auto_dispatch === true && e.kind === "reminder",
|
||||
);
|
||||
const autoDispatch = due.filter(isAutoDispatchScheduleEntry);
|
||||
if (autoDispatch.length === 0) return null;
|
||||
|
||||
// Surface the first due entry as a notification stop
|
||||
const entry = autoDispatch[0];
|
||||
const msg =
|
||||
entry.payload?.message ?? `Scheduled reminder ${entry.id} is due.`;
|
||||
if (entry.kind === "command") {
|
||||
const result = executeProjectScheduleCommand(basePath, entry);
|
||||
if (result.ok) {
|
||||
return {
|
||||
action: "skip",
|
||||
reason: `[schedule] executed command ${entry.id}`,
|
||||
};
|
||||
}
|
||||
return {
|
||||
action: "stop",
|
||||
reason: `[schedule] command ${entry.id} failed: ${result.reason}`,
|
||||
level: "warning",
|
||||
};
|
||||
}
|
||||
|
||||
markProjectScheduleDone(basePath, entry, {
|
||||
result_note: "prompt dispatched",
|
||||
});
|
||||
return {
|
||||
action: "stop",
|
||||
reason: `[schedule] ${msg} Mark done: /sf schedule done ${entry.id}`,
|
||||
level: "info",
|
||||
action: "dispatch",
|
||||
unitType: "custom-step",
|
||||
unitId: `schedule/${entry.id}`,
|
||||
prompt: buildScheduledPrompt(entry),
|
||||
};
|
||||
} catch {
|
||||
// Non-fatal: never block dispatch on schedule store errors
|
||||
|
|
|
|||
|
|
@ -6,9 +6,12 @@
|
|||
* or ~/.sf/schedule.jsonl (global).
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
|
||||
import {
|
||||
executeProjectScheduleCommand,
|
||||
markProjectScheduleDone,
|
||||
} from "./schedule/schedule-auto-dispatch.js";
|
||||
import { createScheduleStore } from "./schedule/schedule-store.js";
|
||||
import { isValidKind } from "./schedule/schedule-types.js";
|
||||
import { ALL_SCHEDULE_KINDS, isValidKind } from "./schedule/schedule-types.js";
|
||||
import { generateULID } from "./schedule/schedule-ulid.js";
|
||||
|
||||
// ─── Duration parser ────────────────────────────────────────────────────────
|
||||
|
|
@ -94,18 +97,86 @@ function _findEntry(store, scope, idPrefix) {
|
|||
};
|
||||
}
|
||||
|
||||
function _splitArgs(args) {
|
||||
if (Array.isArray(args)) {
|
||||
return args.map((part) => String(part)).filter(Boolean);
|
||||
}
|
||||
const input = String(args ?? "");
|
||||
const tokens = [];
|
||||
let current = "";
|
||||
let quote = null;
|
||||
let escaped = false;
|
||||
|
||||
for (const char of input) {
|
||||
if (escaped) {
|
||||
current += char;
|
||||
escaped = false;
|
||||
continue;
|
||||
}
|
||||
if (char === "\\") {
|
||||
escaped = true;
|
||||
continue;
|
||||
}
|
||||
if (quote) {
|
||||
if (char === quote) {
|
||||
quote = null;
|
||||
} else {
|
||||
current += char;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (char === "'" || char === '"') {
|
||||
quote = char;
|
||||
continue;
|
||||
}
|
||||
if (/\s/.test(char)) {
|
||||
if (current) {
|
||||
tokens.push(current);
|
||||
current = "";
|
||||
}
|
||||
continue;
|
||||
}
|
||||
current += char;
|
||||
}
|
||||
if (escaped) current += "\\";
|
||||
if (current) tokens.push(current);
|
||||
return tokens;
|
||||
}
|
||||
|
||||
function _joinPlain(parts) {
|
||||
return parts.join(" ").trim();
|
||||
}
|
||||
|
||||
function _shellQuote(part) {
|
||||
if (/^[A-Za-z0-9_./:=@%+-]+$/.test(part)) return part;
|
||||
return `'${part.replace(/'/g, "'\\''")}'`;
|
||||
}
|
||||
|
||||
function _commandFromParts(parts) {
|
||||
return parts
|
||||
.map((part) => _shellQuote(String(part)))
|
||||
.join(" ")
|
||||
.trim();
|
||||
}
|
||||
|
||||
// ─── Subcommands ────────────────────────────────────────────────────────────
|
||||
|
||||
async function addItem(args, ctx) {
|
||||
const parts = args.trim().split(/\s+/);
|
||||
const parts = _splitArgs(args);
|
||||
|
||||
let kind = "reminder";
|
||||
let scope = "project";
|
||||
let dueAt = null;
|
||||
let autoDispatch = false;
|
||||
let capture = null;
|
||||
const titleParts = [];
|
||||
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
const p = parts[i];
|
||||
if (p === "--") {
|
||||
titleParts.push(...parts.slice(i + 1));
|
||||
break;
|
||||
}
|
||||
if (p === "--kind" || p === "-k") {
|
||||
kind = parts[++i];
|
||||
continue;
|
||||
|
|
@ -134,16 +205,28 @@ async function addItem(args, ctx) {
|
|||
dueAt = new Date(parsed).toISOString();
|
||||
continue;
|
||||
}
|
||||
if (p === "--auto-dispatch" || p === "--auto") {
|
||||
autoDispatch = true;
|
||||
continue;
|
||||
}
|
||||
if (p === "--capture") {
|
||||
capture = parts[++i];
|
||||
continue;
|
||||
}
|
||||
titleParts.push(p);
|
||||
}
|
||||
|
||||
if (!isValidKind(kind)) {
|
||||
ctx.ui.notify(
|
||||
`Unknown kind: ${kind}. Valid: reminder, milestone_check, review_due, recurring`,
|
||||
`Unknown kind: ${kind}. Valid: ${ALL_SCHEDULE_KINDS.join(", ")}`,
|
||||
"warning",
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (capture && capture !== "stdout") {
|
||||
ctx.ui.notify(`Unknown capture mode: ${capture}. Valid: stdout`, "warning");
|
||||
return;
|
||||
}
|
||||
if (scope !== "project" && scope !== "global") {
|
||||
ctx.ui.notify(`Unknown scope: ${scope}. Valid: project, global`, "warning");
|
||||
return;
|
||||
|
|
@ -156,7 +239,8 @@ async function addItem(args, ctx) {
|
|||
return;
|
||||
}
|
||||
|
||||
const title = titleParts.join(" ").trim();
|
||||
const title =
|
||||
kind === "command" ? _commandFromParts(titleParts) : _joinPlain(titleParts);
|
||||
if (!title) {
|
||||
ctx.ui.notify(
|
||||
"Missing title. Example: /sf schedule add --in 2w 'Review adoption metrics'",
|
||||
|
|
@ -172,15 +256,29 @@ async function addItem(args, ctx) {
|
|||
status: "pending",
|
||||
due_at: dueAt,
|
||||
created_at: new Date().toISOString(),
|
||||
payload: { message: title },
|
||||
payload: _payloadForKind(kind, title, capture),
|
||||
created_by: "user",
|
||||
...(autoDispatch ? { auto_dispatch: true } : {}),
|
||||
};
|
||||
store.appendEntry(scope, entry);
|
||||
ctx.ui.notify(`Scheduled: ${entry.id}\nDue: ${entry.due_at}`, "success");
|
||||
}
|
||||
|
||||
function _payloadForKind(kind, title, capture) {
|
||||
if (kind === "command") {
|
||||
return {
|
||||
command: title,
|
||||
...(capture === "stdout" ? { capture } : {}),
|
||||
};
|
||||
}
|
||||
if (kind === "prompt") {
|
||||
return { prompt: title, message: title };
|
||||
}
|
||||
return { message: title };
|
||||
}
|
||||
|
||||
async function listItems(args, ctx) {
|
||||
const parts = args.trim().split(/\s+/).filter(Boolean);
|
||||
const parts = _splitArgs(args);
|
||||
let scope = "project";
|
||||
let showDueOnly = false;
|
||||
let showAll = false;
|
||||
|
|
@ -251,7 +349,7 @@ async function listItems(args, ctx) {
|
|||
}
|
||||
|
||||
async function markDone(args, ctx) {
|
||||
const idPrefix = args.trim();
|
||||
const idPrefix = _joinPlain(_splitArgs(args));
|
||||
if (!idPrefix) {
|
||||
ctx.ui.notify("Usage: /sf schedule done \u003cid\u003e", "warning");
|
||||
return;
|
||||
|
|
@ -272,7 +370,7 @@ async function markDone(args, ctx) {
|
|||
}
|
||||
|
||||
async function markCancel(args, ctx) {
|
||||
const idPrefix = args.trim();
|
||||
const idPrefix = _joinPlain(_splitArgs(args));
|
||||
if (!idPrefix) {
|
||||
ctx.ui.notify("Usage: /sf schedule cancel \u003cid\u003e", "warning");
|
||||
return;
|
||||
|
|
@ -293,7 +391,7 @@ async function markCancel(args, ctx) {
|
|||
}
|
||||
|
||||
async function snoozeItem(args, ctx) {
|
||||
const parts = args.trim().split(/\s+/).filter(Boolean);
|
||||
const parts = _splitArgs(args);
|
||||
let idPrefix = "";
|
||||
let by = "";
|
||||
|
||||
|
|
@ -344,7 +442,7 @@ async function snoozeItem(args, ctx) {
|
|||
}
|
||||
|
||||
async function runItem(args, ctx) {
|
||||
const idPrefix = args.trim();
|
||||
const idPrefix = _joinPlain(_splitArgs(args));
|
||||
if (!idPrefix) {
|
||||
ctx.ui.notify("Usage: /sf schedule run \u003cid\u003e", "warning");
|
||||
return;
|
||||
|
|
@ -368,35 +466,18 @@ async function runItem(args, ctx) {
|
|||
break;
|
||||
}
|
||||
case "command": {
|
||||
const cmd = payload.command;
|
||||
if (!cmd) {
|
||||
ctx.ui.notify(
|
||||
`Command entry ${entry.id} has no command in payload.`,
|
||||
"warning",
|
||||
);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const capture = payload.capture === "stdout";
|
||||
const result = execSync(cmd, {
|
||||
stdio: capture ? ["pipe", "pipe", "pipe"] : "inherit",
|
||||
encoding: "utf-8",
|
||||
});
|
||||
if (capture) {
|
||||
ctx.ui.notify(result, "info");
|
||||
}
|
||||
} catch (err) {
|
||||
const stderr = err.stderr || err.message || String(err);
|
||||
ctx.ui.notify(`Command failed: ${stderr}`, "error");
|
||||
const updated = {
|
||||
...entry,
|
||||
status: "cancelled",
|
||||
created_at: new Date().toISOString(),
|
||||
payload: { ...payload, result_note: stderr },
|
||||
};
|
||||
store.appendEntry("project", updated);
|
||||
const result = executeProjectScheduleCommand(_basePath(), entry);
|
||||
if (!result.ok) {
|
||||
ctx.ui.notify(`Command failed: ${result.reason}`, "error");
|
||||
return;
|
||||
}
|
||||
if (result.stdout) ctx.ui.notify(result.stdout, "info");
|
||||
ctx.ui.notify(`Completed: ${entry.id}`, "success");
|
||||
return;
|
||||
}
|
||||
case "prompt": {
|
||||
const title = payload.prompt || payload.message || entry.id;
|
||||
ctx.ui.notify(`Prompt: ${title}`, "info");
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
|
|
@ -406,12 +487,7 @@ async function runItem(args, ctx) {
|
|||
}
|
||||
|
||||
// Mark done on success
|
||||
const updated = {
|
||||
...entry,
|
||||
status: "done",
|
||||
created_at: new Date().toISOString(),
|
||||
};
|
||||
store.appendEntry("project", updated);
|
||||
markProjectScheduleDone(_basePath(), entry);
|
||||
ctx.ui.notify(`Completed: ${entry.id}`, "success");
|
||||
}
|
||||
|
||||
|
|
@ -428,9 +504,9 @@ async function runItem(args, ctx) {
|
|||
* @param {import("@singularity-forge/pi-coding-agent").ExtensionContext} ctx
|
||||
*/
|
||||
export async function handleSchedule(args, ctx) {
|
||||
const parts = args.trim().split(/\s+/).filter(Boolean);
|
||||
const parts = _splitArgs(args);
|
||||
const sub = parts[0] ?? "";
|
||||
const rest = parts.slice(1).join(" ");
|
||||
const rest = Array.isArray(args) ? parts.slice(1) : parts.slice(1).join(" ");
|
||||
|
||||
switch (sub) {
|
||||
case "add":
|
||||
|
|
@ -448,7 +524,7 @@ export async function handleSchedule(args, ctx) {
|
|||
case "":
|
||||
ctx.ui.notify(
|
||||
"Usage: /sf schedule add|list|done|cancel|snooze|run\n" +
|
||||
" add --in \u003cduration\u003e [--kind \u003ckind\u003e] [--scope \u003cscope\u003e] \u003ctitle\u003e\n" +
|
||||
" add --in \u003cduration\u003e [--kind \u003ckind\u003e] [--scope \u003cscope\u003e] [--auto-dispatch] \u003ctitle-or-command\u003e\n" +
|
||||
" list [--due] [--all] [--json] [--scope \u003cscope\u003e]\n" +
|
||||
" done \u003cid\u003e\n" +
|
||||
" cancel \u003cid\u003e\n" +
|
||||
|
|
|
|||
|
|
@ -587,7 +587,7 @@ export class GitServiceImpl {
|
|||
}).trim();
|
||||
nativeResetSoft(this.basePath, resetTarget);
|
||||
// Re-run smartStage so the same RUNTIME_EXCLUSION_PATHS apply.
|
||||
// Snapshot commits used nativeAddTracked (git add -u) which stages
|
||||
// Snapshot commits used nativeAddTracked (git add --ignore-removal) which stages
|
||||
// ALL tracked modifications including .sf/ state files. Without
|
||||
// re-staging, those .sf/ changes leak into the absorbed commit.
|
||||
this.smartStage();
|
||||
|
|
|
|||
|
|
@ -616,13 +616,15 @@ export function nativeAddAll(basePath) {
|
|||
gitFileExec(basePath, ["add", "-A"]);
|
||||
}
|
||||
/**
|
||||
* Stage only already-tracked files (git add -u).
|
||||
* Does NOT add new untracked files — only updates modifications and deletions
|
||||
* for files git already knows about. Safe for automated snapshots where
|
||||
* pulling in unknown untracked files (secrets, binaries) would be dangerous.
|
||||
* Stage only already-tracked files, ignoring removals (git add --ignore-removal).
|
||||
*
|
||||
* Does NOT add new untracked files and does NOT stage deletions — only
|
||||
* updates modifications for files git already knows about. Safe for automated
|
||||
* snapshots where pulling in unknown untracked files (secrets, binaries) or
|
||||
* committing accidental deletions would be dangerous.
|
||||
*/
|
||||
export function nativeAddTracked(basePath) {
|
||||
gitFileExec(basePath, ["add", "-u"]);
|
||||
gitFileExec(basePath, ["add", "--ignore-removal", "."]);
|
||||
}
|
||||
/**
|
||||
* Stage all files with pathspec exclusions (git add -A -- ':!pattern' ...).
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@
|
|||
*
|
||||
* Consumer: cli.ts interactive startup path.
|
||||
*/
|
||||
|
||||
import { isAutoDispatchScheduleEntry } from "./schedule/schedule-auto-dispatch.js";
|
||||
import { createScheduleStore } from "./schedule/schedule-store.js";
|
||||
|
||||
/**
|
||||
|
|
@ -31,20 +33,37 @@ export function printScheduleBanner(basePath) {
|
|||
// Best-effort — never block startup
|
||||
}
|
||||
|
||||
const autoDispatch = due.filter(isAutoDispatchScheduleEntry);
|
||||
const passive = due.filter((entry) => !isAutoDispatchScheduleEntry(entry));
|
||||
|
||||
if (due.length === 0) return;
|
||||
|
||||
// Sort by due_at ascending
|
||||
due.sort((a, b) => new Date(a.due_at) - new Date(b.due_at));
|
||||
passive.sort((a, b) => new Date(a.due_at) - new Date(b.due_at));
|
||||
autoDispatch.sort((a, b) => new Date(a.due_at) - new Date(b.due_at));
|
||||
|
||||
const titles = due
|
||||
.slice(0, 3)
|
||||
.map((e) => e.payload?.message || e.id.slice(0, 8));
|
||||
const more = due.length > 3 ? ` (+${due.length - 3} more)` : "";
|
||||
const label = due.length === 1 ? "scheduled item due" : "scheduled items due";
|
||||
if (passive.length > 0) {
|
||||
const titles = passive
|
||||
.slice(0, 3)
|
||||
.map((e) => e.payload?.message || e.id.slice(0, 8));
|
||||
const more = passive.length > 3 ? ` (+${passive.length - 3} more)` : "";
|
||||
const label =
|
||||
passive.length === 1 ? "scheduled item due" : "scheduled items due";
|
||||
|
||||
process.stderr.write(
|
||||
`[forge] ${due.length} ${label}: ${titles.join(", ")}${more}\n`,
|
||||
);
|
||||
process.stderr.write(
|
||||
`[forge] ${passive.length} ${label}: ${titles.join(", ")}${more}. Manage: /sf schedule list\n`,
|
||||
);
|
||||
}
|
||||
|
||||
if (autoDispatch.length > 0) {
|
||||
const label =
|
||||
autoDispatch.length === 1
|
||||
? "scheduled auto-dispatch item due"
|
||||
: "scheduled auto-dispatch items due";
|
||||
process.stderr.write(
|
||||
`[forge] ${autoDispatch.length} ${label}; autonomous mode will consume project entries.\n`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
171
src/resources/extensions/sf/schedule/schedule-auto-dispatch.js
Normal file
171
src/resources/extensions/sf/schedule/schedule-auto-dispatch.js
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
/**
|
||||
* Schedule Auto Dispatch — first-class execution for due project schedule entries.
|
||||
*
|
||||
* Purpose: let autonomous mode consume repo-owned scheduled work without a
|
||||
* human approval loop while preserving append-only evidence in the schedule log.
|
||||
*
|
||||
* Consumer: auto-dispatch.js and commands-schedule.js.
|
||||
*/
|
||||
import { execSync } from "node:child_process";
|
||||
import { createScheduleStore } from "./schedule-store.js";
|
||||
|
||||
const MAX_RESULT_CHARS = 12_000;
|
||||
|
||||
/**
|
||||
* Return true when a schedule entry is allowed to run from autonomous mode.
|
||||
*
|
||||
* Purpose: keep auto execution explicit; passive reminders and global schedule
|
||||
* entries remain visible but do not become repo cron jobs accidentally.
|
||||
*
|
||||
* Consumer: auto-dispatch.js schedule rule.
|
||||
*
|
||||
* @param {import("./schedule-types.js").ScheduleEntry} entry
|
||||
* @returns {boolean}
|
||||
*/
|
||||
export function isAutoDispatchScheduleEntry(entry) {
|
||||
return (
|
||||
entry?.auto_dispatch === true &&
|
||||
(entry.kind === "command" || entry.kind === "prompt")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a project schedule entry done with a bounded result note.
|
||||
*
|
||||
* Purpose: make schedule consumption durable so due auto-dispatch entries do
|
||||
* not repeat forever after a successful autonomous tick.
|
||||
*
|
||||
* Consumer: executeProjectScheduleCommand and prompt schedule dispatch.
|
||||
*
|
||||
* @param {string} basePath
|
||||
* @param {import("./schedule-types.js").ScheduleEntry} entry
|
||||
* @param {Record<string, unknown>} [payloadPatch]
|
||||
* @returns {import("./schedule-types.js").ScheduleEntry}
|
||||
*/
|
||||
export function markProjectScheduleDone(basePath, entry, payloadPatch = {}) {
|
||||
const updated = {
|
||||
...entry,
|
||||
status: "done",
|
||||
created_at: new Date().toISOString(),
|
||||
payload: {
|
||||
...(entry.payload ?? {}),
|
||||
...payloadPatch,
|
||||
},
|
||||
};
|
||||
createScheduleStore(basePath).appendEntry("project", updated);
|
||||
return updated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a project schedule entry cancelled with a bounded failure note.
|
||||
*
|
||||
* Purpose: preserve failed cron evidence and prevent an invalid command from
|
||||
* hot-looping through autonomous dispatch forever.
|
||||
*
|
||||
* Consumer: executeProjectScheduleCommand and schedule dispatch failure paths.
|
||||
*
|
||||
* @param {string} basePath
|
||||
* @param {import("./schedule-types.js").ScheduleEntry} entry
|
||||
* @param {string} reason
|
||||
* @returns {import("./schedule-types.js").ScheduleEntry}
|
||||
*/
|
||||
export function markProjectScheduleCancelled(basePath, entry, reason) {
|
||||
const updated = {
|
||||
...entry,
|
||||
status: "cancelled",
|
||||
created_at: new Date().toISOString(),
|
||||
payload: {
|
||||
...(entry.payload ?? {}),
|
||||
result_note: _truncate(reason),
|
||||
},
|
||||
};
|
||||
createScheduleStore(basePath).appendEntry("project", updated);
|
||||
return updated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute one project-scoped command schedule entry from the repo root.
|
||||
*
|
||||
* Purpose: make `kind: "command", auto_dispatch: true` behave like a repo cron
|
||||
* job in autonomous mode, with durable success/failure status in `.sf`.
|
||||
*
|
||||
* Consumer: auto-dispatch.js schedule rule and `/sf schedule run`.
|
||||
*
|
||||
* @param {string} basePath
|
||||
* @param {import("./schedule-types.js").ScheduleEntry} entry
|
||||
* @returns {{ok: true, status: "done", stdout?: string} | {ok: false, status: "cancelled", reason: string}}
|
||||
*/
|
||||
export function executeProjectScheduleCommand(basePath, entry) {
|
||||
const payload = entry.payload ?? {};
|
||||
const command = payload.command;
|
||||
if (typeof command !== "string" || command.trim().length === 0) {
|
||||
const reason = `Command entry ${entry.id} has no payload.command.`;
|
||||
markProjectScheduleCancelled(basePath, entry, reason);
|
||||
return { ok: false, status: "cancelled", reason };
|
||||
}
|
||||
|
||||
try {
|
||||
const stdout = execSync(command, {
|
||||
cwd: basePath,
|
||||
encoding: "utf-8",
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
env: process.env,
|
||||
maxBuffer: 16 * 1024 * 1024,
|
||||
});
|
||||
const captured =
|
||||
payload.capture === "stdout" ? _truncate(stdout) : undefined;
|
||||
markProjectScheduleDone(basePath, entry, {
|
||||
result_note: "command completed",
|
||||
...(captured !== undefined ? { stdout: captured } : {}),
|
||||
});
|
||||
return captured !== undefined
|
||||
? { ok: true, status: "done", stdout: captured }
|
||||
: { ok: true, status: "done" };
|
||||
} catch (err) {
|
||||
const reason = _errorText(err);
|
||||
markProjectScheduleCancelled(basePath, entry, reason);
|
||||
return { ok: false, status: "cancelled", reason };
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Build the autonomous prompt for a due prompt schedule entry.
 *
 * Purpose: turn scheduled intent into a normal custom-step unit while keeping
 * schedule storage responsible only for when the trigger fires.
 *
 * Consumer: auto-dispatch.js schedule rule.
 *
 * @param {import("./schedule-types.js").ScheduleEntry} entry
 * @returns {string} Multi-line prompt; falls back to the entry id when the
 *   payload carries neither `prompt` nor `message`.
 */
export function buildScheduledPrompt(entry) {
  const data = entry.payload ?? {};
  // Prefer the explicit prompt, then the human summary, then the id itself.
  const work = data.prompt ?? data.message ?? entry.id;
  const sections = [
    `Scheduled autonomous prompt ${entry.id} is due.`,
    "",
    "Treat this as repo-owned scheduled work. Execute it without asking the user unless a safety gate requires a pause.",
    "",
    "Scheduled work:",
    String(work),
  ];
  return sections.join("\n");
}
|
||||
|
||||
// Derive a bounded, human-readable failure reason from a thrown value.
// Preference order for object-like errors (e.g. execSync failures):
// stderr first, then message, then stdout, then the value itself.
function _errorText(err) {
  const isObjectLike = err !== null && typeof err === "object";
  if (!isObjectLike) {
    return _truncate(String(err));
  }

  const errStderr = "stderr" in err ? err.stderr : undefined;
  if (errStderr) {
    return _truncate(String(errStderr));
  }

  const errMessage = "message" in err ? err.message : undefined;
  const errStdout = "stdout" in err ? err.stdout : undefined;
  return _truncate(String(errMessage ?? errStdout ?? err));
}
|
||||
|
||||
// Coerce any value to a string and cap it at MAX_RESULT_CHARS,
// appending an explicit truncation marker when content was dropped.
function _truncate(value) {
  const text = String(value ?? "");
  if (text.length <= MAX_RESULT_CHARS) {
    return text;
  }
  return `${text.slice(0, MAX_RESULT_CHARS)}\n[truncated]`;
}
|
||||
|
|
@ -4,7 +4,11 @@ export type ScheduleKind =
|
|||
| "reminder"
|
||||
| "milestone_check"
|
||||
| "review_due"
|
||||
| "recurring";
|
||||
| "recurring"
|
||||
| "review"
|
||||
| "audit"
|
||||
| "prompt"
|
||||
| "command";
|
||||
|
||||
export interface ScheduleEntry {
|
||||
id: string;
|
||||
|
|
|
|||
|
|
@ -20,12 +20,14 @@
|
|||
*/
|
||||
|
||||
/**
|
||||
* @typedef {("reminder"|"milestone_check"|"review_due"|"recurring"|"review"|"audit")} ScheduleKind
|
||||
* @typedef {("reminder"|"milestone_check"|"review_due"|"recurring"|"review"|"audit"|"prompt"|"command")} ScheduleKind
|
||||
* "review" / "audit" — surfaced to next planning turn (SF schedule system kinds).
|
||||
* "review_due" — legacy internal name for review (backward compat).
|
||||
* "reminder" — general reminder.
|
||||
* "milestone_check" — milestone health check.
|
||||
* "recurring" — cron-based recurring entry.
|
||||
* "prompt" — autonomous prompt to dispatch when due and auto_dispatch=true.
|
||||
* "command" — repo command to execute when due and auto_dispatch=true.
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -62,7 +64,21 @@
|
|||
*/
|
||||
|
||||
/**
|
||||
* @typedef {ReminderPayload|MilestoneCheckPayload|ReviewDuePayload|RecurringPayload} SchedulePayload
|
||||
* @typedef {object} PromptPayload
|
||||
* @property {string} [prompt] Autonomous prompt to dispatch
|
||||
* @property {string} [message] Human-readable prompt summary
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {object} CommandPayload
|
||||
* @property {string} command Shell command to execute from the project root
|
||||
* @property {"stdout"} [capture] Capture bounded stdout into the schedule log
|
||||
* @property {string} [result_note] Last execution result
|
||||
* @property {string} [stdout] Bounded captured stdout when capture="stdout"
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {ReminderPayload|MilestoneCheckPayload|ReviewDuePayload|RecurringPayload|PromptPayload|CommandPayload} SchedulePayload
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -85,7 +101,7 @@
|
|||
* @property {string} created_at ISO-8601 timestamp
|
||||
* @property {SchedulePayload} payload Kind-specific data
|
||||
* @property {ScheduleCreatedBy} created_by Who created the entry
|
||||
* @property {boolean} [auto_dispatch] If true and kind='reminder', surface as dispatch input in auto-mode when due. Defaults false.
|
||||
* @property {boolean} [auto_dispatch] If true and kind is 'prompt' or 'command', autonomous mode consumes the project entry when due. Defaults false.
|
||||
* @property {string} [snoozed_at] ISO-8601 timestamp; set when the entry was last snoozed
|
||||
*/
|
||||
|
||||
|
|
@ -99,6 +115,7 @@ export const VALID_KINDS = new Set([
|
|||
"recurring",
|
||||
"review",
|
||||
"audit",
|
||||
"prompt",
|
||||
"command",
|
||||
]);
|
||||
|
||||
|
|
|
|||
|
|
@ -3754,6 +3754,144 @@ export function insertLlmTaskOutcome(input) {
|
|||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Query LLM task outcomes for a specific unit.
 *
 * Purpose: enable outcome-learning and cost-guard gates to inspect
 * historical performance of a unit type + id combination.
 *
 * Consumer: uok/outcome-learning-gate.js, uok/cost-guard-gate.js.
 *
 * @param {string} unitType
 * @param {string} unitId
 * @param {number} [limit=20] Maximum rows returned, most recent first
 * @returns {object[]} Outcome rows; [] when no DB is open or the query fails
 */
export function getLlmTaskOutcomesByUnit(unitType, unitId, limit = 20) {
  if (!currentDb) return [];
  try {
    // NOTE(review): the SQL text was missing here (prepare() was called with
    // an empty argument — a syntax error). Statement reconstructed from the
    // named bind parameters; confirm table/column names against the schema.
    return currentDb
      .prepare(
        `SELECT *
           FROM llm_task_outcomes
          WHERE unit_type = :unit_type
            AND unit_id = :unit_id
          ORDER BY created_at DESC
          LIMIT :limit`,
      )
      .all({
        ":unit_type": unitType,
        ":unit_id": unitId,
        ":limit": limit,
      });
  } catch {
    // Best-effort read path: callers treat a failed query as "no history".
    return [];
  }
}
|
||||
/**
 * Query LLM task outcomes for a specific model.
 *
 * Purpose: enable cost-guard to detect models with high failure rates
 * or excessive cumulative spend.
 *
 * Consumer: uok/cost-guard-gate.js.
 *
 * @param {string} modelId
 * @param {number} [limit=50] Maximum rows returned, most recent first
 * @returns {object[]} Outcome rows; [] when no DB is open or the query fails
 */
export function getLlmTaskOutcomesByModel(modelId, limit = 50) {
  if (!currentDb) return [];
  try {
    // NOTE(review): the SQL text was missing here (prepare() was called with
    // an empty argument — a syntax error). Statement reconstructed from the
    // named bind parameters; confirm table/column names against the schema.
    return currentDb
      .prepare(
        `SELECT *
           FROM llm_task_outcomes
          WHERE model_id = :model_id
          ORDER BY created_at DESC
          LIMIT :limit`,
      )
      .all({
        ":model_id": modelId,
        ":limit": limit,
      });
  } catch {
    // Best-effort read path: callers treat a failed query as "no history".
    return [];
  }
}
|
||||
/**
 * Query recent LLM task outcomes across all units.
 *
 * Purpose: provide a rolling window of outcomes for system-wide
 * health and spend analysis.
 *
 * Consumer: uok/diagnostic-synthesis.js, uok/cost-guard-gate.js.
 *
 * @param {number} [hours=24] Size of the rolling window
 * @param {number} [limit=100] Maximum rows returned, most recent first
 * @returns {object[]} Outcome rows; [] when no DB is open or the query fails
 */
export function getRecentLlmTaskOutcomes(hours = 24, limit = 100) {
  if (!currentDb) return [];
  // ISO-8601 cutoff compares lexicographically with stored timestamps.
  const cutoff = new Date(Date.now() - hours * 60 * 60 * 1000).toISOString();
  try {
    // NOTE(review): the SQL text was missing here (prepare() was called with
    // an empty argument — a syntax error). Statement reconstructed from the
    // named bind parameters; confirm table/column names against the schema.
    return currentDb
      .prepare(
        `SELECT *
           FROM llm_task_outcomes
          WHERE created_at >= :cutoff
          ORDER BY created_at DESC
          LIMIT :limit`,
      )
      .all({
        ":cutoff": cutoff,
        ":limit": limit,
      });
  } catch {
    // Best-effort read path: callers treat a failed query as "no history".
    return [];
  }
}
|
||||
/**
 * Aggregate LLM task outcome statistics for a model over a time window.
 *
 * Returns { total, succeeded, failed, totalCostUsd, totalTokens, avgDurationMs }.
 *
 * Consumer: uok/cost-guard-gate.js, uok/outcome-learning-gate.js.
 *
 * @param {string} modelId
 * @param {number} [windowHours=24] Size of the rolling window
 * @returns {{total: number, succeeded: number, failed: number, totalCostUsd: number, totalTokens: number, avgDurationMs: number}}
 */
export function getLlmTaskOutcomeStats(modelId, windowHours = 24) {
  // Single zeroed shape reused for every "no data" exit.
  const empty = {
    total: 0,
    succeeded: 0,
    failed: 0,
    totalCostUsd: 0,
    totalTokens: 0,
    avgDurationMs: 0,
  };
  if (!currentDb) {
    return empty;
  }
  const cutoff = new Date(
    Date.now() - windowHours * 60 * 60 * 1000,
  ).toISOString();
  try {
    // NOTE(review): the SQL text was missing here (prepare() was called with
    // an empty argument — a syntax error). Aggregation reconstructed from the
    // returned field names; confirm column names (succeeded, cost_usd,
    // token_count, duration_ms) against the schema migration.
    const row = currentDb
      .prepare(
        `SELECT COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN succeeded = 1 THEN 1 ELSE 0 END), 0) AS succeeded,
                COALESCE(SUM(CASE WHEN succeeded = 1 THEN 0 ELSE 1 END), 0) AS failed,
                COALESCE(SUM(cost_usd), 0) AS totalCostUsd,
                COALESCE(SUM(token_count), 0) AS totalTokens,
                COALESCE(AVG(duration_ms), 0) AS avgDurationMs
           FROM llm_task_outcomes
          WHERE model_id = :model_id
            AND created_at >= :cutoff`,
      )
      .get({ ":model_id": modelId, ":cutoff": cutoff });
    if (!row) {
      return empty;
    }
    return {
      total: row.total ?? 0,
      succeeded: row.succeeded ?? 0,
      failed: row.failed ?? 0,
      totalCostUsd: row.totalCostUsd ?? 0,
      totalTokens: row.totalTokens ?? 0,
      avgDurationMs: row.avgDurationMs ?? 0,
    };
  } catch {
    // Best-effort read path: a failed query reads as "no activity".
    return empty;
  }
}
|
||||
/**
 * Aggregate gate run statistics for a specific gate over a time window.
 *
 * Returns { total, pass, fail, retry, manualAttention, lastEvaluatedAt }.
 *
 * Consumer: uok/diagnostic-synthesis.js, uok/gate-runner.js health checks.
 *
 * @param {string} gateId
 * @param {number} [windowHours=24] Size of the rolling window
 * @returns {{total: number, pass: number, fail: number, retry: number, manualAttention: number, lastEvaluatedAt: string|null}}
 */
export function getGateRunStats(gateId, windowHours = 24) {
  // Single zeroed shape reused for every "no data" exit.
  const empty = {
    total: 0,
    pass: 0,
    fail: 0,
    retry: 0,
    manualAttention: 0,
    lastEvaluatedAt: null,
  };
  if (!currentDb) {
    return empty;
  }
  const cutoff = new Date(
    Date.now() - windowHours * 60 * 60 * 1000,
  ).toISOString();
  try {
    // NOTE(review): the SQL text was missing here (prepare() was called with
    // an empty argument — a syntax error). Aggregation reconstructed from the
    // returned field names and the GateResult outcome vocabulary
    // ("pass"|"fail"|"retry"|"manual-attention"); confirm table/column names
    // against the schema migration.
    const row = currentDb
      .prepare(
        `SELECT COUNT(*) AS total,
                COALESCE(SUM(CASE WHEN outcome = 'pass' THEN 1 ELSE 0 END), 0) AS pass,
                COALESCE(SUM(CASE WHEN outcome = 'fail' THEN 1 ELSE 0 END), 0) AS fail,
                COALESCE(SUM(CASE WHEN outcome = 'retry' THEN 1 ELSE 0 END), 0) AS retry,
                COALESCE(SUM(CASE WHEN outcome = 'manual-attention' THEN 1 ELSE 0 END), 0) AS manualAttention,
                MAX(evaluated_at) AS lastEvaluatedAt
           FROM gate_runs
          WHERE gate_id = :gate_id
            AND evaluated_at >= :cutoff`,
      )
      .get({ ":gate_id": gateId, ":cutoff": cutoff });
    if (!row) {
      return empty;
    }
    return {
      total: row.total ?? 0,
      pass: row.pass ?? 0,
      fail: row.fail ?? 0,
      retry: row.retry ?? 0,
      manualAttention: row.manualAttention ?? 0,
      lastEvaluatedAt: row.lastEvaluatedAt ?? null,
    };
  } catch {
    // Best-effort read path: a failed query reads as "no activity".
    return empty;
  }
}
|
||||
// Normalize a value to a non-empty string; any other input yields null.
function asStringOrNull(value) {
  if (typeof value !== "string") {
    return null;
  }
  return value.length > 0 ? value : null;
}
|
||||
|
|
|
|||
|
|
@ -7,11 +7,12 @@
|
|||
* Consumer: CI test runner (vitest).
|
||||
*/
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdirSync, rmSync } from "node:fs";
|
||||
import { mkdirSync, readFileSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, beforeEach, describe, it } from "vitest";
|
||||
import { handleSchedule, parseDuration } from "../commands-schedule.js";
|
||||
import { createScheduleStore } from "../schedule/schedule-store.js";
|
||||
|
||||
function mockCtx() {
|
||||
const notifications = [];
|
||||
|
|
@ -92,6 +93,37 @@ describe("handleSchedule", () => {
|
|||
assert.equal(ctx.notifications[0].type, "warning");
|
||||
assert.ok(ctx.notifications[0].msg.includes("Usage:"));
|
||||
});
|
||||
|
||||
it("preserves argv-tokenized command payloads from top-level CLI", async () => {
|
||||
const ctx = mockCtx();
|
||||
await handleSchedule(
|
||||
[
|
||||
"add",
|
||||
"--in",
|
||||
"0m",
|
||||
"--kind",
|
||||
"command",
|
||||
"--auto-dispatch",
|
||||
"--",
|
||||
"node",
|
||||
"-e",
|
||||
"require('fs').writeFileSync('argv-cron.txt','ok')",
|
||||
],
|
||||
ctx,
|
||||
);
|
||||
|
||||
const entries = createScheduleStore(testDir).readEntries("project");
|
||||
assert.equal(entries.length, 1);
|
||||
assert.equal(entries[0].kind, "command");
|
||||
assert.equal(entries[0].auto_dispatch, true);
|
||||
assert.equal(
|
||||
entries[0].payload.command,
|
||||
"node -e 'require('\\''fs'\\'').writeFileSync('\\''argv-cron.txt'\\'','\\''ok'\\'')'",
|
||||
);
|
||||
|
||||
await handleSchedule(["run", entries[0].id.slice(0, 8)], mockCtx());
|
||||
assert.equal(readFileSync(join(testDir, "argv-cron.txt"), "utf-8"), "ok");
|
||||
});
|
||||
});
|
||||
|
||||
describe("list", () => {
|
||||
|
|
|
|||
|
|
@ -2,15 +2,15 @@
|
|||
* Schedule Auto-Dispatch Rule tests.
|
||||
*
|
||||
* Purpose: verify the schedule dispatch rule in auto-dispatch.js:
|
||||
* - never pre-empts active milestone work
|
||||
* - consumes repo-scoped due auto_dispatch commands between autonomous units
|
||||
* - returns null when no matching due entries
|
||||
* - returns stop action with reminder when auto_dispatch=true reminder is due
|
||||
* - dispatches due auto_dispatch prompts as custom-step units
|
||||
* - non-fatal on errors
|
||||
*
|
||||
* Consumer: CI test runner (vitest).
|
||||
*/
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdirSync, rmSync } from "node:fs";
|
||||
import { existsSync, mkdirSync, readFileSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, beforeEach, describe, it } from "vitest";
|
||||
|
|
@ -57,43 +57,69 @@ describe("schedule-dispatch", () => {
|
|||
|
||||
async function runScheduleRule(state, basePath = testDir) {
|
||||
const rule = DISPATCH_RULES.find(
|
||||
(r) => r.name === "schedule (auto_dispatch=true) → notify",
|
||||
(r) => r.name === "schedule auto-dispatch",
|
||||
);
|
||||
assert.ok(rule, "schedule dispatch rule not found");
|
||||
return rule.match({ state, basePath });
|
||||
}
|
||||
|
||||
it("returns null when active milestone exists (never pre-empts)", async () => {
|
||||
const state = {
|
||||
activeMilestone: { id: "M010" },
|
||||
phase: "executing",
|
||||
};
|
||||
const result = await runScheduleRule(state);
|
||||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
it("returns null when no due entries exist", async () => {
|
||||
const state = { activeMilestone: null, phase: "idle" };
|
||||
const result = await runScheduleRule(state);
|
||||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
it("executes due auto_dispatch command from the project root", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
kind: "command",
|
||||
due_at: new Date(Date.now() - 60 * 60 * 1000).toISOString(),
|
||||
auto_dispatch: true,
|
||||
payload: {
|
||||
command:
|
||||
"node -e \"require('fs').writeFileSync('cron-ran.txt', process.cwd())\"",
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const state = {
|
||||
activeMilestone: { id: "M010" },
|
||||
phase: "executing",
|
||||
};
|
||||
const result = await runScheduleRule(state);
|
||||
assert.ok(result);
|
||||
assert.equal(result.action, "skip");
|
||||
assert.equal(existsSync(join(testDir, "cron-ran.txt")), true);
|
||||
assert.equal(readFileSync(join(testDir, "cron-ran.txt"), "utf-8"), testDir);
|
||||
const entries = store.readEntries("project");
|
||||
assert.equal(entries[0].status, "done");
|
||||
assert.equal(entries[0].payload.result_note, "command completed");
|
||||
});
|
||||
|
||||
it("returns null when due entries have auto_dispatch=false", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
kind: "command",
|
||||
due_at: new Date(Date.now() - 60 * 60 * 1000).toISOString(),
|
||||
auto_dispatch: false,
|
||||
payload: {
|
||||
command:
|
||||
"node -e \"require('fs').writeFileSync('should-not-run.txt', 'bad')\"",
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const state = { activeMilestone: null, phase: "idle" };
|
||||
const result = await runScheduleRule(state);
|
||||
assert.equal(result, null);
|
||||
assert.equal(existsSync(join(testDir, "should-not-run.txt")), false);
|
||||
});
|
||||
|
||||
it("returns null when due entries are not reminders", async () => {
|
||||
it("returns null when due entries are passive reminders", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
|
|
@ -109,14 +135,17 @@ describe("schedule-dispatch", () => {
|
|||
assert.equal(result, null);
|
||||
});
|
||||
|
||||
it("returns stop action when auto_dispatch=true reminder is due", async () => {
|
||||
it("pauses when due auto_dispatch command fails and records evidence", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
kind: "command",
|
||||
due_at: new Date(Date.now() - 60 * 60 * 1000).toISOString(),
|
||||
auto_dispatch: true,
|
||||
payload: { message: "Review adoption metrics" },
|
||||
payload: {
|
||||
command: "node -e \"process.stderr.write('boom'); process.exit(9)\"",
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
|
|
@ -124,35 +153,75 @@ describe("schedule-dispatch", () => {
|
|||
const result = await runScheduleRule(state);
|
||||
assert.ok(result);
|
||||
assert.equal(result.action, "stop");
|
||||
assert.ok(result.reason.includes("Review adoption metrics"));
|
||||
assert.ok(result.reason.includes("/sf schedule done"));
|
||||
assert.equal(result.level, "warning");
|
||||
assert.ok(result.reason.includes("failed"));
|
||||
const entries = store.readEntries("project");
|
||||
assert.equal(entries[0].status, "cancelled");
|
||||
assert.ok(entries[0].payload.result_note.includes("boom"));
|
||||
});
|
||||
|
||||
it("picks first entry by due_at when multiple match", async () => {
|
||||
it("picks first command entry by due_at when multiple match", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
id: "SECOND",
|
||||
kind: "command",
|
||||
due_at: "2024-01-02T00:00:00.000Z",
|
||||
auto_dispatch: true,
|
||||
payload: { message: "Second" },
|
||||
payload: {
|
||||
command:
|
||||
"node -e \"require('fs').writeFileSync('picked.txt', 'second')\"",
|
||||
},
|
||||
}),
|
||||
);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
id: "FIRST",
|
||||
kind: "command",
|
||||
due_at: "2024-01-01T00:00:00.000Z",
|
||||
auto_dispatch: true,
|
||||
payload: { message: "First" },
|
||||
payload: {
|
||||
command:
|
||||
"node -e \"require('fs').writeFileSync('picked.txt', 'first')\"",
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const state = { activeMilestone: null, phase: "idle" };
|
||||
const result = await runScheduleRule(state);
|
||||
assert.ok(result);
|
||||
assert.ok(result.reason.includes("First"));
|
||||
assert.equal(result.action, "skip");
|
||||
assert.equal(readFileSync(join(testDir, "picked.txt"), "utf-8"), "first");
|
||||
const entries = store.readEntries("project");
|
||||
assert.equal(entries.find((e) => e.id === "FIRST")?.status, "done");
|
||||
assert.equal(entries.find((e) => e.id === "SECOND")?.status, "pending");
|
||||
});
|
||||
|
||||
it("dispatches due auto_dispatch prompt as a custom step and consumes the trigger", async () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
id: "PROMPT",
|
||||
kind: "prompt",
|
||||
due_at: "2024-01-01T00:00:00.000Z",
|
||||
auto_dispatch: true,
|
||||
payload: { prompt: "refresh schedule diagnostics" },
|
||||
}),
|
||||
);
|
||||
|
||||
const state = { activeMilestone: null, phase: "idle" };
|
||||
const result = await runScheduleRule(state);
|
||||
assert.ok(result);
|
||||
assert.equal(result.action, "dispatch");
|
||||
assert.equal(result.unitType, "custom-step");
|
||||
assert.equal(result.unitId, "schedule/PROMPT");
|
||||
assert.ok(result.prompt.includes("refresh schedule diagnostics"));
|
||||
const entries = store.readEntries("project");
|
||||
assert.equal(entries[0].status, "done");
|
||||
assert.equal(entries[0].payload.result_note, "prompt dispatched");
|
||||
});
|
||||
|
||||
it("is non-fatal on store errors", async () => {
|
||||
|
|
|
|||
|
|
@ -8,7 +8,8 @@
|
|||
* Consumer: CI test runner (vitest).
|
||||
*/
|
||||
import assert from "node:assert/strict";
|
||||
import { mkdirSync, rmSync } from "node:fs";
|
||||
import { execFileSync } from "node:child_process";
|
||||
import { mkdirSync, readFileSync, rmSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
import { afterEach, beforeEach, describe, it } from "vitest";
|
||||
|
|
@ -199,4 +200,81 @@ describe("schedule-e2e round-trip", () => {
|
|||
`Expected readEntries(${count}) to complete in <${thresholdMs}ms, took ${elapsed.toFixed(2)}ms`,
|
||||
);
|
||||
});
|
||||
|
||||
it("2 concurrent appends produce exactly 2 well-formed lines", () => {
|
||||
// Pre-create the runtime directory so child processes don't race on mkdir.
|
||||
const runtimeDir = join(testDir, ".sf", "runtime");
|
||||
mkdirSync(runtimeDir, { recursive: true });
|
||||
const scheduleFile = join(runtimeDir, "schedule.jsonl");
|
||||
|
||||
// Inline child script: generates a ULID and appends one JSON line to the
|
||||
// schedule file via OS-level O_APPEND. Uses CommonJS (no imports needed).
|
||||
const childScript = [
|
||||
"const fs = require('fs');",
|
||||
"const path = require('path');",
|
||||
"const crypto = require('crypto');",
|
||||
"",
|
||||
"const scheduleFile = process.env.SF_SCHEDULE_FILE;",
|
||||
"const PREFIX = '01';",
|
||||
"const CROCKFORD = '0123456789ABCDEFGHJKMNPQRSTVWXYZ';",
|
||||
"",
|
||||
"function encodeBase32(value, length) {",
|
||||
" let result = '';",
|
||||
" for (let i = 0; i < length; i++) {",
|
||||
" result = CROCKFORD[Number(value & 0x1fn)] + result;",
|
||||
" value = value >> 5n;",
|
||||
" }",
|
||||
" return result;",
|
||||
"}",
|
||||
"",
|
||||
"function generateULID() {",
|
||||
" const ts = Date.now();",
|
||||
" const rand = BigInt('0x' + crypto.randomUUID().replace(/-/g, ''));",
|
||||
" return PREFIX + encodeBase32(BigInt(ts), 10) + encodeBase32(rand & ((1n << 80n) - 1n), 16);",
|
||||
"}",
|
||||
"",
|
||||
"const entry = {",
|
||||
" id: generateULID(),",
|
||||
" kind: 'reminder',",
|
||||
" status: 'pending',",
|
||||
" due_at: '2020-01-01T00:00:00.000Z',",
|
||||
" created_at: new Date().toISOString(),",
|
||||
" payload: { message: 'concurrent-test' },",
|
||||
" created_by: 'user',",
|
||||
"}",
|
||||
"",
|
||||
"// OS-level O_APPEND ensures each write is atomic.",
|
||||
"fs.appendFileSync(scheduleFile, JSON.stringify(entry) + '\\n', 'utf-8');",
|
||||
].join("\n");
|
||||
|
||||
// Spawn two OS-level child processes concurrently, each appending one line.
|
||||
const childOpts = {
|
||||
env: { ...process.env, SF_SCHEDULE_FILE: scheduleFile },
|
||||
};
|
||||
execFileSync(process.execPath, ["-e", childScript], childOpts);
|
||||
execFileSync(process.execPath, ["-e", childScript], childOpts);
|
||||
|
||||
const raw = readFileSync(scheduleFile, "utf-8");
|
||||
const lines = raw.split("\n").filter((l) => l.trim() !== "");
|
||||
|
||||
// Assert exactly 2 lines were written.
|
||||
assert.equal(
|
||||
lines.length,
|
||||
2,
|
||||
`Expected 2 lines, got ${lines.length}: ${raw}`,
|
||||
);
|
||||
|
||||
// Both lines must be well-formed JSON.
|
||||
const entries = lines.map((line, i) => {
|
||||
try {
|
||||
return JSON.parse(line);
|
||||
} catch {
|
||||
throw new Error(`Line ${i + 1} is not valid JSON: ${line}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Both IDs must be unique.
|
||||
const ids = entries.map((e) => e.id);
|
||||
assert.notEqual(ids[0], ids[1], "Expected two unique IDs");
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -87,6 +87,7 @@ describe("schedule-launch-banner", () => {
|
|||
const output = captureStderr(() => printScheduleBanner(testDir));
|
||||
assert.ok(output.includes("1 scheduled item due"));
|
||||
assert.ok(output.includes("Review PR"));
|
||||
assert.ok(output.includes("Manage: /sf schedule list"));
|
||||
});
|
||||
|
||||
it("prints a banner for multiple due entries", () => {
|
||||
|
|
@ -112,6 +113,26 @@ describe("schedule-launch-banner", () => {
|
|||
assert.ok(output.includes("Second"));
|
||||
});
|
||||
|
||||
it("labels due auto-dispatch project entries as autonomous work", () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
store.appendEntry(
|
||||
"project",
|
||||
makeEntry({
|
||||
kind: "command",
|
||||
due_at: "2024-01-01T00:00:00.000Z",
|
||||
auto_dispatch: true,
|
||||
payload: { command: 'node -e "process.exit(0)"' },
|
||||
}),
|
||||
);
|
||||
|
||||
const output = captureStderr(() => printScheduleBanner(testDir));
|
||||
assert.ok(output.includes("1 scheduled auto-dispatch item due"));
|
||||
assert.ok(
|
||||
output.includes("autonomous mode will consume project entries"),
|
||||
);
|
||||
assert.equal(output.includes("Manage: /sf schedule list"), false);
|
||||
});
|
||||
|
||||
it("truncates to 3 titles with a +more suffix", () => {
|
||||
const store = createScheduleStore(testDir);
|
||||
for (let i = 0; i < 5; i++) {
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import { tmpdir } from "node:os";
|
|||
import { join } from "node:path";
|
||||
import { afterEach, describe, test } from "vitest";
|
||||
import { preDispatchHealthGate } from "../doctor-proactive.js";
|
||||
import { nativeAddTracked } from "../native-git-bridge.js";
|
||||
import {
|
||||
formatProtectedSnapshotDeletionMessage,
|
||||
listProtectedSnapshotDeletions,
|
||||
|
|
@ -108,4 +109,37 @@ describe("snapshot safety", () => {
|
|||
assert.match(result.reason, /Protected declaration deletions detected/);
|
||||
assert.equal(git(repo, ["log", "--oneline"]).split("\n").length, 1);
|
||||
});
|
||||
|
||||
test("nativeAddTracked_when_tracked_file_deleted_does_not_stage_deletion", () => {
|
||||
const repo = makeRepo();
|
||||
writeFileSync(join(repo, "keep.txt"), "keep");
|
||||
writeFileSync(join(repo, "delete.txt"), "delete");
|
||||
git(repo, ["add", "."]);
|
||||
git(repo, ["commit", "-m", "seed"]);
|
||||
|
||||
// Delete one tracked file and modify another
|
||||
rmSync(join(repo, "delete.txt"));
|
||||
writeFileSync(join(repo, "keep.txt"), "modified");
|
||||
|
||||
// Run the snapshot staging function
|
||||
nativeAddTracked(repo);
|
||||
|
||||
// The modification should be staged
|
||||
const stagedModified = git(repo, [
|
||||
"diff",
|
||||
"--cached",
|
||||
"--name-only",
|
||||
"--diff-filter=M",
|
||||
]);
|
||||
assert.equal(stagedModified, "keep.txt");
|
||||
|
||||
// The deletion should NOT be staged
|
||||
const stagedDeleted = git(repo, [
|
||||
"diff",
|
||||
"--cached",
|
||||
"--name-only",
|
||||
"--diff-filter=D",
|
||||
]);
|
||||
assert.equal(stagedDeleted, "");
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -1 +1,93 @@
|
|||
/**
|
||||
* UOK Contract Types
|
||||
*
|
||||
* Purpose: provide a single source of truth for gate interfaces so that
|
||||
* implementations, consumers, and tests share the same structural expectations.
|
||||
*
|
||||
* Consumer: all UOK gates, gate-runner, and extension verification flows.
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} UokContext
|
||||
* @property {string} basePath — project root path
|
||||
* @property {string} [traceId] — distributed trace identifier
|
||||
* @property {string} [turnId] — current turn identifier
|
||||
* @property {string} [unitType] — e.g. "execute-task", "complete-slice"
|
||||
* @property {string} [unitId] — e.g. "M001/S01/T01"
|
||||
* @property {string} [milestoneId]
|
||||
* @property {string} [sliceId]
|
||||
* @property {string} [taskId]
|
||||
* @property {string} [modelId] — LLM model identifier
|
||||
* @property {string} [provider] — LLM provider name
|
||||
* @property {number} [iteration] — current retry/dispatch iteration
|
||||
* @property {number} [tokenCount] — tokens consumed this turn
|
||||
* @property {number} [costUsd] — estimated cost this turn
|
||||
* @property {number} [durationMs] — turn duration
|
||||
* @property {boolean} [succeeded] — whether the unit succeeded
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} GateResult
|
||||
* @property {"pass"|"fail"|"retry"|"manual-attention"} outcome
|
||||
* @property {string} [failureClass] — "policy" | "verification" | "execution" | "artifact" | "git" | "timeout" | "input" | "closeout" | "manual-attention" | "unknown"
|
||||
* @property {string} rationale — human-readable explanation
|
||||
* @property {string} [findings] — structured output (errors, diffs, etc.)
|
||||
* @property {string} [recommendation] — actionable next step
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} Gate
|
||||
* @property {string} id — unique gate identifier
|
||||
* @property {string} type — "security" | "policy" | "verification" | "learning" | "chaos"
|
||||
* @property {(ctx: UokContext, attempt: number) => Promise<GateResult>|GateResult} execute
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} MessageBusMessage
|
||||
* @property {string} id — unique message identifier
|
||||
* @property {string} from — sender agent id
|
||||
* @property {string} to — recipient agent id
|
||||
* @property {string} body — message payload
|
||||
* @property {Record<string, unknown>} [metadata]
|
||||
* @property {string} sentAt — ISO timestamp
|
||||
* @property {string} [receivedAt] — ISO timestamp
|
||||
* @property {boolean} [read] — whether the message has been read
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} MessageBusOptions
|
||||
* @property {string} basePath — project root for durable storage
|
||||
* @property {number} [retentionDays] — how long to keep messages (default 7)
|
||||
* @property {number} [maxInboxSize] — per-agent inbox limit (default 1000)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} ChaosMonkeyOptions
|
||||
* @property {number} [latencyProbability] — chance to inject latency (default 0.05)
|
||||
* @property {number} [partialFailureProbability] — chance to throw non-fatal error (default 0.03)
|
||||
* @property {number} [diskStressProbability] — chance to write temp stress file (default 0.01)
|
||||
* @property {number} [maxLatencyMs] — max injected latency (default 5000)
|
||||
* @property {boolean} [active] — whether monkey is enabled (default false)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} CostGuardOptions
|
||||
* @property {number} [maxUsdPerUnit] — abort threshold for a single unit (default 5.0)
|
||||
* @property {number} [maxUsdPerHour] — abort threshold for rolling 1h window (default 20.0)
|
||||
* @property {number} [highTierFailureThreshold] — consecutive high-tier failures before block (default 2)
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} MultiPackageOptions
|
||||
* @property {number} [timeoutMs] — timeout for verification commands (default 120000)
|
||||
* @property {string[]} [additionalChecks] — extra npm scripts to run (default ["typecheck"])
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Object} OutcomeLearningOptions
|
||||
* @property {number} [minSampleSize] — minimum outcomes before pattern detection (default 3)
|
||||
* @property {number} [failureRateThreshold] — failure rate that triggers recommendation (default 0.6)
|
||||
* @property {number} [lookbackHours] — how far back to query (default 168)
|
||||
*/
|
||||
|
||||
export {};
|
||||
|
|
|
|||
|
|
@ -5,6 +5,10 @@ import { test } from "vitest";
|
|||
|
||||
test("cli.ts routes top-level schedule before interactive TUI", () => {
|
||||
const cliSource = readFileSync(join(__dirname, "..", "cli.ts"), "utf-8");
|
||||
const loaderSource = readFileSync(
|
||||
join(__dirname, "..", "loader.ts"),
|
||||
"utf-8",
|
||||
);
|
||||
const scheduleBranch = cliSource.indexOf(
|
||||
'if (cliFlags.messages[0] === "schedule")',
|
||||
);
|
||||
|
|
@ -25,7 +29,15 @@ test("cli.ts routes top-level schedule before interactive TUI", () => {
|
|||
"top-level schedule branch must reuse the schedule handler",
|
||||
);
|
||||
assert.ok(
|
||||
cliSource.includes("process.argv.slice(3).join"),
|
||||
"schedule branch must pass raw argv tail so command-specific flags survive top-level parsing",
|
||||
cliSource.includes("process.argv.slice(3)"),
|
||||
"schedule branch must pass argv tokens so command payloads survive top-level parsing",
|
||||
);
|
||||
assert.ok(
|
||||
loaderSource.includes('firstArg !== "schedule"'),
|
||||
"loader schedule banner must stay quiet for top-level schedule commands",
|
||||
);
|
||||
assert.ok(
|
||||
loaderSource.includes("scheduled auto-dispatch item"),
|
||||
"loader banner must distinguish autonomous schedule entries from passive reminders",
|
||||
);
|
||||
});
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue