sf snapshot: pre-dispatch, uncommitted changes after 41m inactivity

Mikael Hugo 2026-05-04 21:59:01 +02:00
parent b8a5a01de4
commit 4053819854
43 changed files with 3436 additions and 3103 deletions


@@ -1,6 +1,6 @@
{
"last_session_id": "67e970c5-7790-4d38-ba0b-527b9f349c49",
"last_event_key": "67e970c5-7790-4d38-ba0b-527b9f349c49:transcript:70f7463d95fcfa9de1ead358c8fab10cd302abfc43cc274eb68fa952a0c97675",
"last_event_key": "67e970c5-7790-4d38-ba0b-527b9f349c49:transcript:01389baa63d7cd14460c1725484e72f23651a4b02cc12b87f3b6f1bf6043a8d0",
"last_prompted_session_id": "",
"last_reason": "short-session",
"last_prompted_at": "",
@@ -8,5 +8,5 @@
"last_actionable_message_count": 0,
"deep_interview_lock_active": false,
"deep_interview_lock_source": "/home/mhugo/code/singularity-forge/.omg/state/deep-interview.json",
"updated_at": "2026-05-04T17:09:50.283Z"
"updated_at": "2026-05-04T19:57:31.227Z"
}


@@ -9,11 +9,17 @@ node_modules/**
**/__pycache__/**
*.pyc
*.egg-info/**
build/**
dist/**
target/**
vendor/**
coverage/**
**/build/**
**/dist/**
**/target/**
**/vendor/**
**/coverage/**
.cache/**
tmp/**
*.log
dist-test/**
packages/*/dist/**
packages/*/target/**
rust-engine/target/**
rust-engine/addon/*.node
**/tsconfig.tsbuildinfo
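Why the `**/` prefixes above matter: with glob semantics where `*` does not cross `/` (as in the globset-based matcher elsewhere in this commit), an anchored pattern like `dist/**` only ignores a top-level `dist` directory. A minimal sketch, assuming globset and using made-up paths:

use globset::GlobBuilder;

fn matches(pattern: &str, path: &str) -> bool {
    GlobBuilder::new(pattern)
        .literal_separator(true) // `*` stops at `/`; `**` still spans directories
        .build()
        .unwrap()
        .compile_matcher()
        .is_match(path)
}

fn main() {
    assert!(matches("dist/**", "dist/index.js"));
    // The anchored form misses nested build output...
    assert!(!matches("dist/**", "packages/app/dist/index.js"));
    // ...which the `**/` prefix picks up.
    assert!(matches("**/dist/**", "packages/app/dist/index.js"));
}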

File diff suppressed because it is too large


@@ -7,13 +7,13 @@ use napi::bindgen_prelude::*;
/// Normalize a raw glob string: fix path separators, optionally prepend `**/`
/// for recursive matching, and close any unclosed `{` alternation groups.
pub fn build_glob_pattern(glob: &str, recursive: bool) -> String {
let normalized = glob.replace('\\', "/");
let pattern = if !recursive || normalized.contains('/') || normalized.starts_with("**") {
normalized
} else {
format!("**/{normalized}")
};
fix_unclosed_braces(pattern)
let normalized = glob.replace('\\', "/");
let pattern = if !recursive || normalized.contains('/') || normalized.starts_with("**") {
normalized
} else {
format!("**/{normalized}")
};
fix_unclosed_braces(pattern)
}
/// Compile a glob pattern string into a [`GlobSet`].
@@ -21,25 +21,25 @@ pub fn build_glob_pattern(glob: &str, recursive: bool) -> String {
/// When `recursive` is true, simple patterns (no path separators, no leading
/// `**`) are automatically prefixed with `**/`.
pub fn compile_glob(glob: &str, recursive: bool) -> Result<GlobSet> {
let mut builder = GlobSetBuilder::new();
let pattern = build_glob_pattern(glob, recursive);
let glob = GlobBuilder::new(&pattern)
.literal_separator(true)
.build()
.map_err(|err| Error::from_reason(format!("Invalid glob pattern: {err}")))?;
builder.add(glob);
builder
.build()
.map_err(|err| Error::from_reason(format!("Failed to build glob matcher: {err}")))
let mut builder = GlobSetBuilder::new();
let pattern = build_glob_pattern(glob, recursive);
let glob = GlobBuilder::new(&pattern)
.literal_separator(true)
.build()
.map_err(|err| Error::from_reason(format!("Invalid glob pattern: {err}")))?;
builder.add(glob);
builder
.build()
.map_err(|err| Error::from_reason(format!("Failed to build glob matcher: {err}")))
}
/// Like [`compile_glob`], but accepts an `Option<&str>` — returns `Ok(None)`
/// when the input is `None`, empty, or whitespace-only.
pub fn try_compile_glob(glob: Option<&str>, recursive: bool) -> Result<Option<GlobSet>> {
let Some(glob) = glob.map(str::trim).filter(|v| !v.is_empty()) else {
return Ok(None);
};
compile_glob(glob, recursive).map(Some)
let Some(glob) = glob.map(str::trim).filter(|v| !v.is_empty()) else {
return Ok(None);
};
compile_glob(glob, recursive).map(Some)
}
/// Close unclosed `{` alternation groups in a glob pattern.
@@ -47,70 +47,70 @@ pub fn try_compile_glob(glob: Option<&str>, recursive: bool) -> Result<Option<Gl
/// LLMs occasionally produce patterns like `*.{ts,js` without the closing `}`.
/// Rather than failing, we append the missing braces.
fn fix_unclosed_braces(pattern: String) -> String {
let opens = pattern.chars().filter(|&c| c == '{').count();
let closes = pattern.chars().filter(|&c| c == '}').count();
if opens > closes {
let mut fixed = pattern;
for _ in 0..(opens - closes) {
fixed.push('}');
}
fixed
} else {
pattern
}
let opens = pattern.chars().filter(|&c| c == '{').count();
let closes = pattern.chars().filter(|&c| c == '}').count();
if opens > closes {
let mut fixed = pattern;
for _ in 0..(opens - closes) {
fixed.push('}');
}
fixed
} else {
pattern
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::*;
#[test]
fn simple_pattern_gets_recursive_prefix() {
assert_eq!(build_glob_pattern("*.ts", true), "**/*.ts");
}
#[test]
fn simple_pattern_gets_recursive_prefix() {
assert_eq!(build_glob_pattern("*.ts", true), "**/*.ts");
}
#[test]
fn pattern_with_path_stays_as_is() {
assert_eq!(build_glob_pattern("src/*.ts", true), "src/*.ts");
}
#[test]
fn pattern_with_path_stays_as_is() {
assert_eq!(build_glob_pattern("src/*.ts", true), "src/*.ts");
}
#[test]
fn already_recursive_pattern_unchanged() {
assert_eq!(build_glob_pattern("**/*.rs", true), "**/*.rs");
}
#[test]
fn already_recursive_pattern_unchanged() {
assert_eq!(build_glob_pattern("**/*.rs", true), "**/*.rs");
}
#[test]
fn non_recursive_keeps_simple_pattern() {
assert_eq!(build_glob_pattern("*.ts", false), "*.ts");
}
#[test]
fn non_recursive_keeps_simple_pattern() {
assert_eq!(build_glob_pattern("*.ts", false), "*.ts");
}
#[test]
fn backslashes_normalized() {
assert_eq!(build_glob_pattern("src\\**\\*.ts", true), "src/**/*.ts");
}
#[test]
fn backslashes_normalized() {
assert_eq!(build_glob_pattern("src\\**\\*.ts", true), "src/**/*.ts");
}
#[test]
fn unclosed_brace_gets_closed() {
assert_eq!(build_glob_pattern("*.{ts,tsx,js", true), "**/*.{ts,tsx,js}");
}
#[test]
fn unclosed_brace_gets_closed() {
assert_eq!(build_glob_pattern("*.{ts,tsx,js", true), "**/*.{ts,tsx,js}");
}
#[test]
fn deeply_unclosed_braces_all_closed() {
assert_eq!(build_glob_pattern("{a,{b,c}", true), "**/{a,{b,c}}");
}
#[test]
fn deeply_unclosed_braces_all_closed() {
assert_eq!(build_glob_pattern("{a,{b,c}", true), "**/{a,{b,c}}");
}
#[test]
fn balanced_braces_unchanged() {
assert_eq!(build_glob_pattern("*.{ts,js}", true), "**/*.{ts,js}");
}
#[test]
fn balanced_braces_unchanged() {
assert_eq!(build_glob_pattern("*.{ts,js}", true), "**/*.{ts,js}");
}
#[test]
fn compile_glob_accepts_valid_pattern() {
assert!(compile_glob("*.ts", true).is_ok());
}
#[test]
fn compile_glob_accepts_valid_pattern() {
assert!(compile_glob("*.ts", true).is_ok());
}
#[test]
fn compile_glob_fixes_unclosed_brace() {
assert!(compile_glob("*.{ts,tsx,js", true).is_ok());
}
#[test]
fn compile_glob_fixes_unclosed_brace() {
assert!(compile_glob("*.{ts,tsx,js", true).is_ok());
}
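// Hypothetical end-to-end sketch (not part of this commit's diff): the
// unclosed brace is repaired, the `**/` prefix is added, and the compiled
// set respects literal path separators. Paths are illustrative only.
#[test]
fn sketch_end_to_end_usage() {
    let set = compile_glob("*.{ts,js", true).unwrap();
    assert!(set.is_match("src/app.ts"));
    assert!(!set.is_match("src/app.rs"));
    // Whitespace-only input means "no filter at all".
    assert!(try_compile_glob(Some("   "), true).unwrap().is_none());
}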
}


@@ -8,97 +8,95 @@ mod parsers;
use std::{borrow::Cow, collections::HashMap, fmt, path::Path};
use ast_grep_core::{
Doc, Language, Node,
matcher::{KindMatcher, Pattern, PatternBuilder, PatternError},
meta_var::MetaVariable,
tree_sitter::{LanguageExt, StrDoc, TSLanguage, TSRange},
matcher::{KindMatcher, Pattern, PatternBuilder, PatternError},
meta_var::MetaVariable,
tree_sitter::{LanguageExt, StrDoc, TSLanguage, TSRange},
Doc, Language, Node,
};
/// Implements a stub language (no expando / `pre_process_pattern` needed).
/// Use when the language grammar accepts `$VAR` as valid identifiers.
macro_rules! impl_lang {
($lang:ident, $func:ident) => {
#[derive(Clone, Copy, Debug)]
pub struct $lang;
impl Language for $lang {
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
($lang:ident, $func:ident) => {
#[derive(Clone, Copy, Debug)]
pub struct $lang;
impl Language for $lang {
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self
.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
}
impl LanguageExt for $lang {
fn get_ts_language(&self) -> TSLanguage {
parsers::$func().into()
}
}
};
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
}
impl LanguageExt for $lang {
fn get_ts_language(&self) -> TSLanguage {
parsers::$func().into()
}
}
};
}
fn pre_process_pattern(expando: char, query: &str) -> Cow<'_, str> {
let mut ret = Vec::with_capacity(query.len());
let mut dollar_count = 0;
for c in query.chars() {
if c == '$' {
dollar_count += 1;
continue;
}
let need_replace = matches!(c, 'A'..='Z' | '_') || dollar_count == 3;
let sigil = if need_replace { expando } else { '$' };
ret.extend(std::iter::repeat_n(sigil, dollar_count));
dollar_count = 0;
ret.push(c);
}
let sigil = if dollar_count == 3 { expando } else { '$' };
ret.extend(std::iter::repeat_n(sigil, dollar_count));
Cow::Owned(ret.into_iter().collect())
let mut ret = Vec::with_capacity(query.len());
let mut dollar_count = 0;
for c in query.chars() {
if c == '$' {
dollar_count += 1;
continue;
}
let need_replace = matches!(c, 'A'..='Z' | '_') || dollar_count == 3;
let sigil = if need_replace { expando } else { '$' };
ret.extend(std::iter::repeat_n(sigil, dollar_count));
dollar_count = 0;
ret.push(c);
}
let sigil = if dollar_count == 3 { expando } else { '$' };
ret.extend(std::iter::repeat_n(sigil, dollar_count));
Cow::Owned(ret.into_iter().collect())
}
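// Worked example (illustrative, not from this diff): with expando char 'z',
// uppercase metavariables and `$$$` spreads have their `$` sigils rewritten
// so the underlying grammar sees plain identifiers:
//
//     pre_process_pattern('z', "$A.method($$$ARGS)") == "zA.method(zzzARGS)"
//
// while lowercase placeholders such as "$foo" are left untouched, since only
// `A-Z`, `_`, and triple-dollar runs trigger the replacement.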
/// Implements a language with `expando_char` / `pre_process_pattern`.
/// Use when the language does NOT accept `$` as a valid identifier character.
macro_rules! impl_lang_expando {
($lang:ident, $func:ident, $char:expr) => {
#[derive(Clone, Copy, Debug)]
pub struct $lang;
impl Language for $lang {
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
($lang:ident, $func:ident, $char:expr) => {
#[derive(Clone, Copy, Debug)]
pub struct $lang;
impl Language for $lang {
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self
.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn expando_char(&self) -> char {
$char
}
fn expando_char(&self) -> char {
$char
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
pre_process_pattern(self.expando_char(), query)
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
pre_process_pattern(self.expando_char(), query)
}
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
}
impl LanguageExt for $lang {
fn get_ts_language(&self) -> TSLanguage {
parsers::$func().into()
}
}
};
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
}
impl LanguageExt for $lang {
fn get_ts_language(&self) -> TSLanguage {
parsers::$func().into()
}
}
};
}
// ── Customized languages with expando_char ──────────────────────────────
@@ -154,93 +152,97 @@ impl_lang!(Regex, language_regex);
pub struct Html;
impl Language for Html {
fn expando_char(&self) -> char {
'z'
}
fn expando_char(&self) -> char {
'z'
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
pre_process_pattern(self.expando_char(), query)
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
pre_process_pattern(self.expando_char(), query)
}
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
fn kind_to_id(&self, kind: &str) -> u16 {
self.get_ts_language().id_for_node_kind(kind, true)
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self
.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn field_to_id(&self, field: &str) -> Option<u16> {
self.get_ts_language()
.field_id_for_name(field)
.map(|f| f.get())
}
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
fn build_pattern(&self, builder: &PatternBuilder) -> Result<Pattern, PatternError> {
builder.build(|src| StrDoc::try_new(src, *self))
}
}
impl LanguageExt for Html {
fn get_ts_language(&self) -> TSLanguage {
parsers::language_html()
}
fn get_ts_language(&self) -> TSLanguage {
parsers::language_html()
}
fn injectable_languages(&self) -> Option<&'static [&'static str]> {
Some(&["css", "js", "ts", "tsx", "scss", "less", "stylus", "coffee"])
}
fn injectable_languages(&self) -> Option<&'static [&'static str]> {
Some(&["css", "js", "ts", "tsx", "scss", "less", "stylus", "coffee"])
}
fn extract_injections<L: LanguageExt>(
&self,
root: Node<StrDoc<L>>,
) -> HashMap<String, Vec<TSRange>> {
let lang = root.lang();
let mut map = HashMap::new();
let matcher = KindMatcher::new("script_element", lang.clone());
for script in root.find_all(matcher) {
let injected = find_html_lang(&script).unwrap_or_else(|| "js".into());
let content = script.children().find(|c| c.kind() == "raw_text");
if let Some(content) = content {
map.entry(injected)
.or_insert_with(Vec::new)
.push(node_to_range(&content));
}
}
let matcher = KindMatcher::new("style_element", lang.clone());
for style in root.find_all(matcher) {
let injected = find_html_lang(&style).unwrap_or_else(|| "css".into());
let content = style.children().find(|c| c.kind() == "raw_text");
if let Some(content) = content {
map.entry(injected)
.or_insert_with(Vec::new)
.push(node_to_range(&content));
}
}
map
}
fn extract_injections<L: LanguageExt>(
&self,
root: Node<StrDoc<L>>,
) -> HashMap<String, Vec<TSRange>> {
let lang = root.lang();
let mut map = HashMap::new();
let matcher = KindMatcher::new("script_element", lang.clone());
for script in root.find_all(matcher) {
let injected = find_html_lang(&script).unwrap_or_else(|| "js".into());
let content = script.children().find(|c| c.kind() == "raw_text");
if let Some(content) = content {
map.entry(injected)
.or_insert_with(Vec::new)
.push(node_to_range(&content));
}
}
let matcher = KindMatcher::new("style_element", lang.clone());
for style in root.find_all(matcher) {
let injected = find_html_lang(&style).unwrap_or_else(|| "css".into());
let content = style.children().find(|c| c.kind() == "raw_text");
if let Some(content) = content {
map.entry(injected)
.or_insert_with(Vec::new)
.push(node_to_range(&content));
}
}
map
}
}
fn find_html_lang<D: Doc>(node: &Node<D>) -> Option<String> {
let html = node.lang();
let attr_matcher = KindMatcher::new("attribute", html.clone());
let name_matcher = KindMatcher::new("attribute_name", html.clone());
let val_matcher = KindMatcher::new("attribute_value", html.clone());
node.find_all(attr_matcher).find_map(|attr| {
let name = attr.find(&name_matcher)?;
if name.text() != "lang" {
return None;
}
let val = attr.find(&val_matcher)?;
Some(val.text().to_string())
})
let html = node.lang();
let attr_matcher = KindMatcher::new("attribute", html.clone());
let name_matcher = KindMatcher::new("attribute_name", html.clone());
let val_matcher = KindMatcher::new("attribute_value", html.clone());
node.find_all(attr_matcher).find_map(|attr| {
let name = attr.find(&name_matcher)?;
if name.text() != "lang" {
return None;
}
let val = attr.find(&val_matcher)?;
Some(val.text().to_string())
})
}
fn node_to_range<D: Doc>(node: &Node<D>) -> TSRange {
let r = node.range();
let start = node.start_pos();
let sp = start.byte_point();
let sp = tree_sitter::Point::new(sp.0, sp.1);
let end = node.end_pos();
let ep = end.byte_point();
let ep = tree_sitter::Point::new(ep.0, ep.1);
TSRange { start_byte: r.start, end_byte: r.end, start_point: sp, end_point: ep }
let r = node.range();
let start = node.start_pos();
let sp = start.byte_point();
let sp = tree_sitter::Point::new(sp.0, sp.1);
let end = node.end_pos();
let ep = end.byte_point();
let ep = tree_sitter::Point::new(ep.0, ep.1);
TSRange {
start_byte: r.start,
end_byte: r.end,
start_point: sp,
end_point: ep,
}
}
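// Worked example of the injection extraction above (illustrative): given
//
//     <script lang="ts">const x: number = 1;</script>
//     <style>body { margin: 0 }</style>
//
// the raw_text range of the <script> element is recorded under the "ts" key
// (defaulting to "js" when no lang attribute is present), and the <style>
// contents are recorded under "css".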
// ── SupportLang enum ────────────────────────────────────────────────────
@@ -248,106 +250,106 @@ fn node_to_range<D: Doc>(node: &Node<D>) -> TSRange {
/// All supported languages for ast-grep structural search/replace.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum SupportLang {
Bash,
C,
Cpp,
CSharp,
Css,
Diff,
Elixir,
Go,
Haskell,
Hcl,
Html,
Java,
JavaScript,
Json,
Julia,
Kotlin,
Lua,
Make,
Markdown,
Nix,
ObjC,
Odin,
Php,
Python,
Regex,
Ruby,
Rust,
Scala,
Solidity,
Starlark,
Swift,
Toml,
Tsx,
TypeScript,
Verilog,
Xml,
Yaml,
Zig,
Bash,
C,
Cpp,
CSharp,
Css,
Diff,
Elixir,
Go,
Haskell,
Hcl,
Html,
Java,
JavaScript,
Json,
Julia,
Kotlin,
Lua,
Make,
Markdown,
Nix,
ObjC,
Odin,
Php,
Python,
Regex,
Ruby,
Rust,
Scala,
Solidity,
Starlark,
Swift,
Toml,
Tsx,
TypeScript,
Verilog,
Xml,
Yaml,
Zig,
}
impl SupportLang {
pub const fn all_langs() -> &'static [Self] {
use SupportLang::*;
&[
Bash, C, Cpp, CSharp, Css, Diff, Elixir, Go, Haskell, Hcl, Html, Java, JavaScript, Json,
Julia, Kotlin, Lua, Make, Markdown, Nix, ObjC, Odin, Php, Python, Regex, Ruby, Rust,
Scala, Solidity, Starlark, Swift, Toml, Tsx, TypeScript, Verilog, Xml, Yaml, Zig,
]
}
pub const fn all_langs() -> &'static [Self] {
use SupportLang::*;
&[
Bash, C, Cpp, CSharp, Css, Diff, Elixir, Go, Haskell, Hcl, Html, Java, JavaScript,
Json, Julia, Kotlin, Lua, Make, Markdown, Nix, ObjC, Odin, Php, Python, Regex, Ruby,
Rust, Scala, Solidity, Starlark, Swift, Toml, Tsx, TypeScript, Verilog, Xml, Yaml, Zig,
]
}
/// The canonical lowercase name used as a stable key in alias maps,
/// file-type inference results, and error messages.
pub const fn canonical_name(self) -> &'static str {
match self {
Self::Bash => "bash",
Self::C => "c",
Self::Cpp => "cpp",
Self::CSharp => "csharp",
Self::Css => "css",
Self::Diff => "diff",
Self::Elixir => "elixir",
Self::Go => "go",
Self::Haskell => "haskell",
Self::Hcl => "hcl",
Self::Html => "html",
Self::Java => "java",
Self::JavaScript => "javascript",
Self::Json => "json",
Self::Julia => "julia",
Self::Kotlin => "kotlin",
Self::Lua => "lua",
Self::Make => "make",
Self::Markdown => "markdown",
Self::Nix => "nix",
Self::ObjC => "objc",
Self::Odin => "odin",
Self::Php => "php",
Self::Python => "python",
Self::Regex => "regex",
Self::Ruby => "ruby",
Self::Rust => "rust",
Self::Scala => "scala",
Self::Solidity => "solidity",
Self::Starlark => "starlark",
Self::Swift => "swift",
Self::Toml => "toml",
Self::Tsx => "tsx",
Self::TypeScript => "typescript",
Self::Verilog => "verilog",
Self::Xml => "xml",
Self::Yaml => "yaml",
Self::Zig => "zig",
}
}
/// The canonical lowercase name used as a stable key in alias maps,
/// file-type inference results, and error messages.
pub const fn canonical_name(self) -> &'static str {
match self {
Self::Bash => "bash",
Self::C => "c",
Self::Cpp => "cpp",
Self::CSharp => "csharp",
Self::Css => "css",
Self::Diff => "diff",
Self::Elixir => "elixir",
Self::Go => "go",
Self::Haskell => "haskell",
Self::Hcl => "hcl",
Self::Html => "html",
Self::Java => "java",
Self::JavaScript => "javascript",
Self::Json => "json",
Self::Julia => "julia",
Self::Kotlin => "kotlin",
Self::Lua => "lua",
Self::Make => "make",
Self::Markdown => "markdown",
Self::Nix => "nix",
Self::ObjC => "objc",
Self::Odin => "odin",
Self::Php => "php",
Self::Python => "python",
Self::Regex => "regex",
Self::Ruby => "ruby",
Self::Rust => "rust",
Self::Scala => "scala",
Self::Solidity => "solidity",
Self::Starlark => "starlark",
Self::Swift => "swift",
Self::Toml => "toml",
Self::Tsx => "tsx",
Self::TypeScript => "typescript",
Self::Verilog => "verilog",
Self::Xml => "xml",
Self::Yaml => "yaml",
Self::Zig => "zig",
}
}
}
impl fmt::Display for SupportLang {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{self:?}")
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{self:?}")
}
}
// ── Dispatch macro ──────────────────────────────────────────────────────
@@ -408,104 +410,104 @@ macro_rules! impl_lang_method {
}
impl Language for SupportLang {
impl_lang_method!(kind_to_id, (kind: &str) => u16);
impl_lang_method!(kind_to_id, (kind: &str) => u16);
impl_lang_method!(field_to_id, (field: &str) => Option<u16>);
impl_lang_method!(field_to_id, (field: &str) => Option<u16>);
impl_lang_method!(meta_var_char, () => char);
impl_lang_method!(meta_var_char, () => char);
impl_lang_method!(expando_char, () => char);
impl_lang_method!(expando_char, () => char);
impl_lang_method!(extract_meta_var, (source: &str) => Option<MetaVariable>);
impl_lang_method!(extract_meta_var, (source: &str) => Option<MetaVariable>);
impl_lang_method!(build_pattern, (builder: &PatternBuilder) => Result<Pattern, PatternError>);
impl_lang_method!(build_pattern, (builder: &PatternBuilder) => Result<Pattern, PatternError>);
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
execute_lang_method! { self, pre_process_pattern, query }
}
fn pre_process_pattern<'q>(&self, query: &'q str) -> Cow<'q, str> {
execute_lang_method! { self, pre_process_pattern, query }
}
fn from_path<P: AsRef<Path>>(path: P) -> Option<Self> {
from_extension(path.as_ref())
}
fn from_path<P: AsRef<Path>>(path: P) -> Option<Self> {
from_extension(path.as_ref())
}
}
impl LanguageExt for SupportLang {
impl_lang_method!(get_ts_language, () => TSLanguage);
impl_lang_method!(get_ts_language, () => TSLanguage);
impl_lang_method!(injectable_languages, () => Option<&'static [&'static str]>);
impl_lang_method!(injectable_languages, () => Option<&'static [&'static str]>);
fn extract_injections<L: LanguageExt>(
&self,
root: Node<StrDoc<L>>,
) -> HashMap<String, Vec<TSRange>> {
match self {
Self::Html => Html.extract_injections(root),
_ => HashMap::new(),
}
}
fn extract_injections<L: LanguageExt>(
&self,
root: Node<StrDoc<L>>,
) -> HashMap<String, Vec<TSRange>> {
match self {
Self::Html => Html.extract_injections(root),
_ => HashMap::new(),
}
}
}
// ── File extension mapping ──────────────────────────────────────────────
const fn extensions(lang: SupportLang) -> &'static [&'static str] {
use SupportLang::*;
match lang {
Bash => {
&["bash", "bats", "cgi", "command", "env", "fcgi", "ksh", "sh", "tmux", "tool", "zsh"]
},
C => &["c", "h"],
Cpp => &["cc", "hpp", "cpp", "c++", "hh", "cxx", "cu", "ino"],
CSharp => &["cs"],
Css => &["css", "scss"],
Diff => &["diff", "patch"],
Elixir => &["ex", "exs"],
Go => &["go"],
Haskell => &["hs"],
Hcl => &["hcl", "tf", "tfvars"],
Html => &["html", "htm", "xhtml"],
Java => &["java"],
JavaScript => &["cjs", "js", "mjs", "jsx"],
Json => &["json"],
Julia => &["jl"],
Kotlin => &["kt", "ktm", "kts"],
Lua => &["lua"],
Make => &["mk", "mak"],
Markdown => &["md", "markdown", "mdx"],
Nix => &["nix"],
ObjC => &["m"],
Odin => &["odin"],
Php => &["php"],
Python => &["py", "py3", "pyi", "bzl"],
Regex => &[], // regex has no file extension
Ruby => &["rb", "rbw", "gemspec"],
Rust => &["rs"],
Scala => &["scala", "sc", "sbt"],
Solidity => &["sol"],
Starlark => &["star", "bzl"],
Swift => &["swift"],
Toml => &["toml"],
Tsx => &["tsx"],
TypeScript => &["ts", "cts", "mts"],
Verilog => &["v", "sv", "svh", "vh"],
Xml => &["xml", "xsl", "xslt", "svg", "plist"],
Yaml => &["yaml", "yml"],
Zig => &["zig"],
}
use SupportLang::*;
match lang {
Bash => &[
"bash", "bats", "cgi", "command", "env", "fcgi", "ksh", "sh", "tmux", "tool", "zsh",
],
C => &["c", "h"],
Cpp => &["cc", "hpp", "cpp", "c++", "hh", "cxx", "cu", "ino"],
CSharp => &["cs"],
Css => &["css", "scss"],
Diff => &["diff", "patch"],
Elixir => &["ex", "exs"],
Go => &["go"],
Haskell => &["hs"],
Hcl => &["hcl", "tf", "tfvars"],
Html => &["html", "htm", "xhtml"],
Java => &["java"],
JavaScript => &["cjs", "js", "mjs", "jsx"],
Json => &["json"],
Julia => &["jl"],
Kotlin => &["kt", "ktm", "kts"],
Lua => &["lua"],
Make => &["mk", "mak"],
Markdown => &["md", "markdown", "mdx"],
Nix => &["nix"],
ObjC => &["m"],
Odin => &["odin"],
Php => &["php"],
Python => &["py", "py3", "pyi", "bzl"],
Regex => &[], // regex has no file extension
Ruby => &["rb", "rbw", "gemspec"],
Rust => &["rs"],
Scala => &["scala", "sc", "sbt"],
Solidity => &["sol"],
Starlark => &["star", "bzl"],
Swift => &["swift"],
Toml => &["toml"],
Tsx => &["tsx"],
TypeScript => &["ts", "cts", "mts"],
Verilog => &["v", "sv", "svh", "vh"],
Xml => &["xml", "xsl", "xslt", "svg", "plist"],
Yaml => &["yaml", "yml"],
Zig => &["zig"],
}
}
/// Guess language from file extension.
fn from_extension(path: &Path) -> Option<SupportLang> {
let ext = path.extension()?.to_str()?;
// Special cases: Makefile has no extension
if ext.is_empty() {
let name = path.file_name()?.to_str()?;
return match name {
"Makefile" | "makefile" | "GNUmakefile" => Some(SupportLang::Make),
_ => None,
};
}
SupportLang::all_langs()
.iter()
.copied()
.find(|&l| extensions(l).contains(&ext))
let ext = path.extension()?.to_str()?;
// Special cases: Makefile has no extension
if ext.is_empty() {
let name = path.file_name()?.to_str()?;
return match name {
"Makefile" | "makefile" | "GNUmakefile" => Some(SupportLang::Make),
_ => None,
};
}
SupportLang::all_langs()
.iter()
.copied()
.find(|&l| extensions(l).contains(&ext))
}
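// Hypothetical lookup sketch (not part of this commit): from_path defers to
// from_extension, which walks all_langs() in declaration order, so an
// extension claimed by two languages resolves to whichever comes first.
#[cfg(test)]
mod extension_lookup_sketch {
    use super::*;

    #[test]
    fn sketch_extension_lookup() {
        assert_eq!(SupportLang::from_path("src/lib.rs"), Some(SupportLang::Rust));
        assert_eq!(SupportLang::from_path("app/Main.tsx"), Some(SupportLang::Tsx));
        // "bzl" is listed under both Python and Starlark; Python wins because
        // it appears first in all_langs().
        assert_eq!(from_extension(Path::new("defs.bzl")), Some(SupportLang::Python));
        // A file with no extension yields None here.
        assert_eq!(from_extension(Path::new("README")), None);
    }
}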


@@ -3,116 +3,116 @@
use ast_grep_core::tree_sitter::TSLanguage;
pub fn language_bash() -> TSLanguage {
tree_sitter_bash::LANGUAGE.into()
tree_sitter_bash::LANGUAGE.into()
}
pub fn language_c() -> TSLanguage {
tree_sitter_c::LANGUAGE.into()
tree_sitter_c::LANGUAGE.into()
}
pub fn language_cpp() -> TSLanguage {
tree_sitter_cpp::LANGUAGE.into()
tree_sitter_cpp::LANGUAGE.into()
}
pub fn language_c_sharp() -> TSLanguage {
tree_sitter_c_sharp::LANGUAGE.into()
tree_sitter_c_sharp::LANGUAGE.into()
}
pub fn language_css() -> TSLanguage {
tree_sitter_css::LANGUAGE.into()
tree_sitter_css::LANGUAGE.into()
}
pub fn language_diff() -> TSLanguage {
tree_sitter_diff::LANGUAGE.into()
tree_sitter_diff::LANGUAGE.into()
}
pub fn language_elixir() -> TSLanguage {
tree_sitter_elixir::LANGUAGE.into()
tree_sitter_elixir::LANGUAGE.into()
}
pub fn language_go() -> TSLanguage {
tree_sitter_go::LANGUAGE.into()
tree_sitter_go::LANGUAGE.into()
}
pub fn language_haskell() -> TSLanguage {
tree_sitter_haskell::LANGUAGE.into()
tree_sitter_haskell::LANGUAGE.into()
}
pub fn language_hcl() -> TSLanguage {
tree_sitter_hcl::LANGUAGE.into()
tree_sitter_hcl::LANGUAGE.into()
}
pub fn language_html() -> TSLanguage {
tree_sitter_html::LANGUAGE.into()
tree_sitter_html::LANGUAGE.into()
}
pub fn language_java() -> TSLanguage {
tree_sitter_java::LANGUAGE.into()
tree_sitter_java::LANGUAGE.into()
}
pub fn language_javascript() -> TSLanguage {
tree_sitter_javascript::LANGUAGE.into()
tree_sitter_javascript::LANGUAGE.into()
}
pub fn language_json() -> TSLanguage {
tree_sitter_json::LANGUAGE.into()
tree_sitter_json::LANGUAGE.into()
}
pub fn language_julia() -> TSLanguage {
tree_sitter_julia::LANGUAGE.into()
tree_sitter_julia::LANGUAGE.into()
}
pub fn language_kotlin() -> TSLanguage {
tree_sitter_kotlin::LANGUAGE.into()
tree_sitter_kotlin::LANGUAGE.into()
}
pub fn language_lua() -> TSLanguage {
tree_sitter_lua::LANGUAGE.into()
tree_sitter_lua::LANGUAGE.into()
}
pub fn language_make() -> TSLanguage {
tree_sitter_make::LANGUAGE.into()
tree_sitter_make::LANGUAGE.into()
}
pub fn language_markdown() -> TSLanguage {
tree_sitter_md::LANGUAGE.into()
tree_sitter_md::LANGUAGE.into()
}
pub fn language_nix() -> TSLanguage {
tree_sitter_nix::LANGUAGE.into()
tree_sitter_nix::LANGUAGE.into()
}
pub fn language_objc() -> TSLanguage {
tree_sitter_objc::LANGUAGE.into()
tree_sitter_objc::LANGUAGE.into()
}
pub fn language_odin() -> TSLanguage {
tree_sitter_odin::LANGUAGE.into()
tree_sitter_odin::LANGUAGE.into()
}
pub fn language_php() -> TSLanguage {
tree_sitter_php::LANGUAGE_PHP_ONLY.into()
tree_sitter_php::LANGUAGE_PHP_ONLY.into()
}
pub fn language_python() -> TSLanguage {
tree_sitter_python::LANGUAGE.into()
tree_sitter_python::LANGUAGE.into()
}
pub fn language_regex() -> TSLanguage {
tree_sitter_regex::LANGUAGE.into()
tree_sitter_regex::LANGUAGE.into()
}
pub fn language_ruby() -> TSLanguage {
tree_sitter_ruby::LANGUAGE.into()
tree_sitter_ruby::LANGUAGE.into()
}
pub fn language_rust() -> TSLanguage {
tree_sitter_rust::LANGUAGE.into()
tree_sitter_rust::LANGUAGE.into()
}
pub fn language_scala() -> TSLanguage {
tree_sitter_scala::LANGUAGE.into()
tree_sitter_scala::LANGUAGE.into()
}
pub fn language_solidity() -> TSLanguage {
tree_sitter_solidity::LANGUAGE.into()
tree_sitter_solidity::LANGUAGE.into()
}
pub fn language_starlark() -> TSLanguage {
tree_sitter_starlark::LANGUAGE.into()
tree_sitter_starlark::LANGUAGE.into()
}
pub fn language_swift() -> TSLanguage {
tree_sitter_swift::LANGUAGE.into()
tree_sitter_swift::LANGUAGE.into()
}
pub fn language_toml() -> TSLanguage {
tree_sitter_toml_ng::LANGUAGE.into()
tree_sitter_toml_ng::LANGUAGE.into()
}
pub fn language_tsx() -> TSLanguage {
tree_sitter_typescript::LANGUAGE_TSX.into()
tree_sitter_typescript::LANGUAGE_TSX.into()
}
pub fn language_typescript() -> TSLanguage {
tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()
tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()
}
pub fn language_verilog() -> TSLanguage {
tree_sitter_verilog::LANGUAGE.into()
tree_sitter_verilog::LANGUAGE.into()
}
pub fn language_xml() -> TSLanguage {
tree_sitter_xml::LANGUAGE_XML.into()
tree_sitter_xml::LANGUAGE_XML.into()
}
pub fn language_yaml() -> TSLanguage {
tree_sitter_yaml::LANGUAGE.into()
tree_sitter_yaml::LANGUAGE.into()
}
pub fn language_zig() -> TSLanguage {
tree_sitter_zig::LANGUAGE.into()
tree_sitter_zig::LANGUAGE.into()
}


@@ -143,7 +143,11 @@ pub struct DiffResult {
/// - ` N line` for context
/// - ` ... ` for skipped context
#[napi(js_name = "generateDiff")]
pub fn generate_diff(old_content: String, new_content: String, context_lines: Option<u32>) -> DiffResult {
pub fn generate_diff(
old_content: String,
new_content: String,
context_lines: Option<u32>,
) -> DiffResult {
let context = context_lines.unwrap_or(4) as usize;
generate_diff_impl(&old_content, &new_content, context)
}
@@ -192,36 +196,59 @@ fn generate_diff_impl(old_content: &str, new_content: &str, context_lines: usize
.iter()
.map(|s| s.to_string())
.collect();
parts.push(Part { tag: PartTag::Equal, lines });
parts.push(Part {
tag: PartTag::Equal,
lines,
});
}
similar::DiffOp::Delete { old_index, old_len, .. } => {
similar::DiffOp::Delete {
old_index, old_len, ..
} => {
let lines: Vec<String> = old_lines[*old_index..*old_index + *old_len]
.iter()
.map(|s| s.to_string())
.collect();
parts.push(Part { tag: PartTag::Removed, lines });
parts.push(Part {
tag: PartTag::Removed,
lines,
});
}
similar::DiffOp::Insert { new_index, new_len, .. } => {
similar::DiffOp::Insert {
new_index, new_len, ..
} => {
let lines: Vec<String> = new_lines[*new_index..*new_index + *new_len]
.iter()
.map(|s| s.to_string())
.collect();
parts.push(Part { tag: PartTag::Added, lines });
parts.push(Part {
tag: PartTag::Added,
lines,
});
}
similar::DiffOp::Replace {
old_index, old_len, new_index, new_len, ..
old_index,
old_len,
new_index,
new_len,
..
} => {
let del_lines: Vec<String> = old_lines[*old_index..*old_index + *old_len]
.iter()
.map(|s| s.to_string())
.collect();
parts.push(Part { tag: PartTag::Removed, lines: del_lines });
parts.push(Part {
tag: PartTag::Removed,
lines: del_lines,
});
let ins_lines: Vec<String> = new_lines[*new_index..*new_index + *new_len]
.iter()
.map(|s| s.to_string())
.collect();
parts.push(Part { tag: PartTag::Added, lines: ins_lines });
parts.push(Part {
tag: PartTag::Added,
lines: ins_lines,
});
}
}
}
@@ -274,11 +301,7 @@ fn generate_diff_impl(old_content: &str, new_content: &str, context_lines: usize
}
if skip_start > 0 {
output.push(format!(
" {:>width$} ...",
"",
width = line_num_width
));
output.push(format!(" {:>width$} ...", "", width = line_num_width));
old_line_num += skip_start;
new_line_num += skip_start;
}
@@ -291,11 +314,7 @@ fn generate_diff_impl(old_content: &str, new_content: &str, context_lines: usize
}
if skip_end > 0 {
output.push(format!(
" {:>width$} ...",
"",
width = line_num_width
));
output.push(format!(" {:>width$} ...", "", width = line_num_width));
old_line_num += skip_end;
new_line_num += skip_end;
}
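For background, a minimal sketch of the similar crate API that produces the DiffOp values consumed above; it mirrors similar's documented usage and is not code from this commit:

use similar::{ChangeTag, TextDiff};

fn sketch_plus_minus(old: &str, new: &str) -> String {
    let diff = TextDiff::from_lines(old, new);
    let mut out = String::new();
    for op in diff.ops() {
        // Each DiffOp (Equal / Delete / Insert / Replace) expands into
        // per-line changes, much like the Part construction above.
        for change in diff.iter_changes(op) {
            out.push_str(match change.tag() {
                ChangeTag::Delete => "-",
                ChangeTag::Insert => "+",
                ChangeTag::Equal => " ",
            });
            out.push_str(change.value());
        }
    }
    out
}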


@@ -379,9 +379,8 @@ pub fn apply_workspace_edit(
// ── Phase 2: commit ──────────────────────────────────────────────────
let mut file_results: Vec<WorkspaceEditFileResult> = Vec::with_capacity(staged.len());
let mut succeeded = 0usize;
for (final_path, tmp_path, new_bytes, edits_applied) in &staged {
for (succeeded, (final_path, tmp_path, new_bytes, edits_applied)) in staged.iter().enumerate() {
if let Err(e) = fs::rename(tmp_path, final_path) {
// Cleanup remaining staged tmps (including this one if rename failed
// before touching the original).
@@ -402,7 +401,6 @@ pub fn apply_workspace_edit(
edits_applied: *edits_applied,
bytes_written: new_bytes.len() as u32,
});
succeeded += 1;
}
// ── fsync parent directories (deduplicated) ──────────────────────────
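The two-phase shape here (stage every file to a temp path, then rename into place and fsync the parent directories) appears to follow the standard atomic-replace recipe. A generic, Unix-flavoured sketch of that recipe, illustrative only and not this crate's code:

use std::fs::{self, File};
use std::io::Write;
use std::path::Path;

fn atomic_replace(target: &Path, new_bytes: &[u8]) -> std::io::Result<()> {
    // Stage the complete new contents next to the target.
    let tmp = target.with_extension("tmp");
    let mut f = File::create(&tmp)?;
    f.write_all(new_bytes)?;
    f.sync_all()?; // flush file data before publishing it
    // Publish with a rename, which replaces the target atomically on POSIX.
    fs::rename(&tmp, target)?;
    // Make the rename itself durable by fsyncing the parent directory
    // (opening a directory like this works on Unix, not on Windows).
    if let Some(parent) = target.parent() {
        File::open(parent)?.sync_all()?;
    }
    Ok(())
}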


@@ -225,9 +225,9 @@ fn collect_matches(
let score = score_fuzzy_path(
&entry.path,
is_directory,
&query_lower,
&normalized_query,
&query_chars,
query_lower,
normalized_query,
query_chars,
);
if score == 0 {
continue;


@@ -141,22 +141,27 @@ fn parse_frontmatter_map_internal(lines: &[&str]) -> Vec<(String, FmValue)> {
for line in lines {
// Nested object property (4-space indent with key: value)
if line.starts_with(" ") && !line.starts_with(" ") {
if current_array.is_some() && current_obj.is_some() {
let rest = line.trim_start();
if let Some(colon_pos) = rest.find(": ") {
let k = &rest[..colon_pos];
let v = rest[colon_pos + 2..].trim();
if k.chars().all(|c| c.is_alphanumeric() || c == '_') {
current_obj.as_mut().unwrap().push((k.to_string(), v.to_string()));
continue;
if line.starts_with(" ")
&& !line.starts_with(" ")
&& current_array.is_some()
&& current_obj.is_some()
{
let rest = line.trim_start();
if let Some(colon_pos) = rest.find(": ") {
let k = &rest[..colon_pos];
let v = rest[colon_pos + 2..].trim();
if k.chars().all(|c| c.is_alphanumeric() || c == '_') {
if let Some(current_obj) = current_obj.as_mut() {
current_obj.push((k.to_string(), v.to_string()));
}
} else if rest.ends_with(':') {
let k = &rest[..rest.len() - 1];
if k.chars().all(|c| c.is_alphanumeric() || c == '_') {
current_obj.as_mut().unwrap().push((k.to_string(), String::new()));
continue;
continue;
}
} else if let Some(k) = rest.strip_suffix(':') {
if k.chars().all(|c| c.is_alphanumeric() || c == '_') {
if let Some(current_obj) = current_obj.as_mut() {
current_obj.push((k.to_string(), String::new()));
}
continue;
}
}
}
@@ -187,7 +192,10 @@ fn parse_frontmatter_map_internal(lines: &[&str]) -> Vec<(String, FmValue)> {
}
}
current_array.as_mut().unwrap().push(FmArrayItem::Str(val.to_string()));
current_array
.as_mut()
.unwrap()
.push(FmArrayItem::Str(val.to_string()));
continue;
}
@@ -481,18 +489,15 @@ fn parse_roadmap_internal(content: &str) -> NativeRoadmap {
.unwrap_or("")
.to_string();
let sc_section = extract_section_internal(content, "Success Criteria", 2)
.or_else(|| {
let idx = content.find("**Success Criteria:**")?;
let rest = &content[idx..];
let next_section = rest.find("\n---");
let block = &rest[..next_section.unwrap_or(rest.len())];
let first_newline = block.find('\n')?;
Some(block[first_newline + 1..].to_string())
});
let success_criteria = sc_section
.map(|s| parse_bullets(&s))
.unwrap_or_default();
let sc_section = extract_section_internal(content, "Success Criteria", 2).or_else(|| {
let idx = content.find("**Success Criteria:**")?;
let rest = &content[idx..];
let next_section = rest.find("\n---");
let block = &rest[..next_section.unwrap_or(rest.len())];
let first_newline = block.find('\n')?;
Some(block[first_newline + 1..].to_string())
});
let success_criteria = sc_section.map(|s| parse_bullets(&s)).unwrap_or_default();
let slices = parse_roadmap_slices_internal(content);
let boundary_map = parse_boundary_map_internal(content);
@@ -511,7 +516,7 @@ fn parse_roadmap_slices_internal(content: &str) -> Vec<NativeRoadmapSlice> {
Some(idx) => {
let start = idx + "## Slices".len();
let rest = &content[start..];
let rest = rest.trim_start_matches(|c: char| c == '\r' || c == '\n');
let rest = rest.trim_start_matches(['\r', '\n']);
let end = rest.find("\n## ").unwrap_or(rest.len());
rest[..end].trim_end()
}
@@ -532,8 +537,8 @@ fn parse_roadmap_slices_internal(content: &str) -> Vec<NativeRoadmapSlice> {
if let Some(ref mut s) = current_slice {
let trimmed = line.trim();
if trimmed.starts_with('>') {
let demo = trimmed[1..].trim();
if let Some(demo) = trimmed.strip_prefix('>') {
let demo = demo.trim();
let demo = if demo.to_lowercase().starts_with("after this:") {
demo["after this:".len()..].trim()
} else {
@@ -621,8 +626,7 @@ fn parse_boundary_map_internal(content: &str) -> Vec<NativeBoundaryMapEntry> {
let mut entries = Vec::new();
for (heading, section_content) in h3_sections {
let arrow_pos = heading.find('\u{2192}')
.or_else(|| heading.find("->"));
let arrow_pos = heading.find('\u{2192}').or_else(|| heading.find("->"));
if let Some(pos) = arrow_pos {
let arrow_len = if heading[pos..].starts_with('\u{2192}') {
@@ -630,8 +634,16 @@ fn parse_boundary_map_internal(content: &str) -> Vec<NativeBoundaryMapEntry> {
} else {
2
};
let from_slice = heading[..pos].trim().split_whitespace().next().unwrap_or("").to_string();
let to_slice = heading[pos + arrow_len..].trim().split_whitespace().next().unwrap_or("").to_string();
let from_slice = heading[..pos]
.split_whitespace()
.next()
.unwrap_or("")
.to_string();
let to_slice = heading[pos + arrow_len..]
.split_whitespace()
.next()
.unwrap_or("")
.to_string();
let mut produces = String::new();
let mut consumes = String::new();
@@ -1057,13 +1069,9 @@ pub fn parse_plan_file(content: String) -> NativePlan {
let id = fm_id.unwrap_or(heading_id);
let goal = extract_bold_field(body, "Goal")
.unwrap_or("")
.to_string();
let goal = extract_bold_field(body, "Goal").unwrap_or("").to_string();
let demo = extract_bold_field(body, "Demo")
.unwrap_or("")
.to_string();
let demo = extract_bold_field(body, "Demo").unwrap_or("").to_string();
let must_haves = extract_section_internal(body, "Must-Haves", 2)
.map(|s| parse_bullets(&s))
@@ -1129,10 +1137,7 @@ fn parse_plan_tasks(body: &str) -> Vec<NativeTaskEntry> {
let after_bold = &after_bracket[2 + bold_end + 2..];
let estimate = if let Some(est_start) = after_bold.find("`est:") {
let val_start = est_start + 5;
let val_end = after_bold[val_start..]
.find('`')
.unwrap_or(0)
+ val_start;
let val_end = after_bold[val_start..].find('`').unwrap_or(0) + val_start;
after_bold[val_start..val_end].to_string()
} else {
String::new()
@@ -1259,11 +1264,9 @@ pub fn parse_summary_file(content: String) -> NativeSummary {
result
};
let what_happened = extract_section_internal(body, "What Happened", 2)
.unwrap_or_default();
let what_happened = extract_section_internal(body, "What Happened", 2).unwrap_or_default();
let deviations = extract_section_internal(body, "Deviations", 2)
.unwrap_or_default();
let deviations = extract_section_internal(body, "Deviations", 2).unwrap_or_default();
let files_modified = extract_section_internal(body, "Files Created/Modified", 2)
.or_else(|| extract_section_internal(body, "Files Modified", 2))
@@ -1327,8 +1330,7 @@ fn parse_summary_frontmatter(fm_map: &[(String, FmValue)]) -> NativeSummaryFront
};
let blocker_str = get_scalar("blocker_discovered");
let blocker_discovered =
blocker_str == "true" || blocker_str == "yes" || blocker_str == "True";
let blocker_discovered = blocker_str == "true" || blocker_str == "yes" || blocker_str == "True";
NativeSummaryFrontmatter {
id: get_scalar("id"),
@@ -1359,15 +1361,20 @@ fn parse_files_modified(section: &str) -> Vec<NativeFileModified> {
};
// Parse `path` — description or `path` - description
if text.starts_with('`') {
if let Some(end_tick) = text[1..].find('`') {
let path = text[1..1 + end_tick].to_string();
let rest = text[1 + end_tick + 1..].trim();
let description = if rest.starts_with("—") || rest.starts_with("–") || rest.starts_with('-') {
rest[rest.find(|c: char| c != '—' && c != '–' && c != '-').unwrap_or(rest.len())..].trim().to_string()
} else {
rest.to_string()
};
if let Some(rest) = text.strip_prefix('`') {
if let Some(end_tick) = rest.find('`') {
let path = rest[..end_tick].to_string();
let rest = rest[end_tick + 1..].trim();
let description =
if rest.starts_with("—") || rest.starts_with("–") || rest.starts_with('-') {
rest[rest
.find(|c: char| c != '—' && c != '–' && c != '-')
.unwrap_or(rest.len())..]
.trim()
.to_string()
} else {
rest.to_string()
};
files.push(NativeFileModified { path, description });
}
}
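As a worked example of the branch above (hedged, since a couple of dash characters were lost in this page's rendering): a summary bullet whose text reads

`src/glob.rs` — tightened brace handling

should come out as a NativeFileModified with path "src/glob.rs" and description "tightened brace handling", the leading separator (em dash, en dash, or plain hyphen) stripped from the description.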


@@ -36,17 +36,26 @@ fn git_err(context: &str, e: git2::Error) -> Error {
/// Prevents path traversal attacks via patterns like `../../etc/passwd`.
fn validate_path_within_repo(repo_path: &str, file_path: &str) -> Result<std::path::PathBuf> {
let repo_dir = std::fs::canonicalize(repo_path).map_err(|e| {
Error::new(Status::GenericFailure, format!("Failed to canonicalize repo path '{repo_path}': {e}"))
Error::new(
Status::GenericFailure,
format!("Failed to canonicalize repo path '{repo_path}': {e}"),
)
})?;
let full_path = repo_dir.join(file_path);
let canonical = if full_path.exists() {
std::fs::canonicalize(&full_path).map_err(|e| {
Error::new(Status::GenericFailure, format!("Failed to canonicalize path '{file_path}': {e}"))
Error::new(
Status::GenericFailure,
format!("Failed to canonicalize path '{file_path}': {e}"),
)
})?
} else if let Some(parent) = full_path.parent() {
if parent.exists() {
let cp = std::fs::canonicalize(parent).map_err(|e| {
Error::new(Status::GenericFailure, format!("Failed to canonicalize parent of '{file_path}': {e}"))
Error::new(
Status::GenericFailure,
format!("Failed to canonicalize parent of '{file_path}': {e}"),
)
})?;
cp.join(full_path.file_name().unwrap_or_default())
} else {
@@ -56,7 +65,10 @@ fn validate_path_within_repo(repo_path: &str, file_path: &str) -> Result<std::pa
full_path.clone()
};
if !canonical.starts_with(&repo_dir) {
return Err(Error::new(Status::GenericFailure, format!("Path '{file_path}' escapes repository boundary")));
return Err(Error::new(
Status::GenericFailure,
format!("Path '{file_path}' escapes repository boundary"),
));
}
Ok(canonical)
}
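// Illustrative behaviour (hypothetical, not a test in this commit): with the
// repository root as the working directory,
//
//     validate_path_within_repo(".", "src/lib.rs")     -> Ok(<canonical path>)
//     validate_path_within_repo(".", "../outside.txt") -> Err("... escapes repository boundary")
//
// i.e. anything that canonicalizes to a location outside the repo is refused.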
@@ -159,9 +171,7 @@ pub struct GitMergeResult {
#[napi]
pub fn git_current_branch(repo_path: String) -> Result<Option<String>> {
let repo = open_repo(&repo_path)?;
let head = repo
.head()
.map_err(|e| git_err("Failed to read HEAD", e))?;
let head = repo.head().map_err(|e| git_err("Failed to read HEAD", e))?;
if head.is_branch() {
Ok(head.shorthand().map(String::from))
@@ -200,9 +210,7 @@ pub fn git_main_branch(repo_path: String) -> Result<String> {
return Ok("master".to_string());
}
let head = repo
.head()
.map_err(|e| git_err("Failed to read HEAD", e))?;
let head = repo.head().map_err(|e| git_err("Failed to read HEAD", e))?;
Ok(head.shorthand().unwrap_or("HEAD").to_string())
}
@@ -358,11 +366,7 @@ pub fn git_has_staged_changes(repo_path: String) -> Result<bool> {
/// When `from_ref` is "HEAD" and `to_ref` is "INDEX", diffs index vs HEAD (staged).
/// Replaces: `git diff --stat HEAD`, `git diff --stat --cached HEAD`
#[napi]
pub fn git_diff_stat(
repo_path: String,
from_ref: String,
to_ref: String,
) -> Result<GitDiffStat> {
pub fn git_diff_stat(repo_path: String, from_ref: String, to_ref: String) -> Result<GitDiffStat> {
let repo = open_repo(&repo_path)?;
let diff = match (from_ref.as_str(), to_ref.as_str()) {
@@ -506,14 +510,11 @@ pub fn git_diff_numstat(
// Count added/removed lines per file using the patch API
for (i, _) in diff.deltas().enumerate() {
if let Ok(patch) = git2::Patch::from_diff(&diff, i) {
if let Some(patch) = patch {
let (_, additions, deletions) = patch.line_stats()
.unwrap_or((0, 0, 0));
if let Some(entry) = results.get_mut(i) {
entry.added = additions as u32;
entry.removed = deletions as u32;
}
if let Ok(Some(patch)) = git2::Patch::from_diff(&diff, i) {
let (_, additions, deletions) = patch.line_stats().unwrap_or((0, 0, 0));
if let Some(entry) = results.get_mut(i) {
entry.added = additions as u32;
entry.removed = deletions as u32;
}
}
}
@@ -783,7 +784,9 @@ pub fn git_ls_files(repo_path: String, pathspec: String) -> Result<Vec<String>>
let mut files = Vec::new();
for entry in index.iter() {
let path = String::from_utf8_lossy(&entry.path).to_string();
if path.starts_with(&pathspec) || (pathspec.ends_with('/') && path.starts_with(pathspec.trim_end_matches('/'))) {
if path.starts_with(&pathspec)
|| (pathspec.ends_with('/') && path.starts_with(pathspec.trim_end_matches('/')))
{
files.push(path);
}
}
@@ -1030,11 +1033,7 @@ pub fn git_reset_paths(repo_path: String, paths: Vec<String>) -> Result<()> {
/// Returns the commit SHA.
/// Replaces: `git commit -m <message>`, `git commit --no-verify -F -`
#[napi]
pub fn git_commit(
repo_path: String,
message: String,
allow_empty: Option<bool>,
) -> Result<String> {
pub fn git_commit(repo_path: String, message: String, allow_empty: Option<bool>) -> Result<String> {
let repo = open_repo(&repo_path)?;
let mut index = repo
.index()
@@ -1045,8 +1044,7 @@ pub fn git_commit(
let merge_msg_path = repo.path().join("MERGE_MSG");
let squash_msg_path = repo.path().join("SQUASH_MSG");
if merge_msg_path.exists() {
std::fs::read_to_string(&merge_msg_path)
.unwrap_or_else(|_| "Merge commit".to_string())
std::fs::read_to_string(&merge_msg_path).unwrap_or_else(|_| "Merge commit".to_string())
} else if squash_msg_path.exists() {
std::fs::read_to_string(&squash_msg_path)
.unwrap_or_else(|_| "Squash commit".to_string())
@@ -1107,8 +1105,12 @@ pub fn git_commit(
for msg_file in &["SQUASH_MSG", "MERGE_MSG"] {
let msg_path = repo.path().join(msg_file);
if msg_path.exists() {
std::fs::remove_file(&msg_path)
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to clean up {msg_file}: {e}")))?;
std::fs::remove_file(&msg_path).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to clean up {msg_file}: {e}"),
)
})?;
}
}
@@ -1183,11 +1185,19 @@ pub fn git_checkout_theirs(repo_path: String, paths: Vec<String>) -> Result<()>
.map_err(|e| git_err(&format!("Failed to find blob for '{path}'"), e))?;
let full_path = validate_path_within_repo(&repo_path, path)?;
if let Some(parent) = full_path.parent() {
std::fs::create_dir_all(parent)
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to create directory for '{path}': {e}")))?;
std::fs::create_dir_all(parent).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to create directory for '{path}': {e}"),
)
})?;
}
std::fs::write(&full_path, blob.content())
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to write '{path}': {e}")))?;
std::fs::write(&full_path, blob.content()).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to write '{path}': {e}"),
)
})?;
}
}
@@ -1233,8 +1243,12 @@ pub fn git_merge_squash(repo_path: String, branch: String) -> Result<GitMergeRes
let mut checkout_opts = CheckoutBuilder::new();
checkout_opts.safe().allow_conflicts(true);
repo.merge(&[&annotated], Some(&mut merge_opts), Some(&mut checkout_opts))
.map_err(|e| git_err("Failed to merge", e))?;
repo.merge(
&[&annotated],
Some(&mut merge_opts),
Some(&mut checkout_opts),
)
.map_err(|e| git_err("Failed to merge", e))?;
// Check for conflicts
let index = repo
@@ -1277,9 +1291,7 @@ pub fn git_merge_abort(repo_path: String) -> Result<()> {
let repo = open_repo(&repo_path)?;
// Reset to HEAD
let head = repo
.head()
.map_err(|e| git_err("Failed to read HEAD", e))?;
let head = repo.head().map_err(|e| git_err("Failed to read HEAD", e))?;
let obj = head
.peel(ObjectType::Commit)
.map_err(|e| git_err("Failed to peel HEAD", e))?;
@@ -1321,12 +1333,20 @@ pub fn git_rebase_abort(repo_path: String) -> Result<()> {
// Clean up rebase state directories
if rebase_merge.exists() {
std::fs::remove_dir_all(&rebase_merge)
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to remove rebase-merge state: {e}")))?;
std::fs::remove_dir_all(&rebase_merge).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to remove rebase-merge state: {e}"),
)
})?;
}
if rebase_apply.exists() {
std::fs::remove_dir_all(&rebase_apply)
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to remove rebase-apply state: {e}")))?;
std::fs::remove_dir_all(&rebase_apply).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to remove rebase-apply state: {e}"),
)
})?;
}
}
@@ -1341,9 +1361,7 @@ pub fn git_rebase_abort(repo_path: String) -> Result<()> {
pub fn git_reset_hard(repo_path: String) -> Result<()> {
let repo = open_repo(&repo_path)?;
let head = repo
.head()
.map_err(|e| git_err("Failed to read HEAD", e))?;
let head = repo.head().map_err(|e| git_err("Failed to read HEAD", e))?;
let obj = head
.peel(ObjectType::Commit)
.map_err(|e| git_err("Failed to peel HEAD", e))?;
@@ -1385,11 +1403,7 @@ pub fn git_branch_delete(repo_path: String, branch: String, force: Option<bool>)
/// Force-reset a branch to point at a target ref.
/// Replaces: `git branch -f <branch> <target>`
#[napi]
pub fn git_branch_force_reset(
repo_path: String,
branch: String,
target: String,
) -> Result<()> {
pub fn git_branch_force_reset(repo_path: String, branch: String, target: String) -> Result<()> {
let repo = open_repo(&repo_path)?;
let target_commit = repo
@@ -1446,10 +1460,8 @@ pub fn git_rm_cached(
removed.push(format!("rm '{entry_path}'"));
}
}
} else {
if index.remove_path(Path::new(path)).is_ok() {
removed.push(format!("rm '{path}'"));
}
} else if index.remove_path(Path::new(path)).is_ok() {
removed.push(format!("rm '{path}'"));
}
}
@@ -1472,13 +1484,18 @@ pub fn git_rm_force(repo_path: String, paths: Vec<String>) -> Result<()> {
.map_err(|e| git_err("Failed to read index", e))?;
for path in &paths {
index.remove_path(Path::new(path))
index
.remove_path(Path::new(path))
.map_err(|e| git_err(&format!("Failed to remove '{path}' from index"), e))?;
// Also delete from working tree (with path traversal validation)
let full_path = validate_path_within_repo(&repo_path, path)?;
if full_path.exists() {
std::fs::remove_file(&full_path)
.map_err(|e| Error::new(Status::GenericFailure, format!("Failed to delete '{path}': {e}")))?;
std::fs::remove_file(&full_path).map_err(|e| {
Error::new(
Status::GenericFailure,
format!("Failed to delete '{path}': {e}"),
)
})?;
}
}
@@ -1523,10 +1540,7 @@ pub fn git_worktree_add(
repo.worktree(
&branch, // worktree name
Path::new(&wt_path),
Some(
git2::WorktreeAddOptions::new()
.reference(Some(&reference)),
),
Some(git2::WorktreeAddOptions::new().reference(Some(&reference))),
)
.map_err(|e| git_err(&format!("Failed to add worktree at '{wt_path}'"), e))?;
@@ -1615,8 +1629,7 @@ pub fn git_worktree_prune(repo_path: String) -> Result<()> {
pub fn git_revert_commit(repo_path: String, sha: String) -> Result<()> {
let repo = open_repo(&repo_path)?;
let oid = git2::Oid::from_str(&sha)
.map_err(|e| git_err(&format!("Invalid SHA '{sha}'"), e))?;
let oid = git2::Oid::from_str(&sha).map_err(|e| git_err(&format!("Invalid SHA '{sha}'"), e))?;
let commit = repo
.find_commit(oid)


@@ -175,8 +175,12 @@ fn run_glob(
}
let mut matches = if config.use_cache {
let scan =
fs_cache::get_or_scan(&config.root, config.include_hidden, config.use_gitignore, &ct)?;
let scan = fs_cache::get_or_scan(
&config.root,
config.include_hidden,
config.use_gitignore,
&ct,
)?;
let mut matches = filter_entries(&scan.entries, &glob_set, &config, on_match, &ct)?;
// Empty-result recheck: if we got zero matches from a cached scan that's old
// enough, force a rescan and try once more before returning empty.


@@ -81,10 +81,7 @@ mod tests {
#[test]
fn unclosed_brace_gets_closed() {
assert_eq!(
build_glob_pattern("*.{ts,tsx,js", true),
"**/*.{ts,tsx,js}"
);
assert_eq!(build_glob_pattern("*.{ts,tsx,js", true), "**/*.{ts,tsx,js}");
}
#[test]


@@ -15,109 +15,109 @@ static SCOPE_MATCHERS: OnceLock<ScopeMatchers> = OnceLock::new();
// Thread-local cache for scope -> color index lookups
thread_local! {
static SCOPE_COLOR_CACHE: RefCell<HashMap<Scope, usize>> = RefCell::new(HashMap::with_capacity(256));
static SCOPE_COLOR_CACHE: RefCell<HashMap<Scope, usize>> = RefCell::new(HashMap::with_capacity(256));
}
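// Sketch of how a colour lookup might consult the cache above; the helper
// name and the `compute` callback are illustrative, not part of this commit.
fn cached_color_index(scope: Scope, compute: impl Fn(Scope) -> usize) -> usize {
    SCOPE_COLOR_CACHE.with(|cache| {
        *cache
            .borrow_mut()
            .entry(scope)
            .or_insert_with(|| compute(scope))
    })
}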
fn get_syntax_set() -> &'static SyntaxSet {
SYNTAX_SET.get_or_init(SyntaxSet::load_defaults_newlines)
SYNTAX_SET.get_or_init(SyntaxSet::load_defaults_newlines)
}
/// Pre-compiled scope patterns for fast matching.
struct ScopeMatchers {
// Comment (index 0)
comment: Scope,
// Comment (index 0)
comment: Scope,
// String (index 4)
string: Scope,
constant_character: Scope,
meta_string: Scope,
// String (index 4)
string: Scope,
constant_character: Scope,
meta_string: Scope,
// Number (index 5)
constant_numeric: Scope,
constant_integer: Scope,
constant: Scope,
// Number (index 5)
constant_numeric: Scope,
constant_integer: Scope,
constant: Scope,
// Keyword (index 1)
keyword: Scope,
storage_type: Scope,
storage_modifier: Scope,
// Keyword (index 1)
keyword: Scope,
storage_type: Scope,
storage_modifier: Scope,
// Function (index 2)
entity_name_function: Scope,
support_function: Scope,
meta_function_call: Scope,
variable_function: Scope,
// Function (index 2)
entity_name_function: Scope,
support_function: Scope,
meta_function_call: Scope,
variable_function: Scope,
// Type (index 6)
entity_name_type: Scope,
support_type: Scope,
support_class: Scope,
entity_name_class: Scope,
entity_name_struct: Scope,
entity_name_enum: Scope,
entity_name_interface: Scope,
entity_name_trait: Scope,
// Type (index 6)
entity_name_type: Scope,
support_type: Scope,
support_class: Scope,
entity_name_class: Scope,
entity_name_struct: Scope,
entity_name_enum: Scope,
entity_name_interface: Scope,
entity_name_trait: Scope,
// Operator (index 7)
keyword_operator: Scope,
punctuation_accessor: Scope,
// Operator (index 7)
keyword_operator: Scope,
punctuation_accessor: Scope,
// Punctuation (index 8)
punctuation: Scope,
// Punctuation (index 8)
punctuation: Scope,
// Variable (index 3)
variable: Scope,
entity_name: Scope,
meta_path: Scope,
// Variable (index 3)
variable: Scope,
entity_name: Scope,
meta_path: Scope,
// Diff (indices 9, 10)
markup_inserted: Scope,
markup_deleted: Scope,
meta_diff_header: Scope,
meta_diff_range: Scope,
// Diff (indices 9, 10)
markup_inserted: Scope,
markup_deleted: Scope,
meta_diff_header: Scope,
meta_diff_range: Scope,
}
impl ScopeMatchers {
fn new() -> Self {
Self {
comment: Scope::new("comment").unwrap(),
string: Scope::new("string").unwrap(),
constant_character: Scope::new("constant.character").unwrap(),
meta_string: Scope::new("meta.string").unwrap(),
constant_numeric: Scope::new("constant.numeric").unwrap(),
constant_integer: Scope::new("constant.integer").unwrap(),
constant: Scope::new("constant").unwrap(),
keyword: Scope::new("keyword").unwrap(),
storage_type: Scope::new("storage.type").unwrap(),
storage_modifier: Scope::new("storage.modifier").unwrap(),
entity_name_function: Scope::new("entity.name.function").unwrap(),
support_function: Scope::new("support.function").unwrap(),
meta_function_call: Scope::new("meta.function-call").unwrap(),
variable_function: Scope::new("variable.function").unwrap(),
entity_name_type: Scope::new("entity.name.type").unwrap(),
support_type: Scope::new("support.type").unwrap(),
support_class: Scope::new("support.class").unwrap(),
entity_name_class: Scope::new("entity.name.class").unwrap(),
entity_name_struct: Scope::new("entity.name.struct").unwrap(),
entity_name_enum: Scope::new("entity.name.enum").unwrap(),
entity_name_interface: Scope::new("entity.name.interface").unwrap(),
entity_name_trait: Scope::new("entity.name.trait").unwrap(),
keyword_operator: Scope::new("keyword.operator").unwrap(),
punctuation_accessor: Scope::new("punctuation.accessor").unwrap(),
punctuation: Scope::new("punctuation").unwrap(),
variable: Scope::new("variable").unwrap(),
entity_name: Scope::new("entity.name").unwrap(),
meta_path: Scope::new("meta.path").unwrap(),
markup_inserted: Scope::new("markup.inserted").unwrap(),
markup_deleted: Scope::new("markup.deleted").unwrap(),
meta_diff_header: Scope::new("meta.diff.header").unwrap(),
meta_diff_range: Scope::new("meta.diff.range").unwrap(),
}
}
}
fn get_scope_matchers() -> &'static ScopeMatchers {
SCOPE_MATCHERS.get_or_init(ScopeMatchers::new)
}
/// Theme colors for syntax highlighting.
@ -125,228 +125,240 @@ fn get_scope_matchers() -> &'static ScopeMatchers {
#[derive(Debug)]
#[napi(object)]
pub struct HighlightColors {
/// ANSI color for comments.
pub comment: String,
/// ANSI color for keywords.
pub keyword: String,
/// ANSI color for function names.
pub function: String,
/// ANSI color for variables and identifiers.
pub variable: String,
/// ANSI color for string literals.
pub string: String,
/// ANSI color for numeric literals.
pub number: String,
/// ANSI color for type identifiers.
#[napi(js_name = "type")]
pub r#type: String,
/// ANSI color for operators.
pub operator: String,
/// ANSI color for punctuation tokens.
pub punctuation: String,
/// ANSI color for diff inserted lines.
#[napi(js_name = "inserted")]
pub inserted: Option<String>,
/// ANSI color for diff deleted lines.
#[napi(js_name = "deleted")]
pub deleted: Option<String>,
}
/// Language alias mappings: (aliases, target syntax name).
/// Used for languages not in syntect's default set or with non-standard names.
const LANG_ALIASES: &[(&[&str], &str)] = &[
(&["ts", "tsx", "typescript", "js", "jsx", "javascript", "mjs", "cjs"], "JavaScript"),
(&["py", "python"], "Python"),
(&["rb", "ruby"], "Ruby"),
(&["rs", "rust"], "Rust"),
(&["go", "golang"], "Go"),
(&["java"], "Java"),
(&["kt", "kotlin"], "Java"),
(&["swift"], "Objective-C"),
(&["c", "h"], "C"),
(&["cpp", "cc", "cxx", "c++", "hpp", "hxx", "hh"], "C++"),
(&["cs", "csharp"], "C#"),
(&["php"], "PHP"),
(&["sh", "bash", "zsh", "shell"], "Bash"),
(&["fish"], "Shell-Unix-Generic"),
(&["ps1", "powershell"], "PowerShell"),
(&["html", "htm"], "HTML"),
(&["css"], "CSS"),
(&["scss"], "SCSS"),
(&["sass"], "Sass"),
(&["less"], "LESS"),
(&["json"], "JSON"),
(&["yaml", "yml"], "YAML"),
(&["toml"], "TOML"),
(&["xml"], "XML"),
(&["md", "markdown"], "Markdown"),
(&["sql"], "SQL"),
(&["lua"], "Lua"),
(&["perl", "pl"], "Perl"),
(&["r"], "R"),
(&["scala"], "Scala"),
(&["clj", "clojure"], "Clojure"),
(&["ex", "exs", "elixir"], "Ruby"),
(&["erl", "erlang"], "Erlang"),
(&["hs", "haskell"], "Haskell"),
(&["ml", "ocaml"], "OCaml"),
(&["vim"], "VimL"),
(&["graphql", "gql"], "GraphQL"),
(&["proto", "protobuf"], "Protocol Buffers"),
(&["tf", "hcl", "terraform"], "Terraform"),
(&["dockerfile", "docker"], "Dockerfile"),
(&["makefile", "make"], "Makefile"),
(&["cmake"], "CMake"),
(&["ini", "cfg", "conf", "config", "properties"], "INI"),
(&["diff", "patch"], "Diff"),
(&["gitignore", "gitattributes", "gitmodules"], "Git Ignore"),
];
/// Find syntax name from alias table using case-insensitive comparison.
#[inline]
fn find_alias(lang: &str) -> Option<&'static str> {
LANG_ALIASES
.iter()
.find(|(aliases, _)| aliases.iter().any(|a| lang.eq_ignore_ascii_case(a)))
.map(|(_, target)| *target)
}
/// Check if language is in the alias table.
#[inline]
fn is_known_alias(lang: &str) -> bool {
LANG_ALIASES
.iter()
.any(|(aliases, _)| aliases.iter().any(|a| lang.eq_ignore_ascii_case(a)))
}
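// A minimal sanity-check sketch for the alias table above, assuming the private
// helpers stay in this module; the language names used here all appear in
// LANG_ALIASES and lookups are ASCII case-insensitive.
#[cfg(test)]
mod alias_lookup_example {
    use super::*;

    #[test]
    fn aliases_resolve_case_insensitively() {
        // "tsx" (any casing) maps to the bundled JavaScript syntax.
        assert_eq!(find_alias("TSX"), Some("JavaScript"));
        assert_eq!(find_alias("tsx"), Some("JavaScript"));
        // "kotlin" is routed to the Java grammar as a close-enough fallback.
        assert_eq!(find_alias("Kotlin"), Some("Java"));
        // Names outside the table are not aliased at all.
        assert_eq!(find_alias("cobol"), None);
        assert!(is_known_alias("yaml"));
        assert!(!is_known_alias("cobol"));
    }
}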
/// Compute the color index for a single scope (uncached).
#[inline]
fn compute_scope_color(s: Scope) -> usize {
let m = get_scope_matchers();
// Comment (index 0)
if m.comment.is_prefix_of(s) {
return 0;
}
// Diff inserted (index 9)
if m.markup_inserted.is_prefix_of(s) {
return 9;
}
// Diff deleted (index 10)
if m.markup_deleted.is_prefix_of(s) {
return 10;
}
// Diff header/range -> keyword (index 1)
if m.meta_diff_header.is_prefix_of(s) || m.meta_diff_range.is_prefix_of(s) {
return 1;
}
// String (index 4)
if m.string.is_prefix_of(s)
|| m.constant_character.is_prefix_of(s)
|| m.meta_string.is_prefix_of(s)
{
return 4;
}
// Number (index 5)
if m.constant_numeric.is_prefix_of(s) || m.constant_integer.is_prefix_of(s) {
return 5;
}
// Keyword (index 1)
if m.keyword.is_prefix_of(s)
|| m.storage_type.is_prefix_of(s)
|| m.storage_modifier.is_prefix_of(s)
{
return 1;
}
// Function (index 2)
if m.entity_name_function.is_prefix_of(s)
|| m.support_function.is_prefix_of(s)
|| m.meta_function_call.is_prefix_of(s)
|| m.variable_function.is_prefix_of(s)
{
return 2;
}
// Type (index 6)
if m.entity_name_type.is_prefix_of(s)
|| m.support_type.is_prefix_of(s)
|| m.support_class.is_prefix_of(s)
|| m.entity_name_class.is_prefix_of(s)
|| m.entity_name_struct.is_prefix_of(s)
|| m.entity_name_enum.is_prefix_of(s)
|| m.entity_name_interface.is_prefix_of(s)
|| m.entity_name_trait.is_prefix_of(s)
{
return 6;
}
// Operator (index 7)
if m.keyword_operator.is_prefix_of(s) || m.punctuation_accessor.is_prefix_of(s) {
return 7;
}
// Punctuation (index 8)
if m.punctuation.is_prefix_of(s) {
return 8;
}
// Variable (index 3)
if m.variable.is_prefix_of(s) || m.entity_name.is_prefix_of(s) || m.meta_path.is_prefix_of(s) {
return 3;
}
// Generic constant -> number (index 5)
if m.constant.is_prefix_of(s) {
return 5;
}
// No match
usize::MAX
}
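// A compact sketch of the bucket priority implemented above; the scope selectors
// are ordinary TextMate-style names and purely illustrative. The checks run
// top-down, so `punctuation.accessor` lands in the operator bucket (7) before the
// generic punctuation bucket (8) is ever consulted.
#[cfg(test)]
mod scope_color_example {
    use super::*;
    use syntect::parsing::Scope;

    #[test]
    fn buckets_follow_prefix_priority() {
        // Comments always win bucket 0.
        assert_eq!(compute_scope_color(Scope::new("comment.line.double-slash").unwrap()), 0);
        // Strings land in bucket 4.
        assert_eq!(compute_scope_color(Scope::new("string.quoted.double").unwrap()), 4);
        // Accessor punctuation is treated as an operator (7), other punctuation as 8.
        assert_eq!(compute_scope_color(Scope::new("punctuation.accessor.dot").unwrap()), 7);
        assert_eq!(compute_scope_color(Scope::new("punctuation.separator.comma").unwrap()), 8);
        // Anything outside every bucket reports "no match".
        assert_eq!(compute_scope_color(Scope::new("text.html.basic").unwrap()), usize::MAX);
    }
}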
/// Determine the semantic color category from a scope stack.
/// Uses per-scope caching to avoid repeated prefix checks.
#[inline]
fn scope_to_color_index(scope: &ScopeStack) -> usize {
SCOPE_COLOR_CACHE.with(|cache| {
let mut cache = cache.borrow_mut();
// Walk from innermost to outermost scope
for s in scope.as_slice().iter().rev() {
let color_idx = *cache.entry(*s).or_insert_with(|| compute_scope_color(*s));
if color_idx != usize::MAX {
return color_idx;
}
}
usize::MAX
})
}
/// Find the appropriate syntax for a language name.
fn find_syntax<'a>(ss: &'a SyntaxSet, lang: &str) -> Option<&'a SyntaxReference> {
// Direct name/token match (syntect APIs are case-insensitive)
if let Some(syn) = ss.find_syntax_by_token(lang) {
return Some(syn);
}
// Extension-based match
if let Some(syn) = ss.find_syntax_by_extension(lang) {
return Some(syn);
}
// Alias lookup for languages not in syntect's default set
let alias = find_alias(lang)?;
ss.find_syntax_by_name(alias)
.or_else(|| ss.find_syntax_by_token(alias))
}
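// A short sketch of the resolution order above, assuming the loaded syntax set is
// syntect's default bundle: direct token match first, then file-extension match,
// and only then the LANG_ALIASES fallback.
#[cfg(test)]
mod find_syntax_example {
    use super::*;

    #[test]
    fn tokens_extensions_and_aliases_resolve() {
        let ss = get_syntax_set();
        assert!(find_syntax(ss, "rust").is_some()); // matched by token/name
        assert!(find_syntax(ss, "rs").is_some()); // matched by file extension
        assert!(find_syntax(ss, "kotlin").is_some()); // via alias -> "Java"
        assert!(find_syntax(ss, "definitely-not-a-language").is_none());
    }
}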
/// Highlight code and return ANSI-colored lines.
@ -361,93 +373,93 @@ fn find_syntax<'a>(ss: &'a SyntaxSet, lang: &str) -> Option<&'a SyntaxReference>
/// fails.
#[napi(js_name = "highlightCode")]
pub fn highlight_code(code: String, lang: Option<String>, colors: HighlightColors) -> String {
let inserted = colors.inserted.as_deref().unwrap_or("");
let deleted = colors.deleted.as_deref().unwrap_or("");
// Color palette as array for quick indexing
let palette = [
colors.comment.as_str(), // 0
colors.keyword.as_str(), // 1
colors.function.as_str(), // 2
colors.variable.as_str(), // 3
colors.string.as_str(), // 4
colors.number.as_str(), // 5
colors.r#type.as_str(), // 6
colors.operator.as_str(), // 7
colors.punctuation.as_str(), // 8
inserted, // 9
deleted, // 10
];
let ss = get_syntax_set();
// Find syntax for the language
let syntax = match &lang {
Some(l) => find_syntax(ss, l),
None => None,
}
.unwrap_or_else(|| ss.find_syntax_plain_text());
let mut parse_state = ParseState::new(syntax);
let mut scope_stack = ScopeStack::new();
let mut result = String::with_capacity(code.len() * 2);
for line in syntect::util::LinesWithEndings::from(code.as_str()) {
let Ok(ops) = parse_state.parse_line(line, ss) else {
// Parse error - append unhighlighted line and continue
result.push_str(line);
continue;
};
let mut prev_end = 0;
for (offset, op) in ops {
let offset = offset.min(line.len());
// Output text BEFORE this operation using current scope
if offset > prev_end {
let text = &line[prev_end..offset];
let color_idx = scope_to_color_index(&scope_stack);
if color_idx < palette.len() && !palette[color_idx].is_empty() {
result.push_str(palette[color_idx]);
result.push_str(text);
result.push_str("\x1b[39m");
} else {
result.push_str(text);
}
}
prev_end = offset;
// Now apply scope operation for NEXT segment
match op {
ScopeStackOp::Push(scope) => {
scope_stack.push(scope);
},
ScopeStackOp::Pop(count) => {
for _ in 0..count {
scope_stack.pop();
}
},
ScopeStackOp::Restore | ScopeStackOp::Clear(_) | ScopeStackOp::Noop => {},
}
}
// Output remaining text with current scope
if prev_end < line.len() {
let text = &line[prev_end..];
let color_idx = scope_to_color_index(&scope_stack);
if color_idx < palette.len() && !palette[color_idx].is_empty() {
result.push_str(palette[color_idx]);
result.push_str(text);
result.push_str("\x1b[39m");
} else {
result.push_str(text);
}
}
}
result
}
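// A minimal usage sketch for `highlight_code`, assuming standard ANSI SGR strings
// for the palette and that the bundled Rust grammar scopes `fn` under a
// keyword/storage scope (as the default Sublime grammar does). Fields left empty
// simply pass their tokens through unstyled.
#[cfg(test)]
mod highlight_code_example {
    use super::*;

    #[test]
    fn keywords_are_wrapped_in_the_configured_color() {
        let colors = HighlightColors {
            comment: String::new(),
            keyword: "\x1b[35m".to_string(), // magenta for keywords/storage
            function: String::new(),
            variable: String::new(),
            string: String::new(),
            number: String::new(),
            r#type: String::new(),
            operator: String::new(),
            punctuation: String::new(),
            inserted: None,
            deleted: None,
        };
        let out = highlight_code("fn main() {}\n".to_string(), Some("rust".to_string()), colors);
        // The keyword color and the "default foreground" reset both appear, and the
        // original text is preserved in between.
        assert!(out.contains("\x1b[35m"));
        assert!(out.contains("\x1b[39m"));
        assert!(out.contains("fn"));
    }
}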
/// Check if a language is supported for highlighting.
@ -455,18 +467,18 @@ pub fn highlight_code(code: String, lang: Option<String>, colors: HighlightColor
/// mapping.
#[napi(js_name = "supportsLanguage")]
pub fn supports_language(lang: String) -> bool {
if is_known_alias(&lang) {
return true;
}
// Fall back to direct syntax lookup
let ss = get_syntax_set();
find_syntax(ss, &lang).is_some()
}
/// Get list of supported languages.
#[napi(js_name = "getSupportedLanguages")]
pub fn get_supported_languages() -> Vec<String> {
let ss = get_syntax_set();
ss.syntaxes().iter().map(|s| s.name.clone()).collect()
}

View file

@ -9,9 +9,9 @@
use std::{io::Cursor, sync::Arc};
use image::{
DynamicImage, ImageFormat, ImageReader,
codecs::{jpeg::JpegEncoder, webp::WebPEncoder},
imageops::FilterType,
};
use napi::bindgen_prelude::*;
use napi_derive::napi;
@ -22,15 +22,15 @@ use crate::task;
#[napi]
pub enum SamplingFilter {
/// Nearest-neighbor sampling (fast, low quality).
Nearest = 1,
/// Triangle filter (linear interpolation).
Triangle = 2,
/// Catmull-Rom filter with sharper edges.
CatmullRom = 3,
/// Gaussian filter for smoother results.
Gaussian = 4,
/// Lanczos3 filter for high-quality downscaling.
Lanczos3 = 5,
}
impl From<SamplingFilter> for FilterType {
@ -81,7 +81,9 @@ impl NativeImage {
#[napi(js_name = "encode")]
pub fn encode(&self, format: u8, quality: u8) -> task::Async<Vec<u8>> {
let img = Arc::clone(&self.img);
task::blocking("image.encode", (), move |_| encode_image(&img, format, quality))
task::blocking("image.encode", (), move |_| {
encode_image(&img, format, quality)
})
}
/// Resize to exact dimensions. Returns a new NativeImage.
@ -89,7 +91,9 @@ impl NativeImage {
pub fn resize(&self, width: u32, height: u32, filter: SamplingFilter) -> ImageTask {
let img = Arc::clone(&self.img);
task::blocking("image.resize", (), move |_| {
Ok(Self { img: Arc::new(img.resize_exact(width, height, filter.into())) })
})
}
}
@ -122,27 +126,29 @@ fn encode_image(img: &DynamicImage, format: u8, quality: u8) -> Result<Vec<u8>>
img.write_to(&mut Cursor::new(&mut buffer), ImageFormat::Png)
.map_err(|e| Error::from_reason(format!("Failed to encode PNG: {e}")))?;
Ok(buffer)
},
1 => {
let mut buffer = Vec::with_capacity(encode_capacity(w, h, 3)?);
let encoder = JpegEncoder::new_with_quality(&mut buffer, quality);
img.write_with_encoder(encoder)
.map_err(|e| Error::from_reason(format!("Failed to encode JPEG: {e}")))?;
Ok(buffer)
},
2 => {
let mut buffer = Vec::with_capacity(encode_capacity(w, h, 4)?);
let encoder = WebPEncoder::new_lossless(&mut buffer);
img.write_with_encoder(encoder)
.map_err(|e| Error::from_reason(format!("Failed to encode WebP: {e}")))?;
Ok(buffer)
},
3 => {
let mut buffer = Vec::with_capacity(encode_capacity(w, h, 1)?);
img.write_to(&mut Cursor::new(&mut buffer), ImageFormat::Gif)
.map_err(|e| Error::from_reason(format!("Failed to encode GIF: {e}")))?;
Ok(buffer)
},
_ => Err(Error::from_reason(format!("Invalid image format: {format}"))),
}
}
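// A short sketch of the format codes handled above (0 = PNG, 1 = JPEG, 2 = lossless
// WebP, 3 = GIF), using a tiny in-memory image; codes outside that range are
// rejected with an error.
#[cfg(test)]
mod encode_image_example {
    use super::*;
    use image::DynamicImage;

    #[test]
    fn png_signature_and_invalid_format() {
        let img = DynamicImage::new_rgba8(2, 2);
        // Format 0 produces PNG output, so the buffer starts with the PNG signature.
        let png = encode_image(&img, 0, 90).expect("PNG encoding should succeed");
        assert!(png.starts_with(&[0x89, b'P', b'N', b'G']));
        // Unknown codes are reported as errors rather than silently defaulting.
        assert!(encode_image(&img, 9, 90).is_err());
    }
}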

View file

@ -169,12 +169,24 @@ fn handle_truncated_value(result: &mut String) {
// at the end after a value-position character
if len > 0 {
let last = bytes[len - 1];
if last.is_ascii_digit() || last == b'.' || last == b'-' || last == b'e' || last == b'E' || last == b'+' {
// Walk backwards to find the start of the number-like token
let mut start = len;
while start > 0 {
let b = bytes[start - 1];
if b.is_ascii_digit() || b == b'.' || b == b'-' || b == b'e' || b == b'E' || b == b'+' {
start -= 1;
} else {
break;
@ -228,12 +240,8 @@ fn handle_truncated_value(result: &mut String) {
/// Convert a serde_json::Value to a napi JsUnknown.
fn serde_value_to_napi(env: &Env, value: &serde_json::Value) -> Result<napi::JsUnknown> {
match value {
serde_json::Value::Null => env.get_null().map(|v| v.into_unknown()),
serde_json::Value::Bool(b) => env.get_boolean(*b).map(|v| v.into_unknown()),
serde_json::Value::Number(n) => {
if let Some(i) = n.as_i64() {
// Use i32 if it fits, otherwise f64
@ -248,9 +256,7 @@ fn serde_value_to_napi(env: &Env, value: &serde_json::Value) -> Result<napi::JsU
env.get_null().map(|v| v.into_unknown())
}
}
serde_json::Value::String(s) => env.create_string(s).map(|v| v.into_unknown()),
serde_json::Value::Array(arr) => {
let mut js_arr = env.create_array_with_length(arr.len())?;
for (idx, item) in arr.iter().enumerate() {

View file

@ -13,224 +13,234 @@ use napi_derive::napi;
#[cfg(target_os = "linux")]
mod platform {
use std::fs;
/// Collect all descendant PIDs of `pid` into `pids`.
/// Skips branches when `/proc/{pid}/task/{pid}/children` cannot be read.
pub fn collect_descendants(pid: i32, pids: &mut Vec<i32>) {
let children_path = format!("/proc/{pid}/task/{pid}/children");
let Ok(content) = fs::read_to_string(&children_path) else {
return;
};
for part in content.split_whitespace() {
if let Ok(child_pid) = part.parse::<i32>() {
pids.push(child_pid);
collect_descendants(child_pid, pids);
}
}
}
/// Send `signal` to `pid`.
/// Returns true when the signal is delivered successfully.
pub fn kill_pid(pid: i32, signal: i32) -> bool {
// SAFETY: libc::kill is safe to call with any pid/signal combination
unsafe { libc::kill(pid, signal) == 0 }
}
/// Get the process group id for `pid`.
/// Returns `None` when the process does not exist or is inaccessible.
pub fn process_group_id(pid: i32) -> Option<i32> {
// SAFETY: `libc::getpgid` is safe to call with any pid
let pgid = unsafe { libc::getpgid(pid) };
if pgid < 0 { None } else { Some(pgid) }
}
/// Send `signal` to the process group `pgid`.
/// Returns true when the signal is delivered successfully.
pub fn kill_process_group(pgid: i32, signal: i32) -> bool {
// SAFETY: libc::kill is safe to call with any pid/signal combination
unsafe { libc::kill(-pgid, signal) == 0 }
}
}
#[cfg(target_os = "macos")]
mod platform {
use std::ptr;
#[link(name = "proc", kind = "dylib")]
unsafe extern "C" {
fn proc_listchildpids(ppid: i32, buffer: *mut i32, buffersize: i32) -> i32;
}
#[link(name = "proc", kind = "dylib")]
unsafe extern "C" {
fn proc_listchildpids(ppid: i32, buffer: *mut i32, buffersize: i32) -> i32;
}
/// Collect all descendant PIDs of `pid` into `pids` using libproc.
/// Skips branches when libproc returns no children.
pub fn collect_descendants(pid: i32, pids: &mut Vec<i32>) {
// First call to get count
// SAFETY: passing null buffer with size 0 to query child count is valid per
// libproc API.
let count = unsafe { proc_listchildpids(pid, ptr::null_mut(), 0) };
if count <= 0 {
return;
}
let mut buffer = vec![0i32; count as usize];
// SAFETY: buffer is correctly sized and aligned for `count` i32 elements.
let actual = unsafe {
proc_listchildpids(pid, buffer.as_mut_ptr(), (buffer.len() * size_of::<i32>()) as i32)
};
if actual <= 0 {
return;
}
let child_count = actual as usize / size_of::<i32>();
for &child_pid in &buffer[..child_count] {
if child_pid > 0 {
pids.push(child_pid);
collect_descendants(child_pid, pids);
}
}
}
/// Send `signal` to `pid`.
/// Returns true when the signal is delivered successfully.
pub fn kill_pid(pid: i32, signal: i32) -> bool {
// SAFETY: libc::kill is safe to call with any pid/signal combination
unsafe { libc::kill(pid, signal) == 0 }
}
/// Get the process group id for `pid`.
/// Returns `None` when the process does not exist or is inaccessible.
pub fn process_group_id(pid: i32) -> Option<i32> {
// SAFETY: libc::getpgid is safe to call with any pid
let pgid = unsafe { libc::getpgid(pid) };
if pgid < 0 { None } else { Some(pgid) }
}
/// Send `signal` to the process group `pgid`.
/// Returns true when the signal is delivered successfully.
pub fn kill_process_group(pgid: i32, signal: i32) -> bool {
// SAFETY: libc::kill is safe to call with any pid/signal combination
unsafe { libc::kill(-pgid, signal) == 0 }
}
}
#[cfg(target_os = "windows")]
mod platform {
use std::{collections::HashMap, mem};
#[repr(C)]
#[allow(non_snake_case, reason = "Windows PROCESSENTRY32W field names must match Win32 ABI")]
struct PROCESSENTRY32W {
dwSize: u32,
cntUsage: u32,
th32ProcessID: u32,
th32DefaultHeapID: usize,
th32ModuleID: u32,
cntThreads: u32,
th32ParentProcessID: u32,
pcPriClassBase: i32,
dwFlags: u32,
szExeFile: [u16; 260],
}
type Handle = *mut std::ffi::c_void;
const INVALID_HANDLE_VALUE: Handle = -1isize as Handle;
const TH32CS_SNAPPROCESS: u32 = 0x00000002;
const PROCESS_TERMINATE: u32 = 0x0001;
#[link(name = "kernel32")]
unsafe extern "system" {
fn CreateToolhelp32Snapshot(dwFlags: u32, th32ProcessID: u32) -> Handle;
fn Process32FirstW(hSnapshot: Handle, lppe: *mut PROCESSENTRY32W) -> i32;
fn Process32NextW(hSnapshot: Handle, lppe: *mut PROCESSENTRY32W) -> i32;
fn CloseHandle(hObject: Handle) -> i32;
fn OpenProcess(dwDesiredAccess: u32, bInheritHandle: i32, dwProcessId: u32) -> Handle;
fn TerminateProcess(hProcess: Handle, uExitCode: u32) -> i32;
}
#[link(name = "kernel32")]
unsafe extern "system" {
fn CreateToolhelp32Snapshot(dwFlags: u32, th32ProcessID: u32) -> Handle;
fn Process32FirstW(hSnapshot: Handle, lppe: *mut PROCESSENTRY32W) -> i32;
fn Process32NextW(hSnapshot: Handle, lppe: *mut PROCESSENTRY32W) -> i32;
fn CloseHandle(hObject: Handle) -> i32;
fn OpenProcess(dwDesiredAccess: u32, bInheritHandle: i32, dwProcessId: u32) -> Handle;
fn TerminateProcess(hProcess: Handle, uExitCode: u32) -> i32;
}
/// Build a map of `parent_pid` -> [`child_pids`] for all processes.
fn build_process_tree() -> HashMap<u32, Vec<u32>> {
let mut tree: HashMap<u32, Vec<u32>> = HashMap::new();
// SAFETY: Toolhelp snapshot APIs are called with initialized structs and valid
// handles.
unsafe {
let snapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);
if snapshot == INVALID_HANDLE_VALUE {
return tree;
}
let mut entry: PROCESSENTRY32W = mem::zeroed();
entry.dwSize = mem::size_of::<PROCESSENTRY32W>() as u32;
if Process32FirstW(snapshot, &raw mut entry) != 0 {
loop {
tree.entry(entry.th32ParentProcessID)
.or_default()
.push(entry.th32ProcessID);
if Process32NextW(snapshot, &raw mut entry) == 0 {
break;
}
}
}
CloseHandle(snapshot);
}
tree
}
/// Collect all descendant PIDs of `pid` into `pids`.
/// Uses a snapshot of the current process table.
pub fn collect_descendants(pid: i32, pids: &mut Vec<i32>) {
let tree = build_process_tree();
collect_descendants_from_tree(pid as u32, &tree, pids);
}
fn collect_descendants_from_tree(pid: u32, tree: &HashMap<u32, Vec<u32>>, pids: &mut Vec<i32>) {
if let Some(children) = tree.get(&pid) {
for &child_pid in children {
pids.push(child_pid as i32);
collect_descendants_from_tree(child_pid, tree, pids);
}
}
}
/// Terminate `pid` (Windows ignores `signal`).
/// Returns true when the process is terminated.
pub fn kill_pid(pid: i32, _signal: i32) -> bool {
// SAFETY: OpenProcess/TerminateProcess are called with kernel-provided process
// IDs and handles are always closed.
unsafe {
let handle = OpenProcess(PROCESS_TERMINATE, 0, pid as u32);
if handle.is_null() || handle == INVALID_HANDLE_VALUE {
return false;
}
let result = TerminateProcess(handle, 1);
CloseHandle(handle);
result != 0
}
}
/// Process groups are not exposed on Windows.
/// Always returns `None`.
pub const fn process_group_id(_pid: i32) -> Option<i32> {
None
}
/// Process groups are not exposed on Windows.
/// Always returns `false`.
pub const fn kill_process_group(_pgid: i32, _signal: i32) -> bool {
false
}
}
/// Kill a process tree (the process and all its descendants).
@ -240,24 +250,24 @@ mod platform {
/// Returns the number of processes successfully killed.
#[napi]
pub fn kill_tree(pid: i32, signal: i32) -> u32 {
let mut descendants = Vec::new();
platform::collect_descendants(pid, &mut descendants);
let mut killed = 0u32;
// Kill children first (deepest first by reversing the DFS order)
for &child_pid in descendants.iter().rev() {
if platform::kill_pid(child_pid, signal) {
killed += 1;
}
}
// Kill the root process last
if platform::kill_pid(pid, signal) {
killed += 1;
}
killed
}
/// List all descendant PIDs of `pid`.
@ -265,16 +275,16 @@ pub fn kill_tree(pid: i32, signal: i32) -> u32 {
/// Returns an empty array if the process has no children or doesn't exist.
#[napi]
pub fn list_descendants(pid: i32) -> Vec<i32> {
let mut descendants = Vec::new();
platform::collect_descendants(pid, &mut descendants);
descendants
}
/// Get the process group id for `pid`.
/// Returns `null` when the process is missing or unsupported on the platform.
#[napi]
pub fn process_group_id(pid: i32) -> Option<i32> {
platform::process_group_id(pid)
}
/// Kill an entire process group.
@ -284,5 +294,5 @@ pub fn process_group_id(pid: i32) -> Option<i32> {
/// Returns false on Windows (process groups not supported).
#[napi]
pub fn kill_process_group(pgid: i32, signal: i32) -> bool {
platform::kill_process_group(pgid, signal)
}
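// A small, side-effect-free sketch of the exported helpers above, assuming the
// current process id fits in an i32. Listing descendants never sends a signal,
// so it is safe to exercise even when the tree is empty.
#[cfg(test)]
mod process_tree_example {
    use super::*;

    #[test]
    fn listing_descendants_sends_no_signals() {
        let pid = std::process::id() as i32;
        // The freshly started test process normally has no child processes of its own.
        let children = list_descendants(pid);
        assert!(!children.contains(&pid), "a process is never its own descendant");
        // Unix platforms report a process group; Windows always returns None.
        let _pgid = process_group_id(pid);
    }
}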

View file

@ -47,19 +47,15 @@ pub struct StreamChunkResult {
/// strips ANSI escape sequences, removes control characters (except tab and
/// newline), removes carriage returns, and filters Unicode format characters.
#[napi(js_name = "processStreamChunk")]
pub fn process_stream_chunk(chunk: Buffer, state: Option<StreamState>) -> StreamChunkResult {
let state = state.unwrap_or_default();
let bytes = chunk.as_ref();
// Prepend any pending bytes from previous chunk
let mut input: Vec<u8>;
let src: &[u8] = if !state.utf8_pending.is_empty() || !state.ansi_pending.is_empty() {
input =
Vec::with_capacity(state.ansi_pending.len() + state.utf8_pending.len() + bytes.len());
input.extend_from_slice(&state.ansi_pending);
input.extend_from_slice(&state.utf8_pending);
input.extend_from_slice(bytes);
@ -134,7 +130,7 @@ fn find_incomplete_utf8_tail(bytes: &[u8]) -> usize {
// that starts an incomplete sequence.
let len = bytes.len();
// Check at most the last 3 bytes (max UTF-8 continuation trail)
let check_start = len.saturating_sub(3);
for i in (check_start..len).rev() {
let b = bytes[i];
@ -326,8 +322,8 @@ fn could_be_incomplete_ansi(bytes: &[u8], pos: usize) -> bool {
// CSI: ESC [ ... <final byte 0x40-0x7E>
b'[' => {
// If we don't see a final byte, it's incomplete
for byte in bytes.iter().skip(pos + 2) {
if (0x40..=0x7E).contains(byte) {
return false; // found terminator — it's complete (but malformed since ansi_sequence_len returned None)
}
}
@ -335,11 +331,11 @@ fn could_be_incomplete_ansi(bytes: &[u8], pos: usize) -> bool {
}
// OSC: ESC ] ... (terminated by BEL or ST)
b']' => {
for (j, byte) in bytes.iter().enumerate().skip(pos + 2) {
if *byte == 0x07 {
return false;
}
if *byte == 0x1B && j + 1 < bytes.len() && bytes[j + 1] == b'\\' {
return false;
}
}
@ -347,8 +343,8 @@ fn could_be_incomplete_ansi(bytes: &[u8], pos: usize) -> bool {
}
// DCS, SOS, PM, APC
b'P' | b'X' | b'^' | b'_' => {
for (j, byte) in bytes.iter().enumerate().skip(pos + 2) {
if *byte == 0x1B && j + 1 < bytes.len() && bytes[j + 1] == b'\\' {
return false;
}
}
@ -358,8 +354,8 @@ fn could_be_incomplete_ansi(bytes: &[u8], pos: usize) -> bool {
0x40..=0x7E => false,
// Intermediate bytes (ESC + intermediate + final)
0x20..=0x2F => {
for byte in bytes.iter().skip(pos + 2) {
if (0x30..=0x7E).contains(byte) {
return false;
}
}
@ -383,8 +379,8 @@ fn ansi_sequence_len(bytes: &[u8], pos: usize) -> Option<usize> {
match bytes[pos + 1] {
// CSI: ESC [
b'[' => {
for (j, byte) in bytes.iter().enumerate().take(len).skip(pos + 2) {
if (0x40..=0x7E).contains(byte) {
return Some(j - pos + 1);
}
}
@ -392,11 +388,11 @@ fn ansi_sequence_len(bytes: &[u8], pos: usize) -> Option<usize> {
}
// OSC: ESC ]
b']' => {
for (j, byte) in bytes.iter().enumerate().take(len).skip(pos + 2) {
if *byte == 0x07 {
return Some(j - pos + 1);
}
if *byte == 0x1B && j + 1 < len && bytes[j + 1] == b'\\' {
return Some(j - pos + 2);
}
}
@ -404,8 +400,8 @@ fn ansi_sequence_len(bytes: &[u8], pos: usize) -> Option<usize> {
}
// DCS, SOS, PM, APC — terminated by ST (ESC \)
b'P' | b'X' | b'^' | b'_' => {
for (j, byte) in bytes.iter().enumerate().take(len).skip(pos + 2) {
if *byte == 0x1B && j + 1 < len && bytes[j + 1] == b'\\' {
return Some(j - pos + 2);
}
}
@ -413,8 +409,8 @@ fn ansi_sequence_len(bytes: &[u8], pos: usize) -> Option<usize> {
}
// ESC + intermediates (0x20-0x2F) + final byte (0x30-0x7E)
0x20..=0x2F => {
for (j, byte) in bytes.iter().enumerate().take(len).skip(pos + 2) {
if (0x30..=0x7E).contains(byte) {
return Some(j - pos + 1);
}
}
@ -661,9 +657,8 @@ mod tests {
fn process_chunk(bytes: &[u8], state: Option<StreamState>) -> StreamChunkResult {
let state = state.unwrap_or_default();
let mut input: Vec<u8> =
Vec::with_capacity(state.ansi_pending.len() + state.utf8_pending.len() + bytes.len());
input.extend_from_slice(&state.ansi_pending);
input.extend_from_slice(&state.utf8_pending);
input.extend_from_slice(bytes);

View file

@ -208,10 +208,9 @@ fn find_symbol_matches(
) -> Result<Vec<SymbolMatch>> {
let mut compiled: Vec<Pattern> = Vec::new();
for pat_str in patterns {
if let Ok(p) = Pattern::try_new(pat_str, lang) {
compiled.push(p);
} // skip patterns that don't compile for this lang variant
}
if compiled.is_empty() {
return Err(Error::from_reason(

View file

@ -9,7 +9,7 @@
use std::time::{Duration, Instant};
use napi::{bindgen_prelude::*, Env, Error, Result, Task};
// ─────────────────────────────────────────────────────────────────────────────
// Cancellation

File diff suppressed because it is too large

View file

@ -44,7 +44,11 @@ pub fn truncate_tail(text: String, max_bytes: u32) -> TruncateResult {
// Fast path: fits entirely
if total_bytes <= max {
let line_count = memchr::memchr_iter(b'\n', text.as_bytes()).count()
+ if text.is_empty() || text.ends_with('\n') { 0 } else { 1 };
return TruncateResult {
text,
truncated: false,
@ -73,7 +77,9 @@ pub fn truncate_tail(text: String, max_bytes: u32) -> TruncateResult {
let kept_lines = count_lines(kept);
TruncateResult {
text: std::str::from_utf8(kept).expect("split at newline boundary preserves UTF-8").to_owned(),
truncated: true,
original_lines,
kept_lines,
@ -93,7 +99,11 @@ pub fn truncate_head(text: String, max_bytes: u32) -> TruncateResult {
// Fast path
if total_bytes <= max {
let line_count = memchr::memchr_iter(b'\n', text.as_bytes()).count()
+ if text.is_empty() || text.ends_with('\n') { 0 } else { 1 };
return TruncateResult {
text,
truncated: false,
@ -124,7 +134,9 @@ pub fn truncate_head(text: String, max_bytes: u32) -> TruncateResult {
let kept_lines = count_lines(kept);
TruncateResult {
text: std::str::from_utf8(kept).expect("split at newline boundary preserves UTF-8").to_owned(),
truncated: true,
original_lines,
kept_lines,
@ -138,11 +150,7 @@ pub fn truncate_head(text: String, max_bytes: u32) -> TruncateResult {
/// - `"head"`: keep the end (tail truncation removes head)
/// - `"both"`: keep beginning and end, elide the middle
#[napi(js_name = "truncateOutput")]
pub fn truncate_output(text: String, max_bytes: u32, mode: Option<String>) -> TruncateOutputResult {
let max = max_bytes as usize;
if text.len() <= max {

View file

@ -13,7 +13,7 @@ use napi_derive::napi;
use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
use std::sync::{mpsc, Arc};
use std::thread::{self, JoinHandle};
use std::time::{Duration, Instant};
@ -81,8 +81,8 @@ fn build_ignore_set(patterns: &[String]) -> std::result::Result<GlobSet, String>
}
fn event_kind(kind: &EventKind) -> Option<&'static str> {
use notify::event::ModifyKind;
use notify::EventKind::*;
match kind {
Create(_) => Some("create"),
@ -182,8 +182,9 @@ pub fn watch_tree(
build_ignore_set(&ignore_patterns).map_err(|e| Error::new(Status::InvalidArg, e))?;
let has_ignores = !ignore_patterns.is_empty();
let tsfn: ThreadsafeFunction<Vec<WatchEvent>> = on_events
.create_threadsafe_function(0, |ctx: ThreadSafeCallContext<Vec<WatchEvent>>| {
let events: Vec<WatchEvent> = ctx.value;
let env = ctx.env;
let mut arr = env.create_array_with_length(events.len())?;
@ -194,7 +195,8 @@ pub fn watch_tree(
arr.set_element(i as u32, obj)?;
}
Ok(vec![arr])
})?;
let (sender, receiver) = mpsc::channel();
let mut watcher = RecommendedWatcher::new(
@ -203,7 +205,12 @@ pub fn watch_tree(
},
Config::default(),
)
.map_err(|e| Error::new(Status::GenericFailure, format!("failed to create watcher: {e}")))?;
let mode = if recursive {
RecursiveMode::Recursive

View file

@ -11,33 +11,33 @@ use napi_derive::napi;
/// the input string is converted to UTF-8 bytes and hashed.
#[napi(js_name = "xxHash32")]
pub fn xx_hash32(input: String, seed: u32) -> u32 {
xxhash_rust::xxh32::xxh32(input.as_bytes(), seed)
}
#[cfg(test)]
mod tests {
use super::*;
/// Reference vectors verified against the pure-JS implementation.
#[test]
fn known_vectors() {
// Empty string, seed 0
assert_eq!(xx_hash32(String::new(), 0), 0x02CC5D05);
// "hello", seed 0
assert_eq!(xx_hash32("hello".into(), 0), 0xFB0DA52A);
// "hello", seed 42
assert_eq!(xx_hash32("hello".into(), 42), 0x0AA8E13E);
}
#[test]
fn short_and_long_inputs() {
// < 16 bytes (no stripe loop)
let short = xx_hash32("abc".into(), 0);
assert_ne!(short, 0);
// >= 16 bytes (enters stripe loop)
let long = xx_hash32("abcdefghijklmnop".into(), 0);
assert_ne!(long, 0);
assert_ne!(short, long);
}
}

View file

@ -1,10 +0,0 @@
export interface RemoteConfig {
endpoint: string;
apiKey?: string;
timeout?: number;
}
export function resolveRemoteConfig(): RemoteConfig;
export function resolveRemotePreferenceConfig(hydrateTokens?: boolean): RemoteConfig;
export function getRemoteConfigStatus(): string;
export function isValidChannelId(channel: string, id: string): boolean;

View file

@ -1,8 +0,0 @@
export function setFetchAllowedUrls(hostnames: string[]): void;
export function getFetchAllowedUrls(): string[];
export function isBlockedUrl(url: string): boolean;
export function normalizeQuery(query: string): string;
export function toDedupeKey(url: string): string;
export function extractDomain(url: string): string;
export function detectFreshness(query: string): string | null;
export function detectDomainHints(query: string): string[];

View file

@ -1,2 +0,0 @@
export const SCAFFOLD_FILES: string[];
export function ensureAgenticDocsScaffold(basePath?: string): void;

View file

@ -1,24 +0,0 @@
export const PROJECT_RAG_MCP_SERVER_NAME: string;
export function detectProjectRag(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): unknown;
export function resolveProjectRagBinary(env?: NodeJS.ProcessEnv): string | null;
export function resolveSiftBinary(env?: NodeJS.ProcessEnv): string | null;
export function detectSift(_projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): unknown;
export function ensureSiftIndexWarmup(projectRoot: string, prefs: Record<string, unknown>, options?: Record<string, unknown>): Promise<unknown>;
export function resolveProjectRagBuildJobs(env?: NodeJS.ProcessEnv): number;
export function findProjectRagSourceDir(projectRoot: string, env?: NodeJS.ProcessEnv): string | null;
export function resolveProjectRagBinaryForProject(projectRoot: string, env?: NodeJS.ProcessEnv): string | null;
export function buildProjectRagMcpServerConfig(projectRoot?: string, env?: NodeJS.ProcessEnv): Record<string, unknown>;
export function buildProjectRagBinary(projectRoot: string, env?: NodeJS.ProcessEnv): boolean;
export function ensureProjectRagMcpConfig(projectRoot: string, env?: NodeJS.ProcessEnv): void;
export function resolveCodebaseIndexerBackendName(prefs: Record<string, unknown>): string;
export function resolveEffectiveCodebaseIndexerBackendName(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): string;
export function getCodebaseIndexerBackend(prefsOrName: Record<string, unknown> | string): unknown;
export function detectCodebaseIndexer(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): unknown;
export function formatCodebaseIndexerStatus(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): string;
export function buildCodeIntelligenceContextBlock(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): string;
export function formatProjectRagStatus(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): string;
export function formatSiftStatus(projectRoot: string, prefs: Record<string, unknown>, env?: NodeJS.ProcessEnv): string;
export const PROJECT_RAG_CODEBASE_INDEXER_BACKEND: Record<string, unknown>;
export const SIFT_CODEBASE_INDEXER_BACKEND: Record<string, unknown>;
export const NO_CODEBASE_INDEXER_BACKEND: Record<string, unknown>;
export const CODEBASE_INDEXER_BACKENDS: Record<string, unknown>;

View file

@ -1,15 +0,0 @@
export interface DocCheckResult {
checkedAt: string;
repoRoot: string;
checks: Array<{ file: string; status: string; message?: string }>;
summary: {
total: number;
ok: number;
empty: number;
stub: number;
missing: number;
};
}
export function checkDocsScaffold(repoRoot: string): DocCheckResult;
export function formatDocCheckReport(report: DocCheckResult): string;

View file

@ -1,25 +0,0 @@
export function validateTitle(title: string): string | null;
export function buildStateMarkdown(state: Record<string, unknown>): string;
export interface DoctorIssue {
severity: "error" | "warning";
code: string;
scope: string;
unitId: string;
message: string;
file?: string;
fixable?: boolean;
}
export interface DoctorReport {
ok: boolean;
basePath: string;
issues: DoctorIssue[];
fixesApplied: string[];
timing?: Record<string, number>;
scope?: string;
}
export function runSFDoctor(basePath: string, options?: Record<string, unknown>): Promise<DoctorReport>;
export function formatDoctorReport(report: DoctorReport): string;
export function formatDoctorReportJson(report: DoctorReport): string;

View file

@ -1,6 +0,0 @@
export function isSfGitignored(basePath?: string): boolean;
export function hasGitTrackedSfFiles(basePath?: string): boolean;
export function ensureGitInfoExclude(basePath?: string): void;
export function ensureGitignore(basePath?: string, options?: Record<string, unknown>): void;
export function untrackRuntimeFiles(basePath?: string): void;
export function ensurePreferences(basePath?: string): void;

View file

@ -1,52 +0,0 @@
export function nativeGetCurrentBranch(basePath: string): string;
export function nativeDetectMainBranch(basePath: string): string;
export function nativeBranchExists(basePath: string, branch: string): boolean;
export function nativeHasMergeConflicts(basePath: string): boolean;
export function nativeWorkingTreeStatus(basePath: string): string;
export function nativeHasChanges(basePath: string): boolean;
export function _resetHasChangesCache(): void;
export function nativeCommitCountBetween(basePath: string, fromRef: string, toRef: string): number;
export function nativeIsRepo(basePath: string): boolean;
export function nativeHasStagedChanges(basePath: string): boolean;
export function nativeDiffStat(basePath: string, fromRef: string, toRef: string): string;
export function nativeDiffNameStatus(basePath: string, fromRef: string, toRef: string, pathspec?: string, useMergeBase?: boolean): string[];
export function nativeDiffNumstat(basePath: string, fromRef: string, toRef: string): string;
export function nativeDiffContent(basePath: string, fromRef: string, toRef: string, pathspec?: string, exclude?: string[], useMergeBase?: boolean): string;
export function nativeLogOneline(basePath: string, fromRef?: string, toRef?: string): string[];
export function nativeWorktreeList(basePath: string): string[];
export function nativeBranchList(basePath: string, pattern?: string): string[];
export function nativeBranchListMerged(basePath: string, target: string, pattern?: string): string[];
export function nativeLsFiles(basePath: string, pathspec?: string): string[];
export function nativeForEachRef(basePath: string, prefix?: string): string[];
export function nativeConflictFiles(basePath: string): string[];
export function nativeBatchInfo(basePath: string): Record<string, unknown>;
export function nativeInit(basePath: string, initialBranch?: string): void;
export function nativeAddAll(basePath: string): void;
export function nativeAddTracked(basePath: string): void;
export function nativeAddAllWithExclusions(basePath: string, exclusions: string[]): void;
export function nativeAddPaths(basePath: string, paths: string[]): void;
export function nativeResetPaths(basePath: string, paths: string[]): void;
export function nativeCommit(basePath: string, message: string, options?: Record<string, unknown>): void;
export function nativeCheckoutBranch(basePath: string, branch: string): void;
export function nativeCheckoutTheirs(basePath: string, paths: string[]): void;
export function nativeMergeSquash(basePath: string, branch: string): void;
export function nativeMergeAbort(basePath: string): void;
export function nativeRebaseAbort(basePath: string): void;
export function nativeResetHard(basePath: string): void;
export function nativeResetSoft(basePath: string, target?: string): void;
export function nativeCommitSubject(basePath: string, ref: string): string;
export function nativeBranchDelete(basePath: string, branch: string, force?: boolean): void;
export function nativeBranchForceReset(basePath: string, branch: string, target: string): void;
export function nativeRmCached(basePath: string, paths: string[], recursive?: boolean): void;
export function nativeRmForce(basePath: string, paths: string[]): void;
export function nativeWorktreeAdd(basePath: string, wtPath: string, branch: string, createBranch?: boolean, startPoint?: string): void;
export function nativeWorktreeRemove(basePath: string, wtPath: string, force?: boolean): void;
export function nativeWorktreePrune(basePath: string): void;
export function nativeRevertCommit(basePath: string, sha: string): void;
export function nativeRevertAbort(basePath: string): void;
export function nativeUpdateRef(basePath: string, refname: string, target: string): void;
export function isNativeGitAvailable(): boolean;
export function nativeIsAncestor(basePath: string, ancestor: string, descendant: string): boolean;
export function nativeLastCommitEpoch(basePath: string, ref?: string): number;
export function nativeUnpushedCount(basePath: string, branch: string): number;
export function getCommitsBehindMain(worktreePath: string, mainRef: string): number;

View file

@ -1,30 +0,0 @@
export function clearPathCache(): void;
export function buildMilestoneFileName(milestoneId: string, suffix: string): string;
export function buildSliceFileName(sliceId: string, suffix: string): string;
export function buildTaskFileName(taskId: string, suffix: string): string;
export function resolveDir(parentDir: string, idPrefix: string): string;
export function resolveFile(dir: string, idPrefix: string, suffix: string): string;
export function resolveTaskFiles(tasksDir: string, suffix: string): string[];
export function resolveTaskJsonFiles(tasksDir: string, suffix: string): string[];
export const SF_ROOT_FILES: Record<string, string>;
export function _clearSfRootCache(): void;
export function sfRoot(basePath?: string): string;
export const projectRoot: typeof sfRoot;
export function isRunningOnSelf(basePath?: string): boolean;
export function _resetSelfDetectionCache(): void;
export function sfRuntimeRoot(basePath?: string): string;
export function milestonesDir(basePath?: string): string;
export function resolveRuntimeFile(basePath?: string): string;
export function resolveSfRootFile(basePath: string, key: string): string;
export function relSfRootFile(key: string): string;
export function resolveMilestonePath(basePath: string, milestoneId: string): string;
export function resolveMilestoneFile(basePath: string, milestoneId: string, suffix: string): string;
export function resolveSlicePath(basePath: string, milestoneId: string, sliceId: string): string;
export function resolveSliceFile(basePath: string, milestoneId: string, sliceId: string, suffix: string): string;
export function resolveTasksDir(basePath: string, milestoneId: string, sliceId: string): string;
export function resolveTaskFile(basePath: string, milestoneId: string, sliceId: string, taskId: string, suffix: string): string;
export function relMilestonePath(basePath: string, milestoneId: string): string;
export function relMilestoneFile(basePath: string, milestoneId: string, suffix: string): string;
export function relSlicePath(basePath: string, milestoneId: string, sliceId: string): string;
export function relSliceFile(basePath: string, milestoneId: string, sliceId: string, suffix: string): string;
export function relTaskFile(basePath: string, milestoneId: string, sliceId: string, taskId: string, suffix: string): string;
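
A short sketch of how the resolver half of this surface might be used; the import path, the milestone/slice/task ids, and the `spec.md` suffix are illustrative assumptions.

```ts
// Import path, ids, and the file suffixes below are illustrative assumptions.
import {
  sfRoot,
  milestonesDir,
  resolveMilestoneFile,
  relTaskFile,
} from './sf-paths';

const base = sfRoot(process.cwd());
console.log('milestones live under:', milestonesDir(base));

// Absolute path to a milestone-level artifact.
const spec = resolveMilestoneFile(base, 'm-001', 'spec.md');

// Repository-relative path to a task-level artifact, e.g. for display.
const taskRel = relTaskFile(base, 'm-001', 's-01', 't-003', 'task.md');
console.log(spec, taskRel);
```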

View file

@@ -1,20 +0,0 @@
export function filterModelsByProviderModelAllow(models: unknown[], providerModelAllow: unknown, providerModelBlock: unknown): unknown[];
export function isProviderAllowedByLists(provider: string, allowedProviders: string[], blockedProviders: string[]): boolean;
export function isProviderAllowedForAdvisor(providerKey: string, prefs: Record<string, unknown>): boolean;
export function resolveModelForUnit(unitType: string): string;
export function resolveModelWithFallbacksForUnit(unitType: string, options?: Record<string, unknown>): string;
export function resolveDefaultSessionModel(sessionProvider: string): string;
export function isCustomProvider(provider: string): boolean;
export function getNextFallbackModel(currentModelId: string, modelConfig: Record<string, unknown>): string | null;
export function isTransientNetworkError(errorMsg: string): boolean;
export function validateModelId(modelId: string): boolean;
export function updatePreferencesModels(models: unknown[]): void;
export function updateSubscriptionTokensUsed(provider: string, tokensConsumed: number): void;
export function resolveDynamicRoutingConfig(): Record<string, unknown>;
export function resolvePersistModelChanges(): boolean;
export function resolveAutoSupervisorConfig(): Record<string, unknown>;
export function resolveProfileDefaults(profile: string): Record<string, unknown>;
export function resolveEffectiveProfile(): string;
export function resolveInlineLevel(): string;
export function resolveContextSelection(): string;
export function resolveSearchProviderFromPreferences(): string;
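
A small sketch of the resolve-then-fall-back flow these declarations suggest; the import path, the `'executor'` unit type, and the shape of `modelConfig` are assumptions.

```ts
// Import path, unit type name, and the modelConfig shape are assumptions;
// the signatures are from the declarations above.
import {
  resolveModelForUnit,
  getNextFallbackModel,
  isTransientNetworkError,
} from './model-resolution';

function nextModelAfterFailure(
  modelConfig: Record<string, unknown>,
  errorMsg: string,
): string | null {
  const current = resolveModelForUnit('executor');
  // Transient network errors retry on the same model; anything else moves
  // to the next fallback, which is null once the chain is exhausted.
  if (isTransientNetworkError(errorMsg)) {
    return current;
  }
  return getNextFallbackModel(current, modelConfig);
}
```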

View file

@@ -1,19 +0,0 @@
export function resolveSkillDiscoveryMode(): string;
export function resolveSkillStalenessDays(): number;
export function getGlobalSFPreferencesPath(): string;
export function getLegacyGlobalSFPreferencesPath(): string;
export function getProjectSFPreferencesPath(): string;
export function loadGlobalSFPreferences(): Record<string, unknown>;
export function loadProjectSFPreferences(): Record<string, unknown>;
export function loadEffectiveSFPreferences(): {
  path: string;
  preferences: Record<string, unknown>;
} | null;
export function _resetParseWarningFlag(): void;
export function parsePreferencesMarkdown(content: string): Record<string, unknown>;
export function applyModeDefaults(mode: string, prefs: Record<string, unknown>): Record<string, unknown>;
export function renderPreferencesForSystemPrompt(preferences: Record<string, unknown>, resolutions: Record<string, unknown>): string;
export function resolvePostUnitHooks(): string[];
export function resolvePreDispatchHooks(): string[];
export function getIsolationMode(): string;
export function resolveParallelConfig(prefs: Record<string, unknown>): Record<string, unknown>;
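
A minimal sketch of loading effective preferences and deriving runtime settings from them; the import path is an assumption, and the null guard follows directly from the declared `{ path; preferences } | null` return type.

```ts
// Import path is an assumption; the signatures are from the declarations above.
import {
  loadEffectiveSFPreferences,
  resolveParallelConfig,
  getIsolationMode,
  resolveSkillStalenessDays,
} from './sf-preferences';

// The declared return type is `{ path; preferences } | null`, so guard for null.
const effective = loadEffectiveSFPreferences();
const prefs = effective?.preferences ?? {};

console.log('preferences file:', effective?.path ?? '(none)');
console.log('isolation mode:', getIsolationMode());
console.log('parallelism:', resolveParallelConfig(prefs));
console.log('skill staleness (days):', resolveSkillStalenessDays());
```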

View file

@@ -34,7 +34,7 @@ After reflection is confirmed, decide the approach based on the actual scope —
Before asking your first question, do a mandatory investigation pass. This is not optional.
1. **Scout the codebase** — use in-process `grep`, `find`, `ls`, and `lsp` first; use `codebase_search` for Sift-backed hybrid retrieval; use `scout` for broad unfamiliar areas that need a separate explorer. Understand what already exists, what patterns are established, what constraints current code imposes.
1. **Scout the codebase** — use `codebase_search` for conceptual, behavioral, or architectural discovery (e.g. "how does X work?", "where is Y handled?"); use in-process `grep`, `find`, `ls`, and `lsp` for exact identifier matches or structural navigation. Use `scout` for broad unfamiliar areas that need a separate explorer. Understand what already exists, what patterns are established, what constraints current code imposes.
2. **Check library docs — DeepWiki first.** Use `ask_question` / `read_wiki_structure` / `read_wiki_contents` (DeepWiki) as the default for any GitHub-hosted library or framework the user mentioned. Fall back to `resolve_library` / `get_library_docs` (Context7) for npm/pypi/crates packages DeepWiki doesn't have. **Context7 free tier is capped at 1000 req/month — spend those on cases DeepWiki can't cover.** Get current facts about capabilities, constraints, API shapes, version-specific behavior.
3. **Web search.** Use `search-the-web` if the domain is unfamiliar, if you need current best practices, or if the user referenced external services/APIs you need facts about. Use `fetch_page` for full content when snippets aren't enough.

View file

@@ -15,7 +15,7 @@ Apply `pm-planning` skill thinking throughout: use Working Backwards to anchor o
### Before your first question round
Do a lightweight targeted investigation so your questions are grounded in reality:
- Scout the codebase with in-process `grep`, `find`, `ls`, and `lsp` first; use `codebase_search` for Sift-backed hybrid retrieval; use `scout` for broad unfamiliar areas that need a separate explorer
- Scout the codebase: use `codebase_search` for conceptual, behavioral, or architectural discovery (e.g. "how does X work?", "where is Y handled?"); use in-process `grep`, `find`, `ls`, and `lsp` for exact identifier matches or structural navigation. Use `scout` for broad unfamiliar areas that need a separate explorer.
- If the `PROJECT CODE INTELLIGENCE` block says Project RAG is configured, use its MCP search tools for broad concept, symbol, schema, and git-history lookup before manually reading files
- Check the roadmap context above (if present) to understand what surrounds this milestone
- **Library docs — DeepWiki first.** Use `ask_question` / `read_wiki_structure` / `read_wiki_contents` (DeepWiki) for any GitHub-hosted library. Fall back to `resolve_library` / `get_library_docs` (Context7) only when DeepWiki doesn't have it (Context7 is capped at 1000 req/month free tier).

View file

@@ -11,7 +11,7 @@ Your goal is **not** to center the discussion on tech stack trivia, naming conve
### Before your first question round
Do a lightweight targeted investigation so your questions are grounded in reality:
- Scout the codebase with in-process `grep`, `find`, `ls`, and `lsp` first; use `codebase_search` for Sift-backed hybrid retrieval; use `scout` for broad unfamiliar areas that need a separate explorer
- Scout the codebase: use `codebase_search` for conceptual, behavioral, or architectural discovery (e.g. "how does X work?", "where is Y handled?"); use in-process `grep`, `find`, `ls`, and `lsp` for exact identifier matches or structural navigation. Use `scout` for broad unfamiliar areas that need a separate explorer.
- Check the roadmap context above to understand what surrounds this slice — what comes before, what depends on it
- **Library docs — DeepWiki first.** Use `ask_question` / `read_wiki_structure` / `read_wiki_contents` (DeepWiki) for any GitHub-hosted library. Fall back to `resolve_library` / `get_library_docs` (Context7) only when DeepWiki doesn't have it (Context7 is capped at 1000 req/month free tier).
- Identify the 3-5 biggest behavioural unknowns: things where the user's answer will materially change what gets built

View file

@@ -26,7 +26,7 @@ Never fabricate or simulate user input during this discussion. Never generate fa
- Check library docs **DeepWiki first** (`ask_question` / `read_wiki_structure` / `read_wiki_contents`) for any GitHub-hosted library or framework — AI-indexed, no free-tier cap. Fall back to Context7 (`resolve_library` / `get_library_docs`) for npm/pypi/crates packages DeepWiki doesn't cover. Context7 free tier is 1000 req/month — don't spend those on cases DeepWiki covers.
- Do web searches (`search-the-web`) to verify the landscape — what solutions exist, what's changed recently, what's the current best practice. Use `freshness` for recency-sensitive queries, `domain` to target specific sites. Use `fetch_page` to read the full content of promising URLs when snippets aren't enough. **Budget:** You have a limited number of web searches per turn (typically 3-5). Prefer DeepWiki → Context7 → web search for docs; use `search_and_read` for one-shot topic research. Do NOT repeat the same or similar queries. Distribute searches across turns rather than clustering them.
- Scout the codebase with in-process `grep`, `find`, `ls`, and `lsp` first; use `codebase_search` for Sift-backed hybrid retrieval; use `scout` for broad unfamiliar areas that need a separate explorer. Understand what already exists, what patterns are established, what constraints current code imposes
- Scout the codebase: use `codebase_search` for conceptual, behavioral, or architectural discovery (e.g. "how does X work?", "where is Y handled?"); use in-process `grep`, `find`, `ls`, and `lsp` for exact identifier matches or structural navigation. Use `scout` for broad unfamiliar areas that need a separate explorer. Understand what already exists, what patterns are established, what constraints current code imposes.
Don't go deep — just enough that your next question reflects what's actually true rather than what you assume.

View file

@@ -161,7 +161,7 @@ Templates showing the expected format for each artifact type are in:
**Code navigation:** Use `lsp` for definition, type_definition, implementation, references, incoming_calls, outgoing_calls, hover, signature, symbols, rename, code_actions, format, and diagnostics. Falls back gracefully if no server is available. Never `grep` for a symbol definition when `lsp` can resolve it semantically. Never shell out to prettier/rustfmt/gofmt when `lsp format` is available. After editing code, use `lsp diagnostics` to verify no type errors were introduced.
**Codebase exploration:** Prefer in-process SF tools first: `grep` for exact text search, `find`/`ls` for filesystem discovery, and `lsp` for structural navigation. These avoid shelling out and use SF's native backends where available. Use `.sf/CODEBASE.md` for durable orientation. If the `PROJECT CODE INTELLIGENCE` block says Project RAG is configured, use its MCP tools for broad hybrid semantic + BM25 code retrieval before manual file-by-file reading. Use `codebase_search` when Sift-backed hybrid retrieval is a better fit than exact search. Use `subagent` with `scout` for broad unfamiliar subsystem mapping that needs an explorer's judgment. Never read files one-by-one to "explore" — search first, then read what's relevant.
**Codebase exploration:** For conceptual, behavioral, or architectural discovery (e.g. "how does X work?", "where is Y handled?"), use `codebase_search` first. Its hybrid BM25+Vector retrieval is significantly more effective than grep for navigating unfamiliar logic. Use in-process SF tools like `grep` for exact text matches when you already have a specific identifier, and `find`/`ls` for literal filesystem discovery. Use `lsp` for structural navigation (definitions, references). Use `.sf/CODEBASE.md` for durable orientation. If the `PROJECT CODE INTELLIGENCE` block says Project RAG is configured, use its MCP tools for broad hybrid semantic + BM25 code retrieval before manual file-by-file reading. Never read files one-by-one to "explore" — search first, then read what's relevant.
**Swarm dispatch:** Let the system decide whether swarming fits before dispatching multiple execution subagents. Use a 2-3 worker same-model swarm only when the work splits into independent shards with explicit file/directory ownership, shard-local verification, low conflict risk, and clear wall-clock savings. Do not swarm shared-interface edits, lockfiles, migrations, single-failure debugging, or sequence-dependent work. The parent agent remains coordinator: assign ownership, synthesize results, inspect dirty files, resolve conflicts, and run final verification.

View file

@@ -1,10 +0,0 @@
export function readRepoMeta(externalPath: string): Record<string, unknown>;
export function isInheritedRepo(basePath?: string): boolean;
export function validateProjectId(id: string): boolean;
export function repoIdentity(basePath?: string): Record<string, unknown>;
export function externalSfRoot(basePath?: string): string | null;
export function externalProjectsRoot(): string;
export function cleanNumberedSfVariants(projectPath: string): string;
export function hasExternalProjectState(externalPath: string): boolean;
export function ensureSfSymlink(projectPath: string): string;
export function isInsideWorktree(cwd: string): boolean;
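
A sketch of how the inherited-repo checks might be combined; the import path and the reading that external state lives outside the worktree are assumptions, since the declarations only fix the signatures.

```ts
// Import path and the semantic comments below are assumptions; the call
// shapes follow the declarations above.
import {
  isInheritedRepo,
  repoIdentity,
  externalSfRoot,
  ensureSfSymlink,
} from './repo-meta';

const cwd = process.cwd();
console.log('identity:', repoIdentity(cwd));

if (isInheritedRepo(cwd)) {
  // externalSfRoot is declared as string | null, so treat null as
  // "no external state configured".
  const external = externalSfRoot(cwd);
  if (external !== null) {
    // Presumably links the project's SF state into place; returns the path used.
    const link = ensureSfSymlink(cwd);
    console.log('external root:', external, 'symlink:', link);
  }
}
```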

View file

@@ -1,29 +0,0 @@
export interface Span {
  id: string;
  name: string;
  startTime: number;
  endTime?: number;
  attributes: Record<string, unknown>;
  children: Span[];
}
export interface Trace {
  id: string;
  rootSpan: Span;
  startTime: number;
  endTime?: number;
  attributes: Record<string, unknown>;
}
export function isTraceEnabled(): boolean;
export function initTraceCollector(projectRoot: string, sessionId: string | null | undefined, command: string, model: string | null): Trace | null;
export function flushTrace(projectRoot: string): void;
export function getActiveTrace(): Trace | null;
export function startUnitSpan(unitType: string, unitId: string, attributes?: Record<string, unknown>): Span | null;
export function startToolSpan(parentSpan: Span, toolName: string, toolCallId: string, attributes?: Record<string, unknown>): Span;
export function completeSpan(span: Span, status?: string): void;
export function traceEvent(span: Span, name: string, attrs: Record<string, unknown>): void;
export function traceError(span: Span, message: string, stack?: string): void;
export function findTraceSpan(id: string): Span | null;
export function setTraceCost(inputTokens: number, outputTokens: number, cacheReadTokens: number, cacheWriteTokens: number, costUsd: number): void;
export function setTraceExitCode(code: number): void;
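
The trace surface reads as a collector-plus-span lifecycle: initialize once, open a unit span, nest tool spans under it, then flush. A sketch under that reading; the import path, the `'execute'` command string, the ids, and the `'ok'` status value are assumptions.

```ts
// Import path, command name, ids, and status strings are assumptions;
// the call shapes follow the declarations above.
import {
  isTraceEnabled,
  initTraceCollector,
  startUnitSpan,
  startToolSpan,
  completeSpan,
  setTraceExitCode,
  flushTrace,
} from './trace';

const projectRoot = process.cwd();

if (isTraceEnabled()) {
  initTraceCollector(projectRoot, null, 'execute', null);

  // startUnitSpan can return null (e.g. when no trace is active), so guard.
  const unit = startUnitSpan('task', 't-001', { title: 'example' });
  if (unit) {
    const tool = startToolSpan(unit, 'grep', 'call-1', { query: 'TODO' });
    completeSpan(tool, 'ok');
    completeSpan(unit);
  }

  setTraceExitCode(0);
  flushTrace(projectRoot);
}
```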

View file

@@ -1,16 +0,0 @@
export interface MilestoneRef {
  id: string;
  title?: string;
}
export interface SFState {
  milestones: unknown[];
  slices: unknown[];
  tasks: unknown[];
  activeMilestone?: MilestoneRef;
  lastCompletedMilestone?: MilestoneRef;
  activeSlice?: MilestoneRef;
  activeTask?: MilestoneRef;
  phase?: string;
  nextAction?: string;
}
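
A minimal value satisfying these shapes, assuming both interfaces are exported from a single module; the import path, ids, title, and phase strings are illustrative.

```ts
// The import path, ids, and phase/nextAction strings are illustrative.
import type { SFState } from './sf-state';

const state: SFState = {
  milestones: [],
  slices: [],
  tasks: [],
  activeMilestone: { id: 'm-001', title: 'Bootstrap' },
  phase: 'planning',
  nextAction: 'dispatch next slice',
};

// Optional members stay optional, so access them defensively.
console.log(state.activeMilestone?.id);
```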