Cherry-pick tool bug fixes from pi-mono upstream
- compaction: fix repeated compaction dropping kept messages (#2608)
  Re-summarize from previous compaction's firstKeptEntryId instead of prevCompactionIndex+1; use buildSessionContext for accurate tokensBefore
- edit: add multi-edit support via edits[] array (a usage sketch follows the commit metadata below)
  Single call can update multiple disjoint regions in one file; applyEditsToNormalizedContent matches all edits against original content and applies in reverse order for stable offsets
- bash: persist full output when line-count truncation occurs (#2852)
  ensureTempFile now called on any truncation, not only byte overflow; prevents data loss when output exceeds line limit before byte threshold
- bash-executor: same fix for remote/operations-based execution
  ensureTempFile includes SF cleanup registration (registerTempCleanup, bashTempFiles tracking)
- grep: include lineText from rg JSON events to avoid per-match file reads
  Eliminates stall when context=0 on broad searches (#3148)
- agent-session: forward isError override from afterToolCall extension hook
  Allows extensions to change error status of tool results (#3051)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
parent 1c4b289d89
commit f153521c24
7 changed files with 298 additions and 150 deletions
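A minimal usage sketch of the new edits[] path (illustrative only, not part of the diff): it calls the applyEditsToNormalizedContent export and ReplaceEdit type that this commit adds to edit-diff; the file content, edit values, and the "config.ts" path are hypothetical.

import { applyEditsToNormalizedContent, type ReplaceEdit } from "./edit-diff.js";

// Two disjoint replacements against the same LF-normalized file content.
const original = "export const retries = 3;\nexport const timeoutMs = 1000;\n";
const edits: ReplaceEdit[] = [
  { oldText: "retries = 3", newText: "retries = 5" },
  { oldText: "timeoutMs = 1000", newText: "timeoutMs = 2500" },
];

// Each edit is matched against the original content, overlaps are rejected,
// and replacements are applied in reverse offset order so earlier edits
// cannot shift the match positions of later ones.
const { newContent } = applyEditsToNormalizedContent(original, edits, "config.ts");
console.log(newContent); // -> retries = 5 and timeoutMs = 2500

Matching every edit against the original content, rather than re-matching after each replacement, is what keeps the multi-edit call independent of the order in which edits are listed.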
@@ -598,6 +598,7 @@ export class AgentSession {
       return {
         content: resultResult.content ?? undefined,
         details: resultResult.details ?? undefined,
+        isError: resultResult.isError ?? isError,
       };
     }

@@ -234,6 +234,20 @@ export async function executeBashWithOperations(
   let streamState2: StreamState | undefined;

+  const ensureTempFile = () => {
+    if (tempFilePath) {
+      return;
+    }
+    registerTempCleanup();
+    const id = randomBytes(8).toString("hex");
+    tempFilePath = join(tmpdir(), `pi-bash-${id}.log`);
+    bashTempFiles.add(tempFilePath);
+    tempFileStream = createWriteStream(tempFilePath);
+    for (const chunk of outputChunks) {
+      tempFileStream.write(chunk);
+    }
+  };
+
   const onData = (data: Buffer) => {
     totalBytes += data.length;
@@ -243,15 +257,8 @@ export async function executeBashWithOperations(
     const text = result.text;

-    // Start writing to temp file if exceeds threshold
-    if (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {
-      registerTempCleanup();
-      const id = randomBytes(8).toString("hex");
-      tempFilePath = join(tmpdir(), `pi-bash-${id}.log`);
-      bashTempFiles.add(tempFilePath);
-      tempFileStream = createWriteStream(tempFilePath);
-      for (const chunk of outputChunks) {
-        tempFileStream.write(chunk);
-      }
+    if (totalBytes > DEFAULT_MAX_BYTES) {
+      ensureTempFile();
     }

     if (tempFileStream) {
@@ -284,6 +291,9 @@ export async function executeBashWithOperations(

       const fullOutput = outputChunks.join("");
       const truncationResult = truncateTail(fullOutput);
+      if (truncationResult.truncated) {
+        ensureTempFile();
+      }
       const cancelled = options?.signal?.aborted ?? false;

       return {
@@ -302,6 +312,9 @@ export async function executeBashWithOperations(
     if (options?.signal?.aborted) {
       const fullOutput = outputChunks.join("");
       const truncationResult = truncateTail(fullOutput);
+      if (truncationResult.truncated) {
+        ensureTempFile();
+      }
       return {
         output: truncationResult.truncated ? truncationResult.content : fullOutput,
         exitCode: undefined,
@@ -10,9 +10,8 @@ import type { AssistantMessage, Model, Usage } from "@singularity-forge/pi-ai";
 import { completeSimple } from "@singularity-forge/pi-ai";
 import { COMPACTION_KEEP_RECENT_TOKENS, COMPACTION_RESERVE_TOKENS } from "../constants.js";
 import { convertToLlm } from "../messages.js";
-import type { CompactionEntry, SessionEntry } from "../session-manager.js";
+import { buildSessionContext, type CompactionEntry, type SessionEntry } from "../session-manager.js";
 import {
   collectMessages,
   computeFileLists,
   createFileOps,
   createSummarizationMessage,
@@ -25,6 +24,13 @@ import {
   serializeConversation,
 } from "./utils.js";

+function getMessageFromEntryForCompaction(entry: SessionEntry): AgentMessage | undefined {
+  if (entry.type === "compaction") {
+    return undefined;
+  }
+  return getMessageFromEntry(entry);
+}
+
 // ============================================================================
 // File Operation Tracking
 // ============================================================================
@@ -669,12 +675,17 @@ export function prepareCompaction(
       break;
     }
   }
-  const boundaryStart = prevCompactionIndex + 1;
+  let previousSummary: string | undefined;
+  let boundaryStart = 0;
+  if (prevCompactionIndex >= 0) {
+    const prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;
+    previousSummary = prevCompaction.summary;
+    const firstKeptEntryIndex = pathEntries.findIndex((entry) => entry.id === prevCompaction.firstKeptEntryId);
+    boundaryStart = firstKeptEntryIndex >= 0 ? firstKeptEntryIndex : prevCompactionIndex + 1;
+  }
   const boundaryEnd = pathEntries.length;

-  const usageStart = prevCompactionIndex >= 0 ? prevCompactionIndex : 0;
-  const usageMessages = collectMessages(pathEntries, usageStart, boundaryEnd);
-  const tokensBefore = estimateContextTokens(usageMessages).tokens;
+  const tokensBefore = estimateContextTokens(buildSessionContext(pathEntries).messages).tokens;

   const cutPoint = findCutPoint(pathEntries, boundaryStart, boundaryEnd, settings.keepRecentTokens);
@@ -688,18 +699,19 @@ export function prepareCompaction(
   const historyEnd = cutPoint.isSplitTurn ? cutPoint.turnStartIndex : cutPoint.firstKeptEntryIndex;

   // Messages to summarize (will be discarded after summary)
-  const messagesToSummarize = collectMessages(pathEntries, boundaryStart, historyEnd);
+  const messagesToSummarize: AgentMessage[] = [];
+  for (let i = boundaryStart; i < historyEnd; i++) {
+    const msg = getMessageFromEntryForCompaction(pathEntries[i]);
+    if (msg) messagesToSummarize.push(msg);
+  }

   // Messages for turn prefix summary (if splitting a turn)
-  const turnPrefixMessages = cutPoint.isSplitTurn
-    ? collectMessages(pathEntries, cutPoint.turnStartIndex, cutPoint.firstKeptEntryIndex)
-    : [];
-
-  // Get previous summary for iterative update
-  let previousSummary: string | undefined;
-  if (prevCompactionIndex >= 0) {
-    const prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;
-    previousSummary = prevCompaction.summary;
+  const turnPrefixMessages: AgentMessage[] = [];
+  if (cutPoint.isSplitTurn) {
+    for (let i = cutPoint.turnStartIndex; i < cutPoint.firstKeptEntryIndex; i++) {
+      const msg = getMessageFromEntryForCompaction(pathEntries[i]);
+      if (msg) turnPrefixMessages.push(msg);
+    }
+  }

   // Extract file operations from messages and previous compaction
@@ -342,26 +342,28 @@ export function createBashTool(cwd: string, options?: BashToolOptions): AgentToo
     // Keep more than we need so we have enough for truncation
     const maxChunksBytes = DEFAULT_MAX_BYTES * 2;

+    const ensureTempFile = () => {
+      if (spillFilePath) return;
+      if (artifactManager) {
+        const allocated = artifactManager.allocatePath("bash");
+        spillFilePath = allocated.path;
+        spillArtifactId = allocated.id;
+      } else {
+        spillFilePath = getTempFilePath();
+      }
+      spillFileStream = createWriteStream(spillFilePath);
+      for (const chunk of chunks) spillFileStream.write(chunk);
+    };
+
     const handleData = (data: Buffer) => {
       totalBytes += data.length;

-      // Start writing to file once we exceed the threshold
-      if (totalBytes > DEFAULT_MAX_BYTES && !spillFilePath) {
-        if (artifactManager) {
-          const allocated = artifactManager.allocatePath("bash");
-          spillFilePath = allocated.path;
-          spillArtifactId = allocated.id;
-        } else {
-          spillFilePath = getTempFilePath();
-        }
-        spillFileStream = createWriteStream(spillFilePath);
-        // Write all buffered chunks to the file
-        for (const chunk of chunks) {
-          spillFileStream.write(chunk);
-        }
+      // Start writing to a file once output exceeds the in-memory threshold.
+      if (totalBytes > DEFAULT_MAX_BYTES) {
+        ensureTempFile();
       }

-      // Write to temp file if we have one
+      // Write to spill file if we have one
       if (spillFileStream) {
         spillFileStream.write(data);
       }
@@ -381,6 +383,9 @@ export function createBashTool(cwd: string, options?: BashToolOptions): AgentToo
         const fullBuffer = Buffer.concat(chunks);
         const fullText = fullBuffer.toString("utf-8");
         const truncation = truncateTail(fullText);
+        if (truncation.truncated) {
+          ensureTempFile();
+        }
         onUpdate({
           content: [{ type: "text", text: truncation.content || "" }],
           details: {
@@ -398,17 +403,16 @@ export function createBashTool(cwd: string, options?: BashToolOptions): AgentToo
           env: spawnContext.env,
         })
           .then(({ exitCode }) => {
-            // Close temp file stream
-            if (spillFileStream) {
-              spillFileStream.end();
-            }
-
             // Combine all buffered chunks
             const fullBuffer = Buffer.concat(chunks);
             const fullOutput = fullBuffer.toString("utf-8");

             // Apply tail truncation
             const truncation = truncateTail(fullOutput);
+            if (truncation.truncated) {
+              ensureTempFile();
+            }
+            // Close spill file stream before building the final result.
+            if (spillFileStream) spillFileStream.end();
             let outputText = truncation.content || "(no output)";

             // Build details with truncation info
@@ -63,6 +63,23 @@ export interface FuzzyMatchResult {
   contentForReplacement: string;
 }

+export interface ReplaceEdit {
+  oldText: string;
+  newText: string;
+}
+
+interface MatchedEdit {
+  editIndex: number;
+  matchIndex: number;
+  matchLength: number;
+  newText: string;
+}
+
+export interface AppliedEditsResult {
+  baseContent: string;
+  newContent: string;
+}
+
 /**
  * Find oldText in content, trying exact match first, then fuzzy match.
  *
@@ -109,6 +126,127 @@ export function stripBom(content: string): { bom: string; text: string } {
   return content.startsWith("\uFEFF") ? { bom: "\uFEFF", text: content.slice(1) } : { bom: "", text: content };
 }

+function countOccurrences(content: string, oldText: string): number {
+  const fuzzyContent = normalizeForFuzzyMatch(content);
+  const fuzzyOldText = normalizeForFuzzyMatch(oldText);
+  return fuzzyContent.split(fuzzyOldText).length - 1;
+}
+
+function getNotFoundError(path: string, editIndex: number, totalEdits: number): Error {
+  if (totalEdits === 1) {
+    return new Error(
+      `Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
+    );
+  }
+  return new Error(
+    `Could not find edits[${editIndex}] in ${path}. The oldText must match exactly including all whitespace and newlines.`,
+  );
+}
+
+function getDuplicateError(path: string, editIndex: number, totalEdits: number, occurrences: number): Error {
+  if (totalEdits === 1) {
+    return new Error(
+      `Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
+    );
+  }
+  return new Error(
+    `Found ${occurrences} occurrences of edits[${editIndex}] in ${path}. Each oldText must be unique. Please provide more context to make it unique.`,
+  );
+}
+
+function getEmptyOldTextError(path: string, editIndex: number, totalEdits: number): Error {
+  if (totalEdits === 1) {
+    return new Error(`oldText must not be empty in ${path}.`);
+  }
+  return new Error(`edits[${editIndex}].oldText must not be empty in ${path}.`);
+}
+
+function getNoChangeError(path: string, totalEdits: number): Error {
+  if (totalEdits === 1) {
+    return new Error(
+      `No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`,
+    );
+  }
+  return new Error(`No changes made to ${path}. The replacements produced identical content.`);
+}
+
+/**
+ * Apply one or more exact-text replacements to LF-normalized content.
+ *
+ * All edits are matched against the same original content. Replacements are
+ * then applied in reverse order so offsets remain stable. If any edit needs
+ * fuzzy matching, the operation runs in fuzzy-normalized content space to
+ * preserve current single-edit behavior.
+ */
+export function applyEditsToNormalizedContent(
+  normalizedContent: string,
+  edits: ReplaceEdit[],
+  path: string,
+): AppliedEditsResult {
+  const normalizedEdits = edits.map((edit) => ({
+    oldText: normalizeToLF(edit.oldText),
+    newText: normalizeToLF(edit.newText),
+  }));
+
+  for (let i = 0; i < normalizedEdits.length; i++) {
+    if (normalizedEdits[i].oldText.length === 0) {
+      throw getEmptyOldTextError(path, i, normalizedEdits.length);
+    }
+  }
+
+  const initialMatches = normalizedEdits.map((edit) => fuzzyFindText(normalizedContent, edit.oldText));
+  const baseContent = initialMatches.some((match) => match.usedFuzzyMatch)
+    ? normalizeForFuzzyMatch(normalizedContent)
+    : normalizedContent;
+
+  const matchedEdits: MatchedEdit[] = [];
+  for (let i = 0; i < normalizedEdits.length; i++) {
+    const edit = normalizedEdits[i];
+    const matchResult = fuzzyFindText(baseContent, edit.oldText);
+    if (!matchResult.found) {
+      throw getNotFoundError(path, i, normalizedEdits.length);
+    }
+
+    const occurrences = countOccurrences(baseContent, edit.oldText);
+    if (occurrences > 1) {
+      throw getDuplicateError(path, i, normalizedEdits.length, occurrences);
+    }
+
+    matchedEdits.push({
+      editIndex: i,
+      matchIndex: matchResult.index,
+      matchLength: matchResult.matchLength,
+      newText: edit.newText,
+    });
+  }
+
+  matchedEdits.sort((a, b) => a.matchIndex - b.matchIndex);
+  for (let i = 1; i < matchedEdits.length; i++) {
+    const previous = matchedEdits[i - 1];
+    const current = matchedEdits[i];
+    if (previous.matchIndex + previous.matchLength > current.matchIndex) {
+      throw new Error(
+        `edits[${previous.editIndex}] and edits[${current.editIndex}] overlap in ${path}. Merge them into one edit or target disjoint regions.`,
+      );
+    }
+  }
+
+  let newContent = baseContent;
+  for (let i = matchedEdits.length - 1; i >= 0; i--) {
+    const edit = matchedEdits[i];
+    newContent =
+      newContent.substring(0, edit.matchIndex) +
+      edit.newText +
+      newContent.substring(edit.matchIndex + edit.matchLength);
+  }
+
+  if (baseContent === newContent) {
+    throw getNoChangeError(path, normalizedEdits.length);
+  }
+
+  return { baseContent, newContent };
+}
+
 /**
  * Generate a unified diff string with line numbers and context.
  *
@@ -330,13 +468,12 @@ function buildLineDiffLinear(oldLines: string[], newLines: string[]): LineDiffOp
 }

 /**
- * Compute the diff for an edit operation without applying it.
+ * Compute the diff for one or more edit operations without applying them.
  * Used for preview rendering in the TUI before the tool executes.
  */
-export async function computeEditDiff(
+export async function computeEditsDiff(
   path: string,
-  oldText: string,
-  newText: string,
+  edits: ReplaceEdit[],
   cwd: string,
 ): Promise<EditDiffResult | EditDiffError> {
   const absolutePath = resolveToCwd(path, cwd);
@@ -354,45 +491,8 @@ export async function computeEditDiff(

     // Strip BOM before matching (LLM won't include invisible BOM in oldText)
     const { text: content } = stripBom(rawContent);

     const normalizedContent = normalizeToLF(content);
-    const normalizedOldText = normalizeToLF(oldText);
-    const normalizedNewText = normalizeToLF(newText);
-
-    // Find the old text using fuzzy matching (tries exact match first, then fuzzy)
-    const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);
-
-    if (!matchResult.found) {
-      return {
-        error: `Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
-      };
-    }
-
-    // Count occurrences using fuzzy-normalized content for consistency
-    const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
-    const fuzzyOldText = normalizeForFuzzyMatch(normalizedOldText);
-    const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;
-
-    if (occurrences > 1) {
-      return {
-        error: `Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
-      };
-    }
-
-    // Compute the new content using the matched position
-    // When fuzzy matching was used, contentForReplacement is the normalized version
-    const baseContent = matchResult.contentForReplacement;
-    const newContent =
-      baseContent.substring(0, matchResult.index) +
-      normalizedNewText +
-      baseContent.substring(matchResult.index + matchResult.matchLength);
-
-    // Check if it would actually change anything
-    if (baseContent === newContent) {
-      return {
-        error: `No changes would be made to ${path}. The replacement produces identical content.`,
-      };
-    }
+    const { baseContent, newContent } = applyEditsToNormalizedContent(normalizedContent, edits, path);

     // Generate the diff
     return generateDiffString(baseContent, newContent);
@@ -400,3 +500,16 @@ export async function computeEditDiff(
     return { error: err instanceof Error ? err.message : String(err) };
   }
 }
+
+/**
+ * Compute the diff for a single edit operation without applying it.
+ * Kept as a convenience wrapper for single-edit callers.
+ */
+export async function computeEditDiff(
+  path: string,
+  oldText: string,
+  newText: string,
+  cwd: string,
+): Promise<EditDiffResult | EditDiffError> {
+  return computeEditsDiff(path, [{ oldText, newText }], cwd);
+}
@ -3,11 +3,13 @@ import { type Static, Type } from "@sinclair/typebox";
|
|||
import { constants } from "fs";
|
||||
import { access as fsAccess, readFile as fsReadFile, writeFile as fsWriteFile } from "fs/promises";
|
||||
import {
|
||||
applyEditsToNormalizedContent,
|
||||
detectLineEnding,
|
||||
fuzzyFindText,
|
||||
generateDiffString,
|
||||
normalizeForFuzzyMatch,
|
||||
normalizeToLF,
|
||||
type ReplaceEdit,
|
||||
restoreLineEndings,
|
||||
stripBom,
|
||||
} from "./edit-diff.js";
|
||||
|
|
@ -16,8 +18,15 @@ import { resolveToCwd } from "./path-utils.js";
|
|||
|
||||
const editSchema = Type.Object({
|
||||
path: Type.String({ description: "Path to the file to edit (relative or absolute)" }),
|
||||
oldText: Type.String({ description: "Exact text to find and replace (must match exactly)" }),
|
||||
newText: Type.String({ description: "New text to replace the old text with" }),
|
||||
oldText: Type.Optional(Type.String({ description: "Exact text to find and replace. Use for a single replacement." })),
|
||||
newText: Type.Optional(Type.String({ description: "New text to replace oldText with. Required when oldText is provided." })),
|
||||
edits: Type.Optional(Type.Array(
|
||||
Type.Object({
|
||||
oldText: Type.String({ description: "Exact text to find (must be unique in the file)" }),
|
||||
newText: Type.String({ description: "Replacement text" }),
|
||||
}),
|
||||
{ description: "Multiple disjoint replacements in the same file. Use instead of oldText/newText for multi-region edits." },
|
||||
)),
|
||||
});
|
||||
|
||||
export type EditToolInput = Static<typeof editSchema>;
|
||||
|
|
@@ -60,11 +69,11 @@ export function createEditTool(cwd: string, options?: EditToolOptions): AgentToo
       name: "edit",
       label: "edit",
       description:
-        "Edit a file by replacing exact text. The oldText must match exactly (including whitespace). Use this for precise, surgical edits.",
+        "Edit a file using exact text replacement. Use oldText/newText for a single replacement. Use edits[] when changing multiple separate, disjoint regions in the same file in one call — each edits[].oldText must be unique and non-overlapping.",
       parameters: editSchema,
       execute: async (
         _toolCallId: string,
-        { path, oldText, newText }: { path: string; oldText: string; newText: string },
+        { path, oldText, newText, edits: editsInput }: EditToolInput,
         signal?: AbortSignal,
       ) => {
         const absolutePath = resolveToCwd(path, cwd);
@@ -124,38 +133,47 @@ export function createEditTool(cwd: string, options?: EditToolOptions): AgentToo
         const originalEnding = detectLineEnding(content);
         const normalizedContent = normalizeToLF(content);
-        const normalizedOldText = normalizeToLF(oldText);
-        const normalizedNewText = normalizeToLF(newText);
-
-        // Find the old text using fuzzy matching (tries exact match first, then fuzzy)
-        const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);
-
-        if (!matchResult.found) {
-          if (signal) {
-            signal.removeEventListener("abort", onAbort);
-          }
-          reject(
-            new Error(
-              `Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`,
-            ),
-          );
-          return;
-        }
-
-        // Count occurrences using fuzzy-normalized content for consistency
-        const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
-        const fuzzyOldText = normalizeForFuzzyMatch(normalizedOldText);
-        const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;
-
-        if (occurrences > 1) {
-          if (signal) {
-            signal.removeEventListener("abort", onAbort);
-          }
-          reject(
-            new Error(
-              `Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`,
-            ),
-          );
+        let baseContent: string;
+        let newContent: string;
+
+        if (editsInput && editsInput.length > 0) {
+          // Multi-edit mode: apply all edits at once
+          const result = applyEditsToNormalizedContent(normalizedContent, editsInput as ReplaceEdit[], path);
+          baseContent = result.baseContent;
+          newContent = result.newContent;
+        } else if (oldText !== undefined && newText !== undefined) {
+          // Single-edit mode: fuzzy match single replacement
+          const normalizedOldText = normalizeToLF(oldText);
+          const normalizedNewText = normalizeToLF(newText);
+
+          const matchResult = fuzzyFindText(normalizedContent, normalizedOldText);
+          if (!matchResult.found) {
+            if (signal) signal.removeEventListener("abort", onAbort);
+            reject(new Error(`Could not find the exact text in ${path}. The old text must match exactly including all whitespace and newlines.`));
+            return;
+          }
+
+          const fuzzyContent = normalizeForFuzzyMatch(normalizedContent);
+          const fuzzyOldText = normalizeForFuzzyMatch(normalizedOldText);
+          const occurrences = fuzzyContent.split(fuzzyOldText).length - 1;
+          if (occurrences > 1) {
+            if (signal) signal.removeEventListener("abort", onAbort);
+            reject(new Error(`Found ${occurrences} occurrences of the text in ${path}. The text must be unique. Please provide more context to make it unique.`));
+            return;
+          }
+
+          baseContent = matchResult.contentForReplacement;
+          newContent = baseContent.substring(0, matchResult.index) + normalizedNewText + baseContent.substring(matchResult.index + matchResult.matchLength);
+
+          if (baseContent === newContent) {
+            if (signal) signal.removeEventListener("abort", onAbort);
+            reject(new Error(`No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`));
+            return;
+          }
+        } else {
+          if (signal) signal.removeEventListener("abort", onAbort);
+          reject(new Error("Edit tool input is invalid. Provide either oldText and newText, or edits[]."));
+          return;
+        }
@@ -164,27 +182,6 @@ export function createEditTool(cwd: string, options?: EditToolOptions): AgentToo
           return;
         }

-        // Perform replacement using the matched text position
-        // When fuzzy matching was used, contentForReplacement is the normalized version
-        const baseContent = matchResult.contentForReplacement;
-        const newContent =
-          baseContent.substring(0, matchResult.index) +
-          normalizedNewText +
-          baseContent.substring(matchResult.index + matchResult.matchLength);
-
-        // Verify the replacement actually changed something
-        if (baseContent === newContent) {
-          if (signal) {
-            signal.removeEventListener("abort", onAbort);
-          }
-          reject(
-            new Error(
-              `No changes made to ${path}. The replacement produced identical content. This might indicate an issue with special characters or the text not existing as expected.`,
-            ),
-          );
-          return;
-        }
-
         const finalContent = bom + restoreLineEndings(newContent, originalEnding);
         await ops.writeFile(absolutePath, finalContent);
@@ -230,9 +230,8 @@ export function createGrepTool(cwd: string, options?: GrepToolOptions): AgentToo
       return block;
     };

-    // Collect matches during streaming, format after
-    const matches: Array<{ filePath: string; lineNumber: number }> = [];
-
+    // Collect matches during streaming, then format them after rg exits.
+    const matches: Array<{ filePath: string; lineNumber: number; lineText?: string }> = [];
     rl.on("line", (line) => {
       if (!line.trim() || matchCount >= effectiveLimit) {
         return;
@@ -249,11 +248,9 @@ export function createGrepTool(cwd: string, options?: GrepToolOptions): AgentToo
         matchCount++;
         const filePath = event.data?.path?.text;
         const lineNumber = event.data?.line_number;
-
-        if (filePath && typeof lineNumber === "number") {
-          matches.push({ filePath, lineNumber });
-        }
-
+        const lineText = event.data?.lines?.text;
+        if (filePath && typeof lineNumber === "number")
+          matches.push({ filePath, lineNumber, lineText });
         if (matchCount >= effectiveLimit) {
           matchLimitReached = true;
           stopChild(true);
@@ -289,8 +286,19 @@ export function createGrepTool(cwd: string, options?: GrepToolOptions): AgentToo

       // Format matches (async to support remote file reading)
       for (const match of matches) {
-        const block = await formatBlock(match.filePath, match.lineNumber);
-        outputLines.push(...block);
+        if (contextValue === 0 && match.lineText !== undefined) {
+          const relativePath = formatPath(match.filePath);
+          const sanitized = match.lineText
+            .replace(/\r\n/g, "\n")
+            .replace(/\r/g, "")
+            .replace(/\n$/, "");
+          const { text: truncatedText, wasTruncated } = truncateLine(sanitized);
+          if (wasTruncated) linesTruncated = true;
+          outputLines.push(`${relativePath}:${match.lineNumber}: ${truncatedText}`);
+        } else {
+          const block = await formatBlock(match.filePath, match.lineNumber);
+          outputLines.push(...block);
+        }
       }

       // Apply byte truncation (no line limit since we already have match limit)