diff --git a/packages/junior/src/chat/prompt.ts b/packages/junior/src/chat/prompt.ts
index 4d253f8f..3ad78fc3 100644
--- a/packages/junior/src/chat/prompt.ts
+++ b/packages/junior/src/chat/prompt.ts
@@ -248,6 +248,40 @@ function formatActiveMcpCatalogsForPrompt(
return lines.join("\n");
}
+interface ToolPromptContext {
+ name: string;
+ promptGuidelines?: string[];
+ promptSnippet?: string;
+}
+
+function formatToolGuidanceForPrompt(
+ tools: ToolPromptContext[],
+): string | null {
+ const guidedTools = tools.filter(
+ (tool) =>
+ Boolean(tool.promptSnippet?.trim()) ||
+ (tool.promptGuidelines?.length ?? 0) > 0,
+ );
+ if (guidedTools.length === 0) {
+ return null;
+ }
+
+ const lines: string[] = [];
+ for (const tool of guidedTools) {
+ lines.push(` `);
+ if (tool.promptSnippet?.trim()) {
+ lines.push(` - ${escapeXml(tool.promptSnippet.trim())}`);
+ }
+ if (tool.promptGuidelines && tool.promptGuidelines.length > 0) {
+ for (const guideline of tool.promptGuidelines) {
+ lines.push(` - ${escapeXml(guideline)}`);
+ }
+ }
+ lines.push(" ");
+ }
+ return lines.join("\n");
+}
+
function formatReferenceFilesLines(): string[] | null {
const files = listReferenceFiles();
if (files.length === 0) {
@@ -527,6 +561,7 @@ function buildCapabilitiesSection(params: {
availableSkills: SkillMetadata[];
activeSkills: Skill[];
activeMcpCatalogs: ActiveMcpCatalogSummary[];
+ toolGuidance?: ToolPromptContext[];
}): string {
const blocks: string[] = [];
blocks.push(formatAvailableSkillsForPrompt(params.availableSkills));
@@ -539,6 +574,11 @@ function buildCapabilitiesSection(params: {
blocks.push(renderTagBlock("active-mcp-catalogs", activeCatalogs));
}
+ const toolGuidance = formatToolGuidanceForPrompt(params.toolGuidance ?? []);
+ if (toolGuidance) {
+ blocks.push(renderTagBlock("tool-guidance", toolGuidance));
+ }
+
const providerCatalog = formatProviderCatalogForPrompt();
if (providerCatalog) {
blocks.push(renderTagBlock("providers", providerCatalog));
@@ -551,6 +591,7 @@ type TurnContextPromptInput = {
availableSkills: SkillMetadata[];
activeSkills: Skill[];
activeMcpCatalogs?: ActiveMcpCatalogSummary[];
+ toolGuidance?: ToolPromptContext[];
runtime?: {
channelId?: string;
fastModelId?: string;
@@ -607,6 +648,7 @@ export function buildTurnContextPrompt(params: TurnContextPromptInput): string {
availableSkills: params.availableSkills,
activeSkills: params.activeSkills,
activeMcpCatalogs: params.activeMcpCatalogs ?? [],
+ toolGuidance: params.toolGuidance ?? [],
}),
buildContextSection({
requester: params.requester,
diff --git a/packages/junior/src/chat/respond.ts b/packages/junior/src/chat/respond.ts
index df39b39b..b4e8cadd 100644
--- a/packages/junior/src/chat/respond.ts
+++ b/packages/junior/src/chat/respond.ts
@@ -39,6 +39,7 @@ import type { ThreadArtifactsState } from "@/chat/state/artifacts";
import type { ConversationPendingAuthState } from "@/chat/state/conversation";
import { createTools } from "@/chat/tools";
import { resolveChannelCapabilities } from "@/chat/tools/channel-capabilities";
+import type { ToolDefinition } from "@/chat/tools/definition";
import { toActiveMcpCatalogSummaries } from "@/chat/tools/skill/mcp-tool-summary";
import type { ImageGenerateToolDeps } from "@/chat/tools/types";
import { createAdvisorToolDefinitions } from "@/chat/tools/advisor/tool";
@@ -828,6 +829,14 @@ export async function generateAssistantReply(
},
);
+ const toolGuidance = Object.entries(
+ tools as Record>,
+ ).map(([name, definition]) => ({
+ name,
+ promptGuidelines: definition.promptGuidelines,
+ promptSnippet: definition.promptSnippet,
+ }));
+
syncResumeState();
for (const skill of activeSkills) {
await turnMcpToolManager.activateForSkill(skill);
@@ -849,6 +858,7 @@ export async function generateAssistantReply(
availableSkills,
activeSkills,
activeMcpCatalogs,
+ toolGuidance,
runtime: {
channelId: toolChannelId,
fastModelId: botConfig.fastModelId,
diff --git a/packages/junior/src/chat/sandbox/sandbox.ts b/packages/junior/src/chat/sandbox/sandbox.ts
index 42d30dd3..c364aa7f 100644
--- a/packages/junior/src/chat/sandbox/sandbox.ts
+++ b/packages/junior/src/chat/sandbox/sandbox.ts
@@ -17,6 +17,12 @@ import {
} from "@/chat/sandbox/skill-sync";
import type { SandboxWorkspace } from "@/chat/sandbox/workspace";
import type { SkillMetadata } from "@/chat/skills";
+import { editFile } from "@/chat/tools/sandbox/edit-file";
+import { findFiles } from "@/chat/tools/sandbox/find-files";
+import { positiveInteger } from "@/chat/tools/sandbox/file-utils";
+import { grepFiles } from "@/chat/tools/sandbox/grep";
+import { listDir } from "@/chat/tools/sandbox/list-dir";
+import { sliceFileContent } from "@/chat/tools/sandbox/read-file";
// Spec: specs/security-policy.md (sandbox isolation, network policy, credential lifecycle)
// Spec: specs/logging/tracing-spec.md (required sandbox span semantics)
@@ -60,7 +66,15 @@ export interface SandboxExecutor {
dispose(): Promise;
}
-const SANDBOX_TOOL_NAMES = new Set(["bash", "readFile", "writeFile"]);
+const SANDBOX_TOOL_NAMES = new Set([
+ "bash",
+ "readFile",
+ "editFile",
+ "grep",
+ "findFiles",
+ "listDir",
+ "writeFile",
+]);
function parseHeaderTransforms(
raw: unknown,
@@ -171,6 +185,7 @@ export function createSandboxExecutor(options?: {
): Promise> => {
const headerTransforms = parseHeaderTransforms(rawInput.headerTransforms);
const env = parseEnv(rawInput.env);
+ const timeoutMs = positiveInteger(rawInput.timeoutMs);
logSandboxBootRequest("tool.bash", {
"app.sandbox.command_length": command.length,
});
@@ -187,6 +202,7 @@ export function createSandboxExecutor(options?: {
command,
...(headerTransforms ? { headerTransforms } : {}),
...(env ? { env } : {}),
+ ...(timeoutMs ? { timeoutMs } : {}),
});
setSpanAttributes({
"process.exit.code": response.exitCode,
@@ -222,7 +238,7 @@ export function createSandboxExecutor(options?: {
cwd: SANDBOX_WORKSPACE_ROOT,
exit_code: result.exitCode,
signal: null,
- timed_out: false,
+ timed_out: Boolean(result.timedOut),
stdout: result.stdout,
stderr: result.stderr,
stdout_truncated: result.stdoutTruncated,
@@ -238,6 +254,8 @@ export function createSandboxExecutor(options?: {
if (!filePath) {
throw new Error("path is required");
}
+ const offset = positiveInteger(rawInput.offset);
+ const limit = positiveInteger(rawInput.limit);
if (!sessionManager.getSandboxId()) {
const hostPath =
@@ -254,11 +272,12 @@ export function createSandboxExecutor(options?: {
});
setSpanStatus("ok");
return {
- result: {
+ result: sliceFileContent({
content,
path: filePath,
- success: true,
- } as T,
+ offset,
+ limit,
+ }) as T,
};
} catch (error) {
if (!isHostFileMissingError(error)) {
@@ -288,9 +307,12 @@ export function createSandboxExecutor(options?: {
});
setSpanStatus("ok");
return {
- content,
- path: filePath,
- success: true,
+ ...sliceFileContent({
+ content,
+ path: filePath,
+ offset,
+ limit,
+ }),
};
},
);
@@ -338,6 +360,139 @@ export function createSandboxExecutor(options?: {
};
};
+ const executeEditFileTool = async (
+ rawInput: Record,
+ ): Promise> => {
+ const filePath = String(rawInput.path ?? "").trim();
+ if (!filePath) {
+ throw new Error("path is required");
+ }
+ if (!Array.isArray(rawInput.edits)) {
+ throw new Error("edits is required");
+ }
+
+ logSandboxBootRequest("tool.editFile", {
+ "file.path": filePath,
+ });
+ const executors = await sessionManager.ensureToolExecutors();
+ const result = await withSandboxSpan(
+ "sandbox.editFile",
+ "sandbox.fs.edit",
+ {
+ "app.sandbox.path.length": filePath.length,
+ "app.sandbox.edit.count": rawInput.edits.length,
+ },
+ async () => {
+ const response = await editFile({
+ fs: executors.fs,
+ path: filePath,
+ edits: rawInput.edits as Array<{ oldText: string; newText: string }>,
+ });
+ setSpanStatus("ok");
+ return response;
+ },
+ );
+
+ return { result: result as T };
+ };
+
+ const executeGrepTool = async (
+ rawInput: Record,
+ ): Promise> => {
+ const pattern = String(rawInput.pattern ?? "");
+ if (!pattern) {
+ throw new Error("pattern is required");
+ }
+
+ logSandboxBootRequest("tool.grep");
+ const contextLines = positiveInteger(rawInput.context);
+ const limit = positiveInteger(rawInput.limit);
+ const executors = await sessionManager.ensureToolExecutors();
+ const result = await withSandboxSpan(
+ "sandbox.grep",
+ "sandbox.fs.search",
+ {
+ "app.sandbox.pattern.length": pattern.length,
+ },
+ async () => {
+ const response = await grepFiles({
+ fs: executors.fs,
+ pattern,
+ ...(typeof rawInput.path === "string" ? { path: rawInput.path } : {}),
+ ...(typeof rawInput.glob === "string" ? { glob: rawInput.glob } : {}),
+ ...(typeof rawInput.ignoreCase === "boolean"
+ ? { ignoreCase: rawInput.ignoreCase }
+ : {}),
+ ...(typeof rawInput.literal === "boolean"
+ ? { literal: rawInput.literal }
+ : {}),
+ ...(contextLines ? { context: contextLines } : {}),
+ ...(limit ? { limit } : {}),
+ });
+ setSpanStatus("ok");
+ return response;
+ },
+ );
+
+ return { result: result as T };
+ };
+
+ const executeFindFilesTool = async (
+ rawInput: Record,
+ ): Promise> => {
+ const pattern = String(rawInput.pattern ?? "");
+ if (!pattern) {
+ throw new Error("pattern is required");
+ }
+
+ logSandboxBootRequest("tool.findFiles");
+ const limit = positiveInteger(rawInput.limit);
+ const executors = await sessionManager.ensureToolExecutors();
+ const result = await withSandboxSpan(
+ "sandbox.findFiles",
+ "sandbox.fs.find",
+ {
+ "app.sandbox.pattern.length": pattern.length,
+ },
+ async () => {
+ const response = await findFiles({
+ fs: executors.fs,
+ pattern,
+ ...(typeof rawInput.path === "string" ? { path: rawInput.path } : {}),
+ ...(limit ? { limit } : {}),
+ });
+ setSpanStatus("ok");
+ return response;
+ },
+ );
+
+ return { result: result as T };
+ };
+
+ const executeListDirTool = async (
+ rawInput: Record,
+ ): Promise> => {
+ logSandboxBootRequest("tool.listDir");
+ const limit = positiveInteger(rawInput.limit);
+ const executors = await sessionManager.ensureToolExecutors();
+ const result = await withSandboxSpan(
+ "sandbox.listDir",
+ "sandbox.fs.list",
+ {},
+ async () => {
+ const response = await listDir({
+ fs: executors.fs,
+ ...(typeof rawInput.path === "string" ? { path: rawInput.path } : {}),
+ ...(limit ? { limit } : {}),
+ });
+ setSpanStatus("ok");
+ return response;
+ },
+ );
+
+ return { result: result as T };
+ };
+
const execute = async (
params: SandboxExecutionInput,
): Promise> => {
@@ -364,6 +519,22 @@ export function createSandboxExecutor(options?: {
return await executeReadFileTool(rawInput);
}
+ if (params.toolName === "editFile") {
+ return await executeEditFileTool(rawInput);
+ }
+
+ if (params.toolName === "grep") {
+ return await executeGrepTool(rawInput);
+ }
+
+ if (params.toolName === "findFiles") {
+ return await executeFindFilesTool(rawInput);
+ }
+
+ if (params.toolName === "listDir") {
+ return await executeListDirTool(rawInput);
+ }
+
if (params.toolName === "writeFile") {
return await executeWriteFileTool(rawInput);
}
diff --git a/packages/junior/src/chat/sandbox/session.ts b/packages/junior/src/chat/sandbox/session.ts
index dc51c026..b3b35c84 100644
--- a/packages/junior/src/chat/sandbox/session.ts
+++ b/packages/junior/src/chat/sandbox/session.ts
@@ -24,6 +24,7 @@ import {
import { syncSkillsToSandbox } from "@/chat/sandbox/skill-sync";
import type { SandboxCommandResult } from "@/chat/sandbox/workspace";
import type { SkillMetadata } from "@/chat/skills";
+import type { SandboxFileSystem } from "@/chat/tools/sandbox/file-utils";
const DEFAULT_MAX_OUTPUT_LENGTH = 30_000;
const SANDBOX_RUNTIME = "node22";
@@ -49,18 +50,21 @@ interface SandboxToolExecutors {
headers: Record;
}>;
env?: Record;
+ timeoutMs?: number;
}) => Promise<{
stdout: string;
stderr: string;
exitCode: number;
stdoutTruncated: boolean;
stderrTruncated: boolean;
+ timedOut?: boolean;
}>;
readFile: (input: { path: string }) => Promise<{ content: string }>;
writeFile: (input: {
path: string;
content: string;
}) => Promise<{ success: boolean }>;
+ fs: SandboxFileSystem;
}
interface SandboxSessionManager {
@@ -637,16 +641,46 @@ export function createSandboxSessionManager(options?: {
env: input.env,
pathPrefix: `${SANDBOX_RUNTIME_BIN_DIR}:$PATH`,
});
+ const controller =
+ input.timeoutMs && input.timeoutMs > 0
+ ? new AbortController()
+ : undefined;
+ let timedOut = false;
+ const timeoutId = controller
+ ? setTimeout(() => {
+ timedOut = true;
+ controller.abort();
+ }, input.timeoutMs)
+ : undefined;
return await withTemporaryHeaderTransforms(
sandboxInstance,
input.headerTransforms,
async () => {
- const commandResult = await sandboxInstance.runCommand({
- cmd: "bash",
- args: ["-c", script],
- cwd: SANDBOX_WORKSPACE_ROOT,
- });
- return await readCommandOutput(commandResult);
+ try {
+ const commandResult = await sandboxInstance.runCommand({
+ cmd: "bash",
+ args: ["-c", script],
+ cwd: SANDBOX_WORKSPACE_ROOT,
+ ...(controller ? { signal: controller.signal } : {}),
+ });
+ return await readCommandOutput(commandResult);
+ } catch (error) {
+ if (timedOut) {
+ return {
+ stdout: "",
+ stderr: `Command timed out after ${input.timeoutMs}ms`,
+ exitCode: 124,
+ stdoutTruncated: false,
+ stderrTruncated: false,
+ timedOut: true,
+ };
+ }
+ throw error;
+ } finally {
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ }
+ }
},
);
},
@@ -660,6 +694,7 @@ export function createSandboxSessionManager(options?: {
toolCallId: "sandbox-write-file",
messages: [],
})) as { success: boolean },
+ fs: sandboxInstance.fs as SandboxFileSystem,
};
};
diff --git a/packages/junior/src/chat/tools/agent-tools.ts b/packages/junior/src/chat/tools/agent-tools.ts
index 75d4728b..6a1dbe97 100644
--- a/packages/junior/src/chat/tools/agent-tools.ts
+++ b/packages/junior/src/chat/tools/agent-tools.ts
@@ -33,6 +33,8 @@ export function createAgentTools(
label: toolName,
description: toolDef.description,
parameters: toolDef.inputSchema,
+ prepareArguments: toolDef.prepareArguments,
+ executionMode: toolDef.executionMode,
execute: async (toolCallId: unknown, params: unknown) => {
const normalizedToolCallId =
typeof toolCallId === "string" && toolCallId.length > 0
diff --git a/packages/junior/src/chat/tools/definition.ts b/packages/junior/src/chat/tools/definition.ts
index cfedc6b8..24b310d3 100644
--- a/packages/junior/src/chat/tools/definition.ts
+++ b/packages/junior/src/chat/tools/definition.ts
@@ -1,10 +1,15 @@
import type { ToolAnnotations } from "@modelcontextprotocol/sdk/types.js";
import type { Static, TSchema } from "@sinclair/typebox";
+import type { ToolExecutionMode } from "@mariozechner/pi-agent-core";
export interface ToolDefinition {
description: string;
inputSchema: TInputSchema;
annotations?: ToolAnnotations;
+ promptSnippet?: string;
+ promptGuidelines?: string[];
+ prepareArguments?: (args: unknown) => Static;
+ executionMode?: ToolExecutionMode;
execute?: (
input: Static,
options: { experimental_context?: unknown },
diff --git a/packages/junior/src/chat/tools/execution/build-sandbox-input.ts b/packages/junior/src/chat/tools/execution/build-sandbox-input.ts
index fd0d7873..bbeebf2e 100644
--- a/packages/junior/src/chat/tools/execution/build-sandbox-input.ts
+++ b/packages/junior/src/chat/tools/execution/build-sandbox-input.ts
@@ -3,11 +3,69 @@ export function buildSandboxInput(
toolName: string,
params: Record,
): Record {
+ const optionalNumber = (value: unknown) =>
+ typeof value === "number" && Number.isFinite(value) ? value : undefined;
+
if (toolName === "bash") {
- return { command: String(params.command ?? "") };
+ return {
+ command: String(params.command ?? ""),
+ ...(optionalNumber(params.timeoutMs)
+ ? { timeoutMs: optionalNumber(params.timeoutMs) }
+ : {}),
+ };
}
if (toolName === "readFile") {
- return { path: String(params.path ?? "") };
+ return {
+ path: String(params.path ?? ""),
+ ...(optionalNumber(params.offset)
+ ? { offset: optionalNumber(params.offset) }
+ : {}),
+ ...(optionalNumber(params.limit)
+ ? { limit: optionalNumber(params.limit) }
+ : {}),
+ };
+ }
+ if (toolName === "editFile") {
+ return {
+ path: String(params.path ?? ""),
+ edits: Array.isArray(params.edits) ? params.edits : [],
+ };
+ }
+ if (toolName === "grep") {
+ return {
+ pattern: String(params.pattern ?? ""),
+ ...(typeof params.path === "string" ? { path: params.path } : {}),
+ ...(typeof params.glob === "string" ? { glob: params.glob } : {}),
+ ...(typeof params.ignoreCase === "boolean"
+ ? { ignoreCase: params.ignoreCase }
+ : {}),
+ ...(typeof params.literal === "boolean"
+ ? { literal: params.literal }
+ : {}),
+ ...(optionalNumber(params.context)
+ ? { context: optionalNumber(params.context) }
+ : {}),
+ ...(optionalNumber(params.limit)
+ ? { limit: optionalNumber(params.limit) }
+ : {}),
+ };
+ }
+ if (toolName === "findFiles") {
+ return {
+ pattern: String(params.pattern ?? ""),
+ ...(typeof params.path === "string" ? { path: params.path } : {}),
+ ...(optionalNumber(params.limit)
+ ? { limit: optionalNumber(params.limit) }
+ : {}),
+ };
+ }
+ if (toolName === "listDir") {
+ return {
+ ...(typeof params.path === "string" ? { path: params.path } : {}),
+ ...(optionalNumber(params.limit)
+ ? { limit: optionalNumber(params.limit) }
+ : {}),
+ };
}
if (toolName === "writeFile") {
return {
diff --git a/packages/junior/src/chat/tools/index.ts b/packages/junior/src/chat/tools/index.ts
index ee3efc64..5f5780d3 100644
--- a/packages/junior/src/chat/tools/index.ts
+++ b/packages/junior/src/chat/tools/index.ts
@@ -1,5 +1,9 @@
import { createBashTool } from "@/chat/tools/sandbox/bash";
+import { createEditFileTool } from "@/chat/tools/sandbox/edit-file";
+import { createFindFilesTool } from "@/chat/tools/sandbox/find-files";
+import { createGrepTool } from "@/chat/tools/sandbox/grep";
import { createAttachFileTool } from "@/chat/tools/sandbox/attach-file";
+import { createListDirTool } from "@/chat/tools/sandbox/list-dir";
import type { SkillMetadata } from "@/chat/skills";
import { createImageGenerateTool } from "@/chat/tools/web/image-generate";
import { createCallMcpToolTool } from "@/chat/tools/skill/call-mcp-tool";
@@ -92,6 +96,10 @@ export function createTools(
bash: createBashTool(),
attachFile: createAttachFileTool(context.sandbox, hooks),
readFile: createReadFileTool(),
+ editFile: createEditFileTool(),
+ grep: createGrepTool(),
+ findFiles: createFindFilesTool(),
+ listDir: createListDirTool(),
writeFile: createWriteFileTool(),
webSearch: createWebSearchTool(),
webFetch: createWebFetchTool(hooks),
diff --git a/packages/junior/src/chat/tools/sandbox/bash.ts b/packages/junior/src/chat/tools/sandbox/bash.ts
index 2a5f9dea..a9592a08 100644
--- a/packages/junior/src/chat/tools/sandbox/bash.ts
+++ b/packages/junior/src/chat/tools/sandbox/bash.ts
@@ -1,6 +1,7 @@
import { tool } from "@/chat/tools/definition";
import { Type } from "@sinclair/typebox";
+/** Create the sandbox shell tool definition exposed to the agent. */
export function createBashTool() {
return tool({
description:
@@ -11,6 +12,13 @@ export function createBashTool() {
minLength: 1,
description: "Bash command to run inside the sandbox.",
}),
+ timeoutMs: Type.Optional(
+ Type.Integer({
+ minimum: 1000,
+ description:
+ "Optional command timeout in milliseconds. Use for commands that may hang.",
+ }),
+ ),
},
{ additionalProperties: false },
),
diff --git a/packages/junior/src/chat/tools/sandbox/edit-file.ts b/packages/junior/src/chat/tools/sandbox/edit-file.ts
new file mode 100644
index 00000000..635607c8
--- /dev/null
+++ b/packages/junior/src/chat/tools/sandbox/edit-file.ts
@@ -0,0 +1,348 @@
+import {
+ normalizeToLf,
+ resolveWorkspacePath,
+ type SandboxFileSystem,
+} from "@/chat/tools/sandbox/file-utils";
+import { Type } from "@sinclair/typebox";
+import { tool } from "@/chat/tools/definition";
+
/** One exact-match text replacement requested by the model. */
interface EditReplacement {
  oldText: string;
  newText: string;
}

/** Tool result returned to the agent after a successful edit. */
interface EditFileResult {
  content: [{ type: "text"; text: string }];
  details: {
    // Compact line-numbered diff of the applied change.
    diff: string;
    // 1-based number of the first line that changed, if any differ.
    first_changed_line?: number;
    ok: true;
    path: string;
    replacements: number;
  };
}

/** Canonical editFile argument shape (after prepareEditFileArguments). */
interface EditFileInput {
  path: string;
  edits: EditReplacement[];
}

/** A located edit: where oldText matched in the content and its replacement. */
interface MatchedEdit {
  editIndex: number;
  matchIndex: number;
  matchLength: number;
  newText: string;
}
+
+function detectLineEnding(value: string): "\r\n" | "\n" {
+ return value.includes("\r\n") ? "\r\n" : "\n";
+}
+
+function restoreLineEndings(value: string, lineEnding: "\r\n" | "\n"): string {
+ return lineEnding === "\r\n" ? value.replace(/\n/g, "\r\n") : value;
+}
+
+function stripBom(value: string): { bom: string; text: string } {
+ return value.startsWith("\uFEFF")
+ ? { bom: "\uFEFF", text: value.slice(1) }
+ : { bom: "", text: value };
+}
+
+function countOccurrences(content: string, target: string): number {
+ let count = 0;
+ let start = 0;
+ while (target.length > 0) {
+ const index = content.indexOf(target, start);
+ if (index === -1) break;
+ count += 1;
+ start = index + target.length;
+ }
+ return count;
+}
+
+function firstChangedLine(
+ oldContent: string,
+ newContent: string,
+): number | undefined {
+ const oldLines = oldContent.split("\n");
+ const newLines = newContent.split("\n");
+ const count = Math.max(oldLines.length, newLines.length);
+ for (let index = 0; index < count; index += 1) {
+ if (oldLines[index] !== newLines[index]) {
+ return index + 1;
+ }
+ }
+ return undefined;
+}
+
+function buildCompactDiff(
+ oldContent: string,
+ newContent: string,
+): {
+ diff: string;
+ firstChangedLine?: number;
+} {
+ const oldLines = oldContent.split("\n");
+ const newLines = newContent.split("\n");
+ let prefix = 0;
+ while (
+ prefix < oldLines.length &&
+ prefix < newLines.length &&
+ oldLines[prefix] === newLines[prefix]
+ ) {
+ prefix += 1;
+ }
+
+ let oldSuffix = oldLines.length - 1;
+ let newSuffix = newLines.length - 1;
+ while (
+ oldSuffix >= prefix &&
+ newSuffix >= prefix &&
+ oldLines[oldSuffix] === newLines[newSuffix]
+ ) {
+ oldSuffix -= 1;
+ newSuffix -= 1;
+ }
+
+ const contextStart = Math.max(0, prefix - 3);
+ const newContextEnd = Math.min(newLines.length - 1, newSuffix + 3);
+ const oldContextEnd = Math.min(oldLines.length - 1, oldSuffix + 3);
+ const width = String(Math.max(oldLines.length, newLines.length)).length;
+ const output: string[] = [];
+
+ if (contextStart > 0) {
+ output.push(` ${"".padStart(width)} ...`);
+ }
+ for (let index = contextStart; index < prefix; index += 1) {
+ output.push(` ${String(index + 1).padStart(width)} ${oldLines[index]}`);
+ }
+ for (let index = prefix; index <= oldSuffix; index += 1) {
+ output.push(`-${String(index + 1).padStart(width)} ${oldLines[index]}`);
+ }
+ for (let index = prefix; index <= newSuffix; index += 1) {
+ output.push(`+${String(index + 1).padStart(width)} ${newLines[index]}`);
+ }
+ for (let index = newSuffix + 1; index <= newContextEnd; index += 1) {
+ output.push(` ${String(index + 1).padStart(width)} ${newLines[index]}`);
+ }
+ if (
+ newContextEnd < newLines.length - 1 ||
+ oldContextEnd < oldLines.length - 1
+ ) {
+ output.push(` ${"".padStart(width)} ...`);
+ }
+
+ return {
+ diff: output.join("\n"),
+ firstChangedLine: firstChangedLine(oldContent, newContent),
+ };
+}
+
/**
 * Validate `edits` against `content` and apply them.
 *
 * Contract enforced per edit: oldText is a non-empty string, occurs exactly
 * once in the original content, and no two matches overlap. Edits are applied
 * back-to-front so earlier replacements cannot shift later match offsets.
 * Throws a descriptive error naming `filePath` on any violation, including
 * when the applied edits produce no change at all.
 */
function validateAndApplyEdits(
  content: string,
  edits: EditReplacement[],
  filePath: string,
): { baseContent: string; newContent: string } {
  if (!Array.isArray(edits) || edits.length === 0) {
    throw new Error("editFile requires at least one edit.");
  }

  // Normalize edit text to LF so matching happens in a single line-ending
  // space (callers pass LF-normalized content).
  const normalizedEdits = edits.map((edit, index) => {
    if (typeof edit.oldText !== "string" || edit.oldText.length === 0) {
      throw new Error(
        `edits[${index}].oldText must not be empty in ${filePath}.`,
      );
    }
    if (typeof edit.newText !== "string") {
      throw new Error(
        `edits[${index}].newText must be a string in ${filePath}.`,
      );
    }
    return {
      oldText: normalizeToLf(edit.oldText),
      newText: normalizeToLf(edit.newText),
    };
  });

  // Locate each edit; every oldText must match exactly once in the original.
  const matchedEdits: MatchedEdit[] = [];
  for (let index = 0; index < normalizedEdits.length; index += 1) {
    const edit = normalizedEdits[index];
    const matchIndex = content.indexOf(edit.oldText);
    if (matchIndex === -1) {
      throw new Error(
        `Could not find edits[${index}] in ${filePath}. oldText must match exactly including whitespace and newlines.`,
      );
    }
    const occurrences = countOccurrences(content, edit.oldText);
    if (occurrences > 1) {
      throw new Error(
        `Found ${occurrences} occurrences of edits[${index}] in ${filePath}. Each oldText must be unique.`,
      );
    }
    matchedEdits.push({
      editIndex: index,
      matchIndex,
      matchLength: edit.oldText.length,
      newText: edit.newText,
    });
  }

  // Reject overlapping replacements — applying them would corrupt each other.
  matchedEdits.sort((a, b) => a.matchIndex - b.matchIndex);
  for (let index = 1; index < matchedEdits.length; index += 1) {
    const previous = matchedEdits[index - 1];
    const current = matchedEdits[index];
    if (previous.matchIndex + previous.matchLength > current.matchIndex) {
      throw new Error(
        `edits[${previous.editIndex}] and edits[${current.editIndex}] overlap in ${filePath}. Merge overlapping replacements into one edit.`,
      );
    }
  }

  // Apply from the end of the content backwards so offsets computed against
  // the original string stay valid as replacements change the length.
  let newContent = content;
  for (let index = matchedEdits.length - 1; index >= 0; index -= 1) {
    const edit = matchedEdits[index];
    newContent =
      newContent.slice(0, edit.matchIndex) +
      edit.newText +
      newContent.slice(edit.matchIndex + edit.matchLength);
  }

  if (newContent === content) {
    throw new Error(`No changes made to ${filePath}.`);
  }

  return { baseContent: content, newContent };
}
+
+/** Accept common edit argument variants before Pi validates the canonical schema. */
+export function prepareEditFileArguments(input: unknown): EditFileInput {
+ if (!input || typeof input !== "object") {
+ return input as EditFileInput;
+ }
+
+ const raw = { ...(input as Record) };
+ if (typeof raw.edits === "string") {
+ try {
+ raw.edits = JSON.parse(raw.edits);
+ } catch {
+ return raw as unknown as EditFileInput;
+ }
+ }
+
+ const edits = Array.isArray(raw.edits) ? [...raw.edits] : [];
+ const oldText = raw.oldText ?? raw.old_text;
+ const newText = raw.newText ?? raw.new_text;
+ if (typeof oldText === "string" && typeof newText === "string") {
+ edits.push({ oldText, newText });
+ }
+
+ if (edits.length > 0) {
+ raw.edits = edits.map((edit) => {
+ if (!edit || typeof edit !== "object") {
+ return edit;
+ }
+ const record = edit as Record;
+ const { old_text, new_text, ...rest } = record;
+ return {
+ ...rest,
+ oldText: record.oldText ?? old_text,
+ newText: record.newText ?? new_text,
+ };
+ });
+ }
+
+ delete raw.oldText;
+ delete raw.old_text;
+ delete raw.newText;
+ delete raw.new_text;
+ return raw as unknown as EditFileInput;
+}
+
+/** Apply exact, ordered file replacements through the sandbox filesystem API. */
+export async function editFile(params: {
+ edits: EditReplacement[];
+ fs: SandboxFileSystem;
+ path: string;
+}): Promise {
+ const filePath = resolveWorkspacePath(params.path);
+ const rawContent = await params.fs.readFile(filePath, { encoding: "utf8" });
+ const { bom, text } = stripBom(rawContent);
+ const lineEnding = detectLineEnding(text);
+ const normalizedContent = normalizeToLf(text);
+ const { baseContent, newContent } = validateAndApplyEdits(
+ normalizedContent,
+ params.edits,
+ params.path,
+ );
+ await params.fs.writeFile(
+ filePath,
+ bom + restoreLineEndings(newContent, lineEnding),
+ { encoding: "utf8" },
+ );
+
+ const diff = buildCompactDiff(baseContent, newContent);
+ return {
+ content: [
+ {
+ type: "text",
+ text: `Successfully replaced ${params.edits.length} block(s) in ${params.path}.`,
+ },
+ ],
+ details: {
+ diff: diff.diff,
+ first_changed_line: diff.firstChangedLine,
+ ok: true,
+ path: params.path,
+ replacements: params.edits.length,
+ },
+ };
+}
+
+const editReplacementSchema = Type.Object(
+ {
+ oldText: Type.String({
+ minLength: 1,
+ description:
+ "Exact text to replace. It must be unique in the original file and must not overlap another edit.",
+ }),
+ newText: Type.String({
+ description: "Replacement text for this edit.",
+ }),
+ },
+ { additionalProperties: false },
+);
+
+/** Create the sandbox edit tool definition exposed to the agent. */
+export function createEditFileTool() {
+ return tool({
+ description:
+ "Edit one sandbox workspace file with exact text replacements. Use for precise changes to existing files. Each oldText must match exactly, be unique, and not overlap another edit. Returns a diff.",
+ promptSnippet: "existing-file exact edits; returns diff",
+ promptGuidelines: [
+ "prefer over writeFile for targeted changes",
+ "oldText exact, unique, non-overlapping",
+ "multiple same-file changes: one edits[] call",
+ ],
+ prepareArguments: prepareEditFileArguments,
+ executionMode: "sequential",
+ inputSchema: Type.Object(
+ {
+ path: Type.String({
+ minLength: 1,
+ description: "Path to edit in the sandbox workspace.",
+ }),
+ edits: Type.Array(editReplacementSchema, {
+ minItems: 1,
+ description:
+ "Exact replacements matched against the original file, not incrementally.",
+ }),
+ },
+ { additionalProperties: false },
+ ),
+ execute: async () => {
+ throw new Error(
+ "editFile can only run when sandbox execution is enabled.",
+ );
+ },
+ });
+}
diff --git a/packages/junior/src/chat/tools/sandbox/file-utils.ts b/packages/junior/src/chat/tools/sandbox/file-utils.ts
new file mode 100644
index 00000000..477f9d60
--- /dev/null
+++ b/packages/junior/src/chat/tools/sandbox/file-utils.ts
@@ -0,0 +1,180 @@
+import path from "node:path";
+import { SANDBOX_WORKSPACE_ROOT } from "@/chat/sandbox/paths";
+
+export const MAX_TEXT_CHARS = 60_000;
+const SKIPPED_DIRECTORIES = new Set([".git", "node_modules"]);
+
+interface SandboxFileStat {
+ isDirectory(): boolean;
+}
+
+export interface SandboxFileSystem {
+ readFile(
+ filePath: string,
+ options: { encoding: BufferEncoding },
+ ): Promise;
+ writeFile(
+ filePath: string,
+ content: string,
+ options?: { encoding?: BufferEncoding },
+ ): Promise;
+ readdir(filePath: string): Promise;
+ stat(filePath: string): Promise;
+}
+
+export interface TextSearchResultDetails {
+ ok: true;
+ path: string;
+ truncated: boolean;
+}
+
+/** Normalize model-supplied numeric knobs before they reach filesystem tools. */
+export function positiveInteger(value: unknown): number | undefined {
+ if (typeof value !== "number" || !Number.isFinite(value)) {
+ return undefined;
+ }
+ const integer = Math.floor(value);
+ return integer > 0 ? integer : undefined;
+}
+
+/** Compare and slice text in one line-ending space while preserving callers' output rules. */
+export function normalizeToLf(value: string): string {
+ return value.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
+}
+
+/** Keep tool output inside the model-facing budget with an explicit notice. */
+export function truncateText(
+ value: string,
+ maxChars = MAX_TEXT_CHARS,
+): { content: string; truncated: boolean } {
+ if (value.length <= maxChars) {
+ return { content: value, truncated: false };
+ }
+
+ const removed = value.length - maxChars;
+ return {
+ content: `${value.slice(0, maxChars)}\n\n[output truncated: ${removed} characters removed]`,
+ truncated: true,
+ };
+}
+
+function escapeRegExp(value: string): string {
+ return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+}
+
+function globToRegExp(pattern: string): RegExp {
+ let source = "";
+ for (let index = 0; index < pattern.length; index += 1) {
+ const char = pattern[index];
+ const next = pattern[index + 1];
+ if (char === "*" && next === "*") {
+ if (pattern[index + 2] === "/") {
+ source += "(?:.*/)?";
+ index += 2;
+ continue;
+ }
+ source += ".*";
+ index += 1;
+ continue;
+ }
+ if (char === "*") {
+ source += "[^/]*";
+ continue;
+ }
+ if (char === "?") {
+ source += "[^/]";
+ continue;
+ }
+ source += escapeRegExp(char);
+ }
+ return new RegExp(`^${source}$`);
+}
+
+function matchesGlob(relativePath: string, pattern: string): boolean {
+ const matcher = globToRegExp(pattern);
+ if (matcher.test(relativePath)) {
+ return true;
+ }
+ if (
+ pattern.startsWith("**/") &&
+ matchesGlob(relativePath, pattern.slice(3))
+ ) {
+ return true;
+ }
+ return (
+ !pattern.includes("/") && matcher.test(path.posix.basename(relativePath))
+ );
+}
+
+/** Keep sandbox filesystem tools scoped to the mounted workspace root. */
+export function resolveWorkspacePath(
+ input: string | undefined,
+ fallback = ".",
+): string {
+ const requested = (input ?? "").trim() || fallback;
+ const absolute = requested.startsWith("/")
+ ? requested
+ : path.posix.join(SANDBOX_WORKSPACE_ROOT, requested);
+ const normalized = path.posix.normalize(absolute);
+ if (
+ normalized !== SANDBOX_WORKSPACE_ROOT &&
+ !normalized.startsWith(`${SANDBOX_WORKSPACE_ROOT}/`)
+ ) {
+ throw new Error(
+ `Path must stay within ${SANDBOX_WORKSPACE_ROOT}: ${requested}`,
+ );
+ }
+ return normalized;
+}
+
+/** Share bounded workspace traversal across search tools so their skip rules stay aligned. */
+export async function collectFiles(params: {
+ fs: SandboxFileSystem;
+ root: string;
+ limit?: number;
+ pattern?: string;
+}): Promise<{ files: string[]; limitReached: boolean }> {
+ const files: string[] = [];
+ let limitReached = false;
+
+ const visit = async (dirPath: string): Promise => {
+ const entries = (await params.fs.readdir(dirPath)).sort((a, b) =>
+ a.toLowerCase().localeCompare(b.toLowerCase()),
+ );
+ for (const entry of entries) {
+ const fullPath = path.posix.join(dirPath, entry);
+ const stat = await params.fs.stat(fullPath);
+ if (stat.isDirectory()) {
+ if (!SKIPPED_DIRECTORIES.has(entry)) {
+ await visit(fullPath);
+ }
+ if (limitReached) return;
+ continue;
+ }
+
+ const relativePath = path.posix.relative(params.root, fullPath);
+ if (!params.pattern || matchesGlob(relativePath, params.pattern)) {
+ files.push(fullPath);
+ if (params.limit && files.length >= params.limit) {
+ limitReached = true;
+ return;
+ }
+ }
+ }
+ };
+
+ const stat = await params.fs.stat(params.root);
+ if (!stat.isDirectory()) {
+ const relativePath = path.posix.basename(params.root);
+ return {
+ files:
+ !params.pattern || matchesGlob(relativePath, params.pattern)
+ ? [params.root]
+ : [],
+ limitReached: false,
+ };
+ }
+
+ await visit(params.root);
+ return { files, limitReached };
+}
diff --git a/packages/junior/src/chat/tools/sandbox/find-files.ts b/packages/junior/src/chat/tools/sandbox/find-files.ts
new file mode 100644
index 00000000..b2092e58
--- /dev/null
+++ b/packages/junior/src/chat/tools/sandbox/find-files.ts
@@ -0,0 +1,115 @@
+import path from "node:path";
+import { Type } from "@sinclair/typebox";
+import {
+ MAX_TEXT_CHARS,
+ collectFiles,
+ positiveInteger,
+ resolveWorkspacePath,
+ truncateText,
+ type SandboxFileSystem,
+ type TextSearchResultDetails,
+} from "@/chat/tools/sandbox/file-utils";
+import { tool } from "@/chat/tools/definition";
+
+const DEFAULT_FIND_LIMIT = 1000;
+
+interface FindFilesResult {
+ content: [{ type: "text"; text: string }];
+ details: TextSearchResultDetails & {
+ result_limit_reached?: number;
+ };
+}
+
+/** Find workspace files with structured limits instead of ad hoc shell output. */
+export async function findFiles(params: {
+ fs: SandboxFileSystem;
+ limit?: unknown;
+ path?: string;
+ pattern: string;
+}): Promise<FindFilesResult> {
+ if (!params.pattern.trim()) {
+ throw new Error("pattern is required");
+ }
+
+ const root = resolveWorkspacePath(params.path);
+ const limit = positiveInteger(params.limit) ?? DEFAULT_FIND_LIMIT;
+ const { files, limitReached } = await collectFiles({
+ fs: params.fs,
+ root,
+ pattern: params.pattern,
+ limit,
+ });
+ const relativePaths = files.map((filePath) =>
+ path.posix.relative(root, filePath),
+ );
+ const bounded = truncateText(
+ relativePaths.length > 0
+ ? relativePaths.join("\n")
+ : "No files found matching pattern",
+ );
+ const notices: string[] = [];
+ if (limitReached) {
+ notices.push(
+ `${limit} results limit reached. Refine pattern or raise limit.`,
+ );
+ }
+ if (bounded.truncated) {
+ notices.push(`${MAX_TEXT_CHARS} character output limit reached.`);
+ }
+
+ return {
+ content: [
+ {
+ type: "text",
+ text:
+ notices.length > 0
+ ? `${bounded.content}\n\n[${notices.join(" ")}]`
+ : bounded.content,
+ },
+ ],
+ details: {
+ ok: true,
+ path: params.path ?? ".",
+ truncated: limitReached || bounded.truncated,
+ ...(limitReached ? { result_limit_reached: limit } : {}),
+ },
+ };
+}
+
+/** Create the sandbox file discovery tool definition exposed to the agent. */
+export function createFindFilesTool() {
+ return tool({
+ description:
+ "Find sandbox workspace files by glob pattern. Returns bounded paths relative to the search root and skips dependency/cache directories.",
+ annotations: { readOnlyHint: true, destructiveHint: false },
+ inputSchema: Type.Object(
+ {
+ pattern: Type.String({
+ minLength: 1,
+ description:
+ "Glob pattern to match, for example '*.ts', '**/*.json', or 'src/**/*.test.ts'.",
+ }),
+ path: Type.Optional(
+ Type.String({
+ minLength: 1,
+ description:
+ "Directory or file path in the sandbox workspace. Defaults to the workspace root.",
+ }),
+ ),
+ limit: Type.Optional(
+ Type.Integer({
+ minimum: 1,
+ description:
+ "Maximum number of file paths to return. Defaults to 1000.",
+ }),
+ ),
+ },
+ { additionalProperties: false },
+ ),
+ execute: async () => {
+ throw new Error(
+ "findFiles can only run when sandbox execution is enabled.",
+ );
+ },
+ });
+}
diff --git a/packages/junior/src/chat/tools/sandbox/grep.ts b/packages/junior/src/chat/tools/sandbox/grep.ts
new file mode 100644
index 00000000..eaa7f93c
--- /dev/null
+++ b/packages/junior/src/chat/tools/sandbox/grep.ts
@@ -0,0 +1,235 @@
+import path from "node:path";
+import { Type } from "@sinclair/typebox";
+import {
+ MAX_TEXT_CHARS,
+ collectFiles,
+ normalizeToLf,
+ positiveInteger,
+ resolveWorkspacePath,
+ truncateText,
+ type SandboxFileSystem,
+ type TextSearchResultDetails,
+} from "@/chat/tools/sandbox/file-utils";
+import { tool } from "@/chat/tools/definition";
+
+const DEFAULT_GREP_LIMIT = 100;
+const MAX_GREP_LINE_CHARS = 500;
+
+interface GrepResult {
+ content: [{ type: "text"; text: string }];
+ details: TextSearchResultDetails & {
+ line_truncated?: boolean;
+ match_limit_reached?: number;
+ };
+}
+
+function truncateGrepLine(value: string): { line: string; truncated: boolean } {
+ if (value.length <= MAX_GREP_LINE_CHARS) {
+ return { line: value, truncated: false };
+ }
+ return {
+ line: `${value.slice(0, MAX_GREP_LINE_CHARS)}... [line truncated]`,
+ truncated: true,
+ };
+}
+
+function lineMatches(params: {
+ ignoreCase?: boolean;
+ literal?: boolean;
+ line: string;
+ pattern: string;
+ regex?: RegExp;
+}): boolean {
+ if (!params.literal) {
+ return Boolean(params.regex?.test(params.line));
+ }
+
+ if (params.ignoreCase) {
+ return params.line.toLowerCase().includes(params.pattern.toLowerCase());
+ }
+ return params.line.includes(params.pattern);
+}
+
+/** Search workspace file contents with bounded, line-numbered output. */
+export async function grepFiles(params: {
+ context?: unknown;
+ fs: SandboxFileSystem;
+ glob?: string;
+ ignoreCase?: boolean;
+ limit?: unknown;
+ literal?: boolean;
+ path?: string;
+ pattern: string;
+}): Promise<GrepResult> {
+ if (!params.pattern) {
+ throw new Error("pattern is required");
+ }
+
+ const root = resolveWorkspacePath(params.path);
+ const limit = positiveInteger(params.limit) ?? DEFAULT_GREP_LIMIT;
+ const context = positiveInteger(params.context) ?? 0;
+ const regex = params.literal
+ ? undefined
+ : new RegExp(params.pattern, params.ignoreCase ? "i" : "");
+ const { files } = await collectFiles({
+ fs: params.fs,
+ root,
+ pattern: params.glob,
+ });
+ const output: string[] = [];
+ let matchCount = 0;
+ let matchLimitReached = false;
+ let lineTruncated = false;
+
+ for (const filePath of files) {
+ if (matchLimitReached) break;
+ let content: string;
+ try {
+ content = await params.fs.readFile(filePath, { encoding: "utf8" });
+ } catch {
+ continue;
+ }
+ if (content.includes("\u0000")) {
+ continue;
+ }
+
+ const lines = normalizeToLf(content).split("\n");
+ const relativePath =
+ files.length === 1 && filePath === root
+ ? path.posix.basename(filePath)
+ : path.posix.relative(root, filePath);
+ const matchedLines: number[] = [];
+ for (let lineIndex = 0; lineIndex < lines.length; lineIndex += 1) {
+ if (
+ !lineMatches({
+ ignoreCase: params.ignoreCase,
+ line: lines[lineIndex],
+ literal: params.literal,
+ pattern: params.pattern,
+ regex,
+ })
+ ) {
+ continue;
+ }
+ if (matchCount >= limit) {
+ matchLimitReached = true;
+ break;
+ }
+ matchCount += 1;
+ matchedLines.push(lineIndex);
+ }
+
+ const matchedLineSet = new Set(matchedLines);
+ const emittedLines = new Set<number>();
+ for (const lineIndex of matchedLines) {
+ const start = Math.max(0, lineIndex - context);
+ const end = Math.min(lines.length - 1, lineIndex + context);
+ for (let current = start; current <= end; current += 1) {
+ if (emittedLines.has(current)) {
+ continue;
+ }
+ emittedLines.add(current);
+ const truncated = truncateGrepLine(lines[current]);
+ lineTruncated ||= truncated.truncated;
+ const separator = matchedLineSet.has(current) ? ":" : "-";
+ output.push(
+ `${relativePath}${separator}${current + 1}${separator} ${truncated.line}`,
+ );
+ }
+ }
+ }
+
+ const bounded = truncateText(
+ output.length > 0 ? output.join("\n") : "No matches found",
+ );
+ const notices: string[] = [];
+ if (matchLimitReached) {
+ notices.push(
+ `${limit} matches limit reached. Refine pattern or raise limit.`,
+ );
+ }
+ if (lineTruncated) {
+ notices.push(
+ `Some lines were truncated to ${MAX_GREP_LINE_CHARS} characters.`,
+ );
+ }
+ if (bounded.truncated) {
+ notices.push(`${MAX_TEXT_CHARS} character output limit reached.`);
+ }
+
+ return {
+ content: [
+ {
+ type: "text",
+ text:
+ notices.length > 0
+ ? `${bounded.content}\n\n[${notices.join(" ")}]`
+ : bounded.content,
+ },
+ ],
+ details: {
+ ok: true,
+ path: params.path ?? ".",
+ truncated: matchLimitReached || lineTruncated || bounded.truncated,
+ ...(matchLimitReached ? { match_limit_reached: limit } : {}),
+ ...(lineTruncated ? { line_truncated: true } : {}),
+ },
+ };
+}
+
+/** Create the sandbox grep tool definition exposed to the agent. */
+export function createGrepTool() {
+ return tool({
+ description:
+ "Search sandbox workspace file contents. Returns bounded matching lines with file paths and line numbers. Respects path/glob filters and includes truncation notices.",
+ annotations: { readOnlyHint: true, destructiveHint: false },
+ inputSchema: Type.Object(
+ {
+ pattern: Type.String({
+ minLength: 1,
+ description: "Regex pattern or literal text to search for.",
+ }),
+ path: Type.Optional(
+ Type.String({
+ minLength: 1,
+ description:
+ "Directory or file path in the sandbox workspace. Defaults to the workspace root.",
+ }),
+ ),
+ glob: Type.Optional(
+ Type.String({
+ minLength: 1,
+ description:
+ "Optional glob filter such as '*.ts' or '**/*.test.ts'.",
+ }),
+ ),
+ ignoreCase: Type.Optional(
+ Type.Boolean({
+ description: "Whether matching should ignore case.",
+ }),
+ ),
+ literal: Type.Optional(
+ Type.Boolean({
+ description: "Treat pattern as literal text instead of regex.",
+ }),
+ ),
+ context: Type.Optional(
+ Type.Integer({
+ minimum: 0,
+ description: "Number of surrounding context lines to include.",
+ }),
+ ),
+ limit: Type.Optional(
+ Type.Integer({
+ minimum: 1,
+ description: "Maximum matches to return. Defaults to 100.",
+ }),
+ ),
+ },
+ { additionalProperties: false },
+ ),
+ execute: async () => {
+ throw new Error("grep can only run when sandbox execution is enabled.");
+ },
+ });
+}
diff --git a/packages/junior/src/chat/tools/sandbox/list-dir.ts b/packages/junior/src/chat/tools/sandbox/list-dir.ts
new file mode 100644
index 00000000..36560eca
--- /dev/null
+++ b/packages/junior/src/chat/tools/sandbox/list-dir.ts
@@ -0,0 +1,116 @@
+import path from "node:path";
+import { Type } from "@sinclair/typebox";
+import {
+ MAX_TEXT_CHARS,
+ positiveInteger,
+ resolveWorkspacePath,
+ truncateText,
+ type SandboxFileSystem,
+ type TextSearchResultDetails,
+} from "@/chat/tools/sandbox/file-utils";
+import { tool } from "@/chat/tools/definition";
+
+const DEFAULT_LIST_LIMIT = 500;
+
+interface ListDirResult {
+ content: [{ type: "text"; text: string }];
+ details: TextSearchResultDetails & {
+ entry_limit_reached?: number;
+ };
+}
+
+/** List workspace directories without forcing the model through shell output. */
+export async function listDir(params: {
+ fs: SandboxFileSystem;
+ limit?: unknown;
+ path?: string;
+}): Promise<ListDirResult> {
+ const dirPath = resolveWorkspacePath(params.path);
+ const limit = positiveInteger(params.limit) ?? DEFAULT_LIST_LIMIT;
+ const stat = await params.fs.stat(dirPath);
+ if (!stat.isDirectory()) {
+ throw new Error(`Not a directory: ${params.path ?? "."}`);
+ }
+
+ const entries = (await params.fs.readdir(dirPath)).sort((a, b) =>
+ a.toLowerCase().localeCompare(b.toLowerCase()),
+ );
+ const output: string[] = [];
+ let entryLimitReached = false;
+ for (const entry of entries) {
+ if (output.length >= limit) {
+ entryLimitReached = true;
+ break;
+ }
+ const entryPath = path.posix.join(dirPath, entry);
+ try {
+ const entryStat = await params.fs.stat(entryPath);
+ output.push(`${entry}${entryStat.isDirectory() ? "/" : ""}`);
+ } catch {
+ continue;
+ }
+ }
+
+ const bounded = truncateText(
+ output.length > 0 ? output.join("\n") : "(empty directory)",
+ );
+ const notices: string[] = [];
+ if (entryLimitReached) {
+ notices.push(
+ `${limit} entries limit reached. Use a higher limit to continue.`,
+ );
+ }
+ if (bounded.truncated) {
+ notices.push(`${MAX_TEXT_CHARS} character output limit reached.`);
+ }
+
+ return {
+ content: [
+ {
+ type: "text",
+ text:
+ notices.length > 0
+ ? `${bounded.content}\n\n[${notices.join(" ")}]`
+ : bounded.content,
+ },
+ ],
+ details: {
+ ok: true,
+ path: params.path ?? ".",
+ truncated: entryLimitReached || bounded.truncated,
+ ...(entryLimitReached ? { entry_limit_reached: limit } : {}),
+ },
+ };
+}
+
+/** Create the sandbox directory listing tool definition exposed to the agent. */
+export function createListDirTool() {
+ return tool({
+ description:
+ "List a sandbox workspace directory. Returns sorted entries with '/' suffixes for directories and bounded truncation notices.",
+ annotations: { readOnlyHint: true, destructiveHint: false },
+ inputSchema: Type.Object(
+ {
+ path: Type.Optional(
+ Type.String({
+ minLength: 1,
+ description:
+ "Directory path in the sandbox workspace. Defaults to the workspace root.",
+ }),
+ ),
+ limit: Type.Optional(
+ Type.Integer({
+ minimum: 1,
+ description: "Maximum entries to return. Defaults to 500.",
+ }),
+ ),
+ },
+ { additionalProperties: false },
+ ),
+ execute: async () => {
+ throw new Error(
+ "listDir can only run when sandbox execution is enabled.",
+ );
+ },
+ });
+}
diff --git a/packages/junior/src/chat/tools/sandbox/read-file.ts b/packages/junior/src/chat/tools/sandbox/read-file.ts
index ed0a5f08..4e53c5b4 100644
--- a/packages/junior/src/chat/tools/sandbox/read-file.ts
+++ b/packages/junior/src/chat/tools/sandbox/read-file.ts
@@ -1,10 +1,66 @@
import { Type } from "@sinclair/typebox";
+import {
+ normalizeToLf,
+ positiveInteger,
+} from "@/chat/tools/sandbox/file-utils";
import { tool } from "@/chat/tools/definition";
+const DEFAULT_READ_LIMIT = 1000;
+
+interface TextRangeResult {
+ content: string;
+ end_line?: number;
+ path: string;
+ start_line: number;
+ success: true;
+ total_lines: number;
+ truncated: boolean;
+ continuation?: string;
+}
+
+/** Return a bounded line window so large files can be read incrementally. */
+export function sliceFileContent(params: {
+ content: string;
+ limit?: unknown;
+ offset?: unknown;
+ path: string;
+}): TextRangeResult {
+ const normalized = normalizeToLf(params.content);
+ const lines = normalized.length === 0 ? [] : normalized.split("\n");
+ const requestedOffset = positiveInteger(params.offset);
+ const requestedLimit = positiveInteger(params.limit);
+ const startLine = requestedOffset ?? 1;
+ const maxLines = requestedLimit ?? DEFAULT_READ_LIMIT;
+ const startIndex = Math.min(lines.length, startLine - 1);
+ const selected = lines.slice(startIndex, startIndex + maxLines);
+ const endLine =
+ selected.length > 0 ? startLine + selected.length - 1 : startLine - 1;
+ const truncated = startIndex > 0 || endLine < lines.length;
+ const rangeRequested =
+ requestedOffset !== undefined || requestedLimit !== undefined;
+
+ return {
+ content:
+ !rangeRequested && !truncated ? params.content : selected.join("\n"),
+ end_line: selected.length > 0 ? endLine : undefined,
+ path: params.path,
+ start_line: startLine,
+ success: true,
+ total_lines: lines.length,
+ truncated,
+ ...(endLine < lines.length
+ ? {
+ continuation: `Read more with offset=${endLine + 1} and limit=${maxLines}.`,
+ }
+ : {}),
+ };
+}
+
+/** Create the sandbox read tool definition exposed to the agent. */
export function createReadFileTool() {
return tool({
description:
- "Read a file from the sandbox workspace. Use when you need exact file contents to verify facts or make edits safely. Do not use for broad discovery when search tools are better.",
+ "Read a bounded line range from a file in the sandbox workspace. Use when you need exact file contents to verify facts or make edits safely. Prefer grep/findFiles/listDir for broad discovery.",
annotations: { readOnlyHint: true, destructiveHint: false },
inputSchema: Type.Object(
{
@@ -12,6 +68,18 @@ export function createReadFileTool() {
minLength: 1,
description: "Path to the file in the sandbox workspace.",
}),
+ offset: Type.Optional(
+ Type.Integer({
+ minimum: 1,
+ description: "1-indexed line number to start reading from.",
+ }),
+ ),
+ limit: Type.Optional(
+ Type.Integer({
+ minimum: 1,
+ description: "Maximum number of lines to read. Defaults to 1000.",
+ }),
+ ),
},
{ additionalProperties: false },
),
diff --git a/packages/junior/src/chat/tools/sandbox/write-file.ts b/packages/junior/src/chat/tools/sandbox/write-file.ts
index 2b35c4d7..2fe651f5 100644
--- a/packages/junior/src/chat/tools/sandbox/write-file.ts
+++ b/packages/junior/src/chat/tools/sandbox/write-file.ts
@@ -1,10 +1,14 @@
import { Type } from "@sinclair/typebox";
import { tool } from "@/chat/tools/definition";
+/** Create the sandbox full-file write tool definition exposed to the agent. */
export function createWriteFileTool() {
return tool({
description:
"Write UTF-8 content to a file in the sandbox workspace. Use for intentional file creation or replacement after validation. Do not use for exploratory analysis-only turns.",
+ promptSnippet: "new file or deliberate full-file replacement",
+ promptGuidelines: ["targeted existing-file changes: editFile"],
+ executionMode: "sequential",
inputSchema: Type.Object(
{
path: Type.String({
diff --git a/packages/junior/tests/integration/advisor/advisor-tool.test.ts b/packages/junior/tests/integration/advisor/advisor-tool.test.ts
index d4744f4e..9269a5d6 100644
--- a/packages/junior/tests/integration/advisor/advisor-tool.test.ts
+++ b/packages/junior/tests/integration/advisor/advisor-tool.test.ts
@@ -190,6 +190,9 @@ describe("advisor tool", () => {
);
expect(Object.keys(advisorDefinitions).sort()).toEqual([
+ "findFiles",
+ "grep",
+ "listDir",
"readFile",
"slackCanvasRead",
"slackChannelListMessages",
diff --git a/packages/junior/tests/unit/misc/sandbox-executor.test.ts b/packages/junior/tests/unit/misc/sandbox-executor.test.ts
index 1d7a6fae..854554a1 100644
--- a/packages/junior/tests/unit/misc/sandbox-executor.test.ts
+++ b/packages/junior/tests/unit/misc/sandbox-executor.test.ts
@@ -787,8 +787,12 @@ describe("createSandboxExecutor", () => {
expect(response.result).toEqual({
content: "Reference note",
+ end_line: 1,
path: `${sandboxSkillDir("demo-skill")}/references/note.md`,
+ start_line: 1,
success: true,
+ total_lines: 1,
+ truncated: false,
});
expect(sandboxGetMock).not.toHaveBeenCalled();
expect(sandboxCreateMock).not.toHaveBeenCalled();
@@ -825,8 +829,12 @@ describe("createSandboxExecutor", () => {
expect(response.result).toEqual({
content: "from sandbox",
+ end_line: 1,
path: `${sandboxSkillDir("demo-skill")}/references/missing.md`,
+ start_line: 1,
success: true,
+ total_lines: 1,
+ truncated: false,
});
expect(sandboxCreateMock).toHaveBeenCalledTimes(1);
});
@@ -868,8 +876,12 @@ describe("createSandboxExecutor", () => {
expect(response.result).toEqual({
content: "Sandbox note",
+ end_line: 1,
path: `${sandboxSkillDir("demo-skill")}/references/note.md`,
+ start_line: 1,
success: true,
+ total_lines: 1,
+ truncated: false,
});
expect(sandboxGetMock).toHaveBeenCalledWith({ sandboxId: "sbx_existing" });
});
diff --git a/packages/junior/tests/unit/prompt.test.ts b/packages/junior/tests/unit/prompt.test.ts
index 035da8fe..3741b371 100644
--- a/packages/junior/tests/unit/prompt.test.ts
+++ b/packages/junior/tests/unit/prompt.test.ts
@@ -69,4 +69,28 @@ describe("prompt builders", () => {
expect(turnContext).not.toContain("");
});
+
+ it("puts tool guidance in turn context, not the static system prompt", () => {
+ const systemPrompt = buildSystemPrompt();
+ const turnContext = buildTurnContextPrompt({
+ availableSkills: [],
+ activeSkills: [],
+ activeMcpCatalogs: [],
+ invocation: null,
+ toolGuidance: [
+ {
+ name: "editFile",
+ promptSnippet: "exact edits",
+ promptGuidelines: ["unique oldText"],
+ },
+ ],
+ turnState: "fresh",
+ });
+
+ expect(systemPrompt).not.toContain("<tool-guidance>");
+ expect(turnContext).toContain("<tool-guidance>");
+ expect(turnContext).toContain('name="editFile"');
+ expect(turnContext).toContain("- exact edits");
+ expect(turnContext).toContain("- unique oldText");
+ });
});
diff --git a/packages/junior/tests/unit/tools/agent-tools.test.ts b/packages/junior/tests/unit/tools/agent-tools.test.ts
index 6c7ef0e4..d5be629e 100644
--- a/packages/junior/tests/unit/tools/agent-tools.test.ts
+++ b/packages/junior/tests/unit/tools/agent-tools.test.ts
@@ -175,6 +175,27 @@ describe("createAgentTools", () => {
expect(onToolCall).toHaveBeenCalledWith("bash", { command: "which gh" });
});
+ it("forwards Pi tool preparation metadata", () => {
+ const sandbox = new SkillSandbox([], []);
+ const prepareArguments = vi.fn((args: unknown) => args as never);
+ const [editTool] = createAgentTools(
+ {
+ editFile: {
+ description: "edit",
+ inputSchema: {} as any,
+ prepareArguments,
+ executionMode: "sequential",
+ execute: async () => ({ ok: true }),
+ },
+ },
+ sandbox,
+ {},
+ );
+
+ expect(editTool?.prepareArguments).toBe(prepareArguments);
+ expect(editTool?.executionMode).toBe("sequential");
+ });
+
it("records tool call arguments and result on the execute_tool span", async () => {
const sandbox = new SkillSandbox([], []);
const [bashTool] = createAgentTools(
diff --git a/packages/junior/tests/unit/tools/execution/build-sandbox-input.test.ts b/packages/junior/tests/unit/tools/execution/build-sandbox-input.test.ts
index 7afbd3e8..414f5555 100644
--- a/packages/junior/tests/unit/tools/execution/build-sandbox-input.test.ts
+++ b/packages/junior/tests/unit/tools/execution/build-sandbox-input.test.ts
@@ -6,12 +6,74 @@ describe("buildSandboxInput", () => {
expect(buildSandboxInput("bash", { command: "ls -la" })).toEqual({
command: "ls -la",
});
+ expect(
+ buildSandboxInput("bash", { command: "sleep 10", timeoutMs: 1000 }),
+ ).toEqual({
+ command: "sleep 10",
+ timeoutMs: 1000,
+ });
});
it("normalizes readFile path", () => {
expect(buildSandboxInput("readFile", { path: "/tmp/file.txt" })).toEqual({
path: "/tmp/file.txt",
});
+ expect(
+ buildSandboxInput("readFile", {
+ path: "/tmp/file.txt",
+ offset: 10,
+ limit: 20,
+ }),
+ ).toEqual({
+ path: "/tmp/file.txt",
+ offset: 10,
+ limit: 20,
+ });
+ });
+
+ it("normalizes sandbox discovery tool params", () => {
+ expect(
+ buildSandboxInput("grep", {
+ pattern: "needle",
+ path: "src",
+ glob: "*.ts",
+ ignoreCase: true,
+ literal: true,
+ context: 1,
+ limit: 2,
+ }),
+ ).toEqual({
+ pattern: "needle",
+ path: "src",
+ glob: "*.ts",
+ ignoreCase: true,
+ literal: true,
+ context: 1,
+ limit: 2,
+ });
+ expect(
+ buildSandboxInput("findFiles", {
+ pattern: "**/*.ts",
+ path: "src",
+ limit: 5,
+ }),
+ ).toEqual({
+ pattern: "**/*.ts",
+ path: "src",
+ limit: 5,
+ });
+ expect(buildSandboxInput("listDir", { path: "src", limit: 5 })).toEqual({
+ path: "src",
+ limit: 5,
+ });
+ });
+
+ it("normalizes editFile params", () => {
+ const edits = [{ oldText: "before", newText: "after" }];
+ expect(buildSandboxInput("editFile", { path: "src/a.ts", edits })).toEqual({
+ path: "src/a.ts",
+ edits,
+ });
});
it("normalizes writeFile path and content", () => {
@@ -28,6 +90,7 @@ describe("buildSandboxInput", () => {
it("handles missing fields with empty strings", () => {
expect(buildSandboxInput("bash", {})).toEqual({ command: "" });
expect(buildSandboxInput("readFile", {})).toEqual({ path: "" });
+ expect(buildSandboxInput("editFile", {})).toEqual({ path: "", edits: [] });
expect(buildSandboxInput("writeFile", {})).toEqual({
path: "",
content: "",
diff --git a/packages/junior/tests/unit/tools/sandbox/file-tools.test.ts b/packages/junior/tests/unit/tools/sandbox/file-tools.test.ts
new file mode 100644
index 00000000..7262cc1f
--- /dev/null
+++ b/packages/junior/tests/unit/tools/sandbox/file-tools.test.ts
@@ -0,0 +1,220 @@
+import path from "node:path";
+import { describe, expect, it } from "vitest";
+import { SANDBOX_WORKSPACE_ROOT } from "@/chat/sandbox/paths";
+import {
+ editFile,
+ prepareEditFileArguments,
+} from "@/chat/tools/sandbox/edit-file";
+import { findFiles } from "@/chat/tools/sandbox/find-files";
+import { grepFiles } from "@/chat/tools/sandbox/grep";
+import { listDir } from "@/chat/tools/sandbox/list-dir";
+import { sliceFileContent } from "@/chat/tools/sandbox/read-file";
+import type { SandboxFileSystem } from "@/chat/tools/sandbox/file-utils";
+
+function workspacePath(filePath: string): string {
+ return path.posix.join(SANDBOX_WORKSPACE_ROOT, filePath);
+}
+
+function createMemoryFs(initialFiles: Record<string, string>) {
+ const files = new Map(
+ Object.entries(initialFiles).map(([filePath, content]) => [
+ workspacePath(filePath),
+ content,
+ ]),
+ );
+
+ const hasDirectory = (directoryPath: string) =>
+ [...files.keys()].some((filePath) =>
+ filePath.startsWith(`${directoryPath}/`),
+ );
+
+ const fs: SandboxFileSystem = {
+ async readFile(filePath) {
+ const content = files.get(filePath);
+ if (content === undefined) {
+ throw new Error(`missing file: ${filePath}`);
+ }
+ return content;
+ },
+ async writeFile(filePath, content) {
+ files.set(filePath, content);
+ },
+ async readdir(directoryPath) {
+ if (!hasDirectory(directoryPath)) {
+ throw new Error(`missing directory: ${directoryPath}`);
+ }
+ const entries = new Set<string>();
+ for (const filePath of files.keys()) {
+ if (!filePath.startsWith(`${directoryPath}/`)) continue;
+ const remainder = filePath.slice(directoryPath.length + 1);
+ const [entry] = remainder.split("/");
+ if (entry) entries.add(entry);
+ }
+ return [...entries];
+ },
+ async stat(filePath) {
+ if (files.has(filePath)) {
+ return { isDirectory: () => false };
+ }
+ if (hasDirectory(filePath)) {
+ return { isDirectory: () => true };
+ }
+ throw new Error(`missing path: ${filePath}`);
+ },
+ };
+
+ return {
+ fs,
+ read(filePath: string) {
+ return files.get(workspacePath(filePath));
+ },
+ };
+}
+
+describe("sandbox file tools", () => {
+ it("slices readFile content with continuation metadata", () => {
+ expect(
+ sliceFileContent({
+ content: "one\ntwo\nthree",
+ path: "notes.txt",
+ offset: 2,
+ limit: 1,
+ }),
+ ).toEqual({
+ content: "two",
+ continuation: "Read more with offset=3 and limit=1.",
+ end_line: 2,
+ path: "notes.txt",
+ start_line: 2,
+ success: true,
+ total_lines: 3,
+ truncated: true,
+ });
+ });
+
+ it("applies exact edits and preserves line endings", async () => {
+ const memory = createMemoryFs({
+ "src/app.ts": "one\r\ntwo\r\nthree\r\n",
+ });
+
+ const result = await editFile({
+ fs: memory.fs,
+ path: "src/app.ts",
+ edits: [{ oldText: "two\nthree", newText: "TWO\nTHREE" }],
+ });
+
+ expect(memory.read("src/app.ts")).toBe("one\r\nTWO\r\nTHREE\r\n");
+ expect(result.details).toMatchObject({
+ ok: true,
+ path: "src/app.ts",
+ replacements: 1,
+ });
+ expect(result.details.diff).toContain("+2 TWO");
+ });
+
+ it("rejects ambiguous exact edits", async () => {
+ const memory = createMemoryFs({
+ "src/app.ts": "same\nsame\n",
+ });
+
+ await expect(
+ editFile({
+ fs: memory.fs,
+ path: "src/app.ts",
+ edits: [{ oldText: "same", newText: "changed" }],
+ }),
+ ).rejects.toThrow("Found 2 occurrences");
+ });
+
+ it("prepares common edit argument variants", () => {
+ expect(
+ prepareEditFileArguments({
+ path: "src/app.ts",
+ old_text: "before",
+ new_text: "after",
+ }),
+ ).toEqual({
+ path: "src/app.ts",
+ edits: [{ oldText: "before", newText: "after" }],
+ });
+ });
+
+ it("lists, finds, and searches files without shelling out", async () => {
+ const memory = createMemoryFs({
+ "README.md": "hello",
+ "src/app.ts": "const needle = true;\n",
+ "src/nested/test.ts": "needle again\n",
+ });
+
+ await expect(listDir({ fs: memory.fs, path: "src" })).resolves.toEqual({
+ content: [{ type: "text", text: "app.ts\nnested/" }],
+ details: { ok: true, path: "src", truncated: false },
+ });
+ await expect(
+ findFiles({ fs: memory.fs, path: "src", pattern: "*.ts" }),
+ ).resolves.toEqual({
+ content: [{ type: "text", text: "app.ts\nnested/test.ts" }],
+ details: { ok: true, path: "src", truncated: false },
+ });
+ await expect(
+ grepFiles({
+ fs: memory.fs,
+ path: "src",
+ pattern: "needle",
+ literal: true,
+ }),
+ ).resolves.toMatchObject({
+ content: [
+ {
+ type: "text",
+ text: "app.ts:1: const needle = true;\nnested/test.ts:1: needle again",
+ },
+ ],
+ details: { ok: true, path: "src", truncated: false },
+ });
+ });
+
+ it("matches globstar directories with or without nested segments", async () => {
+ const memory = createMemoryFs({
+ "src/app.ts": "top",
+ "src/nested/test.ts": "nested",
+ "src/nested/test.js": "ignored",
+ });
+
+ await expect(
+ findFiles({ fs: memory.fs, pattern: "src/**/*.ts" }),
+ ).resolves.toEqual({
+ content: [{ type: "text", text: "src/app.ts\nsrc/nested/test.ts" }],
+ details: { ok: true, path: ".", truncated: false },
+ });
+ });
+
+ it("deduplicates overlapping grep context lines", async () => {
+ const memory = createMemoryFs({
+ "src/app.ts": ["before", "needle one", "needle two", "after"].join("\n"),
+ });
+
+ await expect(
+ grepFiles({
+ fs: memory.fs,
+ path: "src",
+ pattern: "needle",
+ literal: true,
+ context: 1,
+ }),
+ ).resolves.toMatchObject({
+ content: [
+ {
+ type: "text",
+ text: [
+ "app.ts-1- before",
+ "app.ts:2: needle one",
+ "app.ts:3: needle two",
+ "app.ts-4- after",
+ ].join("\n"),
+ },
+ ],
+ details: { ok: true, path: "src", truncated: false },
+ });
+ });
+});