Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 22 additions & 17 deletions src/compaction.ts
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,6 @@ function generateSummaryId(content: string): string {
}

/** Maximum characters for the deterministic fallback truncation (512 tokens * 4 chars). */
const FALLBACK_MAX_CHARS = 512 * 4;
const DEFAULT_LEAF_CHUNK_TOKENS = 20_000;
const CONDENSED_MIN_INPUT_RATIO = 0.1;

Expand Down Expand Up @@ -982,8 +981,13 @@ export class CompactionEngine {
}

/**
* Run three-level summarization escalation:
* normal -> aggressive -> deterministic fallback.
* Run two-level summarization escalation with explicit error handling:
* normal -> aggressive -> fail (do NOT truncate to garbage).
*
* If both normal and aggressive summarization fail (return result >= input tokens),
* returns null. The caller MUST NOT persist these failed attempts.
* This forces the compaction engine to bail and retry on the next turn, instead
* of creating useless garbage "fallback" summaries that pollute the DAG.
*/
private async summarizeWithEscalation(params: {
sourceText: string;
Expand All @@ -992,17 +996,18 @@ export class CompactionEngine {
}): Promise<{ content: string; level: CompactionLevel } | null> {
const sourceText = params.sourceText.trim();
if (!sourceText) {
return {
content: "[Truncated from 0 tokens]",
level: "fallback",
};
return null;
}
const inputTokens = Math.max(1, estimateTokens(sourceText));

const runSummarizer = async (aggressiveMode: boolean): Promise<string | null> => {
const output = await params.summarize(sourceText, aggressiveMode, params.options);
const trimmed = output.trim();
return trimmed || null;
try {
const output = await params.summarize(sourceText, aggressiveMode, params.options);
const trimmed = output.trim();
return trimmed || null;
} catch {
return null;
}
};

const initialSummary = await runSummarizer(false);
Expand All @@ -1021,13 +1026,13 @@ export class CompactionEngine {
level = "aggressive";

if (estimateTokens(summaryText) >= inputTokens) {
const truncated =
sourceText.length > FALLBACK_MAX_CHARS
? sourceText.slice(0, FALLBACK_MAX_CHARS)
: sourceText;
summaryText = `${truncated}
[Truncated from ${inputTokens} tokens]`;
level = "fallback";
// Both normal and aggressive modes failed to compress.
// Return null instead of truncating — the caller will skip
// this compaction and retry on the next turn.
console.warn(
`[lcm] summarization failed to compress (input=${inputTokens}, aggressive=${estimateTokens(summaryText)}); skipping`,
);
return null;
}
}

Expand Down
57 changes: 57 additions & 0 deletions src/db/migration.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,16 @@ function ensureSummaryDepthColumn(db: DatabaseSync): void {
}
}

/** Add the `summaries.level` column (with its CHECK constraint) if this database predates it. */
function ensureSummaryLevelColumn(db: DatabaseSync): void {
  const columns = db.prepare(`PRAGMA table_info(summaries)`).all() as SummaryColumnInfo[];
  // Only run the ALTER when no column named "level" exists yet.
  if (columns.every((col) => col.name !== "level")) {
    db.exec(
      `ALTER TABLE summaries ADD COLUMN level TEXT NOT NULL DEFAULT 'normal' CHECK (level IN ('normal', 'aggressive', 'fallback'))`
    );
  }
}

function ensureSummaryMetadataColumns(db: DatabaseSync): void {
const summaryColumns = db.prepare(`PRAGMA table_info(summaries)`).all() as SummaryColumnInfo[];
const hasEarliestAt = summaryColumns.some((col) => col.name === "earliest_at");
Expand Down Expand Up @@ -183,6 +193,50 @@ function backfillSummaryDepths(db: DatabaseSync): void {
}
}

/**
 * Best-effort backfill of `summaries.level` for rows created before the
 * column existed.
 *
 * Fallback summaries are identified via compaction events recorded in
 * `message_parts` (part_type = 'compaction'): their JSON metadata carries
 * `level: 'fallback'` plus the affected `createdSummaryIds`. Matching
 * summaries are updated to level = 'fallback'; every other row keeps the
 * column's 'normal' default.
 *
 * All errors (including malformed metadata JSON) are swallowed so a failed
 * backfill never blocks the migration itself.
 */
function backfillSummaryLevels(db: DatabaseSync): void {
  try {
    // Phase 1: collect summary ids flagged as fallback in compaction-event metadata.
    const fallbackSummaryIds = new Set<string>();
    const eventRows = db
      .prepare(
        `SELECT part_id, metadata
         FROM message_parts
         WHERE part_type = 'compaction' AND metadata IS NOT NULL`
      )
      .all() as Array<{ part_id: string; metadata: string | null }>;

    for (const row of eventRows) {
      if (!row.metadata) continue;
      try {
        // Narrow the parsed JSON instead of treating it as `any`.
        const meta = JSON.parse(row.metadata) as {
          level?: unknown;
          createdSummaryIds?: unknown;
        };
        if (meta.level === 'fallback' && Array.isArray(meta.createdSummaryIds)) {
          for (const id of meta.createdSummaryIds) {
            if (typeof id === 'string') {
              fallbackSummaryIds.add(id);
            }
          }
        }
      } catch {
        // Skip malformed metadata rows; they simply keep the 'normal' default.
      }
    }

    // Phase 2: apply the collected ids. Prepare the UPDATE once and reuse it,
    // instead of re-preparing the identical statement on every iteration.
    if (fallbackSummaryIds.size > 0) {
      const update = db.prepare(`UPDATE summaries SET level = 'fallback' WHERE summary_id = ?`);
      for (const summaryId of fallbackSummaryIds) {
        update.run(summaryId);
      }
    }
  } catch {
    // Backfill is best-effort; swallow errors to avoid blocking migration
  }
}

function backfillSummaryMetadata(db: DatabaseSync): void {
const conversationRows = db
.prepare(`SELECT DISTINCT conversation_id FROM summaries`)
Expand Down Expand Up @@ -386,6 +440,7 @@ export function runLcmMigrations(
conversation_id INTEGER NOT NULL REFERENCES conversations(conversation_id) ON DELETE CASCADE,
kind TEXT NOT NULL CHECK (kind IN ('leaf', 'condensed')),
depth INTEGER NOT NULL DEFAULT 0,
level TEXT NOT NULL DEFAULT 'normal' CHECK (level IN ('normal', 'aggressive', 'fallback')),
content TEXT NOT NULL,
token_count INTEGER NOT NULL,
earliest_at TEXT,
Expand Down Expand Up @@ -499,8 +554,10 @@ export function runLcmMigrations(

db.exec(`CREATE UNIQUE INDEX IF NOT EXISTS conversations_session_key_idx ON conversations (session_key)`);
ensureSummaryDepthColumn(db);
ensureSummaryLevelColumn(db);
ensureSummaryMetadataColumns(db);
backfillSummaryDepths(db);
backfillSummaryLevels(db);
backfillSummaryMetadata(db);

const fts5Available = options?.fts5Available ?? getLcmDbFeatures(db).fts5Available;
Expand Down
8 changes: 8 additions & 0 deletions src/plugin/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import { createLcmDescribeTool } from "../tools/lcm-describe-tool.js";
import { createLcmExpandQueryTool } from "../tools/lcm-expand-query-tool.js";
import { createLcmExpandTool } from "../tools/lcm-expand-tool.js";
import { createLcmGrepTool } from "../tools/lcm-grep-tool.js";
import { createLcmRepairTool } from "../tools/lcm-repair-command.js";
import type { LcmDependencies } from "../types.js";

/** Parse `agent:<agentId>:<suffix...>` session keys. */
Expand Down Expand Up @@ -1354,6 +1355,13 @@ const lcmPlugin = {
requesterSessionKey: ctx.sessionKey,
}),
);
api.registerTool((ctx) =>
createLcmRepairTool({
deps,
lcm,
sessionKey: ctx.sessionKey,
}),
);

logStartupBannerOnce({
key: "plugin-loaded",
Expand Down
3 changes: 3 additions & 0 deletions src/store/summary-store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ export type SummaryRecord = {
conversationId: number;
kind: SummaryKind;
depth: number;
level?: "normal" | "aggressive" | "fallback";
content: string;
tokenCount: number;
fileIds: string[];
Expand Down Expand Up @@ -98,6 +99,7 @@ interface SummaryRow {
conversation_id: number;
kind: SummaryKind;
depth: number;
level?: string;
content: string;
token_count: number;
file_ids: string;
Expand Down Expand Up @@ -176,6 +178,7 @@ function toSummaryRecord(row: SummaryRow): SummaryRecord {
kind: row.kind,
depth: row.depth,
content: row.content,
level: (row.level as SummaryRecord["level"]) ?? "normal",
tokenCount: row.token_count,
fileIds,
earliestAt: row.earliest_at ? new Date(row.earliest_at) : null,
Expand Down
Loading
Loading