Reduce resume OOM risk and fix update-config skill init (#304)

* Reduce resume transcript memory pressure

* Fix update-config bundled skill schema generation

---------

Co-authored-by: pr0ln <pr0ln@pr0lnui-Macmini.local>
This commit is contained in:
pr0ln
2026-04-04 11:15:01 +09:00
committed by GitHub
parent fb221baa21
commit 694c242865
4 changed files with 294 additions and 13 deletions

View File

@@ -0,0 +1,23 @@
import { afterEach, expect, test } from 'bun:test'
import { clearBundledSkills, getBundledSkills } from '../bundledSkills.js'
import { registerUpdateConfigSkill } from './updateConfig.js'
// Reset the bundled-skill registry between tests so one test's
// registration cannot leak into the next.
afterEach(() => {
  clearBundledSkills()
})

test('update-config skill can generate its prompt without JSON Schema conversion errors', async () => {
  registerUpdateConfigSkill()

  const updateConfig = getBundledSkills().find(
    candidate => candidate.name === 'update-config',
  )
  expect(updateConfig).toBeDefined()
  expect(updateConfig?.type).toBe('prompt')

  // Rendering the prompt exercises the settings-schema → JSON Schema
  // conversion path that this test guards against regressing.
  const promptBlocks = await updateConfig!.getPromptForCommand('', {} as never)
  expect(promptBlocks.length).toBeGreaterThan(0)
  expect(promptBlocks[0]).toMatchObject({ type: 'text' })
  expect((promptBlocks[0] as { text: string }).text).toContain(
    '## Full Settings JSON Schema',
  )
})

View File

@@ -3,7 +3,11 @@ import { mkdtemp, rm, writeFile } from 'node:fs/promises'
import { tmpdir } from 'node:os'
import { join } from 'node:path'
import { buildConversationChain, loadTranscriptFile } from './sessionStorage.ts'
import {
buildConversationChain,
loadTranscriptFile,
stripPersistedToolUseResultsFromJSONLBuffer,
} from './sessionStorage.ts'
// Temp directories created by the tests — presumably removed by a cleanup
// hook using the imported `rm`; confirm against the test file's afterEach.
const tempDirs: string[] = []
// Fixed UUID-shaped session id shared by the transcript fixtures.
const sessionId = '00000000-0000-4000-8000-000000000999'
@@ -194,3 +198,64 @@ test('loadTranscriptFile fails closed when preserved-segment anchor is missing',
const chain = buildConversationChain(messages, messages.get(id(25))!)
expect(chain.map(message => message.uuid)).toEqual([id(25)])
})
test('stripPersistedToolUseResultsFromJSONLBuffer drops raw toolUseResult while preserving persisted preview content', () => {
  // Build a transcript entry whose tool_result content was already swapped
  // for a persisted-output preview, but which still carries the original
  // (deliberately large) raw toolUseResult blob alongside it.
  const entry = user(id(31), null, 'placeholder')
  entry.message = {
    role: 'user',
    content: [
      {
        type: 'tool_result',
        tool_use_id: 'tool-31',
        is_error: false,
        content: '<persisted-output>\nPreview text\n</persisted-output>',
      },
    ],
  }
  ;(entry as typeof entry & { toolUseResult?: unknown }).toolUseResult = {
    stdout: 'x'.repeat(200_000),
    stderr: '',
  }

  const buffer = Buffer.from(`${JSON.stringify(entry)}\n`)
  const stripped = stripPersistedToolUseResultsFromJSONLBuffer(buffer)

  const [roundTripped] = JSON.parse(
    `[${stripped.toString('utf8').trim()}]`,
  ) as Array<typeof entry & { toolUseResult?: unknown }>

  // The heavy raw blob is gone…
  expect(roundTripped?.toolUseResult).toBeUndefined()
  // …while the persisted preview content survives untouched.
  expect(
    (roundTripped?.message.content as Array<{ content: string }>)[0]?.content,
  ).toContain('Preview text')
})
test('loadTranscriptFile omits raw toolUseResult for persisted-output transcript entries', async () => {
  // Transcript entry whose tool_result was already replaced with a
  // persisted-output preview but still carries the heavy raw blob on disk.
  const entry = user(id(41), null, 'placeholder')
  entry.message = {
    role: 'user',
    content: [
      {
        type: 'tool_result',
        tool_use_id: 'tool-41',
        is_error: false,
        content: '<persisted-output>\nPreview text\n</persisted-output>',
      },
    ],
  }
  ;(entry as typeof entry & { toolUseResult?: unknown }).toolUseResult = {
    stdout: 'y'.repeat(200_000),
    stderr: '',
  }

  const transcriptPath = await writeJsonl([entry])
  const { messages } = await loadTranscriptFile(transcriptPath)

  const loaded = messages.get(id(41)) as
    | (typeof entry & { toolUseResult?: unknown })
    | undefined
  expect(loaded).toBeDefined()
  // The raw blob must not be rehydrated into memory on resume…
  expect(loaded?.toolUseResult).toBeUndefined()
  // …while the preview content is still readable.
  expect(
    (loaded?.message.content as Array<{ content: string }>)[0]?.content,
  ).toContain('Preview text')
})

View File

@@ -2013,6 +2013,199 @@ function applyPreservedSegmentRelinks(
return { relinkFailed }
}
// Byte-level needles for a cheap substring pre-check: a transcript line (or
// whole file) can only need sanitizing when it contains both the
// persisted-output preview marker and a raw "toolUseResult" key.
const PERSISTED_OUTPUT_TAG = Buffer.from('<persisted-output>')
const TOOL_USE_RESULT_KEY = Buffer.from('"toolUseResult":')
/**
 * True when `byte` is one of the four whitespace bytes JSON permits between
 * tokens: space, tab, line feed, carriage return. `undefined` (out-of-range
 * buffer read) is not whitespace.
 */
function isJsonWhitespaceByte(byte: number | undefined): boolean {
  switch (byte) {
    case 0x20: // ' '
    case 0x09: // '\t'
    case 0x0a: // '\n'
    case 0x0d: // '\r'
      return true
    default:
      return false
  }
}
/**
 * Advance past any JSON whitespace bytes starting at `index`, never reading
 * at or beyond `end`. Returns the index of the first non-whitespace byte
 * (or `end` when only whitespace remains).
 */
function skipJsonWhitespace(buf: Buffer, index: number, end: number): number {
  let cursor = index
  while (cursor < end) {
    const byte = buf[cursor]
    // Space, tab, LF, CR — the only whitespace JSON allows between tokens.
    if (byte !== 0x20 && byte !== 0x09 && byte !== 0x0a && byte !== 0x0d) {
      break
    }
    cursor++
  }
  return cursor
}
/**
 * Return the index one past the end of the JSON value beginning at (or
 * after leading whitespace from) `start` in `buf`, scanning at most to
 * `end`. Works on raw UTF-8 bytes without parsing the whole document.
 *
 * Handles three value shapes:
 *  - strings (tracks backslash escapes to find the closing quote),
 *  - objects/arrays (bracket matching that ignores brackets inside
 *    string literals),
 *  - primitives (number/true/false/null), terminated by `,` or `}`.
 *
 * Falls back to `end` when the value is absent or unterminated.
 */
function findJsonValueEnd(buf: Buffer, start: number, end: number): number {
  let i = skipJsonWhitespace(buf, start, end)
  const first = buf[i]
  if (first === undefined) return end
  // 0x22 = '"' — string value: scan to the closing unescaped quote.
  if (first === 0x22) {
    i++
    let escapeNext = false
    while (i < end) {
      const byte = buf[i]!
      if (escapeNext) {
        escapeNext = false
      } else if (byte === 0x5c) {
        // 0x5c = '\' — the next byte is escaped and cannot close the string.
        escapeNext = true
      } else if (byte === 0x22) {
        return i + 1
      }
      i++
    }
    return end
  }
  // 0x7b = '{', 0x5b = '[' — container value: match nested brackets while
  // skipping any brackets that appear inside string literals.
  if (first === 0x7b || first === 0x5b) {
    const stack = [first]
    i++
    let inString = false
    let escapeNext = false
    while (i < end) {
      const byte = buf[i]!
      if (escapeNext) {
        escapeNext = false
      } else if (inString) {
        if (byte === 0x5c) escapeNext = true
        else if (byte === 0x22) inString = false
      } else if (byte === 0x22) {
        inString = true
      } else if (byte === 0x7b || byte === 0x5b) {
        stack.push(byte)
      } else if (byte === 0x7d || byte === 0x5d) {
        // Only pop when the closer matches the innermost opener; a
        // mismatched closer is ignored rather than corrupting the scan.
        const open = stack.at(-1)
        if (
          (byte === 0x7d && open === 0x7b) ||
          (byte === 0x5d && open === 0x5b)
        ) {
          stack.pop()
          if (stack.length === 0) return i + 1
        }
      }
      i++
    }
    return end
  }
  // Primitive value: runs until the next ',' (0x2c) or '}' (0x7d).
  while (i < end) {
    const byte = buf[i]!
    if (byte === 0x2c || byte === 0x7d) return i
    i++
  }
  return end
}
/**
 * Remove a top-level `"toolUseResult": <value>` property from a single
 * serialized JSONL transcript line, returning the input buffer unchanged
 * (same object identity — callers rely on this to detect no-ops) when no
 * removal applies.
 *
 * A cheap substring pre-check skips lines lacking either the
 * persisted-output marker or the key. The byte scan then tracks
 * string/escape state and brace depth so the key is only matched when it is
 * a real depth-1 object key, not text inside a nested string or sub-object.
 */
function stripPersistedToolUseResultFromLine(line: Buffer): Buffer {
  if (
    line.indexOf(PERSISTED_OUTPUT_TAG) === -1 ||
    line.indexOf(TOOL_USE_RESULT_KEY) === -1
  ) {
    return line
  }
  let depth = 0
  let inString = false
  let escapeNext = false
  const keyLen = TOOL_USE_RESULT_KEY.length
  // Stop keyLen bytes early: a match needs the full key to fit in the line.
  for (let i = 0; i <= line.length - keyLen; i++) {
    const byte = line[i]!
    if (escapeNext) {
      escapeNext = false
      continue
    }
    if (inString) {
      if (byte === 0x5c) escapeNext = true
      else if (byte === 0x22) inString = false
      continue
    }
    if (byte === 0x22) {
      // An unescaped quote at depth 1 may open the target key; compare the
      // raw key bytes before falling back to ordinary string tracking.
      if (
        depth === 1 &&
        line.compare(TOOL_USE_RESULT_KEY, 0, keyLen, i, i + keyLen) === 0
      ) {
        const valueEnd = findJsonValueEnd(line, i + keyLen, line.length)
        let removeStart = i
        let removeEnd = valueEnd
        // Also remove exactly one adjacent comma so the remaining JSON
        // stays valid: prefer the trailing comma; otherwise consume the
        // leading one (the property was last in its object).
        const afterValue = skipJsonWhitespace(line, valueEnd, line.length)
        if (afterValue < line.length && line[afterValue] === 0x2c) {
          removeEnd = afterValue + 1
        } else {
          let beforeKey = i - 1
          while (beforeKey >= 0 && isJsonWhitespaceByte(line[beforeKey])) {
            beforeKey--
          }
          if (beforeKey >= 0 && line[beforeKey] === 0x2c) {
            removeStart = beforeKey
          }
        }
        // Splice the property out; only the first occurrence is removed.
        return Buffer.concat([
          line.subarray(0, removeStart),
          line.subarray(removeEnd),
        ])
      }
      inString = true
      continue
    }
    // 0x7b '{' / 0x5b '[' deepen; 0x7d '}' / 0x5d ']' shallow.
    if (byte === 0x7b || byte === 0x5b) depth++
    else if (byte === 0x7d || byte === 0x5d) depth--
  }
  return line
}
/**
 * Sanitize a whole JSONL transcript buffer, stripping the raw
 * `toolUseResult` blob from every line that carries a persisted-output
 * preview. Returns the input buffer unchanged when nothing needed
 * stripping, so untouched transcripts incur no copy.
 */
export function stripPersistedToolUseResultsFromJSONLBuffer(buf: Buffer): Buffer {
  // Cheap whole-buffer pre-check: only transcripts containing both the
  // preview marker and the key can possibly need sanitizing.
  if (
    buf.indexOf(PERSISTED_OUTPUT_TAG) === -1 ||
    buf.indexOf(TOOL_USE_RESULT_KEY) === -1
  ) {
    return buf
  }
  const NEWLINE = 0x0a
  const pieces: Buffer[] = []
  let anyLineChanged = false
  let cursor = 0
  while (cursor < buf.length) {
    const newlineAt = buf.indexOf(NEWLINE, cursor)
    const lineEnd = newlineAt === -1 ? buf.length : newlineAt
    const original = buf.subarray(cursor, lineEnd)
    const sanitized = stripPersistedToolUseResultFromLine(original)
    // The line helper returns the same subarray when it made no change,
    // so identity comparison detects whether any work was done.
    if (sanitized !== original) anyLineChanged = true
    pieces.push(sanitized)
    if (newlineAt === -1) break
    // Preserve the newline byte verbatim between sanitized lines.
    pieces.push(buf.subarray(newlineAt, newlineAt + 1))
    cursor = newlineAt + 1
  }
  // Skip reallocating the (potentially large) buffer when nothing changed.
  return anyLineChanged ? Buffer.concat(pieces) : buf
}
/**
 * Parse a JSONL buffer one line at a time, invoking `visit` with each
 * parsed entry. Skips a leading UTF-8 BOM, blank lines, and lines that
 * fail to parse. Visiting entries one-by-one avoids materialising an
 * array of every parsed entry at once.
 */
function forEachParsedJSONLBufferEntry<T>(
  buf: Buffer,
  visit: (entry: T) => void,
): void {
  // Skip a UTF-8 byte-order mark (EF BB BF) if the buffer starts with one.
  let cursor = buf[0] === 0xef && buf[1] === 0xbb && buf[2] === 0xbf ? 3 : 0
  const total = buf.length
  while (cursor < total) {
    let lineEnd = buf.indexOf(0x0a, cursor)
    if (lineEnd === -1) lineEnd = total
    const text = buf.toString('utf8', cursor, lineEnd).trim()
    cursor = lineEnd + 1
    if (text.length === 0) continue
    try {
      visit(jsonParse(text) as T)
    } catch {
      // Skip malformed lines
    }
  }
}
/**
* Delete messages that Snip executions removed from the in-memory array,
* and relink parentUuid across the gaps.
@@ -3636,15 +3829,19 @@ export async function loadTranscriptFile(
buf = walkChainBeforeParse(buf)
}
// Resume path: once a tool_result has been replaced with a persisted
// preview, loading the original toolUseResult blob back into memory is
// wasted work and can OOM before the replacement re-hydration runs.
buf = stripPersistedToolUseResultsFromJSONLBuffer(buf)
// First pass: process metadata-only lines collected during the boundary scan.
// These populate the session-scoped maps (agentSettings, modes, prNumbers,
// etc.) for entries written before the compact boundary. Any overlap with
// the post-boundary buffer is harmless — later values overwrite earlier ones.
if (metadataLines && metadataLines.length > 0) {
const metaEntries = parseJSONL<Entry>(
forEachParsedJSONLBufferEntry<Entry>(
Buffer.from(metadataLines.join('\n')),
)
for (const entry of metaEntries) {
entry => {
if (entry.type === 'summary' && entry.leafUuid) {
summaries.set(entry.leafUuid, entry.summary)
} else if (entry.type === 'custom-title' && entry.sessionId) {
@@ -3666,11 +3863,9 @@ export async function loadTranscriptFile(
prUrls.set(entry.sessionId, entry.prUrl)
prRepositories.set(entry.sessionId, entry.prRepository)
}
}
})
}
const entries = parseJSONL<Entry>(buf)
// Bridge map for legacy progress entries: progress_uuid → progress_parent_uuid.
// PR #24099 removed progress from isTranscriptMessage, so old transcripts with
// progress in the parentUuid chain would truncate at buildConversationChain
@@ -3680,7 +3875,7 @@ export async function loadTranscriptFile(
// rewrite any subsequent message whose parentUuid lands in the bridge.
const progressBridge = new Map<UUID, UUID | null>()
for (const entry of entries) {
forEachParsedJSONLBufferEntry<Entry>(buf, entry => {
// Legacy progress check runs before the Entry-typed else-if chain —
// progress is not in the Entry union, so checking it after TypeScript
// has narrowed `entry` intersects to `never`.
@@ -3695,9 +3890,7 @@ export async function loadTranscriptFile(
? (progressBridge.get(parent) ?? null)
: parent,
)
continue
}
if (isTranscriptMessage(entry)) {
} else if (isTranscriptMessage(entry)) {
if (entry.parentUuid && progressBridge.has(entry.parentUuid)) {
entry.parentUuid = progressBridge.get(entry.parentUuid) ?? null
}
@@ -3754,7 +3947,7 @@ export async function loadTranscriptFile(
} else if (entry.type === 'marble-origami-snapshot') {
contextCollapseSnapshot = entry
}
}
})
} catch {
// File doesn't exist or can't be read
}

View File

@@ -559,7 +559,7 @@ export const SettingsSchema = lazySchema(() =>
enabledPlugins: z
.record(
z.string(),
z.union([z.array(z.string()), z.boolean(), z.undefined()]),
z.union([z.array(z.string()), z.boolean()]),
)
.optional()
.describe(