Compare commits

...

3 Commits

Author SHA1 Message Date
gnanam1990
93c5aefd9e fix(plugins): sanitize env before spawning git so /plugin marketplace add works (#751)
Git 2.30+ refuses to start when any environment value contains a NUL,
CR, or LF character ("Unsafe environment: control characters are not
allowed in values"). User shells frequently leak such values — a
copy-pasted API key with a trailing newline, a terminal-set variable
with embedded escape sequences — which made every /plugin marketplace
add and /plugin install fail with that error before git even ran.

Add a small shared helper that builds the env passed to git child
processes and drops keys whose name or value contains a control
character. The legacy GIT_NO_PROMPT_ENV overrides (terminal prompt
disabled, askpass cleared) move into the same helper. Apply it to
every git invocation in marketplaceManager.ts (5 sites: gitPull,
gitClone, sparse-checkout, post-sparse checkout, reconcileSparseCheckout)
and pluginLoader.ts (8 sites: clone, fetch, checkout in both gitClone
and installFromGitSubdir).

A warning is logged once per process (via the debug logger) listing the
dropped key NAMES — not their values — so the user can clean them up in
their shell.

- src/utils/plugins/gitEnv.ts (new): sanitizeEnvForGit + buildGitChildEnv
- src/utils/plugins/gitEnv.test.ts (new): 10 unit tests covering CR/LF/NUL
  in values, control char in key name, undefined values, defaults,
  extras override
- src/utils/plugins/marketplaceManager.ts: replace 5 inline env spreads
  with buildGitChildEnv()
- src/utils/plugins/pluginLoader.ts: pass env: buildGitChildEnv() to 8
  git exec sites that previously inherited process.env unfiltered

Verified locally on Linux: before fix, git --version with a leaked
control-char env value fails with "Unsafe environment"; after fix it
runs cleanly. Live marketplaceManager.gitClone against a real GitHub
repo with the same leaked env succeeds and the repo is materialized
on disk.
2026-04-28 11:07:07 +05:30
viudes
6ea3eb6483 feat(api): deterministic request-body serialization via stableStringify (#882)
* feat(api): deterministic request-body serialization via stableStringify

Add `stableStringify` helper that emits JSON with object keys sorted
lexicographically at every depth (arrays preserved). Adopt it in the
OpenAI-compatible shim and the Codex Responses-API shim for the outgoing
request body.

WHY: OpenAI / Kimi / DeepSeek / Codex use implicit prefix caching keyed
on exact request bytes. Spurious insertion-order differences in
spread-merged body objects otherwise invalidate the cache on every turn.
Also a pre-requisite for Anthropic `cache_control` breakpoint hits.

Byte-equivalent to `JSON.stringify` when keys already happen to be in
lexical insertion order, so strictly additive across providers.

* fix(api): preserve circular-ref TypeError in stableStringify + cover GitHub fallback

Replace two-pass sortingReplacer approach with a single-pass deepSort that
tracks ancestor objects via WeakSet, throwing TypeError on cycles (same
contract as native JSON.stringify) and correctly handling DAGs via
try/finally cleanup. Switch the GitHub Copilot /responses fallback in
openaiShim.ts from JSON.stringify to stableStringify so that path is also
byte-stable for prefix caching.

Regression coverage added: top-level cycle, deep nested cycle, DAG safety.

* fix(api): align stableStringify with native JSON.stringify pre-processing

Replicate native JSON.stringify pre-processing inside deepSort so
serialization output matches native behavior beyond key ordering:

- invoke toJSON(key) when present (Date, URL, user classes); pass ''
  at top-level, property name for nested values, index string for
  array elements
- unbox Number/String/Boolean wrappers via valueOf() so new Boolean(false)
  doesn't get truthy-coerced
- run cycle detection on the post-toJSON value so a toJSON returning
  an ancestor still throws TypeError; DAGs continue to not throw
- drop properties whose toJSON returns undefined, matching native

Add focused stableStringify.test.ts (21 cases) asserting equality with
JSON.stringify across toJSON paths, wrapper unboxing, cycle/DAG handling,
and sortKeysDeep parity.
2026-04-27 23:33:15 +08:00
vrdons
f699c1f2fc fix routing path (#923) 2026-04-27 20:05:17 +08:00
10 changed files with 688 additions and 25 deletions

View File

@@ -170,7 +170,7 @@ For best results, use models with strong tool/function calling support.
OpenClaude can route different agents to different models through settings-based routing. This is useful for cost optimization or splitting work by model strength.
Add to `~/.claude/settings.json`:
Add to `~/.openclaude.json`:
```json
{

View File

@@ -2,6 +2,7 @@ import { APIError } from '@anthropic-ai/sdk'
import { buildAnthropicUsageFromRawUsage } from './cacheMetrics.js'
import { compressToolHistory } from './compressToolHistory.js'
import { fetchWithProxyRetry } from './fetchWithProxyRetry.js'
import { stableStringify } from '../../utils/stableStringify.js'
import type {
ResolvedCodexCredentials,
ResolvedProviderRequest,
@@ -559,7 +560,9 @@ export async function performCodexRequest(options: {
{
method: 'POST',
headers,
body: JSON.stringify(body),
// WHY: byte-identity required for implicit prefix caching on
// OpenAI Responses API. See src/utils/stableStringify.ts.
body: stableStringify(body),
signal: options.signal,
},
)

View File

@@ -74,7 +74,12 @@ import {
hasToolFieldMapping,
} from './toolArgumentNormalization.js'
import { logApiCallStart, logApiCallEnd } from '../../utils/requestLogging.js'
import { createStreamState, processStreamChunk, getStreamStats } from '../../utils/streamingOptimizer.js'
import {
createStreamState,
processStreamChunk,
getStreamStats,
} from '../../utils/streamingOptimizer.js'
import { stableStringify } from '../../utils/stableStringify.js'
type SecretValueSource = Partial<{
OPENAI_API_KEY: string
@@ -1852,12 +1857,17 @@ class OpenAIShimMessages {
return false
}
let serializedBody = JSON.stringify(
// WHY: byte-identity required for implicit prefix caching in
// OpenAI/Kimi/DeepSeek. stableStringify sorts object keys at every
// depth so spurious insertion-order differences across rebuilds of
// `body` (spread-merge, conditional assignments above) don't bust
// the provider's prefix hash.
let serializedBody = stableStringify(
request.transport === 'responses' ? buildResponsesBody() : body,
)
const refreshSerializedBody = (): void => {
serializedBody = JSON.stringify(
serializedBody = stableStringify(
request.transport === 'responses' ? buildResponsesBody() : body,
)
}
@@ -2036,7 +2046,7 @@ class OpenAIShimMessages {
responsesResponse = await fetchWithProxyRetry(responsesUrl, {
method: 'POST',
headers,
body: JSON.stringify(responsesBody),
body: stableStringify(responsesBody),
signal: options?.signal,
})
} catch (error) {

View File

@@ -0,0 +1,104 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test'
import {
__resetGitEnvWarningForTesting,
buildGitChildEnv,
sanitizeEnvForGit,
} from './gitEnv.js'
// Tests for the pure sanitizer: given an env map, entries with a
// NUL/CR/LF in the key or value are removed and their names reported.
describe('sanitizeEnvForGit', () => {
  test('drops values containing LF', () => {
    const { env, dropped } = sanitizeEnvForGit({
      GOOD: 'value',
      BAD_NEWLINE: 'line1\nline2',
    })
    expect(env).toEqual({ GOOD: 'value' })
    expect(dropped).toEqual(['BAD_NEWLINE'])
  })
  test('drops values containing CR', () => {
    const { dropped } = sanitizeEnvForGit({
      GOOD: 'value',
      BAD_CR: 'value\r',
    })
    expect(dropped).toEqual(['BAD_CR'])
  })
  test('drops values containing NUL', () => {
    const { dropped } = sanitizeEnvForGit({
      GOOD: 'value',
      BAD_NUL: 'a\0b',
    })
    expect(dropped).toEqual(['BAD_NUL'])
  })
  test('drops keys whose name itself contains a control character', () => {
    const { env, dropped } = sanitizeEnvForGit({
      'BAD\nKEY': 'safe-value',
      GOOD: 'value',
    })
    expect(env).toEqual({ GOOD: 'value' })
    expect(dropped).toEqual(['BAD\nKEY'])
  })
  test('skips entries explicitly set to undefined without listing them as dropped', () => {
    const { env, dropped } = sanitizeEnvForGit({
      GOOD: 'value',
      MAYBE: undefined,
    })
    expect(env).toEqual({ GOOD: 'value' })
    expect(dropped).toEqual([])
  })
  test('returns input unchanged when nothing is unsafe', () => {
    const input = {
      PATH: '/usr/bin:/bin',
      HOME: '/home/user',
      GIT_TERMINAL_PROMPT: '0',
    }
    const { env, dropped } = sanitizeEnvForGit(input)
    expect(env).toEqual(input)
    expect(dropped).toEqual([])
  })
})
describe('buildGitChildEnv', () => {
  // Use a dedicated key name so the tests never clobber a real user
  // variable; beforeEach/afterEach snapshot and restore whatever value
  // was present in process.env before the test ran.
  const ORIGINAL_BAD_KEY = 'OPENCLAUDE_TEST_BAD_ENV_FOR_GIT'
  let originalValue: string | undefined
  beforeEach(() => {
    // Reset the once-per-process warning flag so each test observes a
    // fresh warning path.
    __resetGitEnvWarningForTesting()
    originalValue = process.env[ORIGINAL_BAD_KEY]
  })
  afterEach(() => {
    // Restore the pre-test state of the probe variable exactly: delete
    // it if it was absent, otherwise put the original value back.
    if (originalValue === undefined) {
      delete process.env[ORIGINAL_BAD_KEY]
    } else {
      process.env[ORIGINAL_BAD_KEY] = originalValue
    }
  })
  test('always sets the no-prompt overrides', () => {
    const env = buildGitChildEnv()
    expect(env.GIT_TERMINAL_PROMPT).toBe('0')
    expect(env.GIT_ASKPASS).toBe('')
  })
  test('drops process.env values containing control characters (issue #751)', () => {
    process.env[ORIGINAL_BAD_KEY] = 'paste-with-newline\n'
    const env = buildGitChildEnv()
    expect(env[ORIGINAL_BAD_KEY]).toBeUndefined()
    expect(env.GIT_TERMINAL_PROMPT).toBe('0')
  })
  test('caller extras override process.env and the no-prompt defaults', () => {
    const env = buildGitChildEnv({
      GIT_TERMINAL_PROMPT: '1',
      CUSTOM_KEY: 'custom-value',
    })
    expect(env.GIT_TERMINAL_PROMPT).toBe('1')
    expect(env.CUSTOM_KEY).toBe('custom-value')
  })
  test('caller-provided unsafe extras are also dropped', () => {
    const env = buildGitChildEnv({ EXTRA_BAD: 'a\rb' })
    expect(env.EXTRA_BAD).toBeUndefined()
  })
})

View File

@@ -0,0 +1,70 @@
import { logForDebugging } from '../debug.js'
/**
* Git 2.30+ refuses to start when any environment value contains a NUL,
* CR, or LF character ("Unsafe environment: control characters are not
* allowed in values"). User shells frequently leak such values — a
* copy-pasted API key with a trailing newline, or a terminal-set
* variable with embedded escape sequences — which would otherwise break
* every plugin clone or pull. We drop offending entries before forwarding
* the environment to git.
*/
// Characters git 2.30+ rejects anywhere in an environment value.
const GIT_UNSAFE_VALUE_RE = /[\0\r\n]/
const GIT_NO_PROMPT_ENV = {
  GIT_TERMINAL_PROMPT: '0', // Prevent terminal credential prompts
  GIT_ASKPASS: '', // Disable askpass GUI programs
}
// Once-per-process latch for the dropped-keys warning.
let warnedAboutDroppedEnvKeys = false
/**
 * Copies `env`, removing every entry whose key OR value contains a
 * NUL/CR/LF character. The names of removed keys are returned alongside
 * the sanitized map so callers can report them without ever exposing
 * the (possibly secret) values.
 */
export function sanitizeEnvForGit(
  env: NodeJS.ProcessEnv,
): { env: NodeJS.ProcessEnv; dropped: string[] } {
  const dropped: string[] = []
  const sanitized: NodeJS.ProcessEnv = {}
  Object.entries(env).forEach(([name, val]) => {
    // Entries explicitly set to undefined are simply skipped — they are
    // not forwarded, but they are not "dropped" either.
    if (val === undefined) return
    const unsafe =
      GIT_UNSAFE_VALUE_RE.test(name) || GIT_UNSAFE_VALUE_RE.test(val)
    if (unsafe) {
      dropped.push(name)
    } else {
      sanitized[name] = val
    }
  })
  return { env: sanitized, dropped }
}
/**
 * Build the environment for a git child process: `process.env` merged
 * with the no-prompt overrides and optional caller `extras` (highest
 * precedence), then run through `sanitizeEnvForGit` so git's
 * unsafe-value check cannot trip. The names of dropped keys are logged
 * only once per process so the user can clean up their shell.
 */
export function buildGitChildEnv(
  extras?: NodeJS.ProcessEnv,
): NodeJS.ProcessEnv {
  const candidate: NodeJS.ProcessEnv = {
    ...process.env,
    ...GIT_NO_PROMPT_ENV,
    ...(extras ?? {}),
  }
  const result = sanitizeEnvForGit(candidate)
  const shouldWarn = result.dropped.length > 0 && !warnedAboutDroppedEnvKeys
  if (shouldWarn) {
    warnedAboutDroppedEnvKeys = true
    logForDebugging(
      `git child env: dropped ${result.dropped.length} key(s) containing control characters: ${result.dropped.join(', ')}. Git 2.30+ rejects them; clean these up in your shell to forward them to git.`,
      { level: 'warn' },
    )
  }
  return result.env
}
/**
 * Test-only escape hatch that resets the once-per-process warning flag
 * so unit tests can exercise the warning path repeatedly.
 * The double-underscore prefix marks this as internal test plumbing;
 * production code should never call it.
 */
export function __resetGitEnvWarningForTesting(): void {
  warnedAboutDroppedEnvKeys = false
}

View File

@@ -53,6 +53,7 @@ import {
getAddDirExtraMarketplaces,
} from './addDirPluginSettings.js'
import { markPluginVersionOrphaned } from './cacheUtils.js'
import { buildGitChildEnv } from './gitEnv.js'
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
import { removeAllPluginsForMarketplace } from './installedPluginsManager.js'
import {
@@ -506,11 +507,6 @@ function seedDirFor(installLocation: string): string | undefined {
* Provides helpful error messages for common failure scenarios.
* If a ref is specified, fetches and checks out that specific branch or tag.
*/
// Environment variables to prevent git from prompting for credentials
const GIT_NO_PROMPT_ENV = {
GIT_TERMINAL_PROMPT: '0', // Prevent terminal credential prompts
GIT_ASKPASS: '', // Disable askpass GUI programs
}
const DEFAULT_PLUGIN_GIT_TIMEOUT_MS = 120 * 1000
@@ -531,7 +527,7 @@ export async function gitPull(
options?: { disableCredentialHelper?: boolean; sparsePaths?: string[] },
): Promise<{ code: number; stderr: string }> {
logForDebugging(`git pull: cwd=${cwd} ref=${ref ?? 'default'}`)
const env = { ...process.env, ...GIT_NO_PROMPT_ENV }
const env = buildGitChildEnv()
const baseArgs = ['-c', 'core.hooksPath=/dev/null']
const credentialArgs = options?.disableCredentialHelper
? ['-c', 'credential.helper=']
@@ -844,7 +840,7 @@ export async function gitClone(
const result = await execFileNoThrowWithCwd(gitExe(), args, {
timeout: timeoutMs,
stdin: 'ignore',
env: { ...process.env, ...GIT_NO_PROMPT_ENV },
env: buildGitChildEnv(),
})
// Scrub credentials from execa's error/stderr fields before any logging or
@@ -870,7 +866,7 @@ export async function gitClone(
cwd: targetPath,
timeout: timeoutMs,
stdin: 'ignore',
env: { ...process.env, ...GIT_NO_PROMPT_ENV },
env: buildGitChildEnv(),
},
)
if (sparseResult.code !== 0) {
@@ -889,7 +885,7 @@ export async function gitClone(
cwd: targetPath,
timeout: timeoutMs,
stdin: 'ignore',
env: { ...process.env, ...GIT_NO_PROMPT_ENV },
env: buildGitChildEnv(),
},
)
if (checkoutResult.code !== 0) {
@@ -1040,7 +1036,7 @@ export async function reconcileSparseCheckout(
cwd: string,
sparsePaths: string[] | undefined,
): Promise<{ code: number; stderr: string }> {
const env = { ...process.env, ...GIT_NO_PROMPT_ENV }
const env = buildGitChildEnv()
if (sparsePaths && sparsePaths.length > 0) {
return execFileNoThrowWithCwd(

View File

@@ -87,6 +87,7 @@ import { getAddDirEnabledPlugins } from './addDirPluginSettings.js'
import { verifyAndDemote } from './dependencyResolver.js'
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
import { checkGitAvailable } from './gitAvailability.js'
import { buildGitChildEnv } from './gitEnv.js'
import { getInMemoryInstalledPlugins } from './installedPluginsManager.js'
import { getManagedPluginNames } from './managedPlugins.js'
import {
@@ -560,7 +561,9 @@ export async function gitClone(
args.push(gitUrl, targetPath)
const cloneStarted = performance.now()
const cloneResult = await execFileNoThrow(gitExe(), args)
const cloneResult = await execFileNoThrow(gitExe(), args, {
env: buildGitChildEnv(),
})
if (cloneResult.code !== 0) {
logPluginFetch(
@@ -579,7 +582,7 @@ export async function gitClone(
const shallowFetchResult = await execFileNoThrowWithCwd(
gitExe(),
['fetch', '--depth', '1', 'origin', sha],
{ cwd: targetPath },
{ cwd: targetPath, env: buildGitChildEnv() },
)
if (shallowFetchResult.code !== 0) {
@@ -591,7 +594,7 @@ export async function gitClone(
const unshallowResult = await execFileNoThrowWithCwd(
gitExe(),
['fetch', '--unshallow'],
{ cwd: targetPath },
{ cwd: targetPath, env: buildGitChildEnv() },
)
if (unshallowResult.code !== 0) {
@@ -612,7 +615,7 @@ export async function gitClone(
const checkoutResult = await execFileNoThrowWithCwd(
gitExe(),
['checkout', sha],
{ cwd: targetPath },
{ cwd: targetPath, env: buildGitChildEnv() },
)
if (checkoutResult.code !== 0) {
@@ -745,7 +748,9 @@ export async function installFromGitSubdir(
}
cloneArgs.push(gitUrl, cloneDir)
const cloneResult = await execFileNoThrow(gitExe(), cloneArgs)
const cloneResult = await execFileNoThrow(gitExe(), cloneArgs, {
env: buildGitChildEnv(),
})
if (cloneResult.code !== 0) {
throw new Error(
`Failed to clone repository for git-subdir source: ${cloneResult.stderr}`,
@@ -756,7 +761,7 @@ export async function installFromGitSubdir(
const sparseResult = await execFileNoThrowWithCwd(
gitExe(),
['sparse-checkout', 'set', '--cone', '--', subdirPath],
{ cwd: cloneDir },
{ cwd: cloneDir, env: buildGitChildEnv() },
)
if (sparseResult.code !== 0) {
throw new Error(
@@ -775,7 +780,7 @@ export async function installFromGitSubdir(
const fetchSha = await execFileNoThrowWithCwd(
gitExe(),
['fetch', '--depth', '1', 'origin', sha],
{ cwd: cloneDir },
{ cwd: cloneDir, env: buildGitChildEnv() },
)
if (fetchSha.code !== 0) {
logForDebugging(
@@ -784,7 +789,7 @@ export async function installFromGitSubdir(
const unshallow = await execFileNoThrowWithCwd(
gitExe(),
['fetch', '--unshallow'],
{ cwd: cloneDir },
{ cwd: cloneDir, env: buildGitChildEnv() },
)
if (unshallow.code !== 0) {
throw new Error(`Failed to fetch commit ${sha}: ${unshallow.stderr}`)
@@ -793,7 +798,7 @@ export async function installFromGitSubdir(
const checkout = await execFileNoThrowWithCwd(
gitExe(),
['checkout', sha],
{ cwd: cloneDir },
{ cwd: cloneDir, env: buildGitChildEnv() },
)
if (checkout.code !== 0) {
throw new Error(`Failed to checkout commit ${sha}: ${checkout.stderr}`)
@@ -808,9 +813,11 @@ export async function installFromGitSubdir(
const [checkout, revParse] = await Promise.all([
execFileNoThrowWithCwd(gitExe(), ['checkout', 'HEAD'], {
cwd: cloneDir,
env: buildGitChildEnv(),
}),
execFileNoThrowWithCwd(gitExe(), ['rev-parse', 'HEAD'], {
cwd: cloneDir,
env: buildGitChildEnv(),
}),
])
if (checkout.code !== 0) {

View File

@@ -0,0 +1,142 @@
import { describe, expect, test } from 'bun:test'
import { sortKeysDeep, stableStringify } from './stableStringify.js'
// These tests pin byte-level stability of serialization helpers. The
// invariant that matters for implicit prefix caching in OpenAI / Kimi /
// DeepSeek / Codex — and for Anthropic cache_control breakpoints — is:
// semantically-equal inputs must produce byte-identical output across
// invocations and across key-order permutations.
// Core byte-stability contract: identical inputs (up to key order)
// must serialize to identical bytes; array order and cycle behavior
// must match native JSON.stringify.
describe('stableStringify', () => {
  test('two invocations with the same object produce identical strings', () => {
    const a = stableStringify({ b: 1, a: 2 })
    const b = stableStringify({ b: 1, a: 2 })
    expect(a).toBe(b)
  })
  test('key order at the top level does not affect output', () => {
    expect(stableStringify({ a: 1, b: 2 })).toBe(stableStringify({ b: 2, a: 1 }))
  })
  test('key order at nested depths does not affect output', () => {
    const x = { outer: { z: 1, a: 2, m: { b: 3, a: 4 } } }
    const y = { outer: { m: { a: 4, b: 3 }, a: 2, z: 1 } }
    expect(stableStringify(x)).toBe(stableStringify(y))
  })
  test('array element order IS preserved (semantic in API contracts)', () => {
    expect(stableStringify({ messages: ['a', 'b', 'c'] })).not.toBe(
      stableStringify({ messages: ['c', 'b', 'a'] }),
    )
  })
  test('arrays of objects have keys sorted inside each element', () => {
    const out = stableStringify({
      tools: [
        { name: 'Bash', description: 'run' },
        { description: 'read', name: 'Read' },
      ],
    })
    expect(out).toBe(
      '{"tools":[{"description":"run","name":"Bash"},{"description":"read","name":"Read"}]}',
    )
  })
  test('undefined values are omitted (matches JSON.stringify)', () => {
    const out = stableStringify({ a: undefined, b: 1 })
    expect(out).toBe('{"b":1}')
  })
  test('primitive and null pass through unchanged', () => {
    expect(stableStringify(null)).toBe('null')
    expect(stableStringify(42)).toBe('42')
    expect(stableStringify('x')).toBe('"x"')
    expect(stableStringify(true)).toBe('true')
  })
  test('throws TypeError on circular structures (same behavior as JSON.stringify)', () => {
    const obj: Record<string, unknown> = {}
    obj.self = obj
    // The exact message varies by engine (V8: "Converting circular structure
    // to JSON", Bun: "JSON.stringify cannot serialize cyclic structures.").
    // We only pin the error class — same contract as native JSON.stringify.
    expect(() => stableStringify(obj)).toThrow(TypeError)
    expect(() => JSON.stringify(obj)).toThrow(TypeError)
  })
  test('throws TypeError on circular references nested deep in the graph', () => {
    const inner: Record<string, unknown> = { val: 1 }
    const outer = { a: { b: inner } }
    inner.cycle = outer
    expect(() => stableStringify(outer)).toThrow(TypeError)
  })
  test('does not throw on DAGs (same object referenced from multiple keys)', () => {
    const shared = { x: 1 }
    // Native JSON.stringify handles this fine — stableStringify must too.
    expect(() => stableStringify({ a: shared, b: shared })).not.toThrow()
    expect(stableStringify({ a: shared, b: shared })).toBe(
      '{"a":{"x":1},"b":{"x":1}}',
    )
  })
})
// sortKeysDeep returns a clone whose key ENUMERATION order is
// lexicographic — the structural counterpart of stableStringify.
describe('sortKeysDeep', () => {
  test('returns an object with sorted keys at every depth', () => {
    const input = {
      b: 1,
      a: { y: 2, x: { d: 3, c: 4 } },
    }
    const sorted = sortKeysDeep(input) as Record<string, unknown>
    expect(Object.keys(sorted)).toEqual(['a', 'b'])
    const inner = sorted.a as Record<string, unknown>
    expect(Object.keys(inner)).toEqual(['x', 'y'])
  })
  test('arrays are preserved element-wise', () => {
    const input = [
      { b: 1, a: 2 },
      { d: 3, c: 4 },
    ]
    const sorted = sortKeysDeep(input) as Array<Record<string, unknown>>
    expect(Object.keys(sorted[0]!)).toEqual(['a', 'b'])
    expect(Object.keys(sorted[1]!)).toEqual(['c', 'd'])
  })
})
describe('prefix caching invariants — end-to-end', () => {
  // This is the real payload shape that an OpenAI-compatible body
  // takes on its way to the upstream provider. We exercise it via
  // stableStringify to verify that rebuilding the body with different
  // key insertion orders yields the same bytes.
  // bodyA and bodyB are semantically identical; only key insertion
  // order differs (top-level AND inside nested objects).
  const bodyA = {
    model: 'gpt-4o-mini',
    stream: true,
    messages: [
      { role: 'system', content: 'you are helpful' },
      { role: 'user', content: 'hi' },
    ],
    tools: [{ name: 't', description: 'x' }],
    temperature: 0.7,
    top_p: 1,
  }
  const bodyB = {
    top_p: 1,
    temperature: 0.7,
    tools: [{ description: 'x', name: 't' }],
    messages: [
      { content: 'you are helpful', role: 'system' },
      { content: 'hi', role: 'user' },
    ],
    stream: true,
    model: 'gpt-4o-mini',
  }
  test('two spread-merged request bodies produce identical stable bytes', () => {
    expect(stableStringify(bodyA)).toBe(stableStringify(bodyB))
  })
  test('calling stableStringify twice yields identical bytes (idempotent)', () => {
    expect(stableStringify(bodyA)).toBe(stableStringify(bodyA))
  })
})

View File

@@ -0,0 +1,199 @@
import { describe, expect, test } from 'bun:test'
import { sortKeysDeep, stableStringify } from './stableStringify'
/**
* Contract: `stableStringify(input)` must equal `JSON.stringify(input)`
* for every value where the latter is well-defined, except that object
* keys are emitted in lexicographic order at every depth. These tests
* focus on the native pre-processing semantics — `toJSON(key)` and
* primitive-wrapper unboxing — that the deep-sort path must preserve.
*/
// toJSON semantics: stableStringify must invoke toJSON(key) exactly the
// way native JSON.stringify does — same key argument, same
// drop-on-undefined behavior — before sorting keys.
describe('stableStringify — toJSON semantics', () => {
  test('Date at top level → ISO string', () => {
    const d = new Date('2024-01-02T03:04:05.678Z')
    expect(stableStringify(d)).toBe(JSON.stringify(d))
  })
  test('Date nested in object → ISO string + sorted keys', () => {
    const d = new Date('2024-01-02T03:04:05.678Z')
    const input = { z: 1, when: d, a: 'x' }
    // Build the expected string directly: keys sorted (a, when, z) and
    // the Date rendered as its JSON-quoted ISO string. (Previously this
    // was assembled via a double .replace() placeholder dance with
    // private-use characters, which obscured the expected value.)
    expect(stableStringify(input)).toBe(
      `{"a":"x","when":${JSON.stringify(d.toISOString())},"z":1}`,
    )
  })
  test('Date inside an array → each element converted', () => {
    const a = new Date('2024-01-02T03:04:05.678Z')
    const b = new Date('2025-06-07T08:09:10.111Z')
    const input = [a, b]
    expect(stableStringify(input)).toBe(JSON.stringify(input))
  })
  test('URL value serializes via URL.prototype.toJSON', () => {
    const u = new URL('https://example.com/path?q=1')
    expect(stableStringify(u)).toBe(JSON.stringify(u))
    expect(stableStringify({ url: u })).toBe(JSON.stringify({ url: u }))
  })
  test('custom class with toJSON returning a plain object → keys sorted', () => {
    class Thing {
      toJSON() {
        return { z: 1, a: 2, m: 3 }
      }
    }
    const out = stableStringify(new Thing())
    expect(out).toBe('{"a":2,"m":3,"z":1}')
  })
  test('toJSON(key) receives the property name for object values', () => {
    const seen: string[] = []
    class Trace {
      toJSON(k: string) {
        seen.push(k)
        return k
      }
    }
    const t = new Trace()
    stableStringify({ alpha: t, beta: t })
    // Object keys are sorted, so toJSON is invoked alpha-first.
    expect(seen).toEqual(['alpha', 'beta'])
  })
  test('toJSON(key) receives the array index as a string for array elements', () => {
    const seen: string[] = []
    class Trace {
      toJSON(k: string) {
        seen.push(k)
        return k
      }
    }
    const t = new Trace()
    stableStringify([t, t, t])
    expect(seen).toEqual(['0', '1', '2'])
  })
  test('toJSON(key) receives empty string at top level', () => {
    let captured: string | undefined
    class Trace {
      toJSON(k: string) {
        captured = k
        return 'ok'
      }
    }
    stableStringify(new Trace())
    expect(captured).toBe('')
  })
  test('toJSON returning undefined drops the property (matches native)', () => {
    class Hidden {
      toJSON() {
        return undefined
      }
    }
    const input = { a: 1, gone: new Hidden(), b: 2 }
    expect(stableStringify(input)).toBe(JSON.stringify(input))
    expect(stableStringify(input)).toBe('{"a":1,"b":2}')
  })
  test('nested mix: object with a Date field and a regular field → keys sorted, Date as ISO', () => {
    const d = new Date('2024-01-02T03:04:05.678Z')
    const input = { z: { when: d, a: 1 }, a: 'first' }
    expect(stableStringify(input)).toBe(
      `{"a":"first","z":{"a":1,"when":${JSON.stringify(d.toISOString())}}}`,
    )
  })
})
// Boxed primitive wrappers must be unboxed before serialization, the
// same way native JSON.stringify treats them.
describe('stableStringify — primitive wrapper unboxing', () => {
  test('new Number at top level → numeric primitive', () => {
    const boxed = new Number(42)
    expect(stableStringify(boxed)).toBe(JSON.stringify(boxed))
    expect(stableStringify(boxed)).toBe('42')
  })
  test('new String at top level → string primitive', () => {
    const boxed = new String('hello')
    expect(stableStringify(boxed)).toBe(JSON.stringify(boxed))
    expect(stableStringify(boxed)).toBe('"hello"')
  })
  test('new Boolean at top level → boolean primitive', () => {
    const boxed = new Boolean(true)
    expect(stableStringify(boxed)).toBe(JSON.stringify(boxed))
    expect(stableStringify(boxed)).toBe('true')
  })
  test('new Boolean(false) at top level → false', () => {
    const boxed = new Boolean(false)
    expect(stableStringify(boxed)).toBe(JSON.stringify(boxed))
    expect(stableStringify(boxed)).toBe('false')
  })
  test('boxed wrappers as object values → primitives + sorted keys', () => {
    const input = {
      z: new Number(1),
      a: new String('x'),
      m: new Boolean(false),
    }
    expect(stableStringify(input)).toBe('{"a":"x","m":false,"z":1}')
    // Native form: same primitive shape (without sort guarantee).
    const native = JSON.parse(JSON.stringify(input))
    expect(JSON.parse(stableStringify(input))).toEqual(native)
  })
})
// Cycle handling must match native JSON.stringify: true cycles throw
// TypeError, while DAGs (shared references without a back-edge) do not.
describe('stableStringify — cycles vs DAGs', () => {
  test('top-level cycle throws TypeError (regression guard)', () => {
    const obj: Record<string, unknown> = { a: 1 }
    obj.self = obj
    expect(() => stableStringify(obj)).toThrow(TypeError)
  })
  test('deep cycle throws TypeError', () => {
    const a: Record<string, unknown> = { name: 'a' }
    const b: Record<string, unknown> = { name: 'b' }
    a.next = b
    b.back = a
    expect(() => stableStringify(a)).toThrow(TypeError)
  })
  test('toJSON returning an ancestor still triggers the cycle check', () => {
    // The cycle must be detected on the POST-toJSON value, not the raw one.
    type Node = { name: string; child?: { toJSON(): Node } }
    const parent: Node = { name: 'parent' }
    parent.child = {
      toJSON() {
        return parent
      },
    }
    expect(() => stableStringify(parent)).toThrow(TypeError)
  })
  test('DAG (same object referenced twice via different keys) does NOT throw', () => {
    const shared = { v: 1 }
    const input = { left: shared, right: shared }
    expect(() => stableStringify(input)).not.toThrow()
    expect(stableStringify(input)).toBe('{"left":{"v":1},"right":{"v":1}}')
  })
  test('DAG of arrays does NOT throw', () => {
    const shared = [1, 2, 3]
    const input = { a: shared, b: shared }
    expect(() => stableStringify(input)).not.toThrow()
    expect(stableStringify(input)).toBe('{"a":[1,2,3],"b":[1,2,3]}')
  })
})
// sortKeysDeep shares deepSort with stableStringify, so it must apply
// the same toJSON conversion and wrapper unboxing.
describe('sortKeysDeep — same toJSON/unbox semantics', () => {
  test('returns the post-toJSON, post-unbox sorted shape', () => {
    const stamp = new Date('2024-01-02T03:04:05.678Z')
    const input = { z: 1, a: new Number(7), when: stamp }
    const out = sortKeysDeep(input) as Record<string, unknown>
    expect(out).toEqual({ a: 7, when: stamp.toISOString(), z: 1 })
    // Key order in the returned object is lexicographic.
    expect(Object.keys(out)).toEqual(['a', 'when', 'z'])
  })
})

View File

@@ -0,0 +1,132 @@
/**
* Deterministic JSON serialization.
*
* WHY: OpenAI / Kimi / DeepSeek / Codex all use **implicit prefix caching**
* — the server hashes the request prefix and reuses cached reasoning if
* the bytes match exactly. Even a trivial key-order difference between
* two otherwise-identical requests invalidates the hash and forces a
* full re-parse.
*
* This is also a pre-requisite for Anthropic / Bedrock / Vertex
* `cache_control` breakpoints: ephemeral cache entries match on exact
* content, so a re-ordered object literal busts the breakpoint.
*
* `JSON.stringify` is nondeterministic across engines and across
* successive iterations when objects carry keys added at different
* times (V8 preserves insertion order, which is the common failure
* mode when building a body from spread-merged configs).
*
* This helper recursively sorts object keys. Arrays preserve order
* (element order IS semantically significant in message/content arrays).
*
* Complements `sortKeysDeep` in src/services/remoteManagedSettings and
* src/services/policyLimits. Those two are INTENTIONALLY separate:
* - remoteManagedSettings: matches Python `json.dumps(sort_keys=True)`
* byte-for-byte to validate server-computed checksums. Must NOT
* drop undefined (Python preserves null).
* - policyLimits: uses `localeCompare` (keeps legacy behavior; locale-
* sensitive but stable for a given runtime).
* - this module (stableStringify): byte-identity for API body caching.
* Drops undefined to match `JSON.stringify` — the openaiShim/codexShim
* body is always downstream of `JSON.stringify` semantics.
* Do not consolidate without auditing the 3 callers — each has a
* different server-compat contract.
*/
/**
* Returns a byte-stable JSON string representation.
* - Object keys are emitted in lexicographic order at every depth.
* - Array element order is preserved.
* - Undefined values are dropped (matching `JSON.stringify`).
* - Indentation matches the `space` argument (0 by default → compact).
*
* Native `JSON.stringify` pre-processing is preserved before sorting:
* - `toJSON(key)` is invoked on objects that define it (own or
* inherited — covers `Date`, `URL`, and any user class). The `key`
* argument is the property name for nested object values, the array
* index as a string for array elements, and `''` for the top-level
* call, matching native semantics.
* - Boxed primitive wrappers (`new Number(...)`, `new String(...)`,
* `new Boolean(...)`) are unboxed to their primitive form.
* Both happen BEFORE the array/object branches dispatch, so the value
* actually walked is the post-conversion form. If `toJSON` returns
* `undefined`, the value is dropped from its parent (matching native
* `JSON.stringify`).
*
* Single-pass: `deepSort` walks the (possibly converted) value tree
* once, building a sorted clone. A `WeakSet` of ancestors tracks the
* current path through the object graph so that circular references
* throw `TypeError` (same contract as native `JSON.stringify`). The
* cycle check runs on the post-`toJSON` value, so a `toJSON` impl that
* returns an ancestor still throws. Ancestors are always removed in a
* `finally` block when unwinding out of each object branch (even on
* exception), so DAG inputs — where the same object is reachable via
* multiple keys — are handled correctly and do not throw.
*/
export function stableStringify(value: unknown, space?: number): string {
  // Walk once to build a key-sorted clone ('' is the top-level toJSON
  // key, matching native semantics), then let the native serializer
  // handle quoting, escaping, and indentation.
  const sortedClone = deepSort(value, new WeakSet(), '')
  return JSON.stringify(sortedClone, null, space)
}
/**
* Returns a deep-sorted clone of the input: object keys lexicographic
* at every depth, arrays preserved. Useful when callers need to feed
* the sorted shape into a downstream serializer (e.g., when they must
* call `JSON.stringify` with a custom spacing or replacer).
*
* Applies the same `toJSON(key)` invocation and primitive-wrapper
* unboxing as `stableStringify`, so the returned shape mirrors what
* native `JSON.stringify` would have walked.
*/
export function sortKeysDeep<T>(value: T): T {
  // Fresh ancestor set per call; '' is the top-level toJSON key,
  // matching native JSON.stringify semantics.
  const ancestors = new WeakSet<object>()
  return deepSort(value, ancestors, '') as T
}
/**
 * Core recursive walk shared by `stableStringify` and `sortKeysDeep`.
 * Applies native `JSON.stringify` pre-processing (toJSON, wrapper
 * unboxing), then returns a clone with object keys sorted and array
 * order preserved. Throws `TypeError` on circular structures.
 *
 * FIX: cycle detection now covers arrays as well as plain objects.
 * Previously only plain objects were added to `ancestors`, so a
 * self-referential ARRAY (e.g. `a = []; a.push(a)`) recursed forever in
 * the `Array.isArray` branch and crashed with a stack-overflow
 * RangeError instead of the TypeError that native `JSON.stringify`
 * throws for the same input.
 */
function deepSort(
  value: unknown,
  ancestors: WeakSet<object>,
  key: string,
): unknown {
  // Step 1: invoke toJSON(key) if present — matches native pre-processing.
  if (
    value !== null &&
    typeof value === 'object' &&
    typeof (value as { toJSON?: unknown }).toJSON === 'function'
  ) {
    value = (value as { toJSON: (k: string) => unknown }).toJSON(key)
  }
  // Step 2: unbox primitive wrappers so e.g. new Boolean(false) is not
  // truthy-coerced.
  if (value instanceof Number) value = Number(value)
  else if (value instanceof String) value = String(value)
  else if (value instanceof Boolean) value = Boolean(value.valueOf())
  // Step 3: primitives short-circuit (post-toJSON the value may now be one).
  if (value === null || typeof value !== 'object') return value
  // Step 4: cycle check on the post-toJSON value — for BOTH arrays and
  // plain objects, so cyclic arrays throw TypeError instead of blowing
  // the stack. A toJSON impl that returns an ancestor still throws.
  if (ancestors.has(value as object)) {
    throw new TypeError('Converting circular structure to JSON')
  }
  ancestors.add(value as object)
  try {
    // Step 5: arrays — element key is the index as a string; order preserved.
    if (Array.isArray(value)) {
      return value.map((v, i) => deepSort(v, ancestors, String(i)))
    }
    // Step 6: plain objects — rebuild with keys in lexicographic order.
    const sorted: Record<string, unknown> = {}
    for (const k of Object.keys(value as Record<string, unknown>).sort()) {
      const child = deepSort(
        (value as Record<string, unknown>)[k],
        ancestors,
        k,
      )
      // Drop undefined children (including toJSON-returned undefined),
      // matching native property-drop behavior.
      if (child === undefined) continue
      sorted[k] = child
    }
    return sorted
  } finally {
    // Always unwind — even on exception — so DAGs (the same object
    // reachable via multiple keys) are not misreported as cycles.
    ancestors.delete(value as object)
  }
}