fix: normalize malformed Bash tool arguments from OpenAI-compatible providers

This commit is contained in:
gnanam1990
2026-04-05 16:35:41 +05:30
parent 39f3b2babd
commit 91df124064
3 changed files with 1055 additions and 9 deletions

View File

@@ -500,6 +500,922 @@ test('preserves Gemini tool call extra_content from streaming chunks', async ()
}) })
}) })
// Non-streaming: a bare shell string ('pwd') in Bash tool_call arguments is
// wrapped into the structured { command } input, and the 'tool_calls'
// finish_reason maps to an Anthropic 'tool_use' stop reason.
test('normalizes plain string Bash tool arguments from OpenAI-compatible responses', async () => {
  // Stub fetch with a canned chat-completions payload whose tool arguments
  // are a plain string instead of serialized JSON.
  globalThis.fetch = (async (_input, _init) => {
    return new Response(
      JSON.stringify({
        id: 'chatcmpl-1',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            message: {
              role: 'assistant',
              tool_calls: [
                {
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Malformed: should be a JSON object string.
                    arguments: 'pwd',
                  },
                },
              ],
            },
            finish_reason: 'tool_calls',
          },
        ],
        usage: {
          prompt_tokens: 12,
          completion_tokens: 4,
          total_tokens: 16,
        },
      }),
      {
        headers: {
          'Content-Type': 'application/json',
        },
      },
    )
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const message = await client.beta.messages.create({
    model: 'google/gemini-3.1-pro-preview',
    system: 'test system',
    messages: [{ role: 'user', content: 'Use Bash' }],
    max_tokens: 64,
    stream: false,
  }) as {
    stop_reason?: string
    content?: Array<Record<string, unknown>>
  }
  expect(message.stop_reason).toBe('tool_use')
  // The plain string must surface as a structured { command } input.
  expect(message.content).toEqual([
    {
      type: 'tool_use',
      id: 'function-call-1',
      name: 'Bash',
      input: { command: 'pwd' },
    },
  ])
})
// Non-streaming: arguments that parse as a non-object JSON literal ('123'
// parses to the number 123) still become a Bash { command } input, keeping
// the raw text as a string rather than the parsed number.
test('normalizes Bash tool arguments that are valid JSON literals', async () => {
  globalThis.fetch = (async (_input, _init) => {
    return new Response(
      JSON.stringify({
        id: 'chatcmpl-1',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            message: {
              role: 'assistant',
              tool_calls: [
                {
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Valid JSON, but a number literal — not an object.
                    arguments: '123',
                  },
                },
              ],
            },
            finish_reason: 'tool_calls',
          },
        ],
        usage: {
          prompt_tokens: 12,
          completion_tokens: 4,
          total_tokens: 16,
        },
      }),
      {
        headers: {
          'Content-Type': 'application/json',
        },
      },
    )
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const message = await client.beta.messages.create({
    model: 'google/gemini-3.1-pro-preview',
    system: 'test system',
    messages: [{ role: 'user', content: 'Use Bash' }],
    max_tokens: 64,
    stream: false,
  }) as {
    content?: Array<Record<string, unknown>>
  }
  // The command keeps the raw argument text '123' as a string.
  expect(message.content).toEqual([
    {
      type: 'tool_use',
      id: 'function-call-1',
      name: 'Bash',
      input: { command: '123' },
    },
  ])
})
// Streaming: a chunk carrying a plain-string command ('pwd') must be
// buffered and re-emitted at stop as a single structured JSON input delta.
test('normalizes plain string Bash tool arguments in streaming responses', async () => {
  // Stub fetch with an SSE stream: one tool_call chunk, then a finish chunk.
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    arguments: 'pwd',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  // Drain the translated Anthropic-format event stream.
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  // Reassemble the tool input from all input_json_delta fragments.
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  expect(normalizedInput).toBe('{"command":"pwd"}')
})
// Streaming: the first tool_call chunk carries empty arguments, with the
// plain-string command arriving only in a later delta — normalization must
// act on the accumulated buffer, not just the opening chunk.
test('normalizes plain string Bash tool arguments when streaming starts with an empty chunk', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // First chunk has no argument text yet.
                    arguments: '',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              tool_calls: [
                {
                  index: 0,
                  type: 'function',
                  function: {
                    // The actual command arrives in a continuation delta.
                    arguments: 'pwd',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  expect(normalizedInput).toBe('{"command":"pwd"}')
})
// Streaming: a leading whitespace-only chunk must not be discarded — the
// normalized command preserves it verbatim (' pwd'), proving buffering is
// byte-faithful rather than trimming.
test('normalizes plain string Bash tool arguments when streaming starts with whitespace', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Whitespace-only opening fragment.
                    arguments: ' ',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              tool_calls: [
                {
                  index: 0,
                  type: 'function',
                  function: {
                    arguments: 'pwd',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  // Leading space survives into the structured command.
  expect(normalizedInput).toBe('{"command":" pwd"}')
})
// Streaming: a shell command beginning with '[' (test-bracket syntax) looks
// superficially like the start of a JSON array — it must still be treated
// as a plain string and wrapped into { command }.
test('normalizes streaming Bash arguments that begin with bracket syntax', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Shell '[' test syntax, not a JSON array.
                    arguments: '[ -f package.json ] && pwd',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  expect(normalizedInput).toBe('{"command":"[ -f package.json ] && pwd"}')
})
// Streaming: the first fragment is just '{' — indistinguishable from the
// start of JSON until more arrives. The full buffer '{ pwd; }' is a shell
// brace group (no quoted key follows the brace), so it must be wrapped as
// a plain command rather than parsed or "repaired" as JSON.
test('normalizes streaming Bash arguments when the first chunk is only an opening brace', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    // Ambiguous opening fragment.
                    name: 'Bash',
                    arguments: '{',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              tool_calls: [
                {
                  index: 0,
                  type: 'function',
                  function: {
                    // Completes a shell brace group, not JSON.
                    arguments: ' pwd; }',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  expect(normalizedInput).toBe('{"command":"{ pwd; }"}')
})
// Streaming: arguments that are genuine structured JSON but truncated before
// the closing brace ('{"command":"pwd"') are repaired to valid JSON when the
// stream finishes with 'tool_calls'.
test('repairs truncated structured Bash JSON in streaming responses', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Missing the final '}'.
                    arguments: '{"command":"pwd"',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const normalizedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  expect(normalizedInput).toBe('{"command":"pwd"}')
})
// Streaming: when the stream is cut off by the token limit
// (finish_reason 'length'), the command is known-incomplete, so the buffer
// must pass through untouched instead of being wrapped into { command }.
test('does not normalize incomplete streamed Bash commands when finish_reason is length', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Truncated mid-flag by the token limit.
                    arguments: 'rg --fi',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            // Length stop, not a completed tool call.
            finish_reason: 'length',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const streamedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  // Raw buffer emitted verbatim — no wrapping, no repair.
  expect(streamedInput).toBe('rg --fi')
})
// Streaming: truncated JSON that would not contain a string `command` even
// after repair ('{"cwd":"/tmp"') must be left untouched — repair is only
// attempted for the Bash { command } shape.
test('does not repair truncated Bash objects that do not contain command', async () => {
  globalThis.fetch = (async (_input, _init) => {
    const chunks = makeStreamChunks([
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {
              role: 'assistant',
              tool_calls: [
                {
                  index: 0,
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'Bash',
                    // Truncated, and lacks a `command` key entirely.
                    arguments: '{"cwd":"/tmp"',
                  },
                },
              ],
            },
            finish_reason: null,
          },
        ],
      },
      {
        id: 'chatcmpl-1',
        object: 'chat.completion.chunk',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            index: 0,
            delta: {},
            finish_reason: 'tool_calls',
          },
        ],
      },
    ])
    return makeSseResponse(chunks)
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const result = await client.beta.messages
    .create({
      model: 'google/gemini-3.1-pro-preview',
      system: 'test system',
      messages: [{ role: 'user', content: 'Use Bash' }],
      max_tokens: 64,
      stream: true,
    })
    .withResponse()
  const events: Array<Record<string, unknown>> = []
  for await (const event of result.data) {
    events.push(event)
  }
  const streamedInput = events
    .filter(
      event =>
        event.type === 'content_block_delta' &&
        typeof event.delta === 'object' &&
        event.delta !== null &&
        (event.delta as Record<string, unknown>).type === 'input_json_delta',
    )
    .map(event => (event.delta as Record<string, unknown>).partial_json)
    .join('')
  // Passed through exactly as received.
  expect(streamedInput).toBe('{"cwd":"/tmp"')
})
// Non-streaming: tools without a known string-argument field must not get
// Bash's { command } wrapping — a non-JSON string is preserved under `raw`.
test('preserves raw input for unknown plain string tool arguments', async () => {
  globalThis.fetch = (async (_input, _init) => {
    return new Response(
      JSON.stringify({
        id: 'chatcmpl-1',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            message: {
              role: 'assistant',
              tool_calls: [
                {
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    // Not in the string-argument tool map.
                    name: 'UnknownTool',
                    arguments: 'pwd',
                  },
                },
              ],
            },
            finish_reason: 'tool_calls',
          },
        ],
        usage: {
          prompt_tokens: 12,
          completion_tokens: 4,
          total_tokens: 16,
        },
      }),
      {
        headers: {
          'Content-Type': 'application/json',
        },
      },
    )
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const message = await client.beta.messages.create({
    model: 'google/gemini-3.1-pro-preview',
    system: 'test system',
    messages: [{ role: 'user', content: 'Use tool' }],
    max_tokens: 64,
    stream: false,
  }) as {
    content?: Array<Record<string, unknown>>
  }
  // Unknown tool + unparseable arguments → { raw } fallback.
  expect(message.content).toEqual([
    {
      type: 'tool_use',
      id: 'function-call-1',
      name: 'UnknownTool',
      input: { raw: 'pwd' },
    },
  ])
})
// Non-streaming: for an unknown tool whose arguments are a valid JSON string
// literal ('"pwd"'), the parsed string itself becomes the input — no { raw }
// wrapper and no Bash-style { command } wrapping.
test('preserves parsed string input for unknown JSON string tool arguments', async () => {
  globalThis.fetch = (async (_input, _init) => {
    return new Response(
      JSON.stringify({
        id: 'chatcmpl-1',
        model: 'google/gemini-3.1-pro-preview',
        choices: [
          {
            message: {
              role: 'assistant',
              tool_calls: [
                {
                  id: 'function-call-1',
                  type: 'function',
                  function: {
                    name: 'UnknownTool',
                    // A JSON string literal, quotes included.
                    arguments: '"pwd"',
                  },
                },
              ],
            },
            finish_reason: 'tool_calls',
          },
        ],
        usage: {
          prompt_tokens: 12,
          completion_tokens: 4,
          total_tokens: 16,
        },
      }),
      {
        headers: {
          'Content-Type': 'application/json',
        },
      },
    )
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  const message = await client.beta.messages.create({
    model: 'google/gemini-3.1-pro-preview',
    system: 'test system',
    messages: [{ role: 'user', content: 'Use tool' }],
    max_tokens: 64,
    stream: false,
  }) as {
    content?: Array<Record<string, unknown>>
  }
  // The input is the parsed string, not an object.
  expect(message.content).toEqual([
    {
      type: 'tool_use',
      id: 'function-call-1',
      name: 'UnknownTool',
      input: 'pwd',
    },
  ])
})
test('sanitizes malformed MCP tool schemas before sending them to OpenAI', async () => { test('sanitizes malformed MCP tool schemas before sending them to OpenAI', async () => {
let requestBody: Record<string, unknown> | undefined let requestBody: Record<string, unknown> | undefined

View File

@@ -42,6 +42,9 @@ import {
} from './providerConfig.js' } from './providerConfig.js'
import { sanitizeSchemaForOpenAICompat } from '../../utils/schemaSanitizer.js' import { sanitizeSchemaForOpenAICompat } from '../../utils/schemaSanitizer.js'
import { redactSecretValueForDisplay } from '../../utils/providerProfile.js' import { redactSecretValueForDisplay } from '../../utils/providerProfile.js'
import {
normalizeToolArguments,
} from './toolArgumentNormalization.js'
type SecretValueSource = Partial<{ type SecretValueSource = Partial<{
OPENAI_API_KEY: string OPENAI_API_KEY: string
@@ -476,6 +479,43 @@ function convertChunkUsage(
} }
} }
/**
 * Attempt to recover a Bash tool-arguments object from possibly truncated
 * JSON text.
 *
 * Returns `raw` unchanged when it already parses to a non-array object with
 * a string `command` property, a repaired JSON string when `raw` is such an
 * object truncated mid-stream, or `null` when no repair yields that shape.
 *
 * Instead of brute-forcing a fixed list of closing suffixes, this scans the
 * text once to derive the unique suffix of closers ('"', ']', '}') that
 * would terminate every structure still open at the truncation point; that
 * candidate covers every case the old suffix list did, plus deeper nesting.
 */
function repairPossiblyTruncatedObjectJson(raw: string): string | null {
  // Shape check shared by the direct-parse and repaired-parse paths.
  const hasStringCommand = (value: unknown): boolean =>
    typeof value === 'object' &&
    value !== null &&
    !Array.isArray(value) &&
    typeof (value as Record<string, unknown>).command === 'string'
  try {
    return hasStringCommand(JSON.parse(raw)) ? raw : null
  } catch {
    // Track open containers and string/escape state across the whole text.
    const openClosers: Array<'}' | ']'> = []
    let inString = false
    let escaped = false
    for (const ch of raw) {
      if (inString) {
        if (escaped) escaped = false
        else if (ch === '\\') escaped = true
        else if (ch === '"') inString = false
        continue
      }
      if (ch === '"') inString = true
      else if (ch === '{') openClosers.push('}')
      else if (ch === '[') openClosers.push(']')
      else if (ch === '}' || ch === ']') openClosers.pop()
    }
    // A trailing unconsumed backslash cannot be closed unambiguously.
    if (escaped) return null
    const repaired =
      raw + (inString ? '"' : '') + openClosers.reverse().join('')
    try {
      return hasStringCommand(JSON.parse(repaired)) ? repaired : null
    } catch {
      // Truncation fell somewhere no suffix of closers can fix
      // (e.g. mid-key, or after a ':' with no value).
      return null
    }
  }
}
/** /**
* Async generator that transforms an OpenAI SSE stream into * Async generator that transforms an OpenAI SSE stream into
* Anthropic-format BetaRawMessageStreamEvent objects. * Anthropic-format BetaRawMessageStreamEvent objects.
@@ -486,7 +526,16 @@ async function* openaiStreamToAnthropic(
): AsyncGenerator<AnthropicStreamEvent> { ): AsyncGenerator<AnthropicStreamEvent> {
const messageId = makeMessageId() const messageId = makeMessageId()
let contentBlockIndex = 0 let contentBlockIndex = 0
const activeToolCalls = new Map<number, { id: string; name: string; index: number; jsonBuffer: string }>() const activeToolCalls = new Map<
number,
{
id: string
name: string
index: number
jsonBuffer: string
normalizeAtStop: boolean
}
>()
let hasEmittedContentStart = false let hasEmittedContentStart = false
let lastStopReason: 'tool_use' | 'max_tokens' | 'end_turn' | null = null let lastStopReason: 'tool_use' | 'max_tokens' | 'end_turn' | null = null
let hasEmittedFinalUsage = false let hasEmittedFinalUsage = false
@@ -577,11 +626,14 @@ async function* openaiStreamToAnthropic(
} }
const toolBlockIndex = contentBlockIndex const toolBlockIndex = contentBlockIndex
const initialArguments = tc.function.arguments ?? ''
const normalizeAtStop = tc.function.name === 'Bash'
activeToolCalls.set(tc.index, { activeToolCalls.set(tc.index, {
id: tc.id, id: tc.id,
name: tc.function.name, name: tc.function.name,
index: toolBlockIndex, index: toolBlockIndex,
jsonBuffer: tc.function.arguments ?? '', jsonBuffer: initialArguments,
normalizeAtStop,
}) })
yield { yield {
@@ -598,7 +650,7 @@ async function* openaiStreamToAnthropic(
contentBlockIndex++ contentBlockIndex++
// Emit any initial arguments // Emit any initial arguments
if (tc.function.arguments) { if (tc.function.arguments && !normalizeAtStop) {
yield { yield {
type: 'content_block_delta', type: 'content_block_delta',
index: toolBlockIndex, index: toolBlockIndex,
@@ -615,6 +667,11 @@ async function* openaiStreamToAnthropic(
if (tc.function.arguments) { if (tc.function.arguments) {
active.jsonBuffer += tc.function.arguments active.jsonBuffer += tc.function.arguments
} }
if (active.normalizeAtStop) {
continue
}
yield { yield {
type: 'content_block_delta', type: 'content_block_delta',
index: active.index, index: active.index,
@@ -642,6 +699,33 @@ async function* openaiStreamToAnthropic(
} }
// Close active tool calls // Close active tool calls
for (const [, tc] of activeToolCalls) { for (const [, tc] of activeToolCalls) {
if (tc.normalizeAtStop) {
let partialJson = tc.jsonBuffer
if (choice.finish_reason === 'tool_calls') {
const repairedStructuredJson = repairPossiblyTruncatedObjectJson(
tc.jsonBuffer,
)
if (repairedStructuredJson) {
partialJson = repairedStructuredJson
} else if (!/^\s*\{\s*"/.test(tc.jsonBuffer)) {
partialJson = JSON.stringify(
normalizeToolArguments(tc.name, tc.jsonBuffer),
)
}
}
yield {
type: 'content_block_delta',
index: tc.index,
delta: {
type: 'input_json_delta',
partial_json: partialJson,
},
}
yield { type: 'content_block_stop', index: tc.index }
continue
}
let suffixToAdd = '' let suffixToAdd = ''
if (tc.jsonBuffer) { if (tc.jsonBuffer) {
try { try {
@@ -1087,12 +1171,10 @@ class OpenAIShimMessages {
if (choice?.message?.tool_calls) { if (choice?.message?.tool_calls) {
for (const tc of choice.message.tool_calls) { for (const tc of choice.message.tool_calls) {
let input: unknown const input = normalizeToolArguments(
try { tc.function.name,
input = JSON.parse(tc.function.arguments) tc.function.arguments,
} catch { )
input = { raw: tc.function.arguments }
}
content.push({ content.push({
type: 'tool_use', type: 'tool_use',
id: tc.id, id: tc.id,

View File

@@ -0,0 +1,48 @@
/**
 * Tools whose OpenAI-style `arguments` payload some loosely-compatible
 * providers emit as a bare string, mapped to the input field that string
 * should be wrapped into.
 */
const STRING_ARGUMENT_TOOL_FIELDS: Record<string, string> = {
  Bash: 'command',
}

/** Narrow `value` to a plain (non-null, non-array) object. */
function isRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === 'object' && value !== null && !Array.isArray(value)
}

/** Input field a plain-string argument wraps into, or null for unknown tools. */
function getPlainStringToolArgumentField(toolName: string): string | null {
  return STRING_ARGUMENT_TOOL_FIELDS[toolName] ?? null
}

/**
 * Wrap a plain-string argument as `{ [field]: value }` for tools listed in
 * STRING_ARGUMENT_TOOL_FIELDS; returns null for tools with no known field.
 */
function wrapPlainStringToolArguments(
  toolName: string,
  value: string,
): Record<string, string> | null {
  const field = getPlainStringToolArgumentField(toolName)
  if (!field) return null
  return { [field]: value }
}

/**
 * Normalize an OpenAI tool-call `arguments` string into a structured input.
 *
 * - undefined arguments → empty object
 * - JSON object → returned as-is
 * - JSON string literal → wrapped for known tools, otherwise the parsed string
 * - other JSON literals (number/array/null) → known tools keep the raw text
 *   as their string argument, unknown tools get the parsed value
 * - non-JSON text → wrapped for known tools, otherwise `{ raw }` fallback
 *
 * Note: the former `toolName === 'Bash'` branch duplicated the generic wrap
 * logic (Bash being the map's only entry); the unified form behaves
 * identically and extends automatically to any tool added to the map.
 */
export function normalizeToolArguments(
  toolName: string,
  rawArguments: string | undefined,
): unknown {
  if (rawArguments === undefined) return {}
  try {
    const parsed = JSON.parse(rawArguments)
    if (isRecord(parsed)) {
      return parsed
    }
    if (typeof parsed === 'string') {
      return wrapPlainStringToolArguments(toolName, parsed) ?? parsed
    }
    // Non-object, non-string literal: preserve the raw text for known tools
    // (e.g. Bash arguments '123' become { command: '123' }).
    return wrapPlainStringToolArguments(toolName, rawArguments) ?? parsed
  } catch {
    return (
      wrapPlainStringToolArguments(toolName, rawArguments) ?? { raw: rawArguments }
    )
  }
}