Merge pull request #135 from auriti/fix/shim-reliability-and-protocol-compliance

fix: shim reliability and protocol compliance overhaul
This commit is contained in:
Kevin Codex
2026-04-02 21:15:44 +08:00
committed by GitHub
2 changed files with 52 additions and 21 deletions

View File

@@ -1,3 +1,4 @@
+import { APIError } from '@anthropic-ai/sdk'
 import type {
   ResolvedCodexCredentials,
   ResolvedProviderRequest,
@@ -234,7 +235,10 @@ export function convertAnthropicMessagesToResponsesInput(
       items.push({
         type: 'function_call_output',
         call_id: callId,
-        output: convertToolResultToText(toolResult.content),
+        output: (() => {
+          const out = convertToolResultToText(toolResult.content)
+          return toolResult.is_error ? `Error: ${out}` : out
+        })(),
       })
     }
@@ -458,6 +462,7 @@ function convertToolChoice(toolChoice: unknown): unknown {
   if (!choice?.type) return undefined
   if (choice.type === 'auto') return 'auto'
   if (choice.type === 'any') return 'required'
+  if (choice.type === 'none') return 'none'
   if (choice.type === 'tool' && choice.name) {
     return {
       type: 'function',
@@ -558,7 +563,13 @@ export async function performCodexRequest(options: {
   if (!response.ok) {
     const errorBody = await response.text().catch(() => 'unknown error')
-    throw new Error(`Codex API error ${response.status}: ${errorBody}`)
+    let errorResponse: object | undefined
+    try { errorResponse = JSON.parse(errorBody) } catch { /* raw text */ }
+    throw APIError.generate(
+      response.status, errorResponse,
+      `Codex API error ${response.status}: ${errorBody}`,
+      response.headers as unknown as Record<string, string>,
+    )
   }
   return response
@@ -638,11 +649,9 @@ export async function collectCodexCompletedResponse(
   for await (const event of readSseEvents(response)) {
     if (event.event === 'response.failed') {
-      throw new Error(
-        event.data?.response?.error?.message ??
-          event.data?.error?.message ??
-          'Codex response failed',
-      )
+      const msg = event.data?.response?.error?.message ??
+        event.data?.error?.message ?? 'Codex response failed'
+      throw APIError.generate(500, undefined, msg, {} as Record<string, string>)
     }
     if (
@@ -655,7 +664,10 @@ export async function collectCodexCompletedResponse(
   }
   if (!completedResponse) {
-    throw new Error('Codex response ended without a completed payload')
+    throw APIError.generate(
+      500, undefined, 'Codex response ended without a completed payload',
+      {} as Record<string, string>,
+    )
   }
   return completedResponse
@@ -811,11 +823,9 @@ export async function* codexStreamToAnthropic(
     }
     if (event.event === 'response.failed') {
-      throw new Error(
-        payload?.response?.error?.message ??
-          payload?.error?.message ??
-          'Codex response failed',
-      )
+      const msg = payload?.response?.error?.message ??
+        payload?.error?.message ?? 'Codex response failed'
+      throw APIError.generate(500, undefined, msg, {} as Record<string, string>)
     }
   }

View File

@@ -21,6 +21,7 @@
  * OPENAI_MODEL — optional; use github:copilot or openai/gpt-4.1 style IDs
  */
+import { APIError } from '@anthropic-ai/sdk'
 import { isEnvTruthy } from '../../utils/envUtils.js'
 import { hydrateGithubModelsTokenFromSecureStorage } from '../../utils/githubModelsCredentials.js'
 import {
@@ -33,6 +34,7 @@ import {
   type ShimCreateParams,
 } from './codexShim.js'
 import {
+  isLocalProviderUrl,
   resolveCodexApiCredentials,
   resolveProviderRequest,
 } from './providerConfig.js'
@@ -214,7 +216,10 @@ function convertMessages(
     const assistantMsg: OpenAIMessage = {
       role: 'assistant',
-      content: convertContentBlocks(textContent) as string,
+      content: (() => {
+        const c = convertContentBlocks(textContent)
+        return typeof c === 'string' ? c : Array.isArray(c) ? c.map((p: { text?: string }) => p.text ?? '').join('') : ''
+      })(),
     }
     if (toolUses.length > 0) {
@@ -243,7 +248,10 @@ function convertMessages(
     } else {
       result.push({
         role: 'assistant',
-        content: convertContentBlocks(content) as string,
+        content: (() => {
+          const c = convertContentBlocks(content)
+          return typeof c === 'string' ? c : Array.isArray(c) ? c.map((p: { text?: string }) => p.text ?? '').join('') : ''
+        })(),
       })
     }
   }
@@ -618,7 +626,8 @@ async function* openaiStreamToAnthropic(
       if (
         !hasEmittedFinalUsage &&
         chunkUsage &&
-        (chunk.choices?.length ?? 0) === 0
+        (chunk.choices?.length ?? 0) === 0 &&
+        lastStopReason !== null
       ) {
         yield {
           type: 'message_delta',
@@ -667,9 +676,12 @@ class OpenAIShimMessages {
   ) {
     const self = this
+    let httpResponse: Response | undefined
     const promise = (async () => {
       const request = resolveProviderRequest({ model: params.model })
       const response = await self._doRequest(request, params, options)
+      httpResponse = response
       if (params.stream) {
         return new OpenAIShimStream(
@@ -696,8 +708,9 @@ class OpenAIShimMessages {
     const data = await promise
     return {
       data,
-      response: new Response(),
-      request_id: makeMessageId(),
+      response: httpResponse ?? new Response(),
+      request_id:
+        httpResponse?.headers.get('x-request-id') ?? makeMessageId(),
     }
   }
@@ -778,7 +791,7 @@ class OpenAIShimMessages {
       body.max_completion_tokens = maxCompletionTokensValue
     }
-    if (params.stream) {
+    if (params.stream && !isLocalProviderUrl(request.baseUrl)) {
       body.stream_options = { include_usage: true }
     }
@@ -894,12 +907,20 @@ class OpenAIShimMessages {
     const errorBody = await response.text().catch(() => 'unknown error')
     const rateHint =
       isGithub && response.status === 429 ? formatRetryAfterHint(response) : ''
-    throw new Error(
+    let errorResponse: object | undefined
+    try { errorResponse = JSON.parse(errorBody) } catch { /* raw text */ }
+    throw APIError.generate(
+      response.status,
+      errorResponse,
       `OpenAI API error ${response.status}: ${errorBody}${rateHint}`,
+      response.headers as unknown as Record<string, string>,
     )
   }
-  throw new Error('OpenAI shim: request loop exited unexpectedly')
+  throw APIError.generate(
+    500, undefined, 'OpenAI shim: request loop exited unexpectedly',
+    {} as Record<string, string>,
+  )
 }

 private _convertNonStreamingResponse(