Add Codex plan/spark provider support

This commit is contained in:
vp
2026-04-01 10:44:35 +03:00
parent 2d7aa9c841
commit cbeed0f76f
13 changed files with 1560 additions and 117 deletions

View File

@@ -0,0 +1,172 @@
import { afterEach, describe, expect, test } from 'bun:test'
import { mkdtempSync, rmSync, writeFileSync } from 'node:fs'
import { join } from 'node:path'
import { tmpdir } from 'node:os'
import {
codexStreamToAnthropic,
convertAnthropicMessagesToResponsesInput,
convertCodexResponseToAnthropicMessage,
} from './codexShim.js'
import {
resolveCodexApiCredentials,
resolveProviderRequest,
} from './providerConfig.js'
// Directories created by createTempAuthJson; cleaned up after every test.
const tempDirs: string[] = []

afterEach(() => {
  // Drain the list so a failing test never leaks temp directories.
  for (let dir = tempDirs.pop(); dir !== undefined; dir = tempDirs.pop()) {
    rmSync(dir, { recursive: true, force: true })
  }
})

/** Writes `payload` as auth.json inside a fresh temp dir and returns the file path. */
function createTempAuthJson(payload: Record<string, unknown>): string {
  const dir = mkdtempSync(join(tmpdir(), 'openclaude-codex-'))
  tempDirs.push(dir)
  const authPath = join(dir, 'auth.json')
  writeFileSync(authPath, JSON.stringify(payload), 'utf8')
  return authPath
}

/** Feeds `responseText` through the Codex→Anthropic stream shim and returns the event types seen. */
async function collectStreamEventTypes(responseText: string): Promise<string[]> {
  const body = new ReadableStream({
    start(controller) {
      controller.enqueue(new TextEncoder().encode(responseText))
      controller.close()
    },
  })
  const seen: string[] = []
  for await (const event of codexStreamToAnthropic(new Response(body), 'gpt-5.4')) {
    seen.push(event.type)
  }
  return seen
}
describe('Codex provider config', () => {
  test('resolves codexplan alias to Codex transport with reasoning', () => {
    const { transport, resolvedModel, reasoning } = resolveProviderRequest({
      model: 'codexplan',
    })
    expect(transport).toBe('codex_responses')
    expect(resolvedModel).toBe('gpt-5.4')
    expect(reasoning).toEqual({ effort: 'high' })
  })

  test('loads Codex credentials from auth.json fallback', () => {
    // Point the resolver at a temp auth.json instead of ~/.codex.
    const env = {
      CODEX_AUTH_JSON_PATH: createTempAuthJson({
        tokens: {
          access_token: 'header.payload.signature',
          account_id: 'acct_test',
        },
      }),
    } as NodeJS.ProcessEnv
    const credentials = resolveCodexApiCredentials(env)
    expect(credentials.apiKey).toBe('header.payload.signature')
    expect(credentials.accountId).toBe('acct_test')
    expect(credentials.source).toBe('auth.json')
  })
})
// End-to-end checks for the Anthropic ⇄ Codex Responses translation layer.
describe('Codex request translation', () => {
  // An assistant tool_use plus the user's tool_result must become the
  // Responses input triple: assistant message, function_call (with the
  // fc_/call_ id pair derived from the tool_use id), function_call_output.
  test('converts assistant tool use and user tool result into Responses items', () => {
    const items = convertAnthropicMessagesToResponsesInput([
      {
        role: 'assistant',
        content: [
          { type: 'text', text: 'Working...' },
          { type: 'tool_use', id: 'call_123', name: 'search', input: { q: 'x' } },
        ],
      },
      {
        role: 'user',
        content: [
          { type: 'tool_result', tool_use_id: 'call_123', content: 'done' },
        ],
      },
    ])
    expect(items).toEqual([
      {
        type: 'message',
        role: 'assistant',
        content: [{ type: 'output_text', text: 'Working...' }],
      },
      {
        // 'fc_123' is derived from the 'call_123' tool_use id.
        type: 'function_call',
        id: 'fc_123',
        call_id: 'call_123',
        name: 'search',
        arguments: '{"q":"x"}',
      },
      {
        type: 'function_call_output',
        call_id: 'call_123',
        output: 'done',
      },
    ])
  })
  // A completed non-streaming response whose only output is a function_call
  // becomes an Anthropic message with a tool_use block, stop_reason
  // 'tool_use', and the JSON arguments string parsed into an object.
  test('converts completed Codex tool response into Anthropic message', () => {
    const message = convertCodexResponseToAnthropicMessage(
      {
        id: 'resp_1',
        model: 'gpt-5.3-codex-spark',
        output: [
          {
            type: 'function_call',
            id: 'fc_1',
            call_id: 'call_1',
            name: 'ping',
            arguments: '{"value":"ping"}',
          },
        ],
        usage: { input_tokens: 12, output_tokens: 4 },
      },
      'gpt-5.3-codex-spark',
    )
    expect(message.stop_reason).toBe('tool_use')
    expect(message.content).toEqual([
      {
        type: 'tool_use',
        id: 'call_1',
        name: 'ping',
        input: { value: 'ping' },
      },
    ])
  })
  // A minimal happy-path SSE transcript (item added → text part → delta →
  // item done → completed) must map onto the canonical Anthropic event order.
  test('translates Codex SSE text stream into Anthropic events', async () => {
    const responseText = [
      'event: response.output_item.added',
      'data: {"type":"response.output_item.added","item":{"id":"msg_1","type":"message","status":"in_progress","content":[],"role":"assistant"},"output_index":0,"sequence_number":0}',
      '',
      'event: response.content_part.added',
      'data: {"type":"response.content_part.added","content_index":0,"item_id":"msg_1","output_index":0,"part":{"type":"output_text","text":""},"sequence_number":1}',
      '',
      'event: response.output_text.delta',
      'data: {"type":"response.output_text.delta","content_index":0,"delta":"ok","item_id":"msg_1","output_index":0,"sequence_number":2}',
      '',
      'event: response.output_item.done',
      'data: {"type":"response.output_item.done","item":{"id":"msg_1","type":"message","status":"completed","content":[{"type":"output_text","text":"ok"}],"role":"assistant"},"output_index":0,"sequence_number":3}',
      '',
      'event: response.completed',
      'data: {"type":"response.completed","response":{"id":"resp_1","status":"completed","model":"gpt-5.4","output":[{"type":"message","role":"assistant","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":2,"output_tokens":1}},"sequence_number":4}',
      '',
    ].join('\n')
    const eventTypes = await collectStreamEventTypes(responseText)
    expect(eventTypes).toEqual([
      'message_start',
      'content_block_start',
      'content_block_delta',
      'content_block_stop',
      'message_delta',
      'message_stop',
    ])
  })
})

View File

@@ -0,0 +1,740 @@
import type {
ResolvedCodexCredentials,
ResolvedProviderRequest,
} from './providerConfig.js'
/** Anthropic-shaped token accounting attached to messages and deltas. */
export interface AnthropicUsage {
  input_tokens: number
  output_tokens: number
  // Part of the Anthropic wire shape; the Codex Responses API does not
  // report cache counters, so this shim always emits 0 for them.
  cache_creation_input_tokens: number
  cache_read_input_tokens: number
}
/**
 * Loose union of the Anthropic streaming events this shim emits
 * (message_start, content_block_*, message_delta, message_stop).
 * Optional fields are populated according to the event type.
 */
export interface AnthropicStreamEvent {
  type: string
  message?: Record<string, unknown>
  index?: number
  content_block?: Record<string, unknown>
  delta?: Record<string, unknown>
  usage?: Partial<AnthropicUsage>
}
/** Parameters accepted by the shim's messages.create(), mirroring the Anthropic SDK. */
export interface ShimCreateParams {
  model: string
  messages: Array<Record<string, unknown>>
  system?: unknown
  tools?: Array<Record<string, unknown>>
  max_tokens: number
  stream?: boolean
  temperature?: number
  top_p?: number
  tool_choice?: unknown
  metadata?: unknown
  // Forward-compat: unrecognized Anthropic params are carried but unused.
  [key: string]: unknown
}
/** One content part of a Responses API input message. */
type ResponsesInputPart =
  | { type: 'input_text'; text: string }
  | { type: 'output_text'; text: string }
  | { type: 'input_image'; image_url: string }
/** A single item of the Responses API `input` array. */
type ResponsesInputItem =
  | {
      type: 'message'
      role: 'user' | 'assistant'
      content: ResponsesInputPart[]
    }
  | {
      type: 'function_call'
      id: string
      call_id: string
      name: string
      arguments: string
    }
  | {
      type: 'function_call_output'
      call_id: string
      output: string
    }
/** Responses API function-tool definition derived from an Anthropic tool. */
type ResponsesTool = {
  type: 'function'
  name: string
  description: string
  parameters: Record<string, unknown>
  strict?: boolean
}
/** One parsed server-sent event from the Codex stream. */
type CodexSseEvent = {
  event: string
  data: Record<string, any>
}
/**
 * Builds a complete Anthropic usage record from a (possibly partial) Codex
 * usage payload. Cache counters are always zero — the Codex Responses API
 * does not report them.
 */
function makeUsage(usage?: {
  input_tokens?: number
  output_tokens?: number
}): AnthropicUsage {
  const source = usage ?? {}
  return {
    input_tokens: source.input_tokens ?? 0,
    output_tokens: source.output_tokens ?? 0,
    cache_creation_input_tokens: 0,
    cache_read_input_tokens: 0,
  }
}
/** Generates a unique-enough Anthropic message id (random suffix + timestamp). */
function makeMessageId(): string {
  const randomPart = Math.random().toString(36).slice(2)
  const timePart = Date.now().toString(36)
  return `msg_${randomPart}${timePart}`
}
/**
 * Maps an Anthropic tool_use id onto the Responses API id pair:
 * `id` (the "fc_…" function_call item id) and `callId` ("call_…").
 * Whichever prefix is present is kept and its twin derived; unprefixed
 * ids keep the raw value as callId. Blank input yields the "unknown" pair.
 */
function normalizeToolUseId(toolUseId: string | undefined): {
  id: string
  callId: string
} {
  const trimmed = (toolUseId ?? '').trim()
  if (trimmed === '') {
    return { id: 'fc_unknown', callId: 'call_unknown' }
  }
  if (trimmed.startsWith('call_')) {
    const suffix = trimmed.slice('call_'.length)
    return { id: `fc_${suffix}`, callId: trimmed }
  }
  if (trimmed.startsWith('fc_')) {
    const suffix = trimmed.slice('fc_'.length)
    return { id: trimmed, callId: `call_${suffix}` }
  }
  return { id: `fc_${trimmed}`, callId: trimmed }
}
/**
 * Flattens an Anthropic `system` value into one instructions string:
 * strings pass through, block arrays contribute the text of their text
 * blocks joined by blank lines, other values are stringified, and falsy
 * input yields ''.
 */
function convertSystemPrompt(system: unknown): string {
  if (!system) return ''
  if (typeof system === 'string') return system
  if (!Array.isArray(system)) return String(system)
  const pieces: string[] = []
  for (const block of system as Array<{ type?: string; text?: string }>) {
    pieces.push(block.type === 'text' ? (block.text ?? '') : '')
  }
  return pieces.join('\n\n')
}
/**
 * Renders an Anthropic tool_result `content` value as plain text for the
 * Responses `function_call_output` field. Blocks carrying a string `text`
 * are kept verbatim, URL-sourced images become markdown links, and
 * non-array, non-string values are JSON-encoded.
 */
function convertToolResultToText(content: unknown): string {
  if (typeof content === 'string') return content
  if (!Array.isArray(content)) return JSON.stringify(content ?? '')
  const rendered: string[] = []
  for (const block of content) {
    if (block?.type === 'image') {
      const source = block.source
      if (source?.type === 'url' && source.url) {
        rendered.push(`[Image](${source.url})`)
      }
    } else if (typeof block?.text === 'string') {
      rendered.push(block.text)
    }
  }
  return rendered.join('\n')
}
/**
 * Converts Anthropic content blocks into Responses API input parts.
 * User text maps to `input_text`, assistant text to `output_text`; images
 * are forwarded only for user content (base64 sources become data URLs);
 * thinking blocks are wrapped in <thinking> tags; tool_use/tool_result
 * blocks are skipped here because they are translated separately.
 */
function convertContentBlocksToResponsesParts(
  content: unknown,
  role: 'user' | 'assistant',
): ResponsesInputPart[] {
  const textType = role === 'assistant' ? 'output_text' : 'input_text'
  if (typeof content === 'string') {
    return [{ type: textType, text: content }]
  }
  if (!Array.isArray(content)) {
    return [{ type: textType, text: String(content ?? '') }]
  }
  const parts: ResponsesInputPart[] = []
  for (const block of content) {
    const kind = block?.type
    if (kind === 'text') {
      parts.push({ type: textType, text: block.text ?? '' })
    } else if (kind === 'image') {
      if (role !== 'assistant') {
        const source = block.source
        if (source?.type === 'base64') {
          parts.push({
            type: 'input_image',
            image_url: `data:${source.media_type};base64,${source.data}`,
          })
        } else if (source?.type === 'url' && source.url) {
          parts.push({ type: 'input_image', image_url: source.url })
        }
      }
    } else if (kind === 'thinking') {
      if (block.thinking) {
        parts.push({
          type: textType,
          text: `<thinking>${block.thinking}</thinking>`,
        })
      }
    } else if (kind !== 'tool_use' && kind !== 'tool_result') {
      // Unknown block shapes still contribute their text when present.
      if (typeof block?.text === 'string') {
        parts.push({ type: textType, text: block.text })
      }
    }
  }
  return parts
}
/**
 * Translates an Anthropic message history into Responses API input items.
 * Messages may be bare ({role, content}) or wrapped ({message: {...}}).
 * For user messages, tool_result blocks are emitted first as
 * function_call_output items, followed by the remaining content; for
 * assistant messages, text parts come first, then each tool_use becomes a
 * function_call item. Message items with empty content are dropped.
 */
export function convertAnthropicMessagesToResponsesInput(
  messages: Array<{ role?: string; message?: { role?: string; content?: unknown }; content?: unknown }>,
): ResponsesInputItem[] {
  const items: ResponsesInputItem[] = []
  for (const entry of messages) {
    const unwrapped = entry.message ?? entry
    const role = (unwrapped as { role?: string }).role ?? entry.role
    const content = (unwrapped as { content?: unknown }).content
    if (role === 'user') {
      if (!Array.isArray(content)) {
        items.push({
          type: 'message',
          role: 'user',
          content: convertContentBlocksToResponsesParts(content, 'user'),
        })
        continue
      }
      // Tool results first, so they directly follow the function_call
      // items from the preceding assistant turn.
      for (const block of content) {
        if (block.type !== 'tool_result') continue
        items.push({
          type: 'function_call_output',
          call_id: normalizeToolUseId(block.tool_use_id).callId,
          output: convertToolResultToText(block.content),
        })
      }
      const remaining = content.filter(
        (block: { type?: string }) => block.type !== 'tool_result',
      )
      const parts = convertContentBlocksToResponsesParts(remaining, 'user')
      if (parts.length > 0) {
        items.push({ type: 'message', role: 'user', content: parts })
      }
    } else if (role === 'assistant') {
      const withoutToolUse = Array.isArray(content)
        ? content.filter((block: { type?: string }) => block.type !== 'tool_use')
        : content
      const parts = convertContentBlocksToResponsesParts(withoutToolUse, 'assistant')
      if (parts.length > 0) {
        items.push({ type: 'message', role: 'assistant', content: parts })
      }
      if (Array.isArray(content)) {
        for (const block of content) {
          if (block.type !== 'tool_use') continue
          const { id, callId } = normalizeToolUseId(block.id)
          items.push({
            type: 'function_call',
            id,
            call_id: callId,
            name: block.name ?? 'tool',
            arguments:
              typeof block.input === 'string'
                ? block.input
                : JSON.stringify(block.input ?? {}),
          })
        }
      }
    }
  }
  // Defensive re-filter: no message item may carry an empty content list.
  return items.filter(item => item.type !== 'message' || item.content.length > 0)
}
/**
 * Maps Anthropic tool definitions onto Responses API function tools.
 * Unnamed tools and the client-side ToolSearchTool are dropped; a missing
 * schema defaults to an empty object schema. All tools are marked strict.
 */
export function convertToolsToResponsesTools(
  tools: Array<{ name?: string; description?: string; input_schema?: Record<string, unknown> }>,
): ResponsesTool[] {
  const converted: ResponsesTool[] = []
  for (const tool of tools) {
    if (!tool.name || tool.name === 'ToolSearchTool') continue
    converted.push({
      type: 'function',
      name: tool.name,
      description: tool.description ?? '',
      parameters: tool.input_schema ?? { type: 'object', properties: {} },
      strict: true,
    })
  }
  return converted
}
/**
 * Maps an Anthropic `tool_choice` onto the Responses API equivalent:
 * auto → 'auto', any → 'required', none → 'none', and a named tool →
 * {type: 'function', name}. Unknown or absent choices yield undefined so
 * the caller can apply its own default.
 */
function convertToolChoice(toolChoice: unknown): unknown {
  const choice = toolChoice as { type?: string; name?: string } | undefined
  if (!choice?.type) return undefined
  if (choice.type === 'auto') return 'auto'
  if (choice.type === 'any') return 'required'
  // Fix: 'none' previously fell through to undefined, which let
  // performCodexRequest default tool_choice back to 'auto' whenever tools
  // were attached — an explicit opt-out of tool use was silently ignored.
  if (choice.type === 'none') return 'none'
  if (choice.type === 'tool' && choice.name) {
    return {
      type: 'function',
      name: choice.name,
    }
  }
  return undefined
}
/**
 * Sends a translated Anthropic request to the Codex Responses API.
 * Builds the Responses `input` from the Anthropic messages, attaches
 * instructions/tools/reasoning/sampling params, and POSTs to
 * `{baseUrl}/responses` with bearer + account headers. Always requests a
 * streamed response (`stream: true`); non-streaming callers drain the SSE
 * stream via collectCodexCompletedResponse().
 *
 * @throws Error with the HTTP status and body text when the API responds non-2xx.
 */
export async function performCodexRequest(options: {
  request: ResolvedProviderRequest
  credentials: ResolvedCodexCredentials
  params: ShimCreateParams
  defaultHeaders: Record<string, string>
  signal?: AbortSignal
}): Promise<Response> {
  const input = convertAnthropicMessagesToResponsesInput(
    options.params.messages as Array<{
      role?: string
      message?: { role?: string; content?: unknown }
      content?: unknown
    }>,
  )
  const body: Record<string, unknown> = {
    model: options.request.resolvedModel,
    // Never send an empty input array; fall back to one empty user message.
    input: input.length > 0
      ? input
      : [
          {
            type: 'message',
            role: 'user',
            content: [{ type: 'input_text', text: '' }],
          },
        ],
    store: false,
    stream: true,
  }
  const instructions = convertSystemPrompt(options.params.system)
  if (instructions) {
    body.instructions = instructions
  }
  const toolChoice = convertToolChoice(options.params.tool_choice)
  if (toolChoice) {
    body.tool_choice = toolChoice
  }
  if (options.params.tools && options.params.tools.length > 0) {
    const convertedTools = convertToolsToResponsesTools(
      options.params.tools as Array<{
        name?: string
        description?: string
        input_schema?: Record<string, unknown>
      }>,
    )
    if (convertedTools.length > 0) {
      body.tools = convertedTools
      body.parallel_tool_calls = true
      // Default tool_choice only when the caller didn't set one above.
      body.tool_choice ??= 'auto'
    }
  }
  if (options.request.reasoning) {
    body.reasoning = options.request.reasoning
  }
  if (options.params.temperature !== undefined) {
    body.temperature = options.params.temperature
  }
  if (options.params.top_p !== undefined) {
    body.top_p = options.params.top_p
  }
  const headers: Record<string, string> = {
    'Content-Type': 'application/json',
    ...options.defaultHeaders,
    Authorization: `Bearer ${options.credentials.apiKey}`,
  }
  if (options.credentials.accountId) {
    // ChatGPT-backed Codex requests must carry the account id header.
    headers['chatgpt-account-id'] = options.credentials.accountId
  }
  // Identify this client unless the caller already set an originator.
  headers.originator ??= 'openclaude'
  const response = await fetch(`${options.request.baseUrl}/responses`, {
    method: 'POST',
    headers,
    body: JSON.stringify(body),
    signal: options.signal,
  })
  if (!response.ok) {
    const errorBody = await response.text().catch(() => 'unknown error')
    throw new Error(`Codex API error ${response.status}: ${errorBody}`)
  }
  return response
}
/**
 * Parses a streaming SSE response body into {event, data} pairs.
 * Events are separated by a blank line; both LF and CRLF framing are
 * accepted (the SSE spec permits CRLF line endings — the previous
 * literal '\n\n' split never matched a '\r\n\r\n' separator, so
 * CRLF-framed streams produced zero events). Only events with both an
 * `event:` field and parseable JSON object `data:` are yielded; `[DONE]`
 * sentinels and malformed payloads are skipped. Trailing bytes without a
 * terminating blank line stay buffered and are dropped at end of stream.
 */
async function* readSseEvents(response: Response): AsyncGenerator<CodexSseEvent> {
  const reader = response.body?.getReader()
  if (!reader) return
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    // Split on blank lines (LF or CRLF); the last element may be a
    // partial event, so keep it buffered for the next chunk.
    const chunks = buffer.split(/\r?\n\r?\n/)
    buffer = chunks.pop() ?? ''
    for (const chunk of chunks) {
      const lines = chunk
        .split('\n')
        .map(line => line.trim())
        .filter(Boolean)
      if (lines.length === 0) continue
      const eventLine = lines.find(line => line.startsWith('event: '))
      const dataLines = lines.filter(line => line.startsWith('data: '))
      if (!eventLine || dataLines.length === 0) continue
      const event = eventLine.slice(7).trim()
      const rawData = dataLines.map(line => line.slice(6)).join('\n')
      if (rawData === '[DONE]') continue
      let data: Record<string, any>
      try {
        const parsed = JSON.parse(rawData)
        if (!parsed || typeof parsed !== 'object') continue
        data = parsed as Record<string, any>
      } catch {
        continue
      }
      yield { event, data }
    }
  }
}
/**
 * Derives the Anthropic stop_reason from a completed Codex response:
 * 'tool_use' when a function call was streamed or appears in the output,
 * 'max_tokens' when the response was cut off by the output-token limit,
 * otherwise 'end_turn'.
 */
function determineStopReason(
  response: Record<string, any> | undefined,
  sawToolUse: boolean,
): 'end_turn' | 'tool_use' | 'max_tokens' {
  const outputItems: Array<{ type?: string }> = Array.isArray(response?.output)
    ? response.output
    : []
  const hasFunctionCall = outputItems.some(item => item?.type === 'function_call')
  if (sawToolUse || hasFunctionCall) {
    return 'tool_use'
  }
  const reason = response?.incomplete_details?.reason
  if (typeof reason === 'string' && reason.includes('max_output_tokens')) {
    return 'max_tokens'
  }
  return 'end_turn'
}
/**
 * Drains a Codex SSE stream until the terminal event and returns its
 * `response` payload. Throws on response.failed (using the API's message
 * when available) and when the stream ends without a completed payload.
 */
export async function collectCodexCompletedResponse(
  response: Response,
): Promise<Record<string, any>> {
  for await (const { event, data } of readSseEvents(response)) {
    switch (event) {
      case 'response.failed':
        throw new Error(
          data?.response?.error?.message ??
            data?.error?.message ??
            'Codex response failed',
        )
      case 'response.completed':
      case 'response.incomplete': {
        const completed = data?.response
        if (completed) return completed
        throw new Error('Codex response ended without a completed payload')
      }
    }
  }
  throw new Error('Codex response ended without a completed payload')
}
/**
 * Adapts a Codex Responses SSE stream into the Anthropic streaming event
 * sequence (message_start → content_block_* → message_delta → message_stop).
 * At most one text block is open at a time; each function_call item gets
 * its own tool_use block, keyed by the Codex item id so later argument
 * deltas and the item-done event can find it. The loop ends at the first
 * response.completed/response.incomplete event; response.failed throws.
 */
export async function* codexStreamToAnthropic(
  response: Response,
  model: string,
): AsyncGenerator<AnthropicStreamEvent> {
  const messageId = makeMessageId()
  // Codex item id → open tool_use block (Anthropic content index + tool id).
  const toolBlocksByItemId = new Map<
    string,
    { index: number; toolUseId: string }
  >()
  // Index of the currently-open text block, or null when none is open.
  let activeTextBlockIndex: number | null = null
  // Anthropic content_block indices are allocated sequentially.
  let nextContentBlockIndex = 0
  let sawToolUse = false
  let finalResponse: Record<string, any> | undefined
  // Emits content_block_stop for the open text block, if any.
  const closeActiveTextBlock = async function* () {
    if (activeTextBlockIndex === null) return
    yield {
      type: 'content_block_stop',
      index: activeTextBlockIndex,
    }
    activeTextBlockIndex = null
  }
  // Opens a text block lazily on the first text part or text delta.
  const startTextBlockIfNeeded = async function* () {
    if (activeTextBlockIndex !== null) return
    activeTextBlockIndex = nextContentBlockIndex++
    yield {
      type: 'content_block_start',
      index: activeTextBlockIndex,
      content_block: { type: 'text', text: '' },
    }
  }
  yield {
    type: 'message_start',
    message: {
      id: messageId,
      type: 'message',
      role: 'assistant',
      content: [],
      model,
      stop_reason: null,
      stop_sequence: null,
      usage: makeUsage(),
    },
  }
  for await (const event of readSseEvents(response)) {
    const payload = event.data
    if (event.event === 'response.output_item.added') {
      const item = payload.item
      if (item?.type === 'function_call') {
        // A tool call interrupts any running text block.
        yield* closeActiveTextBlock()
        const blockIndex = nextContentBlockIndex++
        const toolUseId = item.call_id ?? item.id ?? `call_${blockIndex}`
        toolBlocksByItemId.set(String(item.id ?? toolUseId), {
          index: blockIndex,
          toolUseId,
        })
        sawToolUse = true
        yield {
          type: 'content_block_start',
          index: blockIndex,
          content_block: {
            type: 'tool_use',
            id: toolUseId,
            name: item.name ?? 'tool',
            input: {},
          },
        }
        // Some items arrive with their full arguments up front; forward
        // them as a single JSON delta.
        if (item.arguments) {
          yield {
            type: 'content_block_delta',
            index: blockIndex,
            delta: {
              type: 'input_json_delta',
              partial_json: item.arguments,
            },
          }
        }
      }
      continue
    }
    if (event.event === 'response.content_part.added') {
      if (payload.part?.type === 'output_text') {
        yield* startTextBlockIfNeeded()
      }
      continue
    }
    if (event.event === 'response.output_text.delta') {
      yield* startTextBlockIfNeeded()
      if (activeTextBlockIndex !== null) {
        yield {
          type: 'content_block_delta',
          index: activeTextBlockIndex,
          delta: {
            type: 'text_delta',
            text: payload.delta ?? '',
          },
        }
      }
      continue
    }
    if (event.event === 'response.function_call_arguments.delta') {
      // Deltas for unknown item ids are dropped silently.
      const toolBlock = toolBlocksByItemId.get(String(payload.item_id ?? ''))
      if (toolBlock) {
        yield {
          type: 'content_block_delta',
          index: toolBlock.index,
          delta: {
            type: 'input_json_delta',
            partial_json: payload.delta ?? '',
          },
        }
      }
      continue
    }
    if (event.event === 'response.output_item.done') {
      const item = payload.item
      if (item?.type === 'function_call') {
        const toolBlock = toolBlocksByItemId.get(String(item.id ?? ''))
        if (toolBlock) {
          yield {
            type: 'content_block_stop',
            index: toolBlock.index,
          }
          toolBlocksByItemId.delete(String(item.id))
        }
      } else if (item?.type === 'message') {
        yield* closeActiveTextBlock()
      }
      continue
    }
    if (
      event.event === 'response.completed' ||
      event.event === 'response.incomplete'
    ) {
      finalResponse = payload.response
      break
    }
    if (event.event === 'response.failed') {
      throw new Error(
        payload?.response?.error?.message ??
          payload?.error?.message ??
          'Codex response failed',
      )
    }
  }
  // Close anything still open (e.g. the stream ended without done events).
  yield* closeActiveTextBlock()
  for (const toolBlock of toolBlocksByItemId.values()) {
    yield {
      type: 'content_block_stop',
      index: toolBlock.index,
    }
  }
  yield {
    type: 'message_delta',
    delta: {
      stop_reason: determineStopReason(finalResponse, sawToolUse),
      stop_sequence: null,
    },
    usage: {
      input_tokens: finalResponse?.usage?.input_tokens ?? 0,
      output_tokens: finalResponse?.usage?.output_tokens ?? 0,
    },
  }
  yield { type: 'message_stop' }
}
/**
 * Converts a completed (non-streaming) Codex Responses payload into an
 * Anthropic message object: output_text parts become text blocks and
 * function_call items become tool_use blocks, with best-effort JSON
 * argument parsing (unparseable arguments are wrapped as {raw}).
 */
export function convertCodexResponseToAnthropicMessage(
  data: Record<string, any>,
  model: string,
): Record<string, unknown> {
  const content: Array<Record<string, unknown>> = []
  const outputItems = Array.isArray(data.output) ? data.output : []
  for (const item of outputItems) {
    if (item?.type === 'function_call') {
      let parsedInput: unknown
      try {
        parsedInput = JSON.parse(item.arguments ?? '{}')
      } catch {
        parsedInput = { raw: item.arguments ?? '' }
      }
      content.push({
        type: 'tool_use',
        id: item.call_id ?? item.id ?? makeMessageId(),
        name: item.name ?? 'tool',
        input: parsedInput,
      })
    } else if (item?.type === 'message' && Array.isArray(item.content)) {
      for (const part of item.content) {
        if (part?.type === 'output_text') {
          content.push({ type: 'text', text: part.text ?? '' })
        }
      }
    }
  }
  const sawToolUse = content.some(block => block.type === 'tool_use')
  return {
    id: data.id ?? makeMessageId(),
    type: 'message',
    role: 'assistant',
    content,
    model: data.model ?? model,
    stop_reason: determineStopReason(data, sawToolUse),
    stop_sequence: null,
    usage: makeUsage(data.usage),
  }
}

View File

@@ -13,28 +13,26 @@
* OPENAI_API_KEY=sk-... — API key (optional for local models)
* OPENAI_BASE_URL=http://... — base URL (default: https://api.openai.com/v1)
* OPENAI_MODEL=gpt-4o — default model override
* CODEX_API_KEY / ~/.codex/auth.json — Codex auth for codexplan/codexspark
*/
import {
codexStreamToAnthropic,
collectCodexCompletedResponse,
convertCodexResponseToAnthropicMessage,
performCodexRequest,
type AnthropicStreamEvent,
type ShimCreateParams,
} from './codexShim.js'
import {
resolveCodexApiCredentials,
resolveProviderRequest,
} from './providerConfig.js'
// ---------------------------------------------------------------------------
// Types — minimal subset of Anthropic SDK types we need to produce
// ---------------------------------------------------------------------------
interface AnthropicUsage {
input_tokens: number
output_tokens: number
cache_creation_input_tokens: number
cache_read_input_tokens: number
}
interface AnthropicStreamEvent {
type: string
message?: Record<string, unknown>
index?: number
content_block?: Record<string, unknown>
delta?: Record<string, unknown>
usage?: Partial<AnthropicUsage>
}
// ---------------------------------------------------------------------------
// Message format conversion: Anthropic → OpenAI
// ---------------------------------------------------------------------------
@@ -447,20 +445,6 @@ async function* openaiStreamToAnthropic(
// The shim client — duck-types as Anthropic SDK
// ---------------------------------------------------------------------------
interface ShimCreateParams {
model: string
messages: Array<Record<string, unknown>>
system?: unknown
tools?: Array<Record<string, unknown>>
max_tokens: number
stream?: boolean
temperature?: number
top_p?: number
tool_choice?: unknown
metadata?: unknown
[key: string]: unknown
}
class OpenAIShimStream {
private generator: AsyncGenerator<AnthropicStreamEvent>
// The controller property is checked by claude.ts to distinguish streams from error messages
@@ -476,17 +460,9 @@ class OpenAIShimStream {
}
class OpenAIShimMessages {
private baseUrl: string
private apiKey: string
private defaultHeaders: Record<string, string>
constructor(
baseUrl: string,
apiKey: string,
defaultHeaders: Record<string, string>,
) {
this.baseUrl = baseUrl
this.apiKey = apiKey
constructor(defaultHeaders: Record<string, string>) {
this.defaultHeaders = defaultHeaders
}
@@ -496,20 +472,30 @@ class OpenAIShimMessages {
) {
const self = this
// Return a thenable that also has .withResponse()
const promise = (async () => {
const response = await self._doRequest(params, options)
const request = resolveProviderRequest({ model: params.model })
const response = await self._doRequest(request, params, options)
if (params.stream) {
return new OpenAIShimStream(
openaiStreamToAnthropic(response, params.model),
request.transport === 'codex_responses'
? codexStreamToAnthropic(response, request.resolvedModel)
: openaiStreamToAnthropic(response, request.resolvedModel),
)
}
// Non-streaming: parse the full response and convert
if (request.transport === 'codex_responses') {
const data = await collectCodexCompletedResponse(response)
return convertCodexResponseToAnthropicMessage(
data,
request.resolvedModel,
)
}
const data = await response.json()
return self._convertNonStreamingResponse(data, params.model)
return self._convertNonStreamingResponse(data, request.resolvedModel)
})()
// Add .withResponse() for streaming path (claude.ts uses this)
;(promise as unknown as Record<string, unknown>).withResponse =
async () => {
const data = await promise
@@ -524,6 +510,43 @@ class OpenAIShimMessages {
}
private async _doRequest(
request: ReturnType<typeof resolveProviderRequest>,
params: ShimCreateParams,
options?: { signal?: AbortSignal; headers?: Record<string, string> },
): Promise<Response> {
if (request.transport === 'codex_responses') {
const credentials = resolveCodexApiCredentials()
if (!credentials.apiKey) {
const authHint = credentials.authPath
? ` or place a Codex auth.json at ${credentials.authPath}`
: ''
throw new Error(
`Codex auth is required for ${request.requestedModel}. Set CODEX_API_KEY${authHint}.`,
)
}
if (!credentials.accountId) {
throw new Error(
'Codex auth is missing chatgpt_account_id. Re-login with the Codex CLI or set CHATGPT_ACCOUNT_ID/CODEX_ACCOUNT_ID.',
)
}
return performCodexRequest({
request,
credentials,
params,
defaultHeaders: {
...this.defaultHeaders,
...(options?.headers ?? {}),
},
signal: options?.signal,
})
}
return this._doOpenAIRequest(request, params, options)
}
private async _doOpenAIRequest(
request: ReturnType<typeof resolveProviderRequest>,
params: ShimCreateParams,
options?: { signal?: AbortSignal; headers?: Record<string, string> },
): Promise<Response> {
@@ -537,7 +560,7 @@ class OpenAIShimMessages {
)
const body: Record<string, unknown> = {
model: params.model,
model: request.resolvedModel,
messages: openaiMessages,
max_tokens: params.max_tokens,
stream: params.stream ?? false,
@@ -550,7 +573,6 @@ class OpenAIShimMessages {
if (params.temperature !== undefined) body.temperature = params.temperature
if (params.top_p !== undefined) body.top_p = params.top_p
// Convert tools
if (params.tools && params.tools.length > 0) {
const converted = convertTools(
params.tools as Array<{
@@ -561,7 +583,6 @@ class OpenAIShimMessages {
)
if (converted.length > 0) {
body.tools = converted
// Convert tool_choice
if (params.tool_choice) {
const tc = params.tool_choice as { type?: string; name?: string }
if (tc.type === 'auto') {
@@ -578,18 +599,18 @@ class OpenAIShimMessages {
}
}
const url = `${this.baseUrl}/chat/completions`
const headers: Record<string, string> = {
'Content-Type': 'application/json',
...this.defaultHeaders,
...(options?.headers ?? {}),
}
if (this.apiKey) {
headers['Authorization'] = `Bearer ${this.apiKey}`
const apiKey = process.env.OPENAI_API_KEY ?? ''
if (apiKey) {
headers.Authorization = `Bearer ${apiKey}`
}
const response = await fetch(url, {
const response = await fetch(`${request.baseUrl}/chat/completions`, {
method: 'POST',
headers,
body: JSON.stringify(body),
@@ -598,9 +619,7 @@ class OpenAIShimMessages {
if (!response.ok) {
const errorBody = await response.text().catch(() => 'unknown error')
throw new Error(
`OpenAI API error ${response.status}: ${errorBody}`,
)
throw new Error(`OpenAI API error ${response.status}: ${errorBody}`)
}
return response
@@ -680,45 +699,22 @@ class OpenAIShimMessages {
class OpenAIShimBeta {
messages: OpenAIShimMessages
constructor(
baseUrl: string,
apiKey: string,
defaultHeaders: Record<string, string>,
) {
this.messages = new OpenAIShimMessages(baseUrl, apiKey, defaultHeaders)
constructor(defaultHeaders: Record<string, string>) {
this.messages = new OpenAIShimMessages(defaultHeaders)
}
}
/**
* Creates an Anthropic SDK-compatible client that routes requests
* to an OpenAI-compatible API endpoint.
*
* Usage:
* CLAUDE_CODE_USE_OPENAI=1 OPENAI_API_KEY=sk-... OPENAI_MODEL=gpt-4o
*/
export function createOpenAIShimClient(options: {
defaultHeaders?: Record<string, string>
maxRetries?: number
timeout?: number
}): unknown {
const baseUrl = (
process.env.OPENAI_BASE_URL ??
process.env.OPENAI_API_BASE ??
'https://api.openai.com/v1'
).replace(/\/+$/, '')
const apiKey = process.env.OPENAI_API_KEY ?? ''
const headers = {
const beta = new OpenAIShimBeta({
...(options.defaultHeaders ?? {}),
}
})
const beta = new OpenAIShimBeta(baseUrl, apiKey, headers)
// Duck-type as Anthropic client
return {
beta,
// Some code paths access .messages directly (non-beta)
messages: beta.messages,
}
}

View File

@@ -0,0 +1,313 @@
import { existsSync, readFileSync } from 'node:fs'
import { homedir } from 'node:os'
import { join } from 'node:path'
export const DEFAULT_OPENAI_BASE_URL = 'https://api.openai.com/v1'
export const DEFAULT_CODEX_BASE_URL = 'https://chatgpt.com/backend-api/codex'
// Short model aliases that route to the Codex Responses transport; each
// maps to a concrete model id and, optionally, a default reasoning effort.
const CODEX_ALIAS_MODELS: Record<
  string,
  {
    model: string
    reasoningEffort?: ReasoningEffort
  }
> = {
  codexplan: {
    model: 'gpt-5.4',
    reasoningEffort: 'high',
  },
  codexspark: {
    model: 'gpt-5.3-codex-spark',
  },
} as const
type CodexAlias = keyof typeof CODEX_ALIAS_MODELS
type ReasoningEffort = 'low' | 'medium' | 'high'
/** Which upstream wire protocol a request should use. */
export type ProviderTransport = 'chat_completions' | 'codex_responses'
/** Routing decision produced by resolveProviderRequest(). */
export type ResolvedProviderRequest = {
  transport: ProviderTransport
  // The model string exactly as supplied by the caller (may be an alias).
  requestedModel: string
  // Concrete model id after alias expansion.
  resolvedModel: string
  // Normalized base URL (trailing slashes stripped).
  baseUrl: string
  reasoning?: {
    effort: ReasoningEffort
  }
}
/** Codex credentials plus where they were found. */
export type ResolvedCodexCredentials = {
  apiKey: string
  accountId?: string
  // The auth.json path consulted when env auth was absent.
  authPath?: string
  source: 'env' | 'auth.json' | 'none'
}
/** Internal result of parsing a "model[?reasoning=…]" string. */
type ModelDescriptor = {
  raw: string
  baseModel: string
  reasoning?: {
    effort: ReasoningEffort
  }
}
// Hostnames that indicate a locally-hosted OpenAI-compatible server.
const LOCALHOST_HOSTNAMES = new Set(['localhost', '127.0.0.1', '::1'])

/** Returns the trimmed string when `value` is a non-blank string, else undefined. */
function asTrimmedString(value: unknown): string | undefined {
  if (typeof value !== 'string') return undefined
  const trimmed = value.trim()
  return trimmed === '' ? undefined : trimmed
}
/**
 * Walks `value` along each candidate key path in order and returns the
 * first non-blank string found (trimmed). Paths that hit a non-object or
 * a missing key are skipped.
 */
function readNestedString(
  value: unknown,
  paths: string[][],
): string | undefined {
  for (const path of paths) {
    let cursor: unknown = value
    for (const key of path) {
      if (!cursor || typeof cursor !== 'object' || !(key in cursor)) {
        cursor = undefined
        break
      }
      cursor = (cursor as Record<string, unknown>)[key]
    }
    if (typeof cursor === 'string') {
      const trimmed = cursor.trim()
      if (trimmed) return trimmed
    }
  }
  return undefined
}
/**
 * Decodes the payload (second segment) of a JWT without verifying the
 * signature. Handles the base64url alphabet and stripped '=' padding.
 * Returns undefined for malformed tokens or non-object payloads.
 */
function decodeJwtPayload(token: string): Record<string, unknown> | undefined {
  const segments = token.split('.')
  if (segments.length < 2) return undefined
  try {
    // base64url → base64, then restore the padding base64 requires.
    const base64 = segments[1].replace(/-/g, '+').replace(/_/g, '/')
    const padding = '='.repeat((4 - (base64.length % 4)) % 4)
    const decoded = JSON.parse(
      Buffer.from(base64 + padding, 'base64').toString('utf8'),
    )
    if (decoded && typeof decoded === 'object') {
      return decoded as Record<string, unknown>
    }
    return undefined
  } catch {
    return undefined
  }
}
/** Narrows a raw string to a ReasoningEffort level, or undefined when invalid. */
function parseReasoningEffort(value: string | undefined): ReasoningEffort | undefined {
  switch (value?.trim().toLowerCase()) {
    case 'low':
      return 'low'
    case 'medium':
      return 'medium'
    case 'high':
      return 'high'
    default:
      return undefined
  }
}
/**
 * Splits a model string of the form "name" or "name?reasoning=effort" into
 * its base model and reasoning settings, expanding Codex aliases
 * (codexplan/codexspark) to their concrete model ids. A valid explicit
 * `reasoning` query parameter overrides the alias default.
 */
function parseModelDescriptor(model: string): ModelDescriptor {
  const raw = model.trim()
  const queryIndex = raw.indexOf('?')
  const namePart = queryIndex === -1 ? raw : raw.slice(0, queryIndex).trim()
  const aliasConfig = CODEX_ALIAS_MODELS[namePart.toLowerCase() as CodexAlias]
  const aliasReasoning = aliasConfig?.reasoningEffort
    ? { effort: aliasConfig.reasoningEffort }
    : undefined
  if (queryIndex === -1) {
    return {
      raw,
      baseModel: aliasConfig?.model ?? namePart,
      reasoning: aliasReasoning,
    }
  }
  const params = new URLSearchParams(raw.slice(queryIndex + 1))
  const explicitEffort = parseReasoningEffort(params.get('reasoning') ?? undefined)
  return {
    raw,
    baseModel: aliasConfig?.model ?? namePart,
    reasoning: explicitEffort ? { effort: explicitEffort } : aliasReasoning,
  }
}
/** True when the model string (ignoring any ?query suffix) is a Codex alias. */
function isCodexAlias(model: string): boolean {
  const lowered = model.trim().toLowerCase()
  const withoutQuery = lowered.split('?', 1)[0] ?? lowered
  return withoutQuery in CODEX_ALIAS_MODELS
}
/** True when `baseUrl` points at a localhost-style host; invalid URLs are false. */
export function isLocalProviderUrl(baseUrl: string | undefined): boolean {
  if (!baseUrl) return false
  let hostname: string
  try {
    hostname = new URL(baseUrl).hostname
  } catch {
    return false
  }
  return LOCALHOST_HOSTNAMES.has(hostname)
}
/**
 * True when `baseUrl` is the ChatGPT Codex backend endpoint
 * (https://chatgpt.com/backend-api/codex, trailing slashes ignored).
 * Invalid URLs are false.
 */
export function isCodexBaseUrl(baseUrl: string | undefined): boolean {
  if (!baseUrl) return false
  try {
    const url = new URL(baseUrl)
    if (url.hostname !== 'chatgpt.com') return false
    return url.pathname.replace(/\/+$/, '') === '/backend-api/codex'
  } catch {
    return false
  }
}
/**
 * Decides how to route a request: transport (Codex Responses vs OpenAI
 * chat completions), the concrete model id, base URL, and reasoning
 * settings. The Codex transport is chosen when the requested model is a
 * Codex alias or the configured base URL is the Codex backend.
 * Model precedence: explicit option → OPENAI_MODEL → fallback → 'gpt-4o'.
 */
export function resolveProviderRequest(options?: {
  model?: string
  baseUrl?: string
  fallbackModel?: string
}): ResolvedProviderRequest {
  const requestedModel =
    options?.model?.trim() ||
    process.env.OPENAI_MODEL?.trim() ||
    options?.fallbackModel?.trim() ||
    'gpt-4o'
  const descriptor = parseModelDescriptor(requestedModel)
  const configuredBaseUrl =
    options?.baseUrl ??
    process.env.OPENAI_BASE_URL ??
    process.env.OPENAI_API_BASE ??
    undefined
  const useCodex =
    isCodexAlias(requestedModel) || isCodexBaseUrl(configuredBaseUrl)
  const defaultBaseUrl = useCodex
    ? DEFAULT_CODEX_BASE_URL
    : DEFAULT_OPENAI_BASE_URL
  return {
    transport: useCodex ? 'codex_responses' : 'chat_completions',
    requestedModel,
    resolvedModel: descriptor.baseModel,
    // Normalize away trailing slashes so path joins stay predictable.
    baseUrl: (configuredBaseUrl ?? defaultBaseUrl).replace(/\/+$/, ''),
    reasoning: descriptor.reasoning,
  }
}
/**
 * Resolves the Codex auth.json location: CODEX_AUTH_JSON_PATH wins,
 * then $CODEX_HOME/auth.json, then ~/.codex/auth.json.
 */
export function resolveCodexAuthPath(
  env: NodeJS.ProcessEnv = process.env,
): string {
  const explicitPath = env.CODEX_AUTH_JSON_PATH?.trim()
  if (explicitPath) return explicitPath
  const codexHome = env.CODEX_HOME?.trim()
  if (codexHome) return join(codexHome, 'auth.json')
  return join(homedir(), '.codex', 'auth.json')
}
/**
 * Extracts the ChatGPT account id from an access token's JWT claims.
 * Prefers the namespaced OpenAI auth claim, falling back to a bare
 * `chatgpt_account_id` claim. Returns undefined for absent/opaque tokens.
 */
export function parseChatgptAccountId(
  token: string | undefined,
): string | undefined {
  if (!token) return undefined
  const claims = decodeJwtPayload(token)
  if (!claims) return undefined
  return (
    asTrimmedString(claims['https://api.openai.com/auth.chatgpt_account_id']) ??
    asTrimmedString(claims.chatgpt_account_id)
  )
}
/**
 * Reads and parses the auth.json at `authPath`. Returns undefined when the
 * file is missing, unreadable, not valid JSON, or not a JSON object —
 * callers treat all of those as "no stored Codex credentials".
 */
function loadCodexAuthJson(
  authPath: string,
): Record<string, unknown> | undefined {
  if (!existsSync(authPath)) return undefined
  try {
    const parsed = JSON.parse(readFileSync(authPath, 'utf8'))
    if (parsed && typeof parsed === 'object') {
      return parsed as Record<string, unknown>
    }
  } catch {
    // Fall through — treat unreadable or corrupt files as absent.
  }
  return undefined
}
/**
 * Resolves Codex API credentials, preferring environment variables over
 * the on-disk auth.json:
 *  - CODEX_API_KEY (account id from CODEX_ACCOUNT_ID/CHATGPT_ACCOUNT_ID or
 *    the key's own JWT claims) → source 'env'
 *  - otherwise the access/id token stored in auth.json → source 'auth.json'
 *  - otherwise an empty key with source 'none' (authPath is reported so
 *    the caller can tell the user where to place credentials).
 */
export function resolveCodexApiCredentials(
  env: NodeJS.ProcessEnv = process.env,
): ResolvedCodexCredentials {
  const envAccountId =
    asTrimmedString(env.CODEX_ACCOUNT_ID) ??
    asTrimmedString(env.CHATGPT_ACCOUNT_ID)
  const envApiKey = asTrimmedString(env.CODEX_API_KEY)
  if (envApiKey) {
    return {
      apiKey: envApiKey,
      accountId: envAccountId ?? parseChatgptAccountId(envApiKey),
      source: 'env',
    }
  }
  const authPath = resolveCodexAuthPath(env)
  const authJson = loadCodexAuthJson(authPath)
  if (!authJson) {
    return { apiKey: '', authPath, source: 'none' }
  }
  // Accept the various layouts the Codex CLI has written over time;
  // id tokens are the last resort.
  const apiKey = readNestedString(authJson, [
    ['access_token'],
    ['accessToken'],
    ['tokens', 'access_token'],
    ['tokens', 'accessToken'],
    ['auth', 'access_token'],
    ['auth', 'accessToken'],
    ['token', 'access_token'],
    ['token', 'accessToken'],
    ['tokens', 'id_token'],
    ['tokens', 'idToken'],
  ])
  const accountId =
    envAccountId ??
    readNestedString(authJson, [
      ['account_id'],
      ['accountId'],
      ['tokens', 'account_id'],
      ['tokens', 'accountId'],
      ['auth', 'account_id'],
      ['auth', 'accountId'],
    ]) ??
    parseChatgptAccountId(apiKey)
  if (!apiKey) {
    return { apiKey: '', accountId, authPath, source: 'none' }
  }
  return { apiKey, accountId, authPath, source: 'auth.json' }
}