diff --git a/src/services/api/client.test.ts b/src/services/api/client.test.ts index fae88c55..30df3b93 100644 --- a/src/services/api/client.test.ts +++ b/src/services/api/client.test.ts @@ -14,6 +14,7 @@ type ShimClient = { const originalFetch = globalThis.fetch const originalMacro = (globalThis as Record<string, unknown>).MACRO const originalEnv = { + CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, GEMINI_API_KEY: process.env.GEMINI_API_KEY, GEMINI_MODEL: process.env.GEMINI_MODEL, @@ -25,6 +26,15 @@ const originalEnv = { OPENAI_MODEL: process.env.OPENAI_MODEL, ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY, ANTHROPIC_AUTH_TOKEN: process.env.ANTHROPIC_AUTH_TOKEN, + ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS, +} + +function restoreEnv(key: string, value: string | undefined): void { + if (value === undefined) { + delete process.env[key] + } else { + process.env[key] = value + } } beforeEach(() => { @@ -35,27 +45,31 @@ beforeEach(() => { process.env.GEMINI_BASE_URL = 'https://gemini.example/v1beta/openai' process.env.GEMINI_AUTH_MODE = 'api-key' + delete process.env.CLAUDE_CODE_USE_OPENAI delete process.env.GOOGLE_API_KEY delete process.env.OPENAI_API_KEY delete process.env.OPENAI_BASE_URL delete process.env.OPENAI_MODEL delete process.env.ANTHROPIC_API_KEY delete process.env.ANTHROPIC_AUTH_TOKEN + delete process.env.ANTHROPIC_CUSTOM_HEADERS }) afterEach(() => { ;(globalThis as Record<string, unknown>).MACRO = originalMacro - process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI - process.env.GEMINI_API_KEY = originalEnv.GEMINI_API_KEY - process.env.GEMINI_MODEL = originalEnv.GEMINI_MODEL - process.env.GEMINI_BASE_URL = originalEnv.GEMINI_BASE_URL - process.env.GEMINI_AUTH_MODE = originalEnv.GEMINI_AUTH_MODE - process.env.GOOGLE_API_KEY = originalEnv.GOOGLE_API_KEY - process.env.OPENAI_API_KEY = originalEnv.OPENAI_API_KEY - process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL - 
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL - process.env.ANTHROPIC_API_KEY = originalEnv.ANTHROPIC_API_KEY - process.env.ANTHROPIC_AUTH_TOKEN = originalEnv.ANTHROPIC_AUTH_TOKEN + restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI) + restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI) + restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY) + restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL) + restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL) + restoreEnv('GEMINI_AUTH_MODE', originalEnv.GEMINI_AUTH_MODE) + restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY) + restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY) + restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL) + restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL) + restoreEnv('ANTHROPIC_API_KEY', originalEnv.ANTHROPIC_API_KEY) + restoreEnv('ANTHROPIC_AUTH_TOKEN', originalEnv.ANTHROPIC_AUTH_TOKEN) + restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS) globalThis.fetch = originalFetch }) @@ -122,3 +136,135 @@ test('routes Gemini provider requests through the OpenAI-compatible shim', async model: 'gemini-2.0-flash', }) }) + +test('strips Anthropic-specific custom headers before sending OpenAI-compatible shim requests', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_OPENAI = '1' + process.env.OPENAI_API_KEY = 'openai-test-key' + process.env.OPENAI_BASE_URL = 'http://example.test/v1' + process.env.OPENAI_MODEL = 'gpt-4o' + process.env.ANTHROPIC_CUSTOM_HEADERS = [ + 'anthropic-version: 2023-06-01', + 'anthropic-beta: prompt-caching-2024-07-31', + 'x-anthropic-additional-protection: true', + 'x-claude-remote-session-id: remote-123', + 'x-app: cli', + 'x-safe-header: keep-me', + ].join('\n') + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-openai', + model: 'gpt-4o', + 
choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = (await getAnthropicClient({ + maxRetries: 0, + model: 'gpt-4o', + })) as unknown as ShimClient + + await client.beta.messages.create({ + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-app')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer openai-test-key') +}) + +test('strips Anthropic-specific custom headers on providerOverride shim requests too', async () => { + let capturedHeaders: Headers | undefined + + process.env.ANTHROPIC_CUSTOM_HEADERS = [ + 'anthropic-version: 2023-06-01', + 'anthropic-beta: prompt-caching-2024-07-31', + 'x-claude-remote-session-id: remote-123', + 'x-safe-header: keep-me', + ].join('\n') + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-provider-override', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = (await getAnthropicClient({ + maxRetries: 0, + providerOverride: { + model: 'gpt-4o', + baseURL: 
'http://example.test/v1', + apiKey: 'provider-test-key', + }, + })) as unknown as ShimClient + + await client.beta.messages.create({ + model: 'unused', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-test-key') +}) diff --git a/src/services/api/openaiShim.test.ts b/src/services/api/openaiShim.test.ts index db0c9c2e..4889b6d3 100644 --- a/src/services/api/openaiShim.test.ts +++ b/src/services/api/openaiShim.test.ts @@ -7,6 +7,10 @@ const originalEnv = { OPENAI_BASE_URL: process.env.OPENAI_BASE_URL, OPENAI_API_KEY: process.env.OPENAI_API_KEY, OPENAI_MODEL: process.env.OPENAI_MODEL, + CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB, + GITHUB_TOKEN: process.env.GITHUB_TOKEN, + GH_TOKEN: process.env.GH_TOKEN, + CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, GEMINI_API_KEY: process.env.GEMINI_API_KEY, GOOGLE_API_KEY: process.env.GOOGLE_API_KEY, @@ -15,6 +19,7 @@ const originalEnv = { GEMINI_BASE_URL: process.env.GEMINI_BASE_URL, GEMINI_MODEL: process.env.GEMINI_MODEL, GOOGLE_CLOUD_PROJECT: process.env.GOOGLE_CLOUD_PROJECT, + ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS, } const originalFetch = globalThis.fetch @@ -70,6 +75,10 @@ beforeEach(() => { process.env.OPENAI_BASE_URL = 'http://example.test/v1' process.env.OPENAI_API_KEY = 'test-key' delete process.env.OPENAI_MODEL + delete process.env.CLAUDE_CODE_USE_GITHUB + delete process.env.GITHUB_TOKEN + delete process.env.GH_TOKEN + delete process.env.CLAUDE_CODE_USE_OPENAI delete process.env.CLAUDE_CODE_USE_GEMINI delete 
process.env.GEMINI_API_KEY delete process.env.GOOGLE_API_KEY @@ -78,12 +87,17 @@ beforeEach(() => { delete process.env.GEMINI_BASE_URL delete process.env.GEMINI_MODEL delete process.env.GOOGLE_CLOUD_PROJECT + delete process.env.ANTHROPIC_CUSTOM_HEADERS }) afterEach(() => { restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL) restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY) restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL) + restoreEnv('CLAUDE_CODE_USE_GITHUB', originalEnv.CLAUDE_CODE_USE_GITHUB) + restoreEnv('GITHUB_TOKEN', originalEnv.GITHUB_TOKEN) + restoreEnv('GH_TOKEN', originalEnv.GH_TOKEN) + restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI) restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI) restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY) restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY) @@ -92,9 +106,227 @@ afterEach(() => { restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL) restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL) restoreEnv('GOOGLE_CLOUD_PROJECT', originalEnv.GOOGLE_CLOUD_PROJECT) + restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS) globalThis.fetch = originalFetch }) +test('strips canonical Anthropic headers from direct shim defaultHeaders', async () => { + let capturedHeaders: Headers | undefined + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-1', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = createOpenAIShimClient({ + defaultHeaders: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-anthropic-additional-protection': 
'true', + 'x-claude-remote-session-id': 'remote-123', + 'x-app': 'cli', + 'x-client-app': 'sdk', + 'x-safe-header': 'keep-me', + }, + }) as OpenAIShimClient + + await client.beta.messages.create({ + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-app')).toBeNull() + expect(capturedHeaders?.get('x-client-app')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') +}) + +test('strips canonical Anthropic headers from per-request shim headers too', async () => { + let capturedHeaders: Headers | undefined + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-1', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = createOpenAIShimClient({}) as OpenAIShimClient + + await client.beta.messages.create( + { + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') +}) + +test('strips Anthropic-specific 
headers on GitHub Codex transport requests', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_GITHUB = '1' + process.env.OPENAI_API_KEY = 'github-test-key' + delete process.env.OPENAI_BASE_URL + delete process.env.OPENAI_MODEL + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response('', { + status: 200, + headers: { + 'Content-Type': 'text/event-stream', + }, + }) + }) as FetchType + + const client = createOpenAIShimClient({}) as OpenAIShimClient + + await client.beta.messages.create( + { + model: 'github:gpt-5-codex', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: true, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-anthropic-additional-protection': 'true', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer github-test-key') + expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7') +}) + +test('strips Anthropic-specific headers on GitHub Codex transport with providerOverride API key', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_GITHUB = '1' + process.env.OPENAI_API_KEY = 'env-should-not-win' + delete process.env.OPENAI_BASE_URL + delete process.env.OPENAI_MODEL + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response('', { + status: 200, + headers: { + 'Content-Type': 'text/event-stream', + }, + }) + }) as FetchType + + const client = createOpenAIShimClient({ + providerOverride: { + model: 
'github:gpt-5-codex', + baseURL: 'https://api.githubcopilot.com', + apiKey: 'provider-override-key', + }, + }) as OpenAIShimClient + + await client.beta.messages.create( + { + model: 'ignored', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: true, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'x-claude-remote-session-id': 'remote-123', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-override-key') + expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7') +}) + test('preserves usage from final OpenAI stream chunk with empty choices', async () => { globalThis.fetch = (async (_input, init) => { const url = typeof _input === 'string' ? 
_input : _input.url diff --git a/src/services/api/openaiShim.ts b/src/services/api/openaiShim.ts index c72b8c24..04c9450c 100644 --- a/src/services/api/openaiShim.ts +++ b/src/services/api/openaiShim.ts @@ -80,6 +80,32 @@ function isGithubModelsMode(): boolean { return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) } +function filterAnthropicHeaders( + headers: Record<string, string> | undefined, +): Record<string, string> { + if (!headers) return {} + + const filtered: Record<string, string> = {} + for (const [key, value] of Object.entries(headers)) { + const lower = key.toLowerCase() + if ( + lower.startsWith('x-anthropic') || + lower.startsWith('anthropic-') || + lower.startsWith('x-claude') || + lower === 'x-app' || + lower === 'x-client-app' || + lower === 'authorization' || + lower === 'x-api-key' || + lower === 'api-key' + ) { + continue + } + filtered[key] = value + } + + return filtered +} + function hasGeminiApiHost(baseUrl: string | undefined): boolean { if (!baseUrl) return false @@ -989,7 +1015,7 @@ class OpenAIShimMessages { private providerOverride?: { model: string; baseURL: string; apiKey: string } constructor(defaultHeaders: Record<string, string>, reasoningEffort?: 'low' | 'medium' | 'high' | 'xhigh', providerOverride?: { model: string; baseURL: string; apiKey: string }) { - this.defaultHeaders = defaultHeaders + this.defaultHeaders = filterAnthropicHeaders(defaultHeaders) this.reasoningEffort = reasoningEffort this.providerOverride = providerOverride } @@ -1099,7 +1125,7 @@ class OpenAIShimMessages { params, defaultHeaders: { ...this.defaultHeaders, - ...(options?.headers ?? {}), + ...filterAnthropicHeaders(options?.headers), ...COPILOT_HEADERS, }, signal: options?.signal, @@ -1131,7 +1157,7 @@ class OpenAIShimMessages { params, defaultHeaders: { ...this.defaultHeaders, - ...(options?.headers ?? 
{}), + ...filterAnthropicHeaders(options?.headers), + }, signal: options?.signal, }) @@ -1223,7 +1249,7 @@ class OpenAIShimMessages { const headers: Record<string, string> = { 'Content-Type': 'application/json', ...this.defaultHeaders, - ...(options?.headers ?? {}), + ...filterAnthropicHeaders(options?.headers), } const isGemini = isGeminiMode()