From 07621a6f8d0918170281869a47b5dbff90e71594 Mon Sep 17 00:00:00 2001 From: ibaaaaal Date: Fri, 10 Apr 2026 20:56:40 +0700 Subject: [PATCH] fix: scrub canonical Anthropic headers from 3P shim requests (#499) * Stop canonical Anthropic headers from leaking into 3P shim requests The remaining blocker from PR #268 was that canonical Anthropic headers such as `anthropic-version` and `anthropic-beta` could still ride through supported 3P paths even after the earlier x-anthropic/x-claude scrubber work. This tightens header filtering inside the shim itself so direct defaultHeaders, env-driven client setup, providerOverride routing, and per-request header injection all share the same scrubber. Constraint: Preserve non-Anthropic custom headers and provider auth while stripping only Anthropic/OpenClaude-internal headers from 3P requests Rejected: Rely on client.ts filtering alone | direct shim construction and per-request headers would still leave gaps Confidence: high Scope-risk: narrow Reversibility: clean Directive: Keep header scrubbing centralized in the shim so new call paths do not reopen 3P leakage bugs Tested: bun test src/services/api/openaiShim.test.ts src/services/api/client.test.ts src/utils/context.test.ts Tested: bun run test:provider Tested: bun run build && node dist/cli.mjs --version Not-tested: bun run typecheck (repository baseline currently fails in many unrelated files) * Keep OpenAI client tests from restoring undefined env as strings The new header-leak regression tests in client.test.ts restored environment variables via direct assignment, which can leave literal "undefined" strings in process.env when the original value was unset. This switches the teardown over to the same restore helper pattern already used in openaiShim.test.ts. 
Constraint: Keep the fix limited to test hygiene without altering runtime behavior Rejected: Restore only the two env vars Copilot called out | using one helper for all test env restores is simpler and less error-prone Confidence: high Scope-risk: narrow Reversibility: clean Directive: Use restore helpers for env teardown in tests so unset values stay deleted instead of becoming the string "undefined" Tested: bun test src/services/api/client.test.ts src/services/api/openaiShim.test.ts src/utils/context.test.ts Not-tested: Full provider suite (unchanged runtime path) * Prevent GitHub Codex requests from forwarding unsanitized Anthropic headers A base-sync with upstream exposed a separate GitHub+Codex transport branch that still merged per-request headers raw before adding Copilot headers. This keeps the filter aligned across Codex-family paths and adds explicit regression tests for GitHub Codex routing, including providerOverride. Constraint: Must not push or modify GitHub state while validating the reviewer concern Rejected: Leave the GitHub Codex path unchanged | runtime repro showed anthropic-* headers still leaked after the upstream sync Confidence: high Scope-risk: narrow Directive: Keep header scrubbing consistent across every Codex-family transport branch when provider routing changes Tested: bun test src/services/api/openaiShim.test.ts Tested: bun test src/services/api/client.test.ts src/services/api/codexShim.test.ts src/services/api/providerConfig.github.test.ts Tested: bun run build Not-tested: Full repository test suite --- src/services/api/client.test.ts | 168 ++++++++++++++++++-- src/services/api/openaiShim.test.ts | 232 ++++++++++++++++++++++++++++ src/services/api/openaiShim.ts | 34 +++- 3 files changed, 419 insertions(+), 15 deletions(-) diff --git a/src/services/api/client.test.ts b/src/services/api/client.test.ts index fae88c55..30df3b93 100644 --- a/src/services/api/client.test.ts +++ b/src/services/api/client.test.ts @@ -14,6 +14,7 @@ type 
ShimClient = { const originalFetch = globalThis.fetch const originalMacro = (globalThis as Record).MACRO const originalEnv = { + CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, GEMINI_API_KEY: process.env.GEMINI_API_KEY, GEMINI_MODEL: process.env.GEMINI_MODEL, @@ -25,6 +26,15 @@ const originalEnv = { OPENAI_MODEL: process.env.OPENAI_MODEL, ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY, ANTHROPIC_AUTH_TOKEN: process.env.ANTHROPIC_AUTH_TOKEN, + ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS, +} + +function restoreEnv(key: string, value: string | undefined): void { + if (value === undefined) { + delete process.env[key] + } else { + process.env[key] = value + } } beforeEach(() => { @@ -35,27 +45,31 @@ beforeEach(() => { process.env.GEMINI_BASE_URL = 'https://gemini.example/v1beta/openai' process.env.GEMINI_AUTH_MODE = 'api-key' + delete process.env.CLAUDE_CODE_USE_OPENAI delete process.env.GOOGLE_API_KEY delete process.env.OPENAI_API_KEY delete process.env.OPENAI_BASE_URL delete process.env.OPENAI_MODEL delete process.env.ANTHROPIC_API_KEY delete process.env.ANTHROPIC_AUTH_TOKEN + delete process.env.ANTHROPIC_CUSTOM_HEADERS }) afterEach(() => { ;(globalThis as Record).MACRO = originalMacro - process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI - process.env.GEMINI_API_KEY = originalEnv.GEMINI_API_KEY - process.env.GEMINI_MODEL = originalEnv.GEMINI_MODEL - process.env.GEMINI_BASE_URL = originalEnv.GEMINI_BASE_URL - process.env.GEMINI_AUTH_MODE = originalEnv.GEMINI_AUTH_MODE - process.env.GOOGLE_API_KEY = originalEnv.GOOGLE_API_KEY - process.env.OPENAI_API_KEY = originalEnv.OPENAI_API_KEY - process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL - process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL - process.env.ANTHROPIC_API_KEY = originalEnv.ANTHROPIC_API_KEY - process.env.ANTHROPIC_AUTH_TOKEN = originalEnv.ANTHROPIC_AUTH_TOKEN + 
restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI) + restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI) + restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY) + restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL) + restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL) + restoreEnv('GEMINI_AUTH_MODE', originalEnv.GEMINI_AUTH_MODE) + restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY) + restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY) + restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL) + restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL) + restoreEnv('ANTHROPIC_API_KEY', originalEnv.ANTHROPIC_API_KEY) + restoreEnv('ANTHROPIC_AUTH_TOKEN', originalEnv.ANTHROPIC_AUTH_TOKEN) + restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS) globalThis.fetch = originalFetch }) @@ -122,3 +136,135 @@ test('routes Gemini provider requests through the OpenAI-compatible shim', async model: 'gemini-2.0-flash', }) }) + +test('strips Anthropic-specific custom headers before sending OpenAI-compatible shim requests', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_OPENAI = '1' + process.env.OPENAI_API_KEY = 'openai-test-key' + process.env.OPENAI_BASE_URL = 'http://example.test/v1' + process.env.OPENAI_MODEL = 'gpt-4o' + process.env.ANTHROPIC_CUSTOM_HEADERS = [ + 'anthropic-version: 2023-06-01', + 'anthropic-beta: prompt-caching-2024-07-31', + 'x-anthropic-additional-protection: true', + 'x-claude-remote-session-id: remote-123', + 'x-app: cli', + 'x-safe-header: keep-me', + ].join('\n') + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-openai', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + 
}), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = (await getAnthropicClient({ + maxRetries: 0, + model: 'gpt-4o', + })) as unknown as ShimClient + + await client.beta.messages.create({ + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-app')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer openai-test-key') +}) + +test('strips Anthropic-specific custom headers on providerOverride shim requests too', async () => { + let capturedHeaders: Headers | undefined + + process.env.ANTHROPIC_CUSTOM_HEADERS = [ + 'anthropic-version: 2023-06-01', + 'anthropic-beta: prompt-caching-2024-07-31', + 'x-claude-remote-session-id: remote-123', + 'x-safe-header: keep-me', + ].join('\n') + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-provider-override', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = (await getAnthropicClient({ + maxRetries: 0, + providerOverride: { + model: 'gpt-4o', + baseURL: 'http://example.test/v1', + apiKey: 'provider-test-key', + }, + })) as unknown as ShimClient + + await client.beta.messages.create({ + model: 'unused', + system: 'test system', + messages: [{ 
role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-test-key') +}) diff --git a/src/services/api/openaiShim.test.ts b/src/services/api/openaiShim.test.ts index db0c9c2e..4889b6d3 100644 --- a/src/services/api/openaiShim.test.ts +++ b/src/services/api/openaiShim.test.ts @@ -7,6 +7,10 @@ const originalEnv = { OPENAI_BASE_URL: process.env.OPENAI_BASE_URL, OPENAI_API_KEY: process.env.OPENAI_API_KEY, OPENAI_MODEL: process.env.OPENAI_MODEL, + CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB, + GITHUB_TOKEN: process.env.GITHUB_TOKEN, + GH_TOKEN: process.env.GH_TOKEN, + CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, GEMINI_API_KEY: process.env.GEMINI_API_KEY, GOOGLE_API_KEY: process.env.GOOGLE_API_KEY, @@ -15,6 +19,7 @@ const originalEnv = { GEMINI_BASE_URL: process.env.GEMINI_BASE_URL, GEMINI_MODEL: process.env.GEMINI_MODEL, GOOGLE_CLOUD_PROJECT: process.env.GOOGLE_CLOUD_PROJECT, + ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS, } const originalFetch = globalThis.fetch @@ -70,6 +75,10 @@ beforeEach(() => { process.env.OPENAI_BASE_URL = 'http://example.test/v1' process.env.OPENAI_API_KEY = 'test-key' delete process.env.OPENAI_MODEL + delete process.env.CLAUDE_CODE_USE_GITHUB + delete process.env.GITHUB_TOKEN + delete process.env.GH_TOKEN + delete process.env.CLAUDE_CODE_USE_OPENAI delete process.env.CLAUDE_CODE_USE_GEMINI delete process.env.GEMINI_API_KEY delete process.env.GOOGLE_API_KEY @@ -78,12 +87,17 @@ beforeEach(() => { delete process.env.GEMINI_BASE_URL delete process.env.GEMINI_MODEL delete 
process.env.GOOGLE_CLOUD_PROJECT + delete process.env.ANTHROPIC_CUSTOM_HEADERS }) afterEach(() => { restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL) restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY) restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL) + restoreEnv('CLAUDE_CODE_USE_GITHUB', originalEnv.CLAUDE_CODE_USE_GITHUB) + restoreEnv('GITHUB_TOKEN', originalEnv.GITHUB_TOKEN) + restoreEnv('GH_TOKEN', originalEnv.GH_TOKEN) + restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI) restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI) restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY) restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY) @@ -92,9 +106,227 @@ afterEach(() => { restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL) restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL) restoreEnv('GOOGLE_CLOUD_PROJECT', originalEnv.GOOGLE_CLOUD_PROJECT) + restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS) globalThis.fetch = originalFetch }) +test('strips canonical Anthropic headers from direct shim defaultHeaders', async () => { + let capturedHeaders: Headers | undefined + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-1', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = createOpenAIShimClient({ + defaultHeaders: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-anthropic-additional-protection': 'true', + 'x-claude-remote-session-id': 'remote-123', + 'x-app': 'cli', + 'x-client-app': 'sdk', + 'x-safe-header': 'keep-me', + }, + }) as OpenAIShimClient + + await 
client.beta.messages.create({ + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-app')).toBeNull() + expect(capturedHeaders?.get('x-client-app')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') +}) + +test('strips canonical Anthropic headers from per-request shim headers too', async () => { + let capturedHeaders: Headers | undefined + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response( + JSON.stringify({ + id: 'chatcmpl-1', + model: 'gpt-4o', + choices: [ + { + message: { + role: 'assistant', + content: 'ok', + }, + finish_reason: 'stop', + }, + ], + usage: { + prompt_tokens: 8, + completion_tokens: 3, + total_tokens: 11, + }, + }), + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ) + }) as FetchType + + const client = createOpenAIShimClient({}) as OpenAIShimClient + + await client.beta.messages.create( + { + model: 'gpt-4o', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: false, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') +}) + +test('strips Anthropic-specific headers on GitHub Codex transport requests', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_GITHUB = '1' + 
process.env.OPENAI_API_KEY = 'github-test-key' + delete process.env.OPENAI_BASE_URL + delete process.env.OPENAI_MODEL + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response('', { + status: 200, + headers: { + 'Content-Type': 'text/event-stream', + }, + }) + }) as FetchType + + const client = createOpenAIShimClient({}) as OpenAIShimClient + + await client.beta.messages.create( + { + model: 'github:gpt-5-codex', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: true, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'anthropic-beta': 'prompt-caching-2024-07-31', + 'x-anthropic-additional-protection': 'true', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('anthropic-beta')).toBeNull() + expect(capturedHeaders?.get('x-anthropic-additional-protection')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer github-test-key') + expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7') +}) + +test('strips Anthropic-specific headers on GitHub Codex transport with providerOverride API key', async () => { + let capturedHeaders: Headers | undefined + + process.env.CLAUDE_CODE_USE_GITHUB = '1' + process.env.OPENAI_API_KEY = 'env-should-not-win' + delete process.env.OPENAI_BASE_URL + delete process.env.OPENAI_MODEL + + globalThis.fetch = (async (_input, init) => { + capturedHeaders = new Headers(init?.headers) + + return new Response('', { + status: 200, + headers: { + 'Content-Type': 'text/event-stream', + }, + }) + }) as FetchType + + const client = createOpenAIShimClient({ + providerOverride: { + model: 'github:gpt-5-codex', + baseURL: 'https://api.githubcopilot.com', + apiKey: 'provider-override-key', + }, + }) as OpenAIShimClient + + await 
client.beta.messages.create( + { + model: 'ignored', + system: 'test system', + messages: [{ role: 'user', content: 'hello' }], + max_tokens: 64, + stream: true, + }, + { + headers: { + 'anthropic-version': '2023-06-01', + 'x-claude-remote-session-id': 'remote-123', + 'x-safe-header': 'keep-me', + }, + }, + ) + + expect(capturedHeaders?.get('anthropic-version')).toBeNull() + expect(capturedHeaders?.get('x-claude-remote-session-id')).toBeNull() + expect(capturedHeaders?.get('x-safe-header')).toBe('keep-me') + expect(capturedHeaders?.get('authorization')).toBe('Bearer provider-override-key') + expect(capturedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7') +}) + test('preserves usage from final OpenAI stream chunk with empty choices', async () => { globalThis.fetch = (async (_input, init) => { const url = typeof _input === 'string' ? _input : _input.url diff --git a/src/services/api/openaiShim.ts b/src/services/api/openaiShim.ts index c72b8c24..04c9450c 100644 --- a/src/services/api/openaiShim.ts +++ b/src/services/api/openaiShim.ts @@ -80,6 +80,32 @@ function isGithubModelsMode(): boolean { return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) } +function filterAnthropicHeaders( + headers: Record<string, string> | undefined, +): Record<string, string> { + if (!headers) return {} + + const filtered: Record<string, string> = {} + for (const [key, value] of Object.entries(headers)) { + const lower = key.toLowerCase() + if ( + lower.startsWith('x-anthropic') || + lower.startsWith('anthropic-') || + lower.startsWith('x-claude') || + lower === 'x-app' || + lower === 'x-client-app' || + lower === 'authorization' || + lower === 'x-api-key' || + lower === 'api-key' + ) { + continue + } + filtered[key] = value + } + + return filtered +} + function hasGeminiApiHost(baseUrl: string | undefined): boolean { if (!baseUrl) return false @@ -989,7 +1015,7 @@ class OpenAIShimMessages { private providerOverride?: { model: string; baseURL: string; apiKey: string } constructor(defaultHeaders: Record<string, string>,
reasoningEffort?: 'low' | 'medium' | 'high' | 'xhigh', providerOverride?: { model: string; baseURL: string; apiKey: string }) { - this.defaultHeaders = defaultHeaders + this.defaultHeaders = filterAnthropicHeaders(defaultHeaders) this.reasoningEffort = reasoningEffort this.providerOverride = providerOverride } @@ -1099,7 +1125,7 @@ class OpenAIShimMessages { params, defaultHeaders: { ...this.defaultHeaders, - ...(options?.headers ?? {}), + ...filterAnthropicHeaders(options?.headers), ...COPILOT_HEADERS, }, signal: options?.signal, @@ -1131,7 +1157,7 @@ class OpenAIShimMessages { params, defaultHeaders: { ...this.defaultHeaders, - ...(options?.headers ?? {}), + ...filterAnthropicHeaders(options?.headers), }, signal: options?.signal, }) @@ -1223,7 +1249,7 @@ class OpenAIShimMessages { const headers: Record<string, string> = { 'Content-Type': 'application/json', ...this.defaultHeaders, - ...(options?.headers ?? {}), + ...filterAnthropicHeaders(options?.headers), } const isGemini = isGeminiMode()