fix: scrub canonical Anthropic headers from 3P shim requests (#499)

* Stop canonical Anthropic headers from leaking into 3P shim requests

The remaining blocker from PR #268 was that canonical Anthropic headers such as
`anthropic-version` and `anthropic-beta` could still ride through supported 3P
paths even after the earlier x-anthropic/x-claude scrubber work. This tightens
header filtering inside the shim itself so direct defaultHeaders, env-driven
client setup, providerOverride routing, and per-request header injection all
share the same scrubber.

Constraint: Preserve non-Anthropic custom headers and provider auth while stripping only Anthropic/OpenClaude-internal headers from 3P requests
Rejected: Rely on client.ts filtering alone | direct shim construction and per-request headers would still leave gaps
Confidence: high
Scope-risk: narrow
Reversibility: clean
Directive: Keep header scrubbing centralized in the shim so new call paths do not reopen 3P leakage bugs
Tested: bun test src/services/api/openaiShim.test.ts src/services/api/client.test.ts src/utils/context.test.ts
Tested: bun run test:provider
Tested: bun run build && node dist/cli.mjs --version
Not-tested: bun run typecheck (repository baseline currently fails in many unrelated files)

* Keep OpenAI client tests from restoring undefined env as strings

The new header-leak regression tests in client.test.ts restored environment
variables via direct assignment, which can leave literal "undefined" strings in
process.env when the original value was unset. This switches the teardown over
to the same restore helper pattern already used in openaiShim.test.ts.

Constraint: Keep the fix limited to test hygiene without altering runtime behavior
Rejected: Restore only the two env vars Copilot called out | using one helper for all test env restores is simpler and less error-prone
Confidence: high
Scope-risk: narrow
Reversibility: clean
Directive: Use restore helpers for env teardown in tests so unset values stay deleted instead of becoming the string "undefined"
Tested: bun test src/services/api/client.test.ts src/services/api/openaiShim.test.ts src/utils/context.test.ts
Not-tested: Full provider suite (unchanged runtime path)

* Prevent GitHub Codex requests from forwarding unsanitized Anthropic headers

A base-sync with upstream exposed a separate GitHub+Codex transport branch
that still merged per-request headers raw before adding Copilot headers.
This keeps the filter aligned across Codex-family paths and adds explicit
regression tests for GitHub Codex routing, including providerOverride.

Constraint: Must not push or modify GitHub state while validating the reviewer concern
Rejected: Leave the GitHub Codex path unchanged | runtime repro showed anthropic-* headers still leaked after the upstream sync
Confidence: high
Scope-risk: narrow
Directive: Keep header scrubbing consistent across every Codex-family transport branch when provider routing changes
Tested: bun test src/services/api/openaiShim.test.ts
Tested: bun test src/services/api/client.test.ts src/services/api/codexShim.test.ts src/services/api/providerConfig.github.test.ts
Tested: bun run build
Not-tested: Full repository test suite
This commit is contained in:
ibaaaaal
2026-04-10 20:56:40 +07:00
committed by GitHub
parent 692471850f
commit 07621a6f8d
3 changed files with 419 additions and 15 deletions

View File

@@ -14,6 +14,7 @@ type ShimClient = {
const originalFetch = globalThis.fetch const originalFetch = globalThis.fetch
const originalMacro = (globalThis as Record<string, unknown>).MACRO const originalMacro = (globalThis as Record<string, unknown>).MACRO
const originalEnv = { const originalEnv = {
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
GEMINI_API_KEY: process.env.GEMINI_API_KEY, GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GEMINI_MODEL: process.env.GEMINI_MODEL, GEMINI_MODEL: process.env.GEMINI_MODEL,
@@ -25,6 +26,15 @@ const originalEnv = {
OPENAI_MODEL: process.env.OPENAI_MODEL, OPENAI_MODEL: process.env.OPENAI_MODEL,
ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY, ANTHROPIC_API_KEY: process.env.ANTHROPIC_API_KEY,
ANTHROPIC_AUTH_TOKEN: process.env.ANTHROPIC_AUTH_TOKEN, ANTHROPIC_AUTH_TOKEN: process.env.ANTHROPIC_AUTH_TOKEN,
ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS,
}
function restoreEnv(key: string, value: string | undefined): void {
if (value === undefined) {
delete process.env[key]
} else {
process.env[key] = value
}
} }
beforeEach(() => { beforeEach(() => {
@@ -35,27 +45,31 @@ beforeEach(() => {
process.env.GEMINI_BASE_URL = 'https://gemini.example/v1beta/openai' process.env.GEMINI_BASE_URL = 'https://gemini.example/v1beta/openai'
process.env.GEMINI_AUTH_MODE = 'api-key' process.env.GEMINI_AUTH_MODE = 'api-key'
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.GOOGLE_API_KEY delete process.env.GOOGLE_API_KEY
delete process.env.OPENAI_API_KEY delete process.env.OPENAI_API_KEY
delete process.env.OPENAI_BASE_URL delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_MODEL delete process.env.OPENAI_MODEL
delete process.env.ANTHROPIC_API_KEY delete process.env.ANTHROPIC_API_KEY
delete process.env.ANTHROPIC_AUTH_TOKEN delete process.env.ANTHROPIC_AUTH_TOKEN
delete process.env.ANTHROPIC_CUSTOM_HEADERS
}) })
afterEach(() => { afterEach(() => {
;(globalThis as Record<string, unknown>).MACRO = originalMacro ;(globalThis as Record<string, unknown>).MACRO = originalMacro
process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI)
process.env.GEMINI_API_KEY = originalEnv.GEMINI_API_KEY restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI)
process.env.GEMINI_MODEL = originalEnv.GEMINI_MODEL restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY)
process.env.GEMINI_BASE_URL = originalEnv.GEMINI_BASE_URL restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL)
process.env.GEMINI_AUTH_MODE = originalEnv.GEMINI_AUTH_MODE restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL)
process.env.GOOGLE_API_KEY = originalEnv.GOOGLE_API_KEY restoreEnv('GEMINI_AUTH_MODE', originalEnv.GEMINI_AUTH_MODE)
process.env.OPENAI_API_KEY = originalEnv.OPENAI_API_KEY restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY)
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY)
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL)
process.env.ANTHROPIC_API_KEY = originalEnv.ANTHROPIC_API_KEY restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL)
process.env.ANTHROPIC_AUTH_TOKEN = originalEnv.ANTHROPIC_AUTH_TOKEN restoreEnv('ANTHROPIC_API_KEY', originalEnv.ANTHROPIC_API_KEY)
restoreEnv('ANTHROPIC_AUTH_TOKEN', originalEnv.ANTHROPIC_AUTH_TOKEN)
restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS)
globalThis.fetch = originalFetch globalThis.fetch = originalFetch
}) })
@@ -122,3 +136,135 @@ test('routes Gemini provider requests through the OpenAI-compatible shim', async
model: 'gemini-2.0-flash', model: 'gemini-2.0-flash',
}) })
}) })
test('strips Anthropic-specific custom headers before sending OpenAI-compatible shim requests', async () => {
  let observedHeaders: Headers | undefined
  process.env.CLAUDE_CODE_USE_OPENAI = '1'
  process.env.OPENAI_API_KEY = 'openai-test-key'
  process.env.OPENAI_BASE_URL = 'http://example.test/v1'
  process.env.OPENAI_MODEL = 'gpt-4o'
  // Mix of Anthropic-internal headers (must be stripped) and a custom one (must survive).
  const customHeaderLines = [
    'anthropic-version: 2023-06-01',
    'anthropic-beta: prompt-caching-2024-07-31',
    'x-anthropic-additional-protection: true',
    'x-claude-remote-session-id: remote-123',
    'x-app: cli',
    'x-safe-header: keep-me',
  ]
  process.env.ANTHROPIC_CUSTOM_HEADERS = customHeaderLines.join('\n')
  // Minimal successful non-streaming chat completion the mock transport returns.
  const responseBody = {
    id: 'chatcmpl-openai',
    model: 'gpt-4o',
    choices: [
      { message: { role: 'assistant', content: 'ok' }, finish_reason: 'stop' },
    ],
    usage: { prompt_tokens: 8, completion_tokens: 3, total_tokens: 11 },
  }
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response(JSON.stringify(responseBody), {
      headers: { 'Content-Type': 'application/json' },
    })
  }) as FetchType
  const client = (await getAnthropicClient({
    maxRetries: 0,
    model: 'gpt-4o',
  })) as unknown as ShimClient
  await client.beta.messages.create({
    model: 'gpt-4o',
    system: 'test system',
    messages: [{ role: 'user', content: 'hello' }],
    max_tokens: 64,
    stream: false,
  })
  const blockedHeaders = [
    'anthropic-version',
    'anthropic-beta',
    'x-anthropic-additional-protection',
    'x-claude-remote-session-id',
    'x-app',
  ]
  for (const name of blockedHeaders) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
  expect(observedHeaders?.get('authorization')).toBe('Bearer openai-test-key')
})
test('strips Anthropic-specific custom headers on providerOverride shim requests too', async () => {
  let observedHeaders: Headers | undefined
  // Env-driven Anthropic headers must not ride along even when routing via providerOverride.
  const customHeaderLines = [
    'anthropic-version: 2023-06-01',
    'anthropic-beta: prompt-caching-2024-07-31',
    'x-claude-remote-session-id: remote-123',
    'x-safe-header: keep-me',
  ]
  process.env.ANTHROPIC_CUSTOM_HEADERS = customHeaderLines.join('\n')
  const responseBody = {
    id: 'chatcmpl-provider-override',
    model: 'gpt-4o',
    choices: [
      { message: { role: 'assistant', content: 'ok' }, finish_reason: 'stop' },
    ],
    usage: { prompt_tokens: 8, completion_tokens: 3, total_tokens: 11 },
  }
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response(JSON.stringify(responseBody), {
      headers: { 'Content-Type': 'application/json' },
    })
  }) as FetchType
  const client = (await getAnthropicClient({
    maxRetries: 0,
    providerOverride: {
      model: 'gpt-4o',
      baseURL: 'http://example.test/v1',
      apiKey: 'provider-test-key',
    },
  })) as unknown as ShimClient
  await client.beta.messages.create({
    model: 'unused',
    system: 'test system',
    messages: [{ role: 'user', content: 'hello' }],
    max_tokens: 64,
    stream: false,
  })
  for (const name of ['anthropic-version', 'anthropic-beta', 'x-claude-remote-session-id']) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
  expect(observedHeaders?.get('authorization')).toBe('Bearer provider-test-key')
})

View File

@@ -7,6 +7,10 @@ const originalEnv = {
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL, OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
OPENAI_API_KEY: process.env.OPENAI_API_KEY, OPENAI_API_KEY: process.env.OPENAI_API_KEY,
OPENAI_MODEL: process.env.OPENAI_MODEL, OPENAI_MODEL: process.env.OPENAI_MODEL,
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
GITHUB_TOKEN: process.env.GITHUB_TOKEN,
GH_TOKEN: process.env.GH_TOKEN,
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI, CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
GEMINI_API_KEY: process.env.GEMINI_API_KEY, GEMINI_API_KEY: process.env.GEMINI_API_KEY,
GOOGLE_API_KEY: process.env.GOOGLE_API_KEY, GOOGLE_API_KEY: process.env.GOOGLE_API_KEY,
@@ -15,6 +19,7 @@ const originalEnv = {
GEMINI_BASE_URL: process.env.GEMINI_BASE_URL, GEMINI_BASE_URL: process.env.GEMINI_BASE_URL,
GEMINI_MODEL: process.env.GEMINI_MODEL, GEMINI_MODEL: process.env.GEMINI_MODEL,
GOOGLE_CLOUD_PROJECT: process.env.GOOGLE_CLOUD_PROJECT, GOOGLE_CLOUD_PROJECT: process.env.GOOGLE_CLOUD_PROJECT,
ANTHROPIC_CUSTOM_HEADERS: process.env.ANTHROPIC_CUSTOM_HEADERS,
} }
const originalFetch = globalThis.fetch const originalFetch = globalThis.fetch
@@ -70,6 +75,10 @@ beforeEach(() => {
process.env.OPENAI_BASE_URL = 'http://example.test/v1' process.env.OPENAI_BASE_URL = 'http://example.test/v1'
process.env.OPENAI_API_KEY = 'test-key' process.env.OPENAI_API_KEY = 'test-key'
delete process.env.OPENAI_MODEL delete process.env.OPENAI_MODEL
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.GITHUB_TOKEN
delete process.env.GH_TOKEN
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.GEMINI_API_KEY delete process.env.GEMINI_API_KEY
delete process.env.GOOGLE_API_KEY delete process.env.GOOGLE_API_KEY
@@ -78,12 +87,17 @@ beforeEach(() => {
delete process.env.GEMINI_BASE_URL delete process.env.GEMINI_BASE_URL
delete process.env.GEMINI_MODEL delete process.env.GEMINI_MODEL
delete process.env.GOOGLE_CLOUD_PROJECT delete process.env.GOOGLE_CLOUD_PROJECT
delete process.env.ANTHROPIC_CUSTOM_HEADERS
}) })
afterEach(() => { afterEach(() => {
restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL) restoreEnv('OPENAI_BASE_URL', originalEnv.OPENAI_BASE_URL)
restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY) restoreEnv('OPENAI_API_KEY', originalEnv.OPENAI_API_KEY)
restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL) restoreEnv('OPENAI_MODEL', originalEnv.OPENAI_MODEL)
restoreEnv('CLAUDE_CODE_USE_GITHUB', originalEnv.CLAUDE_CODE_USE_GITHUB)
restoreEnv('GITHUB_TOKEN', originalEnv.GITHUB_TOKEN)
restoreEnv('GH_TOKEN', originalEnv.GH_TOKEN)
restoreEnv('CLAUDE_CODE_USE_OPENAI', originalEnv.CLAUDE_CODE_USE_OPENAI)
restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI) restoreEnv('CLAUDE_CODE_USE_GEMINI', originalEnv.CLAUDE_CODE_USE_GEMINI)
restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY) restoreEnv('GEMINI_API_KEY', originalEnv.GEMINI_API_KEY)
restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY) restoreEnv('GOOGLE_API_KEY', originalEnv.GOOGLE_API_KEY)
@@ -92,9 +106,227 @@ afterEach(() => {
restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL) restoreEnv('GEMINI_BASE_URL', originalEnv.GEMINI_BASE_URL)
restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL) restoreEnv('GEMINI_MODEL', originalEnv.GEMINI_MODEL)
restoreEnv('GOOGLE_CLOUD_PROJECT', originalEnv.GOOGLE_CLOUD_PROJECT) restoreEnv('GOOGLE_CLOUD_PROJECT', originalEnv.GOOGLE_CLOUD_PROJECT)
restoreEnv('ANTHROPIC_CUSTOM_HEADERS', originalEnv.ANTHROPIC_CUSTOM_HEADERS)
globalThis.fetch = originalFetch globalThis.fetch = originalFetch
}) })
test('strips canonical Anthropic headers from direct shim defaultHeaders', async () => {
  let observedHeaders: Headers | undefined
  // Minimal successful non-streaming completion returned by the mocked transport.
  const responseBody = {
    id: 'chatcmpl-1',
    model: 'gpt-4o',
    choices: [
      { message: { role: 'assistant', content: 'ok' }, finish_reason: 'stop' },
    ],
    usage: { prompt_tokens: 8, completion_tokens: 3, total_tokens: 11 },
  }
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response(JSON.stringify(responseBody), {
      headers: { 'Content-Type': 'application/json' },
    })
  }) as FetchType
  const client = createOpenAIShimClient({
    defaultHeaders: {
      'anthropic-version': '2023-06-01',
      'anthropic-beta': 'prompt-caching-2024-07-31',
      'x-anthropic-additional-protection': 'true',
      'x-claude-remote-session-id': 'remote-123',
      'x-app': 'cli',
      'x-client-app': 'sdk',
      'x-safe-header': 'keep-me',
    },
  }) as OpenAIShimClient
  await client.beta.messages.create({
    model: 'gpt-4o',
    system: 'test system',
    messages: [{ role: 'user', content: 'hello' }],
    max_tokens: 64,
    stream: false,
  })
  const blockedHeaders = [
    'anthropic-version',
    'anthropic-beta',
    'x-anthropic-additional-protection',
    'x-claude-remote-session-id',
    'x-app',
    'x-client-app',
  ]
  for (const name of blockedHeaders) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
})
test('strips canonical Anthropic headers from per-request shim headers too', async () => {
  let observedHeaders: Headers | undefined
  const responseBody = {
    id: 'chatcmpl-1',
    model: 'gpt-4o',
    choices: [
      { message: { role: 'assistant', content: 'ok' }, finish_reason: 'stop' },
    ],
    usage: { prompt_tokens: 8, completion_tokens: 3, total_tokens: 11 },
  }
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response(JSON.stringify(responseBody), {
      headers: { 'Content-Type': 'application/json' },
    })
  }) as FetchType
  // No defaultHeaders here: the leak vector under test is per-request header injection.
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  await client.beta.messages.create(
    {
      model: 'gpt-4o',
      system: 'test system',
      messages: [{ role: 'user', content: 'hello' }],
      max_tokens: 64,
      stream: false,
    },
    {
      headers: {
        'anthropic-version': '2023-06-01',
        'anthropic-beta': 'prompt-caching-2024-07-31',
        'x-safe-header': 'keep-me',
      },
    },
  )
  for (const name of ['anthropic-version', 'anthropic-beta']) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
})
test('strips Anthropic-specific headers on GitHub Codex transport requests', async () => {
  let observedHeaders: Headers | undefined
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  process.env.OPENAI_API_KEY = 'github-test-key'
  delete process.env.OPENAI_BASE_URL
  delete process.env.OPENAI_MODEL
  // The Codex transport streams SSE, so the mock answers with an empty event stream.
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response('', {
      status: 200,
      headers: { 'Content-Type': 'text/event-stream' },
    })
  }) as FetchType
  const client = createOpenAIShimClient({}) as OpenAIShimClient
  await client.beta.messages.create(
    {
      model: 'github:gpt-5-codex',
      system: 'test system',
      messages: [{ role: 'user', content: 'hello' }],
      max_tokens: 64,
      stream: true,
    },
    {
      headers: {
        'anthropic-version': '2023-06-01',
        'anthropic-beta': 'prompt-caching-2024-07-31',
        'x-anthropic-additional-protection': 'true',
        'x-safe-header': 'keep-me',
      },
    },
  )
  const blockedHeaders = [
    'anthropic-version',
    'anthropic-beta',
    'x-anthropic-additional-protection',
  ]
  for (const name of blockedHeaders) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
  expect(observedHeaders?.get('authorization')).toBe('Bearer github-test-key')
  expect(observedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7')
})
test('strips Anthropic-specific headers on GitHub Codex transport with providerOverride API key', async () => {
  let observedHeaders: Headers | undefined
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  // The env key must lose to the providerOverride apiKey below.
  process.env.OPENAI_API_KEY = 'env-should-not-win'
  delete process.env.OPENAI_BASE_URL
  delete process.env.OPENAI_MODEL
  globalThis.fetch = (async (_input, init) => {
    observedHeaders = new Headers(init?.headers)
    return new Response('', {
      status: 200,
      headers: { 'Content-Type': 'text/event-stream' },
    })
  }) as FetchType
  const client = createOpenAIShimClient({
    providerOverride: {
      model: 'github:gpt-5-codex',
      baseURL: 'https://api.githubcopilot.com',
      apiKey: 'provider-override-key',
    },
  }) as OpenAIShimClient
  await client.beta.messages.create(
    {
      model: 'ignored',
      system: 'test system',
      messages: [{ role: 'user', content: 'hello' }],
      max_tokens: 64,
      stream: true,
    },
    {
      headers: {
        'anthropic-version': '2023-06-01',
        'x-claude-remote-session-id': 'remote-123',
        'x-safe-header': 'keep-me',
      },
    },
  )
  for (const name of ['anthropic-version', 'x-claude-remote-session-id']) {
    expect(observedHeaders?.get(name)).toBeNull()
  }
  expect(observedHeaders?.get('x-safe-header')).toBe('keep-me')
  expect(observedHeaders?.get('authorization')).toBe('Bearer provider-override-key')
  expect(observedHeaders?.get('editor-plugin-version')).toBe('copilot-chat/0.26.7')
})
test('preserves usage from final OpenAI stream chunk with empty choices', async () => { test('preserves usage from final OpenAI stream chunk with empty choices', async () => {
globalThis.fetch = (async (_input, init) => { globalThis.fetch = (async (_input, init) => {
const url = typeof _input === 'string' ? _input : _input.url const url = typeof _input === 'string' ? _input : _input.url

View File

@@ -80,6 +80,32 @@ function isGithubModelsMode(): boolean {
return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
} }
// Drop Anthropic/OpenClaude-internal headers (and any inbound credential
// headers) so they never reach third-party providers; every other header
// passes through with its original casing. Returns {} for undefined input.
function filterAnthropicHeaders(
  headers: Record<string, string> | undefined,
): Record<string, string> {
  if (!headers) return {}
  const blockedExact = new Set([
    'x-app',
    'x-client-app',
    'authorization',
    'x-api-key',
    'api-key',
  ])
  const blockedPrefixes = ['x-anthropic', 'anthropic-', 'x-claude']
  const kept = Object.entries(headers).filter(([name]) => {
    const lower = name.toLowerCase()
    if (blockedExact.has(lower)) return false
    return !blockedPrefixes.some((prefix) => lower.startsWith(prefix))
  })
  return Object.fromEntries(kept)
}
function hasGeminiApiHost(baseUrl: string | undefined): boolean { function hasGeminiApiHost(baseUrl: string | undefined): boolean {
if (!baseUrl) return false if (!baseUrl) return false
@@ -989,7 +1015,7 @@ class OpenAIShimMessages {
private providerOverride?: { model: string; baseURL: string; apiKey: string } private providerOverride?: { model: string; baseURL: string; apiKey: string }
constructor(defaultHeaders: Record<string, string>, reasoningEffort?: 'low' | 'medium' | 'high' | 'xhigh', providerOverride?: { model: string; baseURL: string; apiKey: string }) { constructor(defaultHeaders: Record<string, string>, reasoningEffort?: 'low' | 'medium' | 'high' | 'xhigh', providerOverride?: { model: string; baseURL: string; apiKey: string }) {
this.defaultHeaders = defaultHeaders this.defaultHeaders = filterAnthropicHeaders(defaultHeaders)
this.reasoningEffort = reasoningEffort this.reasoningEffort = reasoningEffort
this.providerOverride = providerOverride this.providerOverride = providerOverride
} }
@@ -1099,7 +1125,7 @@ class OpenAIShimMessages {
params, params,
defaultHeaders: { defaultHeaders: {
...this.defaultHeaders, ...this.defaultHeaders,
...(options?.headers ?? {}), ...filterAnthropicHeaders(options?.headers),
...COPILOT_HEADERS, ...COPILOT_HEADERS,
}, },
signal: options?.signal, signal: options?.signal,
@@ -1131,7 +1157,7 @@ class OpenAIShimMessages {
params, params,
defaultHeaders: { defaultHeaders: {
...this.defaultHeaders, ...this.defaultHeaders,
...(options?.headers ?? {}), ...filterAnthropicHeaders(options?.headers),
}, },
signal: options?.signal, signal: options?.signal,
}) })
@@ -1223,7 +1249,7 @@ class OpenAIShimMessages {
const headers: Record<string, string> = { const headers: Record<string, string> = {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
...this.defaultHeaders, ...this.defaultHeaders,
...(options?.headers ?? {}), ...filterAnthropicHeaders(options?.headers),
} }
const isGemini = isGeminiMode() const isGemini = isGeminiMode()