Compare commits

...

11 Commits

Author SHA1 Message Date
gnanam1990
3a25d71004 fix: comprehensive tool argument normalization hardening
- Remove all { raw: ... } returns that caused InputValidationError with
  z.strictObject schemas — return {} instead for clean Zod errors
- Extend normalizeAtStop buffering to all mapped tools (Read, Write,
  Edit, Glob, Grep) so streaming paths also get normalized
- Make repairPossiblyTruncatedObjectJson generic — repair any valid
  JSON object, not just ones with a command field
- Export hasToolFieldMapping for streaming normalizeAtStop decision
- Skip normalization on finish_reason: length to preserve raw truncated
  buffer
- Update all test expectations to match new behavior

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-06 18:38:33 +05:30
gnanam1990
50efbe5614 fix: skip streaming normalization on finish_reason length
Truncated tool calls (finish_reason: 'length') now preserve the raw
buffer instead of normalizing into executable commands, preventing
incomplete commands from becoming runnable.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-06 18:08:01 +05:30
gnanam1990
b20d878b76 fix: extend tool argument normalization to all tools and harden edge cases
- Extend STRING_ARGUMENT_TOOL_FIELDS to normalize Read, Write, Edit,
  Glob, and Grep plain-string arguments (fixes "Invalid tool parameters"
  errors reported by VennDev)
- Normalize streaming Bash args regardless of finish_reason, not only
  when finish_reason is 'tool_calls'
- Broaden isLikelyStructuredObjectLiteral to catch malformed object-shaped
  strings like {command:"pwd"} and {'command':'pwd'} (fixes CR2 from
  Vasanthdev2004)
- Apply blank/object-literal guard to all tools, not just Bash
- Extract duplicated JSON repair suffix combinations into shared constant
- Add 32 isolated unit tests for toolArgumentNormalization

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-06 18:01:24 +05:30
gnanam1990
f2fc454baf test: isolate provider profile env assertions 2026-04-06 17:25:27 +05:30
gnanam1990
10f17d38ea test: stabilize rebased PR 385 checks 2026-04-06 17:25:01 +05:30
gnanam1990
889c472ddb fix: preserve malformed Bash JSON literals 2026-04-06 17:22:58 +05:30
gnanam1990
0ad7746b7a fix: keep invalid Bash tool args from becoming commands 2026-04-06 17:22:58 +05:30
gnanam1990
91df124064 fix: normalize malformed Bash tool arguments from OpenAI-compatible providers 2026-04-06 17:22:58 +05:30
Kevin Codex
39f3b2babd test: isolate latest main suite regressions (#427) 2026-04-06 19:50:31 +08:00
Agent_J
ff7d49990d feat: GitHub provider lifecycle and onboarding hardening (#351)
* feat: improve GitHub provider onboarding and lifecycle

* fix: address copilot review in provider manager

* fix: address follow-up copilot review comments

* test: resolve rebase conflict in provider profiles suite

* fix: clear stale github hydrated marker

* fix: harden github onboarding auth precedence

* fix: remove merge markers from provider tests

* fix: resolve latest copilot onboarding comments

---------

Co-authored-by: KRATOS <84986124+gnanam1990@users.noreply.github.com>
2026-04-06 19:18:58 +08:00
Vasanth T
8ece290087 fix: suppress startup dialogs when input is buffered (#423)
Co-authored-by: OpenClaude Worker 3 <worker-3@openclaude.local>
2026-04-06 18:31:38 +08:00
29 changed files with 2768 additions and 190 deletions

View File

@@ -2,6 +2,7 @@ import type { Command } from '../../commands.js'
const onboardGithub: Command = {
name: 'onboard-github',
aliases: ['onboarding-github', 'onboardgithub', 'onboardinggithub'],
description:
'Interactive setup for GitHub Models: device login or PAT, saved to secure storage',
type: 'local-jsx',

View File

@@ -0,0 +1,148 @@
import { describe, expect, test } from 'bun:test'
import {
activateGithubOnboardingMode,
applyGithubOnboardingProcessEnv,
buildGithubOnboardingSettingsEnv,
hasExistingGithubModelsLoginToken,
shouldForceGithubRelogin,
} from './onboard-github.js'
// Unit tests for the GitHub onboarding helpers exported from onboard-github.ts.
describe('shouldForceGithubRelogin', () => {
  // Every recognized force flag, bare and dashed, must trigger a re-login.
  test.each(['force', '--force', 'relogin', '--relogin', 'reauth', '--reauth'])(
    'treats %s as force re-login',
    arg => {
      expect(shouldForceGithubRelogin(arg)).toBe(true)
    },
  )
  test('returns false for empty or unknown args', () => {
    expect(shouldForceGithubRelogin('')).toBe(false)
    expect(shouldForceGithubRelogin(undefined)).toBe(false)
    expect(shouldForceGithubRelogin('something-else')).toBe(false)
  })
  // Matching is per whitespace-separated word, not against the whole string.
  test('treats force flags as present in multi-word args', () => {
    expect(shouldForceGithubRelogin('--force extra')).toBe(true)
    expect(shouldForceGithubRelogin('foo --relogin bar')).toBe(true)
    expect(shouldForceGithubRelogin('abc reauth xyz')).toBe(true)
  })
})
describe('hasExistingGithubModelsLoginToken', () => {
  test('returns true when GITHUB_TOKEN is present', () => {
    expect(
      hasExistingGithubModelsLoginToken({ GITHUB_TOKEN: 'token' }, ''),
    ).toBe(true)
  })
  test('returns true when GH_TOKEN is present', () => {
    expect(
      hasExistingGithubModelsLoginToken({ GH_TOKEN: 'token' }, ''),
    ).toBe(true)
  })
  // The second argument stands in for the secure-storage token.
  test('returns true when stored token exists', () => {
    expect(hasExistingGithubModelsLoginToken({}, 'stored-token')).toBe(true)
  })
  test('returns false when both env and stored token are missing', () => {
    expect(hasExistingGithubModelsLoginToken({}, '')).toBe(false)
  })
})
describe('onboarding auth precedence cleanup', () => {
  // Switching to GitHub must scrub stale OpenAI credentials and the
  // applied-provider-profile markers from both the live process env and
  // the settings env that gets persisted.
  test('clears preexisting OpenAI auth when switching to GitHub', () => {
    const env: NodeJS.ProcessEnv = {
      CLAUDE_CODE_USE_OPENAI: '1',
      OPENAI_MODEL: 'gpt-4o',
      OPENAI_API_KEY: 'sk-stale-openai-key',
      OPENAI_ORG: 'org-old',
      OPENAI_PROJECT: 'project-old',
      OPENAI_ORGANIZATION: 'org-legacy',
      OPENAI_BASE_URL: 'https://api.openai.com/v1',
      OPENAI_API_BASE: 'https://api.openai.com/v1',
      CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED: '1',
      CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID: 'profile_old',
    }
    applyGithubOnboardingProcessEnv('github:copilot', env)
    expect(env.CLAUDE_CODE_USE_GITHUB).toBe('1')
    expect(env.OPENAI_MODEL).toBe('github:copilot')
    expect(env.OPENAI_API_KEY).toBeUndefined()
    expect(env.OPENAI_ORG).toBeUndefined()
    expect(env.OPENAI_PROJECT).toBeUndefined()
    expect(env.OPENAI_ORGANIZATION).toBeUndefined()
    expect(env.OPENAI_BASE_URL).toBeUndefined()
    expect(env.OPENAI_API_BASE).toBeUndefined()
    expect(env.CLAUDE_CODE_USE_OPENAI).toBeUndefined()
    expect(env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED).toBeUndefined()
    expect(env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID).toBeUndefined()
    const settingsEnv = buildGithubOnboardingSettingsEnv('github:copilot')
    expect(settingsEnv.CLAUDE_CODE_USE_GITHUB).toBe('1')
    expect(settingsEnv.OPENAI_MODEL).toBe('github:copilot')
    expect(settingsEnv.OPENAI_API_KEY).toBeUndefined()
    expect(settingsEnv.OPENAI_ORG).toBeUndefined()
    expect(settingsEnv.OPENAI_PROJECT).toBeUndefined()
    expect(settingsEnv.OPENAI_ORGANIZATION).toBeUndefined()
  })
})
describe('activateGithubOnboardingMode', () => {
  // Collaborators are injected so no real settings/storage IO happens here.
  test('activates settings/env/hydration in order when merge succeeds', () => {
    const calls: string[] = []
    const result = activateGithubOnboardingMode(' github:copilot ', {
      mergeSettingsEnv: model => {
        calls.push(`merge:${model}`)
        return { ok: true }
      },
      applyProcessEnv: model => {
        calls.push(`apply:${model}`)
      },
      hydrateToken: () => {
        calls.push('hydrate')
      },
      onChangeAPIKey: () => {
        calls.push('onChangeAPIKey')
      },
    })
    expect(result).toEqual({ ok: true })
    // Model is trimmed, and steps run strictly in merge→apply→hydrate→notify order.
    expect(calls).toEqual([
      'merge:github:copilot',
      'apply:github:copilot',
      'hydrate',
      'onChangeAPIKey',
    ])
  })
  test('stops activation when settings merge fails', () => {
    const calls: string[] = []
    const result = activateGithubOnboardingMode(DEFAULT_MODEL_FOR_TESTS, {
      mergeSettingsEnv: () => {
        calls.push('merge')
        return { ok: false, detail: 'settings write failed' }
      },
      applyProcessEnv: () => {
        calls.push('apply')
      },
      hydrateToken: () => {
        calls.push('hydrate')
      },
      onChangeAPIKey: () => {
        calls.push('onChangeAPIKey')
      },
    })
    expect(result).toEqual({ ok: false, detail: 'settings write failed' })
    // Nothing past the failed merge may run.
    expect(calls).toEqual(['merge'])
  })
})
// Declared after use above; safe because test callbacks run only after the
// module has finished evaluating.
const DEFAULT_MODEL_FOR_TESTS = 'github:copilot'

View File

@@ -12,11 +12,20 @@ import {
import type { LocalJSXCommandCall } from '../../types/command.js'
import {
hydrateGithubModelsTokenFromSecureStorage,
readGithubModelsToken,
saveGithubModelsToken,
} from '../../utils/githubModelsCredentials.js'
import { updateSettingsForSource } from '../../utils/settings/settings.js'
const DEFAULT_MODEL = 'github:copilot'
const FORCE_RELOGIN_ARGS = new Set([
'force',
'--force',
'relogin',
'--relogin',
'reauth',
'--reauth',
])
type Step =
| 'menu'
@@ -24,17 +33,72 @@ type Step =
| 'pat'
| 'error'
/**
 * Decide whether the user asked to redo the GitHub login.
 * Any whitespace-separated word of `args` that matches a known force flag
 * (see FORCE_RELOGIN_ARGS) triggers a re-login; comparison is case-insensitive.
 */
export function shouldForceGithubRelogin(args?: string): boolean {
  const trimmed = (args ?? '').trim().toLowerCase()
  if (trimmed === '') {
    return false
  }
  // Check word-by-word so flags buried in longer argument strings still count.
  for (const word of trimmed.split(/\s+/)) {
    if (FORCE_RELOGIN_ARGS.has(word)) {
      return true
    }
  }
  return false
}
/**
 * True when a usable GitHub Models token already exists.
 * Environment tokens (GITHUB_TOKEN, then GH_TOKEN) win; otherwise the
 * supplied `storedToken` — or, when that is not given, the secure-storage
 * token — is consulted. Whitespace-only tokens count as absent.
 */
export function hasExistingGithubModelsLoginToken(
  env: NodeJS.ProcessEnv = process.env,
  storedToken?: string,
): boolean {
  const fromEnv = env.GITHUB_TOKEN?.trim() || env.GH_TOKEN?.trim()
  if (fromEnv) {
    return true
  }
  // Only fall back to secure storage when no explicit storedToken was passed
  // (an empty string deliberately suppresses the storage lookup).
  const stored = storedToken ?? readGithubModelsToken()
  return Boolean(stored?.trim())
}
/**
 * Build the settings-file env block that turns on GitHub Models mode.
 * Sets the GitHub flag plus model, and lists every OpenAI credential and
 * competing provider flag with an explicit `undefined` value so the settings
 * merge removes them.
 */
export function buildGithubOnboardingSettingsEnv(
  model: string,
): Record<string, string | undefined> {
  // Keys that must be scrubbed when GitHub becomes the provider.
  const clearedKeys = [
    'OPENAI_API_KEY',
    'OPENAI_ORG',
    'OPENAI_PROJECT',
    'OPENAI_ORGANIZATION',
    'OPENAI_BASE_URL',
    'OPENAI_API_BASE',
    'CLAUDE_CODE_USE_OPENAI',
    'CLAUDE_CODE_USE_GEMINI',
    'CLAUDE_CODE_USE_BEDROCK',
    'CLAUDE_CODE_USE_VERTEX',
    'CLAUDE_CODE_USE_FOUNDRY',
  ]
  const settingsEnv: Record<string, string | undefined> = {
    CLAUDE_CODE_USE_GITHUB: '1',
    OPENAI_MODEL: model,
  }
  for (const key of clearedKeys) {
    settingsEnv[key] = undefined
  }
  return settingsEnv
}
/**
 * Mutate a live process env so GitHub Models becomes the active provider.
 * Sets the GitHub flag and model, then deletes every OpenAI credential,
 * competing provider flag, and applied-provider-profile marker.
 */
export function applyGithubOnboardingProcessEnv(
  model: string,
  env: NodeJS.ProcessEnv = process.env,
): void {
  env.CLAUDE_CODE_USE_GITHUB = '1'
  env.OPENAI_MODEL = model
  // Keys deleted (not merely set to undefined) so `in` checks also clear.
  const staleKeys = [
    'OPENAI_API_KEY',
    'OPENAI_ORG',
    'OPENAI_PROJECT',
    'OPENAI_ORGANIZATION',
    'OPENAI_BASE_URL',
    'OPENAI_API_BASE',
    'CLAUDE_CODE_USE_OPENAI',
    'CLAUDE_CODE_USE_GEMINI',
    'CLAUDE_CODE_USE_BEDROCK',
    'CLAUDE_CODE_USE_VERTEX',
    'CLAUDE_CODE_USE_FOUNDRY',
    'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED',
    'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID',
  ]
  for (const key of staleKeys) {
    delete env[key]
  }
}
function mergeUserSettingsEnv(model: string): { ok: boolean; detail?: string } {
const { error } = updateSettingsForSource('userSettings', {
env: {
CLAUDE_CODE_USE_GITHUB: '1',
OPENAI_MODEL: model,
CLAUDE_CODE_USE_OPENAI: undefined as any,
CLAUDE_CODE_USE_GEMINI: undefined as any,
CLAUDE_CODE_USE_BEDROCK: undefined as any,
CLAUDE_CODE_USE_VERTEX: undefined as any,
CLAUDE_CODE_USE_FOUNDRY: undefined as any,
},
env: buildGithubOnboardingSettingsEnv(model) as any,
})
if (error) {
return { ok: false, detail: error.message }
@@ -42,6 +106,32 @@ function mergeUserSettingsEnv(model: string): { ok: boolean; detail?: string } {
return { ok: true }
}
export function activateGithubOnboardingMode(
model: string = DEFAULT_MODEL,
options?: {
mergeSettingsEnv?: (model: string) => { ok: boolean; detail?: string }
applyProcessEnv?: (model: string) => void
hydrateToken?: () => void
onChangeAPIKey?: () => void
},
): { ok: boolean; detail?: string } {
const normalizedModel = model.trim() || DEFAULT_MODEL
const mergeSettingsEnv = options?.mergeSettingsEnv ?? mergeUserSettingsEnv
const applyProcessEnv = options?.applyProcessEnv ?? applyGithubOnboardingProcessEnv
const hydrateToken =
options?.hydrateToken ?? hydrateGithubModelsTokenFromSecureStorage
const merged = mergeSettingsEnv(normalizedModel)
if (!merged.ok) {
return merged
}
applyProcessEnv(normalizedModel)
hydrateToken()
options?.onChangeAPIKey?.()
return { ok: true }
}
function OnboardGithub(props: {
onDone: Parameters<LocalJSXCommandCall>[0]
onChangeAPIKey: () => void
@@ -64,19 +154,17 @@ function OnboardGithub(props: {
setStep('error')
return
}
const merged = mergeUserSettingsEnv(model.trim() || DEFAULT_MODEL)
if (!merged.ok) {
const activated = activateGithubOnboardingMode(model, {
onChangeAPIKey,
})
if (!activated.ok) {
setErrorMsg(
`Token saved, but settings were not updated: ${merged.detail ?? 'unknown error'}. ` +
`Token saved, but settings were not updated: ${activated.detail ?? 'unknown error'}. ` +
`Add env CLAUDE_CODE_USE_GITHUB=1 and OPENAI_MODEL to ~/.claude/settings.json manually.`,
)
setStep('error')
return
}
process.env.CLAUDE_CODE_USE_GITHUB = '1'
process.env.OPENAI_MODEL = model.trim() || DEFAULT_MODEL
hydrateGithubModelsTokenFromSecureStorage()
onChangeAPIKey()
onDone(
'GitHub Models onboard complete. Token stored in secure storage; user settings updated. Restart if the model does not switch.',
{ display: 'user' },
@@ -147,11 +235,11 @@ function OnboardGithub(props: {
{deviceHint.verification_uri}
</Text>
<Text dimColor>
A browser window may have opened. Waiting for authorization
A browser window may have opened. Waiting for authorization...
</Text>
</>
) : (
<Text dimColor>Requesting device code from GitHub</Text>
<Text dimColor>Requesting device code from GitHub...</Text>
)}
<Spinner />
</Box>
@@ -206,7 +294,7 @@ function OnboardGithub(props: {
<Text bold>GitHub Models setup</Text>
<Text dimColor>
Stores your token in the OS credential store (macOS Keychain when available)
and enables CLAUDE_CODE_USE_GITHUB in your user settings no export
and enables CLAUDE_CODE_USE_GITHUB in your user settings - no export
GITHUB_TOKEN needed for future runs.
</Text>
<Select
@@ -227,7 +315,28 @@ function OnboardGithub(props: {
)
}
export const call: LocalJSXCommandCall = async (onDone, context) => {
export const call: LocalJSXCommandCall = async (onDone, context, args) => {
const forceRelogin = shouldForceGithubRelogin(args)
if (hasExistingGithubModelsLoginToken() && !forceRelogin) {
const activated = activateGithubOnboardingMode(DEFAULT_MODEL, {
onChangeAPIKey: context.onChangeAPIKey,
})
if (!activated.ok) {
onDone(
`GitHub token detected, but settings activation failed: ${activated.detail ?? 'unknown error'}. ` +
'Set CLAUDE_CODE_USE_GITHUB=1 and OPENAI_MODEL=github:copilot in user settings manually.',
{ display: 'system' },
)
return null
}
onDone(
'GitHub Models already authorized. Activated GitHub Models mode using your existing token. Use /onboard-github --force to re-authenticate.',
{ display: 'user' },
)
return null
}
return (
<OnboardGithub
onDone={onDone}

View File

@@ -275,6 +275,21 @@ test('buildCurrentProviderSummary does not relabel local gpt-5.4 providers as Co
expect(summary.endpointLabel).toBe('http://127.0.0.1:8080/v1')
})
test('buildCurrentProviderSummary recognizes GitHub Models mode', () => {
const summary = buildCurrentProviderSummary({
processEnv: {
CLAUDE_CODE_USE_GITHUB: '1',
OPENAI_MODEL: 'github:copilot',
OPENAI_BASE_URL: 'https://models.github.ai/inference',
},
persisted: null,
})
expect(summary.providerLabel).toBe('GitHub Models')
expect(summary.modelLabel).toBe('github:copilot')
expect(summary.endpointLabel).toBe('https://models.github.ai/inference')
})
test('getProviderWizardDefaults ignores poisoned current provider values', () => {
const defaults = getProviderWizardDefaults({
OPENAI_API_KEY: 'sk-secret-12345678',

View File

@@ -178,6 +178,23 @@ export function buildCurrentProviderSummary(options?: {
}
}
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
return {
providerLabel: 'GitHub Models',
modelLabel: getSafeDisplayValue(
processEnv.OPENAI_MODEL ?? 'github:copilot',
processEnv,
),
endpointLabel: getSafeDisplayValue(
processEnv.OPENAI_BASE_URL ??
processEnv.OPENAI_API_BASE ??
'https://models.github.ai/inference',
processEnv,
),
savedProfileLabel,
}
}
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_OPENAI)) {
const request = resolveProviderRequest({
model: processEnv.OPENAI_MODEL,

View File

@@ -5,6 +5,7 @@ import { useKeybinding } from '../keybindings/useKeybinding.js'
import type { ProviderProfile } from '../utils/config.js'
import {
addProviderProfile,
applyActiveProviderProfileFromConfig,
deleteProviderProfile,
getActiveProviderProfile,
getProviderPresetDefaults,
@@ -14,6 +15,14 @@ import {
type ProviderProfileInput,
updateProviderProfile,
} from '../utils/providerProfiles.js'
import {
clearGithubModelsToken,
GITHUB_MODELS_HYDRATED_ENV_MARKER,
hydrateGithubModelsTokenFromSecureStorage,
readGithubModelsToken,
} from '../utils/githubModelsCredentials.js'
import { isEnvTruthy } from '../utils/envUtils.js'
import { updateSettingsForSource } from '../utils/settings/settings.js'
import { Select } from './CustomSelect/index.js'
import { Pane } from './design-system/Pane.js'
import TextInput from './TextInput.js'
@@ -75,6 +84,13 @@ const FORM_STEPS: Array<{
},
]
const GITHUB_PROVIDER_ID = '__github_models__'
const GITHUB_PROVIDER_LABEL = 'GitHub Models'
const GITHUB_PROVIDER_DEFAULT_MODEL = 'github:copilot'
const GITHUB_PROVIDER_DEFAULT_BASE_URL = 'https://models.github.ai/inference'
type GithubCredentialSource = 'stored' | 'env' | 'none'
function toDraft(profile: ProviderProfile): ProviderDraft {
return {
name: profile.name,
@@ -102,11 +118,65 @@ function profileSummary(profile: ProviderProfile, isActive: boolean): string {
return `${providerKind} · ${profile.baseUrl} · ${profile.model} · ${keyInfo}${activeSuffix}`
}
/**
 * Report where a GitHub token would come from: secure storage ('stored'),
 * the environment ('env'), or nowhere ('none'). Secure storage takes
 * precedence over environment variables for labeling purposes.
 */
function getGithubCredentialSource(
  processEnv: NodeJS.ProcessEnv = process.env,
): GithubCredentialSource {
  const stored = readGithubModelsToken()?.trim()
  if (stored) {
    return 'stored'
  }
  const envToken =
    processEnv.GITHUB_TOKEN?.trim() || processEnv.GH_TOKEN?.trim()
  return envToken ? 'env' : 'none'
}
/**
 * GitHub Models appears as a selectable provider when it is already the
 * active mode, or when any credential (stored or environment) exists.
 */
function isGithubProviderAvailable(
  processEnv: NodeJS.ProcessEnv = process.env,
): boolean {
  return (
    isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB) ||
    getGithubCredentialSource(processEnv) !== 'none'
  )
}
/**
 * Model label for the GitHub pseudo-provider. OPENAI_MODEL is honored only
 * while GitHub mode is active; otherwise the default model is reported.
 */
function getGithubProviderModel(
  processEnv: NodeJS.ProcessEnv = process.env,
): string {
  if (!isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
    return GITHUB_PROVIDER_DEFAULT_MODEL
  }
  const configured = processEnv.OPENAI_MODEL?.trim()
  return configured || GITHUB_PROVIDER_DEFAULT_MODEL
}
/**
 * One-line summary of the GitHub pseudo-provider for the manager list:
 * kind, endpoint, model, token status, and an "(active)" suffix when live.
 */
function getGithubProviderSummary(
  isActive: boolean,
  credentialSource: GithubCredentialSource,
  processEnv: NodeJS.ProcessEnv = process.env,
): string {
  // Translate the credential source into its display wording.
  let credentialSummary: string
  if (credentialSource === 'stored') {
    credentialSummary = 'token stored'
  } else if (credentialSource === 'env') {
    credentialSummary = 'token via env'
  } else {
    credentialSummary = 'no token found'
  }
  const suffix = isActive ? ' (active)' : ''
  return `github-models · ${GITHUB_PROVIDER_DEFAULT_BASE_URL} · ${getGithubProviderModel(processEnv)} · ${credentialSummary}${suffix}`
}
export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
const [profiles, setProfiles] = React.useState(() => getProviderProfiles())
const [activeProfileId, setActiveProfileId] = React.useState(
() => getActiveProviderProfile()?.id,
)
const [githubProviderAvailable, setGithubProviderAvailable] = React.useState(() =>
isGithubProviderAvailable(),
)
const [githubCredentialSource, setGithubCredentialSource] = React.useState<GithubCredentialSource>(
() => getGithubCredentialSource(),
)
const [isGithubActive, setIsGithubActive] = React.useState(() =>
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB),
)
const [screen, setScreen] = React.useState<Screen>(
mode === 'first-run' ? 'select-preset' : 'menu',
)
@@ -130,12 +200,116 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
const nextProfiles = getProviderProfiles()
setProfiles(nextProfiles)
setActiveProfileId(getActiveProviderProfile()?.id)
setGithubProviderAvailable(isGithubProviderAvailable())
setGithubCredentialSource(getGithubCredentialSource())
setIsGithubActive(isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB))
}
// Strip every startup provider-override flag from persisted user settings so
// the next launch does not force a particular provider. Returns the settings
// error message on failure, or null on success.
function clearStartupProviderOverrideFromUserSettings(): string | null {
  const { error } = updateSettingsForSource('userSettings', {
    env: {
      // `undefined as any` marks each key for removal during the settings merge.
      CLAUDE_CODE_USE_OPENAI: undefined as any,
      CLAUDE_CODE_USE_GEMINI: undefined as any,
      CLAUDE_CODE_USE_GITHUB: undefined as any,
      CLAUDE_CODE_USE_BEDROCK: undefined as any,
      CLAUDE_CODE_USE_VERTEX: undefined as any,
      CLAUDE_CODE_USE_FOUNDRY: undefined as any,
    },
  })
  return error ? error.message : null
}
// Close the provider manager, reporting the given message as a cancellation.
function closeWithCancelled(message: string): void {
  onDone({ action: 'cancelled', message })
}
// Make GitHub Models the active provider: persist the flag and default model
// to user settings, then mirror the change into the live process env and
// re-hydrate the stored token. Returns the settings error message on failure,
// or null on success.
function activateGithubProvider(): string | null {
  const { error } = updateSettingsForSource('userSettings', {
    env: {
      CLAUDE_CODE_USE_GITHUB: '1',
      OPENAI_MODEL: GITHUB_PROVIDER_DEFAULT_MODEL,
      // `undefined as any` removes stale OpenAI credentials and competing
      // provider flags from the persisted settings during the merge.
      OPENAI_API_KEY: undefined as any,
      OPENAI_ORG: undefined as any,
      OPENAI_PROJECT: undefined as any,
      OPENAI_ORGANIZATION: undefined as any,
      OPENAI_BASE_URL: undefined as any,
      OPENAI_API_BASE: undefined as any,
      CLAUDE_CODE_USE_OPENAI: undefined as any,
      CLAUDE_CODE_USE_GEMINI: undefined as any,
      CLAUDE_CODE_USE_BEDROCK: undefined as any,
      CLAUDE_CODE_USE_VERTEX: undefined as any,
      CLAUDE_CODE_USE_FOUNDRY: undefined as any,
    },
  })
  if (error) {
    // Settings write failed — leave the process env untouched.
    return error.message
  }
  // Mirror the persisted state into the running process.
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  process.env.OPENAI_MODEL = GITHUB_PROVIDER_DEFAULT_MODEL
  delete process.env.OPENAI_API_KEY
  delete process.env.OPENAI_ORG
  delete process.env.OPENAI_PROJECT
  delete process.env.OPENAI_ORGANIZATION
  delete process.env.OPENAI_BASE_URL
  delete process.env.OPENAI_API_BASE
  delete process.env.CLAUDE_CODE_USE_OPENAI
  delete process.env.CLAUDE_CODE_USE_GEMINI
  delete process.env.CLAUDE_CODE_USE_BEDROCK
  delete process.env.CLAUDE_CODE_USE_VERTEX
  delete process.env.CLAUDE_CODE_USE_FOUNDRY
  delete process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED
  delete process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID
  // Drop the hydration marker so the token below is hydrated fresh.
  delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
  hydrateGithubModelsTokenFromSecureStorage()
  return null
}
// Remove the GitHub pseudo-provider: clear the stored token, strip the GitHub
// keys from user settings, scrub the process env, then restore any saved
// provider profile. Returns an error/warning message on failure, or null.
function deleteGithubProvider(): string | null {
  // Snapshot the stored token before clearing so we can tell whether the
  // session's GITHUB_TOKEN came from our own hydration.
  const storedTokenBeforeClear = readGithubModelsToken()?.trim()
  const cleared = clearGithubModelsToken()
  if (!cleared.success) {
    return cleared.warning ?? 'Could not clear GitHub credentials.'
  }
  const { error } = updateSettingsForSource('userSettings', {
    env: {
      // `undefined as any` marks each key for removal during the merge.
      CLAUDE_CODE_USE_GITHUB: undefined as any,
      OPENAI_MODEL: undefined as any,
      OPENAI_BASE_URL: undefined as any,
      OPENAI_API_BASE: undefined as any,
    },
  })
  if (error) {
    return error.message
  }
  // Only drop GITHUB_TOKEN from the session when we hydrated it ourselves and
  // it matches the token we just cleared (or nothing was stored) — never
  // delete a token the user exported manually.
  const hydratedTokenInSession = process.env.GITHUB_TOKEN?.trim()
  if (
    process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER] === '1' &&
    hydratedTokenInSession &&
    (!storedTokenBeforeClear || hydratedTokenInSession === storedTokenBeforeClear)
  ) {
    delete process.env.GITHUB_TOKEN
  }
  delete process.env.CLAUDE_CODE_USE_GITHUB
  delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
  delete process.env.OPENAI_MODEL
  delete process.env.OPENAI_API_KEY
  delete process.env.OPENAI_ORG
  delete process.env.OPENAI_PROJECT
  delete process.env.OPENAI_ORGANIZATION
  delete process.env.OPENAI_BASE_URL
  delete process.env.OPENAI_API_BASE
  // Restore active provider profile immediately when one exists.
  applyActiveProviderProfileFromConfig()
  return null
}
function startCreateFromPreset(preset: ProviderPreset): void {
const defaults = getProviderPresetDefaults(preset)
const nextDraft = {
@@ -187,11 +361,20 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
return
}
const isActiveSavedProfile = getActiveProviderProfile()?.id === saved.id
const settingsOverrideError = isActiveSavedProfile
? clearStartupProviderOverrideFromUserSettings()
: null
refreshProfiles()
setStatusMessage(
const successMessage =
editingProfileId
? `Updated provider: ${saved.name}`
: `Added provider: ${saved.name} (now active)`,
: `Added provider: ${saved.name} (now active)`
setStatusMessage(
settingsOverrideError
? `${successMessage}. Warning: could not clear startup provider override (${settingsOverrideError}).`
: successMessage,
)
if (mode === 'first-run') {
@@ -413,6 +596,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
function renderMenu(): React.ReactNode {
const hasProfiles = profiles.length > 0
const hasSelectableProviders = hasProfiles || githubProviderAvailable
const options = [
{
@@ -424,7 +608,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
value: 'activate',
label: 'Set active provider',
description: 'Switch the active provider profile',
disabled: !hasProfiles,
disabled: !hasSelectableProviders,
},
{
value: 'edit',
@@ -436,7 +620,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
value: 'delete',
label: 'Delete provider',
description: 'Remove a provider profile',
disabled: !hasProfiles,
disabled: !hasSelectableProviders,
},
{
value: 'done',
@@ -455,14 +639,25 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
</Text>
{statusMessage && <Text>{statusMessage}</Text>}
<Box flexDirection="column">
{profiles.length === 0 ? (
{profiles.length === 0 && !githubProviderAvailable ? (
<Text dimColor>No provider profiles configured yet.</Text>
) : (
profiles.map(profile => (
<Text key={profile.id} dimColor>
- {profile.name}: {profileSummary(profile, profile.id === activeProfileId)}
</Text>
))
<>
{profiles.map(profile => (
<Text key={profile.id} dimColor>
- {profile.name}: {profileSummary(profile, profile.id === activeProfileId)}
</Text>
))}
{githubProviderAvailable ? (
<Text dimColor>
- {GITHUB_PROVIDER_LABEL}:{' '}
{getGithubProviderSummary(
isGithubActive,
githubCredentialSource,
)}
</Text>
) : null}
</>
)}
</Box>
<Select
@@ -474,7 +669,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
setScreen('select-preset')
break
case 'activate':
if (profiles.length > 0) {
if (hasSelectableProviders) {
setScreen('select-active')
}
break
@@ -484,7 +679,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
}
break
case 'delete':
if (profiles.length > 0) {
if (hasSelectableProviders) {
setScreen('select-delete')
}
break
@@ -504,8 +699,29 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
title: string,
emptyMessage: string,
onSelect: (profileId: string) => void,
options?: { includeGithub?: boolean },
): React.ReactNode {
if (profiles.length === 0) {
const includeGithub = options?.includeGithub ?? false
const selectOptions = profiles.map(profile => ({
value: profile.id,
label:
profile.id === activeProfileId
? `${profile.name} (active)`
: profile.name,
description: `${profile.provider === 'anthropic' ? 'anthropic' : 'openai-compatible'} · ${profile.baseUrl} · ${profile.model}`,
}))
if (includeGithub && githubProviderAvailable) {
selectOptions.push({
value: GITHUB_PROVIDER_ID,
label: isGithubActive
? `${GITHUB_PROVIDER_LABEL} (active)`
: GITHUB_PROVIDER_LABEL,
description: `github-models · ${GITHUB_PROVIDER_DEFAULT_BASE_URL} · ${getGithubProviderModel()}`,
})
}
if (selectOptions.length === 0) {
return (
<Box flexDirection="column" gap={1}>
<Text color="remember" bold>
@@ -528,25 +744,16 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
)
}
const options = profiles.map(profile => ({
value: profile.id,
label:
profile.id === activeProfileId
? `${profile.name} (active)`
: profile.name,
description: `${profile.provider === 'anthropic' ? 'anthropic' : 'openai-compatible'} · ${profile.baseUrl} · ${profile.model}`,
}))
return (
<Box flexDirection="column" gap={1}>
<Text color="remember" bold>
{title}
</Text>
<Select
options={options}
options={selectOptions}
onChange={onSelect}
onCancel={() => setScreen('menu')}
visibleOptionCount={Math.min(10, Math.max(2, options.length))}
visibleOptionCount={Math.min(10, Math.max(2, selectOptions.length))}
/>
</Box>
)
@@ -566,16 +773,36 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
'Set active provider',
'No providers available. Add one first.',
profileId => {
if (profileId === GITHUB_PROVIDER_ID) {
const githubError = activateGithubProvider()
if (githubError) {
setErrorMessage(`Could not activate GitHub provider: ${githubError}`)
setScreen('menu')
return
}
refreshProfiles()
setStatusMessage(`Active provider: ${GITHUB_PROVIDER_LABEL}`)
setScreen('menu')
return
}
const active = setActiveProviderProfile(profileId)
if (!active) {
setErrorMessage('Could not change active provider.')
setScreen('menu')
return
}
const settingsOverrideError =
clearStartupProviderOverrideFromUserSettings()
refreshProfiles()
setStatusMessage(`Active provider: ${active.name}`)
setStatusMessage(
settingsOverrideError
? `Active provider: ${active.name}. Warning: could not clear startup provider override (${settingsOverrideError}).`
: `Active provider: ${active.name}`,
)
setScreen('menu')
},
{ includeGithub: true },
)
break
case 'select-edit':
@@ -592,15 +819,35 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
'Delete provider',
'No providers available. Add one first.',
profileId => {
if (profileId === GITHUB_PROVIDER_ID) {
const githubDeleteError = deleteGithubProvider()
if (githubDeleteError) {
setErrorMessage(`Could not delete GitHub provider: ${githubDeleteError}`)
} else {
refreshProfiles()
setStatusMessage('GitHub provider deleted')
}
setScreen('menu')
return
}
const result = deleteProviderProfile(profileId)
if (!result.removed) {
setErrorMessage('Could not delete provider.')
} else {
const settingsOverrideError = result.activeProfileId
? clearStartupProviderOverrideFromUserSettings()
: null
refreshProfiles()
setStatusMessage('Provider deleted')
setStatusMessage(
settingsOverrideError
? `Provider deleted. Warning: could not clear startup provider override (${settingsOverrideError}).`
: 'Provider deleted',
)
}
setScreen('menu')
},
{ includeGithub: true },
)
break
case 'menu':

View File

@@ -27,6 +27,21 @@ async function flushClipboardCopy(): Promise<void> {
await new Promise(resolve => setTimeout(resolve, 0))
}
/**
 * Poll the exec mock's call log for an invocation of `command`, yielding to
 * pending clipboard work between checks. Resolves with the matching call
 * tuple, or undefined once `attempts` polls are exhausted.
 */
async function waitForExecCall(
  command: string,
  attempts = 20,
): Promise<(typeof execFileNoThrowMock.mock.calls)[number] | undefined> {
  let remaining = attempts
  while (remaining > 0) {
    remaining -= 1
    const match = execFileNoThrowMock.mock.calls.find(
      ([cmd]) => cmd === command,
    )
    if (match) {
      return match
    }
    // Let queued microtasks/timers (the clipboard copy) run before retrying.
    await flushClipboardCopy()
  }
  return undefined
}
describe('Windows clipboard fallback', () => {
beforeEach(() => {
execFileNoThrowMock.mockClear()
@@ -62,9 +77,7 @@ describe('Windows clipboard fallback', () => {
await setClipboard('Привет мир')
await flushClipboardCopy()
const windowsCall = execFileNoThrowMock.mock.calls.find(
([cmd]) => cmd === 'powershell',
)
const windowsCall = await waitForExecCall('powershell')
expect(windowsCall?.[2]).toMatchObject({
stdin: 'ignore',

View File

@@ -237,6 +237,7 @@ import { useOfficialMarketplaceNotification } from 'src/hooks/useOfficialMarketp
import { usePromptsFromClaudeInChrome } from 'src/hooks/usePromptsFromClaudeInChrome.js';
import { getTipToShowOnSpinner, recordShownTip } from 'src/services/tips/tipScheduler.js';
import type { Theme } from 'src/utils/theme.js';
import { isPromptTypingSuppressionActive } from './replInputSuppression.js';
import { checkAndDisableBypassPermissionsIfNeeded, checkAndDisableAutoModeIfNeeded, useKickOffCheckAndDisableBypassPermissionsIfNeeded, useKickOffCheckAndDisableAutoModeIfNeeded } from 'src/utils/permissions/bypassPermissionsKillswitch.js';
import { SandboxManager } from 'src/utils/sandbox/sandbox-adapter.js';
import { SANDBOX_NETWORK_ACCESS_TOOL_NAME } from 'src/cli/structuredIO.js';
@@ -1336,6 +1337,7 @@ export function REPL({
const [inputValue, setInputValueRaw] = useState(() => consumeEarlyInput());
const inputValueRef = useRef(inputValue);
inputValueRef.current = inputValue;
const promptTypingSuppressionActive = isPromptTypingSuppressionActive(isPromptInputActive, inputValue);
const insertTextRef = useRef<{
insert: (text: string) => void;
setInputWithCursor: (value: string, cursor: number) => void;
@@ -2028,7 +2030,7 @@ export function REPL({
if (isMessageSelectorVisible) return 'message-selector';
// Suppress interrupt dialogs while user is actively typing
if (isPromptInputActive) return undefined;
if (promptTypingSuppressionActive) return undefined;
if (sandboxPermissionRequestQueue[0]) return 'sandbox-permission';
// Permission/interactive dialogs (show unless blocked by toolJSX)
@@ -2071,7 +2073,7 @@ export function REPL({
const focusedInputDialog = getFocusedInputDialog();
// True when permission prompts exist but are hidden because the user is typing
const hasSuppressedDialogs = isPromptInputActive && (sandboxPermissionRequestQueue[0] || toolUseConfirmQueue[0] || promptQueue[0] || workerSandboxPermissions.queue[0] || elicitation.queue[0] || showingCostDialog);
const hasSuppressedDialogs = promptTypingSuppressionActive && (sandboxPermissionRequestQueue[0] || toolUseConfirmQueue[0] || promptQueue[0] || workerSandboxPermissions.queue[0] || elicitation.queue[0] || showingCostDialog);
// Keep ref in sync so timer callbacks can read the current value
focusedInputDialogRef.current = focusedInputDialog;

View File

@@ -0,0 +1,18 @@
import { describe, expect, it } from 'bun:test'
import { isPromptTypingSuppressionActive } from './replInputSuppression.js'

// Unit tests for the prompt-typing suppression predicate the REPL uses to
// decide when interrupt/permission dialogs should stay hidden.
describe('isPromptTypingSuppressionActive', () => {
  it('suppresses dialogs when early input already exists', () => {
    const suppressed = isPromptTypingSuppressionActive(false, 'hello')
    expect(suppressed).toBe(true)
  })

  it('does not suppress dialogs for empty or whitespace-only input', () => {
    for (const blankInput of ['', ' ']) {
      expect(isPromptTypingSuppressionActive(false, blankInput)).toBe(false)
    }
  })

  it('keeps suppression active while the typing flag is set', () => {
    const suppressed = isPromptTypingSuppressionActive(true, '')
    expect(suppressed).toBe(true)
  })
})

View File

@@ -0,0 +1,6 @@
/**
 * Decides whether interrupt/permission dialogs should be suppressed because
 * the user is interacting with the prompt.
 *
 * Suppression is active when the prompt-input typing flag is set, or when
 * the input buffer already holds non-whitespace text (e.g. input captured
 * before the dialog would have appeared).
 */
export function isPromptTypingSuppressionActive(
  isPromptInputActive: boolean,
  inputValue: string,
): boolean {
  if (isPromptInputActive) {
    return true
  }
  const hasPendingText = inputValue.trim().length > 0
  return hasPendingText
}

File diff suppressed because it is too large Load Diff

View File

@@ -42,6 +42,10 @@ import {
} from './providerConfig.js'
import { sanitizeSchemaForOpenAICompat } from '../../utils/schemaSanitizer.js'
import { redactSecretValueForDisplay } from '../../utils/providerProfile.js'
import {
normalizeToolArguments,
hasToolFieldMapping,
} from './toolArgumentNormalization.js'
type SecretValueSource = Partial<{
OPENAI_API_KEY: string
@@ -476,6 +480,30 @@ function convertChunkUsage(
}
}
// Suffix combinations tried, in order, when repairing truncated JSON:
// closing quotes/brackets/braces for the most common nesting depths.
const JSON_REPAIR_SUFFIXES = [
  '}', '"}', ']}', '"]}', '}}', '"}}', ']}}', '"]}}', '"]}]}', '}]}'
]

/**
 * Attempts to turn a possibly-truncated JSON object string into valid JSON.
 *
 * - If `raw` already parses to a plain (non-array) object, it is returned
 *   unchanged.
 * - Otherwise each entry of JSON_REPAIR_SUFFIXES is appended until one
 *   yields a plain object. Trailing whitespace is stripped first: a raw
 *   control character (e.g. a trailing newline) would land inside the
 *   reopened string literal and keep the JSON invalid. This mirrors the
 *   streaming repair path, which also trims before trying suffixes.
 * - Returns null when no repair produces an object (arrays and scalar JSON
 *   values are deliberately rejected).
 */
function repairPossiblyTruncatedObjectJson(raw: string): string | null {
  try {
    const parsed = JSON.parse(raw)
    return parsed && typeof parsed === 'object' && !Array.isArray(parsed)
      ? raw
      : null
  } catch {
    const base = raw.trimEnd()
    for (const combo of JSON_REPAIR_SUFFIXES) {
      try {
        const repaired = base + combo
        const parsed = JSON.parse(repaired)
        if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
          return repaired
        }
      } catch {}
    }
    return null
  }
}
/**
* Async generator that transforms an OpenAI SSE stream into
* Anthropic-format BetaRawMessageStreamEvent objects.
@@ -486,7 +514,16 @@ async function* openaiStreamToAnthropic(
): AsyncGenerator<AnthropicStreamEvent> {
const messageId = makeMessageId()
let contentBlockIndex = 0
const activeToolCalls = new Map<number, { id: string; name: string; index: number; jsonBuffer: string }>()
const activeToolCalls = new Map<
number,
{
id: string
name: string
index: number
jsonBuffer: string
normalizeAtStop: boolean
}
>()
let hasEmittedContentStart = false
let lastStopReason: 'tool_use' | 'max_tokens' | 'end_turn' | null = null
let hasEmittedFinalUsage = false
@@ -577,11 +614,14 @@ async function* openaiStreamToAnthropic(
}
const toolBlockIndex = contentBlockIndex
const initialArguments = tc.function.arguments ?? ''
const normalizeAtStop = hasToolFieldMapping(tc.function.name)
activeToolCalls.set(tc.index, {
id: tc.id,
name: tc.function.name,
index: toolBlockIndex,
jsonBuffer: tc.function.arguments ?? '',
jsonBuffer: initialArguments,
normalizeAtStop,
})
yield {
@@ -598,7 +638,7 @@ async function* openaiStreamToAnthropic(
contentBlockIndex++
// Emit any initial arguments
if (tc.function.arguments) {
if (tc.function.arguments && !normalizeAtStop) {
yield {
type: 'content_block_delta',
index: toolBlockIndex,
@@ -615,6 +655,11 @@ async function* openaiStreamToAnthropic(
if (tc.function.arguments) {
active.jsonBuffer += tc.function.arguments
}
if (active.normalizeAtStop) {
continue
}
yield {
type: 'content_block_delta',
index: active.index,
@@ -642,16 +687,44 @@ async function* openaiStreamToAnthropic(
}
// Close active tool calls
for (const [, tc] of activeToolCalls) {
if (tc.normalizeAtStop) {
let partialJson: string
if (choice.finish_reason === 'length') {
// Truncated by max tokens — preserve raw buffer to avoid
// turning an incomplete tool call into an executable command
partialJson = tc.jsonBuffer
} else {
const repairedStructuredJson = repairPossiblyTruncatedObjectJson(
tc.jsonBuffer,
)
if (repairedStructuredJson) {
partialJson = repairedStructuredJson
} else {
partialJson = JSON.stringify(
normalizeToolArguments(tc.name, tc.jsonBuffer),
)
}
}
yield {
type: 'content_block_delta',
index: tc.index,
delta: {
type: 'input_json_delta',
partial_json: partialJson,
},
}
yield { type: 'content_block_stop', index: tc.index }
continue
}
let suffixToAdd = ''
if (tc.jsonBuffer) {
try {
JSON.parse(tc.jsonBuffer)
} catch {
const str = tc.jsonBuffer.trimEnd()
const combinations = [
'}', '"}', ']}', '"]}', '}}', '"}}', ']}}', '"]}}', '"]}]}', '}]}'
]
for (const combo of combinations) {
for (const combo of JSON_REPAIR_SUFFIXES) {
try {
JSON.parse(str + combo)
suffixToAdd = combo
@@ -1087,12 +1160,10 @@ class OpenAIShimMessages {
if (choice?.message?.tool_calls) {
for (const tc of choice.message.tool_calls) {
let input: unknown
try {
input = JSON.parse(tc.function.arguments)
} catch {
input = { raw: tc.function.arguments }
}
const input = normalizeToolArguments(
tc.function.name,
tc.function.arguments,
)
content.push({
type: 'tool_use',
id: tc.id,

View File

@@ -0,0 +1,180 @@
// Unit tests for normalizeToolArguments, the hardening layer that converts
// model-produced tool-call argument strings into objects the tool schemas
// can validate. The expectations pin three behaviors: plain strings are
// wrapped into each tool's primary field, blank or malformed object-literal
// strings collapse to {} (so strict schemas report a clean validation
// error), and non-object JSON literals pass through unchanged.
import { describe, expect, test } from 'bun:test'
import { normalizeToolArguments } from './toolArgumentNormalization'

describe('normalizeToolArguments', () => {
  describe('Bash tool', () => {
    test('wraps plain string into { command }', () => {
      expect(normalizeToolArguments('Bash', 'pwd')).toEqual({ command: 'pwd' })
    })
    test('wraps multi-word command', () => {
      expect(normalizeToolArguments('Bash', 'ls -la /tmp')).toEqual({
        command: 'ls -la /tmp',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments('Bash', '{"command":"echo hi"}'),
      ).toEqual({ command: 'echo hi' })
    })
    // Blank raw arguments must not become an executable command.
    test('returns empty object for blank string', () => {
      expect(normalizeToolArguments('Bash', '')).toEqual({})
      expect(normalizeToolArguments('Bash', ' ')).toEqual({})
    })
    // A JSON-encoded blank string parses successfully, so it is passed
    // through unchanged and schema validation reports the problem.
    test('returns parsed blank for JSON-encoded blank string', () => {
      expect(normalizeToolArguments('Bash', '""')).toEqual('')
      expect(normalizeToolArguments('Bash', '" "')).toEqual(' ')
    })
    test('returns empty object for malformed structured object literal', () => {
      expect(normalizeToolArguments('Bash', '{ "command": "pwd"')).toEqual({})
    })
    // Object-shaped strings that are not valid JSON (unquoted or
    // single-quoted keys) must never be promoted into a runnable command.
    test.each([
      ['{command:"pwd"}'],
      ["{'command':'pwd'}"],
      ['{command: pwd}'],
    ])(
      'returns empty object for malformed object-shaped string %s (does not wrap into command)',
      (input) => {
        expect(normalizeToolArguments('Bash', input)).toEqual({})
      },
    )
    // Valid non-string JSON literals are preserved as-is.
    test.each([
      ['false', false],
      ['null', null],
      ['[]', [] as unknown[]],
      ['0', 0],
      ['true', true],
      ['123', 123],
    ])(
      'preserves JSON literal %s as-is (does not wrap into command)',
      (input, expected) => {
        expect(normalizeToolArguments('Bash', input)).toEqual(expected)
      },
    )
    test('wraps JSON-encoded string into { command }', () => {
      expect(normalizeToolArguments('Bash', '"pwd"')).toEqual({
        command: 'pwd',
      })
    })
  })

  describe('undefined arguments', () => {
    test('returns empty object for undefined', () => {
      expect(normalizeToolArguments('Bash', undefined)).toEqual({})
      expect(normalizeToolArguments('UnknownTool', undefined)).toEqual({})
    })
  })

  describe('Read tool', () => {
    test('wraps plain string into { file_path }', () => {
      expect(normalizeToolArguments('Read', '/home/user/file.txt')).toEqual({
        file_path: '/home/user/file.txt',
      })
    })
    test('wraps JSON-encoded string into { file_path }', () => {
      expect(normalizeToolArguments('Read', '"/home/user/file.txt"')).toEqual({
        file_path: '/home/user/file.txt',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments('Read', '{"file_path":"/tmp/f.txt","limit":10}'),
      ).toEqual({ file_path: '/tmp/f.txt', limit: 10 })
    })
  })

  describe('Write tool', () => {
    test('wraps plain string into { file_path }', () => {
      expect(normalizeToolArguments('Write', '/tmp/out.txt')).toEqual({
        file_path: '/tmp/out.txt',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments(
          'Write',
          '{"file_path":"/tmp/out.txt","content":"hello"}',
        ),
      ).toEqual({ file_path: '/tmp/out.txt', content: 'hello' })
    })
  })

  describe('Edit tool', () => {
    test('wraps plain string into { file_path }', () => {
      expect(normalizeToolArguments('Edit', '/tmp/edit.ts')).toEqual({
        file_path: '/tmp/edit.ts',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments(
          'Edit',
          '{"file_path":"/tmp/f.ts","old_string":"a","new_string":"b"}',
        ),
      ).toEqual({ file_path: '/tmp/f.ts', old_string: 'a', new_string: 'b' })
    })
  })

  describe('Glob tool', () => {
    test('wraps plain string into { pattern }', () => {
      expect(normalizeToolArguments('Glob', '**/*.ts')).toEqual({
        pattern: '**/*.ts',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments('Glob', '{"pattern":"*.js","path":"/src"}'),
      ).toEqual({ pattern: '*.js', path: '/src' })
    })
  })

  describe('Grep tool', () => {
    test('wraps plain string into { pattern }', () => {
      expect(normalizeToolArguments('Grep', 'TODO')).toEqual({
        pattern: 'TODO',
      })
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments('Grep', '{"pattern":"fixme","path":"/src"}'),
      ).toEqual({ pattern: 'fixme', path: '/src' })
    })
  })

  describe('unknown tools', () => {
    // No field mapping exists, so a bare string cannot be wrapped safely.
    test('returns empty object for plain string (no known field mapping)', () => {
      expect(normalizeToolArguments('UnknownTool', 'some value')).toEqual({})
    })
    test('passes through structured JSON object', () => {
      expect(
        normalizeToolArguments('UnknownTool', '{"key":"val"}'),
      ).toEqual({ key: 'val' })
    })
    test('preserves JSON literals as-is', () => {
      expect(normalizeToolArguments('UnknownTool', 'false')).toEqual(false)
      expect(normalizeToolArguments('UnknownTool', 'null')).toEqual(null)
      expect(normalizeToolArguments('UnknownTool', '[]')).toEqual([])
    })
    // A JSON-encoded string parses successfully but has no field to wrap
    // into, so the parsed string itself is returned.
    test('returns parsed string for JSON-encoded string on unknown tools', () => {
      expect(normalizeToolArguments('UnknownTool', '"hello"')).toEqual(
        'hello',
      )
    })
  })
})

View File

@@ -0,0 +1,69 @@
/**
 * Maps tool names to the argument field a bare string argument should
 * populate (e.g. the model emitting `pwd` for Bash means `{ command: 'pwd' }`).
 */
const STRING_ARGUMENT_TOOL_FIELDS: Record<string, string> = {
  Bash: 'command',
  Read: 'file_path',
  Write: 'file_path',
  Edit: 'file_path',
  Glob: 'pattern',
  Grep: 'pattern',
}

/** True when the string contains nothing but whitespace. */
const isBlank = (text: string): boolean => text.trim().length === 0

/**
 * Heuristic for strings shaped like a (possibly malformed) object literal:
 * {"key":, {key:, {'key':, { "key" :, etc. Bash compound commands such as
 * { pwd; } do not match because they lack the key-colon shape.
 */
const looksLikeObjectLiteral = (text: string): boolean =>
  /^\s*\{\s*['"]?\w+['"]?\s*:/.test(text)

/** Narrow to a plain (non-null, non-array) object. */
const isPlainObject = (value: unknown): value is Record<string, unknown> =>
  typeof value === 'object' && value !== null && !Array.isArray(value)

/** Whether plain-string arguments for this tool have a known field mapping. */
export function hasToolFieldMapping(toolName: string): boolean {
  return toolName in STRING_ARGUMENT_TOOL_FIELDS
}

/**
 * Normalizes a raw tool-call argument string into a value suitable for
 * schema validation.
 *
 * - undefined → {}
 * - valid JSON object → the parsed object
 * - JSON-encoded non-blank string → wrapped into the tool's mapped field,
 *   or the parsed string when no mapping exists
 * - other JSON literals (blank string, number, boolean, null, array) →
 *   passed through so schema validation produces a meaningful error
 * - non-JSON text → wrapped into the tool's mapped field, unless blank or
 *   shaped like a malformed object literal (→ {} to avoid turning garbage
 *   into executable input)
 */
export function normalizeToolArguments(
  toolName: string,
  rawArguments: string | undefined,
): unknown {
  if (rawArguments === undefined) return {}

  let parsed: unknown
  try {
    parsed = JSON.parse(rawArguments)
  } catch {
    // Not JSON at all — treat the raw text as a plain string argument.
    if (isBlank(rawArguments) || looksLikeObjectLiteral(rawArguments)) {
      return {}
    }
    const field = STRING_ARGUMENT_TOOL_FIELDS[toolName]
    return field ? { [field]: rawArguments } : {}
  }

  if (isPlainObject(parsed)) {
    return parsed
  }
  if (typeof parsed === 'string' && !isBlank(parsed)) {
    const field = STRING_ARGUMENT_TOOL_FIELDS[toolName]
    return field ? { [field]: parsed } : parsed
  }
  return parsed
}

View File

@@ -1,6 +1,7 @@
import { afterEach, describe, expect, mock, test } from 'bun:test'
import {
DEFAULT_GITHUB_DEVICE_SCOPE,
GitHubDeviceFlowError,
pollAccessToken,
requestDeviceCode,
@@ -48,6 +49,81 @@ describe('requestDeviceCode', () => {
requestDeviceCode({ clientId: 'x', fetchImpl: globalThis.fetch }),
).rejects.toThrow(GitHubDeviceFlowError)
})
test('uses OAuth-safe default scope', async () => {
let capturedScope = ''
globalThis.fetch = mock((_url: RequestInfo | URL, init?: RequestInit) => {
const body = init?.body
if (body instanceof URLSearchParams) {
capturedScope = body.get('scope') ?? ''
} else {
capturedScope = new URLSearchParams(String(body ?? '')).get('scope') ?? ''
}
return Promise.resolve(
new Response(
JSON.stringify({
device_code: 'abc',
user_code: 'ABCD-1234',
verification_uri: 'https://github.com/login/device',
}),
{ status: 200 },
),
)
})
await requestDeviceCode({ clientId: 'test-client', fetchImpl: globalThis.fetch })
expect(capturedScope).toBe(DEFAULT_GITHUB_DEVICE_SCOPE)
expect(capturedScope).toBe('read:user')
})
test('retries with OAuth-safe scope on invalid_scope', async () => {
const scopesSeen: string[] = []
let callCount = 0
globalThis.fetch = mock((_url: RequestInfo | URL, init?: RequestInit) => {
const body = init?.body
const scope =
body instanceof URLSearchParams
? body.get('scope') ?? ''
: new URLSearchParams(String(body ?? '')).get('scope') ?? ''
scopesSeen.push(scope)
callCount++
if (callCount === 1) {
return Promise.resolve(
new Response(
JSON.stringify({
error: 'invalid_scope',
error_description: 'invalid models scope',
}),
{ status: 400 },
),
)
}
return Promise.resolve(
new Response(
JSON.stringify({
device_code: 'abc',
user_code: 'ABCD-1234',
verification_uri: 'https://github.com/login/device',
}),
{ status: 200 },
),
)
})
const result = await requestDeviceCode({
clientId: 'test-client',
scope: 'read:user,models:read',
fetchImpl: globalThis.fetch,
})
expect(result.device_code).toBe('abc')
expect(callCount).toBe(2)
expect(scopesSeen).toEqual(['read:user,models:read', 'read:user'])
})
})
describe('pollAccessToken', () => {

View File

@@ -10,8 +10,10 @@ export const GITHUB_DEVICE_CODE_URL = 'https://github.com/login/device/code'
export const GITHUB_DEVICE_ACCESS_TOKEN_URL =
'https://github.com/login/oauth/access_token'
/** Match runtime devsper github_oauth DEFAULT_SCOPE */
export const DEFAULT_GITHUB_DEVICE_SCOPE = 'read:user,models:read'
// OAuth app device flow does not accept the GitHub Models permission token
// scope (models:read). Use an OAuth-safe default.
const OAUTH_SAFE_GITHUB_DEVICE_SCOPE = 'read:user'
export const DEFAULT_GITHUB_DEVICE_SCOPE = OAUTH_SAFE_GITHUB_DEVICE_SCOPE
export class GitHubDeviceFlowError extends Error {
constructor(message: string) {
@@ -51,38 +53,61 @@ export async function requestDeviceCode(options?: {
)
}
const fetchFn = options?.fetchImpl ?? fetch
const res = await fetchFn(GITHUB_DEVICE_CODE_URL, {
method: 'POST',
headers: { Accept: 'application/json' },
body: new URLSearchParams({
client_id: clientId,
scope: options?.scope ?? DEFAULT_GITHUB_DEVICE_SCOPE,
}),
})
if (!res.ok) {
const text = await res.text().catch(() => '')
throw new GitHubDeviceFlowError(
`Device code request failed: ${res.status} ${text}`,
)
}
const data = (await res.json()) as Record<string, unknown>
const device_code = data.device_code
const user_code = data.user_code
const verification_uri = data.verification_uri
if (
typeof device_code !== 'string' ||
typeof user_code !== 'string' ||
typeof verification_uri !== 'string'
) {
throw new GitHubDeviceFlowError('Malformed device code response from GitHub')
}
return {
device_code,
user_code,
verification_uri,
expires_in: typeof data.expires_in === 'number' ? data.expires_in : 900,
interval: typeof data.interval === 'number' ? data.interval : 5,
const requestedScope =
options?.scope?.trim() || DEFAULT_GITHUB_DEVICE_SCOPE
const scopesToTry =
requestedScope === OAUTH_SAFE_GITHUB_DEVICE_SCOPE
? [requestedScope]
: [requestedScope, OAUTH_SAFE_GITHUB_DEVICE_SCOPE]
let lastError = 'Device code request failed.'
for (const scope of scopesToTry) {
const res = await fetchFn(GITHUB_DEVICE_CODE_URL, {
method: 'POST',
headers: { Accept: 'application/json' },
body: new URLSearchParams({
client_id: clientId,
scope,
}),
})
if (!res.ok) {
const text = await res.text().catch(() => '')
lastError = `Device code request failed: ${res.status} ${text}`
const isInvalidScope = /invalid_scope/i.test(text)
const canRetryWithFallback =
scope !== OAUTH_SAFE_GITHUB_DEVICE_SCOPE && isInvalidScope
if (canRetryWithFallback) {
continue
}
throw new GitHubDeviceFlowError(lastError)
}
const data = (await res.json()) as Record<string, unknown>
const device_code = data.device_code
const user_code = data.user_code
const verification_uri = data.verification_uri
if (
typeof device_code !== 'string' ||
typeof user_code !== 'string' ||
typeof verification_uri !== 'string'
) {
throw new GitHubDeviceFlowError(
'Malformed device code response from GitHub',
)
}
return {
device_code,
user_code,
verification_uri,
expires_in: typeof data.expires_in === 'number' ? data.expires_in : 900,
interval: typeof data.interval === 'number' ? data.interval : 5,
}
}
throw new GitHubDeviceFlowError(lastError)
}
export type PollOptions = {

View File

@@ -9,6 +9,7 @@ import { getGlobalConfig, saveGlobalConfig } from '../utils/config.js'
import { toError } from '../utils/errors.js'
import { logError } from '../utils/log.js'
import { applyConfigEnvironmentVariables } from '../utils/managedEnv.js'
import { persistActiveProviderProfileModel } from '../utils/providerProfiles.js'
import {
permissionModeFromString,
toExternalPermissionMode,
@@ -110,6 +111,12 @@ export function onChangeAppState({
// Save to settings
updateSettingsForSource('userSettings', { model: newState.mainLoopModel })
setMainLoopModelOverride(newState.mainLoopModel)
// Keep active provider profiles in sync with /model choices so restarts
// keep using the last selected model instead of the profile's old default.
if (process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED === '1') {
persistActiveProviderProfileModel(newState.mainLoopModel)
}
}
// expandedView → persist as showExpandedTodos + showSpinnerTree for backwards compat

View File

@@ -10,6 +10,8 @@ describe('hydrateGithubModelsTokenFromSecureStorage', () => {
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
GITHUB_TOKEN: process.env.GITHUB_TOKEN,
GH_TOKEN: process.env.GH_TOKEN,
CLAUDE_CODE_GITHUB_TOKEN_HYDRATED:
process.env.CLAUDE_CODE_GITHUB_TOKEN_HYDRATED,
CLAUDE_CODE_SIMPLE: process.env.CLAUDE_CODE_SIMPLE,
}
@@ -43,11 +45,13 @@ describe('hydrateGithubModelsTokenFromSecureStorage', () => {
)
hydrateGithubModelsTokenFromSecureStorage()
expect(process.env.GITHUB_TOKEN).toBe('stored-secret')
expect(process.env.CLAUDE_CODE_GITHUB_TOKEN_HYDRATED).toBe('1')
})
test('does not override existing GITHUB_TOKEN', async () => {
process.env.CLAUDE_CODE_USE_GITHUB = '1'
process.env.GITHUB_TOKEN = 'already'
delete process.env.CLAUDE_CODE_GITHUB_TOKEN_HYDRATED
mock.module('./secureStorage/index.js', () => ({
getSecureStorage: () => ({
@@ -62,5 +66,6 @@ describe('hydrateGithubModelsTokenFromSecureStorage', () => {
)
hydrateGithubModelsTokenFromSecureStorage()
expect(process.env.GITHUB_TOKEN).toBe('already')
expect(process.env.CLAUDE_CODE_GITHUB_TOKEN_HYDRATED).toBeUndefined()
})
})

View File

@@ -3,6 +3,8 @@ import { getSecureStorage } from './secureStorage/index.js'
/** JSON key in the shared OpenClaude secure storage blob. */
export const GITHUB_MODELS_STORAGE_KEY = 'githubModels' as const
export const GITHUB_MODELS_HYDRATED_ENV_MARKER =
'CLAUDE_CODE_GITHUB_TOKEN_HYDRATED' as const
export type GithubModelsCredentialBlob = {
accessToken: string
@@ -27,18 +29,28 @@ export function readGithubModelsToken(): string | undefined {
*/
export function hydrateGithubModelsTokenFromSecureStorage(): void {
if (!isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)) {
delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
return
}
if (process.env.GITHUB_TOKEN?.trim() || process.env.GH_TOKEN?.trim()) {
if (process.env.GH_TOKEN?.trim()) {
delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
return
}
if (process.env.GITHUB_TOKEN?.trim()) {
delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
return
}
if (isBareMode()) {
delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
return
}
const t = readGithubModelsToken()
if (t) {
process.env.GITHUB_TOKEN = t
process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER] = '1'
return
}
delete process.env[GITHUB_MODELS_HYDRATED_ENV_MARKER]
}
export function saveGithubModelsToken(token: string): {

View File

@@ -80,7 +80,9 @@ export function getUserSpecifiedModelSetting(): ModelSetting | undefined {
const provider = getAPIProvider()
specifiedModel =
(provider === 'gemini' ? process.env.GEMINI_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' ? process.env.OPENAI_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' || provider === 'github'
? process.env.OPENAI_MODEL
: undefined) ||
(provider === 'firstParty' ? process.env.ANTHROPIC_MODEL : undefined) ||
settings.model ||
undefined
@@ -237,6 +239,10 @@ export function getDefaultMainLoopModelSetting(): ModelName | ModelAlias {
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'
}
// GitHub provider: always use the configured GitHub model
if (getAPIProvider() === 'github') {
return process.env.OPENAI_MODEL || 'github:copilot'
}
// Codex provider: always use the configured Codex model (default gpt-5.4)
if (getAPIProvider() === 'codex') {
return process.env.OPENAI_MODEL || 'gpt-5.4'

View File

@@ -0,0 +1,83 @@
import { afterEach, beforeEach, expect, mock, test } from 'bun:test'
import { resetModelStringsForTestingOnly } from '../../bootstrap/state.js'
import { saveGlobalConfig } from '../config.js'

// Imports a fresh copy of modelOptions.js (cache-busted via query string)
// with getAPIProvider mocked to report the 'github' provider.
async function importFreshModelOptionsModule() {
  mock.restore()
  mock.module('./providers.js', () => ({
    getAPIProvider: () => 'github',
  }))
  const nonce = `${Date.now()}-${Math.random()}`
  return import(`./modelOptions.js?ts=${nonce}`)
}

// Snapshot of every env var these tests mutate; restored in afterEach.
const originalEnv = {
  CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
  CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
  CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
  CLAUDE_CODE_USE_BEDROCK: process.env.CLAUDE_CODE_USE_BEDROCK,
  CLAUDE_CODE_USE_VERTEX: process.env.CLAUDE_CODE_USE_VERTEX,
  CLAUDE_CODE_USE_FOUNDRY: process.env.CLAUDE_CODE_USE_FOUNDRY,
  OPENAI_MODEL: process.env.OPENAI_MODEL,
  OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
  ANTHROPIC_CUSTOM_MODEL_OPTION: process.env.ANTHROPIC_CUSTOM_MODEL_OPTION,
}

beforeEach(() => {
  mock.restore()
  // Start every test from a clean slate: no provider flags, no model env.
  for (const key of Object.keys(originalEnv)) {
    delete process.env[key]
  }
  resetModelStringsForTestingOnly()
})

afterEach(() => {
  // Restore the snapshot. Keys that were unset before the test are deleted:
  // assigning undefined into process.env coerces it to the string
  // "undefined", which would leak a truthy-looking flag into later tests.
  for (const [key, value] of Object.entries(originalEnv)) {
    if (value === undefined) delete process.env[key]
    else process.env[key] = value
  }
  // Reset cached model-option/profile state so mocked data cannot leak.
  saveGlobalConfig(current => ({
    ...current,
    additionalModelOptionsCache: [],
    additionalModelOptionsCacheScope: undefined,
    openaiAdditionalModelOptionsCache: [],
    openaiAdditionalModelOptionsCacheByProfile: {},
    providerProfiles: [],
    activeProviderProfileId: undefined,
  }))
  resetModelStringsForTestingOnly()
})

test('GitHub provider exposes only default + GitHub model in /model options', async () => {
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  delete process.env.CLAUDE_CODE_USE_OPENAI
  delete process.env.CLAUDE_CODE_USE_GEMINI
  delete process.env.CLAUDE_CODE_USE_BEDROCK
  delete process.env.CLAUDE_CODE_USE_VERTEX
  delete process.env.CLAUDE_CODE_USE_FOUNDRY
  process.env.OPENAI_MODEL = 'github:copilot'
  delete process.env.ANTHROPIC_CUSTOM_MODEL_OPTION
  const { getModelOptions } = await importFreshModelOptionsModule()
  const options = getModelOptions(false)
  // Exactly one non-default option should remain: the GitHub model itself.
  const nonDefault = options.filter(
    (option: { value: unknown }) => option.value !== null,
  )
  expect(nonDefault.length).toBe(1)
  expect(nonDefault[0]?.value).toBe('github:copilot')
})

View File

@@ -352,6 +352,18 @@ function getCodexModelOptions(): ModelOption[] {
// @[MODEL LAUNCH]: Update the model picker lists below to include/reorder options for the new model.
// Each user tier (ant, Max/Team Premium, Pro/Team Standard/Enterprise, PAYG 1P, PAYG 3P) has its own list.
function getModelOptionsBase(fastMode = false): ModelOption[] {
if (getAPIProvider() === 'github') {
const githubModel = process.env.OPENAI_MODEL?.trim() || 'github:copilot'
return [
getDefaultOptionForUser(fastMode),
{
value: githubModel,
label: githubModel,
description: 'GitHub Models default',
},
]
}
// When using Ollama, show models from the Ollama server instead of Claude models
if (getAPIProvider() === 'openai' && isOllamaProvider()) {
const defaultOption = getDefaultOptionForUser(fastMode)
@@ -579,6 +591,10 @@ function getKnownModelOption(model: string): ModelOption | null {
}
export function getModelOptions(fastMode = false): ModelOption[] {
if (getAPIProvider() === 'github') {
return filterModelOptionsByAllowlist(getModelOptionsBase(fastMode))
}
const options = getModelOptionsBase(fastMode)
// Add the custom model from the ANTHROPIC_CUSTOM_MODEL_OPTION env var

View File

@@ -0,0 +1,54 @@
import { afterEach, expect, test } from 'bun:test'
import { resetModelStringsForTestingOnly } from '../../bootstrap/state.js'
import { parseUserSpecifiedModel } from './model.js'
import { getModelStrings } from './modelStrings.js'

// Snapshot of the provider-selection env vars mutated by these tests.
const originalEnv = {
  CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
  CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
  CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
  CLAUDE_CODE_USE_BEDROCK: process.env.CLAUDE_CODE_USE_BEDROCK,
  CLAUDE_CODE_USE_VERTEX: process.env.CLAUDE_CODE_USE_VERTEX,
  CLAUDE_CODE_USE_FOUNDRY: process.env.CLAUDE_CODE_USE_FOUNDRY,
}

// Clears every provider flag so CLAUDE_CODE_USE_GITHUB alone selects the
// provider inside each test.
function clearProviderFlags(): void {
  delete process.env.CLAUDE_CODE_USE_GITHUB
  delete process.env.CLAUDE_CODE_USE_OPENAI
  delete process.env.CLAUDE_CODE_USE_GEMINI
  delete process.env.CLAUDE_CODE_USE_BEDROCK
  delete process.env.CLAUDE_CODE_USE_VERTEX
  delete process.env.CLAUDE_CODE_USE_FOUNDRY
}

afterEach(() => {
  // Restore the snapshot. Keys that were unset before the test are deleted:
  // assigning undefined into process.env coerces it to the string
  // "undefined", which would leak a truthy-looking flag into later tests.
  for (const [key, value] of Object.entries(originalEnv)) {
    if (value === undefined) delete process.env[key]
    else process.env[key] = value
  }
  resetModelStringsForTestingOnly()
})

test('GitHub provider model strings are concrete IDs', () => {
  clearProviderFlags()
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  const modelStrings = getModelStrings()
  // Every alias must resolve to a non-empty string, never undefined.
  for (const value of Object.values(modelStrings)) {
    expect(typeof value).toBe('string')
    expect(value.trim().length).toBeGreaterThan(0)
  }
})

test('GitHub provider model strings are safe to parse', () => {
  clearProviderFlags()
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  const modelStrings = getModelStrings()
  expect(() => parseUserSpecifiedModel(modelStrings.sonnet46 as any)).not.toThrow()
})

View File

@@ -25,7 +25,7 @@ const MODEL_KEYS = Object.keys(ALL_MODEL_CONFIGS) as ModelKey[]
function getBuiltinModelStrings(provider: APIProvider): ModelStrings {
// Codex piggybacks on the OpenAI provider transport for Anthropic tier aliases.
// Reuse OpenAI mappings so model string lookups never return undefined.
const providerKey = provider === 'codex' ? 'openai' : provider
const providerKey = provider === 'codex' || provider === 'github' ? 'openai' : provider
const out = {} as ModelStrings
for (const key of MODEL_KEYS) {
out[key] = ALL_MODEL_CONFIGS[key][providerKey]

View File

@@ -1,4 +1,4 @@
import { describe, expect, test, afterEach } from 'bun:test'
import { afterEach, beforeEach, describe, expect, test } from 'bun:test'
import {
parseProviderFlag,
applyProviderFlag,
@@ -8,18 +8,26 @@ import {
const originalEnv = { ...process.env }
const RESET_KEYS = [
'CLAUDE_CODE_USE_OPENAI',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_GITHUB',
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_VERTEX',
'OPENAI_BASE_URL',
'OPENAI_API_KEY',
'OPENAI_MODEL',
'GEMINI_MODEL',
] as const
beforeEach(() => {
for (const key of RESET_KEYS) {
delete process.env[key]
}
})
afterEach(() => {
for (const key of [
'CLAUDE_CODE_USE_OPENAI',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_GITHUB',
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_VERTEX',
'OPENAI_BASE_URL',
'OPENAI_API_KEY',
'OPENAI_MODEL',
'GEMINI_MODEL',
]) {
for (const key of RESET_KEYS) {
if (originalEnv[key] === undefined) delete process.env[key]
else process.env[key] = originalEnv[key]
}

View File

@@ -485,6 +485,26 @@ test('buildStartupEnvFromProfile leaves explicit provider selections untouched',
assert.equal(env.OPENAI_API_KEY, undefined)
})
test('buildStartupEnvFromProfile leaves profile-managed env untouched', async () => {
const processEnv = {
CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED: '1',
ANTHROPIC_BASE_URL: 'https://api.anthropic.com',
ANTHROPIC_MODEL: 'claude-sonnet-4-6',
}
const env = await buildStartupEnvFromProfile({
persisted: profile('openai', {
OPENAI_API_KEY: 'sk-persisted',
OPENAI_MODEL: 'gpt-4o',
}),
processEnv,
})
assert.equal(env, processEnv)
assert.equal(env.ANTHROPIC_MODEL, 'claude-sonnet-4-6')
assert.equal(env.OPENAI_MODEL, undefined)
})
test('buildStartupEnvFromProfile treats explicit falsey provider flags as user intent', async () => {
const processEnv = {
CLAUDE_CODE_USE_OPENAI: '0',

View File

@@ -407,6 +407,11 @@ export function deleteProfileFile(options?: ProfileFileLocation): string {
export function hasExplicitProviderSelection(
processEnv: NodeJS.ProcessEnv = process.env,
): boolean {
// If env was already applied from a provider profile, preserve it.
if (processEnv.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED === '1') {
return true
}
return (
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined ||
processEnv.CLAUDE_CODE_USE_GITHUB !== undefined ||

View File

@@ -2,10 +2,15 @@ import { afterEach, describe, expect, mock, test } from 'bun:test'
import type { ProviderProfile } from './config.js'
async function importFreshProvidersModule() {
return import(`./model/providers.ts?ts=${Date.now()}-${Math.random()}`)
}
const originalEnv = { ...process.env }
const RESTORED_KEYS = [
'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED',
'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID',
'CLAUDE_CODE_USE_OPENAI',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_GITHUB',
@@ -21,8 +26,35 @@ const RESTORED_KEYS = [
'ANTHROPIC_API_KEY',
] as const
type MockConfigState = {
providerProfiles: ProviderProfile[]
activeProviderProfileId?: string
openaiAdditionalModelOptionsCache: unknown[]
openaiAdditionalModelOptionsCacheByProfile: Record<string, unknown[]>
additionalModelOptionsCache?: unknown[]
additionalModelOptionsCacheScope?: string
}
/** Builds a pristine MockConfigState: no profiles, no active id, empty caches. */
function createMockConfigState(): MockConfigState {
  const pristine: MockConfigState = {
    providerProfiles: [],
    activeProviderProfileId: undefined,
    openaiAdditionalModelOptionsCache: [],
    openaiAdditionalModelOptionsCacheByProfile: {},
    additionalModelOptionsCache: [],
    additionalModelOptionsCacheScope: undefined,
  }
  return pristine
}
let mockConfigState: MockConfigState = createMockConfigState()
/**
 * Test-side analogue of saveGlobalConfig: passes the current mock state to
 * `updater` and stores whatever it returns as the new state.
 */
function saveMockGlobalConfig(
  updater: (current: MockConfigState) => MockConfigState,
): void {
  const nextState = updater(mockConfigState)
  mockConfigState = nextState
}
afterEach(() => {
mock.restore()
for (const key of RESTORED_KEYS) {
if (originalEnv[key] === undefined) {
delete process.env[key]
@@ -30,8 +62,31 @@ afterEach(() => {
process.env[key] = originalEnv[key]
}
}
mock.restore()
mockConfigState = createMockConfigState()
})
/**
 * Re-imports providers.js and providerProfiles.js as fresh module instances
 * (cache-busted with a shared nonce) after mocking ./config.js so both modules
 * read and write the shared in-memory mockConfigState instead of real config.
 * Returns the merged exports of both modules.
 */
async function importFreshProviderProfileModules() {
  mock.restore()
  mock.module('./config.js', () => ({
    getGlobalConfig: () => mockConfigState,
    saveGlobalConfig: (
      updater: (current: MockConfigState) => MockConfigState,
    ) => {
      mockConfigState = updater(mockConfigState)
    },
  }))
  const cacheKey = `${Date.now()}-${Math.random()}`
  const providersModule = await import(`./model/providers.js?ts=${cacheKey}`)
  const profilesModule = await import(`./providerProfiles.js?ts=${cacheKey}`)
  return {
    ...providersModule,
    ...profilesModule,
  }
}
function buildProfile(overrides: Partial<ProviderProfile> = {}): ProviderProfile {
return {
id: 'provider_test',
@@ -43,57 +98,31 @@ function buildProfile(overrides: Partial<ProviderProfile> = {}): ProviderProfile
}
}
// Older fresh-import helper that keeps its config stand-in in a per-call
// closure rather than the shared mockConfigState. NOTE(review): appears to be
// superseded by importFreshProviderProfileModules — confirm before reuse.
async function importFreshProviderModules() {
  mock.restore()
  // Local, per-call stand-in for the global config store.
  let configState = {
    providerProfiles: [] as ProviderProfile[],
    activeProviderProfileId: undefined as string | undefined,
    openaiAdditionalModelOptionsCache: [] as any[],
    openaiAdditionalModelOptionsCacheByProfile: {} as Record<string, any[]>,
  }
  mock.module('./config.js', () => ({
    getGlobalConfig: () => configState,
    saveGlobalConfig: (
      updater: (current: typeof configState) => typeof configState,
    ) => {
      configState = updater(configState)
    },
  }))
  // Independent cache-busting queries: the two modules load under different
  // nonces (unlike the shared nonce in importFreshProviderProfileModules).
  const providerProfiles = await import(
    `./providerProfiles.js?ts=${Date.now()}-${Math.random()}`
  )
  const providers = await import(
    `./model/providers.js?ts=${Date.now()}-${Math.random()}`
  )
  return {
    ...providerProfiles,
    ...providers,
  }
}
describe('applyProviderProfileToProcessEnv', () => {
test('openai profile clears competing gemini/github flags', async () => {
const { applyProviderProfileToProcessEnv } =
await importFreshProviderProfileModules()
process.env.CLAUDE_CODE_USE_GEMINI = '1'
process.env.CLAUDE_CODE_USE_GITHUB = '1'
const { applyProviderProfileToProcessEnv, getAPIProvider } =
await importFreshProviderModules()
applyProviderProfileToProcessEnv(buildProfile())
const { getAPIProvider: getFreshAPIProvider } =
await importFreshProvidersModule()
expect(process.env.CLAUDE_CODE_USE_GEMINI).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_GITHUB).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(getAPIProvider()).toBe('openai')
expect(String(process.env.CLAUDE_CODE_USE_OPENAI)).toBe('1')
expect(process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID).toBe(
'provider_test',
)
expect(getFreshAPIProvider()).toBe('openai')
})
test('anthropic profile clears competing gemini/github flags', async () => {
const { applyProviderProfileToProcessEnv } =
await importFreshProviderProfileModules()
process.env.CLAUDE_CODE_USE_GEMINI = '1'
process.env.CLAUDE_CODE_USE_GITHUB = '1'
const { applyProviderProfileToProcessEnv, getAPIProvider } =
await importFreshProviderModules()
applyProviderProfileToProcessEnv(
buildProfile({
@@ -102,21 +131,23 @@ describe('applyProviderProfileToProcessEnv', () => {
model: 'claude-sonnet-4-6',
}),
)
const { getAPIProvider: getFreshAPIProvider } =
await importFreshProvidersModule()
expect(process.env.CLAUDE_CODE_USE_GEMINI).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_GITHUB).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBeUndefined()
expect(getAPIProvider()).toBe('firstParty')
expect(getFreshAPIProvider()).toBe('firstParty')
})
})
describe('applyActiveProviderProfileFromConfig', () => {
test('does not override explicit startup provider selection', async () => {
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderProfileModules()
process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
process.env.OPENAI_MODEL = 'qwen2.5:3b'
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderModules()
const applied = applyActiveProviderProfileFromConfig({
providerProfiles: [
@@ -135,12 +166,12 @@ describe('applyActiveProviderProfileFromConfig', () => {
})
test('does not override explicit startup selection when profile marker is stale', async () => {
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderProfileModules()
process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED = '1'
process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
process.env.OPENAI_MODEL = 'qwen2.5:3b'
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderModules()
const applied = applyActiveProviderProfileFromConfig({
providerProfiles: [
@@ -154,12 +185,74 @@ describe('applyActiveProviderProfileFromConfig', () => {
} as any)
expect(applied).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(String(process.env.CLAUDE_CODE_USE_OPENAI)).toBe('1')
expect(process.env.OPENAI_BASE_URL).toBe('http://localhost:11434/v1')
expect(process.env.OPENAI_MODEL).toBe('qwen2.5:3b')
})
// Auto-heal path: after the active profile applied its env, an external
// overwrite of OPENAI_MODEL ("drift") should be repaired — re-applying the
// config restores the profile's model and base URL.
test('re-applies active profile when profile-managed env drifts', async () => {
  const { applyActiveProviderProfileFromConfig, applyProviderProfileToProcessEnv } =
    await importFreshProviderProfileModules()
  applyProviderProfileToProcessEnv(
    buildProfile({
      id: 'saved_openai',
      baseUrl: 'http://192.168.33.108:11434/v1',
      model: 'kimi-k2.5:cloud',
    }),
  )
  // Simulate settings/env merge clobbering the model while profile flags remain.
  process.env.OPENAI_MODEL = 'github:copilot'
  const applied = applyActiveProviderProfileFromConfig({
    providerProfiles: [
      buildProfile({
        id: 'saved_openai',
        baseUrl: 'http://192.168.33.108:11434/v1',
        model: 'kimi-k2.5:cloud',
      }),
    ],
    activeProviderProfileId: 'saved_openai',
  } as any)
  expect(applied?.id).toBe('saved_openai')
  // Drifted values are overwritten back to the profile's model and base URL.
  expect(process.env.OPENAI_MODEL).toBe('kimi-k2.5:cloud')
  expect(process.env.OPENAI_BASE_URL).toBe('http://192.168.33.108:11434/v1')
})
// Auto-heal must NOT run when an explicit conflicting provider flag
// (CLAUDE_CODE_USE_GITHUB) was set after the profile applied its env:
// nothing is re-applied and the drifted env is preserved.
test('does not re-apply active profile when flags conflict with current provider', async () => {
  const { applyActiveProviderProfileFromConfig, applyProviderProfileToProcessEnv } =
    await importFreshProviderProfileModules()
  applyProviderProfileToProcessEnv(
    buildProfile({
      id: 'saved_openai',
      baseUrl: 'http://192.168.33.108:11434/v1',
      model: 'kimi-k2.5:cloud',
    }),
  )
  // Explicit GitHub selection plus a drifted model value.
  process.env.CLAUDE_CODE_USE_GITHUB = '1'
  process.env.OPENAI_MODEL = 'github:copilot'
  const applied = applyActiveProviderProfileFromConfig({
    providerProfiles: [
      buildProfile({
        id: 'saved_openai',
        baseUrl: 'http://192.168.33.108:11434/v1',
        model: 'kimi-k2.5:cloud',
      }),
    ],
    activeProviderProfileId: 'saved_openai',
  } as any)
  // Nothing applied; the conflicting flag and drifted model remain untouched.
  expect(applied).toBeUndefined()
  expect(process.env.CLAUDE_CODE_USE_GITHUB).toBe('1')
  expect(process.env.OPENAI_MODEL).toBe('github:copilot')
})
test('applies active profile when no explicit provider is selected', async () => {
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderProfileModules()
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_GITHUB
@@ -169,8 +262,6 @@ describe('applyActiveProviderProfileFromConfig', () => {
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
process.env.OPENAI_MODEL = 'qwen2.5:3b'
const { applyActiveProviderProfileFromConfig } =
await importFreshProviderModules()
const applied = applyActiveProviderProfileFromConfig({
providerProfiles: [
@@ -184,16 +275,82 @@ describe('applyActiveProviderProfileFromConfig', () => {
} as any)
expect(applied?.id).toBe('saved_openai')
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(String(process.env.CLAUDE_CODE_USE_OPENAI)).toBe('1')
expect(process.env.OPENAI_BASE_URL).toBe('https://api.openai.com/v1')
expect(process.env.OPENAI_MODEL).toBe('gpt-4o')
})
})
describe('persistActiveProviderProfileModel', () => {
  // Profile-managed session: env was applied by the active profile, so
  // persisting a new model updates both the saved profile and the live env.
  test('updates active profile model and current env for profile-managed sessions', async () => {
    const {
      applyProviderProfileToProcessEnv,
      getProviderProfiles,
      persistActiveProviderProfileModel,
    } = await importFreshProviderProfileModules()
    const activeProfile = buildProfile({
      id: 'saved_openai',
      baseUrl: 'http://192.168.33.108:11434/v1',
      model: 'kimi-k2.5:cloud',
    })
    // Seed the mocked config store with the active profile...
    saveMockGlobalConfig(current => ({
      ...current,
      providerProfiles: [activeProfile],
      activeProviderProfileId: activeProfile.id,
    }))
    // ...and mark the session profile-managed by applying its env.
    applyProviderProfileToProcessEnv(activeProfile)
    const updated = persistActiveProviderProfileModel('minimax-m2.5:cloud')
    expect(updated?.id).toBe(activeProfile.id)
    expect(updated?.model).toBe('minimax-m2.5:cloud')
    // Live env follows the persisted model; applied-id marker still matches.
    expect(process.env.OPENAI_MODEL).toBe('minimax-m2.5:cloud')
    expect(process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID).toBe(
      activeProfile.id,
    )
    // The new model was also written back to the saved profile.
    const saved = getProviderProfiles().find(
      (profile: ProviderProfile) => profile.id === activeProfile.id,
    )
    expect(saved?.model).toBe('minimax-m2.5:cloud')
  })
  // Non-profile-managed session: env came from the user (no applied markers),
  // so persisting must update the saved profile only and leave env alone.
  test('does not mutate process env when session is not profile-managed', async () => {
    const {
      getProviderProfiles,
      persistActiveProviderProfileModel,
    } = await importFreshProviderProfileModules()
    const activeProfile = buildProfile({
      id: 'saved_openai',
      model: 'kimi-k2.5:cloud',
    })
    saveMockGlobalConfig(current => ({
      ...current,
      providerProfiles: [activeProfile],
      activeProviderProfileId: activeProfile.id,
    }))
    // Env set explicitly by the user; profile-applied markers removed.
    process.env.CLAUDE_CODE_USE_OPENAI = '1'
    process.env.OPENAI_MODEL = 'cli-model'
    delete process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED
    delete process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID
    persistActiveProviderProfileModel('minimax-m2.5:cloud')
    // Live env untouched; only the saved profile's model changes.
    expect(process.env.OPENAI_MODEL).toBe('cli-model')
    const saved = getProviderProfiles().find(
      (profile: ProviderProfile) => profile.id === activeProfile.id,
    )
    expect(saved?.model).toBe('minimax-m2.5:cloud')
  })
})
describe('getProviderPresetDefaults', () => {
test('ollama preset defaults to a local Ollama model', async () => {
const { getProviderPresetDefaults } = await importFreshProviderProfileModules()
delete process.env.OPENAI_MODEL
const { getProviderPresetDefaults } = await importFreshProviderModules()
const defaults = getProviderPresetDefaults('ollama')
@@ -205,21 +362,25 @@ describe('getProviderPresetDefaults', () => {
describe('deleteProviderProfile', () => {
test('deleting final profile clears provider env when active profile applied it', async () => {
const {
addProviderProfile,
applyProviderProfileToProcessEnv,
deleteProviderProfile,
} =
await importFreshProviderModules()
const profile = addProviderProfile({
name: 'Only Profile',
provider: 'openai',
baseUrl: 'https://api.openai.com/v1',
model: 'gpt-4o',
apiKey: 'sk-test',
})
} = await importFreshProviderProfileModules()
applyProviderProfileToProcessEnv(
buildProfile({
id: 'only_profile',
baseUrl: 'https://api.openai.com/v1',
model: 'gpt-4o',
apiKey: 'sk-test',
}),
)
expect(profile).not.toBeNull()
saveMockGlobalConfig(current => ({
...current,
providerProfiles: [buildProfile({ id: 'only_profile' })],
activeProviderProfileId: 'only_profile',
}))
const result = deleteProviderProfile(profile!.id)
const result = deleteProviderProfile('only_profile')
expect(result.removed).toBe(true)
expect(result.activeProfileId).toBeUndefined()
@@ -244,30 +405,24 @@ describe('deleteProviderProfile', () => {
})
test('deleting final profile preserves explicit startup provider env', async () => {
const { addProviderProfile, deleteProviderProfile } =
await importFreshProviderModules()
const profile = addProviderProfile({
name: 'Only Profile',
provider: 'openai',
baseUrl: 'https://api.openai.com/v1',
model: 'gpt-4o',
})
expect(profile).not.toBeNull()
process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED = undefined
delete process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED
const { deleteProviderProfile } = await importFreshProviderProfileModules()
process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
process.env.OPENAI_MODEL = 'qwen2.5:3b'
const result = deleteProviderProfile(profile!.id)
saveMockGlobalConfig(current => ({
...current,
providerProfiles: [buildProfile({ id: 'only_profile' })],
activeProviderProfileId: 'only_profile',
}))
const result = deleteProviderProfile('only_profile')
expect(result.removed).toBe(true)
expect(result.activeProfileId).toBeUndefined()
expect(process.env.CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(String(process.env.CLAUDE_CODE_USE_OPENAI)).toBe('1')
expect(process.env.OPENAI_BASE_URL).toBe('http://localhost:11434/v1')
expect(process.env.OPENAI_MODEL).toBe('qwen2.5:3b')
})

View File

@@ -37,6 +37,7 @@ export type ProviderPresetDefaults = Omit<ProviderProfileInput, 'provider'> & {
const DEFAULT_OLLAMA_BASE_URL = 'http://localhost:11434/v1'
const DEFAULT_OLLAMA_MODEL = 'llama3.1:8b'
const PROFILE_ENV_APPLIED_FLAG = 'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED'
const PROFILE_ENV_APPLIED_ID = 'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED_ID'
function trimValue(value: string | undefined): string {
return value?.trim() ?? ''
@@ -264,6 +265,23 @@ function hasProviderSelectionFlags(
)
}
/**
 * Reports whether processEnv carries provider-selection flags that conflict
 * with re-applying `profile`. For an anthropic profile, any selection flag is
 * a conflict. For any other provider, only the non-OpenAI flags count
 * (CLAUDE_CODE_USE_OPENAI is tolerated — the tests above show it is the flag
 * an applied OpenAI-style profile itself sets).
 */
function hasConflictingProviderFlagsForProfile(
  processEnv: NodeJS.ProcessEnv,
  profile: ProviderProfile,
): boolean {
  if (profile.provider === 'anthropic') {
    return hasProviderSelectionFlags(processEnv)
  }
  const competingFlags = [
    'CLAUDE_CODE_USE_GEMINI',
    'CLAUDE_CODE_USE_GITHUB',
    'CLAUDE_CODE_USE_BEDROCK',
    'CLAUDE_CODE_USE_VERTEX',
    'CLAUDE_CODE_USE_FOUNDRY',
  ] as const
  return competingFlags.some(flag => processEnv[flag] !== undefined)
}
function sameOptionalEnvValue(
left: string | undefined,
right: string | undefined,
@@ -284,6 +302,10 @@ function isProcessEnvAlignedWithProfile(
return false
}
if (trimOrUndefined(processEnv[PROFILE_ENV_APPLIED_ID]) !== profile.id) {
return false
}
if (profile.provider === 'anthropic') {
return (
!hasProviderSelectionFlags(processEnv) &&
@@ -339,11 +361,13 @@ export function clearProviderProfileEnvFromProcessEnv(
delete processEnv.ANTHROPIC_MODEL
delete processEnv.ANTHROPIC_API_KEY
delete processEnv[PROFILE_ENV_APPLIED_FLAG]
delete processEnv[PROFILE_ENV_APPLIED_ID]
}
export function applyProviderProfileToProcessEnv(profile: ProviderProfile): void {
clearProviderProfileEnvFromProcessEnv()
process.env[PROFILE_ENV_APPLIED_FLAG] = '1'
process.env[PROFILE_ENV_APPLIED_ID] = profile.id
process.env.ANTHROPIC_MODEL = profile.model
if (profile.provider === 'anthropic') {
@@ -386,12 +410,24 @@ export function applyActiveProviderProfileFromConfig(
return undefined
}
const isCurrentEnvProfileManaged =
processEnv[PROFILE_ENV_APPLIED_FLAG] === '1' &&
trimOrUndefined(processEnv[PROFILE_ENV_APPLIED_ID]) === activeProfile.id
if (!options?.force && hasProviderSelectionFlags(processEnv)) {
// Respect explicit startup provider intent. Re-apply only when the
// current process env is already profile-managed and aligned.
if (!isProcessEnvAlignedWithProfile(processEnv, activeProfile)) {
// Respect explicit startup provider intent. Auto-heal only when this
// exact active profile previously applied the current env.
if (!isCurrentEnvProfileManaged) {
return undefined
}
if (hasConflictingProviderFlagsForProfile(processEnv, activeProfile)) {
return undefined
}
if (isProcessEnvAlignedWithProfile(processEnv, activeProfile)) {
return activeProfile
}
}
applyProviderProfileToProcessEnv(activeProfile)
@@ -496,6 +532,61 @@ export function updateProviderProfile(
return updatedProfile
}
/**
 * Persists `model` onto the currently active provider profile and, when the
 * live process env was applied by that exact profile (both the applied flag
 * and the applied-id marker match), re-applies the profile so the env picks
 * up the new model.
 *
 * Returns the updated active profile, or null when the model is blank, there
 * is no active profile, or the active profile changed while saving.
 */
export function persistActiveProviderProfileModel(
  model: string,
): ProviderProfile | null {
  const nextModel = trimOrUndefined(model)
  if (!nextModel) {
    // Blank / whitespace-only model: nothing to persist.
    return null
  }
  const activeProfile = getActiveProviderProfile()
  if (!activeProfile) {
    return null
  }
  saveGlobalConfig(current => {
    const currentProfiles = getProviderProfiles(current)
    const profileIndex = currentProfiles.findIndex(
      profile => profile.id === activeProfile.id,
    )
    if (profileIndex < 0) {
      // Active profile vanished from config; leave config untouched.
      return current
    }
    const currentProfile = currentProfiles[profileIndex]
    if (currentProfile.model === nextModel) {
      // Model unchanged — avoid a redundant config write.
      return current
    }
    // Copy-on-write update of just the active profile's model.
    const nextProfiles = [...currentProfiles]
    nextProfiles[profileIndex] = {
      ...currentProfile,
      model: nextModel,
    }
    return {
      ...current,
      providerProfiles: nextProfiles,
    }
  })
  // Re-read after saving: bail out if the active profile changed meanwhile.
  const resolvedProfile = getActiveProviderProfile()
  if (!resolvedProfile || resolvedProfile.id !== activeProfile.id) {
    return null
  }
  if (
    process.env[PROFILE_ENV_APPLIED_FLAG] === '1' &&
    trimOrUndefined(process.env[PROFILE_ENV_APPLIED_ID]) === resolvedProfile.id
  ) {
    // Env is managed by this exact profile — refresh it with the new model.
    applyProviderProfileToProcessEnv(resolvedProfile)
  }
  return resolvedProfile
}
export function setActiveProviderProfile(
profileId: string,
): ProviderProfile | null {