Persist active provider profile across restarts (#833)
* Persist active provider profile across restarts
* Clear stale startup provider overrides
* Fix provider profile restart fallback
* Fix provider profile restart fallback
* Omit empty OpenAI API key from startup env
* Fix startup override settings typing
This commit is contained in:
@@ -46,6 +46,7 @@ import {
|
||||
rankOllamaModels,
|
||||
recommendOllamaModel,
|
||||
} from '../utils/providerRecommendation.js'
|
||||
import { clearStartupProviderOverrides } from '../utils/providerStartupOverrides.js'
|
||||
import { redactUrlForDisplay } from '../utils/urlRedaction.js'
|
||||
import { updateSettingsForSource } from '../utils/settings/settings.js'
|
||||
import {
|
||||
@@ -671,17 +672,7 @@ export function ProviderManager({ mode, onDone }: Props): React.ReactNode {
|
||||
}
|
||||
|
||||
function clearStartupProviderOverrideFromUserSettings(): string | null {
|
||||
const { error } = updateSettingsForSource('userSettings', {
|
||||
env: {
|
||||
CLAUDE_CODE_USE_OPENAI: undefined as any,
|
||||
CLAUDE_CODE_USE_GEMINI: undefined as any,
|
||||
CLAUDE_CODE_USE_GITHUB: undefined as any,
|
||||
CLAUDE_CODE_USE_BEDROCK: undefined as any,
|
||||
CLAUDE_CODE_USE_VERTEX: undefined as any,
|
||||
CLAUDE_CODE_USE_FOUNDRY: undefined as any,
|
||||
},
|
||||
})
|
||||
return error ? error.message : null
|
||||
return clearStartupProviderOverrides()
|
||||
}
|
||||
|
||||
function buildCodexOAuthActivationMessage(options: {
|
||||
|
||||
@@ -6,6 +6,7 @@ import test from 'node:test'
|
||||
|
||||
import { DEFAULT_CODEX_BASE_URL } from '../services/api/providerConfig.ts'
|
||||
import {
|
||||
applySavedProfileToCurrentSession,
|
||||
buildStartupEnvFromProfile,
|
||||
buildAtomicChatProfileEnv,
|
||||
buildCodexProfileEnv,
|
||||
@@ -111,6 +112,24 @@ test('openai launch ignores mismatched persisted ollama env', async () => {
|
||||
assert.equal(env.CHATGPT_ACCOUNT_ID, undefined)
|
||||
})
|
||||
|
||||
test('openai launch omits api key when no key is resolved', async () => {
|
||||
const env = await buildLaunchEnv({
|
||||
profile: 'openai',
|
||||
persisted: profile('openai', {
|
||||
OPENAI_BASE_URL: 'https://api.openai.com/v1',
|
||||
OPENAI_MODEL: 'gpt-4o',
|
||||
}),
|
||||
goal: 'balanced',
|
||||
processEnv: {
|
||||
OPENAI_API_KEY: undefined as any,
|
||||
},
|
||||
})
|
||||
|
||||
assert.equal(env.OPENAI_BASE_URL, 'https://api.openai.com/v1')
|
||||
assert.equal(env.OPENAI_MODEL, 'gpt-4o')
|
||||
assert.equal(Object.hasOwn(env, 'OPENAI_API_KEY'), false)
|
||||
})
|
||||
|
||||
test('openai launch ignores codex shell transport hints', async () => {
|
||||
const env = await buildLaunchEnv({
|
||||
profile: 'openai',
|
||||
@@ -572,6 +591,51 @@ test('buildStartupEnvFromProfile leaves explicit provider selections untouched',
|
||||
assert.equal(env.OPENAI_API_KEY, undefined)
|
||||
})
|
||||
|
||||
test('buildStartupEnvFromProfile preserves explicit GitHub provider settings when the legacy file is stale', async () => {
|
||||
const processEnv = {
|
||||
CLAUDE_CODE_USE_GITHUB: '1',
|
||||
OPENAI_MODEL: 'github:copilot',
|
||||
}
|
||||
|
||||
const env = await buildStartupEnvFromProfile({
|
||||
persisted: profile('openai', {
|
||||
OPENAI_API_KEY: 'sk-stale',
|
||||
OPENAI_MODEL: 'gpt-4o',
|
||||
OPENAI_BASE_URL: 'https://api.openai.com/v1',
|
||||
}),
|
||||
processEnv,
|
||||
})
|
||||
|
||||
assert.equal(env, processEnv)
|
||||
assert.equal(env.CLAUDE_CODE_USE_GITHUB, '1')
|
||||
assert.equal(env.OPENAI_MODEL, 'github:copilot')
|
||||
assert.equal(env.CLAUDE_CODE_USE_OPENAI, undefined)
|
||||
assert.equal(env.OPENAI_API_KEY, undefined)
|
||||
assert.equal(env.OPENAI_BASE_URL, undefined)
|
||||
})
|
||||
|
||||
test('applySavedProfileToCurrentSession can switch away from GitHub provider env', async () => {
|
||||
const processEnv = {
|
||||
CLAUDE_CODE_USE_GITHUB: '1',
|
||||
OPENAI_MODEL: 'github:copilot',
|
||||
}
|
||||
|
||||
const error = await applySavedProfileToCurrentSession({
|
||||
profileFile: profile('ollama', {
|
||||
OPENAI_BASE_URL: 'http://localhost:11434/v1',
|
||||
OPENAI_MODEL: 'llama3.1:8b',
|
||||
}),
|
||||
processEnv,
|
||||
})
|
||||
|
||||
assert.equal(error, null)
|
||||
assert.equal(processEnv.CLAUDE_CODE_USE_GITHUB, undefined)
|
||||
assert.equal(processEnv.CLAUDE_CODE_USE_OPENAI, '1')
|
||||
assert.equal(processEnv.OPENAI_BASE_URL, 'http://localhost:11434/v1')
|
||||
assert.equal(processEnv.OPENAI_MODEL, 'llama3.1:8b')
|
||||
assert.equal(Object.hasOwn(processEnv, 'OPENAI_API_KEY'), false)
|
||||
})
|
||||
|
||||
test('buildStartupEnvFromProfile preserves plural-profile env when the legacy file is stale', async () => {
|
||||
// Regression: a user saves a provider via /provider (plural system).
|
||||
// addProviderProfile does NOT sync the legacy .openclaude-profile.json,
|
||||
|
||||
@@ -42,6 +42,7 @@ export const DEFAULT_MISTRAL_MODEL = 'devstral-latest'
|
||||
|
||||
const PROFILE_ENV_KEYS = [
|
||||
'CLAUDE_CODE_USE_OPENAI',
|
||||
'CLAUDE_CODE_USE_GITHUB',
|
||||
'CLAUDE_CODE_USE_GEMINI',
|
||||
'CLAUDE_CODE_USE_MISTRAL',
|
||||
'CLAUDE_CODE_USE_BEDROCK',
|
||||
@@ -823,7 +824,12 @@ export async function buildLaunchEnv(options: {
|
||||
(useShellOpenAIConfig ? shellOpenAIModel : undefined) ||
|
||||
(usePersistedOpenAIConfig ? persistedOpenAIModel : undefined) ||
|
||||
defaultOpenAIModel
|
||||
env.OPENAI_API_KEY = processEnv.OPENAI_API_KEY || persistedEnv.OPENAI_API_KEY
|
||||
const openAIKey = processEnv.OPENAI_API_KEY || persistedEnv.OPENAI_API_KEY
|
||||
if (openAIKey) {
|
||||
env.OPENAI_API_KEY = openAIKey
|
||||
} else {
|
||||
delete env.OPENAI_API_KEY
|
||||
}
|
||||
delete env.CODEX_API_KEY
|
||||
delete env.CHATGPT_ACCOUNT_ID
|
||||
delete env.CODEX_ACCOUNT_ID
|
||||
@@ -859,6 +865,10 @@ export async function buildStartupEnvFromProfile(options?: {
|
||||
return processEnv
|
||||
}
|
||||
|
||||
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
|
||||
return processEnv
|
||||
}
|
||||
|
||||
if (!persisted) {
|
||||
return processEnv
|
||||
}
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
import { mkdtempSync, readFileSync, rmSync } from 'node:fs'
|
||||
import { tmpdir } from 'node:os'
|
||||
import { join } from 'node:path'
|
||||
|
||||
import { afterEach, describe, expect, mock, test } from 'bun:test'
|
||||
|
||||
import type { ProviderProfile } from './config.js'
|
||||
@@ -7,6 +11,7 @@ async function importFreshProvidersModule() {
|
||||
}
|
||||
|
||||
const originalEnv = { ...process.env }
|
||||
const originalCwd = process.cwd()
|
||||
|
||||
const RESTORED_KEYS = [
|
||||
'CLAUDE_CODE_PROVIDER_PROFILE_ENV_APPLIED',
|
||||
@@ -75,6 +80,7 @@ afterEach(() => {
|
||||
|
||||
mock.restore()
|
||||
mockConfigState = createMockConfigState()
|
||||
process.chdir(originalCwd)
|
||||
})
|
||||
|
||||
async function importFreshProviderProfileModules() {
|
||||
@@ -569,6 +575,45 @@ describe('setActiveProviderProfile', () => {
|
||||
)
|
||||
})
|
||||
|
||||
test('persists no-key openai-compatible profiles for restart fallback', async () => {
|
||||
const tempDir = mkdtempSync(join(tmpdir(), 'openclaude-provider-'))
|
||||
process.chdir(tempDir)
|
||||
process.env.OPENAI_API_KEY = 'sk-shell-should-not-persist'
|
||||
|
||||
try {
|
||||
const { setActiveProviderProfile } =
|
||||
await importFreshProviderProfileModules()
|
||||
const ollamaProfile = buildProfile({
|
||||
id: 'ollama_prof',
|
||||
name: 'Ollama',
|
||||
provider: 'openai',
|
||||
baseUrl: 'http://localhost:11434/v1',
|
||||
model: 'llama3.1:8b, qwen2.5:7b',
|
||||
apiKey: '',
|
||||
})
|
||||
|
||||
saveMockGlobalConfig(current => ({
|
||||
...current,
|
||||
providerProfiles: [ollamaProfile],
|
||||
}))
|
||||
|
||||
const result = setActiveProviderProfile('ollama_prof')
|
||||
const persisted = JSON.parse(
|
||||
readFileSync(join(tempDir, '.openclaude-profile.json'), 'utf8'),
|
||||
)
|
||||
|
||||
expect(result?.id).toBe('ollama_prof')
|
||||
expect(persisted.profile).toBe('openai')
|
||||
expect(persisted.env).toEqual({
|
||||
OPENAI_BASE_URL: 'http://localhost:11434/v1',
|
||||
OPENAI_MODEL: 'llama3.1:8b',
|
||||
})
|
||||
} finally {
|
||||
process.chdir(originalCwd)
|
||||
rmSync(tempDir, { recursive: true, force: true })
|
||||
}
|
||||
})
|
||||
|
||||
test('sets ANTHROPIC_MODEL env var when switching to an anthropic-type provider', async () => {
|
||||
const { setActiveProviderProfile } =
|
||||
await importFreshProviderProfileModules()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { randomBytes } from 'crypto'
|
||||
import { isCodexBaseUrl } from '../services/api/providerConfig.js'
|
||||
import {
|
||||
getGlobalConfig,
|
||||
saveGlobalConfig,
|
||||
@@ -12,6 +13,7 @@ import {
|
||||
buildGeminiProfileEnv,
|
||||
buildMistralProfileEnv,
|
||||
buildOpenAIProfileEnv,
|
||||
type ProfileEnv,
|
||||
type ProviderProfile as ProviderProfileStartup,
|
||||
} from './providerProfile.js'
|
||||
|
||||
@@ -832,6 +834,38 @@ export function getProfileModelOptions(profile: ProviderProfile): ModelOption[]
|
||||
}))
|
||||
}
|
||||
|
||||
function buildOpenAICompatibleStartupEnv(
|
||||
activeProfile: ProviderProfile,
|
||||
): ProfileEnv | null {
|
||||
if (isCodexBaseUrl(activeProfile.baseUrl)) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (activeProfile.apiKey) {
|
||||
const strictEnv = buildOpenAIProfileEnv({
|
||||
goal: 'balanced',
|
||||
model: activeProfile.model,
|
||||
baseUrl: activeProfile.baseUrl,
|
||||
apiKey: activeProfile.apiKey,
|
||||
processEnv: {},
|
||||
})
|
||||
if (strictEnv) {
|
||||
return strictEnv
|
||||
}
|
||||
}
|
||||
|
||||
const env: ProfileEnv = {
|
||||
OPENAI_BASE_URL: activeProfile.baseUrl,
|
||||
OPENAI_MODEL: getPrimaryModel(activeProfile.model),
|
||||
}
|
||||
if (activeProfile.apiKey) {
|
||||
env.OPENAI_API_KEY = activeProfile.apiKey
|
||||
} else {
|
||||
delete env.OPENAI_API_KEY
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
export function setActiveProviderProfile(
|
||||
profileId: string,
|
||||
): ProviderProfile | null {
|
||||
@@ -890,15 +924,17 @@ export function setActiveProviderProfile(
|
||||
}) ?? null
|
||||
)
|
||||
default:
|
||||
// anthropic and all openai-compatible providers
|
||||
return (
|
||||
buildOpenAIProfileEnv({
|
||||
model: activeProfile.model,
|
||||
baseUrl: activeProfile.baseUrl,
|
||||
apiKey: activeProfile.apiKey,
|
||||
processEnv: process.env,
|
||||
}) ?? null
|
||||
)
|
||||
return activeProfile.provider === 'anthropic'
|
||||
? (
|
||||
buildOpenAIProfileEnv({
|
||||
goal: 'balanced',
|
||||
model: activeProfile.model,
|
||||
baseUrl: activeProfile.baseUrl,
|
||||
apiKey: activeProfile.apiKey,
|
||||
processEnv: process.env,
|
||||
}) ?? null
|
||||
)
|
||||
: buildOpenAICompatibleStartupEnv(activeProfile)
|
||||
}
|
||||
})()
|
||||
|
||||
|
||||
41
src/utils/providerStartupOverrides.test.ts
Normal file
41
src/utils/providerStartupOverrides.test.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { describe, expect, mock, test } from 'bun:test'
|
||||
|
||||
import { clearStartupProviderOverrides } from './providerStartupOverrides.js'
|
||||
|
||||
describe('clearStartupProviderOverrides', () => {
|
||||
test('removes stale provider env from user settings and global config env', () => {
|
||||
const updateUserSettings = mock(() => ({ error: null }))
|
||||
const saveConfig = mock((updater: (current: {
|
||||
env: Record<string, string>
|
||||
}) => { env: Record<string, string> }) =>
|
||||
updater({
|
||||
env: {
|
||||
CLAUDE_CODE_USE_OPENAI: '1',
|
||||
OPENAI_BASE_URL: 'https://api.minimax.io/v1',
|
||||
OPENAI_MODEL: 'minimax-m2.7',
|
||||
MINIMAX_API_KEY: 'sk-minimax',
|
||||
KEEP_ME: '1',
|
||||
},
|
||||
}),
|
||||
)
|
||||
|
||||
const error = clearStartupProviderOverrides({
|
||||
updateUserSettings,
|
||||
saveConfig,
|
||||
})
|
||||
|
||||
expect(error).toBeNull()
|
||||
expect(updateUserSettings).toHaveBeenCalledWith(
|
||||
'userSettings',
|
||||
expect.objectContaining({
|
||||
env: expect.objectContaining({
|
||||
CLAUDE_CODE_USE_OPENAI: undefined,
|
||||
OPENAI_BASE_URL: undefined,
|
||||
OPENAI_MODEL: undefined,
|
||||
MINIMAX_API_KEY: undefined,
|
||||
}),
|
||||
}),
|
||||
)
|
||||
expect(saveConfig.mock.results[0]?.value.env).toEqual({ KEEP_ME: '1' })
|
||||
})
|
||||
})
|
||||
87
src/utils/providerStartupOverrides.ts
Normal file
87
src/utils/providerStartupOverrides.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { saveGlobalConfig } from './config.js'
|
||||
import { updateSettingsForSource } from './settings/settings.js'
|
||||
|
||||
// Env keys that can force a provider at startup and must be scrubbed from
// persisted settings when the user switches profiles. Grouped by provider.
export const STARTUP_PROVIDER_OVERRIDE_ENV_KEYS = [
  // Provider-selection flags
  'CLAUDE_CODE_USE_OPENAI',
  'CLAUDE_CODE_USE_GEMINI',
  'CLAUDE_CODE_USE_MISTRAL',
  'CLAUDE_CODE_USE_GITHUB',
  'CLAUDE_CODE_USE_BEDROCK',
  'CLAUDE_CODE_USE_VERTEX',
  'CLAUDE_CODE_USE_FOUNDRY',
  // OpenAI / OpenAI-compatible
  'OPENAI_BASE_URL',
  'OPENAI_API_BASE',
  'OPENAI_MODEL',
  'OPENAI_API_KEY',
  'OPENAI_ORG',
  'OPENAI_PROJECT',
  'OPENAI_ORGANIZATION',
  // Anthropic
  'ANTHROPIC_BASE_URL',
  'ANTHROPIC_MODEL',
  'ANTHROPIC_API_KEY',
  // Gemini / Google
  'GEMINI_API_KEY',
  'GOOGLE_API_KEY',
  'GEMINI_BASE_URL',
  'GEMINI_MODEL',
  'GEMINI_ACCESS_TOKEN',
  'GEMINI_AUTH_MODE',
  // Mistral
  'MISTRAL_BASE_URL',
  'MISTRAL_MODEL',
  'MISTRAL_API_KEY',
  // Codex / ChatGPT accounts
  'CODEX_API_KEY',
  'CODEX_CREDENTIAL_SOURCE',
  'CHATGPT_ACCOUNT_ID',
  'CODEX_ACCOUNT_ID',
  // MiniMax
  'MINIMAX_API_KEY',
  'MINIMAX_BASE_URL',
  'MINIMAX_MODEL',
  // NVIDIA
  'NVIDIA_API_KEY',
  'NVIDIA_NIM',
] as const
|
||||
|
||||
// Minimal view of the global config shape this module touches.
type GlobalConfigWithEnv = {
  env?: Record<string, string>
}

// Patch object for updateSettingsForSource: maps override keys to values
// (or to the deletion sentinel below).
type SettingsEnvPatch = Partial<
  Record<(typeof STARTUP_PROVIDER_OVERRIDE_ENV_KEYS)[number], string>
>

// NOTE(review): the settings layer presumably removes a key whose value is
// `undefined`, but the env record is typed with string values, so a cast is
// needed to express deletion. `undefined as unknown as string` is a
// type-system lie kept for compatibility — confirm whether
// updateSettingsForSource could accept `string | undefined` instead.
const DELETE_SETTINGS_ENV_VALUE = undefined as unknown as string
|
||||
export function clearStartupProviderOverrides(options?: {
|
||||
updateUserSettings?: typeof updateSettingsForSource
|
||||
saveConfig?: typeof saveGlobalConfig
|
||||
}): string | null {
|
||||
const updateUserSettings = options?.updateUserSettings ?? updateSettingsForSource
|
||||
const saveConfig = options?.saveConfig ?? saveGlobalConfig
|
||||
const envPatch = Object.fromEntries(
|
||||
STARTUP_PROVIDER_OVERRIDE_ENV_KEYS.map(key => [
|
||||
key,
|
||||
DELETE_SETTINGS_ENV_VALUE,
|
||||
]),
|
||||
) as SettingsEnvPatch
|
||||
|
||||
const { error } = updateUserSettings('userSettings', { env: envPatch })
|
||||
|
||||
let globalConfigError: string | null = null
|
||||
try {
|
||||
saveConfig((current: GlobalConfigWithEnv) => {
|
||||
const currentEnv = current.env ?? {}
|
||||
let changed = false
|
||||
const nextEnv = { ...currentEnv }
|
||||
for (const key of STARTUP_PROVIDER_OVERRIDE_ENV_KEYS) {
|
||||
if (key in nextEnv) {
|
||||
delete nextEnv[key]
|
||||
changed = true
|
||||
}
|
||||
}
|
||||
return changed ? { ...current, env: nextEnv } : current
|
||||
})
|
||||
} catch (configError) {
|
||||
globalConfigError =
|
||||
configError instanceof Error ? configError.message : String(configError)
|
||||
}
|
||||
|
||||
return error?.message ?? globalConfigError
|
||||
}
|
||||
Reference in New Issue
Block a user