feat: add guided /provider setup

This commit is contained in:
Vasanthdev2004
2026-04-02 13:13:50 +05:30
parent 1059915c84
commit 08f0b6030e
15 changed files with 2111 additions and 233 deletions

View File

@@ -1,3 +1,5 @@
import { existsSync, readFileSync, rmSync, writeFileSync } from 'node:fs'
import { resolve } from 'node:path'
import {
DEFAULT_CODEX_BASE_URL,
DEFAULT_OPENAI_BASE_URL,
@@ -7,11 +9,40 @@ import {
} from '../services/api/providerConfig.ts'
import {
getGoalDefaultOpenAIModel,
normalizeRecommendationGoal,
type RecommendationGoal,
} from './providerRecommendation.ts'
import { getOllamaChatBaseUrl } from './providerDiscovery.ts'
const DEFAULT_GEMINI_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
const DEFAULT_GEMINI_MODEL = 'gemini-2.0-flash'
export const PROFILE_FILE_NAME = '.openclaude-profile.json'
export const DEFAULT_GEMINI_BASE_URL =
'https://generativelanguage.googleapis.com/v1beta/openai'
export const DEFAULT_GEMINI_MODEL = 'gemini-2.0-flash'
// Every env var managed by the provider-profile system. These are deleted
// from the target environment before a profile's values are applied (see
// applyProfileEnvToProcessEnv) so stale settings never leak between profiles.
const PROFILE_ENV_KEYS = [
  'CLAUDE_CODE_USE_OPENAI',
  'CLAUDE_CODE_USE_GEMINI',
  'CLAUDE_CODE_USE_BEDROCK',
  'CLAUDE_CODE_USE_VERTEX',
  'CLAUDE_CODE_USE_FOUNDRY',
  'OPENAI_BASE_URL',
  'OPENAI_MODEL',
  'OPENAI_API_KEY',
  'CODEX_API_KEY',
  'CHATGPT_ACCOUNT_ID',
  'CODEX_ACCOUNT_ID',
  'GEMINI_API_KEY',
  'GEMINI_MODEL',
  'GEMINI_BASE_URL',
  'GOOGLE_API_KEY',
] as const
// Env vars whose values are secrets (API keys). Used by collectSecretValues
// to build the set of known secret strings that must be masked or dropped
// before any value is displayed or persisted as plain config.
const SECRET_ENV_KEYS = [
  'OPENAI_API_KEY',
  'CODEX_API_KEY',
  'GEMINI_API_KEY',
  'GOOGLE_API_KEY',
] as const
/** The four provider backends a persisted profile can select. */
export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini'
@@ -33,6 +64,35 @@ export type ProfileFile = {
createdAt: string
}
// Any env-shaped object (process.env or a persisted ProfileEnv) that may
// carry one of the secret keys; every property is optional.
type SecretValueSource = Partial<
  Pick<
    NodeJS.ProcessEnv & ProfileEnv,
    (typeof SECRET_ENV_KEYS)[number]
  >
>
// Where to read/write the profile file: an explicit filePath overrides
// cwd-based resolution (see resolveProfileFilePath).
type ProfileFileLocation = {
  cwd?: string
  filePath?: string
}
/**
 * Resolves the on-disk location of the profile file. An explicit
 * `filePath` always wins; otherwise PROFILE_FILE_NAME is joined to
 * `cwd` (falling back to process.cwd()).
 */
function resolveProfileFilePath(options?: ProfileFileLocation): string {
  const explicitPath = options?.filePath
  if (explicitPath) {
    return explicitPath
  }
  const baseDir = options?.cwd ?? process.cwd()
  return resolve(baseDir, PROFILE_FILE_NAME)
}
/**
 * Runtime type guard for {@link ProviderProfile}: accepts exactly the four
 * supported provider identifiers and nothing else.
 */
export function isProviderProfile(value: unknown): value is ProviderProfile {
  const knownProfiles: readonly string[] = ['openai', 'ollama', 'codex', 'gemini']
  return typeof value === 'string' && knownProfiles.includes(value)
}
export function sanitizeApiKey(
key: string | null | undefined,
): string | undefined {
@@ -40,6 +100,95 @@ export function sanitizeApiKey(
return key
}
/**
 * Heuristically detects whether a string looks like a provider API key.
 *
 * Matches the well-known key prefixes used by OpenAI/Anthropic (`sk-`) and
 * Google (`AIza`). The original code also tested `sk-ant-` separately, but
 * that prefix is already covered by `sk-`, so the redundant check is removed.
 *
 * @param value - Candidate string; surrounding whitespace is ignored.
 * @returns true when the trimmed value starts with a known key prefix.
 */
function looksLikeSecretValue(value: string): boolean {
  const trimmed = value.trim()
  if (!trimmed) return false
  // Keep this list in sync with the providers covered by SECRET_ENV_KEYS.
  const KNOWN_KEY_PREFIXES = ['sk-', 'AIza'] as const
  return KNOWN_KEY_PREFIXES.some((prefix) => trimmed.startsWith(prefix))
}
/**
 * Gathers every distinct, sanitized secret value found across the given
 * env-like sources, looking only at the keys listed in SECRET_ENV_KEYS.
 * Null/undefined sources are skipped; duplicates are collapsed while
 * preserving first-seen order.
 */
function collectSecretValues(
  sources: Array<SecretValueSource | null | undefined>,
): string[] {
  const sanitized = sources
    .filter((source): source is SecretValueSource => source != null)
    .flatMap((source) => SECRET_ENV_KEYS.map((key) => sanitizeApiKey(source[key])))
    .filter((value): value is string => value !== undefined)
  return [...new Set(sanitized)]
}
/**
 * Produces a short, non-reversible display form of an API key, e.g.
 * `sk-...abcd` or `AIza...wxyz`.
 *
 * @returns undefined when the key sanitizes to nothing; the literal string
 *   'configured' for keys too short (<= 8 chars) to mask without leaking
 *   most of their content; otherwise a prefix + '...' + last-4 form.
 */
export function maskSecretForDisplay(
  value: string | null | undefined,
): string | undefined {
  const sanitized = sanitizeApiKey(value)
  if (!sanitized) return undefined
  if (sanitized.length <= 8) {
    return 'configured'
  }
  // Show just enough of the head to hint at the provider.
  let headLength = 2
  if (sanitized.startsWith('sk-')) {
    headLength = 3
  } else if (sanitized.startsWith('AIza')) {
    headLength = 4
  }
  return `${sanitized.slice(0, headLength)}...${sanitized.slice(-4)}`
}
/**
 * Prepares a config value for display, masking it when it is (or looks like)
 * a secret. A value matching any known secret from `sources`, or matching a
 * known key prefix, is replaced with its masked form ('configured' as a
 * last-resort label).
 */
export function redactSecretValueForDisplay(
  value: string | null | undefined,
  ...sources: Array<SecretValueSource | null | undefined>
): string | undefined {
  if (!value) return undefined
  const trimmed = value.trim()
  // Preserve the original contract: whitespace-only input yields ''.
  if (!trimmed) return trimmed
  const knownSecrets = collectSecretValues(sources)
  const isSecret = knownSecrets.includes(trimmed) || looksLikeSecretValue(trimmed)
  return isSecret ? (maskSecretForDisplay(trimmed) ?? 'configured') : trimmed
}
/**
 * Accepts a provider config value (model name, base URL, ...) only when it
 * is clearly NOT a secret: returns undefined for empty input, for any value
 * matching a known secret from `sources`, and for anything that merely
 * looks like an API key. Otherwise returns the trimmed value.
 */
export function sanitizeProviderConfigValue(
  value: string | null | undefined,
  ...sources: Array<SecretValueSource | null | undefined>
): string | undefined {
  if (!value) return undefined
  const trimmed = value.trim()
  if (!trimmed) return undefined
  if (collectSecretValues(sources).includes(trimmed)) return undefined
  if (looksLikeSecretValue(trimmed)) return undefined
  return trimmed
}
export function buildOllamaProfileEnv(
model: string,
options: {
@@ -71,11 +220,23 @@ export function buildGeminiProfileEnv(options: {
const env: ProfileEnv = {
GEMINI_MODEL:
options.model || processEnv.GEMINI_MODEL || DEFAULT_GEMINI_MODEL,
sanitizeProviderConfigValue(options.model, { GEMINI_API_KEY: key }, processEnv) ||
sanitizeProviderConfigValue(
processEnv.GEMINI_MODEL,
{ GEMINI_API_KEY: key },
processEnv,
) ||
DEFAULT_GEMINI_MODEL,
GEMINI_API_KEY: key,
}
const baseUrl = options.baseUrl || processEnv.GEMINI_BASE_URL
const baseUrl =
sanitizeProviderConfigValue(options.baseUrl, { GEMINI_API_KEY: key }, processEnv) ||
sanitizeProviderConfigValue(
processEnv.GEMINI_BASE_URL,
{ GEMINI_API_KEY: key },
processEnv,
)
if (baseUrl) {
env.GEMINI_BASE_URL = baseUrl
}
@@ -97,21 +258,39 @@ export function buildOpenAIProfileEnv(options: {
}
const defaultModel = getGoalDefaultOpenAIModel(options.goal)
const shellOpenAIModel = sanitizeProviderConfigValue(
processEnv.OPENAI_MODEL,
{ OPENAI_API_KEY: key },
processEnv,
)
const shellOpenAIBaseUrl = sanitizeProviderConfigValue(
processEnv.OPENAI_BASE_URL,
{ OPENAI_API_KEY: key },
processEnv,
)
const shellOpenAIRequest = resolveProviderRequest({
model: processEnv.OPENAI_MODEL,
baseUrl: processEnv.OPENAI_BASE_URL,
model: shellOpenAIModel,
baseUrl: shellOpenAIBaseUrl,
fallbackModel: defaultModel,
})
const useShellOpenAIConfig = shellOpenAIRequest.transport === 'chat_completions'
return {
OPENAI_BASE_URL:
options.baseUrl ||
(useShellOpenAIConfig ? processEnv.OPENAI_BASE_URL : undefined) ||
sanitizeProviderConfigValue(
options.baseUrl,
{ OPENAI_API_KEY: key },
processEnv,
) ||
(useShellOpenAIConfig ? shellOpenAIBaseUrl : undefined) ||
DEFAULT_OPENAI_BASE_URL,
OPENAI_MODEL:
options.model ||
(useShellOpenAIConfig ? processEnv.OPENAI_MODEL : undefined) ||
sanitizeProviderConfigValue(
options.model,
{ OPENAI_API_KEY: key },
processEnv,
) ||
(useShellOpenAIConfig ? shellOpenAIModel : undefined) ||
defaultModel,
OPENAI_API_KEY: key,
}
@@ -158,6 +337,61 @@ export function createProfileFile(
}
}
/**
 * Reads and validates the persisted profile file.
 *
 * @returns The parsed ProfileFile, or null when the file is missing,
 *   unreadable, not valid JSON, or structurally invalid. Never throws.
 */
export function loadProfileFile(options?: ProfileFileLocation): ProfileFile | null {
  const filePath = resolveProfileFilePath(options)
  if (!existsSync(filePath)) {
    return null
  }
  let parsed: Partial<ProfileFile>
  try {
    parsed = JSON.parse(readFileSync(filePath, 'utf8')) as Partial<ProfileFile>
  } catch {
    // Unreadable or malformed file: treat as absent.
    return null
  }
  if (
    !isProviderProfile(parsed.profile) ||
    !parsed.env ||
    typeof parsed.env !== 'object'
  ) {
    return null
  }
  return {
    profile: parsed.profile,
    env: parsed.env,
    // Backfill createdAt for files written before the field existed.
    createdAt:
      typeof parsed.createdAt === 'string'
        ? parsed.createdAt
        : new Date().toISOString(),
  }
}
/**
 * Serializes the profile to disk as pretty-printed JSON and returns the
 * path that was written. Written with mode 0o600 because the file may hold
 * API keys.
 * NOTE(review): `mode` only applies when the file is created; an existing
 * file keeps its old permissions — confirm whether a chmod is needed here.
 */
export function saveProfileFile(
  profileFile: ProfileFile,
  options?: ProfileFileLocation,
): string {
  const filePath = resolveProfileFilePath(options)
  const payload = JSON.stringify(profileFile, null, 2)
  writeFileSync(filePath, payload, { encoding: 'utf8', mode: 0o600 })
  return filePath
}
/**
 * Removes the profile file and returns the path that was targeted.
 * `force: true` makes removal a no-op when the file is already gone.
 */
export function deleteProfileFile(options?: ProfileFileLocation): string {
  const filePath = resolveProfileFilePath(options)
  rmSync(filePath, { force: true })
  return filePath
}
/**
 * Reports whether the shell environment already selects a provider via any
 * CLAUDE_CODE_USE_* flag. A flag counts as set when it is defined at all,
 * even with an empty-string value.
 */
export function hasExplicitProviderSelection(
  processEnv: NodeJS.ProcessEnv = process.env,
): boolean {
  const selectionFlags = [
    'CLAUDE_CODE_USE_OPENAI',
    'CLAUDE_CODE_USE_GEMINI',
    'CLAUDE_CODE_USE_BEDROCK',
    'CLAUDE_CODE_USE_VERTEX',
    'CLAUDE_CODE_USE_FOUNDRY',
  ] as const
  return selectionFlags.some((flag) => processEnv[flag] !== undefined)
}
export function selectAutoProfile(
recommendedOllamaModel: string | null,
): ProviderProfile {
@@ -177,6 +411,38 @@ export async function buildLaunchEnv(options: {
options.persisted?.profile === options.profile
? options.persisted.env ?? {}
: {}
const persistedOpenAIModel = sanitizeProviderConfigValue(
persistedEnv.OPENAI_MODEL,
persistedEnv,
)
const persistedOpenAIBaseUrl = sanitizeProviderConfigValue(
persistedEnv.OPENAI_BASE_URL,
persistedEnv,
)
const shellOpenAIModel = sanitizeProviderConfigValue(
processEnv.OPENAI_MODEL,
processEnv,
)
const shellOpenAIBaseUrl = sanitizeProviderConfigValue(
processEnv.OPENAI_BASE_URL,
processEnv,
)
const persistedGeminiModel = sanitizeProviderConfigValue(
persistedEnv.GEMINI_MODEL,
persistedEnv,
)
const persistedGeminiBaseUrl = sanitizeProviderConfigValue(
persistedEnv.GEMINI_BASE_URL,
persistedEnv,
)
const shellGeminiModel = sanitizeProviderConfigValue(
processEnv.GEMINI_MODEL,
processEnv,
)
const shellGeminiBaseUrl = sanitizeProviderConfigValue(
processEnv.GEMINI_BASE_URL,
processEnv,
)
const shellGeminiKey = sanitizeApiKey(
processEnv.GEMINI_API_KEY ?? processEnv.GOOGLE_API_KEY,
@@ -192,12 +458,12 @@ export async function buildLaunchEnv(options: {
delete env.CLAUDE_CODE_USE_OPENAI
env.GEMINI_MODEL =
processEnv.GEMINI_MODEL ||
persistedEnv.GEMINI_MODEL ||
shellGeminiModel ||
persistedGeminiModel ||
DEFAULT_GEMINI_MODEL
env.GEMINI_BASE_URL =
processEnv.GEMINI_BASE_URL ||
persistedEnv.GEMINI_BASE_URL ||
shellGeminiBaseUrl ||
persistedGeminiBaseUrl ||
DEFAULT_GEMINI_BASE_URL
const geminiKey = shellGeminiKey || persistedGeminiKey
@@ -235,9 +501,9 @@ export async function buildLaunchEnv(options: {
const resolveOllamaModel =
options.resolveOllamaDefaultModel ?? (async () => 'llama3.1:8b')
env.OPENAI_BASE_URL = persistedEnv.OPENAI_BASE_URL || getOllamaBaseUrl()
env.OPENAI_BASE_URL = persistedOpenAIBaseUrl || getOllamaBaseUrl()
env.OPENAI_MODEL =
persistedEnv.OPENAI_MODEL ||
persistedOpenAIModel ||
(await resolveOllamaModel(options.goal))
delete env.OPENAI_API_KEY
@@ -250,10 +516,10 @@ export async function buildLaunchEnv(options: {
if (options.profile === 'codex') {
env.OPENAI_BASE_URL =
persistedEnv.OPENAI_BASE_URL && isCodexBaseUrl(persistedEnv.OPENAI_BASE_URL)
? persistedEnv.OPENAI_BASE_URL
persistedOpenAIBaseUrl && isCodexBaseUrl(persistedOpenAIBaseUrl)
? persistedOpenAIBaseUrl
: DEFAULT_CODEX_BASE_URL
env.OPENAI_MODEL = persistedEnv.OPENAI_MODEL || 'codexplan'
env.OPENAI_MODEL = persistedOpenAIModel || 'codexplan'
delete env.OPENAI_API_KEY
const codexKey =
@@ -284,27 +550,27 @@ export async function buildLaunchEnv(options: {
const defaultOpenAIModel = getGoalDefaultOpenAIModel(options.goal)
const shellOpenAIRequest = resolveProviderRequest({
model: processEnv.OPENAI_MODEL,
baseUrl: processEnv.OPENAI_BASE_URL,
model: shellOpenAIModel,
baseUrl: shellOpenAIBaseUrl,
fallbackModel: defaultOpenAIModel,
})
const persistedOpenAIRequest = resolveProviderRequest({
model: persistedEnv.OPENAI_MODEL,
baseUrl: persistedEnv.OPENAI_BASE_URL,
model: persistedOpenAIModel,
baseUrl: persistedOpenAIBaseUrl,
fallbackModel: defaultOpenAIModel,
})
const useShellOpenAIConfig = shellOpenAIRequest.transport === 'chat_completions'
const usePersistedOpenAIConfig =
(!persistedEnv.OPENAI_MODEL && !persistedEnv.OPENAI_BASE_URL) ||
(!persistedOpenAIModel && !persistedOpenAIBaseUrl) ||
persistedOpenAIRequest.transport === 'chat_completions'
env.OPENAI_BASE_URL =
(useShellOpenAIConfig ? processEnv.OPENAI_BASE_URL : undefined) ||
(usePersistedOpenAIConfig ? persistedEnv.OPENAI_BASE_URL : undefined) ||
(useShellOpenAIConfig ? shellOpenAIBaseUrl : undefined) ||
(usePersistedOpenAIConfig ? persistedOpenAIBaseUrl : undefined) ||
DEFAULT_OPENAI_BASE_URL
env.OPENAI_MODEL =
(useShellOpenAIConfig ? processEnv.OPENAI_MODEL : undefined) ||
(usePersistedOpenAIConfig ? persistedEnv.OPENAI_MODEL : undefined) ||
(useShellOpenAIConfig ? shellOpenAIModel : undefined) ||
(usePersistedOpenAIConfig ? persistedOpenAIModel : undefined) ||
defaultOpenAIModel
env.OPENAI_API_KEY = processEnv.OPENAI_API_KEY || persistedEnv.OPENAI_API_KEY
delete env.CODEX_API_KEY
@@ -312,3 +578,44 @@ export async function buildLaunchEnv(options: {
delete env.CODEX_ACCOUNT_ID
return env
}
/**
 * Computes the startup environment from the persisted provider profile.
 *
 * Returns `processEnv` unchanged when the shell already selects a provider
 * explicitly (any CLAUDE_CODE_USE_* flag) or when no profile is persisted;
 * otherwise delegates to buildLaunchEnv for the persisted profile.
 */
export async function buildStartupEnvFromProfile(options?: {
  persisted?: ProfileFile | null
  goal?: RecommendationGoal
  processEnv?: NodeJS.ProcessEnv
  getOllamaChatBaseUrl?: (baseUrl?: string) => string
  resolveOllamaDefaultModel?: (goal: RecommendationGoal) => Promise<string>
}): Promise<NodeJS.ProcessEnv> {
  const processEnv = options?.processEnv ?? process.env
  // An explicit shell-level provider selection always wins over the profile.
  if (hasExplicitProviderSelection(processEnv)) {
    return processEnv
  }
  const persisted = options?.persisted ?? loadProfileFile()
  if (persisted == null) {
    return processEnv
  }
  const goal =
    options?.goal ??
    normalizeRecommendationGoal(processEnv.OPENCLAUDE_PROFILE_GOAL)
  return buildLaunchEnv({
    profile: persisted.profile,
    persisted,
    goal,
    processEnv,
    getOllamaChatBaseUrl: options?.getOllamaChatBaseUrl ?? getOllamaChatBaseUrl,
    resolveOllamaDefaultModel: options?.resolveOllamaDefaultModel,
  })
}
/**
 * Mutates `targetEnv` in place: first deletes every profile-managed key
 * (PROFILE_ENV_KEYS) so stale values cannot survive a profile switch, then
 * copies all entries of `nextEnv` on top.
 */
export function applyProfileEnvToProcessEnv(
  targetEnv: NodeJS.ProcessEnv,
  nextEnv: NodeJS.ProcessEnv,
): void {
  PROFILE_ENV_KEYS.forEach((key) => {
    delete targetEnv[key]
  })
  for (const [key, value] of Object.entries(nextEnv)) {
    targetEnv[key] = value
  }
}