Merge origin/main into codex/provider-profile-recommendations

Preserve provider recommendation workflows while integrating Codex profile support, safer launch isolation, and updated docs/scripts from upstream main.
This commit is contained in:
Vasanthdev2004
2026-04-01 17:33:07 +05:30
21 changed files with 2141 additions and 188 deletions

View File

@@ -1,12 +1,16 @@
// @ts-nocheck
import { writeFileSync } from 'node:fs'
import { resolve } from 'node:path'
import {
resolveCodexApiCredentials,
} from '../src/services/api/providerConfig.js'
import {
getGoalDefaultOpenAIModel,
normalizeRecommendationGoal,
recommendOllamaModel,
} from '../src/utils/providerRecommendation.ts'
import {
buildCodexProfileEnv,
buildOllamaProfileEnv,
buildOpenAIProfileEnv,
createProfileFile,
@@ -29,7 +33,7 @@ function parseArg(name: string): string | null {
// Parse the --provider CLI flag into a supported provider profile.
// Returns 'auto' when the flag is absent or names an unknown provider.
function parseProviderArg(): ProviderProfile | 'auto' {
  const p = parseArg('--provider')?.toLowerCase()
  // Single post-merge check: the stale pre-codex condition that duplicated
  // the openai/ollama comparison has been removed (merge artifact).
  if (p === 'openai' || p === 'ollama' || p === 'codex') return p
  return 'auto'
}
@@ -37,7 +41,7 @@ async function resolveOllamaModel(
argModel: string | null,
argBaseUrl: string | null,
goal: ReturnType<typeof normalizeRecommendationGoal>,
) : Promise<string | null> {
): Promise<string | null> {
if (argModel) return argModel
const discovered = await listOllamaModels(argBaseUrl || undefined)
@@ -82,19 +86,39 @@ async function main(): Promise<void> {
getOllamaChatBaseUrl,
},
)
} else if (selected === 'codex') {
const builtEnv = buildCodexProfileEnv({
model: argModel,
baseUrl: argBaseUrl,
apiKey: argApiKey || process.env.CODEX_API_KEY || null,
processEnv: process.env,
})
if (!builtEnv) {
const credentials = resolveCodexApiCredentials(
argApiKey
? { ...process.env, CODEX_API_KEY: argApiKey }
: process.env,
)
const authHint = credentials.authPath
? ` or make sure ${credentials.authPath} exists`
: ''
if (!credentials.apiKey) {
console.error(`Codex profile requires CODEX_API_KEY${authHint}.`)
} else {
console.error('Codex profile requires CHATGPT_ACCOUNT_ID or an auth.json that includes it.')
}
process.exit(1)
}
env = builtEnv
} else {
const builtEnv = buildOpenAIProfileEnv({
goal,
model:
argModel ||
process.env.OPENAI_MODEL ||
getGoalDefaultOpenAIModel(goal),
model: argModel || null,
baseUrl: argBaseUrl || null,
apiKey: argApiKey || process.env.OPENAI_API_KEY || null,
processEnv: {
...process.env,
OPENAI_BASE_URL:
argBaseUrl || process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
},
processEnv: process.env,
})
if (!builtEnv) {

View File

@@ -2,6 +2,9 @@
import { spawn } from 'node:child_process'
import { existsSync, readFileSync } from 'node:fs'
import { resolve } from 'node:path'
import {
resolveCodexApiCredentials,
} from '../src/services/api/providerConfig.js'
import {
normalizeRecommendationGoal,
recommendOllamaModel,
@@ -45,7 +48,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
continue
}
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama') && requestedProfile === 'auto') {
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex') && requestedProfile === 'auto') {
requestedProfile = lower as ProviderProfile | 'auto'
continue
}
@@ -76,7 +79,7 @@ function loadPersistedProfile(): ProfileFile | null {
if (!existsSync(path)) return null
try {
const parsed = JSON.parse(readFileSync(path, 'utf8')) as ProfileFile
if (parsed.profile === 'openai' || parsed.profile === 'ollama') {
if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex') {
return parsed
}
return null
@@ -123,18 +126,22 @@ function quoteArg(arg: string): string {
}
// Print a human-readable launch summary without leaking secret values.
// Only booleans are logged for keys; base URL and model are safe to show.
function printSummary(profile: ProviderProfile, env: NodeJS.ProcessEnv): void {
  // Merge artifact removed: the old `const keySet = Boolean(env.OPENAI_API_KEY)`
  // redeclaration (a compile error) and the duplicated OPENAI_API_KEY_SET log.
  // For codex, key presence comes from the Codex credential resolver
  // (env var or auth.json fallback); otherwise from OPENAI_API_KEY.
  const keySet = profile === 'codex'
    ? Boolean(resolveCodexApiCredentials(env).apiKey)
    : Boolean(env.OPENAI_API_KEY)
  console.log(`Launching profile: ${profile}`)
  console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
  console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
  console.log(
    `${profile === 'codex' ? 'CODEX_API_KEY_SET' : 'OPENAI_API_KEY_SET'}=${keySet}`,
  )
}
async function main(): Promise<void> {
const options = parseLaunchOptions(process.argv.slice(2))
const requestedProfile = options.requestedProfile
if (!requestedProfile) {
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|auto] [--fast] [-- <cli args>]')
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
process.exit(1)
}
@@ -155,7 +162,10 @@ async function main(): Promise<void> {
profile = requestedProfile
}
if (profile === 'ollama' && persisted?.profile !== 'ollama') {
if (
profile === 'ollama' &&
(persisted?.profile !== 'ollama' || !persisted?.env?.OPENAI_MODEL)
) {
resolvedOllamaModel ??= await resolveOllamaDefaultModel(options.goal)
if (!resolvedOllamaModel) {
console.error('No viable Ollama chat model was discovered. Pull a chat model first or save one with `bun run profile:init -- --provider ollama --model <model>`.')
@@ -179,6 +189,22 @@ async function main(): Promise<void> {
process.exit(1)
}
if (profile === 'codex') {
const credentials = resolveCodexApiCredentials(env)
if (!credentials.apiKey) {
const authHint = credentials.authPath
? ` or make sure ${credentials.authPath} exists`
: ''
console.error(`CODEX_API_KEY is required for codex profile${authHint}. Run: bun run profile:init -- --provider codex --model codexplan`)
process.exit(1)
}
if (!credentials.accountId) {
console.error('CHATGPT_ACCOUNT_ID is required for codex profile. Set CHATGPT_ACCOUNT_ID/CODEX_ACCOUNT_ID or use an auth.json that includes it.')
process.exit(1)
}
}
printSummary(profile, env)
const doctorCode = await runCommand('bun run scripts/system-check.ts', env)

View File

@@ -2,6 +2,11 @@
import { existsSync, mkdirSync, writeFileSync } from 'node:fs'
import { dirname, join, resolve } from 'node:path'
import { spawnSync } from 'node:child_process'
import {
resolveCodexApiCredentials,
resolveProviderRequest,
isLocalProviderUrl as isProviderLocalUrl,
} from '../src/services/api/providerConfig.js'
type CheckResult = {
ok: boolean
@@ -84,12 +89,7 @@ function checkBuildArtifacts(): CheckResult {
}
// Whether the base URL points at a local provider (localhost/loopback).
// Delegates to the shared providerConfig helper so this script and the
// runtime agree on what counts as "local".
// Merge artifact removed: the old inline URL-hostname check was left above
// the delegation, making `isProviderLocalUrl` unreachable dead code.
function isLocalBaseUrl(baseUrl: string): boolean {
  return isProviderLocalUrl(baseUrl)
}
function currentBaseUrl(): string {
@@ -105,23 +105,50 @@ function checkOpenAIEnv(): CheckResult[] {
return results
}
const baseUrl = process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
const model = process.env.OPENAI_MODEL
const key = process.env.OPENAI_API_KEY
const request = resolveProviderRequest({
model: process.env.OPENAI_MODEL,
baseUrl: process.env.OPENAI_BASE_URL,
})
results.push(pass('Provider mode', 'OpenAI-compatible provider enabled.'))
results.push(
pass(
'Provider mode',
request.transport === 'codex_responses'
? 'Codex responses backend enabled.'
: 'OpenAI-compatible provider enabled.',
),
)
if (!model) {
if (!process.env.OPENAI_MODEL) {
results.push(pass('OPENAI_MODEL', 'Not set. Runtime fallback model will be used.'))
} else {
results.push(pass('OPENAI_MODEL', model))
results.push(pass('OPENAI_MODEL', process.env.OPENAI_MODEL))
}
results.push(pass('OPENAI_BASE_URL', baseUrl))
results.push(pass('OPENAI_BASE_URL', request.baseUrl))
if (request.transport === 'codex_responses') {
const credentials = resolveCodexApiCredentials(process.env)
if (!credentials.apiKey) {
const authHint = credentials.authPath
? `Missing CODEX_API_KEY and no usable auth.json at ${credentials.authPath}.`
: 'Missing CODEX_API_KEY and auth.json fallback.'
results.push(fail('CODEX auth', authHint))
} else if (!credentials.accountId) {
results.push(fail('CHATGPT_ACCOUNT_ID', 'Missing chatgpt_account_id in Codex auth.'))
} else {
const detail = credentials.source === 'env'
? 'Using CODEX_API_KEY.'
: `Using ${credentials.authPath}.`
results.push(pass('CODEX auth', detail))
}
return results
}
const key = process.env.OPENAI_API_KEY
if (key === 'SUA_CHAVE') {
results.push(fail('OPENAI_API_KEY', 'Placeholder value detected: SUA_CHAVE.'))
} else if (!key && !isLocalBaseUrl(baseUrl)) {
} else if (!key && !isLocalBaseUrl(request.baseUrl)) {
results.push(fail('OPENAI_API_KEY', 'Missing key for non-local provider URL.'))
} else if (!key) {
results.push(pass('OPENAI_API_KEY', 'Not set (allowed for local providers like Ollama/LM Studio).'))
@@ -137,22 +164,53 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
}
const baseUrl = currentBaseUrl()
const key = process.env.OPENAI_API_KEY
const endpoint = `${baseUrl.replace(/\/$/, '')}/models`
const request = resolveProviderRequest({
model: process.env.OPENAI_MODEL,
baseUrl: process.env.OPENAI_BASE_URL,
})
const endpoint = request.transport === 'codex_responses'
? `${request.baseUrl}/responses`
: `${request.baseUrl}/models`
const controller = new AbortController()
const timeout = setTimeout(() => controller.abort(), 4000)
try {
const headers: Record<string, string> = {}
if (key) {
headers.Authorization = `Bearer ${key}`
let method = 'GET'
let body: string | undefined
if (request.transport === 'codex_responses') {
const credentials = resolveCodexApiCredentials(process.env)
if (credentials.apiKey) {
headers.Authorization = `Bearer ${credentials.apiKey}`
}
if (credentials.accountId) {
headers['chatgpt-account-id'] = credentials.accountId
}
headers['Content-Type'] = 'application/json'
method = 'POST'
body = JSON.stringify({
model: request.resolvedModel,
instructions: 'Runtime doctor probe.',
input: [
{
type: 'message',
role: 'user',
content: [{ type: 'input_text', text: 'ping' }],
},
],
store: false,
stream: true,
})
} else if (process.env.OPENAI_API_KEY) {
headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
}
const response = await fetch(endpoint, {
method: 'GET',
method,
headers,
body,
signal: controller.signal,
})
@@ -209,11 +267,16 @@ function checkOllamaProcessorMode(): CheckResult {
}
function serializeSafeEnvSummary(): Record<string, string | boolean> {
const request = resolveProviderRequest({
model: process.env.OPENAI_MODEL,
baseUrl: process.env.OPENAI_BASE_URL,
})
return {
CLAUDE_CODE_USE_OPENAI: isTruthy(process.env.CLAUDE_CODE_USE_OPENAI),
OPENAI_MODEL: process.env.OPENAI_MODEL ?? '(unset)',
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1',
OPENAI_BASE_URL: request.baseUrl,
OPENAI_API_KEY_SET: Boolean(process.env.OPENAI_API_KEY),
CODEX_API_KEY_SET: Boolean(resolveCodexApiCredentials(process.env).apiKey),
}
}