Merge origin/main into provider-profile-recommendations
@@ -65,6 +65,39 @@ const result = await Bun.build({
     {
       name: 'bun-bundle-shim',
       setup(build) {
+        const internalFeatureStubModules = new Map([
+          [
+            '../daemon/workerRegistry.js',
+            'export async function runDaemonWorker() { throw new Error("Daemon worker is unavailable in the open build."); }',
+          ],
+          [
+            '../daemon/main.js',
+            'export async function daemonMain() { throw new Error("Daemon mode is unavailable in the open build."); }',
+          ],
+          [
+            '../cli/bg.js',
+            `
+export async function psHandler() { throw new Error("Background sessions are unavailable in the open build."); }
+export async function logsHandler() { throw new Error("Background sessions are unavailable in the open build."); }
+export async function attachHandler() { throw new Error("Background sessions are unavailable in the open build."); }
+export async function killHandler() { throw new Error("Background sessions are unavailable in the open build."); }
+export async function handleBgFlag() { throw new Error("Background sessions are unavailable in the open build."); }
+`,
+          ],
+          [
+            '../cli/handlers/templateJobs.js',
+            'export async function templatesMain() { throw new Error("Template jobs are unavailable in the open build."); }',
+          ],
+          [
+            '../environment-runner/main.js',
+            'export async function environmentRunnerMain() { throw new Error("Environment runner is unavailable in the open build."); }',
+          ],
+          [
+            '../self-hosted-runner/main.js',
+            'export async function selfHostedRunnerMain() { throw new Error("Self-hosted runner is unavailable in the open build."); }',
+          ],
+        ] as const)
+
         // Resolve `import { feature } from 'bun:bundle'` to a shim
         build.onResolve({ filter: /^bun:bundle$/ }, () => ({
           path: 'bun:bundle',
@@ -78,6 +111,26 @@ const result = await Bun.build({
           }),
         )
 
+        build.onResolve(
+          { filter: /^\.\.\/(daemon\/workerRegistry|daemon\/main|cli\/bg|cli\/handlers\/templateJobs|environment-runner\/main|self-hosted-runner\/main)\.js$/ },
+          args => {
+            if (!internalFeatureStubModules.has(args.path)) return null
+            return {
+              path: args.path,
+              namespace: 'internal-feature-stub',
+            }
+          },
+        )
+        build.onLoad(
+          { filter: /.*/, namespace: 'internal-feature-stub' },
+          args => ({
+            contents:
+              internalFeatureStubModules.get(args.path) ??
+              'export {}',
+            loader: 'js',
+          }),
+        )
+
         // Resolve react/compiler-runtime to the standalone package
         build.onResolve({ filter: /^react\/compiler-runtime$/ }, () => ({
           path: 'react/compiler-runtime',

@@ -11,6 +11,7 @@ import {
 } from '../src/utils/providerRecommendation.ts'
 import {
   buildCodexProfileEnv,
+  buildGeminiProfileEnv,
   buildOllamaProfileEnv,
   buildOpenAIProfileEnv,
   createProfileFile,
@@ -33,7 +34,7 @@ function parseArg(name: string): string | null {
 
 function parseProviderArg(): ProviderProfile | 'auto' {
   const p = parseArg('--provider')?.toLowerCase()
-  if (p === 'openai' || p === 'ollama' || p === 'codex') return p
+  if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini') return p
   return 'auto'
 }

@@ -72,7 +73,22 @@ async function main(): Promise<void> {
   }
 
   let env: ProfileFile['env']
-  if (selected === 'ollama') {
+  if (selected === 'gemini') {
+    const builtEnv = buildGeminiProfileEnv({
+      model: argModel || null,
+      baseUrl: argBaseUrl || null,
+      apiKey: argApiKey || null,
+      processEnv: process.env,
+    })
+
+    if (!builtEnv) {
+      console.error('Gemini profile requires an API key. Use --api-key or set GEMINI_API_KEY.')
+      console.error('Get a free key at: https://aistudio.google.com/apikey')
+      process.exit(1)
+    }
+
+    env = builtEnv
+  } else if (selected === 'ollama') {
     resolvedOllamaModel ??= await resolveOllamaModel(argModel, argBaseUrl, goal)
     if (!resolvedOllamaModel) {
       console.error('No viable Ollama chat model was discovered. Pull a chat model first or pass --model explicitly.')
@@ -136,7 +152,7 @@ async function main(): Promise<void> {
 
   console.log(`Saved profile: ${selected}`)
   console.log(`Goal: ${goal}`)
-  console.log(`Model: ${profile.env.OPENAI_MODEL}`)
+  console.log(`Model: ${profile.env.GEMINI_MODEL || profile.env.OPENAI_MODEL || getGoalDefaultOpenAIModel(goal)}`)
   console.log(`Path: ${outputPath}`)
   console.log('Next: bun run dev:profile')
 }

@@ -48,7 +48,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
       continue
     }
 
-    if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex') && requestedProfile === 'auto') {
+    if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini') && requestedProfile === 'auto') {
      requestedProfile = lower as ProviderProfile | 'auto'
      continue
    }
@@ -79,7 +79,7 @@ function loadPersistedProfile(): ProfileFile | null {
   if (!existsSync(path)) return null
   try {
     const parsed = JSON.parse(readFileSync(path, 'utf8')) as ProfileFile
-    if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex') {
+    if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex' || parsed.profile === 'gemini') {
       return parsed
     }
     return null
@@ -126,22 +126,26 @@ function quoteArg(arg: string): string {
 }
 
 function printSummary(profile: ProviderProfile, env: NodeJS.ProcessEnv): void {
-  const keySet = profile === 'codex'
-    ? Boolean(resolveCodexApiCredentials(env).apiKey)
-    : Boolean(env.OPENAI_API_KEY)
   console.log(`Launching profile: ${profile}`)
-  console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
-  console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
-  console.log(
-    `${profile === 'codex' ? 'CODEX_API_KEY_SET' : 'OPENAI_API_KEY_SET'}=${keySet}`,
-  )
+  if (profile === 'gemini') {
+    console.log(`GEMINI_MODEL=${env.GEMINI_MODEL}`)
+    console.log(`GEMINI_API_KEY_SET=${Boolean(env.GEMINI_API_KEY)}`)
+  } else if (profile === 'codex') {
+    console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
+    console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
+    console.log(`CODEX_API_KEY_SET=${Boolean(resolveCodexApiCredentials(env).apiKey)}`)
+  } else {
+    console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
+    console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
+    console.log(`OPENAI_API_KEY_SET=${Boolean(env.OPENAI_API_KEY)}`)
+  }
 }
 
 async function main(): Promise<void> {
   const options = parseLaunchOptions(process.argv.slice(2))
   const requestedProfile = options.requestedProfile
   if (!requestedProfile) {
-    console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
+    console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
     process.exit(1)
   }

@@ -184,6 +188,11 @@ async function main(): Promise<void> {
     applyFastFlags(env)
   }
 
+  if (profile === 'gemini' && !env.GEMINI_API_KEY) {
+    console.error('GEMINI_API_KEY is required for gemini profile. Run: bun run profile:init -- --provider gemini --api-key <key>')
+    process.exit(1)
+  }
+
   if (profile === 'openai' && (!env.OPENAI_API_KEY || env.OPENAI_API_KEY === 'SUA_CHAVE')) {
     console.error('OPENAI_API_KEY is required for openai profile and cannot be SUA_CHAVE. Run: bun run profile:init -- --provider openai --api-key <key>')
     process.exit(1)

@@ -92,14 +92,49 @@ function isLocalBaseUrl(baseUrl: string): boolean {
   return isProviderLocalUrl(baseUrl)
 }
 
+const GEMINI_DEFAULT_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
+
 function currentBaseUrl(): string {
+  if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
+    return process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
+  }
   return process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
 }
 
+function checkGeminiEnv(): CheckResult[] {
+  const results: CheckResult[] = []
+  const model = process.env.GEMINI_MODEL
+  const key = process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY
+  const baseUrl = process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
+
+  results.push(pass('Provider mode', 'Google Gemini provider enabled.'))
+
+  if (!model) {
+    results.push(pass('GEMINI_MODEL', 'Not set. Default gemini-2.0-flash will be used.'))
+  } else {
+    results.push(pass('GEMINI_MODEL', model))
+  }
+
+  results.push(pass('GEMINI_BASE_URL', baseUrl))
+
+  if (!key) {
+    results.push(fail('GEMINI_API_KEY', 'Missing. Set GEMINI_API_KEY or GOOGLE_API_KEY.'))
+  } else {
+    results.push(pass('GEMINI_API_KEY', 'Configured.'))
+  }
+
+  return results
+}
+
 function checkOpenAIEnv(): CheckResult[] {
   const results: CheckResult[] = []
+  const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
   const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
 
+  if (useGemini) {
+    return checkGeminiEnv()
+  }
+
   if (!useOpenAI) {
     results.push(pass('Provider mode', 'Anthropic login flow enabled (CLAUDE_CODE_USE_OPENAI is off).'))
     return results
@@ -160,13 +195,20 @@ function checkOpenAIEnv(): CheckResult[] {
 }
 
 async function checkBaseUrlReachability(): Promise<CheckResult> {
-  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
+  const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
+  const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
+
+  if (!useGemini && !useOpenAI) {
     return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
   }
 
+  const geminiBaseUrl = 'https://generativelanguage.googleapis.com/v1beta/openai'
+  const resolvedBaseUrl = useGemini
+    ? (process.env.GEMINI_BASE_URL ?? geminiBaseUrl)
+    : undefined
   const request = resolveProviderRequest({
     model: process.env.OPENAI_MODEL,
-    baseUrl: process.env.OPENAI_BASE_URL,
+    baseUrl: resolvedBaseUrl ?? process.env.OPENAI_BASE_URL,
   })
   const endpoint = request.transport === 'codex_responses'
     ? `${request.baseUrl}/responses`
@@ -203,6 +245,8 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
       store: false,
       stream: true,
     })
+  } else if (useGemini && (process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY)) {
+    headers.Authorization = `Bearer ${process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY}`
   } else if (process.env.OPENAI_API_KEY) {
     headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
   }
@@ -228,7 +272,7 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
 }
 
 function checkOllamaProcessorMode(): CheckResult {
-  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
+  if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) || isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
     return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
   }

@@ -267,6 +311,14 @@ function checkOllamaProcessorMode(): CheckResult {
 }
 
 function serializeSafeEnvSummary(): Record<string, string | boolean> {
+  if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
+    return {
+      CLAUDE_CODE_USE_GEMINI: true,
+      GEMINI_MODEL: process.env.GEMINI_MODEL ?? '(unset, default: gemini-2.0-flash)',
+      GEMINI_BASE_URL: process.env.GEMINI_BASE_URL ?? 'https://generativelanguage.googleapis.com/v1beta/openai',
+      GEMINI_API_KEY_SET: Boolean(process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY),
+    }
+  }
   const request = resolveProviderRequest({
     model: process.env.OPENAI_MODEL,
     baseUrl: process.env.OPENAI_BASE_URL,