feat: add native Gemini provider for Google AI models

Adds Google Gemini as a first-class provider using Gemini's OpenAI-compatible
endpoint, supporting gemini-2.0-flash, gemini-2.5-pro, and gemini-2.0-flash-lite
across all three model tiers (opus/sonnet/haiku).

- Add 'gemini' to APIProvider type with CLAUDE_CODE_USE_GEMINI env detection
- Map all 11 model configs to appropriate Gemini models per tier
- Route Gemini through existing OpenAI shim (generativelanguage.googleapis.com/v1beta/openai)
- Support GEMINI_API_KEY and GOOGLE_API_KEY for authentication (GEMINI_API_KEY takes precedence)
- Fix model display name to show actual Gemini model instead of Claude fallback
- Add Gemini support to provider-launch, provider-bootstrap, system-check scripts
- Add dev:gemini npm script for local development

Bootstrap: bun run profile:init -- --provider gemini --api-key <key>
Launch: bun run dev:gemini

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
gnanam1990
2026-04-01 14:46:04 +05:30
parent 6b6407018d
commit a3d8ab0fec
8 changed files with 185 additions and 31 deletions

View File

@@ -6,7 +6,7 @@ import {
resolveCodexApiCredentials,
} from '../src/services/api/providerConfig.js'
type ProviderProfile = 'openai' | 'ollama' | 'codex'
type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini'
type ProfileFile = {
profile: ProviderProfile
@@ -15,6 +15,9 @@ type ProfileFile = {
OPENAI_MODEL?: string
OPENAI_API_KEY?: string
CODEX_API_KEY?: string
GEMINI_API_KEY?: string
GEMINI_MODEL?: string
GEMINI_BASE_URL?: string
}
createdAt: string
}
@@ -28,7 +31,7 @@ function parseArg(name: string): string | null {
function parseProviderArg(): ProviderProfile | 'auto' {
const p = parseArg('--provider')?.toLowerCase()
if (p === 'openai' || p === 'ollama' || p === 'codex') return p
if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini') return p
return 'auto'
}
@@ -69,7 +72,18 @@ async function main(): Promise<void> {
}
const env: ProfileFile['env'] = {}
if (selected === 'ollama') {
if (selected === 'gemini') {
env.GEMINI_MODEL = argModel || process.env.GEMINI_MODEL || 'gemini-2.0-flash'
const key = sanitizeApiKey(argApiKey || process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY || null)
if (!key) {
console.error('Gemini profile requires an API key. Use --api-key or set GEMINI_API_KEY.')
console.error('Get a free key at: https://aistudio.google.com/apikey')
process.exit(1)
}
env.GEMINI_API_KEY = key
if (argBaseUrl) env.GEMINI_BASE_URL = argBaseUrl
} else if (selected === 'ollama') {
env.OPENAI_BASE_URL = argBaseUrl || 'http://localhost:11434/v1'
env.OPENAI_MODEL = argModel || process.env.OPENAI_MODEL || 'llama3.1:8b'
const key = sanitizeApiKey(argApiKey || process.env.OPENAI_API_KEY || null)

View File

@@ -7,7 +7,7 @@ import {
resolveCodexApiCredentials,
} from '../src/services/api/providerConfig.js'
type ProviderProfile = 'openai' | 'ollama' | 'codex'
type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini'
type ProfileFile = {
profile: ProviderProfile
@@ -16,6 +16,9 @@ type ProfileFile = {
OPENAI_MODEL?: string
OPENAI_API_KEY?: string
CODEX_API_KEY?: string
GEMINI_API_KEY?: string
GEMINI_MODEL?: string
GEMINI_BASE_URL?: string
}
}
@@ -37,7 +40,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
continue
}
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex') && requestedProfile === 'auto') {
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini') && requestedProfile === 'auto') {
requestedProfile = lower as ProviderProfile | 'auto'
continue
}
@@ -67,7 +70,7 @@ function loadPersistedProfile(): ProfileFile | null {
if (!existsSync(path)) return null
try {
const parsed = JSON.parse(readFileSync(path, 'utf8')) as ProfileFile
if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex') {
if (parsed.profile === 'openai' || parsed.profile === 'ollama' || parsed.profile === 'codex' || parsed.profile === 'gemini') {
return parsed
}
return null
@@ -106,6 +109,21 @@ function runCommand(command: string, env: NodeJS.ProcessEnv): Promise<number> {
function buildEnv(profile: ProviderProfile, persisted: ProfileFile | null): NodeJS.ProcessEnv {
const persistedEnv = persisted?.env ?? {}
if (profile === 'gemini') {
const env: NodeJS.ProcessEnv = {
...process.env,
CLAUDE_CODE_USE_GEMINI: '1',
}
delete env.CLAUDE_CODE_USE_OPENAI
env.GEMINI_MODEL = process.env.GEMINI_MODEL || persistedEnv.GEMINI_MODEL || 'gemini-2.0-flash'
env.GEMINI_API_KEY = process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY || persistedEnv.GEMINI_API_KEY
if (persistedEnv.GEMINI_BASE_URL || process.env.GEMINI_BASE_URL) {
env.GEMINI_BASE_URL = process.env.GEMINI_BASE_URL || persistedEnv.GEMINI_BASE_URL
}
return env
}
const env: NodeJS.ProcessEnv = {
...process.env,
CLAUDE_CODE_USE_OPENAI: '1',
@@ -156,22 +174,26 @@ function quoteArg(arg: string): string {
}
function printSummary(profile: ProviderProfile, env: NodeJS.ProcessEnv): void {
const keySet = profile === 'codex'
? Boolean(resolveCodexApiCredentials(env).apiKey)
: Boolean(env.OPENAI_API_KEY)
console.log(`Launching profile: ${profile}`)
console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
console.log(
`${profile === 'codex' ? 'CODEX_API_KEY_SET' : 'OPENAI_API_KEY_SET'}=${keySet}`,
)
if (profile === 'gemini') {
console.log(`GEMINI_MODEL=${env.GEMINI_MODEL}`)
console.log(`GEMINI_API_KEY_SET=${Boolean(env.GEMINI_API_KEY)}`)
} else if (profile === 'codex') {
console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
console.log(`CODEX_API_KEY_SET=${Boolean(resolveCodexApiCredentials(env).apiKey)}`)
} else {
console.log(`OPENAI_BASE_URL=${env.OPENAI_BASE_URL}`)
console.log(`OPENAI_MODEL=${env.OPENAI_MODEL}`)
console.log(`OPENAI_API_KEY_SET=${Boolean(env.OPENAI_API_KEY)}`)
}
}
async function main(): Promise<void> {
const options = parseLaunchOptions(process.argv.slice(2))
const requestedProfile = options.requestedProfile
if (!requestedProfile) {
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|auto] [--fast] [-- <cli args>]')
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|auto] [--fast] [-- <cli args>]')
process.exit(1)
}
@@ -193,6 +215,11 @@ async function main(): Promise<void> {
applyFastFlags(env)
}
if (profile === 'gemini' && !env.GEMINI_API_KEY) {
console.error('GEMINI_API_KEY is required for gemini profile. Run: bun run profile:init -- --provider gemini --api-key <key>')
process.exit(1)
}
if (profile === 'openai' && (!env.OPENAI_API_KEY || env.OPENAI_API_KEY === 'SUA_CHAVE')) {
console.error('OPENAI_API_KEY is required for openai profile and cannot be SUA_CHAVE. Run: bun run profile:init -- --provider openai --api-key <key>')
process.exit(1)

View File

@@ -92,14 +92,49 @@ function isLocalBaseUrl(baseUrl: string): boolean {
return isProviderLocalUrl(baseUrl)
}
const GEMINI_DEFAULT_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
function currentBaseUrl(): string {
if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
return process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
}
return process.env.OPENAI_BASE_URL ?? 'https://api.openai.com/v1'
}
function checkGeminiEnv(): CheckResult[] {
const results: CheckResult[] = []
const model = process.env.GEMINI_MODEL
const key = process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY
const baseUrl = process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
results.push(pass('Provider mode', 'Google Gemini provider enabled.'))
if (!model) {
results.push(pass('GEMINI_MODEL', 'Not set. Default gemini-2.0-flash will be used.'))
} else {
results.push(pass('GEMINI_MODEL', model))
}
results.push(pass('GEMINI_BASE_URL', baseUrl))
if (!key) {
results.push(fail('GEMINI_API_KEY', 'Missing. Set GEMINI_API_KEY or GOOGLE_API_KEY.'))
} else {
results.push(pass('GEMINI_API_KEY', 'Configured.'))
}
return results
}
function checkOpenAIEnv(): CheckResult[] {
const results: CheckResult[] = []
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
if (useGemini) {
return checkGeminiEnv()
}
if (!useOpenAI) {
results.push(pass('Provider mode', 'Anthropic login flow enabled (CLAUDE_CODE_USE_OPENAI is off).'))
return results
@@ -160,13 +195,20 @@ function checkOpenAIEnv(): CheckResult[] {
}
async function checkBaseUrlReachability(): Promise<CheckResult> {
if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
if (!useGemini && !useOpenAI) {
return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
}
const geminiBaseUrl = 'https://generativelanguage.googleapis.com/v1beta/openai'
const resolvedBaseUrl = useGemini
? (process.env.GEMINI_BASE_URL ?? geminiBaseUrl)
: undefined
const request = resolveProviderRequest({
model: process.env.OPENAI_MODEL,
baseUrl: process.env.OPENAI_BASE_URL,
baseUrl: resolvedBaseUrl ?? process.env.OPENAI_BASE_URL,
})
const endpoint = request.transport === 'codex_responses'
? `${request.baseUrl}/responses`
@@ -203,6 +245,8 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
store: false,
stream: true,
})
} else if (useGemini && (process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY)) {
headers.Authorization = `Bearer ${process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY}`
} else if (process.env.OPENAI_API_KEY) {
headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
}
@@ -228,7 +272,7 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
}
function checkOllamaProcessorMode(): CheckResult {
if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
if (!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) || isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
}
@@ -267,6 +311,14 @@ function checkOllamaProcessorMode(): CheckResult {
}
function serializeSafeEnvSummary(): Record<string, string | boolean> {
if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
return {
CLAUDE_CODE_USE_GEMINI: true,
GEMINI_MODEL: process.env.GEMINI_MODEL ?? '(unset, default: gemini-2.0-flash)',
GEMINI_BASE_URL: process.env.GEMINI_BASE_URL ?? 'https://generativelanguage.googleapis.com/v1beta/openai',
GEMINI_API_KEY_SET: Boolean(process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY),
}
}
const request = resolveProviderRequest({
model: process.env.OPENAI_MODEL,
baseUrl: process.env.OPENAI_BASE_URL,