Decouple and fix mistral (#595)

* decouple and fix mistral

* fix wrong variable for currentBaseUrl and buildAPIProviderProperties
lunamonke
2026-04-12 08:26:14 +01:00
committed by GitHub
parent b126e38b1a
commit 4c50977f3c
24 changed files with 556 additions and 46 deletions

View File

@@ -137,10 +137,9 @@ export OPENAI_MODEL=llama-3.3-70b-versatile
### Mistral
```bash
export CLAUDE_CODE_USE_OPENAI=1
export OPENAI_API_KEY=...
export OPENAI_BASE_URL=https://api.mistral.ai/v1
export OPENAI_MODEL=mistral-large-latest
export CLAUDE_CODE_USE_MISTRAL=1
export MISTRAL_API_KEY=...
export MISTRAL_MODEL=mistral-large-latest
```
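With `CLAUDE_CODE_USE_MISTRAL` set, provider selection and defaults key off the dedicated `MISTRAL_*` variables instead of the generic OpenAI switch. A minimal sketch (not part of this commit) of that resolution, using the env var names and defaults shown elsewhere in this diff:

```ts
// Sketch only: mirrors the MISTRAL_* variables and defaults added in this commit.
const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1'
const DEFAULT_MISTRAL_MODEL = 'devstral-latest' // default defined later in this diff

function isTruthy(value: string | undefined): boolean {
  return value === '1' || value === 'true'
}

function resolveMistralConfig(env: NodeJS.ProcessEnv) {
  if (!isTruthy(env.CLAUDE_CODE_USE_MISTRAL)) return null
  return {
    apiKey: env.MISTRAL_API_KEY ?? '',
    baseUrl: env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
    model: env.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
  }
}
```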
### Azure OpenAI

View File

@@ -112,6 +112,14 @@ def build_default_providers() -> list[Provider]:
big_model=big if "gemini" in big else "gemini-2.5-pro",
small_model=small if "gemini" in small else "gemini-2.0-flash",
),
Provider(
name="mistral",
ping_url="",
api_key_env="MISTRAL_API_KEY",
cost_per_1k_tokens=0.0001,
big_model=big if "mistral" in big else "devstral-latest",
small_model=small if "small" in small else "ministral-3b-latest",
),
Provider(
name="ollama",
ping_url=f"{ollama_url}/api/tags",

View File

@@ -11,6 +11,7 @@ import {
buildAtomicChatProfileEnv,
buildCodexProfileEnv,
buildGeminiProfileEnv,
buildMistralProfileEnv,
buildOllamaProfileEnv,
buildOpenAIProfileEnv,
createProfileFile,
@@ -37,7 +38,7 @@ function parseArg(name: string): string | null {
function parseProviderArg(): ProviderProfile | 'auto' {
const p = parseArg('--provider')?.toLowerCase()
if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini' || p === 'atomic-chat') return p
if (p === 'openai' || p === 'ollama' || p === 'codex' || p === 'gemini' || p === 'mistral' || p === 'atomic-chat') return p
return 'auto'
}
@@ -90,6 +91,21 @@ async function main(): Promise<void> {
process.exit(1)
}
env = builtEnv
} else if (selected === 'mistral') {
const builtEnv = buildMistralProfileEnv({
model: argModel || null,
baseUrl: argBaseUrl || null,
apiKey: argApiKey || null,
processEnv: process.env,
})
if (!builtEnv) {
console.error('Mistral profile requires an API key. Use --api-key or set MISTRAL_API_KEY.')
console.error('Get a free key at: https://admin.mistral.ai/organization/api-keys')
process.exit(1)
}
env = builtEnv
} else if (selected === 'ollama') {
resolvedOllamaModel ??= await resolveOllamaModel(argModel, argBaseUrl, goal)
@@ -169,7 +185,7 @@ async function main(): Promise<void> {
console.log(`Saved profile: ${selected}`)
console.log(`Goal: ${goal}`)
console.log(`Model: ${profile.env.GEMINI_MODEL || profile.env.OPENAI_MODEL || getGoalDefaultOpenAIModel(goal)}`)
console.log(`Model: ${profile.env.GEMINI_MODEL || profile.env.MISTRAL_MODEL || profile.env.OPENAI_MODEL || getGoalDefaultOpenAIModel(goal)}`)
console.log(`Path: ${outputPath}`)
console.log('Next: bun run dev:profile')
}
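
For reference, `buildMistralProfileEnv` (added later in this diff) returns `null` when no usable API key is available, which is what triggers the error path above. A rough usage sketch; the import path is assumed:

```ts
import { buildMistralProfileEnv } from '../src/services/providerProfile.js' // path assumed

// Returns a ProfileEnv with MISTRAL_API_KEY/MISTRAL_MODEL (plus MISTRAL_BASE_URL when set),
// or null when neither --api-key nor MISTRAL_API_KEY yields a real key.
const env = buildMistralProfileEnv({
  model: null,                                  // falls back to the default model
  baseUrl: null,                                // falls back to https://api.mistral.ai/v1 at runtime
  apiKey: process.env.MISTRAL_API_KEY ?? null,
  processEnv: process.env,
})

if (!env) {
  console.error('Mistral profile requires an API key.')
  process.exit(1)
}
```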

View File

@@ -50,7 +50,7 @@ function parseLaunchOptions(argv: string[]): LaunchOptions {
continue
}
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini' || lower === 'atomic-chat') && requestedProfile === 'auto') {
if ((lower === 'auto' || lower === 'openai' || lower === 'ollama' || lower === 'codex' || lower === 'gemini' || lower === 'mistral' || lower === 'atomic-chat') && requestedProfile === 'auto') {
requestedProfile = lower as ProviderProfile | 'auto'
continue
}
@@ -124,6 +124,8 @@ function printSummary(profile: ProviderProfile): void {
console.log(`Launching profile: ${profile}`)
if (profile === 'gemini') {
console.log('Using configured Gemini provider settings.')
} else if (profile === 'mistral') {
console.log('Using configured Mistral provider settings.')
} else if (profile === 'codex') {
console.log('Using configured Codex/OpenAI-compatible provider settings.')
} else if (profile === 'atomic-chat') {
@@ -139,7 +141,7 @@ async function main(): Promise<void> {
const options = parseLaunchOptions(process.argv.slice(2))
const requestedProfile = options.requestedProfile
if (!requestedProfile) {
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|atomic-chat|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
console.error('Usage: bun run scripts/provider-launch.ts [openai|ollama|codex|gemini|mistral|atomic-chat|auto] [--fast] [--goal <latency|balanced|coding>] [-- <cli args>]')
process.exit(1)
}
@@ -205,6 +207,11 @@ async function main(): Promise<void> {
process.exit(1)
}
if (profile === 'mistral' && !env.MISTRAL_API_KEY) {
console.error('MISTRAL_API_KEY is required for mistral profile. Run: bun run profile:init -- --provider mistral --api-key <key>')
process.exit(1)
}
if (profile === 'openai' && (!env.OPENAI_API_KEY || env.OPENAI_API_KEY === 'SUA_CHAVE')) {
console.error('OPENAI_API_KEY is required for openai profile and cannot be SUA_CHAVE. Run: bun run profile:init -- --provider openai --api-key <key>')
process.exit(1)

View File

@@ -118,12 +118,16 @@ function isLocalBaseUrl(baseUrl: string): boolean {
}
const GEMINI_DEFAULT_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/openai'
const MISTRAL_DEFAULT_BASE_URL = 'https://api.mistral.ai/v1'
const GITHUB_COPILOT_BASE = 'https://api.githubcopilot.com'
function currentBaseUrl(): string {
if (isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)) {
return process.env.GEMINI_BASE_URL ?? GEMINI_DEFAULT_BASE_URL
}
if (isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
return process.env.MISTRAL_BASE_URL ?? MISTRAL_DEFAULT_BASE_URL
}
if (isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)) {
return process.env.OPENAI_BASE_URL ?? GITHUB_COPILOT_BASE
}
@@ -155,6 +159,31 @@ function checkGeminiEnv(): CheckResult[] {
return results
}
function checkMistralEnv(): CheckResult[] {
const results: CheckResult[] = []
const model = process.env.MISTRAL_MODEL
const key = process.env.MISTRAL_API_KEY
const baseUrl = process.env.MISTRAL_BASE_URL ?? MISTRAL_DEFAULT_BASE_URL
results.push(pass('Provider mode', 'Mistral provider enabled.'))
if (!model) {
results.push(pass('MISTRAL_MODEL', 'Not set. Default will be used at runtime.'))
} else {
results.push(pass('MISTRAL_MODEL', model))
}
results.push(pass('MISTRAL_BASE_URL', baseUrl))
if (!key) {
results.push(fail('MISTRAL_API_KEY', 'Missing. Set MISTRAL_API_KEY.'))
} else {
results.push(pass('MISTRAL_API_KEY', 'Configured.'))
}
return results
}
function checkGithubEnv(): CheckResult[] {
const results: CheckResult[] = []
const baseUrl = process.env.OPENAI_BASE_URL ?? GITHUB_COPILOT_BASE
@@ -186,12 +215,17 @@ function checkOpenAIEnv(): CheckResult[] {
const results: CheckResult[] = []
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
const useGithub = isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
const useMistral = isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
if (useGemini) {
return checkGeminiEnv()
}
if (useMistral) {
return checkMistralEnv()
}
if (useGithub && !useOpenAI) {
return checkGithubEnv()
}
@@ -268,8 +302,9 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
const useGemini = isTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
const useOpenAI = isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)
const useGithub = isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
const useMistral = isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
if (!useGemini && !useOpenAI && !useGithub) {
if (!useGemini && !useOpenAI && !useGithub && !useMistral) {
return pass('Provider reachability', 'Skipped (OpenAI-compatible mode disabled).')
}
@@ -326,6 +361,8 @@ async function checkBaseUrlReachability(): Promise<CheckResult> {
})
} else if (useGemini && (process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY)) {
headers.Authorization = `Bearer ${process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY}`
} else if (useMistral && process.env.MISTRAL_API_KEY) {
headers.Authorization = `Bearer ${process.env.MISTRAL_API_KEY}`
} else if (process.env.OPENAI_API_KEY) {
headers.Authorization = `Bearer ${process.env.OPENAI_API_KEY}`
}
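
The same precedence drives the reachability probe: with Mistral enabled, the Bearer token comes from MISTRAL_API_KEY and the probe targets the Mistral base URL. A minimal sketch (the probe endpoint is an assumption, not taken from this diff):

```ts
// Sketch of a Mistral reachability check in the spirit of checkBaseUrlReachability.
async function probeMistral(env: NodeJS.ProcessEnv): Promise<boolean> {
  const baseUrl = env.MISTRAL_BASE_URL ?? 'https://api.mistral.ai/v1'
  const headers: Record<string, string> = {}
  if (env.MISTRAL_API_KEY) {
    headers.Authorization = `Bearer ${env.MISTRAL_API_KEY}`
  }
  const response = await fetch(`${baseUrl}/models`, { headers }) // endpoint assumed
  return response.ok
}
```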
@@ -373,7 +410,8 @@ function checkOllamaProcessorMode(): CheckResult {
if (
!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
) {
return pass('Ollama processor mode', 'Skipped (OpenAI-compatible mode disabled).')
}
@@ -425,6 +463,14 @@ function serializeSafeEnvSummary(): Record<string, string | boolean> {
GEMINI_API_KEY_SET: Boolean(process.env.GEMINI_API_KEY ?? process.env.GOOGLE_API_KEY),
}
}
if (isTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
return {
CLAUDE_CODE_USE_MISTRAL: true,
MISTRAL_MODEL: process.env.MISTRAL_MODEL ?? '(unset, default: devstral-latest)',
MISTRAL_BASE_URL: process.env.MISTRAL_BASE_URL ?? 'https://api.mistral.ai/v1',
MISTRAL_API_KEY_SET: Boolean(process.env.MISTRAL_API_KEY),
}
}
if (
isTruthy(process.env.CLAUDE_CODE_USE_GITHUB) &&
!isTruthy(process.env.CLAUDE_CODE_USE_OPENAI)

View File

@@ -1,20 +1,44 @@
import { afterEach, expect, mock, test } from 'bun:test'
import { getAdditionalModelOptionsCacheScope } from '../../services/api/providerConfig.js'
import { getAPIProvider } from '../../utils/model/providers.js'
const originalEnv = {
CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
CLAUDE_CODE_USE_GEMINI: process.env.CLAUDE_CODE_USE_GEMINI,
CLAUDE_CODE_USE_GITHUB: process.env.CLAUDE_CODE_USE_GITHUB,
CLAUDE_CODE_USE_MISTRAL: process.env.CLAUDE_CODE_USE_MISTRAL,
CLAUDE_CODE_USE_BEDROCK: process.env.CLAUDE_CODE_USE_BEDROCK,
CLAUDE_CODE_USE_VERTEX: process.env.CLAUDE_CODE_USE_VERTEX,
CLAUDE_CODE_USE_FOUNDRY: process.env.CLAUDE_CODE_USE_FOUNDRY,
OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
OPENAI_API_BASE: process.env.OPENAI_API_BASE,
OPENAI_MODEL: process.env.OPENAI_MODEL,
}
afterEach(() => {
mock.restore()
process.env.CLAUDE_CODE_USE_OPENAI = originalEnv.CLAUDE_CODE_USE_OPENAI
process.env.CLAUDE_CODE_USE_GEMINI = originalEnv.CLAUDE_CODE_USE_GEMINI
process.env.CLAUDE_CODE_USE_GITHUB = originalEnv.CLAUDE_CODE_USE_GITHUB
process.env.CLAUDE_CODE_USE_MISTRAL = originalEnv.CLAUDE_CODE_USE_MISTRAL
process.env.CLAUDE_CODE_USE_BEDROCK = originalEnv.CLAUDE_CODE_USE_BEDROCK
process.env.CLAUDE_CODE_USE_VERTEX = originalEnv.CLAUDE_CODE_USE_VERTEX
process.env.CLAUDE_CODE_USE_FOUNDRY = originalEnv.CLAUDE_CODE_USE_FOUNDRY
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL
process.env.OPENAI_API_BASE = originalEnv.OPENAI_API_BASE
process.env.OPENAI_MODEL = originalEnv.OPENAI_MODEL
})
test('opens the model picker without awaiting local model discovery refresh', async () => {
process.env.CLAUDE_CODE_USE_OPENAI = '1'
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.CLAUDE_CODE_USE_MISTRAL
delete process.env.CLAUDE_CODE_USE_BEDROCK
delete process.env.CLAUDE_CODE_USE_VERTEX
delete process.env.CLAUDE_CODE_USE_FOUNDRY
delete process.env.OPENAI_API_BASE
process.env.OPENAI_BASE_URL = 'http://127.0.0.1:8080/v1'
process.env.OPENAI_MODEL = 'qwen2.5-coder-7b-instruct'
@@ -30,7 +54,9 @@ test('opens the model picker without awaiting local model discovery refresh', as
discoverOpenAICompatibleModelOptions,
}))
const { call } = await import(`./model.js?ts=${Date.now()}-${Math.random()}`)
expect(getAdditionalModelOptionsCacheScope()).toBe('openai:http://127.0.0.1:8080/v1')
const { call } = await import('./model.js')
const result = await Promise.race([
call(() => {}, {} as never, ''),
new Promise(resolve => setTimeout(() => resolve('timeout'), 50)),

View File

@@ -284,7 +284,7 @@ function haveSameModelOptions(left: ModelOption[], right: ModelOption[]): boolea
});
}
async function refreshOpenAIModelOptionsCache(): Promise<void> {
if (getAPIProvider() !== 'openai') {
if (!getAdditionalModelOptionsCacheScope()?.startsWith('openai:')) {
return;
}
try {

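The guard above changed from comparing `getAPIProvider()` to inspecting the cache-scope string, which encodes both the provider and its base URL (the updated test expects `openai:http://127.0.0.1:8080/v1`). A small sketch of what that check buys:

```ts
// Sketch: the scope string bundles provider and base URL, so a base-URL change
// invalidates the cached model options even though the provider is still 'openai'.
function shouldRefreshOpenAIModelCache(scope: string | null): boolean {
  return scope?.startsWith('openai:') ?? false
}

shouldRefreshOpenAIModelCache('openai:http://127.0.0.1:8080/v1') // true
shouldRefreshOpenAIModelCache(null)                              // false (shim disabled or non-OpenAI provider)
```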
View File

@@ -22,11 +22,14 @@ import {
import {
buildCodexProfileEnv,
buildGeminiProfileEnv,
buildMistralProfileEnv,
buildOllamaProfileEnv,
buildOpenAIProfileEnv,
createProfileFile,
DEFAULT_GEMINI_BASE_URL,
DEFAULT_GEMINI_MODEL,
DEFAULT_MISTRAL_BASE_URL,
DEFAULT_MISTRAL_MODEL,
deleteProfileFile,
loadProfileFile,
maskSecretForDisplay,
@@ -74,6 +77,14 @@ type Step =
baseUrl: string | null
defaultModel: string
}
| { name: 'mistral-key'; defaultModel: string }
| { name: 'mistral-base'; apiKey: string; defaultModel: string }
| {
name: 'mistral-model'
apiKey: string
baseUrl: string | null
defaultModel: string
}
| { name: 'gemini-auth-method' }
| { name: 'gemini-key' }
| { name: 'gemini-access-token' }
@@ -116,6 +127,8 @@ type ProviderWizardDefaults = {
openAIModel: string
openAIBaseUrl: string
geminiModel: string
mistralModel: string
mistralBaseUrl: string
}
function isEnvTruthy(value: string | undefined): boolean {
@@ -147,11 +160,19 @@ export function getProviderWizardDefaults(
const safeGeminiModel =
sanitizeProviderConfigValue(processEnv.GEMINI_MODEL, processEnv) ||
DEFAULT_GEMINI_MODEL
const safeMistralModel =
sanitizeProviderConfigValue(processEnv.MISTRAL_MODEL, processEnv) ||
DEFAULT_MISTRAL_MODEL
const safeMistralBaseUrl =
sanitizeProviderConfigValue(processEnv.MISTRAL_BASE_URL, processEnv) ||
DEFAULT_MISTRAL_BASE_URL
return {
openAIModel: safeOpenAIModel,
openAIBaseUrl: safeOpenAIBaseUrl,
geminiModel: safeGeminiModel,
mistralModel: safeMistralModel,
mistralBaseUrl: safeMistralBaseUrl,
}
}
@@ -178,6 +199,21 @@ export function buildCurrentProviderSummary(options?: {
}
}
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_MISTRAL)) {
return {
providerLabel: 'Mistral',
modelLabel: getSafeDisplayValue(
processEnv.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
processEnv
),
endpointLabel: getSafeDisplayValue(
processEnv.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
processEnv
),
savedProfileLabel,
}
}
if (isEnvTruthy(processEnv.CLAUDE_CODE_USE_GITHUB)) {
return {
providerLabel: 'GitHub Models',
@@ -259,6 +295,24 @@ function buildSavedProfileSummary(
? 'configured'
: undefined,
}
case 'mistral':
return {
providerLabel: 'Mistral',
modelLabel: getSafeDisplayValue(
env.MISTRAL_MODEL ?? DEFAULT_MISTRAL_MODEL,
process.env,
env,
),
endpointLabel: getSafeDisplayValue(
env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL,
process.env,
env,
),
credentialLabel:
maskSecretForDisplay(env.MISTRAL_API_KEY) !== undefined
? 'configured'
: undefined,
}
case 'codex':
return {
providerLabel: 'Codex',
@@ -473,6 +527,11 @@ function ProviderChooser({
value: 'gemini',
description: 'Use Google Gemini with API key, access token, or local ADC',
},
{
label: 'Mistral',
value: 'mistral',
description: 'Use Mistral with API key',
},
{
label: 'Codex',
value: 'codex',
@@ -971,6 +1030,11 @@ export function ProviderWizard({
})
} else if (value === 'gemini') {
setStep({ name: 'gemini-auth-method' })
} else if (value === 'mistral') {
setStep({
name: 'mistral-key',
defaultModel: defaults.mistralModel,
})
} else if (value === 'clear') {
const filePath = deleteProfileFile()
onDone(`Removed saved provider profile at ${filePath}. Restart OpenClaude to go back to normal startup.`, {
@@ -1110,6 +1174,101 @@ export function ProviderWizard({
/>
)
case 'mistral-key':
return (
<TextEntryDialog
resetStateKey={step.name}
title="Mistral setup"
subtitle="Step 1 of 3"
description={
process.env.MISTRAL_API_KEY
? 'Enter an API key, or leave this blank to reuse the current MISTRAL_API_KEY from this session.'
: 'Enter the API key for your Mistral provider.'
}
initialValue=""
placeholder="..."
mask="*"
allowEmpty={Boolean(process.env.MISTRAL_API_KEY)}
validate={value => {
const candidate = value.trim() || process.env.MISTRAL_API_KEY || ''
return sanitizeApiKey(candidate)
? null
: 'Enter a real API key. Placeholder values like SUA_CHAVE are not valid.'
}}
onSubmit={value => {
const apiKey = value.trim() || process.env.MISTRAL_API_KEY || ''
setStep({
name: 'mistral-base',
apiKey,
defaultModel: step.defaultModel,
})
}}
onCancel={() => setStep({ name: 'choose' })}
/>
)
case 'mistral-base':
return (
<TextEntryDialog
resetStateKey={step.name}
title="Mistral setup"
subtitle="Step 2 of 3"
description={`Optionally enter a base URL. Leave blank for ${DEFAULT_MISTRAL_BASE_URL}.`}
initialValue={
defaults.mistralBaseUrl === DEFAULT_MISTRAL_BASE_URL
? ''
: defaults.mistralBaseUrl
}
placeholder={DEFAULT_MISTRAL_BASE_URL}
allowEmpty
onSubmit={value => {
setStep({
name: 'mistral-model',
apiKey: step.apiKey,
baseUrl: value.trim() || null,
defaultModel: step.defaultModel,
})
}}
onCancel={() =>
setStep({
name: 'mistral-key',
defaultModel: step.defaultModel,
})
}
/>
)
case 'mistral-model':
return (
<TextEntryDialog
resetStateKey={step.name}
title="Mistral setup"
subtitle="Step 3 of 3"
description={`Enter a model name. Leave blank for ${step.defaultModel}.`}
initialValue={defaults.mistralModel ?? step.defaultModel}
placeholder={step.defaultModel}
allowEmpty
onSubmit={value => {
const env = buildMistralProfileEnv({
model: value.trim() || step.defaultModel,
baseUrl: step.baseUrl,
apiKey: step.apiKey,
processEnv: process.env,
})
if (env) {
finishProfileSave(onDone, 'mistral', env)
}
}}
onCancel={() =>
setStep({
name: 'mistral-base',
apiKey: step.apiKey,
defaultModel: step.defaultModel,
})
}
/>
)
case 'gemini-auth-method': {
const hasShellGeminiKey = Boolean(
process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY,

View File

@@ -87,6 +87,7 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
const useGemini = process.env.CLAUDE_CODE_USE_GEMINI === '1' || process.env.CLAUDE_CODE_USE_GEMINI === 'true'
const useGithub = process.env.CLAUDE_CODE_USE_GITHUB === '1' || process.env.CLAUDE_CODE_USE_GITHUB === 'true'
const useOpenAI = process.env.CLAUDE_CODE_USE_OPENAI === '1' || process.env.CLAUDE_CODE_USE_OPENAI === 'true'
const useMistral = process.env.CLAUDE_CODE_USE_MISTRAL === '1' || process.env.CLAUDE_CODE_USE_MISTRAL === 'true'
if (useGemini) {
const model = process.env.GEMINI_MODEL || 'gemini-2.0-flash'
@@ -94,6 +95,12 @@ function detectProvider(): { name: string; model: string; baseUrl: string; isLoc
return { name: 'Google Gemini', model, baseUrl, isLocal: false }
}
if (useMistral) {
const model = process.env.MISTRAL_MODEL || 'devstral-latest'
const baseUrl = process.env.MISTRAL_BASE_URL || 'https://api.mistral.ai/v1'
return { name: 'Mistral', model, baseUrl, isLocal: false }
}
if (useGithub) {
const model = process.env.OPENAI_MODEL || 'github:copilot'
const baseUrl =

View File

@@ -177,7 +177,8 @@ export async function getAnthropicClient({
if (
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
) {
const { createOpenAIShimClient } = await import('./openaiShim.js')
return createOpenAIShimClient({

View File

@@ -61,6 +61,7 @@ type SecretValueSource = Partial<{
GEMINI_API_KEY: string
GOOGLE_API_KEY: string
GEMINI_ACCESS_TOKEN: string
MISTRAL_API_KEY: string
}>
const GITHUB_COPILOT_BASE = 'https://api.githubcopilot.com'
@@ -80,6 +81,10 @@ function isGithubModelsMode(): boolean {
return isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
}
function isMistralMode(): boolean {
return isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
}
function filterAnthropicHeaders(
headers: Record<string, string> | undefined,
): Record<string, string> {
@@ -1210,15 +1215,22 @@ class OpenAIShimMessages {
}
const isGithub = isGithubModelsMode()
const isMistral = isMistralMode()
const githubEndpointType = getGithubEndpointType(request.baseUrl)
const isGithubCopilot = isGithub && githubEndpointType === 'copilot'
const isGithubModels = isGithub && (githubEndpointType === 'models' || githubEndpointType === 'custom')
if (isGithub && body.max_completion_tokens !== undefined) {
if ((isGithub || isMistral) && body.max_completion_tokens !== undefined) {
body.max_tokens = body.max_completion_tokens
delete body.max_completion_tokens
}
// mistral also doesn't recognize body.store
if (isMistral) {
delete body.store
}
if (params.temperature !== undefined) body.temperature = params.temperature
if (params.top_p !== undefined) body.top_p = params.top_p
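
In short, Mistral requests go through the same OpenAI-compatible path but need two tweaks: `max_completion_tokens` becomes `max_tokens`, and `store` is dropped. A standalone sketch of that normalization (the body type here is assumed):

```ts
// Sketch of the request-body tweaks the shim applies when isMistralMode() is true.
type ChatCompletionBody = {
  max_tokens?: number
  max_completion_tokens?: number
  store?: boolean
  [key: string]: unknown
}

function normalizeBodyForMistral(body: ChatCompletionBody): ChatCompletionBody {
  if (body.max_completion_tokens !== undefined) {
    body.max_tokens = body.max_completion_tokens // Mistral expects max_tokens
    delete body.max_completion_tokens
  }
  delete body.store // Mistral rejects the OpenAI-only `store` field
  return body
}
```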
@@ -1256,9 +1268,8 @@ class OpenAIShimMessages {
...filterAnthropicHeaders(options?.headers),
}
const isGemini = isGeminiMode()
const apiKey =
this.providerOverride?.apiKey ?? process.env.OPENAI_API_KEY ?? ''
const isGemini = isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
const apiKey = this.providerOverride?.apiKey ?? process.env.OPENAI_API_KEY ?? ''
// Detect Azure endpoints by hostname (not raw URL) to prevent bypass via
// path segments like https://evil.com/cognitiveservices.azure.com/
let isAzure = false
@@ -1590,6 +1601,13 @@ export function createOpenAIShimClient(options: {
if (process.env.GEMINI_MODEL && !process.env.OPENAI_MODEL) {
process.env.OPENAI_MODEL = process.env.GEMINI_MODEL
}
} else if (isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)) {
process.env.OPENAI_BASE_URL =
process.env.MISTRAL_BASE_URL ?? 'https://api.mistral.ai/v1'
process.env.OPENAI_API_KEY = process.env.MISTRAL_API_KEY
if (process.env.MISTRAL_MODEL) {
process.env.OPENAI_MODEL = process.env.MISTRAL_MODEL
}
} else if (isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)) {
process.env.OPENAI_BASE_URL ??= GITHUB_COPILOT_BASE
process.env.OPENAI_API_KEY ??=

View File

@@ -7,6 +7,7 @@ import { isEnvTruthy } from '../../utils/envUtils.js'
export const DEFAULT_OPENAI_BASE_URL = 'https://api.openai.com/v1'
export const DEFAULT_CODEX_BASE_URL = 'https://chatgpt.com/backend-api/codex'
export const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1'
/** Default GitHub Copilot API model when user selects copilot / github:copilot */
export const DEFAULT_GITHUB_MODELS_API_MODEL = 'gpt-4o'
@@ -357,15 +358,20 @@ export function resolveProviderRequest(options?: {
reasoningEffortOverride?: ReasoningEffort
}): ResolvedProviderRequest {
const isGithubMode = isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
const isMistralMode = isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
const requestedModel =
options?.model?.trim() ||
process.env.OPENAI_MODEL?.trim() ||
(isMistralMode
? process.env.MISTRAL_MODEL?.trim()
: process.env.OPENAI_MODEL?.trim()) ||
options?.fallbackModel?.trim() ||
(isGithubMode ? 'github:copilot' : 'gpt-4o')
const descriptor = parseModelDescriptor(requestedModel)
const rawBaseUrl =
asEnvUrl(options?.baseUrl) ??
asEnvUrl(process.env.OPENAI_BASE_URL) ??
asEnvUrl(
isMistralMode ? (process.env.MISTRAL_BASE_URL ?? DEFAULT_MISTRAL_BASE_URL) : process.env.OPENAI_BASE_URL,
) ??
asEnvUrl(process.env.OPENAI_API_BASE)
const githubEndpointType = isGithubMode
@@ -418,6 +424,7 @@ export function resolveProviderRequest(options?: {
export function getAdditionalModelOptionsCacheScope(): string | null {
if (!isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)) {
if (!isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) &&
!isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) &&
!isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) &&
!isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK) &&
!isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX) &&

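In Mistral mode, the requested model and base URL now resolve through the MISTRAL_* variables before the usual OPENAI_* fallbacks. A sketch of that precedence (names from this diff; the final fallback here is illustrative):

```ts
// Sketch of resolveProviderRequest's model precedence when CLAUDE_CODE_USE_MISTRAL is set.
function pickRequestedModel(
  opts: { model?: string; fallbackModel?: string },
  env: NodeJS.ProcessEnv,
): string {
  const isMistralMode = env.CLAUDE_CODE_USE_MISTRAL === '1' || env.CLAUDE_CODE_USE_MISTRAL === 'true'
  return (
    opts.model?.trim() ||
    (isMistralMode ? env.MISTRAL_MODEL?.trim() : env.OPENAI_MODEL?.trim()) ||
    opts.fallbackModel?.trim() ||
    'devstral-latest' // illustrative fallback; the real code picks per provider
  )
}
```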
View File

@@ -118,6 +118,7 @@ export function isAnthropicAuthEnabled(): boolean {
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
// Check if user has configured an external API key source
@@ -1741,6 +1742,7 @@ export function isUsing3PServices(): boolean {
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
)
}

View File

@@ -78,7 +78,8 @@ export function getContextWindowForModel(
const isOpenAIProvider =
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
if (isOpenAIProvider) {
const openaiWindow = getOpenAIContextWindow(model)
if (openaiWindow !== undefined) {
@@ -186,7 +187,8 @@ export function getModelMaxOutputTokens(model: string): {
if (
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
) {
const openaiMax = getOpenAIMaxOutputTokens(model)
if (openaiMax !== undefined) {

View File

@@ -39,6 +39,9 @@ export function getSmallFastModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash-lite'
}
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'ministral-3b-latest'
}
// For OpenAI provider, use OPENAI_MODEL or a sensible default
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o-mini'
@@ -84,9 +87,8 @@ export function getUserSpecifiedModelSetting(): ModelSetting | undefined {
const provider = getAPIProvider()
specifiedModel =
(provider === 'gemini' ? process.env.GEMINI_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' || provider === 'github'
? process.env.OPENAI_MODEL
: undefined) ||
(provider === 'mistral' ? process.env.MISTRAL_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' || provider === 'mistral' || provider === 'github' ? process.env.OPENAI_MODEL : undefined) ||
(provider === 'firstParty' ? process.env.ANTHROPIC_MODEL : undefined) ||
settings.model ||
undefined
@@ -133,6 +135,10 @@ export function getDefaultOpusModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.5-pro-preview-03-25'
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'devstral-latest'
}
// OpenAI provider: use user-specified model or default
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'
@@ -163,6 +169,10 @@ export function getDefaultSonnetModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash'
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'mistral-medium-latest'
}
// OpenAI provider
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'
@@ -187,6 +197,10 @@ export function getDefaultHaikuModel(): ModelName {
if (process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL) {
return process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'ministral-3b-latest'
}
// OpenAI provider
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o-mini'
@@ -256,6 +270,9 @@ export function getDefaultMainLoopModelSetting(): ModelName | ModelAlias {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash'
}
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'devstral-latest'
}
// OpenAI provider: always use the configured OpenAI model
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'

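Taken together, the Mistral branches above give these tier defaults when MISTRAL_MODEL is unset (a summary only, not new behavior):

```ts
// Summary of the Mistral defaults added in this file; the real code always prefers MISTRAL_MODEL.
const MISTRAL_TIER_DEFAULTS = {
  opus: 'devstral-latest',         // getDefaultOpusModel / getDefaultMainLoopModelSetting
  sonnet: 'mistral-medium-latest', // getDefaultSonnetModel
  haiku: 'ministral-3b-latest',    // getDefaultHaikuModel / getSmallFastModel
} as const
```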
View File

@@ -79,8 +79,10 @@ const OPENAI_CONTEXT_WINDOWS: Record<string, number> = {
'mixtral-8x7b-32768': 32_768,
// Mistral
'mistral-large-latest': 131_072,
'mistral-small-latest': 131_072,
'mistral-large-latest': 256_000,
'mistral-small-latest': 256_000,
'devstral-latest': 256_000,
'ministral-3b-latest': 256_000,
// MiniMax
'MiniMax-M2.7': 204_800,

View File

@@ -11,10 +11,14 @@ export type APIProvider =
| 'gemini'
| 'github'
| 'codex'
| 'mistral'
export function getAPIProvider(): APIProvider {
return isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
? 'gemini'
: isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
? 'mistral'
: isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
? 'github'
: isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)

View File

@@ -145,7 +145,10 @@ describe('applyProviderFlag - vertex', () => {
})
describe('applyProviderFlag - ollama', () => {
test('sets CLAUDE_CODE_USE_OPENAI=1 with Ollama base URL', () => {
test('sets CLAUDE_CODE_USE_OPENAI=1 with Ollama defaults when unset', () => {
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_KEY
const result = applyProviderFlag('ollama', [])
expect(result.error).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
@@ -163,6 +166,16 @@ describe('applyProviderFlag - ollama', () => {
applyProviderFlag('ollama', [])
expect(process.env.OPENAI_BASE_URL).toBe('http://my-ollama:11434/v1')
})
test('preserves explicit OPENAI_BASE_URL and OPENAI_API_KEY overrides', () => {
process.env.OPENAI_BASE_URL = 'http://remote-ollama.internal:11434/v1'
process.env.OPENAI_API_KEY = 'secret-token'
applyProviderFlag('ollama', [])
expect(process.env.OPENAI_BASE_URL).toBe('http://remote-ollama.internal:11434/v1')
expect(process.env.OPENAI_API_KEY).toBe('secret-token')
})
})
describe('applyProviderFlag - invalid provider', () => {
@@ -175,6 +188,9 @@ describe('applyProviderFlag - invalid provider', () => {
describe('applyProviderFlagFromArgs', () => {
test('applies ollama provider and model from argv in one step', () => {
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_KEY
const result = applyProviderFlagFromArgs([
'--provider',
'ollama',
@@ -185,6 +201,7 @@ describe('applyProviderFlagFromArgs', () => {
expect(result?.error).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(process.env.OPENAI_BASE_URL).toBe('http://localhost:11434/v1')
expect(process.env.OPENAI_API_KEY).toBe('ollama')
expect(process.env.OPENAI_MODEL).toBe('qwen2.5:3b')
})

View File

@@ -7,6 +7,7 @@
* Usage:
* openclaude --provider openai --model gpt-4o
* openclaude --provider gemini --model gemini-2.0-flash
* openclaude --provider mistral --model ministral-3b-latest
* openclaude --provider ollama --model llama3.2
* openclaude --provider anthropic (default, no-op)
*/
@@ -15,6 +16,7 @@ export const VALID_PROVIDERS = [
'anthropic',
'openai',
'gemini',
'mistral',
'github',
'bedrock',
'vertex',
@@ -77,6 +79,13 @@ export function applyProviderFlag(
}
}
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_MISTRAL
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.CLAUDE_CODE_USE_BEDROCK
delete process.env.CLAUDE_CODE_USE_VERTEX
const model = parseModelFlag(args)
switch (provider as ProviderFlagName) {
@@ -86,17 +95,22 @@ export function applyProviderFlag(
case 'openai':
process.env.CLAUDE_CODE_USE_OPENAI = '1'
if (model) process.env.OPENAI_MODEL ??= model
if (model) process.env.OPENAI_MODEL = model
break
case 'gemini':
process.env.CLAUDE_CODE_USE_GEMINI = '1'
if (model) process.env.GEMINI_MODEL ??= model
if (model) process.env.GEMINI_MODEL = model
break
case 'mistral':
process.env.CLAUDE_CODE_USE_MISTRAL = '1'
if (model) process.env.MISTRAL_MODEL = model
break
case 'github':
process.env.CLAUDE_CODE_USE_GITHUB = '1'
if (model) process.env.OPENAI_MODEL ??= model
if (model) process.env.OPENAI_MODEL = model
break
case 'bedrock':
@@ -109,9 +123,13 @@ export function applyProviderFlag(
case 'ollama':
process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL ??= 'http://localhost:11434/v1'
process.env.OPENAI_API_KEY ??= 'ollama'
if (model) process.env.OPENAI_MODEL ??= model
if (!process.env.OPENAI_BASE_URL) {
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
}
if (!process.env.OPENAI_API_KEY) {
process.env.OPENAI_API_KEY = 'ollama'
}
if (model) process.env.OPENAI_MODEL = model
break
}
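
Net effect: `--provider mistral --model <name>` sets CLAUDE_CODE_USE_MISTRAL=1, clears the other provider switches, and now overwrites the model variable instead of only filling it when unset. A usage sketch (the import path and flag parsing are assumptions):

```ts
import { applyProviderFlag } from './providerFlag.js' // path assumed

applyProviderFlag('mistral', ['--model', 'ministral-3b-latest'])
// Afterwards (expected state, per the switch above):
//   process.env.CLAUDE_CODE_USE_MISTRAL === '1'
//   process.env.MISTRAL_MODEL === 'ministral-3b-latest'  (plain assignment, no longer ??=)
//   the other CLAUDE_CODE_USE_* switches are deleted, so the selection is exclusive
```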

View File

@@ -19,10 +19,13 @@ export const PROFILE_FILE_NAME = '.openclaude-profile.json'
export const DEFAULT_GEMINI_BASE_URL =
'https://generativelanguage.googleapis.com/v1beta/openai'
export const DEFAULT_GEMINI_MODEL = 'gemini-2.0-flash'
export const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1'
export const DEFAULT_MISTRAL_MODEL = 'devstral-latest'
const PROFILE_ENV_KEYS = [
'CLAUDE_CODE_USE_OPENAI',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_MISTRAL',
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_VERTEX',
'CLAUDE_CODE_USE_FOUNDRY',
@@ -38,6 +41,9 @@ const PROFILE_ENV_KEYS = [
'GEMINI_MODEL',
'GEMINI_BASE_URL',
'GOOGLE_API_KEY',
'MISTRAL_BASE_URL',
'MISTRAL_API_KEY',
'MISTRAL_MODEL',
] as const
const SECRET_ENV_KEYS = [
@@ -45,9 +51,10 @@ const SECRET_ENV_KEYS = [
'CODEX_API_KEY',
'GEMINI_API_KEY',
'GOOGLE_API_KEY',
'MISTRAL_API_KEY',
] as const
export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini' | 'atomic-chat'
export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini' | 'atomic-chat' | 'mistral'
export type ProfileEnv = {
OPENAI_BASE_URL?: string
@@ -60,6 +67,9 @@ export type ProfileEnv = {
GEMINI_AUTH_MODE?: 'api-key' | 'access-token' | 'adc'
GEMINI_MODEL?: string
GEMINI_BASE_URL?: string
MISTRAL_BASE_URL?: string
MISTRAL_API_KEY?: string
MISTRAL_MODEL?: string
}
export type ProfileFile = {
@@ -94,7 +104,8 @@ export function isProviderProfile(value: unknown): value is ProviderProfile {
value === 'ollama' ||
value === 'codex' ||
value === 'gemini' ||
value === 'atomic-chat'
value === 'atomic-chat' ||
value === 'mistral'
)
}
@@ -350,6 +361,44 @@ export function buildCodexProfileEnv(options: {
return env
}
export function buildMistralProfileEnv(options: {
model?: string | null
baseUrl?: string | null
apiKey?: string | null
processEnv?: NodeJS.ProcessEnv
}): ProfileEnv | null {
const processEnv = options.processEnv ?? process.env
const key = sanitizeApiKey(options.apiKey ?? processEnv.MISTRAL_API_KEY)
if (!key) {
return null
}
const env: ProfileEnv = {
MISTRAL_API_KEY: key,
MISTRAL_MODEL:
sanitizeProviderConfigValue(options.model, { MISTRAL_API_KEY: key }, processEnv) ||
sanitizeProviderConfigValue(
processEnv.MISTRAL_MODEL,
{ MISTRAL_API_KEY: key },
processEnv,
) ||
DEFAULT_MISTRAL_MODEL,
}
const baseUrl =
sanitizeProviderConfigValue(options.baseUrl, { MISTRAL_API_KEY: key }, processEnv) ||
sanitizeProviderConfigValue(
processEnv.MISTRAL_BASE_URL,
{ MISTRAL_API_KEY: key },
processEnv,
)
if (baseUrl) {
env.MISTRAL_BASE_URL = baseUrl
}
return env
}
export function createProfileFile(
profile: ProviderProfile,
env: ProfileEnv,
@@ -416,6 +465,7 @@ export function hasExplicitProviderSelection(
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined ||
processEnv.CLAUDE_CODE_USE_GITHUB !== undefined ||
processEnv.CLAUDE_CODE_USE_GEMINI !== undefined ||
processEnv.CLAUDE_CODE_USE_MISTRAL !== undefined ||
processEnv.CLAUDE_CODE_USE_BEDROCK !== undefined ||
processEnv.CLAUDE_CODE_USE_VERTEX !== undefined ||
processEnv.CLAUDE_CODE_USE_FOUNDRY !== undefined
@@ -540,11 +590,82 @@ export async function buildLaunchEnv(options: {
return env
}
if (options.profile === 'mistral') {
const env: NodeJS.ProcessEnv = {
...processEnv,
CLAUDE_CODE_USE_MISTRAL: '1',
}
delete env.CLAUDE_CODE_USE_OPENAI
delete env.CLAUDE_CODE_USE_GITHUB
delete env.CLAUDE_CODE_USE_GEMINI
delete env.CLAUDE_CODE_USE_BEDROCK
delete env.CLAUDE_CODE_USE_VERTEX
delete env.CLAUDE_CODE_USE_FOUNDRY
const shellMistralModel = sanitizeProviderConfigValue(
processEnv.MISTRAL_MODEL,
processEnv,
)
const persistedMistralModel = sanitizeProviderConfigValue(
persistedEnv.MISTRAL_MODEL,
persistedEnv,
)
const shellMistralBaseUrl = sanitizeProviderConfigValue(
processEnv.MISTRAL_BASE_URL,
processEnv,
)
const persistedMistralBaseUrl = sanitizeProviderConfigValue(
persistedEnv.MISTRAL_BASE_URL,
persistedEnv,
)
env.MISTRAL_MODEL =
shellMistralModel || persistedMistralModel || DEFAULT_MISTRAL_MODEL
const shellMistralKey = sanitizeApiKey(
processEnv.MISTRAL_API_KEY,
)
const persistedMistralKey = sanitizeApiKey(persistedEnv.MISTRAL_API_KEY)
const mistralKey = shellMistralKey || persistedMistralKey
if (mistralKey) {
env.MISTRAL_API_KEY = mistralKey
} else {
delete env.MISTRAL_API_KEY
}
if (shellMistralBaseUrl || persistedMistralBaseUrl) {
env.MISTRAL_BASE_URL = shellMistralBaseUrl || persistedMistralBaseUrl
} else {
delete env.MISTRAL_BASE_URL
}
delete env.GEMINI_API_KEY
delete env.GEMINI_AUTH_MODE
delete env.GEMINI_ACCESS_TOKEN
delete env.GEMINI_MODEL
delete env.GEMINI_BASE_URL
delete env.GOOGLE_API_KEY
delete env.OPENAI_BASE_URL
delete env.OPENAI_MODEL
delete env.OPENAI_API_KEY
delete env.CODEX_API_KEY
delete env.CHATGPT_ACCOUNT_ID
delete env.CODEX_ACCOUNT_ID
return env
}
const env: NodeJS.ProcessEnv = {
...processEnv,
CLAUDE_CODE_USE_OPENAI: '1',
}
delete env.CLAUDE_CODE_USE_MISTRAL
delete env.CLAUDE_CODE_USE_BEDROCK
delete env.CLAUDE_CODE_USE_VERTEX
delete env.CLAUDE_CODE_USE_FOUNDRY
delete env.CLAUDE_CODE_USE_GEMINI
delete env.CLAUDE_CODE_USE_GITHUB
delete env.GEMINI_API_KEY

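The mistral branch of buildLaunchEnv prefers live shell values over the persisted profile, then falls back to the built-in default, and scrubs the other providers' variables from the child env. A sketch of that precedence for the model field (the real code routes values through sanitizeProviderConfigValue; trim() stands in here):

```ts
// Sketch: precedence used for MISTRAL_MODEL when launching the mistral profile.
const DEFAULT_MISTRAL_MODEL = 'devstral-latest'

function pickMistralModel(shellValue?: string, persistedValue?: string): string {
  return shellValue?.trim() || persistedValue?.trim() || DEFAULT_MISTRAL_MODEL
}

pickMistralModel(undefined, 'mistral-medium-latest')         // saved profile wins when the shell is silent
pickMistralModel('devstral-latest', 'mistral-medium-latest') // shell override wins
```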
View File

@@ -13,9 +13,9 @@ export type ProviderPreset =
| 'moonshotai'
| 'deepseek'
| 'gemini'
| 'mistral'
| 'together'
| 'groq'
| 'mistral'
| 'azure-openai'
| 'openrouter'
| 'lmstudio'
@@ -163,6 +163,15 @@ export function getProviderPresetDefaults(
apiKey: '',
requiresApiKey: true,
}
case 'mistral':
return {
provider: 'openai',
name: 'Mistral',
baseUrl: 'https://api.mistral.ai/v1',
model: 'devstral-latest',
apiKey: '',
requiresApiKey: true,
}
case 'together':
return {
provider: 'openai',
@@ -181,15 +190,6 @@ export function getProviderPresetDefaults(
apiKey: '',
requiresApiKey: true,
}
case 'mistral':
return {
provider: 'openai',
name: 'Mistral',
baseUrl: 'https://api.mistral.ai/v1',
model: 'mistral-large-latest',
apiKey: '',
requiresApiKey: true,
}
case 'azure-openai':
return {
provider: 'openai',
@@ -258,6 +258,7 @@ function hasProviderSelectionFlags(
return (
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined ||
processEnv.CLAUDE_CODE_USE_GEMINI !== undefined ||
processEnv.CLAUDE_CODE_USE_MISTRAL !== undefined ||
processEnv.CLAUDE_CODE_USE_GITHUB !== undefined ||
processEnv.CLAUDE_CODE_USE_BEDROCK !== undefined ||
processEnv.CLAUDE_CODE_USE_VERTEX !== undefined ||
@@ -319,6 +320,7 @@ function isProcessEnvAlignedWithProfile(
return (
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined &&
processEnv.CLAUDE_CODE_USE_GEMINI === undefined &&
processEnv.CLAUDE_CODE_USE_MISTRAL === undefined &&
processEnv.CLAUDE_CODE_USE_GITHUB === undefined &&
processEnv.CLAUDE_CODE_USE_BEDROCK === undefined &&
processEnv.CLAUDE_CODE_USE_VERTEX === undefined &&
@@ -347,6 +349,7 @@ export function clearProviderProfileEnvFromProcessEnv(
): void {
delete processEnv.CLAUDE_CODE_USE_OPENAI
delete processEnv.CLAUDE_CODE_USE_GEMINI
delete processEnv.CLAUDE_CODE_USE_MISTRAL
delete processEnv.CLAUDE_CODE_USE_GITHUB
delete processEnv.CLAUDE_CODE_USE_BEDROCK
delete processEnv.CLAUDE_CODE_USE_VERTEX

View File

@@ -250,6 +250,8 @@ export function buildAPIProviderProperties(): Property[] {
openai: 'OpenAI-compatible',
codex: 'Codex',
gemini: 'Google Gemini',
github: 'GitHub Models',
mistral: 'Mistral',
}[apiProvider];
properties.push({
label: 'API provider',
@@ -394,6 +396,21 @@ export function buildAPIProviderProperties(): Property[] {
value: redactSecretValueForDisplay(geminiModel, process.env) ?? geminiModel
});
}
} else if (apiProvider === 'mistral') {
const mistralBaseUrl = process.env.MISTRAL_BASE_URL;
if (mistralBaseUrl) {
properties.push({
label: 'Mistral base URL',
value: redactSecretValueForDisplay(mistralBaseUrl, process.env) ?? mistralBaseUrl
})
}
const mistralModel = process.env.MISTRAL_MODEL;
if (mistralModel) {
properties.push({
label: 'Model',
value: redactSecretValueForDisplay(mistralModel, process.env) ?? mistralModel
})
}
}
const proxyUrl = getProxyUrl();
if (proxyUrl) {

View File

@@ -101,6 +101,7 @@ const TEAMMATE_ENV_VARS = [
'CLAUDE_CODE_USE_FOUNDRY',
'CLAUDE_CODE_USE_GITHUB',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_MISTRAL',
'CLAUDE_CODE_USE_OPENAI',
'GITHUB_TOKEN',
'GH_TOKEN',
@@ -111,6 +112,9 @@ const TEAMMATE_ENV_VARS = [
'GEMINI_BASE_URL',
'GEMINI_MODEL',
'GOOGLE_API_KEY',
'MISTRAL_API_KEY',
'MISTRAL_MODEL',
'MISTRAL_BASE_URL',
// Custom API endpoint
'ANTHROPIC_BASE_URL',
// Config directory override

View File

@@ -7,6 +7,7 @@ const SAVED_PROFILES = new Set([
'codex',
'gemini',
'atomic-chat',
'mistral',
]);
const CODEX_ALIAS_MODELS = new Set([
@@ -315,8 +316,10 @@ function getDetail(env, fallback) {
return (
asNonEmptyString(env.OPENAI_MODEL) ||
asNonEmptyString(env.GEMINI_MODEL) ||
asNonEmptyString(env.MISTRAL_MODEL) ||
asNonEmptyString(env.OPENAI_BASE_URL) ||
asNonEmptyString(env.GEMINI_BASE_URL) ||
asNonEmptyString(env.MISTRAL_BASE_URL) ||
fallback
);
}
@@ -339,6 +342,8 @@ function describeSavedProfile(profile) {
return buildProviderState('Ollama', getDetail(profile.env, 'saved profile'), 'profile');
case 'gemini':
return buildProviderState('Gemini', getDetail(profile.env, 'saved profile'), 'profile');
case 'mistral':
return buildProviderState('Mistral', getDetail(profile.env, 'saved profile'), 'profile');
case 'codex':
return buildProviderState('Codex', getDetail(profile.env, 'saved profile'), 'profile');
case 'atomic-chat':
@@ -358,6 +363,10 @@ function describeProviderState({ shimEnabled, env, profile }) {
return buildProviderState('Gemini', getDetail(env, 'from environment'), 'env');
}
if (isEnvTruthy(env.CLAUDE_CODE_USE_MISTRAL)) {
return buildProviderState('Mistral', getDetail(env, 'from environment'), 'env');
}
if (isEnvTruthy(env.CLAUDE_CODE_USE_GITHUB)) {
return buildProviderState('GitHub Models', getDetail(env, 'from environment'), 'env');
}