Decouple and fix mistral (#595)

* decouple and fix mistral

* fix wrong variable for currentBaseUrl and buildAPIProviderProperties
This commit is contained in:
lunamonke
2026-04-12 08:26:14 +01:00
committed by GitHub
parent b126e38b1a
commit 4c50977f3c
24 changed files with 556 additions and 46 deletions

View File

@@ -118,6 +118,7 @@ export function isAnthropicAuthEnabled(): boolean {
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
// Check if user has configured an external API key source
@@ -1741,6 +1742,7 @@ export function isUsing3PServices(): boolean {
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
)
}

View File

@@ -78,7 +78,8 @@ export function getContextWindowForModel(
const isOpenAIProvider =
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
if (isOpenAIProvider) {
const openaiWindow = getOpenAIContextWindow(model)
if (openaiWindow !== undefined) {
@@ -186,7 +187,8 @@ export function getModelMaxOutputTokens(model: string): {
if (
isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
) {
const openaiMax = getOpenAIMaxOutputTokens(model)
if (openaiMax !== undefined) {

View File

@@ -39,6 +39,9 @@ export function getSmallFastModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash-lite'
}
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'ministral-3b-latest'
}
// For OpenAI provider, use OPENAI_MODEL or a sensible default
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o-mini'
@@ -84,9 +87,8 @@ export function getUserSpecifiedModelSetting(): ModelSetting | undefined {
const provider = getAPIProvider()
specifiedModel =
(provider === 'gemini' ? process.env.GEMINI_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' || provider === 'github'
? process.env.OPENAI_MODEL
: undefined) ||
(provider === 'mistral' ? process.env.MISTRAL_MODEL : undefined) ||
(provider === 'openai' || provider === 'gemini' || provider === 'mistral' || provider === 'github' ? process.env.OPENAI_MODEL : undefined) ||
(provider === 'firstParty' ? process.env.ANTHROPIC_MODEL : undefined) ||
settings.model ||
undefined
@@ -133,6 +135,10 @@ export function getDefaultOpusModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.5-pro-preview-03-25'
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'devstral-latest'
}
// OpenAI provider: use user-specified model or default
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'
@@ -163,6 +169,10 @@ export function getDefaultSonnetModel(): ModelName {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash'
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'mistral-medium-latest'
}
// OpenAI provider
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'
@@ -187,6 +197,10 @@ export function getDefaultHaikuModel(): ModelName {
if (process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL) {
return process.env.ANTHROPIC_DEFAULT_HAIKU_MODEL
}
// Mistral provider
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'ministral-3b-latest'
}
// OpenAI provider
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o-mini'
@@ -256,6 +270,9 @@ export function getDefaultMainLoopModelSetting(): ModelName | ModelAlias {
if (getAPIProvider() === 'gemini') {
return process.env.GEMINI_MODEL || 'gemini-2.0-flash'
}
if (getAPIProvider() === 'mistral') {
return process.env.MISTRAL_MODEL || 'devstral-latest'
}
// OpenAI provider: always use the configured OpenAI model
if (getAPIProvider() === 'openai') {
return process.env.OPENAI_MODEL || 'gpt-4o'

View File

@@ -79,8 +79,10 @@ const OPENAI_CONTEXT_WINDOWS: Record<string, number> = {
'mixtral-8x7b-32768': 32_768,
// Mistral
'mistral-large-latest': 131_072,
'mistral-small-latest': 131_072,
'mistral-large-latest': 256_000,
'mistral-small-latest': 256_000,
'devstral-latest': 256_000,
'ministral-3b-latest': 256_000,
// MiniMax
'MiniMax-M2.7': 204_800,

View File

@@ -11,10 +11,14 @@ export type APIProvider =
| 'gemini'
| 'github'
| 'codex'
| 'mistral'
export function getAPIProvider(): APIProvider {
return isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI)
? 'gemini'
:
isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL)
? 'mistral'
: isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB)
? 'github'
: isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI)

View File

@@ -145,7 +145,10 @@ describe('applyProviderFlag - vertex', () => {
})
describe('applyProviderFlag - ollama', () => {
test('sets CLAUDE_CODE_USE_OPENAI=1 with Ollama base URL', () => {
test('sets CLAUDE_CODE_USE_OPENAI=1 with Ollama defaults when unset', () => {
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_KEY
const result = applyProviderFlag('ollama', [])
expect(result.error).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
@@ -163,6 +166,16 @@ describe('applyProviderFlag - ollama', () => {
applyProviderFlag('ollama', [])
expect(process.env.OPENAI_BASE_URL).toBe('http://my-ollama:11434/v1')
})
test('preserves explicit OPENAI_BASE_URL and OPENAI_API_KEY overrides', () => {
process.env.OPENAI_BASE_URL = 'http://remote-ollama.internal:11434/v1'
process.env.OPENAI_API_KEY = 'secret-token'
applyProviderFlag('ollama', [])
expect(process.env.OPENAI_BASE_URL).toBe('http://remote-ollama.internal:11434/v1')
expect(process.env.OPENAI_API_KEY).toBe('secret-token')
})
})
describe('applyProviderFlag - invalid provider', () => {
@@ -175,6 +188,9 @@ describe('applyProviderFlag - invalid provider', () => {
describe('applyProviderFlagFromArgs', () => {
test('applies ollama provider and model from argv in one step', () => {
delete process.env.OPENAI_BASE_URL
delete process.env.OPENAI_API_KEY
const result = applyProviderFlagFromArgs([
'--provider',
'ollama',
@@ -185,6 +201,7 @@ describe('applyProviderFlagFromArgs', () => {
expect(result?.error).toBeUndefined()
expect(process.env.CLAUDE_CODE_USE_OPENAI).toBe('1')
expect(process.env.OPENAI_BASE_URL).toBe('http://localhost:11434/v1')
expect(process.env.OPENAI_API_KEY).toBe('ollama')
expect(process.env.OPENAI_MODEL).toBe('qwen2.5:3b')
})

View File

@@ -7,6 +7,7 @@
* Usage:
* openclaude --provider openai --model gpt-4o
* openclaude --provider gemini --model gemini-2.0-flash
* openclaude --provider mistral --model ministral-3b-latest
* openclaude --provider ollama --model llama3.2
* openclaude --provider anthropic (default, no-op)
*/
@@ -15,6 +16,7 @@ export const VALID_PROVIDERS = [
'anthropic',
'openai',
'gemini',
'mistral',
'github',
'bedrock',
'vertex',
@@ -77,6 +79,13 @@ export function applyProviderFlag(
}
}
delete process.env.CLAUDE_CODE_USE_OPENAI
delete process.env.CLAUDE_CODE_USE_GEMINI
delete process.env.CLAUDE_CODE_USE_MISTRAL
delete process.env.CLAUDE_CODE_USE_GITHUB
delete process.env.CLAUDE_CODE_USE_BEDROCK
delete process.env.CLAUDE_CODE_USE_VERTEX
const model = parseModelFlag(args)
switch (provider as ProviderFlagName) {
@@ -86,17 +95,22 @@ export function applyProviderFlag(
case 'openai':
process.env.CLAUDE_CODE_USE_OPENAI = '1'
if (model) process.env.OPENAI_MODEL ??= model
if (model) process.env.OPENAI_MODEL = model
break
case 'gemini':
process.env.CLAUDE_CODE_USE_GEMINI = '1'
if (model) process.env.GEMINI_MODEL ??= model
if (model) process.env.GEMINI_MODEL = model
break
case 'mistral':
process.env.CLAUDE_CODE_USE_MISTRAL = '1'
if (model) process.env.MISTRAL_MODEL = model
break
case 'github':
process.env.CLAUDE_CODE_USE_GITHUB = '1'
if (model) process.env.OPENAI_MODEL ??= model
if (model) process.env.OPENAI_MODEL = model
break
case 'bedrock':
@@ -109,9 +123,13 @@ export function applyProviderFlag(
case 'ollama':
process.env.CLAUDE_CODE_USE_OPENAI = '1'
process.env.OPENAI_BASE_URL ??= 'http://localhost:11434/v1'
process.env.OPENAI_API_KEY ??= 'ollama'
if (model) process.env.OPENAI_MODEL ??= model
if (!process.env.OPENAI_BASE_URL) {
process.env.OPENAI_BASE_URL = 'http://localhost:11434/v1'
}
if (!process.env.OPENAI_API_KEY) {
process.env.OPENAI_API_KEY = 'ollama'
}
if (model) process.env.OPENAI_MODEL = model
break
}

View File

@@ -19,10 +19,13 @@ export const PROFILE_FILE_NAME = '.openclaude-profile.json'
export const DEFAULT_GEMINI_BASE_URL =
'https://generativelanguage.googleapis.com/v1beta/openai'
export const DEFAULT_GEMINI_MODEL = 'gemini-2.0-flash'
export const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1'
export const DEFAULT_MISTRAL_MODEL = 'devstral-latest'
const PROFILE_ENV_KEYS = [
'CLAUDE_CODE_USE_OPENAI',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_MISTRAL',
'CLAUDE_CODE_USE_BEDROCK',
'CLAUDE_CODE_USE_VERTEX',
'CLAUDE_CODE_USE_FOUNDRY',
@@ -38,6 +41,9 @@ const PROFILE_ENV_KEYS = [
'GEMINI_MODEL',
'GEMINI_BASE_URL',
'GOOGLE_API_KEY',
'MISTRAL_BASE_URL',
'MISTRAL_API_KEY',
'MISTRAL_MODEL',
] as const
const SECRET_ENV_KEYS = [
@@ -45,9 +51,10 @@ const SECRET_ENV_KEYS = [
'CODEX_API_KEY',
'GEMINI_API_KEY',
'GOOGLE_API_KEY',
'MISTRAL_API_KEY',
] as const
export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini' | 'atomic-chat'
export type ProviderProfile = 'openai' | 'ollama' | 'codex' | 'gemini' | 'atomic-chat' | 'mistral'
export type ProfileEnv = {
OPENAI_BASE_URL?: string
@@ -60,6 +67,9 @@ export type ProfileEnv = {
GEMINI_AUTH_MODE?: 'api-key' | 'access-token' | 'adc'
GEMINI_MODEL?: string
GEMINI_BASE_URL?: string
MISTRAL_BASE_URL?: string
MISTRAL_API_KEY?: string
MISTRAL_MODEL?: string
}
export type ProfileFile = {
@@ -94,7 +104,8 @@ export function isProviderProfile(value: unknown): value is ProviderProfile {
value === 'ollama' ||
value === 'codex' ||
value === 'gemini' ||
value === 'atomic-chat'
value === 'atomic-chat' ||
value === 'mistral'
)
}
@@ -350,6 +361,44 @@ export function buildCodexProfileEnv(options: {
return env
}
/**
 * Builds the persisted profile environment for the Mistral provider.
 *
 * The API key is taken from `options.apiKey`, falling back to
 * `MISTRAL_API_KEY` in the given process environment; when neither yields a
 * usable key (as judged by `sanitizeApiKey`), no profile can be built and
 * `null` is returned.
 *
 * @param options.model      preferred model name, if any
 * @param options.baseUrl    preferred API base URL, if any
 * @param options.apiKey     explicit API key; overrides the environment
 * @param options.processEnv environment to read fallbacks from
 *                           (defaults to `process.env`)
 * @returns a `ProfileEnv` with `MISTRAL_API_KEY`, `MISTRAL_MODEL`, and an
 *          optional `MISTRAL_BASE_URL`, or `null` when no API key is
 *          available
 */
export function buildMistralProfileEnv(options: {
  model?: string | null
  baseUrl?: string | null
  apiKey?: string | null
  processEnv?: NodeJS.ProcessEnv
}): ProfileEnv | null {
  const processEnv = options.processEnv ?? process.env
  // Explicit option wins over the environment variable. sanitizeApiKey is
  // expected to return a falsy value for unusable keys — TODO confirm.
  const key = sanitizeApiKey(options.apiKey ?? processEnv.MISTRAL_API_KEY)
  if (!key) {
    return null
  }
  const env: ProfileEnv = {
    MISTRAL_API_KEY: key,
    // Model precedence: explicit option → MISTRAL_MODEL from the shell
    // environment → built-in default.
    MISTRAL_MODEL:
      sanitizeProviderConfigValue(options.model, { MISTRAL_API_KEY: key }, processEnv) ||
      sanitizeProviderConfigValue(
        processEnv.MISTRAL_MODEL,
        { MISTRAL_API_KEY: key },
        processEnv,
      ) ||
      DEFAULT_MISTRAL_MODEL,
  }
  // Unlike the model, the base URL has no default applied here: it is only
  // written into the profile when explicitly configured (option or
  // MISTRAL_BASE_URL), so the provider's own default URL applies otherwise.
  const baseUrl =
    sanitizeProviderConfigValue(options.baseUrl, { MISTRAL_API_KEY: key }, processEnv) ||
    sanitizeProviderConfigValue(
      processEnv.MISTRAL_BASE_URL,
      { MISTRAL_API_KEY: key },
      processEnv,
    )
  if (baseUrl) {
    env.MISTRAL_BASE_URL = baseUrl
  }
  return env
}
export function createProfileFile(
profile: ProviderProfile,
env: ProfileEnv,
@@ -416,6 +465,7 @@ export function hasExplicitProviderSelection(
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined ||
processEnv.CLAUDE_CODE_USE_GITHUB !== undefined ||
processEnv.CLAUDE_CODE_USE_GEMINI !== undefined ||
processEnv.CLAUDE_CODE_USE_MISTRAL !== undefined ||
processEnv.CLAUDE_CODE_USE_BEDROCK !== undefined ||
processEnv.CLAUDE_CODE_USE_VERTEX !== undefined ||
processEnv.CLAUDE_CODE_USE_FOUNDRY !== undefined
@@ -540,11 +590,82 @@ export async function buildLaunchEnv(options: {
return env
}
if (options.profile === 'mistral') {
const env: NodeJS.ProcessEnv = {
...processEnv,
CLAUDE_CODE_USE_MISTRAL: '1',
}
delete env.CLAUDE_CODE_USE_OPENAI
delete env.CLAUDE_CODE_USE_GITHUB
delete env.CLAUDE_CODE_USE_GEMINI
delete env.CLAUDE_CODE_USE_BEDROCK
delete env.CLAUDE_CODE_USE_VERTEX
delete env.CLAUDE_CODE_USE_FOUNDRY
const shellMistralModel = sanitizeProviderConfigValue(
processEnv.MISTRAL_MODEL,
processEnv,
)
const persistedMistralModel = sanitizeProviderConfigValue(
persistedEnv.MISTRAL_MODEL,
persistedEnv,
)
const shellMistralBaseUrl = sanitizeProviderConfigValue(
processEnv.MISTRAL_BASE_URL,
processEnv,
)
const persistedMistralBaseUrl = sanitizeProviderConfigValue(
persistedEnv.MISTRAL_BASE_URL,
persistedEnv,
)
env.MISTRAL_MODEL =
shellMistralModel || persistedMistralModel || DEFAULT_MISTRAL_MODEL
const shellMistralKey = sanitizeApiKey(
processEnv.MISTRAL_API_KEY,
)
const persistedMistralKey = sanitizeApiKey(persistedEnv.MISTRAL_API_KEY)
const mistralKey = shellMistralKey || persistedMistralKey
if (mistralKey) {
env.MISTRAL_API_KEY = mistralKey
} else {
delete env.MISTRAL_API_KEY
}
if (shellMistralBaseUrl || persistedMistralBaseUrl) {
env.MISTRAL_BASE_URL = shellMistralBaseUrl || persistedMistralBaseUrl
} else {
delete env.MISTRAL_BASE_URL
}
delete env.GEMINI_API_KEY
delete env.GEMINI_AUTH_MODE
delete env.GEMINI_ACCESS_TOKEN
delete env.GEMINI_MODEL
delete env.GEMINI_BASE_URL
delete env.GOOGLE_API_KEY
delete env.OPENAI_BASE_URL
delete env.OPENAI_MODEL
delete env.OPENAI_API_KEY
delete env.CODEX_API_KEY
delete env.CHATGPT_ACCOUNT_ID
delete env.CODEX_ACCOUNT_ID
return env
}
const env: NodeJS.ProcessEnv = {
...processEnv,
CLAUDE_CODE_USE_OPENAI: '1',
}
delete env.CLAUDE_CODE_USE_MISTRAL
delete env.CLAUDE_CODE_USE_BEDROCK
delete env.CLAUDE_CODE_USE_VERTEX
delete env.CLAUDE_CODE_USE_FOUNDRY
delete env.CLAUDE_CODE_USE_GEMINI
delete env.CLAUDE_CODE_USE_GITHUB
delete env.GEMINI_API_KEY

View File

@@ -13,9 +13,9 @@ export type ProviderPreset =
| 'moonshotai'
| 'deepseek'
| 'gemini'
| 'mistral'
| 'together'
| 'groq'
| 'mistral'
| 'azure-openai'
| 'openrouter'
| 'lmstudio'
@@ -163,6 +163,15 @@ export function getProviderPresetDefaults(
apiKey: '',
requiresApiKey: true,
}
case 'mistral':
return {
provider: 'openai',
name: 'Mistral',
baseUrl: 'https://api.mistral.ai/v1',
model: 'devstral-latest',
apiKey: '',
requiresApiKey: true
}
case 'together':
return {
provider: 'openai',
@@ -181,15 +190,6 @@ export function getProviderPresetDefaults(
apiKey: '',
requiresApiKey: true,
}
case 'mistral':
return {
provider: 'openai',
name: 'Mistral',
baseUrl: 'https://api.mistral.ai/v1',
model: 'mistral-large-latest',
apiKey: '',
requiresApiKey: true,
}
case 'azure-openai':
return {
provider: 'openai',
@@ -258,6 +258,7 @@ function hasProviderSelectionFlags(
return (
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined ||
processEnv.CLAUDE_CODE_USE_GEMINI !== undefined ||
processEnv.CLAUDE_CODE_USE_MISTRAL !== undefined ||
processEnv.CLAUDE_CODE_USE_GITHUB !== undefined ||
processEnv.CLAUDE_CODE_USE_BEDROCK !== undefined ||
processEnv.CLAUDE_CODE_USE_VERTEX !== undefined ||
@@ -319,6 +320,7 @@ function isProcessEnvAlignedWithProfile(
return (
processEnv.CLAUDE_CODE_USE_OPENAI !== undefined &&
processEnv.CLAUDE_CODE_USE_GEMINI === undefined &&
processEnv.CLAUDE_CODE_USE_MISTRAL === undefined &&
processEnv.CLAUDE_CODE_USE_GITHUB === undefined &&
processEnv.CLAUDE_CODE_USE_BEDROCK === undefined &&
processEnv.CLAUDE_CODE_USE_VERTEX === undefined &&
@@ -347,6 +349,7 @@ export function clearProviderProfileEnvFromProcessEnv(
): void {
delete processEnv.CLAUDE_CODE_USE_OPENAI
delete processEnv.CLAUDE_CODE_USE_GEMINI
delete processEnv.CLAUDE_CODE_USE_MISTRAL
delete processEnv.CLAUDE_CODE_USE_GITHUB
delete processEnv.CLAUDE_CODE_USE_BEDROCK
delete processEnv.CLAUDE_CODE_USE_VERTEX

View File

@@ -250,6 +250,8 @@ export function buildAPIProviderProperties(): Property[] {
openai: 'OpenAI-compatible',
codex: 'Codex',
gemini: 'Google Gemini',
github: 'GitHub Models',
mistral: 'Mistral',
}[apiProvider];
properties.push({
label: 'API provider',
@@ -394,6 +396,21 @@ export function buildAPIProviderProperties(): Property[] {
value: redactSecretValueForDisplay(geminiModel, process.env) ?? geminiModel
});
}
} else if (apiProvider === 'mistral') {
const mistralBaseUrl = process.env.MISTRAL_BASE_URL;
if (mistralBaseUrl) {
properties.push({
label: 'Mistral base URL',
value: redactSecretValueForDisplay(mistralBaseUrl, process.env) ?? mistralBaseUrl
})
}
const mistralModel = process.env.MISTRAL_MODEL;
if (mistralModel) {
properties.push({
label: 'Model',
value: redactSecretValueForDisplay(mistralModel, process.env) ?? mistralModel
})
}
}
const proxyUrl = getProxyUrl();
if (proxyUrl) {

View File

@@ -101,6 +101,7 @@ const TEAMMATE_ENV_VARS = [
'CLAUDE_CODE_USE_FOUNDRY',
'CLAUDE_CODE_USE_GITHUB',
'CLAUDE_CODE_USE_GEMINI',
'CLAUDE_CODE_USE_MISTRAL',
'CLAUDE_CODE_USE_OPENAI',
'GITHUB_TOKEN',
'GH_TOKEN',
@@ -111,6 +112,9 @@ const TEAMMATE_ENV_VARS = [
'GEMINI_BASE_URL',
'GEMINI_MODEL',
'GOOGLE_API_KEY',
'MISTRAL_API_KEY',
'MISTRAL_MODEL',
'MISTRAL_BASE_URL',
// Custom API endpoint
'ANTHROPIC_BASE_URL',
// Config directory override