Add DeepSeek V4 flash/pro support and DeepSeek thinking compatibility (#877)
* Add DeepSeek V4 support and thinking compatibility
* Fix DeepSeek profile persistence regression
* Align multi-model handling with openai-multi-model
This commit is contained in:
@@ -105,6 +105,12 @@ export function modelSupportsThinking(model: string): boolean {
|
||||
if (provider === 'foundry' || provider === 'firstParty') {
|
||||
return !canonical.includes('claude-3-')
|
||||
}
|
||||
if (
|
||||
canonical.startsWith('deepseek-v4-') ||
|
||||
canonical === 'deepseek-reasoner'
|
||||
) {
|
||||
return true
|
||||
}
|
||||
// 3P (Bedrock/Vertex): only Opus 4+ and Sonnet 4+
|
||||
return canonical.includes('sonnet-4') || canonical.includes('opus-4')
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user