fix: restore default context window for unknown 3p models (#494)
* fix: restore default context window for unknown 3p models
* fix: add MiniMax context metadata
This commit is contained in:
@@ -94,3 +94,22 @@ test('gpt-5.4 family keeps large max output overrides within provider limits', (
|
||||
expect(getMaxOutputTokensForModel('gpt-5.4-mini')).toBe(128_000)
|
||||
expect(getMaxOutputTokensForModel('gpt-5.4-nano')).toBe(128_000)
|
||||
})
|
||||
|
||||
// MiniMax-M2.7 carries explicit provider metadata: a 204,800-token context
// window and a 131,072-token output cap, so neither the generic fallback
// window nor the env-var override path should apply here.
test('MiniMax-M2.7 uses explicit provider-specific context and output caps', () => {
  process.env.CLAUDE_CODE_USE_OPENAI = '1'
  delete process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS

  const model = 'MiniMax-M2.7'
  const outputCap = 131_072

  expect(getContextWindowForModel(model)).toBe(204_800)
  expect(getModelMaxOutputTokens(model)).toEqual({
    default: outputCap,
    upperLimit: outputCap,
  })
  expect(getMaxOutputTokensForModel(model)).toBe(outputCap)
})
|
||||
|
||||
// A model id with no metadata entry must fall back to the conservative
// default window rather than inheriting any other model's limits.
test('unknown openai-compatible models still use the conservative fallback window', () => {
  process.env.CLAUDE_CODE_USE_OPENAI = '1'
  delete process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS

  const fallbackWindow = getContextWindowForModel('some-unknown-3p-model')
  expect(fallbackWindow).toBe(8_000)
})
|
||||
|
||||
Reference in New Issue
Block a user