test: cover deepseek max token limits

This commit is contained in:
Vasanthdev2004
2026-04-01 19:17:58 +05:30
parent 00744a814b
commit 7ef085c605
2 changed files with 38 additions and 1 deletions

37
src/utils/context.test.ts Normal file
View File

@@ -0,0 +1,37 @@
import { afterEach, expect, test } from 'bun:test'
import { getMaxOutputTokensForModel } from '../services/api/claude.ts'
import {
getContextWindowForModel,
getModelMaxOutputTokens,
} from './context.ts'
// Snapshot of the env vars these tests mutate, captured at module load so
// afterEach can restore the pre-test state.
const originalEnv = {
  CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
  CLAUDE_CODE_MAX_OUTPUT_TOKENS: process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS,
}

afterEach(() => {
  // Restore each variable. Keys that were originally unset must be deleted:
  // assigning `undefined` to a process.env property stores the literal
  // string "undefined", which would leak truthy env state into later tests.
  for (const [key, value] of Object.entries(originalEnv)) {
    if (value === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = value
    }
  }
})
// With the OpenAI-compatible path enabled and no user override, the
// DeepSeek provider-specific context window and output caps should apply.
test('deepseek-chat uses provider-specific context and output caps', () => {
  process.env.CLAUDE_CODE_USE_OPENAI = '1'
  delete process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS

  const model = 'deepseek-chat'
  expect(getContextWindowForModel(model)).toBe(64_000)

  const caps = getModelMaxOutputTokens(model)
  expect(caps).toEqual({
    default: 8_192,
    upperLimit: 8_192,
  })
  expect(getMaxOutputTokensForModel(model)).toBe(8_192)
})
// An env override larger than DeepSeek's hard cap must be clamped to the
// provider limit rather than passed through verbatim.
test('deepseek-chat clamps oversized max output overrides to the provider limit', () => {
  process.env.CLAUDE_CODE_USE_OPENAI = '1'
  process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS = '32000'

  const clamped = getMaxOutputTokensForModel('deepseek-chat')
  expect(clamped).toBe(8_192)
})