From 7ef085c605dbaec88f998fa1dbf9feb5bc4fbdc1 Mon Sep 17 00:00:00 2001
From: Vasanthdev2004
Date: Wed, 1 Apr 2026 19:17:58 +0530
Subject: [PATCH] test: cover deepseek max token limits

---
 package.json              |  2 +-
 src/utils/context.test.ts | 43 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 44 insertions(+), 1 deletion(-)
 create mode 100644 src/utils/context.test.ts

diff --git a/package.json b/package.json
index 7c595fbe..47052352 100644
--- a/package.json
+++ b/package.json
@@ -33,7 +33,7 @@
     "test:provider-recommendation": "node --test --experimental-strip-types src/utils/providerRecommendation.test.ts src/utils/providerProfile.test.ts",
     "typecheck": "tsc --noEmit",
     "smoke": "bun run build && node dist/cli.mjs --version",
-    "test:provider": "bun test src/services/api/*.test.ts",
+    "test:provider": "bun test src/services/api/*.test.ts src/utils/context.test.ts",
     "doctor:runtime": "bun run scripts/system-check.ts",
     "doctor:runtime:json": "bun run scripts/system-check.ts --json",
     "doctor:report": "bun run scripts/system-check.ts --out reports/doctor-runtime.json",
diff --git a/src/utils/context.test.ts b/src/utils/context.test.ts
new file mode 100644
index 00000000..17895b88
--- /dev/null
+++ b/src/utils/context.test.ts
@@ -0,0 +1,43 @@
+import { afterEach, expect, test } from 'bun:test'
+
+import { getMaxOutputTokensForModel } from '../services/api/claude.ts'
+import {
+  getContextWindowForModel,
+  getModelMaxOutputTokens,
+} from './context.ts'
+
+// Snapshot of the env vars these tests mutate, captured at module load.
+const originalEnv = {
+  CLAUDE_CODE_USE_OPENAI: process.env.CLAUDE_CODE_USE_OPENAI,
+  CLAUDE_CODE_MAX_OUTPUT_TOKENS: process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS,
+}
+
+afterEach(() => {
+  // Assigning `undefined` to process.env stores the string "undefined",
+  // so delete keys that were originally unset instead of assigning them.
+  for (const [key, value] of Object.entries(originalEnv)) {
+    if (value === undefined) {
+      delete process.env[key]
+    } else {
+      process.env[key] = value
+    }
+  }
+})
+
+test('deepseek-chat uses provider-specific context and output caps', () => {
+  process.env.CLAUDE_CODE_USE_OPENAI = '1'
+  delete process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS
+
+  expect(getContextWindowForModel('deepseek-chat')).toBe(64_000)
+  expect(getModelMaxOutputTokens('deepseek-chat')).toEqual({
+    default: 8_192,
+    upperLimit: 8_192,
+  })
+  expect(getMaxOutputTokensForModel('deepseek-chat')).toBe(8_192)
+})
+
+test('deepseek-chat clamps oversized max output overrides to the provider limit', () => {
+  process.env.CLAUDE_CODE_USE_OPENAI = '1'
+  process.env.CLAUDE_CODE_MAX_OUTPUT_TOKENS = '32000'
+
+  expect(getMaxOutputTokensForModel('deepseek-chat')).toBe(8_192)
+})