Files
orcs-code/src/utils/tokenAnalytics.test.ts
ArkhAngelLifeJiggy 268c0398e4 feat: add thinking token extraction (#798)
* feat: add thinking token tracking and historical analytics

- extractThinkingTokens(): separate thinking from output tokens
- TokenUsageTracker class for historical analytics
- Track: cache hit rate, most used model, requests per hour/day
- Analytics: average tokens per request, totals
- Add tests (7 passing)

PR 4B: Features 1.10 + 1.11

* refactor: extract thinking and analytics to separate files

- Create thinkingTokenExtractor.ts with ThinkingTokenAnalyzer
- Create tokenAnalytics.ts with TokenUsageTracker
- Add production-grade methods and tests
- Update test imports
2026-04-21 23:25:12 +08:00

84 lines
2.4 KiB
TypeScript

import { describe, expect, it, beforeEach } from 'bun:test'
import { TokenUsageTracker } from './tokenAnalytics.js'
describe('TokenUsageTracker', () => {
  // Model id reused across fixtures; the literal value matches the original tests.
  const MODEL = 'claude-sonnet-4-5-20250514'
  let tracker: TokenUsageTracker

  beforeEach(() => {
    // Fresh tracker for each test, with room for 100 history entries.
    tracker = new TokenUsageTracker(100)
  })

  it('records token usage', () => {
    tracker.record({
      input_tokens: 1000,
      output_tokens: 500,
      cache_read_input_tokens: 200,
      cache_creation_input_tokens: 100,
      model: MODEL,
    })
    expect(tracker.size).toBe(1)
  })

  it('calculates analytics', () => {
    const samples = [
      { input_tokens: 1000, output_tokens: 500, model: MODEL },
      { input_tokens: 2000, output_tokens: 300, model: MODEL },
    ]
    for (const sample of samples) {
      tracker.record(sample)
    }

    const stats = tracker.getAnalytics()
    expect(stats.totalRequests).toBe(2)
    expect(stats.totalInputTokens).toBe(3000)
    expect(stats.totalOutputTokens).toBe(800)
    expect(stats.averageInputPerRequest).toBe(1500)
    expect(stats.averageOutputPerRequest).toBe(400)
  })

  it('tracks cache hit rate', () => {
    // 500 of the 1500 input tokens are cache reads (~33%); we only assert
    // the rate is positive since the exact formula lives in the tracker.
    tracker.record({
      input_tokens: 1000,
      output_tokens: 500,
      cache_read_input_tokens: 500,
      model: MODEL,
    })
    expect(tracker.getAnalytics().cacheHitRate).toBeGreaterThan(0)
  })

  it('tracks most used model', () => {
    // Two 'sonnet' requests vs one 'opus' request.
    for (const model of ['sonnet', 'sonnet', 'opus']) {
      tracker.record({ input_tokens: 1000, output_tokens: 100, model })
    }
    expect(tracker.getAnalytics().mostUsedModel).toBe('sonnet')
  })

  it('respects max entries limit', () => {
    // Tracker capped at 3 entries; recording 5 must not grow past the cap.
    const capped = new TokenUsageTracker(3)
    const models = ['a', 'b', 'c', 'd', 'e']
    models.forEach((model, i) => {
      capped.record({ input_tokens: i + 1, output_tokens: i + 1, model })
    })
    expect(capped.size).toBe(3)
  })

  it('clears history', () => {
    tracker.record({ input_tokens: 1000, output_tokens: 100, model: 'test' })
    tracker.clear()
    expect(tracker.size).toBe(0)
  })
})