Merge pull request #153 from auriti/fix/report-openai-cached-tokens

fix: report cached tokens from OpenAI prompt_tokens_details
Author: Kevin Codex
Date: 2026-04-02 21:41:47 +08:00
Committed by: GitHub


@@ -382,6 +382,9 @@ interface OpenAIStreamChunk {
     prompt_tokens?: number
     completion_tokens?: number
     total_tokens?: number
+    prompt_tokens_details?: {
+      cached_tokens?: number
+    }
   }
 }
@@ -398,7 +401,7 @@ function convertChunkUsage(
     input_tokens: usage.prompt_tokens ?? 0,
     output_tokens: usage.completion_tokens ?? 0,
     cache_creation_input_tokens: 0,
-    cache_read_input_tokens: 0,
+    cache_read_input_tokens: usage.prompt_tokens_details?.cached_tokens ?? 0,
   }
 }
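
To illustrate the streaming change, here is a minimal self-contained sketch of the conversion after this patch. The shapes are assumptions for illustration: the real convertChunkUsage may take additional parameters, and OpenAIUsage here is just the usage fragment from the interface above.

// Sketch of the streaming usage conversion after this change (assumed shapes).
interface OpenAIUsage {
  prompt_tokens?: number
  completion_tokens?: number
  total_tokens?: number
  prompt_tokens_details?: {
    cached_tokens?: number
  }
}

function convertChunkUsage(usage: OpenAIUsage) {
  return {
    input_tokens: usage.prompt_tokens ?? 0,
    output_tokens: usage.completion_tokens ?? 0,
    cache_creation_input_tokens: 0, // OpenAI does not report cache writes
    cache_read_input_tokens: usage.prompt_tokens_details?.cached_tokens ?? 0,
  }
}

// Example: a chunk reporting 1200 prompt tokens, 1024 of them served from
// the prompt cache, now surfaces the cache hit instead of always reporting 0.
convertChunkUsage({
  prompt_tokens: 1200,
  completion_tokens: 50,
  prompt_tokens_details: { cached_tokens: 1024 },
})
// => { input_tokens: 1200, output_tokens: 50,
//      cache_creation_input_tokens: 0, cache_read_input_tokens: 1024 }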
@@ -944,6 +947,9 @@ class OpenAIShimMessages {
     usage?: {
       prompt_tokens?: number
       completion_tokens?: number
+      prompt_tokens_details?: {
+        cached_tokens?: number
+      }
     }
   },
   model: string,
@@ -1009,7 +1015,7 @@ class OpenAIShimMessages {
       input_tokens: data.usage?.prompt_tokens ?? 0,
       output_tokens: data.usage?.completion_tokens ?? 0,
       cache_creation_input_tokens: 0,
-      cache_read_input_tokens: 0,
+      cache_read_input_tokens: data.usage?.prompt_tokens_details?.cached_tokens ?? 0,
     },
   }
 }
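
The non-streaming path in OpenAIShimMessages applies the same mapping. One property worth noting, shown with the hypothetical convertChunkUsage sketch above: the optional chaining plus the ?? fallback means providers or models that omit prompt_tokens_details entirely keep the previous behavior of reporting 0.

// With no prompt_tokens_details, the fallback reproduces the old behavior.
const usage = convertChunkUsage({ prompt_tokens: 300, completion_tokens: 20 })
console.log(usage.cache_read_input_tokens) // 0

One caveat for consumers of these numbers: OpenAI's cached_tokens is a subset of prompt_tokens, whereas Anthropic-style usage reports cache reads separately from input_tokens, so the same prompt can appear to cost more through this shim if the two fields are simply added together.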