import { afterEach, beforeEach, expect, test } from 'bun:test'

import { createOpenAIShimClient } from './openaiShim.ts'

// Alias of the global fetch signature so that mock implementations can be
// cast back to exactly what `globalThis.fetch` expects.
type FetchType = typeof globalThis.fetch

// Snapshot of the env vars the shim reads, captured at module load so the
// afterEach hook can restore whatever the surrounding process had set.
const originalEnv = {
  OPENAI_BASE_URL: process.env.OPENAI_BASE_URL,
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
}

// The real fetch implementation, restored after each test swaps in a mock.
const originalFetch = globalThis.fetch
// Minimal structural type for the slice of the Anthropic-style client surface
// exercised by these tests: `client.beta.messages.create(...)` plus the SDK's
// `withResponse()` accessor on the returned promise.
type OpenAIShimClient = {
  beta: {
    messages: {
      create: (
        params: Record<string, unknown>,
        options?: Record<string, unknown>,
      ) => Promise<unknown> & {
        // Mirrors the SDK helper that exposes the raw event stream alongside
        // the response; `data` yields the translated streaming events.
        withResponse: () => Promise<{ data: AsyncIterable<Record<string, unknown>> }>
      }
    }
  }
}
function makeSseResponse(lines: string[]): Response {
|
|
const encoder = new TextEncoder()
|
|
return new Response(
|
|
new ReadableStream({
|
|
start(controller) {
|
|
for (const line of lines) {
|
|
controller.enqueue(encoder.encode(line))
|
|
}
|
|
controller.close()
|
|
},
|
|
}),
|
|
{
|
|
headers: {
|
|
'Content-Type': 'text/event-stream',
|
|
},
|
|
},
|
|
)
|
|
}
|
|
|
|
function makeStreamChunks(chunks: unknown[]): string[] {
|
|
return [
|
|
...chunks.map(chunk => `data: ${JSON.stringify(chunk)}\n\n`),
|
|
'data: [DONE]\n\n',
|
|
]
|
|
}
|
|
|
|
beforeEach(() => {
|
|
process.env.OPENAI_BASE_URL = 'http://example.test/v1'
|
|
process.env.OPENAI_API_KEY = 'test-key'
|
|
})
|
|
|
|
afterEach(() => {
|
|
process.env.OPENAI_BASE_URL = originalEnv.OPENAI_BASE_URL
|
|
process.env.OPENAI_API_KEY = originalEnv.OPENAI_API_KEY
|
|
globalThis.fetch = originalFetch
|
|
})
|
|
|
|
test('preserves usage from final OpenAI stream chunk with empty choices', async () => {
|
|
globalThis.fetch = (async (_input, init) => {
|
|
const url = typeof _input === 'string' ? _input : _input.url
|
|
expect(url).toBe('http://example.test/v1/chat/completions')
|
|
|
|
const body = JSON.parse(String(init?.body))
|
|
expect(body.stream).toBe(true)
|
|
expect(body.stream_options).toEqual({ include_usage: true })
|
|
|
|
const chunks = makeStreamChunks([
|
|
{
|
|
id: 'chatcmpl-1',
|
|
object: 'chat.completion.chunk',
|
|
model: 'fake-model',
|
|
choices: [
|
|
{
|
|
index: 0,
|
|
delta: { role: 'assistant', content: 'hello world' },
|
|
finish_reason: null,
|
|
},
|
|
],
|
|
},
|
|
{
|
|
id: 'chatcmpl-1',
|
|
object: 'chat.completion.chunk',
|
|
model: 'fake-model',
|
|
choices: [
|
|
{
|
|
index: 0,
|
|
delta: {},
|
|
finish_reason: 'stop',
|
|
},
|
|
],
|
|
},
|
|
{
|
|
id: 'chatcmpl-1',
|
|
object: 'chat.completion.chunk',
|
|
model: 'fake-model',
|
|
choices: [],
|
|
usage: {
|
|
prompt_tokens: 123,
|
|
completion_tokens: 45,
|
|
total_tokens: 168,
|
|
},
|
|
},
|
|
])
|
|
|
|
return makeSseResponse(chunks)
|
|
}) as FetchType
|
|
|
|
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
|
|
|
const result = await client.beta.messages
|
|
.create({
|
|
model: 'fake-model',
|
|
system: 'test system',
|
|
messages: [{ role: 'user', content: 'hello' }],
|
|
max_tokens: 64,
|
|
stream: true,
|
|
})
|
|
.withResponse()
|
|
|
|
const events: Array<Record<string, unknown>> = []
|
|
for await (const event of result.data) {
|
|
events.push(event)
|
|
}
|
|
|
|
const usageEvent = events.find(
|
|
event => event.type === 'message_delta' && typeof event.usage === 'object' && event.usage !== null,
|
|
) as { usage?: { input_tokens?: number; output_tokens?: number } } | undefined
|
|
|
|
expect(usageEvent).toBeDefined()
|
|
expect(usageEvent?.usage?.input_tokens).toBe(123)
|
|
expect(usageEvent?.usage?.output_tokens).toBe(45)
|
|
})
|
|
|
|
test('preserves Gemini tool call extra_content in follow-up requests', async () => {
|
|
let requestBody: Record<string, unknown> | undefined
|
|
|
|
globalThis.fetch = (async (_input, init) => {
|
|
requestBody = JSON.parse(String(init?.body))
|
|
|
|
return new Response(
|
|
JSON.stringify({
|
|
id: 'chatcmpl-1',
|
|
model: 'google/gemini-3.1-pro-preview',
|
|
choices: [
|
|
{
|
|
message: {
|
|
role: 'assistant',
|
|
content: 'done',
|
|
},
|
|
finish_reason: 'stop',
|
|
},
|
|
],
|
|
usage: {
|
|
prompt_tokens: 12,
|
|
completion_tokens: 4,
|
|
total_tokens: 16,
|
|
},
|
|
}),
|
|
{
|
|
headers: {
|
|
'Content-Type': 'application/json',
|
|
},
|
|
},
|
|
)
|
|
}) as FetchType
|
|
|
|
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
|
|
|
await client.beta.messages.create({
|
|
model: 'google/gemini-3.1-pro-preview',
|
|
system: 'test system',
|
|
messages: [
|
|
{ role: 'user', content: 'Use Bash' },
|
|
{
|
|
role: 'assistant',
|
|
content: [
|
|
{
|
|
type: 'tool_use',
|
|
id: 'call_1',
|
|
name: 'Bash',
|
|
input: { command: 'pwd' },
|
|
extra_content: {
|
|
google: {
|
|
thought_signature: 'sig-123',
|
|
},
|
|
},
|
|
},
|
|
],
|
|
},
|
|
{
|
|
role: 'user',
|
|
content: [
|
|
{
|
|
type: 'tool_result',
|
|
tool_use_id: 'call_1',
|
|
content: 'D:\\repo',
|
|
},
|
|
],
|
|
},
|
|
],
|
|
max_tokens: 64,
|
|
stream: false,
|
|
})
|
|
|
|
const assistantWithToolCall = (requestBody?.messages as Array<Record<string, unknown>>).find(
|
|
message => Array.isArray(message.tool_calls),
|
|
) as { tool_calls?: Array<Record<string, unknown>> } | undefined
|
|
|
|
expect(assistantWithToolCall?.tool_calls?.[0]).toMatchObject({
|
|
id: 'call_1',
|
|
type: 'function',
|
|
function: {
|
|
name: 'Bash',
|
|
arguments: JSON.stringify({ command: 'pwd' }),
|
|
},
|
|
extra_content: {
|
|
google: {
|
|
thought_signature: 'sig-123',
|
|
},
|
|
},
|
|
})
|
|
})
|
|
|
|
test('preserves Gemini tool call extra_content from streaming chunks', async () => {
|
|
globalThis.fetch = (async (_input, _init) => {
|
|
const chunks = makeStreamChunks([
|
|
{
|
|
id: 'chatcmpl-1',
|
|
object: 'chat.completion.chunk',
|
|
model: 'google/gemini-3.1-pro-preview',
|
|
choices: [
|
|
{
|
|
index: 0,
|
|
delta: {
|
|
role: 'assistant',
|
|
tool_calls: [
|
|
{
|
|
index: 0,
|
|
id: 'function-call-1',
|
|
type: 'function',
|
|
extra_content: {
|
|
google: {
|
|
thought_signature: 'sig-stream',
|
|
},
|
|
},
|
|
function: {
|
|
name: 'Bash',
|
|
arguments: '{"command":"pwd"}',
|
|
},
|
|
},
|
|
],
|
|
},
|
|
finish_reason: null,
|
|
},
|
|
],
|
|
},
|
|
{
|
|
id: 'chatcmpl-1',
|
|
object: 'chat.completion.chunk',
|
|
model: 'google/gemini-3.1-pro-preview',
|
|
choices: [
|
|
{
|
|
index: 0,
|
|
delta: {},
|
|
finish_reason: 'tool_calls',
|
|
},
|
|
],
|
|
},
|
|
])
|
|
|
|
return makeSseResponse(chunks)
|
|
}) as FetchType
|
|
|
|
const client = createOpenAIShimClient({}) as OpenAIShimClient
|
|
|
|
const result = await client.beta.messages
|
|
.create({
|
|
model: 'google/gemini-3.1-pro-preview',
|
|
system: 'test system',
|
|
messages: [{ role: 'user', content: 'Use Bash' }],
|
|
max_tokens: 64,
|
|
stream: true,
|
|
})
|
|
.withResponse()
|
|
|
|
const events: Array<Record<string, unknown>> = []
|
|
for await (const event of result.data) {
|
|
events.push(event)
|
|
}
|
|
|
|
const toolStart = events.find(
|
|
event =>
|
|
event.type === 'content_block_start' &&
|
|
typeof event.content_block === 'object' &&
|
|
event.content_block !== null &&
|
|
(event.content_block as Record<string, unknown>).type === 'tool_use',
|
|
) as { content_block?: Record<string, unknown> } | undefined
|
|
|
|
expect(toolStart?.content_block).toMatchObject({
|
|
type: 'tool_use',
|
|
id: 'function-call-1',
|
|
name: 'Bash',
|
|
extra_content: {
|
|
google: {
|
|
thought_signature: 'sig-stream',
|
|
},
|
|
},
|
|
})
|
|
})
|