diff --git a/src/components/CostThresholdDialog.tsx b/src/components/CostThresholdDialog.tsx
index 59c2c582..7c21becd 100644
--- a/src/components/CostThresholdDialog.tsx
+++ b/src/components/CostThresholdDialog.tsx
@@ -1,50 +1,53 @@
-import { c as _c } from "react-compiler-runtime";
-import React from 'react';
-import { Box, Link, Text } from '../ink.js';
-import { Select } from './CustomSelect/index.js';
-import { Dialog } from './design-system/Dialog.js';
+import React from 'react'
+import { Box, Link, Text } from '../ink.js'
+import { Select } from './CustomSelect/index.js'
+import { Dialog } from './design-system/Dialog.js'
+import { getAPIProvider } from '../utils/model/providers.js'
+
type Props = {
- onDone: () => void;
-};
-export function CostThresholdDialog(t0) {
- const $ = _c(7);
- const {
- onDone
- } = t0;
- let t1;
- if ($[0] === Symbol.for("react.memo_cache_sentinel")) {
-    t1 = <Box flexDirection="column"><Text>Learn more about how to monitor your spending:</Text><Link url="https://code.claude.com/docs/en/costs" /></Box>;
- $[0] = t1;
- } else {
- t1 = $[0];
- }
- let t2;
- if ($[1] === Symbol.for("react.memo_cache_sentinel")) {
- t2 = [{
- value: "ok",
- label: "Got it, thanks!"
- }];
- $[1] = t2;
- } else {
- t2 = $[1];
- }
- let t3;
- if ($[2] !== onDone) {
-    t3 = <Select options={t2} onChange={onDone} />;
- $[2] = onDone;
- $[3] = t3;
- } else {
- t3 = $[3];
- }
- let t4;
- if ($[4] !== onDone || $[5] !== t3) {
-    t4 = <Dialog title="You've spent $5 on the Anthropic API this session." onCancel={onDone}>{t1}{t3}</Dialog>;
- $[4] = onDone;
- $[5] = t3;
- $[6] = t4;
- } else {
- t4 = $[6];
- }
- return t4;
+ onDone: () => void
+}
+
+function getProviderLabel(): string {
+ const provider = getAPIProvider()
+ switch (provider) {
+ case 'firstParty':
+ return 'Anthropic API'
+ case 'bedrock':
+ return 'AWS Bedrock'
+ case 'vertex':
+ return 'Google Vertex'
+ case 'foundry':
+ return 'Azure Foundry'
+ case 'openai':
+ return 'OpenAI-compatible API'
+ case 'gemini':
+ return 'Gemini API'
+ default:
+ return 'API'
+ }
+}
+
+export function CostThresholdDialog({ onDone }: Props): React.ReactNode {
+ const providerLabel = getProviderLabel()
+ return (
+    <Dialog
+      title={`You've spent $5 on the ${providerLabel} this session.`}
+      onCancel={onDone}
+    >
+      <Box flexDirection="column">
+        <Text>Learn more about how to monitor your spending:</Text>
+        <Link url="https://code.claude.com/docs/en/costs" />
+      </Box>
+      <Select
+        options={[
+          {
+            value: 'ok',
+            label: 'Got it, thanks!',
+          },
+        ]}
+        onChange={onDone}
+      />
+    </Dialog>
+ )
}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJSZWFjdCIsIkJveCIsIkxpbmsiLCJUZXh0IiwiU2VsZWN0IiwiRGlhbG9nIiwiUHJvcHMiLCJvbkRvbmUiLCJDb3N0VGhyZXNob2xkRGlhbG9nIiwidDAiLCIkIiwiX2MiLCJ0MSIsIlN5bWJvbCIsImZvciIsInQyIiwidmFsdWUiLCJsYWJlbCIsInQzIiwidDQiXSwic291cmNlcyI6WyJDb3N0VGhyZXNob2xkRGlhbG9nLnRzeCJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgUmVhY3QgZnJvbSAncmVhY3QnXG5pbXBvcnQgeyBCb3gsIExpbmssIFRleHQgfSBmcm9tICcuLi9pbmsuanMnXG5pbXBvcnQgeyBTZWxlY3QgfSBmcm9tICcuL0N1c3RvbVNlbGVjdC9pbmRleC5qcydcbmltcG9ydCB7IERpYWxvZyB9IGZyb20gJy4vZGVzaWduLXN5c3RlbS9EaWFsb2cuanMnXG5cbnR5cGUgUHJvcHMgPSB7XG4gIG9uRG9uZTogKCkgPT4gdm9pZFxufVxuXG5leHBvcnQgZnVuY3Rpb24gQ29zdFRocmVzaG9sZERpYWxvZyh7IG9uRG9uZSB9OiBQcm9wcyk6IFJlYWN0LlJlYWN0Tm9kZSB7XG4gIHJldHVybiAoXG4gICAgPERpYWxvZ1xuICAgICAgdGl0bGU9XCJZb3UndmUgc3BlbnQgJDUgb24gdGhlIEFudGhyb3BpYyBBUEkgdGhpcyBzZXNzaW9uLlwiXG4gICAgICBvbkNhbmNlbD17b25Eb25lfVxuICAgID5cbiAgICAgIDxCb3ggZmxleERpcmVjdGlvbj1cImNvbHVtblwiPlxuICAgICAgICA8VGV4dD5MZWFybiBtb3JlIGFib3V0IGhvdyB0byBtb25pdG9yIHlvdXIgc3BlbmRpbmc6PC9UZXh0PlxuICAgICAgICA8TGluayB1cmw9XCJodHRwczovL2NvZGUuY2xhdWRlLmNvbS9kb2NzL2VuL2Nvc3RzXCIgLz5cbiAgICAgIDwvQm94PlxuICAgICAgPFNlbGVjdFxuICAgICAgICBvcHRpb25zPXtbXG4gICAgICAgICAge1xuICAgICAgICAgICAgdmFsdWU6ICdvaycsXG4gICAgICAgICAgICBsYWJlbDogJ0dvdCBpdCwgdGhhbmtzIScsXG4gICAgICAgICAgfSxcbiAgICAgICAgXX1cbiAgICAgICAgb25DaGFuZ2U9e29uRG9uZX1cbiAgICAgIC8+XG4gICAgPC9EaWFsb2c+XG4gIClcbn1cbiJdLCJtYXBwaW5ncyI6IjtBQUFBLE9BQU9BLEtBQUssTUFBTSxPQUFPO0FBQ3pCLFNBQVNDLEdBQUcsRUFBRUMsSUFBSSxFQUFFQyxJQUFJLFFBQVEsV0FBVztBQUMzQyxTQUFTQyxNQUFNLFFBQVEseUJBQXlCO0FBQ2hELFNBQVNDLE1BQU0sUUFBUSwyQkFBMkI7QUFFbEQsS0FBS0MsS0FBSyxHQUFHO0VBQ1hDLE1BQU0sRUFBRSxHQUFHLEdBQUcsSUFBSTtBQUNwQixDQUFDO0FBRUQsT0FBTyxTQUFBQyxvQkFBQUMsRUFBQTtFQUFBLE1BQUFDLENBQUEsR0FBQUMsRUFBQTtFQUE2QjtJQUFBSjtFQUFBLElBQUFFLEVBQWlCO0VBQUEsSUFBQUcsRUFBQTtFQUFBLElBQUFGLENBQUEsUUFBQUcsTUFBQSxDQUFBQyxHQUFBO0lBTS9DRixFQUFBLElBQUMsR0FBRyxDQUFlLGFBQVEsQ0FBUixRQUFRLENBQ3pCLENBQUMsSUFBSSxDQUFDLDhDQUE
4QyxFQUFuRCxJQUFJLENBQ0wsQ0FBQyxJQUFJLENBQUssR0FBdUMsQ0FBdkMsdUNBQXVDLEdBQ25ELEVBSEMsR0FBRyxDQUdFO0lBQUFGLENBQUEsTUFBQUUsRUFBQTtFQUFBO0lBQUFBLEVBQUEsR0FBQUYsQ0FBQTtFQUFBO0VBQUEsSUFBQUssRUFBQTtFQUFBLElBQUFMLENBQUEsUUFBQUcsTUFBQSxDQUFBQyxHQUFBO0lBRUtDLEVBQUEsSUFDUDtNQUFBQyxLQUFBLEVBQ1MsSUFBSTtNQUFBQyxLQUFBLEVBQ0o7SUFDVCxDQUFDLENBQ0Y7SUFBQVAsQ0FBQSxNQUFBSyxFQUFBO0VBQUE7SUFBQUEsRUFBQSxHQUFBTCxDQUFBO0VBQUE7RUFBQSxJQUFBUSxFQUFBO0VBQUEsSUFBQVIsQ0FBQSxRQUFBSCxNQUFBO0lBTkhXLEVBQUEsSUFBQyxNQUFNLENBQ0ksT0FLUixDQUxRLENBQUFILEVBS1QsQ0FBQyxDQUNTUixRQUFNLENBQU5BLE9BQUssQ0FBQyxHQUNoQjtJQUFBRyxDQUFBLE1BQUFILE1BQUE7SUFBQUcsQ0FBQSxNQUFBUSxFQUFBO0VBQUE7SUFBQUEsRUFBQSxHQUFBUixDQUFBO0VBQUE7RUFBQSxJQUFBUyxFQUFBO0VBQUEsSUFBQVQsQ0FBQSxRQUFBSCxNQUFBLElBQUFHLENBQUEsUUFBQVEsRUFBQTtJQWhCSkMsRUFBQSxJQUFDLE1BQU0sQ0FDQyxLQUFvRCxDQUFwRCxvREFBb0QsQ0FDaERaLFFBQU0sQ0FBTkEsT0FBSyxDQUFDLENBRWhCLENBQUFLLEVBR0ssQ0FDTCxDQUFBTSxFQVFDLENBQ0gsRUFqQkMsTUFBTSxDQWlCRTtJQUFBUixDQUFBLE1BQUFILE1BQUE7SUFBQUcsQ0FBQSxNQUFBUSxFQUFBO0lBQUFSLENBQUEsTUFBQVMsRUFBQTtFQUFBO0lBQUFBLEVBQUEsR0FBQVQsQ0FBQTtFQUFBO0VBQUEsT0FqQlRTLEVBaUJTO0FBQUEiLCJpZ25vcmVMaXN0IjpbXX0=
\ No newline at end of file
diff --git a/src/services/api/openaiShim.ts b/src/services/api/openaiShim.ts
index 9a500490..0d5bde28 100644
--- a/src/services/api/openaiShim.ts
+++ b/src/services/api/openaiShim.ts
@@ -235,20 +235,60 @@ function normalizeSchemaForOpenAI(
   schema: Record<string, unknown>,
   strict = true,
 ): Record<string, unknown> {
- if (schema.type !== 'object' || !schema.properties) return schema
-  const properties = schema.properties as Record<string, unknown>
- const existingRequired = Array.isArray(schema.required) ? schema.required as string[] : []
- // OpenAI strict mode requires every property to be listed in required[].
- // Gemini rejects schemas where required[] contains keys absent from properties,
- // so only promote keys that actually exist in properties.
- if (strict) {
- const allKeys = Object.keys(properties)
- const required = Array.from(new Set([...existingRequired, ...allKeys]))
- return { ...schema, required }
+ if (!schema || typeof schema !== 'object' || Array.isArray(schema)) {
+    return (schema ?? {}) as Record<string, unknown>
}
- // For Gemini: keep only existing required keys that are present in properties
- const required = existingRequired.filter(k => k in properties)
- return { ...schema, required }
+
+ const record = { ...schema }
+
+ if (record.type === 'object' && record.properties) {
+    const properties = record.properties as Record<string, Record<string, unknown>>
+ const existingRequired = Array.isArray(record.required) ? record.required as string[] : []
+
+ // Recurse into each property
+    const normalizedProps: Record<string, unknown> = {}
+ for (const [key, value] of Object.entries(properties)) {
+ normalizedProps[key] = normalizeSchemaForOpenAI(
+        value as Record<string, unknown>,
+ strict,
+ )
+ }
+ record.properties = normalizedProps
+
+ if (strict) {
+ // OpenAI strict mode requires every property to be listed in required[]
+ const allKeys = Object.keys(normalizedProps)
+ record.required = Array.from(new Set([...existingRequired, ...allKeys]))
+ // OpenAI strict mode requires additionalProperties: false on all object
+ // schemas — override unconditionally to ensure nested objects comply.
+ record.additionalProperties = false
+ } else {
+ // For Gemini: keep only existing required keys that are present in properties
+ record.required = existingRequired.filter(k => k in normalizedProps)
+ }
+ }
+
+ // Recurse into array items
+ if ('items' in record) {
+ if (Array.isArray(record.items)) {
+ record.items = (record.items as unknown[]).map(
+        item => normalizeSchemaForOpenAI(item as Record<string, unknown>, strict),
+ )
+ } else {
+      record.items = normalizeSchemaForOpenAI(record.items as Record<string, unknown>, strict)
+ }
+ }
+
+ // Recurse into combinators
+ for (const key of ['anyOf', 'oneOf', 'allOf'] as const) {
+ if (key in record && Array.isArray(record[key])) {
+ record[key] = (record[key] as unknown[]).map(
+        item => normalizeSchemaForOpenAI(item as Record<string, unknown>, strict),
+ )
+ }
+ }
+
+ return record
}
function convertTools(
@@ -374,15 +414,16 @@ async function* openaiStreamToAnthropic(
const decoder = new TextDecoder()
let buffer = ''
- while (true) {
- const { done, value } = await reader.read()
- if (done) break
+ try {
+ while (true) {
+ const { done, value } = await reader.read()
+ if (done) break
- buffer += decoder.decode(value, { stream: true })
- const lines = buffer.split('\n')
- buffer = lines.pop() ?? ''
+ buffer += decoder.decode(value, { stream: true })
+ const lines = buffer.split('\n')
+ buffer = lines.pop() ?? ''
- for (const line of lines) {
+ for (const line of lines) {
const trimmed = line.trim()
if (!trimmed || trimmed === 'data: [DONE]') continue
if (!trimmed.startsWith('data: ')) continue
@@ -528,6 +569,9 @@ async function* openaiStreamToAnthropic(
hasEmittedFinalUsage = true
}
}
+ }
+ } finally {
+ reader.releaseLock()
}
yield { type: 'message_stop' }