@@ -341,29 +341,47 @@ export class OpenAIApi implements BaseLlmApi {
     }
 
     // Get final usage from stream.usage Promise (finish event has incomplete data)
-    const finalUsage = await stream.usage;
-    if (finalUsage) {
-      const promptTokens =
-        typeof finalUsage.promptTokens === "number"
-          ? finalUsage.promptTokens
-          : 0;
-      const completionTokens =
-        typeof finalUsage.completionTokens === "number"
-          ? finalUsage.completionTokens
-          : 0;
-      const totalTokens =
-        typeof finalUsage.totalTokens === "number"
-          ? finalUsage.totalTokens
-          : promptTokens + completionTokens;
-
-      yield usageChatChunk({
-        model: modifiedBody.model,
-        usage: {
-          prompt_tokens: promptTokens,
-          completion_tokens: completionTokens,
-          total_tokens: totalTokens,
-        },
+    try {
+      const finalUsage = await stream.usage;
+      console.log("[OpenAI Vercel] stream.usage resolved:", {
+        finalUsage,
+        type: typeof finalUsage,
+        keys: finalUsage ? Object.keys(finalUsage) : [],
       });
+
+      if (finalUsage) {
+        const promptTokens =
+          typeof finalUsage.promptTokens === "number"
+            ? finalUsage.promptTokens
+            : 0;
+        const completionTokens =
+          typeof finalUsage.completionTokens === "number"
+            ? finalUsage.completionTokens
+            : 0;
+        const totalTokens =
+          typeof finalUsage.totalTokens === "number"
+            ? finalUsage.totalTokens
+            : promptTokens + completionTokens;
+
+        console.log("[OpenAI Vercel] Emitting usage:", {
+          promptTokens,
+          completionTokens,
+          totalTokens,
+        });
+
+        yield usageChatChunk({
+          model: modifiedBody.model,
+          usage: {
+            prompt_tokens: promptTokens,
+            completion_tokens: completionTokens,
+            total_tokens: totalTokens,
+          },
+        });
+      } else {
+        console.warn("[OpenAI Vercel] stream.usage resolved to falsy value");
+      }
+    } catch (error) {
+      console.error("[OpenAI Vercel] Error awaiting stream.usage:", error);
     }
   }
   async completionNonStream(
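For context, here is a minimal standalone sketch of the same pattern, not the adapter's actual code: stream text with the Vercel AI SDK, drain the text stream, then await the result's usage promise with the same defensive handling the diff adds. It assumes AI SDK v4-style usage fields (promptTokens / completionTokens / totalTokens); the @ai-sdk/openai provider call, the model id, and the prompt are illustrative placeholders.

// Sketch only: assumes AI SDK v4-style usage fields; model id and prompt are placeholders.
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

async function main() {
  const result = await streamText({
    model: openai("gpt-4o-mini"),
    prompt: "Say hello.",
  });

  // Drain the text stream; the usage promise settles once streaming completes.
  for await (const chunk of result.textStream) {
    process.stdout.write(chunk);
  }

  try {
    // Mirror the diff's defensive handling: tolerate missing fields and a rejected promise.
    const usage = await result.usage;
    const promptTokens =
      typeof usage?.promptTokens === "number" ? usage.promptTokens : 0;
    const completionTokens =
      typeof usage?.completionTokens === "number" ? usage.completionTokens : 0;
    const totalTokens =
      typeof usage?.totalTokens === "number"
        ? usage.totalTokens
        : promptTokens + completionTokens;
    console.log({
      prompt_tokens: promptTokens,
      completion_tokens: completionTokens,
      total_tokens: totalTokens,
    });
  } catch (error) {
    console.error("usage promise rejected:", error);
  }
}

main();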