@@ -18,26 +18,14 @@ import {
   Thread
 } from '..';
 
-const openaiReqs: Record<
-  string,
-  {
-    // Record the ID of the request
-    id: string;
-    // Record the start time of the request
-    start: number;
-    // Record the inputs of the request
-    inputs: Record<string, any>;
-    // Record the stream of the request if it's a streaming request
-    stream?: Stream<ChatCompletionChunk | Completion>;
-  }
-> = {};
-
 // Define a generic type for the original function to be wrapped
 type OriginalFunction<T extends any[], R> = (...args: T) => Promise<R>;
 
 // Utility function to wrap a method
 function wrapFunction<T extends any[], R>(
-  originalFunction: OriginalFunction<T, R>
+  originalFunction: OriginalFunction<T, R>,
+  client: LiteralClient,
+  options: InstrumentOpenAIOptions = {}
 ): OriginalFunction<T, R> {
   return async function (this: any, ...args: T): Promise<R> {
     const start = Date.now();
@@ -46,58 +34,57 @@ function wrapFunction<T extends any[], R>(
     const result = await originalFunction.apply(this, args);
 
     if (result instanceof Stream) {
-      const streamResult = result as Stream<ChatCompletionChunk | Completion>;
-      // If it is a streaming request, we need to process the first token to get the id
-      // However we also need to tee the stream so that the end developer can process the stream
-      const [a, b] = streamResult.tee();
-      // Re split the stream to store a clean instance for final processing later on
-      const c = a.tee()[0];
-      let id;
-      // Iterate over the stream to find the first chunk and store the id
-      for await (const chunk of a) {
-        id = chunk.id;
-        if (!openaiReqs[id]) {
-          openaiReqs[id] = {
-            id,
-            inputs: args[0],
-            start,
-            stream: c
-          };
-          break;
-        }
-      }
-      // @ts-expect-error Hacky way to add the id to the stream
-      b.id = id;
+      const streamResult = result;
+      const [returnedResult, processedResult] = streamResult.tee();
+
+      await processOpenAIOutput(client, processedResult, {
+        ...options,
+        start,
+        inputs: args[0]
+      });
 
-      return b as any;
+      return returnedResult as R;
     } else {
-      const regularResult = result as ChatCompletion | Completion;
-      const id = regularResult.id;
-      openaiReqs[id] = {
-        id,
-        inputs: args[0],
-        start
-      };
+      await processOpenAIOutput(client, result as ChatCompletion | Completion, {
+        ...options,
+        start,
+        inputs: args[0]
+      });
+
       return result;
     }
   };
 }
 
-// Patching the chat.completions.create function
-const originalChatCompletionsCreate = OpenAI.Chat.Completions.prototype.create;
-OpenAI.Chat.Completions.prototype.create = wrapFunction(
-  originalChatCompletionsCreate
-) as any;
-
-// Patching the completions.create function
-const originalCompletionsCreate = OpenAI.Completions.prototype.create;
-OpenAI.Completions.prototype.create = wrapFunction(
-  originalCompletionsCreate
-) as any;
-
-// Patching the completions.create function
-const originalImagesGenerate = OpenAI.Images.prototype.generate;
-OpenAI.Images.prototype.generate = wrapFunction(originalImagesGenerate) as any;
+function instrumentOpenAI(
+  client: LiteralClient,
+  options: InstrumentOpenAIOptions = {}
+) {
+  // Patching the chat.completions.create function
+  const originalChatCompletionsCreate =
+    OpenAI.Chat.Completions.prototype.create;
+  OpenAI.Chat.Completions.prototype.create = wrapFunction(
+    originalChatCompletionsCreate,
+    client,
+    options
+  ) as any;
+
+  // Patching the completions.create function
+  const originalCompletionsCreate = OpenAI.Completions.prototype.create;
+  OpenAI.Completions.prototype.create = wrapFunction(
+    originalCompletionsCreate,
+    client,
+    options
+  ) as any;
+
+  // Patching the images.generate function
+  const originalImagesGenerate = OpenAI.Images.prototype.generate;
+  OpenAI.Images.prototype.generate = wrapFunction(
+    originalImagesGenerate,
+    client,
+    options
+  ) as any;
+}
 
 function processChatDelta(
   newDelta: ChatCompletionChunk.Choice.Delta,
@@ -296,22 +283,49 @@ export interface InstrumentOpenAIOptions {
   tags?: Maybe<string[]>;
 }
 
-const instrumentOpenAI = async (
+export interface ProcessOpenAIOutput extends InstrumentOpenAIOptions {
+  start: number;
+  inputs: Record<string, any>;
+}
+
+function isStream(obj: any): boolean {
+  return (
+    obj !== null &&
+    typeof obj === 'object' &&
+    typeof obj.pipe === 'function' &&
+    typeof obj.on === 'function' &&
+    typeof obj.read === 'function'
+  );
+}
+
+const processOpenAIOutput = async (
   client: LiteralClient,
   output: OpenAIOutput,
-  parent?: Step | Thread,
-  options: InstrumentOpenAIOptions = {}
+  { start, tags, inputs }: ProcessOpenAIOutput
 ) => {
-  //@ts-expect-error - This is a hacky way to get the id from the stream
-  const outputId = output.id;
-  const { stream, start, inputs } = openaiReqs[outputId];
   const baseGeneration = {
     provider: 'openai',
     model: inputs.model,
     settings: getSettings(inputs),
-    tags: options.tags
+    tags: tags
   };
 
+  let threadFromStore: Thread | null = null;
+  try {
+    threadFromStore = client.getCurrentThread();
+  } catch (error) {
+    // Ignore error thrown if getCurrentThread is called outside of a context
+  }
+
+  let stepFromStore: Step | null = null;
+  try {
+    stepFromStore = client.getCurrentStep();
+  } catch (error) {
+    // Ignore error thrown if getCurrentStep is called outside of a context
+  }
+
+  const parent = stepFromStore || threadFromStore;
+
   if ('data' in output) {
     // Image Generation
 
@@ -322,14 +336,16 @@ const instrumentOpenAI = async (
       output: output,
       startTime: new Date(start).toISOString(),
       endTime: new Date().toISOString(),
-      tags: options.tags
+      tags: tags
     };
 
     const step = parent
       ? parent.step(stepData)
       : client.step({ ...stepData, type: 'run' });
     await step.send();
-  } else if (output instanceof Stream) {
+  } else if (output instanceof Stream || isStream(output)) {
+    const stream = output as Stream<ChatCompletionChunk | Completion>;
+
     if (!stream) {
       throw new Error('Stream not found');
     }
@@ -460,8 +476,6 @@ const instrumentOpenAI = async (
       }
     }
   }
-
-  delete openaiReqs[outputId];
 };
 
 export default instrumentOpenAI;
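
Usage sketch: with the module-level prototype patching removed, instrumentation is now opt-in through the exported instrumentOpenAI(client, options) function, and the tags value from InstrumentOpenAIOptions flows into every logged generation. The snippet below is a minimal illustration, not part of this diff; the package name, import paths, and the argument-less LiteralClient constructor are assumptions made for the example.

import OpenAI from 'openai';
import { LiteralClient } from '@literalai/client'; // assumed package name
import instrumentOpenAI from './openai'; // assumed path to this instrumentation module

// Patch the OpenAI prototypes once at startup; every subsequent
// chat.completions.create, completions.create and images.generate call
// is forwarded to processOpenAIOutput with the configured options.
const literalClient = new LiteralClient(); // constructor arguments are an assumption
instrumentOpenAI(literalClient, { tags: ['openai-instrumentation'] });

async function main() {
  const openai = new OpenAI();

  // The returned value is untouched: streaming responses are tee'd so the
  // caller still consumes the stream while a copy is processed for logging.
  const completion = await openai.chat.completions.create({
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Hello!' }]
  });

  console.log(completion.choices[0].message.content);
}

main();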