@@ -193,12 +193,15 @@ export abstract class BaseLLM implements ILLM {
 
   isFromAutoDetect?: boolean;
 
+  lastRequestId: string | undefined;
+
   private _llmOptions: LLMOptions;
 
   protected openaiAdapter?: BaseLlmApi;
 
   constructor(_options: LLMOptions) {
     this._llmOptions = _options;
+    this.lastRequestId = undefined;
 
     // Set default options
     const options = {
@@ -594,6 +597,7 @@ export abstract class BaseLLM implements ILLM {
     signal: AbortSignal,
     options: LLMFullCompletionOptions = {},
   ): AsyncGenerator<string> {
+    this.lastRequestId = undefined;
     const { completionOptions, logEnabled } =
       this._parseCompletionOptions(options);
     const interaction = logEnabled
@@ -623,6 +627,9 @@ export abstract class BaseLLM implements ILLM {
         signal,
       );
       for await (const chunk of stream) {
+        if (!this.lastRequestId && typeof (chunk as any).id === "string") {
+          this.lastRequestId = (chunk as any).id;
+        }
         const result = fromChatCompletionChunk(chunk);
         if (result) {
           const content = renderChatMessage(result);
@@ -706,6 +713,7 @@ export abstract class BaseLLM implements ILLM {
     signal: AbortSignal,
     options: LLMFullCompletionOptions = {},
   ) {
+    this.lastRequestId = undefined;
     const { completionOptions, logEnabled, raw } =
       this._parseCompletionOptions(options);
     const interaction = logEnabled
@@ -745,6 +753,7 @@ export abstract class BaseLLM implements ILLM {
           { ...toCompleteBody(prompt, completionOptions), stream: false },
           signal,
         );
+        this.lastRequestId = response.id ?? this.lastRequestId;
         completion = response.choices[0]?.text ?? "";
         yield completion;
       } else {
@@ -756,6 +765,9 @@ export abstract class BaseLLM implements ILLM {
           },
           signal,
         )) {
+          if (!this.lastRequestId && typeof (chunk as any).id === "string") {
+            this.lastRequestId = (chunk as any).id;
+          }
           const content = chunk.choices[0]?.text ?? "";
           completion += content;
           interaction?.logItem({
@@ -835,6 +847,7 @@ export abstract class BaseLLM implements ILLM {
     signal: AbortSignal,
     options: LLMFullCompletionOptions = {},
   ) {
+    this.lastRequestId = undefined;
     const { completionOptions, logEnabled, raw } =
       this._parseCompletionOptions(options);
     const interaction = logEnabled
@@ -876,6 +889,7 @@ export abstract class BaseLLM implements ILLM {
           },
           signal,
         );
+        this.lastRequestId = result.id ?? this.lastRequestId;
         completion = result.choices[0].text;
       } else {
         completion = await this._complete(prompt, signal, completionOptions);
@@ -985,6 +999,7 @@ export abstract class BaseLLM implements ILLM {
     options: LLMFullCompletionOptions = {},
     messageOptions?: MessageOption,
   ): AsyncGenerator<ChatMessage, PromptLog> {
+    this.lastRequestId = undefined;
     let { completionOptions, logEnabled } =
       this._parseCompletionOptions(options);
     const interaction = logEnabled
@@ -1054,6 +1069,7 @@ export abstract class BaseLLM implements ILLM {
           { ...body, stream: false },
           signal,
         );
+        this.lastRequestId = response.id ?? this.lastRequestId;
         const msg = fromChatResponse(response);
         yield msg;
         completion = this._formatChatMessage(msg);
@@ -1071,6 +1087,12 @@ export abstract class BaseLLM implements ILLM {
           signal,
         );
         for await (const chunk of stream) {
+          if (
+            !this.lastRequestId &&
+            typeof (chunk as any).id === "string"
+          ) {
+            this.lastRequestId = (chunk as any).id;
+          }
           const result = fromChatCompletionChunk(chunk);
           if (result) {
             completion += this._formatChatMessage(result);
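
For context, a minimal sketch of how a caller might consume the new field after a streamed completion. This is not part of the diff: the helper name `completeWithRequestId` is hypothetical, and the `streamComplete(prompt, signal)` call shape is inferred from the signatures visible in the hunks above.

// Minimal sketch, assuming `llm` is any concrete BaseLLM subclass and that
// the AsyncGenerator<string> method shown at line 597 is streamComplete.
async function completeWithRequestId(llm: BaseLLM, prompt: string) {
  const abortController = new AbortController();
  let output = "";
  for await (const chunk of llm.streamComplete(prompt, abortController.signal)) {
    output += chunk;
  }
  // lastRequestId is reset to undefined at the start of each call and set
  // from the first response or chunk carrying a string `id`, so it now
  // holds the provider-assigned request ID, if the provider sent one.
  return { output, requestId: llm.lastRequestId };
}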