@@ -4,7 +4,8 @@ import {ChatPromptWrapper} from "../ChatPromptWrapper.js";
 import {AbortError} from "../AbortError.js";
 import {GeneralChatPromptWrapper} from "../chatWrappers/GeneralChatPromptWrapper.js";
 import {getChatWrapperByBos} from "../chatWrappers/createChatWrapperByBos.js";
-import {Token} from "../types.js";
+import {ConversationInteraction, Token} from "../types.js";
+import {generateContextTextFromConversationHistory} from "../chatWrappers/generateContextTextFromConversationHistory.js";
 import {LlamaModel} from "./LlamaModel.js";
 import {LlamaContext} from "./LlamaContext.js";
@@ -15,7 +16,10 @@ export type LlamaChatSessionOptions = {
     context: LlamaContext,
     printLLamaSystemInfo?: boolean,
     promptWrapper?: ChatPromptWrapper | "auto",
-    systemPrompt?: string
+    systemPrompt?: string,
+
+    /** Conversation history to load into the context to continue an existing conversation */
+    conversationHistory?: readonly ConversationInteraction[]
 };
 
 export class LlamaChatSession {
@@ -26,17 +30,22 @@ export class LlamaChatSession {
     private _initialized: boolean = false;
     private _lastStopString: string | null = null;
     private _lastStopStringSuffix: string | null = null;
+    private _conversationHistoryToLoad: readonly ConversationInteraction[] | null = null;
     private readonly _ctx: LlamaContext;
 
     public constructor({
         context,
         printLLamaSystemInfo = false,
         promptWrapper = new GeneralChatPromptWrapper(),
-        systemPrompt = defaultChatSystemPrompt
+        systemPrompt = defaultChatSystemPrompt,
+        conversationHistory
     }: LlamaChatSessionOptions) {
         this._ctx = context;
         this._printLLamaSystemInfo = printLLamaSystemInfo;
         this._systemPrompt = systemPrompt;
+        this._conversationHistoryToLoad = (conversationHistory != null && conversationHistory.length > 0)
+            ? conversationHistory
+            : null;
 
         if (promptWrapper === "auto") {
             const chatWrapper = getChatWrapperByBos(context.getBosString());
@@ -76,7 +85,32 @@ export class LlamaChatSession {
         await this.init();
 
         return await withLock(this, "prompt", async () => {
-            const promptText = this._promptWrapper.wrapPrompt(prompt, {
+            let promptText = "";
+
+            if (this._promptIndex == 0 && this._conversationHistoryToLoad != null) {
+                const {text, stopString, stopStringSuffix} =
+                    generateContextTextFromConversationHistory(this._promptWrapper, this._conversationHistoryToLoad, {
+                        systemPrompt: this._systemPrompt,
+                        currentPromptIndex: this._promptIndex,
+                        lastStopString: this._lastStopString,
+                        lastStopStringSuffix: this._promptIndex == 0
+                            ? (
+                                this._ctx.prependBos
+                                    ? this._ctx.getBosString()
+                                    : null
+                            )
+                            : this._lastStopStringSuffix
+                    });
+
+                promptText += text;
+                this._lastStopString = stopString;
+                this._lastStopStringSuffix = stopStringSuffix;
+                this._promptIndex += this._conversationHistoryToLoad.length;
+
+                this._conversationHistoryToLoad = null;
+            }
+
+            promptText += this._promptWrapper.wrapPrompt(prompt, {
                 systemPrompt: this._systemPrompt,
                 promptIndex: this._promptIndex,
                 lastStopString: this._lastStopString,
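For context, a minimal usage sketch of the new conversationHistory option. It assumes the v2-style LlamaModel/LlamaContext constructors imported in this file, and that ConversationInteraction in types.js is a {prompt, response} pair; the model path is hypothetical.

import {LlamaModel, LlamaContext, LlamaChatSession} from "node-llama-cpp";

// Hypothetical model path; point this at a local model file.
const model = new LlamaModel({modelPath: "./models/model.gguf"});
const context = new LlamaContext({model});

// Resume an earlier conversation: on the first prompt() call, the history
// is wrapped by the prompt wrapper and prepended to the context text.
const session = new LlamaChatSession({
    context,
    conversationHistory: [
        {prompt: "What is the capital of France?", response: "Paris."}
    ]
});

const answer = await session.prompt("And what is its population?");
console.log(answer);

Since the history is only replayed when _promptIndex is 0, it affects just the first prompt of a fresh session and is discarded afterwards.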