@@ -44,8 +44,23 @@ async function populateDropdown() {
 
 window.onload = populateDropdown;
 
+// Utility function to introduce a delay
+function delay(ms) {
+    return new Promise(resolve => setTimeout(resolve, ms));
+}
+let lastMessageTime = 0; // Track the time of the last message sent
 document.getElementById('chat-form').addEventListener('submit', async function (event) {
     event.preventDefault();
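+    // Simple client-side rate limit: require at least 5 seconds between messages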
+    const currentTime = Date.now();
+    const timeSinceLastMessage = currentTime - lastMessageTime;
+
+    if (timeSinceLastMessage < 5000) { // Check if less than 5 seconds have passed
+        TimeNotification(5, "Warning", "You are sending messages too fast. Please wait a moment.");
+        return;
+    }
+
+    lastMessageTime = currentTime; // Update the last message time
+
     const userInput = document.getElementById('userInput').value;
     document.getElementById('userInput').value = ''; // Clear the prompt box
     AppendHistory(userInput, false);
@@ -74,34 +89,63 @@ document.getElementById('chat-form').addEventListener('submit', async function (
         },
         body: JSON.stringify({
             model: selectedModel,
-            messages: messages
+            messages: messages,
+            stream: document.getElementById('streamingSwitch').checked // Include streaming option in the request
         }),
     });
+
     if (!response.ok) {
         TimeNotification(10, "Error", `Network response was not ok: ${response.status} ${response.statusText}`);
         throw new Error(`Network response was not ok: ${response.status} ${response.statusText}`);
     }
-    const data = await response.json();
-    let botResponse = data.choices[0].message.content;
 
+    let botResponse = '';
+    // Check if streaming is enabled
+    if (document.getElementById('streamingSwitch').checked) {
+        const reader = response.body.getReader();
+        const decoder = new TextDecoder("utf-8");
+
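+        // Read the streamed response; each chunk may contain one or more "data: {...}" lines, and "data: [DONE]" marks the end of the stream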
+        while (true) {
+            const { done, value } = await reader.read();
+            if (done) break;
+            const chunk = decoder.decode(value, { stream: true });
+            const lines = chunk.split('\n');
+            for (const line of lines) {
+                if (line.trim().startsWith('data:')) {
+                    const data = line.replace('data: ', '').trim();
+                    if (data === '[DONE]') break;
+                    const parsedData = JSON.parse(data);
+                    const content = parsedData.choices[0].delta.content || '';
+                    botResponse += content;
+                    EditMessage(document.querySelector(".ai-message:last-child"), convertMarkdown(botResponse));
+                }
+            }
+        }
+    } else {
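+        // Non-streaming: the full completion arrives as a single JSON body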
+        const data = await response.json();
+        botResponse = data.choices[0].message.content;
+        botResponse = convertMarkdown(botResponse);
+        conversationHistory.push({ role: "assistant", content: botResponse });
+        EditMessage(document.querySelector(".ai-message:last-child"), botResponse);
+    }
+
+    // Handle image generation after the message is complete
     let imageGenerated = false;
     const imageRequestMatch = botResponse.match(/\[\{GEN_IMG:"(.*?)"\}\]/);
     if (imageRequestMatch) {
         EditMessage(document.querySelector('.ai-message:last-child'), " <img src='./imgs/loading.gif'> Generating Image...");
         const imagePrompt = imageRequestMatch[1];
+        await delay(5000); // Wait for 5 seconds before making the image generation request
         const imageUrl = await generateImage(imagePrompt, selectedModel);
         if (imageUrl) {
             botResponse = botResponse.replace(imageRequestMatch[0], `<img src="${imageUrl}" alt="Generated Image" style="max-width: 100%; height: auto;">`);
             imageGenerated = true;
         } else {
             botResponse = botResponse.replace(imageRequestMatch[0], "Failed to generate image.");
         }
+        EditMessage(document.querySelector(".ai-message:last-child"), botResponse);
     }
-
-    botResponse = convertMarkdown(botResponse);
-    conversationHistory.push({ role: "assistant", content: botResponse });
-    EditMessage(document.querySelector(".ai-message:last-child"), botResponse);
-
+
     // Send telemetry data if the switch is enabled
     if (document.getElementById('telemetrySwitch').checked) {
         const chatHistoryString = conversationHistory.map(entry => `${entry.role}: ${entry.content}`).join('\n');
@@ -138,6 +182,10 @@ document.getElementById('chat-form').addEventListener('submit', async function (
     }
 });
 
+
+
+
+
 async function generateImage(prompt, model) {
     try {
         const response = await fetch(image_api_url, {