@@ -17,6 +17,7 @@ import { LLMResponse } from "./api/LLMResponse.js";
1717import { PromptBuilder } from "./api/formats/PromptBuilder.js" ;
1818import { LLMFormat , llmFormats } from "./api/index.js" ;
1919import { createAgentCursorPlugin } from "./plugins/AgentCursorPlugin.js" ;
20+ import { Fragment , Slice } from "prosemirror-model" ;
2021
2122type MakeOptional < T , K extends keyof T > = Omit < T , K > & Partial < Pick < T , K > > ;
2223
@@ -198,15 +199,65 @@ export class AIExtension extends BlockNoteExtension {
198199 * Accept the changes made by the LLM
199200 */
public acceptChanges() {
  // This is slightly convoluted, to try to maintain the undo history as much as possible.
  // The LLM call has appended a number of updates, moving the document from state `A` to state `C`
  // (with `C` still containing suggestion marks). We want a single undo step from `C` back to `A`,
  // skipping every intermediate LLM streaming state. Strategy:
  //   1. capture the marked-up document `C'` (post-LLM, suggestions still present),
  //   2. revert the suggestions *outside* the history to recover `A`,
  //   3. dispatch ONE history-visible transaction that replays `A -> C'` and then
  //      applies the suggestions, landing on the final accepted document `C`.
  // Undo then jumps straight from `C` to `A`.

  // Step 1: capture the document state `C'` (post-LLM call with all suggestions still in the document).
  const markedUpDocument = this.editor.prosemirrorState.doc;

  // Step 2: revert the suggestions to get back to the original document state `A`.
  // `addToHistory: false` keeps this revert out of the undo stack.
  this.editor.exec((state, dispatch) => {
    return revertSuggestions(state, (tr) => {
      dispatch?.(tr.setMeta("addToHistory", false));
    });
  });

  // Step 3: build a single transaction that captures the full `A -> C` diff.
  this.editor.exec((state, dispatch) => {
    const tr = state.tr;
    // First step: replace the whole document with the marked-up document `C'`.
    tr.replace(
      0,
      tr.doc.content.size,
      new Slice(Fragment.from(markedUpDocument), 0, 0),
    );
    const nextState = state.apply(tr);
    // Apply the suggestions to the intermediate state `B` to get the final state `C`.
    // NOTE: the callback appends a SECOND replace step to the same `tr` (now sized for
    // the marked-up doc), so the dispatched transaction carries both steps as one
    // undoable unit: original -> marked-up -> suggestions-applied.
    return applySuggestions(nextState, (resultTr) => {
      dispatch?.(
        tr.replace(
          0,
          tr.doc.content.size,
          new Slice(Fragment.from(resultTr.doc), 0, 0),
        ),
      );
    });
  });

  // If in collaboration mode, merge the accepted changes back into the original yDoc.
  // (no-op when the fork plugin is absent — optional chaining)
  this.editor.forkYDocPlugin?.merge({ keepChanges: true });

  this.closeAIMenu();
}
204246
205247 /**
206248 * Reject the changes made by the LLM
207249 */
208250 public rejectChanges ( ) {
209- this . editor . exec ( revertSuggestions ) ;
251+ // Revert the suggestions to get back to the original document
252+ this . editor . exec ( ( state , dispatch ) => {
253+ return revertSuggestions ( state , ( tr ) => {
254+ // Do so without adding to history (so the last undo step is just prior to the LLM call)
255+ dispatch ?.( tr . setMeta ( "addToHistory" , false ) ) ;
256+ } ) ;
257+ } ) ;
258+
259+ // If in collaboration mode, discard the changes and revert to the original yDoc
260+ this . editor . forkYDocPlugin ?. merge ( { keepChanges : false } ) ;
210261 this . closeAIMenu ( ) ;
211262 }
212263
@@ -299,6 +350,8 @@ export class AIExtension extends BlockNoteExtension {
299350 */
300351 public async callLLM ( opts : MakeOptional < LLMRequestOptions , "model" > ) {
301352 this . setAIResponseStatus ( "thinking" ) ;
353+ this . editor . forkYDocPlugin ?. fork ( ) ;
354+
302355 let ret : LLMResponse | undefined ;
303356 try {
304357 const requestOptions = {
@@ -334,6 +387,8 @@ export class AIExtension extends BlockNoteExtension {
334387
335388 this . setAIResponseStatus ( "user-reviewing" ) ;
336389 } catch ( e ) {
390+ // TODO in error state, should we discard the forked document?
391+
337392 this . setAIResponseStatus ( {
338393 status : "error" ,
339394 error : e ,
0 commit comments