Skip to content

Commit afccf0e

Browse files
authored
fix: performance issue when renaming an object key, see #543 (#551)
1 parent 1a794cb commit afccf0e

File tree

4 files changed

+248
-80
lines changed

4 files changed

+248
-80
lines changed

src/lib/logic/operations.test.ts

Lines changed: 110 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
1-
import { test, describe } from 'vitest'
1+
import { test, describe, expect } from 'vitest'
22
import assert from 'assert'
33
import {
44
clipboardToValues,
55
createNestedValueOperations,
66
createNewValue,
77
moveInsideParent,
8+
rename,
89
revertJSONPatchWithMoveOperations
910
} from './operations.js'
1011
import { createMultiSelection } from './selection.js'
@@ -270,7 +271,6 @@ describe('operations', () => {
270271

271272
const revertOperations = revertJSONPatchWithMoveOperations(json, operations)
272273
assert.deepStrictEqual(revertOperations, [
273-
{ op: 'move', from: '/b', path: '/b' },
274274
{ op: 'move', from: '/a', path: '/a' },
275275
{ op: 'move', from: '/c', path: '/c' }
276276
])
@@ -280,6 +280,114 @@ describe('operations', () => {
280280
assert.deepStrictEqual(Object.keys(revertedJson), ['b', 'a', 'c'])
281281
})
282282

283+
test('should remove redundant move operations when renaming a key and maintaining order (1)', () => {
284+
const json = {
285+
a: 1,
286+
b: 2,
287+
c: 3
288+
}
289+
290+
const operations: JSONPatchOperation[] = [{ op: 'move', from: '/b', path: '/b_renamed' }]
291+
const updatedJson = immutableJSONPatch(json, operations)
292+
assert.deepStrictEqual(Object.keys(updatedJson as Record<string, number>), [
293+
'a',
294+
'c',
295+
'b_renamed'
296+
])
297+
298+
const revertOperations = revertJSONPatchWithMoveOperations(json, operations)
299+
assert.deepStrictEqual(revertOperations, [
300+
{ op: 'move', from: '/b_renamed', path: '/b' },
301+
{ op: 'move', from: '/c', path: '/c' },
302+
{ op: 'move', from: '/c', path: '/c' } // TODO: would be neat to remove the duplicate
303+
])
304+
305+
const revertedJson = immutableJSONPatch(updatedJson, revertOperations)
306+
assert.deepStrictEqual(revertedJson, json)
307+
assert.deepStrictEqual(Object.keys(revertedJson), ['a', 'b', 'c'])
308+
})
309+
310+
test('should remove redundant move operations when renaming a key and maintaining order (2)', () => {
311+
const json = {
312+
a: 1,
313+
b: 2,
314+
c: 3
315+
}
316+
317+
const operations: JSONPatchOperation[] = [
318+
{ op: 'move', from: '/b', path: '/b_renamed' },
319+
{ op: 'move', from: '/c', path: '/c' }
320+
]
321+
const updatedJson = immutableJSONPatch(json, operations)
322+
assert.deepStrictEqual(Object.keys(updatedJson as Record<string, number>), [
323+
'a',
324+
'b_renamed',
325+
'c'
326+
])
327+
328+
const revertOperations = revertJSONPatchWithMoveOperations(json, operations)
329+
assert.deepStrictEqual(revertOperations, [
330+
{ op: 'move', from: '/b_renamed', path: '/b' },
331+
{ op: 'move', from: '/c', path: '/c' }
332+
])
333+
334+
const revertedJson = immutableJSONPatch(updatedJson, revertOperations)
335+
assert.deepStrictEqual(revertedJson, json)
336+
assert.deepStrictEqual(Object.keys(revertedJson), ['a', 'b', 'c'])
337+
})
338+
339+
test('should remove redundant move operations when renaming a key and maintaining order (3)', () => {
340+
const json = {
341+
a: 1,
342+
b: 2,
343+
c: 3
344+
}
345+
346+
const operations: JSONPatchOperation[] = [
347+
{ op: 'move', from: '/c', path: '/c_renamed' },
348+
{ op: 'move', from: '/a', path: '/a' } // we move `a` to the end (different operation)
349+
]
350+
const updatedJson = immutableJSONPatch(json, operations)
351+
assert.deepStrictEqual(Object.keys(updatedJson as Record<string, number>), [
352+
'b',
353+
'c_renamed',
354+
'a'
355+
])
356+
357+
// Note: there are optimizations possible in the following revert operations,
358+
// but this is a made up example, not a real world example, quite an edge case.
359+
const revertOperations = revertJSONPatchWithMoveOperations(json, operations)
360+
assert.deepStrictEqual(revertOperations, [
361+
{ op: 'move', from: '/c_renamed', path: '/c' },
362+
{ op: 'move', from: '/b', path: '/b' },
363+
{ op: 'move', from: '/b', path: '/b' }, // TODO: would be neat to remove the duplicate
364+
{ op: 'move', from: '/c', path: '/c' }
365+
])
366+
367+
const revertedJson = immutableJSONPatch(updatedJson, revertOperations)
368+
assert.deepStrictEqual(revertedJson, json)
369+
assert.deepStrictEqual(Object.keys(revertedJson), ['a', 'b', 'c'])
370+
})
371+
372+
test('should perform well when creating the revert patch of renaming a key in a large object', () => {
373+
console.time('revert large patch')
374+
const count = 100
375+
const json: Record<string, number> = {}
376+
for (let i = 0; i < count; i++) {
377+
json['item ' + i] = i
378+
}
379+
380+
const operations = rename([], Object.keys(json), 'item 0', 'item 0 (renamed)')
381+
expect(operations.length).toEqual(count)
382+
const updatedJson = immutableJSONPatch(json, operations)
383+
384+
const revertOperations = revertJSONPatchWithMoveOperations(json, operations)
385+
expect(revertOperations.length).toEqual(count)
386+
const revertedJson = immutableJSONPatch(updatedJson, revertOperations)
387+
expect(revertedJson).toEqual(json)
388+
console.timeEnd('revert large patch')
389+
}, /* timeout */ 200)
390+
283391
test('should restore correctly revert multiple remove operations in an array', () => {
284392
const json = [0, 1, 2, 3, 4]
285393

src/lib/logic/operations.ts

Lines changed: 138 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { cloneDeepWith, first, initial, isEmpty, last, times } from 'lodash-es'
1+
import { cloneDeepWith, first, initial, isEmpty, isEqual, last, times } from 'lodash-es'
22
import {
33
compileJSONPointer,
44
existsIn,
@@ -11,6 +11,7 @@ import {
1111
type JSONPatchAdd,
1212
type JSONPatchCopy,
1313
type JSONPatchDocument,
14+
type JSONPatchMove,
1415
type JSONPatchOperation,
1516
type JSONPath,
1617
parseJSONPointer,
@@ -39,7 +40,6 @@ import {
3940
} from './selection.js'
4041
import type { ClipboardValues, DragInsideAction, JSONParser, JSONSelection } from '$lib/types'
4142
import { int } from '../utils/numberUtils.js'
42-
import { dedupeKeepLast } from '$lib/utils/arrayUtils'
4343

4444
/**
4545
* Create a JSONPatch for an insert operation.
@@ -722,30 +722,146 @@ export function revertJSONPatchWithMoveOperations(
722722
json: unknown,
723723
operations: JSONPatchDocument
724724
): JSONPatchDocument {
725-
return dedupeKeepLast(
726-
revertJSONPatch(json, operations, {
727-
before: (json, operation, revertOperations) => {
728-
if (isJSONPatchRemove(operation)) {
729-
const path = parseJSONPointer(operation.path)
730-
return {
731-
revertOperations: [...revertOperations, ...createRevertMoveOperations(json, path)]
732-
}
725+
// first, we merge series of move operations inside an object. These move operations are used
726+
// to ensure a specific ordering of the keys in the object. Reverting those move events one by
727+
// one would explode the amount of move operations in the revert operations and contain mostly
728+
// duplicates.
729+
const filteredOperations = _filterRedundantMoveOperations(json, operations)
730+
731+
return revertJSONPatch(json, filteredOperations, {
732+
before: (json, operation, revertOperations) => {
733+
if (isJSONPatchRemove(operation)) {
734+
// we must restore the key order when reverting removing of an object key
735+
const path = parseJSONPointer(operation.path)
736+
return {
737+
revertOperations: [...revertOperations, ...createRevertMoveOperations(json, path)]
733738
}
739+
}
734740

735-
if (isJSONPatchMove(operation)) {
736-
const from = parseJSONPointer(operation.from)
737-
return {
738-
revertOperations:
739-
operation.from === operation.path
740-
? [operation, ...createRevertMoveOperations(json, from)] // move in-place (just for re-ordering object keys)
741-
: [...revertOperations, ...createRevertMoveOperations(json, from)]
742-
}
741+
if (isJSONPatchMove(operation)) {
742+
const from = parseJSONPointer(operation.from)
743+
return {
744+
revertOperations:
745+
operation.from === operation.path
746+
? [operation, ...createRevertMoveOperations(json, from)] // move in-place (just for re-ordering object keys)
747+
: [...revertOperations, ...createRevertMoveOperations(json, from)]
743748
}
744-
745-
return { document: json }
746749
}
747-
})
748-
)
750+
751+
return { document: json }
752+
}
753+
})
754+
}
755+
756+
/**
757+
* Filter move operations which are redundant when creating the reverse patch operations.
758+
*
759+
* This is a preprocessing step for creating the revert patch operations. For example, having
760+
* json {b:2, a:1, c:3} and the following move operations (this will reorder the keys):
761+
*
762+
* [
763+
* { op: 'move', from: '/a', path: '/a' },
764+
* { op: 'move', from: '/b', path: '/b' },
765+
* { op: 'move', from: '/c', path: '/c' }
766+
* ]
767+
*
768+
* This function will return only moving of b, since that is the first of all the moved keys:
769+
*
770+
* [
771+
* { op: 'move', from: '/b', path: '/b' }
772+
* ]
773+
*
774+
* So, when creating the reverse operations, this single operation is enough to create the needed
775+
* move operations for all lower down keys to restore the original order of the keys.
776+
*/
777+
function _filterRedundantMoveOperations(
778+
json: unknown,
779+
operations: JSONPatchOperation[]
780+
): JSONPatchOperation[] {
781+
// check whether this is a set of move operations (cheap check)
782+
if (isEmpty(operations) || !operations.every(isJSONPatchMove)) {
783+
return operations
784+
}
785+
786+
// parse the JSON pointers once and split them in parent paths and keys
787+
const processedOps: ProcessedOperation[] = []
788+
for (const operation of operations) {
789+
const from = splitParentKey(parseJSONPointer(operation.from))
790+
const path = splitParentKey(parseJSONPointer(operation.path))
791+
if (!from || !path) {
792+
return operations
793+
}
794+
processedOps.push({ from, path, operation })
795+
}
796+
797+
// check whether the first movement is inside an object (and not an array)
798+
const parentPath = processedOps[0].path.parent
799+
const parent = getIn(json, parentPath)
800+
if (!isJSONObject(parent)) {
801+
return operations
802+
}
803+
804+
// check whether all movements are within the same parent object
805+
if (!processedOps.every((op) => _equalParentPath(op, parentPath))) {
806+
return operations
807+
}
808+
809+
const firstKey = _findKeyWithLowestKeyIndex(processedOps, json)
810+
const getOperation = (op: ProcessedOperation) => op.operation
811+
812+
// only return the operation moving the firstKey back, and all rename operations
813+
const renameOps = processedOps.filter((op) => op.operation.from !== op.operation.path)
814+
return renameOps.some((op) => op.path.key === firstKey)
815+
? renameOps.map(getOperation) // no need to add the firstKey move operation
816+
: [moveDown(parentPath, firstKey), ...renameOps.map(getOperation)]
817+
}
818+
819+
interface ParentKey {
820+
parent: string[]
821+
key: string
822+
}
823+
824+
interface ProcessedOperation {
825+
from: ParentKey
826+
path: ParentKey
827+
operation: JSONPatchMove
828+
}
829+
830+
function splitParentKey(path: JSONPath): ParentKey | undefined {
831+
return path.length > 0 ? { parent: initial(path), key: last(path) as string } : undefined
832+
}
833+
834+
/**
835+
* Test whether a move operation has a specific parent path for both `from` and `path`
836+
*/
837+
function _equalParentPath(
838+
operation: ProcessedOperation,
839+
parentPath: JSONPath | undefined
840+
): boolean {
841+
return isEqual(operation.from.parent, parentPath) && isEqual(operation.path.parent, parentPath)
842+
}
843+
844+
function _findKeyWithLowestKeyIndex(processedOps: ProcessedOperation[], json: unknown): string {
845+
const keys = Object.keys(json as Record<string, unknown>)
846+
const movedKeys = keys.slice()
847+
848+
// first execute all move operations on the list with keys
849+
// here we assume all move operations have the same parent and are keys in an object
850+
for (const op of processedOps) {
851+
const index = movedKeys.indexOf(op.from.key)
852+
if (index !== -1) {
853+
movedKeys.splice(index, 1)
854+
movedKeys.push(op.path.key)
855+
}
856+
}
857+
858+
// find the first changed key
859+
let i = 0
860+
while (i < keys.length && keys[i] === movedKeys[i]) {
861+
i++
862+
}
863+
864+
return movedKeys[i]
749865
}
750866

751867
function createRevertMoveOperations(json: unknown, path: JSONPath): JSONPatchOperation[] {

src/lib/utils/arrayUtils.test.ts

Lines changed: 0 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@ import {
44
arrayStartsWith,
55
arrayToObject,
66
compareArrays,
7-
dedupeKeepLast,
87
forEachSample,
98
getNestedPaths,
109
moveItems,
@@ -158,43 +157,4 @@ describe('arrayUtils', () => {
158157
expect(sample([], 4)).toEqual([])
159158
})
160159
})
161-
162-
describe('dedupeKeepLast', () => {
163-
test('should keep the last item in case of a duplicate', () => {
164-
expect(dedupeKeepLast([3, 1, 3])).toEqual([1, 3])
165-
expect(dedupeKeepLast([3, 1, 3, 3])).toEqual([1, 3])
166-
167-
expect(
168-
dedupeKeepLast([
169-
{ id: 1, name: 'Joe' },
170-
{ id: 3, name: 'Sarah' },
171-
{ id: 1, name: 'Joe' }
172-
])
173-
).toEqual([
174-
{ id: 3, name: 'Sarah' },
175-
{ id: 1, name: 'Joe' }
176-
])
177-
178-
expect(
179-
dedupeKeepLast([
180-
{ id: 1, name: 'Joe' },
181-
{ id: 3, name: 'Sarah' },
182-
{ id: 1, name: 'Joey' }
183-
])
184-
).toEqual([
185-
{ id: 1, name: 'Joe' },
186-
{ id: 3, name: 'Sarah' },
187-
{ id: 1, name: 'Joey' }
188-
])
189-
})
190-
191-
test('should pass a custom comparator', () => {
192-
const comparator = (a: Record<string, unknown>, b: Record<string, unknown>) => a.id === b.id
193-
194-
const input = [{ id: 1, name: 'Joe' }, { id: 3 }, { id: 1, name: 'Joey' }]
195-
const expected = [{ id: 3 }, { id: 1, name: 'Joey' }]
196-
197-
expect(dedupeKeepLast(input, comparator)).toEqual(expected)
198-
})
199-
})
200160
})

src/lib/utils/arrayUtils.ts

Lines changed: 0 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -189,19 +189,3 @@ export function forEachSample<T>(
189189
export function insertItemsAt<T>(array: T[], index: number, items: T[]): T[] {
190190
return array.slice(0, index).concat(items).concat(array.slice(index))
191191
}
192-
193-
/**
194-
* Remove duplicate entries from an array, keeping the last item in case of a duplicate.
195-
* This is similar to the `uniqWith` function of Lodash, but that function keeps the *first* item in case of a duplicate.
196-
*/
197-
export function dedupeKeepLast<T>(array: T[], comparator: (a: T, b: T) => boolean = isEqual): T[] {
198-
return array.filter((item, index) => {
199-
for (let i = index + 1; i < array.length; i++) {
200-
if (comparator(item, array[i])) {
201-
return false
202-
}
203-
}
204-
205-
return true
206-
})
207-
}

0 commit comments

Comments (0)