diff --git a/web/src/engine/predictive-text/worker-thread/src/main/correction/context-token.ts b/web/src/engine/predictive-text/worker-thread/src/main/correction/context-token.ts index 8c121fb1b99..fe9ae47f525 100644 --- a/web/src/engine/predictive-text/worker-thread/src/main/correction/context-token.ts +++ b/web/src/engine/predictive-text/worker-thread/src/main/correction/context-token.ts @@ -11,9 +11,10 @@ import { applyTransform, buildMergedTransform } from "@keymanapp/models-template import { LexicalModelTypes } from '@keymanapp/common-types'; import { deepCopy, KMWString } from "@keymanapp/web-utils"; -import { SearchQuotientSpur } from "./search-quotient-spur.js"; import { SearchQuotientNode } from "./search-quotient-node.js"; import { TokenSplitMap } from "./context-tokenization.js"; +import { LegacyQuotientSpur } from "./legacy-quotient-spur.js"; +import { LegacyQuotientRoot } from "./legacy-quotient-root.js"; import Distribution = LexicalModelTypes.Distribution; import LexicalModel = LexicalModelTypes.LexicalModel; @@ -121,7 +122,7 @@ export class ContextToken { // Supports the old pathway for: updateWithBackspace(tokenText: string, transformId: number) // Build a token that represents the current text with no ambiguity - probability at max (1.0) - let searchSpace = new SearchQuotientSpur(model); + let searchModule: SearchQuotientNode = new LegacyQuotientRoot(model); const BASE_PROBABILITY = 1; textToCharTransforms(rawText).forEach((transform) => { this._inputRange.push({ @@ -129,10 +130,10 @@ export class ContextToken { inputStartIndex: 0, bestProbFromSet: BASE_PROBABILITY }); - searchSpace = new SearchQuotientSpur(searchSpace, [{sample: transform, p: BASE_PROBABILITY}], 1); + searchModule = new LegacyQuotientSpur(searchModule, [{sample: transform, p: BASE_PROBABILITY}], 1); }); - this._searchModule = searchSpace; + this._searchModule = searchModule; } } @@ -142,7 +143,7 @@ export class ContextToken { */ addInput(inputSource: TokenInputSource, 
distribution: Distribution) { this._inputRange.push(inputSource); - this._searchModule = new SearchQuotientSpur(this._searchModule, distribution, inputSource.bestProbFromSet); + this._searchModule = new LegacyQuotientSpur(this._searchModule, distribution, inputSource.bestProbFromSet); } /** diff --git a/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-root.ts b/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-root.ts new file mode 100644 index 00000000000..f2a23077a7a --- /dev/null +++ b/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-root.ts @@ -0,0 +1,61 @@ +import { PriorityQueue } from '@keymanapp/web-utils'; +import { LexicalModelTypes } from '@keymanapp/common-types'; + +import { SearchQuotientRoot } from './search-quotient-root.js'; +import { QUEUE_NODE_COMPARATOR } from './search-quotient-spur.js'; +import { SearchNode, SearchResult } from './distance-modeler.js'; + +import LexicalModel = LexicalModelTypes.LexicalModel; +import { PathResult } from './search-quotient-node.js'; + +export class LegacyQuotientRoot extends SearchQuotientRoot { + private selectionQueue: PriorityQueue = new PriorityQueue(QUEUE_NODE_COMPARATOR); + private processed: SearchResult[] = []; + + constructor(model: LexicalModel) { + super(model); + + this.selectionQueue.enqueue(this.rootNode); + } + + // TODO: Remove when removing LegacyQuotientSpur! + // At that time, inserts should have their own devoted 'Spur' type and not be managed + // within the same pre-existing instance. + /** + * Retrieves the lowest-cost / lowest-distance edge from the selection queue, + * checks its validity as a correction to the input text, and reports on what + * sort of result the edge's destination node represents. 
+ * @returns + */ + public handleNextNode(): PathResult { + const node = this.selectionQueue.dequeue(); + + if(!node) { + return { + type: 'none' + }; + } + + // The legacy variant includes 'insert' operations! + if(node.editCount < 2) { + let insertionEdges = node.buildInsertionEdges(); + this.selectionQueue.enqueueAll(insertionEdges); + } + + this.processed.push(new SearchResult(node)); + return { + type: 'complete', + cost: node.currentCost, + finalNode: node, + spaceId: this.spaceId + }; + } + + public get currentCost(): number { + return this.selectionQueue.peek()?.currentCost ?? Number.POSITIVE_INFINITY; + } + + get previousResults(): SearchResult[] { + return this.processed.slice(); + } +} \ No newline at end of file diff --git a/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-spur.ts b/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-spur.ts new file mode 100644 index 00000000000..9fd218fa55e --- /dev/null +++ b/web/src/engine/predictive-text/worker-thread/src/main/correction/legacy-quotient-spur.ts @@ -0,0 +1,80 @@ +/* + * Keyman is copyright (C) SIL Global. MIT License. + * + * Created by jahorton on 2025-10-09 + * + * This file defines the LegacyQuotientSpur class, which implements the + * legacy search-space behavior used for text corrections within the + * engine. + */ + +import { LexicalModelTypes } from '@keymanapp/common-types'; + +import { SearchNode } from './distance-modeler.js'; + import { PathResult, SearchQuotientNode } from './search-quotient-node.js'; + import { SearchQuotientSpur } from './search-quotient-spur.js'; + +import Distribution = LexicalModelTypes.Distribution; +import Transform = LexicalModelTypes.Transform; + +// The set of search spaces corresponding to the same 'context' for search. +// Whenever a wordbreak boundary is crossed, a new instance should be made. 
+export class LegacyQuotientSpur extends SearchQuotientSpur { + /** + * Extends an existing search space by one incoming keystroke, preserving + * the legacy correction-search behavior. + * @param space + * @param inputs + * @param bestProbFromSet + */ + constructor(space: SearchQuotientNode, inputs: Distribution, bestProbFromSet: number) { + super(space, inputs, space.lowestPossibleSingleCost - Math.log(bestProbFromSet)); + this.queueNodes(this.buildEdgesForNodes(space.previousResults.map(r => r.node))); + return; + } + + protected buildEdgesForNodes(baseNodes: ReadonlyArray) { + // With a newly-available input, we can extend new input-dependent paths from + // our previously-reached 'extractedResults' nodes. + let outboundNodes = baseNodes.map((node) => { + // Hard restriction: no further edits will be supported. This helps keep the search + // more narrowly focused. + const substitutionsOnly = node.editCount == 2; + + let deletionEdges: SearchNode[] = []; + if(!substitutionsOnly) { + deletionEdges = node.buildDeletionEdges(this.inputs, this.spaceId); + } + const substitutionEdges = node.buildSubstitutionEdges(this.inputs, this.spaceId); + + // Skip the queue for the first pass; there will ALWAYS be at least one pass, + // and queue-enqueing does come with a cost - avoid unnecessary overhead here. + return substitutionEdges.flatMap(e => e.processSubsetEdge()).concat(deletionEdges); + }).flat(); + + return outboundNodes; + } + + /** + * Retrieves the lowest-cost / lowest-distance edge from the selection queue, + * checks its validity as a correction to the input text, and reports on what + * sort of result the edge's destination node represents. + * @returns + */ + public handleNextNode(): PathResult { + const result = super.handleNextNode(); + + if(result.type == 'complete') { + const currentNode = result.finalNode; + + // Forbid a raw edit-distance of greater than 2. + // Note: .knownCost is not scaled, while its contribution to .currentCost _is_ scaled. 
+ if(currentNode.editCount < 2) { + let insertionEdges = currentNode.buildInsertionEdges(); + this.queueNodes(insertionEdges); + } + } + + return result; + } +} \ No newline at end of file diff --git a/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-root.ts b/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-root.ts new file mode 100644 index 00000000000..fd23301a8f6 --- /dev/null +++ b/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-root.ts @@ -0,0 +1,92 @@ + +import { LexicalModelTypes } from '@keymanapp/common-types'; + +import { SearchNode, SearchResult } from './distance-modeler.js'; +import { generateSpaceSeed, PathResult, SearchQuotientNode } from './search-quotient-node.js'; + +import LexicalModel = LexicalModelTypes.LexicalModel; + +// The set of search spaces corresponding to the same 'context' for search. +// Whenever a wordbreak boundary is crossed, a new instance should be made. +export class SearchQuotientRoot implements SearchQuotientNode { + readonly rootNode: SearchNode; + private readonly rootResult: SearchResult; + + readonly lowestPossibleSingleCost: number = 0; + + readonly inputCount: number = 0; + readonly correctionsEnabled: boolean = false; + + private hasBeenProcessed: boolean = false; + + /** + * Constructs a fresh SearchQuotientRoot instance to be used as the root of + * the predictive-text correction / suggestion search process. + * + * @param model + */ + constructor(model: LexicalModel) { + this.rootNode = new SearchNode(model.traverseFromRoot(), generateSpaceSeed(), t => model.toKey(t)); + this.rootResult = new SearchResult(this.rootNode); + } + + get spaceId(): number { + return this.rootNode.spaceId; + } + + hasInputs(keystrokeDistributions: LexicalModelTypes.Distribution[]): boolean { + return keystrokeDistributions.length == 0; + } + + // Return a new array each time; avoid aliasing potential! 
+ get parents(): SearchQuotientNode[] { + return []; + } + + // Return a new array each time; avoid aliasing potential! + get inputSequence(): LexicalModelTypes.Distribution[] { + return []; + } + + // Return a new instance each time; avoid aliasing potential! + get bestExample(): { text: string; p: number; } { + return { text: '', p: 1 }; + } + + increaseMaxEditDistance(): void { + this.rootNode.calculation = this.rootNode.calculation.increaseMaxDistance(); + } + + /** + * Retrieves the lowest-cost / lowest-distance edge from the selection queue, + * checks its validity as a correction to the input text, and reports on what + * sort of result the edge's destination node represents. + * @returns + */ + public handleNextNode(): PathResult { + if(this.hasBeenProcessed) { + return null; + } + + this.hasBeenProcessed = true; + + return { + type: 'complete', + cost: 0, + finalNode: this.rootNode, + spaceId: this.spaceId + }; + } + + public get currentCost(): number { + return this.hasBeenProcessed ? 
Number.POSITIVE_INFINITY : 0; + } + + get previousResults(): SearchResult[] { + if(!this.hasBeenProcessed) { + return []; + } else { + return [this.rootResult]; + } + } +} \ No newline at end of file diff --git a/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-spur.ts b/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-spur.ts index b877aaf2868..b2a39a869d7 100644 --- a/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-spur.ts +++ b/web/src/engine/predictive-text/worker-thread/src/main/correction/search-quotient-spur.ts @@ -15,7 +15,6 @@ import { EDIT_DISTANCE_COST_SCALE, SearchNode, SearchResult } from './distance-m import { generateSpaceSeed, PathResult, SearchQuotientNode } from './search-quotient-node.js'; import Distribution = LexicalModelTypes.Distribution; -import LexicalModel = LexicalModelTypes.LexicalModel; import Transform = LexicalModelTypes.Transform; export const QUEUE_NODE_COMPARATOR: Comparator = function(arg1, arg2) { @@ -24,7 +23,7 @@ export const QUEUE_NODE_COMPARATOR: Comparator = function(arg1, arg2 // The set of search spaces corresponding to the same 'context' for search. // Whenever a wordbreak boundary is crossed, a new instance should be made. -export class SearchQuotientSpur implements SearchQuotientNode { +export abstract class SearchQuotientSpur implements SearchQuotientNode { private selectionQueue: PriorityQueue = new PriorityQueue(QUEUE_NODE_COMPARATOR); readonly inputs?: Distribution>; @@ -46,47 +45,29 @@ export class SearchQuotientSpur implements SearchQuotientNode { readonly lowestPossibleSingleCost: number; /** - * Constructs a fresh SearchSpace instance for used in predictive-text correction - * and suggestion searches. - * @param baseSpaceId - * @param model + * Extends an existing SearchQuotientNode (and its correction data) by a keystroke based + * on a subset of the incoming keystroke's fat-finger distribution. 
+ * + * @param parentNode + * @param inputs + * @param costHeuristic */ - constructor(model: LexicalModel); - constructor(space: SearchQuotientNode, inputs: Distribution, bestProbFromSet: number); - constructor(arg1: LexicalModel | SearchQuotientNode, inputs?: Distribution, bestProbFromSet?: number) { + constructor(parentNode: SearchQuotientNode, inputs: Distribution, costHeuristic: number) { this.spaceId = generateSpaceSeed(); - if(arg1 instanceof SearchQuotientSpur) { - const parentNode = arg1 as SearchQuotientNode; - const logTierCost = -Math.log(bestProbFromSet); - - this.inputs = inputs; - this.inputCount = parentNode.inputCount + 1; - this.lowestPossibleSingleCost = parentNode.lowestPossibleSingleCost + logTierCost; - this.parentNode = parentNode; - - this.addEdgesForNodes(parentNode.previousResults.map(r => r.node)); - - return; - } - - const model = arg1 as LexicalModel; - this.selectionQueue.enqueue(new SearchNode(model.traverseFromRoot(), this.spaceId, t => model.toKey(t))); - this.lowestPossibleSingleCost = 0; - this.inputCount = 0; + this.parentNode = parentNode; + this.lowestPossibleSingleCost = (parentNode?.lowestPossibleSingleCost ?? 0) - Math.log(costHeuristic); + this.inputs = inputs?.length > 0 ? inputs : null; + this.inputCount = (parentNode?.inputCount ?? 0) + (this.inputs ? 1 : 0); } /** * Retrieves the sequences of inputs that led to this SearchPath. */ public get inputSequence(): Distribution[] { - if(this.parentNode) { - return [...this.parentNode.inputSequence, this.inputs]; - } else if(this.inputs) { - return [this.inputs]; - } else { - return []; - } + const parentInputs = this.parentNode?.inputSequence.slice() ?? []; + const localInputs = this.inputs ? 
[this.inputs.slice()] : []; + return parentInputs.concat(localInputs); } public hasInputs(keystrokeDistributions: Distribution[]): boolean { @@ -137,7 +118,7 @@ export class SearchQuotientSpur implements SearchQuotientNode { public get lastInput(): Distribution> { // Shallow-copies the array to prevent external modification; the Transforms // are marked Readonly to prevent their modification as well. - return [...this.inputs]; + return this.inputs ?? []; } public get bestExample(): {text: string, p: number} { @@ -181,26 +162,10 @@ export class SearchQuotientSpur implements SearchQuotientNode { return Math.min(localCost, parentCost); } - private addEdgesForNodes(baseNodes: ReadonlyArray) { - // With a newly-available input, we can extend new input-dependent paths from - // our previously-reached 'extractedResults' nodes. - let outboundNodes = baseNodes.map((node) => { - // Hard restriction: no further edits will be supported. This helps keep the search - // more narrowly focused. - const substitutionsOnly = node.editCount == 2; - - let deletionEdges: SearchNode[] = []; - if(!substitutionsOnly) { - deletionEdges = node.buildDeletionEdges(this.inputs, this.spaceId); - } - const substitutionEdges = node.buildSubstitutionEdges(this.inputs, this.spaceId); - - // Skip the queue for the first pass; there will ALWAYS be at least one pass, - // and queue-enqueing does come with a cost - avoid unnecessary overhead here. 
- return substitutionEdges.flatMap(e => e.processSubsetEdge()).concat(deletionEdges); - }).flat(); + protected abstract buildEdgesForNodes(baseNodes: ReadonlyArray): SearchNode[]; - this.selectionQueue.enqueueAll(outboundNodes); + protected queueNodes(nodes: SearchNode[]) { + this.selectionQueue.enqueueAll(nodes); } /** @@ -223,7 +188,7 @@ export class SearchQuotientSpur implements SearchQuotientNode { const result = this.parentNode.handleNextNode(); if(result.type == 'complete') { - this.addEdgesForNodes([result.finalNode]); + this.queueNodes(this.buildEdgesForNodes([result.finalNode])); } return { diff --git a/web/src/engine/predictive-text/worker-thread/src/main/test-index.ts b/web/src/engine/predictive-text/worker-thread/src/main/test-index.ts index 2c5c0214ada..ae713f29e99 100644 --- a/web/src/engine/predictive-text/worker-thread/src/main/test-index.ts +++ b/web/src/engine/predictive-text/worker-thread/src/main/test-index.ts @@ -6,6 +6,10 @@ export { ContextTracker } from './correction/context-tracker.js'; export { ContextTransition } from './correction/context-transition.js'; export * from './correction/distance-modeler.js'; export * from './correction/search-quotient-spur.js'; +export * from './correction/search-quotient-node.js'; +export * from './correction/legacy-quotient-root.js'; +export * from './correction/legacy-quotient-spur.js'; +export * from './correction/search-quotient-root.js'; export { ExtendedEditOperation, SegmentableDistanceCalculation } from './correction/segmentable-calculation.js'; export * from './correction/tokenization-subsets.js'; export * as correction from './correction/index.js'; diff --git a/web/src/test/auto/headless/engine/predictive-text/worker-thread/context/context-token.tests.ts b/web/src/test/auto/headless/engine/predictive-text/worker-thread/context/context-token.tests.ts index 06dc12a0d77..b455cdba315 100644 --- a/web/src/test/auto/headless/engine/predictive-text/worker-thread/context/context-token.tests.ts +++ 
b/web/src/test/auto/headless/engine/predictive-text/worker-thread/context/context-token.tests.ts @@ -86,7 +86,9 @@ describe('ContextToken', function() { let clonedToken = new ContextToken(baseToken); assert.equal(clonedToken.searchModule, baseToken.searchModule); - // Deep equality on .searchModule can't be directly checked due to the internal complexities involved. + // Deep equality on .searchModule can't be directly checked due to the + // internal complexities involved. + // // We CAN check for the most important members, though. assert.equal(clonedToken.searchModule, baseToken.searchModule); diff --git a/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/getBestMatches.tests.ts b/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/getBestMatches.tests.ts index 146d8bb8218..14cb80a9a11 100644 --- a/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/getBestMatches.tests.ts +++ b/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/getBestMatches.tests.ts @@ -10,7 +10,7 @@ import { assert } from 'chai'; import { jsonFixture } from '@keymanapp/common-test-resources/model-helpers.mjs'; -import { correction, getBestMatches, models, SearchQuotientSpur } from '@keymanapp/lm-worker/test-index'; +import { correction, getBestMatches, LegacyQuotientSpur, models, LegacyQuotientRoot } from '@keymanapp/lm-worker/test-index'; import SearchResult = correction.SearchResult; import TrieModel = models.TrieModel; @@ -96,7 +96,7 @@ describe('getBestMatches', () => { const rootTraversal = testModel.traverseFromRoot(); assert.isNotEmpty(rootTraversal); - const searchSpace = new SearchQuotientSpur(testModel); + const searchSpace = new LegacyQuotientRoot(testModel); const iter = getBestMatches(searchSpace, buildTestTimer()); const firstResult = await iter.next(); @@ -109,7 +109,7 @@ describe('getBestMatches', () => { const rootTraversal = 
testModel.traverseFromRoot(); assert.isNotEmpty(rootTraversal); - let searchPath = new SearchQuotientSpur(testModel); + let searchPath = new LegacyQuotientRoot(testModel); // VERY artificial distributions. const synthInput1 = [ @@ -126,9 +126,9 @@ describe('getBestMatches', () => { {sample: {insert: 'n', deleteLeft: 0}, p: 0.25} ]; - const searchPath1 = new SearchQuotientSpur(searchPath, synthInput1, 1); - const searchPath2 = new SearchQuotientSpur(searchPath1, synthInput2, .75); - const searchPath3 = new SearchQuotientSpur(searchPath2, synthInput3, .75); + const searchPath1 = new LegacyQuotientSpur(searchPath, synthInput1, 1); + const searchPath2 = new LegacyQuotientSpur(searchPath1, synthInput2, .75); + const searchPath3 = new LegacyQuotientSpur(searchPath2, synthInput3, .75); assert.notEqual(searchPath1.spaceId, searchPath.spaceId); assert.notEqual(searchPath2.spaceId, searchPath1.spaceId); @@ -143,7 +143,7 @@ describe('getBestMatches', () => { const rootTraversal = testModel.traverseFromRoot(); assert.isNotEmpty(rootTraversal); - let searchPath = new SearchQuotientSpur(testModel); + let searchPath = new LegacyQuotientRoot(testModel); // VERY artificial distributions. 
const synthInput1 = [ @@ -160,9 +160,9 @@ describe('getBestMatches', () => { {sample: {insert: 'n', deleteLeft: 0}, p: 0.25} ]; - const searchPath1 = new SearchQuotientSpur(searchPath, synthInput1, 1); - const searchPath2 = new SearchQuotientSpur(searchPath1, synthInput2, .75); - const searchPath3 = new SearchQuotientSpur(searchPath2, synthInput3, .75); + const searchPath1 = new LegacyQuotientSpur(searchPath, synthInput1, 1); + const searchPath2 = new LegacyQuotientSpur(searchPath1, synthInput2, .75); + const searchPath3 = new LegacyQuotientSpur(searchPath2, synthInput3, .75); assert.notEqual(searchPath1.spaceId, searchPath.spaceId); assert.notEqual(searchPath2.spaceId, searchPath1.spaceId); @@ -182,7 +182,7 @@ describe('getBestMatches', () => { const rootTraversal = testModel.traverseFromRoot(); assert.isNotEmpty(rootTraversal); - const searchSpace = new SearchQuotientSpur(testModel); + const searchSpace = new LegacyQuotientRoot(testModel); const timer = buildTestTimer(); const iter = getBestMatches(searchSpace, timer); diff --git a/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/search-quotient-spur.tests.ts b/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/search-quotient-spur.tests.ts index 55babb6765d..5a1432e9e99 100644 --- a/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/search-quotient-spur.tests.ts +++ b/web/src/test/auto/headless/engine/predictive-text/worker-thread/correction-search/search-quotient-spur.tests.ts @@ -10,38 +10,38 @@ import { assert } from 'chai'; import { jsonFixture } from '@keymanapp/common-test-resources/model-helpers.mjs'; -import { models, SearchQuotientSpur } from '@keymanapp/lm-worker/test-index'; +import { LegacyQuotientSpur, models, LegacyQuotientRoot } from '@keymanapp/lm-worker/test-index'; import TrieModel = models.TrieModel; const testModel = new TrieModel(jsonFixture('models/tries/english-1000')); export function 
buildSimplePathSplitFixture() { - const rootPath = new SearchQuotientSpur(testModel); + const rootPath = new LegacyQuotientRoot(testModel); const distrib1 = [ { sample: {insert: 'c', deleteLeft: 0, id: 11}, p: 0.5 }, { sample: {insert: 'r', deleteLeft: 0, id: 11}, p: 0.4 }, { sample: {insert: 't', deleteLeft: 0, id: 11}, p: 0.1 } ]; - const path1 = new SearchQuotientSpur(rootPath, distrib1, distrib1[0].p); + const path1 = new LegacyQuotientSpur(rootPath, distrib1, distrib1[0].p); const distrib2 = [ { sample: {insert: 'a', deleteLeft: 0, id: 12}, p: 0.7 }, { sample: {insert: 'e', deleteLeft: 0, id: 12}, p: 0.3 } ]; - const path2 = new SearchQuotientSpur(path1, distrib2, distrib2[0].p); + const path2 = new LegacyQuotientSpur(path1, distrib2, distrib2[0].p); const distrib3 = [ { sample: {insert: 'n', deleteLeft: 0, id: 13}, p: 0.8 }, { sample: {insert: 'r', deleteLeft: 0, id: 13}, p: 0.2 } ]; - const path3 = new SearchQuotientSpur(path2, distrib3, distrib3[0].p); + const path3 = new LegacyQuotientSpur(path2, distrib3, distrib3[0].p); const distrib4 = [ { sample: {insert: 't', deleteLeft: 0, id: 14}, p: 1 } ]; - const path4 = new SearchQuotientSpur(path3, distrib4, distrib4[0].p); + const path4 = new LegacyQuotientSpur(path3, distrib4, distrib4[0].p); return { paths: [rootPath, path1, path2, path3, path4], @@ -52,16 +52,15 @@ export function buildSimplePathSplitFixture() { describe('SearchQuotientSpur', () => { describe('constructor', () => { it('initializes from a lexical model', () => { - const path = new SearchQuotientSpur(testModel); + const path = new LegacyQuotientRoot(testModel); assert.equal(path.inputCount, 0); assert.isNumber(path.spaceId); assert.deepEqual(path.bestExample, {text: '', p: 1}); assert.deepEqual(path.parents, []); - assert.isNotOk(path.inputs); }); it('may be extended from root path', () => { - const rootPath = new SearchQuotientSpur(testModel); + const rootPath = new LegacyQuotientRoot(testModel); const leadEdgeDistribution = [ {sample: 
{insert: 't', deleteLeft: 0, id: 13 }, p: 0.5}, @@ -69,7 +68,7 @@ describe('SearchQuotientSpur', () => { {sample: {insert: 'o', deleteLeft: 0, id: 13 }, p: 0.2} ]; - const extendedPath = new SearchQuotientSpur(rootPath, leadEdgeDistribution, leadEdgeDistribution[0].p); + const extendedPath = new LegacyQuotientSpur(rootPath, leadEdgeDistribution, leadEdgeDistribution[0].p); assert.equal(extendedPath.inputCount, 1); assert.isNumber(extendedPath.spaceId); @@ -83,11 +82,10 @@ describe('SearchQuotientSpur', () => { // Should (still) have codepointLength == 0 once it's defined. assert.deepEqual(rootPath.bestExample, {text: '', p: 1}); assert.deepEqual(rootPath.parents, []); - assert.isNotOk(rootPath.inputs); }); it('may be built from arbitrary prior SearchPath', () => { - const rootPath = new SearchQuotientSpur(testModel); + const rootPath = new LegacyQuotientRoot(testModel); const leadEdgeDistribution = [ {sample: {insert: 't', deleteLeft: 0, id: 13 }, p: 0.5}, @@ -96,7 +94,7 @@ describe('SearchQuotientSpur', () => { ]; const inputClone = leadEdgeDistribution.map(e => ({...e})); - const length1Path = new SearchQuotientSpur( + const length1Path = new LegacyQuotientSpur( rootPath, leadEdgeDistribution, leadEdgeDistribution[0].p @@ -108,7 +106,7 @@ describe('SearchQuotientSpur', () => { {sample: {insert: 'h', deleteLeft: 0, id: 17 }, p: 0.15} ]; - const length2Path = new SearchQuotientSpur( + const length2Path = new LegacyQuotientSpur( length1Path, tailEdgeDistribution, tailEdgeDistribution[0].p @@ -133,7 +131,7 @@ describe('SearchQuotientSpur', () => { }); it('may extend with a Transform inserting multiple codepoints', () => { - const rootPath = new SearchQuotientSpur(testModel); + const rootPath = new LegacyQuotientRoot(testModel); const leadEdgeDistribution = [ {sample: {insert: 't', deleteLeft: 0, id: 13 }, p: 0.5}, @@ -142,7 +140,7 @@ describe('SearchQuotientSpur', () => { ]; const inputClone = leadEdgeDistribution.map(e => ({...e})); - const length1Path = new 
SearchQuotientSpur( + const length1Path = new LegacyQuotientSpur( rootPath, leadEdgeDistribution, leadEdgeDistribution[0].p @@ -154,7 +152,7 @@ describe('SearchQuotientSpur', () => { {sample: {insert: 'hi', deleteLeft: 0, id: 17 }, p: 0.15} ]; - const length2Path = new SearchQuotientSpur( + const length2Path = new LegacyQuotientSpur( length1Path, tailEdgeDistribution, tailEdgeDistribution[0].p @@ -201,7 +199,7 @@ describe('SearchQuotientSpur', () => { describe('hasInputs()', () => { it('matches an empty array on root SearchPaths', () => { - assert.isTrue(new SearchQuotientSpur(testModel).hasInputs([])); + assert.isTrue(new LegacyQuotientRoot(testModel).hasInputs([])); }); it('matches all path inputs when provided in proper order', () => {