
Commit 9b98f53

keyboard: fix partial arg code (#2172)
The original partial arg code constructed a partial arg where each field of the top-level command arg was either completely filled out or completely missing. Before #2169, that was fine, because the top-level commands were more specific, so their args had more detail and the command id itself told you more. For example, in `modifyTargetContainingScopeType`, you had a specific scope type that was missing, but you knew from the command id that it was "containing".

As of #2169, we do a lot more in the lower-level captures. For example, `modifyTargetContainingScopeType` and `modifyTargetEveryScopeType` are now just `modifyTarget`, which only has a `modifier` key at the top level, so you don't know until the very end what you're dealing with, because every partially typed modifier just looks like `{command: "modifyTarget", partialArg: {modifier: undefined}}`.

This PR deeply reconstructs the partial target so that you get much more information. See the new test cases for examples of these partial arguments.

- Depends on #2169

## Checklist

- [x] Switch to `CURRENT` for default arg
- [x] Remove unique queue file
- [x] I have added [tests](https://www.cursorless.org/docs/contributing/test-case-recorder/)
- [-] I have updated the [docs](https://github.com/cursorless-dev/cursorless/tree/main/docs) and [cheatsheet](https://github.com/cursorless-dev/cursorless/tree/main/cursorless-talon/src/cheatsheet)
- [-] I have not broken the cheatsheet
1 parent 2b596eb commit 9b98f53
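To make the description above concrete, here is a rough sketch (not code from this commit) of the difference, using the `modifyTarget` example from the description and the partial-arg shapes that appear in the new test cases; `MISSING` and `NEXT` stand in for the sentinel symbols exported by the updated grammar code below:

```ts
// Before: each top-level field was all-or-nothing, so every partially typed
// modifier looked the same.
const before = { command: "modifyTarget", partialArg: { modifier: undefined } };

// After: the partial arg is reconstructed deeply, so a UI could show which
// modifier is being built and which pieces are still missing.
const MISSING = Symbol("missing"); // argument not typed yet
const NEXT = Symbol("next"); // where the next token would go
const after = {
  command: "modifyTarget",
  partialArg: {
    modifier: {
      type: "relativeScope",
      length: MISSING,
      offset: 1,
      direction: "forward",
      scopeType: MISSING,
    },
  },
};
```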

5 files changed: +271 -69 lines


packages/cursorless-vscode/src/keyboard/grammar/UniqueWorkQueue.ts

Lines changed: 0 additions & 27 deletions
This file was deleted.
Lines changed: 159 additions & 0 deletions (new file)
import assert from "assert";
import { Grammar, Parser } from "nearley";
import { KeyDescriptor } from "../TokenTypeHelpers";
import grammar from "./generated/grammar";
import {
  AcceptableTokenType,
  MISSING,
  NEXT,
  getAcceptableTokenTypes,
} from "./getAcceptableTokenTypes";
import { isEqual } from "lodash";
import { stringifyTokens } from "./stringifyTokens";

interface TestCase {
  /**
   * The tokens to feed to the parser before checking for acceptable token types
   */
  tokens: KeyDescriptor[];

  /**
   * Expect these token types to be acceptable; note that this list doesn't need
   * to include all acceptable token types, just the ones that we want to test
   * for.
   */
  expected: AcceptableTokenType[];
}

const testCases: TestCase[] = [
  {
    tokens: [],
    expected: [
      {
        type: "shape",
        command: "targetDecoratedMark",
        partialArg: {
          decoratedMark: {
            shape: NEXT,
          },
          mode: "replace",
        },
      },
      {
        type: "combineColorAndShape",
        command: "targetDecoratedMark",
        partialArg: {
          decoratedMark: {
            color: MISSING,
            shape: MISSING,
          },
          mode: "replace",
        },
      },
      {
        type: "digit",
        command: "modifyTarget",
        partialArg: {
          modifier: {
            type: "relativeScope",
            length: MISSING,
            offset: 0,
            direction: "forward",
            scopeType: MISSING,
          },
        },
      },
      {
        type: "digit",
        command: "modifyTarget",
        partialArg: {
          modifier: {
            type: "relativeScope",
            length: MISSING,
            offset: MISSING,
            direction: "forward",
            scopeType: MISSING,
          },
        },
      },
      {
        type: "nextPrev",
        command: "modifyTarget",
        partialArg: {
          modifier: {
            type: "relativeScope",
            length: MISSING,
            offset: 1,
            direction: "forward",
            scopeType: MISSING,
          },
        },
      },
    ],
  },
  {
    tokens: [{ type: "digit", value: 3 }],
    expected: [
      {
        type: "simpleScopeTypeType",
        command: "modifyTarget",
        partialArg: {
          modifier: {
            type: "relativeScope",
            length: 3,
            offset: 0,
            direction: "forward",
            scopeType: {
              type: NEXT,
            },
          },
        },
      },
      {
        type: "nextPrev",
        command: "modifyTarget",
        partialArg: {
          modifier: {
            type: "relativeScope",
            length: MISSING,
            offset: 3,
            direction: "forward",
            scopeType: MISSING,
          },
        },
      },
    ],
  },
];

suite("keyboard.getAcceptableTokenTypes", () => {
  let parser: Parser;
  setup(() => {
    parser = new Parser(Grammar.fromCompiled(grammar));
  });

  testCases.forEach(({ tokens, expected }) => {
    test(`after \`${stringifyTokens(tokens)}\``, () => {
      parser.feed(tokens);
      for (const value of expected) {
        // filter by type first for shorter error messages
        const candidates = getAcceptableTokenTypes(parser).filter(
          ({ type }) => type === value.type,
        );
        const fullValue = {
          type: value.type,
          command: value.command,
          partialArg: {
            type: value.command,
            arg: value.partialArg,
          },
        };
        assert(
          candidates.some((result) => isEqual(result, fullValue)),
          "Relevant candidates (note that symbols will be missing):\n" +
            JSON.stringify(candidates, null, 2),
        );
      }
    });
  });
});
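An aside on the "symbols will be missing" note in the assertion message above (a sketch, not part of the commit): `JSON.stringify` silently drops symbol-valued properties, so the `MISSING`/`NEXT` sentinels won't appear in the printed candidates even though `isEqual` still compares them.

```ts
const MISSING = Symbol("missing");

// Symbol values are omitted from JSON output, so this prints {"offset":1}.
console.log(JSON.stringify({ scopeType: MISSING, offset: 1 }));
```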
Lines changed: 100 additions & 30 deletions
@@ -1,9 +1,28 @@
 import nearley, { State } from "nearley";
-import { isEqual } from "lodash";
+import { isEqual, times } from "lodash";
 import { CommandRulePostProcessor } from "./CommandRulePostProcessor";
-import { UniqueWorkQueue } from "./UniqueWorkQueue";
-import { uniqWithHash } from "@cursorless/common";
+import { DefaultMap, uniqWithHash } from "@cursorless/common";
 import { KeyboardCommandHandler } from "../KeyboardCommandHandler";
+import { TokenType } from "../TokenTypeHelpers";
+
+export interface AcceptableTokenType {
+  /**
+   * The token type, eg "color".
+   */
+  type: TokenType;
+
+  /**
+   * The command that wants the token type
+   */
+  command: keyof KeyboardCommandHandler;
+
+  /**
+   * A partial argument for the command that wants the token type, where
+   * {@link NEXT} indicates where the next token would go and {@link MISSING}
+   * indicates arguments that haven't been typed yet.
+   */
+  partialArg: any;
+}
 
 /**
  * Given a parser, returns a list of acceptable token types at the current state
@@ -17,18 +36,22 @@ import { KeyboardCommandHandler } from "../KeyboardCommandHandler";
  * @returns A list of acceptable token types, along with information about which
  * top-level rules want them
  */
-export function getAcceptableTokenTypes(parser: nearley.Parser) {
+export function getAcceptableTokenTypes(
+  parser: nearley.Parser,
+): AcceptableTokenType[] {
   return uniqWithHash(
     parser.table[parser.table.length - 1].scannable.flatMap(
       (scannableState) => {
         /** The token type */
         const { type } = scannableState.rule.symbols[scannableState.dot];
 
-        return getRootStates(scannableState).map((root) => ({
-          type,
-          command: getMetadata(root).type,
-          partialArg: computePartialArg(root),
-        }));
+        return computeRootStatePartialArgs(scannableState).map(
+          ({ state, partialArg }) => ({
+            type,
+            command: getMetadata(state).type,
+            partialArg,
+          }),
+        );
       },
     ),
     isEqual,
@@ -43,39 +66,86 @@ function getMetadata<T extends keyof KeyboardCommandHandler>(
     .metadata;
 }
 
+// Prevent infinite recursion by limiting the number of times we visit a state
+// to 3. Note that this is a pretty arbitrary number, and in theory we could
+// need to visit the same state more than 3 times. However, in practice, we
+// don't expect to need to visit the same state more than 3 times.
+const MAX_VISITS = 3;
+
+/** Indicates that the given token hasn't been typed yet */
+export const MISSING = Symbol("missing");
+
+/** The next token to be consumed */
+export const NEXT = Symbol("next");
+
 /**
- * Given a state, returns all root states that are reachable from it. We use this
- * to find out which top-level rules want a given token type.
+ * Given a state, returns all root states that are reachable from it, including
+ * partially filled out arguments to the given rule. We use this to find out
+ * which top-level rules want a given token type.
  *
  * @param state The state to get the root states of
+ * @param lastSymbol The partially filled out symbol that is currently being
+ * constructed
+ * @param visitCounts A map of states to the number of times they've been
+ * visited. We use this to prevent infinite recursion
+ * @param roots The list of root states
  * @returns A list of root states
  */
-function getRootStates(state: nearley.State) {
-  /** A queue of states to process; ensures we don't try to process state twice */
-  const queue = new UniqueWorkQueue(state);
-  const roots: State[] = [];
+function computeRootStatePartialArgs(
+  state: nearley.State,
+  lastSymbol: any = NEXT,
+  visitCounts = new DefaultMap<State, number>(() => 0),
+  roots: { state: State; partialArg: any }[] = [],
+) {
+  const visitCount = visitCounts.get(state);
+  if (visitCount > MAX_VISITS) {
+    return roots;
+  }
+  visitCounts.set(state, visitCount + 1);
+
+  let partialArg: any;
+  try {
+    const argList = [...getCompletedSymbols(state), lastSymbol];
+    argList.push(
+      ...times(state.rule.symbols.length - argList.length, () => MISSING),
+    );
+    partialArg = state.rule.postprocess?.(argList) ?? argList;
+  } catch (err) {
+    // If we can't construct the partial argument because the rule's postprocess
+    // wasn't designed to handle partial arguments, then we just replace it with
+    // MISSING
+    partialArg = MISSING;
+  }
 
-  for (const state of queue) {
-    queue.push(...state.wantedBy);
+  if (state.wantedBy.length === 0) {
+    roots.push({ state, partialArg });
+  }
 
-    if (state.wantedBy.length === 0) {
-      roots.push(state);
-    }
+  for (const parent of state.wantedBy) {
+    // If we're not a root state, we recurse on all states that want this state,
+    // passing them our partial argument so that they can use it when constructing
+    // their partial argument
+    computeRootStatePartialArgs(parent, partialArg, visitCounts, roots);
   }
 
   return roots;
 }
 
 /**
- * Given a root state, returns a partial argument for the command that the state
- * represents. We could use this info to display which arguments have already
- * been filled out while a user is typing a command.
- * @param state A root state
- * @returns A partial argument for the command that the state represents
+ * Given a state, returns a list of the symbols that have already been
+ * completed.
+ *
+ * @param state A state
+ * @returns A list of completed symbols
  */
-function computePartialArg<T extends keyof KeyboardCommandHandler>(
-  _state: nearley.State,
-): Partial<Record<T, any>> {
-  // FIXME: Fill this out
-  return {};
+function getCompletedSymbols(state: nearley.State) {
+  let currentState = state;
  const completedSymbols: any[] = [];
+
+  while (currentState.dot > 0) {
+    completedSymbols.push(currentState.right!.data);
+    currentState = currentState.left!;
+  }
+
+  return completedSymbols;
 }
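For illustration, a minimal standalone sketch of the padding step in `computeRootStatePartialArgs` above; the four-symbol rule and the two completed values are made up for this example:

```ts
import { times } from "lodash";

const MISSING = Symbol("missing"); // argument not typed yet
const NEXT = Symbol("next"); // where the next token would go

// Hypothetical rule with four symbols, of which two have already been completed.
const completedSymbols: unknown[] = ["relativeScope", 3];
const ruleSymbolCount = 4;

// Completed symbols, then the symbol currently being scanned...
const argList: unknown[] = [...completedSymbols, NEXT];
// ...then MISSING for every symbol that hasn't been typed yet.
argList.push(...times(ruleSymbolCount - argList.length, () => MISSING));

// => ["relativeScope", 3, Symbol(next), Symbol(missing)]
console.log(argList);
```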

packages/cursorless-vscode/src/keyboard/grammar/grammar.test.ts

Lines changed: 1 addition & 12 deletions
@@ -4,6 +4,7 @@ import assert from "assert";
 import { KeyDescriptor } from "../TokenTypeHelpers";
 import { KeyboardCommandHandler } from "../KeyboardCommandHandler";
 import { KeyboardCommand } from "../KeyboardCommandTypeHelpers";
+import { stringifyTokens } from "./stringifyTokens";
 
 interface TestCase {
   tokens: KeyDescriptor[];
@@ -158,15 +159,3 @@ suite("keyboard grammar", () => {
     });
   });
 });
-
-function stringifyTokens(tokens: any[]) {
-  return tokens
-    .map((token) => {
-      let ret = token.type;
-      if (token.value != null) {
-        ret += `:${JSON.stringify(token.value)}`;
-      }
-      return ret;
-    })
-    .join(" ");
-}
Lines changed: 11 additions & 0 deletions (new file)
export function stringifyTokens(tokens: any[]) {
  return tokens
    .map((token) => {
      let ret = token.type;
      if (token.value != null) {
        ret += `:${JSON.stringify(token.value)}`;
      }
      return ret;
    })
    .join(" ");
}
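For reference, a small usage sketch of the extracted helper (the tokens here are made up, mirroring the shapes used in the test suites above):

```ts
import { stringifyTokens } from "./stringifyTokens";

// Builds readable test names from a token sequence.
stringifyTokens([{ type: "digit", value: 3 }, { type: "nextPrev" }]);
// => "digit:3 nextPrev"
```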
