Commit eb68391

pyoor and saschanaz authored
feat: allow adding additional members to production parse methods (#745)
* feat: allow adding additional members to production parse methods
* feat: add 'extensions' option for extending existing productions
* fix: remove unnecessary spread operator
  Co-authored-by: Kagami Sascha Rosylight <[email protected]>
* refactor: rename extension 'callback-interface' to callbackInterface
* test: improve extension parsing tests
* docs: fix up jsdoc definition for ParserOptions
* test: remove use strict
* test: merge extension test into custom-production
* test: replace customProduction with top-level CustomAttribute
* test: remove extension argument from collection utility
* docs: normalize use of Token import
* test: fix import of expect function
* docs: mark args as any. This is also due to microsoft/TypeScript#4628, which prevents changing the signature of static methods on inherited classes.
* docs: fix path to container.js
* refactor: remove unnecessary spread operator
* docs: fix jsdoc types
  Co-authored-by: Kagami Sascha Rosylight <[email protected]>
* docs: fix jsdoc types
  Co-authored-by: Kagami Sascha Rosylight <[email protected]>
* fix: remove inheritance attribute from CallbackInterface

---------

Co-authored-by: Kagami Sascha Rosylight <[email protected]>
1 parent 0e0cdc5 commit eb68391
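
For orientation before the per-file diffs: parse() now accepts an extensions option, and each entry's extMembers tuples are spliced ahead of the production's built-in allowedMembers. A minimal sketch of a call, assuming a CustomAttribute member production like the one defined in test/custom-production.js at the end of this diff; the import path and IDL snippet are illustrative only:

import { parse } from "webidl2";
// Hypothetical module path; CustomAttribute is the example member production
// (a class with a static parse(tokeniser) method) from test/custom-production.js below.
import { CustomAttribute } from "./custom-attribute.js";

const ast = parse(
  `interface Foo {
  custom long bar;
};`,
  {
    concrete: true,
    // Each extMembers entry is an AllowedMember tuple: [parserCallback, options?].
    extensions: { interface: { extMembers: [[CustomAttribute.parse]] } },
  }
);
// As asserted in the new test: ast[0].members[0].type === "custom attribute"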

File tree: 9 files changed (+155, -75 lines)


lib/productions/callback-interface.js

Lines changed: 5 additions & 2 deletions
@@ -5,8 +5,11 @@ import { Constant } from "./constant.js";
 export class CallbackInterface extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
+   * @param {*} callback
+   * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
    */
-  static parse(tokeniser, callback, { partial = null } = {}) {
+  static parse(tokeniser, callback, { extMembers = [] } = {}) {
     const tokens = { callback };
     tokens.base = tokeniser.consume("interface");
     if (!tokens.base) {
@@ -16,8 +19,8 @@ export class CallbackInterface extends Container {
       tokeniser,
       new CallbackInterface({ source: tokeniser.source, tokens }),
       {
-        inheritable: !partial,
         allowedMembers: [
+          ...extMembers,
           [Constant.parse],
           [Operation.parse, { regular: true }],
         ],

lib/productions/container.js

Lines changed: 13 additions & 0 deletions
@@ -16,6 +16,19 @@ function inheritance(tokeniser) {
   return { colon, inheritance };
 }
 
+/**
+ * Parser callback.
+ * @callback ParserCallback
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
+ * @param {...*} args
+ */
+
+/**
+ * A parser callback and optional option object.
+ * @typedef AllowedMember
+ * @type {[ParserCallback, object?]}
+ */
+
 export class Container extends Base {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser

lib/productions/dictionary.js

Lines changed: 3 additions & 2 deletions
@@ -5,9 +5,10 @@ export class Dictionary extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
    * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
    * @param {import("../tokeniser.js").Token} [options.partial]
    */
-  static parse(tokeniser, { partial } = {}) {
+  static parse(tokeniser, { extMembers = [], partial } = {}) {
     const tokens = { partial };
     tokens.base = tokeniser.consume("dictionary");
     if (!tokens.base) {
@@ -18,7 +19,7 @@
       new Dictionary({ source: tokeniser.source, tokens }),
       {
         inheritable: !partial,
-        allowedMembers: [[Field.parse]],
+        allowedMembers: [...extMembers, [Field.parse]],
       }
     );
   }

lib/productions/interface.js

Lines changed: 6 additions & 1 deletion
@@ -34,15 +34,20 @@ function static_member(tokeniser) {
 export class Interface extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
+   * @param {import("../tokeniser.js").Token} base
+   * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
+   * @param {import("../tokeniser.js").Token|null} [options.partial]
    */
-  static parse(tokeniser, base, { partial = null } = {}) {
+  static parse(tokeniser, base, { extMembers = [], partial = null } = {}) {
     const tokens = { partial, base };
     return Container.parse(
       tokeniser,
       new Interface({ source: tokeniser.source, tokens }),
       {
         inheritable: !partial,
         allowedMembers: [
+          ...extMembers,
           [Constant.parse],
           [Constructor.parse],
           [static_member],

lib/productions/mixin.js

Lines changed: 5 additions & 5 deletions
@@ -6,14 +6,13 @@ import { stringifier } from "./helpers.js";
 
 export class Mixin extends Container {
   /**
-   * @typedef {import("../tokeniser.js").Token} Token
-   *
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
-   * @param {Token} base
+   * @param {import("../tokeniser.js").Token} base
    * @param {object} [options]
-   * @param {Token} [options.partial]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
+   * @param {import("../tokeniser.js").Token} [options.partial]
    */
-  static parse(tokeniser, base, { partial } = {}) {
+  static parse(tokeniser, base, { extMembers = [], partial } = {}) {
     const tokens = { partial, base };
     tokens.mixin = tokeniser.consume("mixin");
     if (!tokens.mixin) {
@@ -24,6 +23,7 @@
       new Mixin({ source: tokeniser.source, tokens }),
       {
         allowedMembers: [
+          ...extMembers,
           [Constant.parse],
           [stringifier],
           [Attribute.parse, { noInherit: true }],

lib/productions/namespace.js

Lines changed: 3 additions & 1 deletion
@@ -9,9 +9,10 @@ export class Namespace extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
    * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
    * @param {import("../tokeniser.js").Token} [options.partial]
    */
-  static parse(tokeniser, { partial } = {}) {
+  static parse(tokeniser, { extMembers = [], partial } = {}) {
     const tokens = { partial };
     tokens.base = tokeniser.consume("namespace");
     if (!tokens.base) {
@@ -22,6 +23,7 @@
       new Namespace({ source: tokeniser.source, tokens }),
       {
         allowedMembers: [
+          ...extMembers,
           [Attribute.parse, { noInherit: true, readonly: true }],
           [Constant.parse],
           [Operation.parse, { regular: true }],

lib/productions/operation.js

Lines changed: 2 additions & 4 deletions
@@ -9,12 +9,10 @@ import { validationError } from "../error.js";
 
 export class Operation extends Base {
   /**
-   * @typedef {import("../tokeniser.js").Token} Token
-   *
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
    * @param {object} [options]
-   * @param {Token} [options.special]
-   * @param {Token} [options.regular]
+   * @param {import("../tokeniser.js").Token} [options.special]
+   * @param {import("../tokeniser.js").Token} [options.regular]
    */
   static parse(tokeniser, { special, regular } = {}) {
     const tokens = { special };

lib/webidl2.js

Lines changed: 40 additions & 18 deletions
@@ -12,11 +12,22 @@ import { CallbackInterface } from "./productions/callback-interface.js";
 import { autoParenter } from "./productions/helpers.js";
 import { Eof } from "./productions/token.js";
 
+/** @typedef {'callbackInterface'|'dictionary'|'interface'|'mixin'|'namespace'} ExtendableInterfaces */
+/** @typedef {{ extMembers?: import("./productions/container.js").AllowedMember[]}} Extension */
+/** @typedef {Partial<Record<ExtendableInterfaces, Extension>>} Extensions */
+
+/**
+ * Parser options.
+ * @typedef {Object} ParserOptions
+ * @property {string} [sourceName]
+ * @property {boolean} [concrete]
+ * @property {Function[]} [productions]
+ * @property {Extensions} [extensions]
+ */
+
 /**
  * @param {Tokeniser} tokeniser
- * @param {object} options
- * @param {boolean} [options.concrete]
- * @param {Function[]} [options.productions]
+ * @param {ParserOptions} options
  */
 function parseByTokens(tokeniser, options) {
   const source = tokeniser.source;
@@ -33,28 +44,42 @@ function parseByTokens(tokeniser, options) {
     const callback = consume("callback");
     if (!callback) return;
     if (tokeniser.probe("interface")) {
-      return CallbackInterface.parse(tokeniser, callback);
+      return CallbackInterface.parse(tokeniser, callback, {
+        ...options?.extensions?.callbackInterface,
+      });
     }
     return CallbackFunction.parse(tokeniser, callback);
   }
 
   function interface_(opts) {
     const base = consume("interface");
     if (!base) return;
-    const ret =
-      Mixin.parse(tokeniser, base, opts) ||
-      Interface.parse(tokeniser, base, opts) ||
-      error("Interface has no proper body");
-    return ret;
+    return (
+      Mixin.parse(tokeniser, base, {
+        ...opts,
+        ...options?.extensions?.mixin,
+      }) ||
+      Interface.parse(tokeniser, base, {
+        ...opts,
+        ...options?.extensions?.interface,
+      }) ||
+      error("Interface has no proper body")
+    );
   }
 
   function partial() {
     const partial = consume("partial");
     if (!partial) return;
     return (
-      Dictionary.parse(tokeniser, { partial }) ||
+      Dictionary.parse(tokeniser, {
+        partial,
+        ...options?.extensions?.dictionary,
+      }) ||
       interface_({ partial }) ||
-      Namespace.parse(tokeniser, { partial }) ||
+      Namespace.parse(tokeniser, {
+        partial,
+        ...options?.extensions?.namespace,
+      }) ||
       error("Partial doesn't apply to anything")
    );
   }
@@ -73,11 +98,11 @@ function parseByTokens(tokeniser, options) {
       callback() ||
       interface_() ||
       partial() ||
-      Dictionary.parse(tokeniser) ||
+      Dictionary.parse(tokeniser, options?.extensions?.dictionary) ||
       Enum.parse(tokeniser) ||
       Typedef.parse(tokeniser) ||
       Includes.parse(tokeniser) ||
-      Namespace.parse(tokeniser)
+      Namespace.parse(tokeniser, options?.extensions?.namespace)
     );
   }
 
@@ -100,18 +125,15 @@ function parseByTokens(tokeniser, options) {
     }
     return defs;
   }
+
   const res = definitions();
   if (tokeniser.position < source.length) error("Unrecognised tokens");
   return res;
 }
 
 /**
  * @param {string} str
- * @param {object} [options]
- * @param {*} [options.sourceName]
- * @param {boolean} [options.concrete]
- * @param {Function[]} [options.productions]
- * @return {import("./productions/base.js").Base[]}
+ * @param {ParserOptions} [options]
  */
 export function parse(str, options = {}) {
   const tokeniser = new Tokeniser(str);
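
Taken together: extensions is a partial record keyed by the five extendable production names, and each value may carry extMembers. A sketch of the full shape, where someMember is a hypothetical parser callback and idlText a placeholder string:

// Hypothetical member parser; anything matching the ParserCallback signature
// (taking the tokeniser and returning a production instance, or undefined to decline) fits.
function someMember(tokeniser) {
  /* consume tokens and build a production, or return undefined */
}

parse(idlText, {
  concrete: true,
  extensions: {
    callbackInterface: { extMembers: [[someMember]] },
    dictionary: { extMembers: [[someMember]] },
    interface: { extMembers: [[someMember]] },
    mixin: { extMembers: [[someMember]] },
    namespace: { extMembers: [[someMember]] },
  },
});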

test/custom-production.js

Lines changed: 78 additions & 42 deletions
@@ -1,51 +1,87 @@
-"use strict";
-
 import { expect } from "expect";
-import { parse, write } from "webidl2";
 
-describe("Writer template functions", () => {
-  const customIdl = `
-interface X {};
-custom Y;
-`;
+import { parse, write } from "webidl2";
+import { Base } from "../lib/productions/base.js";
+import {
+  autoParenter,
+  type_with_extended_attributes,
+} from "../lib/productions/helpers.js";
 
-  /**
-   * @param {import("../lib/tokeniser").Tokeniser} tokeniser
-   */
-  const customProduction = (tokeniser) => {
-    const { position } = tokeniser;
-    const base = tokeniser.consumeIdentifier("custom");
-    if (!base) {
-      return;
-    }
-    const tokens = { base };
-    tokens.name = tokeniser.consumeKind("identifier");
-    tokens.termination = tokeniser.consume(";");
-    if (!tokens.name || !tokens.termination) {
-      tokeniser.unconsume(position);
+class CustomAttribute extends Base {
+  static parse(tokeniser) {
+    const start_position = tokeniser.position;
+    const tokens = {};
+    const ret = autoParenter(
+      new CustomAttribute({ source: tokeniser.source, tokens })
+    );
+    tokens.base = tokeniser.consumeIdentifier("custom");
+    if (!tokens.base) {
+      tokeniser.unconsume(start_position);
       return;
     }
-    return {
-      type: "custom",
-      tokens,
-      /** @param {import("../lib/writer.js").Writer} w */
-      write(w) {
-        return w.ts.wrap([
-          w.token(this.tokens.base),
-          w.token(this.tokens.name),
-          w.token(this.tokens.termination),
-        ]);
-      },
-    };
-  };
+    ret.idlType =
+      type_with_extended_attributes(tokeniser, "attribute-type") ||
+      tokeniser.error("Attribute lacks a type");
+    tokens.name =
+      tokeniser.consumeKind("identifier") ||
+      tokeniser.error("Attribute lacks a name");
+    tokens.termination =
+      tokeniser.consume(";") ||
+      tokeniser.error("Unterminated attribute, expected `;`");
+    return ret.this;
+  }
 
-  const result = parse(customIdl, {
-    productions: [customProduction],
-    concrete: true,
+  get type() {
+    return "custom attribute";
+  }
+
+  write(w) {
+    const { parent } = this;
+    return w.ts.definition(
+      w.ts.wrap([
+        this.extAttrs.write(w),
+        w.token(this.tokens.base),
+        w.ts.type(this.idlType.write(w)),
+        w.name_token(this.tokens.name, { data: this, parent }),
+        w.token(this.tokens.termination),
+      ]),
+      { data: this, parent }
+    );
+  }
+}
+
+describe("Parse IDLs using custom productions", () => {
+  it("Parse and rewrite top-level custom attribute", () => {
+    const customIdl = "custom long bar;";
+    const result = parse(customIdl, {
+      productions: [CustomAttribute.parse],
+      concrete: true,
+    });
+    expect(result[0].type).toBe("custom attribute");
+
+    const rewritten = write(result);
+    expect(rewritten).toBe(customIdl);
   });
-  expect(result[0].type).toBe("interface");
-  expect(result[1].type).toBe("custom");
+});
 
-  const rewritten = write(result);
-  expect(rewritten).toBe(customIdl);
+describe("Parse IDLs using custom extensions", () => {
+  [
+    ["callback interface", "callbackInterface"],
+    ["dictionary", "dictionary"],
+    ["interface", "interface"],
+    ["interface mixin", "mixin"],
+    ["namespace", "namespace"],
+  ].forEach(([type, key]) => {
+    it(`Attribute on ${type}`, () => {
+      const customIdl = `${type} Foo {
+        custom long bar;
+      };`;
+      const result = parse(customIdl, {
+        concrete: true,
+        extensions: { [key]: { extMembers: [[CustomAttribute.parse]] } },
+      });
+      expect(result[0].type).toBe(type);
+      expect(result[0].members[0].type).toBe("custom attribute");
+    });
+  });
 });
