Skip to content

Commit

Permalink
feat: allow adding additional members to production parse methods (#745
Browse files Browse the repository at this point in the history
)

* feat: allow adding additional members to production parse methods

* feat: add 'extensions' option for extending existing productions

* fix: remove unnecessary spread operator

Co-authored-by: Kagami Sascha Rosylight <saschanaz@outlook.com>

* refactor: rename extension 'callback-interface' to callbackInterface

* test: improve extension parsing tests

* docs: fix up jsdoc definition for ParserOptions

* test: remove use strict

* test: merge extension test into custom-production

* test: replace customProduction with top-level CustomAttribute

* test: remove extension argument from collection utility

* docs: normalize use of Token import

* test: fix import of expect function

* docs: mark args as any

This is also due to microsoft/TypeScript#4628
which prevents changing the signature of static methods on inherited
classes.

* docs: fix path to container.js

* refactor: remove unnecessary spread operator

* docs: fix jsdoc types

Co-authored-by: Kagami Sascha Rosylight <saschanaz@outlook.com>

* docs: fix jsdoc types

Co-authored-by: Kagami Sascha Rosylight <saschanaz@outlook.com>

* fix: remove inheritance attribute from CallbackInterface

---------

Co-authored-by: Kagami Sascha Rosylight <saschanaz@outlook.com>
  • Loading branch information
pyoor and saschanaz committed Jun 21, 2023
1 parent 0e0cdc5 commit eb68391
Show file tree
Hide file tree
Showing 9 changed files with 155 additions and 75 deletions.
7 changes: 5 additions & 2 deletions lib/productions/callback-interface.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,11 @@ import { Constant } from "./constant.js";
export class CallbackInterface extends Container {
/**
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {*} callback
* @param {object} [options]
* @param {import("./container.js").AllowedMember[]} [options.extMembers]
*/
static parse(tokeniser, callback, { partial = null } = {}) {
static parse(tokeniser, callback, { extMembers = [] } = {}) {
const tokens = { callback };
tokens.base = tokeniser.consume("interface");
if (!tokens.base) {
Expand All @@ -16,8 +19,8 @@ export class CallbackInterface extends Container {
tokeniser,
new CallbackInterface({ source: tokeniser.source, tokens }),
{
inheritable: !partial,
allowedMembers: [
...extMembers,
[Constant.parse],
[Operation.parse, { regular: true }],
],
Expand Down
13 changes: 13 additions & 0 deletions lib/productions/container.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,19 @@ function inheritance(tokeniser) {
return { colon, inheritance };
}

/**
* Parser callback.
* @callback ParserCallback
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {...*} args
*/

/**
* A parser callback and optional option object.
* @typedef AllowedMember
* @type {[ParserCallback, object?]}
*/

export class Container extends Base {
/**
* @param {import("../tokeniser.js").Tokeniser} tokeniser
Expand Down
5 changes: 3 additions & 2 deletions lib/productions/dictionary.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,10 @@ export class Dictionary extends Container {
/**
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {import("./container.js").AllowedMember[]} [options.extMembers]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
static parse(tokeniser, { partial } = {}) {
static parse(tokeniser, { extMembers = [], partial } = {}) {
const tokens = { partial };
tokens.base = tokeniser.consume("dictionary");
if (!tokens.base) {
Expand All @@ -18,7 +19,7 @@ export class Dictionary extends Container {
new Dictionary({ source: tokeniser.source, tokens }),
{
inheritable: !partial,
allowedMembers: [[Field.parse]],
allowedMembers: [...extMembers, [Field.parse]],
}
);
}
Expand Down
7 changes: 6 additions & 1 deletion lib/productions/interface.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,15 +34,20 @@ function static_member(tokeniser) {
export class Interface extends Container {
/**
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Token} base
* @param {object} [options]
* @param {import("./container.js").AllowedMember[]} [options.extMembers]
* @param {import("../tokeniser.js").Token|null} [options.partial]
*/
static parse(tokeniser, base, { partial = null } = {}) {
static parse(tokeniser, base, { extMembers = [], partial = null } = {}) {
const tokens = { partial, base };
return Container.parse(
tokeniser,
new Interface({ source: tokeniser.source, tokens }),
{
inheritable: !partial,
allowedMembers: [
...extMembers,
[Constant.parse],
[Constructor.parse],
[static_member],
Expand Down
10 changes: 5 additions & 5 deletions lib/productions/mixin.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,13 @@ import { stringifier } from "./helpers.js";

export class Mixin extends Container {
/**
* @typedef {import("../tokeniser.js").Token} Token
*
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {Token} base
* @param {import("../tokeniser.js").Token} base
* @param {object} [options]
* @param {Token} [options.partial]
* @param {import("./container.js").AllowedMember[]} [options.extMembers]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
static parse(tokeniser, base, { partial } = {}) {
static parse(tokeniser, base, { extMembers = [], partial } = {}) {
const tokens = { partial, base };
tokens.mixin = tokeniser.consume("mixin");
if (!tokens.mixin) {
Expand All @@ -24,6 +23,7 @@ export class Mixin extends Container {
new Mixin({ source: tokeniser.source, tokens }),
{
allowedMembers: [
...extMembers,
[Constant.parse],
[stringifier],
[Attribute.parse, { noInherit: true }],
Expand Down
4 changes: 3 additions & 1 deletion lib/productions/namespace.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,10 @@ export class Namespace extends Container {
/**
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {import("./container.js").AllowedMember[]} [options.extMembers]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
static parse(tokeniser, { partial } = {}) {
static parse(tokeniser, { extMembers = [], partial } = {}) {
const tokens = { partial };
tokens.base = tokeniser.consume("namespace");
if (!tokens.base) {
Expand All @@ -22,6 +23,7 @@ export class Namespace extends Container {
new Namespace({ source: tokeniser.source, tokens }),
{
allowedMembers: [
...extMembers,
[Attribute.parse, { noInherit: true, readonly: true }],
[Constant.parse],
[Operation.parse, { regular: true }],
Expand Down
6 changes: 2 additions & 4 deletions lib/productions/operation.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,10 @@ import { validationError } from "../error.js";

export class Operation extends Base {
/**
* @typedef {import("../tokeniser.js").Token} Token
*
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {Token} [options.special]
* @param {Token} [options.regular]
* @param {import("../tokeniser.js").Token} [options.special]
* @param {import("../tokeniser.js").Token} [options.regular]
*/
static parse(tokeniser, { special, regular } = {}) {
const tokens = { special };
Expand Down
58 changes: 40 additions & 18 deletions lib/webidl2.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,22 @@ import { CallbackInterface } from "./productions/callback-interface.js";
import { autoParenter } from "./productions/helpers.js";
import { Eof } from "./productions/token.js";

/** @typedef {'callbackInterface'|'dictionary'|'interface'|'mixin'|'namespace'} ExtendableInterfaces */
/** @typedef {{ extMembers?: import("./productions/container.js").AllowedMember[]}} Extension */
/** @typedef {Partial<Record<ExtendableInterfaces, Extension>>} Extensions */

/**
* Parser options.
* @typedef {Object} ParserOptions
* @property {string} [sourceName]
* @property {boolean} [concrete]
* @property {Function[]} [productions]
* @property {Extensions} [extensions]
*/

/**
* @param {Tokeniser} tokeniser
* @param {object} options
* @param {boolean} [options.concrete]
* @param {Function[]} [options.productions]
* @param {ParserOptions} options
*/
function parseByTokens(tokeniser, options) {
const source = tokeniser.source;
Expand All @@ -33,28 +44,42 @@ function parseByTokens(tokeniser, options) {
const callback = consume("callback");
if (!callback) return;
if (tokeniser.probe("interface")) {
return CallbackInterface.parse(tokeniser, callback);
return CallbackInterface.parse(tokeniser, callback, {
...options?.extensions?.callbackInterface,
});
}
return CallbackFunction.parse(tokeniser, callback);
}

function interface_(opts) {
const base = consume("interface");
if (!base) return;
const ret =
Mixin.parse(tokeniser, base, opts) ||
Interface.parse(tokeniser, base, opts) ||
error("Interface has no proper body");
return ret;
return (
Mixin.parse(tokeniser, base, {
...opts,
...options?.extensions?.mixin,
}) ||
Interface.parse(tokeniser, base, {
...opts,
...options?.extensions?.interface,
}) ||
error("Interface has no proper body")
);
}

function partial() {
const partial = consume("partial");
if (!partial) return;
return (
Dictionary.parse(tokeniser, { partial }) ||
Dictionary.parse(tokeniser, {
partial,
...options?.extensions?.dictionary,
}) ||
interface_({ partial }) ||
Namespace.parse(tokeniser, { partial }) ||
Namespace.parse(tokeniser, {
partial,
...options?.extensions?.namespace,
}) ||
error("Partial doesn't apply to anything")
);
}
Expand All @@ -73,11 +98,11 @@ function parseByTokens(tokeniser, options) {
callback() ||
interface_() ||
partial() ||
Dictionary.parse(tokeniser) ||
Dictionary.parse(tokeniser, options?.extensions?.dictionary) ||
Enum.parse(tokeniser) ||
Typedef.parse(tokeniser) ||
Includes.parse(tokeniser) ||
Namespace.parse(tokeniser)
Namespace.parse(tokeniser, options?.extensions?.namespace)
);
}

Expand All @@ -100,18 +125,15 @@ function parseByTokens(tokeniser, options) {
}
return defs;
}

const res = definitions();
if (tokeniser.position < source.length) error("Unrecognised tokens");
return res;
}

/**
* @param {string} str
* @param {object} [options]
* @param {*} [options.sourceName]
* @param {boolean} [options.concrete]
* @param {Function[]} [options.productions]
* @return {import("./productions/base.js").Base[]}
* @param {ParserOptions} [options]
*/
export function parse(str, options = {}) {
const tokeniser = new Tokeniser(str);
Expand Down
120 changes: 78 additions & 42 deletions test/custom-production.js
Original file line number Diff line number Diff line change
@@ -1,51 +1,87 @@
"use strict";

import { expect } from "expect";
import { parse, write } from "webidl2";

describe("Writer template functions", () => {
const customIdl = `
interface X {};
custom Y;
`;
import { parse, write } from "webidl2";
import { Base } from "../lib/productions/base.js";
import {
autoParenter,
type_with_extended_attributes,
} from "../lib/productions/helpers.js";

/**
* @param {import("../lib/tokeniser").Tokeniser} tokeniser
*/
const customProduction = (tokeniser) => {
const { position } = tokeniser;
const base = tokeniser.consumeIdentifier("custom");
if (!base) {
return;
}
const tokens = { base };
tokens.name = tokeniser.consumeKind("identifier");
tokens.termination = tokeniser.consume(";");
if (!tokens.name || !tokens.termination) {
tokeniser.unconsume(position);
class CustomAttribute extends Base {
static parse(tokeniser) {
const start_position = tokeniser.position;
const tokens = {};
const ret = autoParenter(
new CustomAttribute({ source: tokeniser.source, tokens })
);
tokens.base = tokeniser.consumeIdentifier("custom");
if (!tokens.base) {
tokeniser.unconsume(start_position);
return;
}
return {
type: "custom",
tokens,
/** @param {import("../lib/writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.token(this.tokens.base),
w.token(this.tokens.name),
w.token(this.tokens.termination),
]);
},
};
};
ret.idlType =
type_with_extended_attributes(tokeniser, "attribute-type") ||
tokeniser.error("Attribute lacks a type");
tokens.name =
tokeniser.consumeKind("identifier") ||
tokeniser.error("Attribute lacks a name");
tokens.termination =
tokeniser.consume(";") ||
tokeniser.error("Unterminated attribute, expected `;`");
return ret.this;
}

const result = parse(customIdl, {
productions: [customProduction],
concrete: true,
get type() {
return "custom attribute";
}

write(w) {
const { parent } = this;
return w.ts.definition(
w.ts.wrap([
this.extAttrs.write(w),
w.token(this.tokens.base),
w.ts.type(this.idlType.write(w)),
w.name_token(this.tokens.name, { data: this, parent }),
w.token(this.tokens.termination),
]),
{ data: this, parent }
);
}
}

describe("Parse IDLs using custom productions", () => {
it("Parse and rewrite top-level custom attribute", () => {
const customIdl = "custom long bar;";
const result = parse(customIdl, {
productions: [CustomAttribute.parse],
concrete: true,
});
expect(result[0].type).toBe("custom attribute");

const rewritten = write(result);
expect(rewritten).toBe(customIdl);
});
expect(result[0].type).toBe("interface");
expect(result[1].type).toBe("custom");
});

const rewritten = write(result);
expect(rewritten).toBe(customIdl);
describe("Parse IDLs using custom extensions", () => {
[
["callback interface", "callbackInterface"],
["dictionary", "dictionary"],
["interface", "interface"],
["interface mixin", "mixin"],
["namespace", "namespace"],
].forEach(([type, key]) => {
it(`Attribute on ${type}`, () => {
const customIdl = `${type} Foo {
custom long bar;
};`;
const result = parse(customIdl, {
concrete: true,
extensions: { [key]: { extMembers: [[CustomAttribute.parse]] } },
});
expect(result[0].type).toBe(type);
expect(result[0].members[0].type).toBe("custom attribute");
});
});
});

0 comments on commit eb68391

Please sign in to comment.