diff --git a/lib/productions/callback-interface.js b/lib/productions/callback-interface.js
index da9dc96f..a22b0098 100644
--- a/lib/productions/callback-interface.js
+++ b/lib/productions/callback-interface.js
@@ -5,8 +5,11 @@ import { Constant } from "./constant.js";
 export class CallbackInterface extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
+   * @param {*} callback
+   * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
    */
-  static parse(tokeniser, callback, { partial = null } = {}) {
+  static parse(tokeniser, callback, { extMembers = [] } = {}) {
     const tokens = { callback };
     tokens.base = tokeniser.consume("interface");
     if (!tokens.base) {
@@ -16,8 +19,8 @@ export class CallbackInterface extends Container {
       tokeniser,
       new CallbackInterface({ source: tokeniser.source, tokens }),
       {
-        inheritable: !partial,
         allowedMembers: [
+          ...extMembers,
           [Constant.parse],
           [Operation.parse, { regular: true }],
         ],
diff --git a/lib/productions/container.js b/lib/productions/container.js
index d52dc509..8f1f22d5 100644
--- a/lib/productions/container.js
+++ b/lib/productions/container.js
@@ -16,6 +16,19 @@ function inheritance(tokeniser) {
   return { colon, inheritance };
 }
 
+/**
+ * Parser callback.
+ * @callback ParserCallback
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
+ * @param {...*} args
+ */
+
+/**
+ * A parser callback and optional option object.
+ * @typedef AllowedMember
+ * @type {[ParserCallback, object?]}
+ */
+
 export class Container extends Base {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
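(Reviewer note, not part of the patch.) An `AllowedMember` entry pairs a parser callback with an optional options object, the same shape the built-in `allowedMembers` lists use; `Container.parse` presumably spreads the options into the callback. A minimal sketch with in-repo import paths, as the tests use:

    import { Operation } from "../lib/productions/operation.js";
    import { Constant } from "../lib/productions/constant.js";

    /** @type {import("../lib/productions/container.js").AllowedMember[]} */
    const extMembers = [
      [Operation.parse, { regular: true }], // callback plus forwarded options
      [Constant.parse], // options object omitted
    ];
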
diff --git a/lib/productions/mixin.js b/lib/productions/mixin.js
index c3094e7c..19cce9f8 100644
--- a/lib/productions/mixin.js
+++ b/lib/productions/mixin.js
@@ -6,14 +6,13 @@ import { stringifier } from "./helpers.js";
 
 export class Mixin extends Container {
   /**
-   * @typedef {import("../tokeniser.js").Token} Token
-   *
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
-   * @param {Token} base
+   * @param {import("../tokeniser.js").Token} base
    * @param {object} [options]
-   * @param {Token} [options.partial]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
+   * @param {import("../tokeniser.js").Token} [options.partial]
    */
-  static parse(tokeniser, base, { partial } = {}) {
+  static parse(tokeniser, base, { extMembers = [], partial } = {}) {
     const tokens = { partial, base };
     tokens.mixin = tokeniser.consume("mixin");
     if (!tokens.mixin) {
@@ -24,6 +23,7 @@ export class Mixin extends Container {
       new Mixin({ source: tokeniser.source, tokens }),
       {
         allowedMembers: [
+          ...extMembers,
           [Constant.parse],
           [stringifier],
           [Attribute.parse, { noInherit: true }],
diff --git a/lib/productions/namespace.js b/lib/productions/namespace.js
index ef7c35f5..34fa6e49 100644
--- a/lib/productions/namespace.js
+++ b/lib/productions/namespace.js
@@ -9,9 +9,10 @@ export class Namespace extends Container {
   /**
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
    * @param {object} [options]
+   * @param {import("./container.js").AllowedMember[]} [options.extMembers]
    * @param {import("../tokeniser.js").Token} [options.partial]
    */
-  static parse(tokeniser, { partial } = {}) {
+  static parse(tokeniser, { extMembers = [], partial } = {}) {
     const tokens = { partial };
     tokens.base = tokeniser.consume("namespace");
     if (!tokens.base) {
@@ -22,6 +23,7 @@ export class Namespace extends Container {
       new Namespace({ source: tokeniser.source, tokens }),
       {
         allowedMembers: [
+          ...extMembers,
           [Attribute.parse, { noInherit: true, readonly: true }],
           [Constant.parse],
           [Operation.parse, { regular: true }],
diff --git a/lib/productions/operation.js b/lib/productions/operation.js
index b92c10aa..380ada5e 100644
--- a/lib/productions/operation.js
+++ b/lib/productions/operation.js
@@ -9,12 +9,10 @@ import { validationError } from "../error.js";
 
 export class Operation extends Base {
   /**
-   * @typedef {import("../tokeniser.js").Token} Token
-   *
    * @param {import("../tokeniser.js").Tokeniser} tokeniser
    * @param {object} [options]
-   * @param {Token} [options.special]
-   * @param {Token} [options.regular]
+   * @param {import("../tokeniser.js").Token} [options.special]
+   * @param {import("../tokeniser.js").Token} [options.regular]
    */
   static parse(tokeniser, { special, regular } = {}) {
     const tokens = { special };
diff --git a/lib/webidl2.js b/lib/webidl2.js
index 6129879b..26a020ad 100644
--- a/lib/webidl2.js
+++ b/lib/webidl2.js
@@ -12,11 +12,22 @@ import { CallbackInterface } from "./productions/callback-interface.js";
 import { autoParenter } from "./productions/helpers.js";
 import { Eof } from "./productions/token.js";
 
+/** @typedef {'callbackInterface'|'dictionary'|'interface'|'mixin'|'namespace'} ExtendableInterfaces */
+/** @typedef {{ extMembers?: import("./productions/container.js").AllowedMember[]}} Extension */
+/** @typedef {Partial<Record<ExtendableInterfaces, Extension>>} Extensions */
+
+/**
+ * Parser options.
+ * @typedef {Object} ParserOptions
+ * @property {string} [sourceName]
+ * @property {boolean} [concrete]
+ * @property {Function[]} [productions]
+ * @property {Extensions} [extensions]
+ */
+
 /**
  * @param {Tokeniser} tokeniser
- * @param {object} options
- * @param {boolean} [options.concrete]
- * @param {Function[]} [options.productions]
+ * @param {ParserOptions} options
  */
 function parseByTokens(tokeniser, options) {
   const source = tokeniser.source;
@@ -33,7 +44,9 @@ function parseByTokens(tokeniser, options) {
     const callback = consume("callback");
     if (!callback) return;
     if (tokeniser.probe("interface")) {
-      return CallbackInterface.parse(tokeniser, callback);
+      return CallbackInterface.parse(tokeniser, callback, {
+        ...options?.extensions?.callbackInterface,
+      });
     }
     return CallbackFunction.parse(tokeniser, callback);
   }
@@ -41,20 +54,32 @@ function parseByTokens(tokeniser, options) {
   function interface_(opts) {
     const base = consume("interface");
     if (!base) return;
-    const ret =
-      Mixin.parse(tokeniser, base, opts) ||
-      Interface.parse(tokeniser, base, opts) ||
-      error("Interface has no proper body");
-    return ret;
+    return (
+      Mixin.parse(tokeniser, base, {
+        ...opts,
+        ...options?.extensions?.mixin,
+      }) ||
+      Interface.parse(tokeniser, base, {
+        ...opts,
+        ...options?.extensions?.interface,
+      }) ||
+      error("Interface has no proper body")
+    );
   }
 
   function partial() {
     const partial = consume("partial");
     if (!partial) return;
     return (
-      Dictionary.parse(tokeniser, { partial }) ||
+      Dictionary.parse(tokeniser, {
+        partial,
+        ...options?.extensions?.dictionary,
+      }) ||
       interface_({ partial }) ||
-      Namespace.parse(tokeniser, { partial }) ||
+      Namespace.parse(tokeniser, {
+        partial,
+        ...options?.extensions?.namespace,
+      }) ||
       error("Partial doesn't apply to anything")
     );
   }
@@ -73,11 +98,11 @@ function parseByTokens(tokeniser, options) {
       callback() ||
       interface_() ||
       partial() ||
-      Dictionary.parse(tokeniser) ||
+      Dictionary.parse(tokeniser, options?.extensions?.dictionary) ||
       Enum.parse(tokeniser) ||
       Typedef.parse(tokeniser) ||
       Includes.parse(tokeniser) ||
-      Namespace.parse(tokeniser)
+      Namespace.parse(tokeniser, options?.extensions?.namespace)
     );
   }
 
@@ -100,6 +125,7 @@ function parseByTokens(tokeniser, options) {
     }
     return defs;
   }
+
   const res = definitions();
   if (tokeniser.position < source.length) error("Unrecognised tokens");
   return res;
@@ -106,12 +132,8 @@ function parseByTokens(tokeniser, options) {
 }
 
 /**
  * @param {string} str
- * @param {object} [options]
- * @param {*} [options.sourceName]
- * @param {boolean} [options.concrete]
- * @param {Function[]} [options.productions]
- * @return {import("./productions/base.js").Base[]}
+ * @param {ParserOptions} [options]
  */
 export function parse(str, options = {}) {
   const tokeniser = new Tokeniser(str);
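(Reviewer note, not part of the patch.) The `parse()` signature is unchanged; `extensions` is purely additive alongside the existing options. A hedged sketch of the full option bag as now typed, with illustrative values:

    import { parse } from "webidl2";

    const ast = parse("namespace N {};", {
      sourceName: "example.webidl", // assumption: labels errors, as before
      concrete: true,
      productions: [], // top-level custom productions, unchanged
      extensions: {
        // keys are limited to 'callbackInterface' | 'dictionary' |
        // 'interface' | 'mixin' | 'namespace'
        namespace: { extMembers: [] }, // AllowedMember tuples for namespace bodies
      },
    });
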
tokeniser.consumeIdentifier("custom"); - if (!base) { - return; - } - const tokens = { base }; - tokens.name = tokeniser.consumeKind("identifier"); - tokens.termination = tokeniser.consume(";"); - if (!tokens.name || !tokens.termination) { - tokeniser.unconsume(position); +class CustomAttribute extends Base { + static parse(tokeniser) { + const start_position = tokeniser.position; + const tokens = {}; + const ret = autoParenter( + new CustomAttribute({ source: tokeniser.source, tokens }) + ); + tokens.base = tokeniser.consumeIdentifier("custom"); + if (!tokens.base) { + tokeniser.unconsume(start_position); return; } - return { - type: "custom", - tokens, - /** @param {import("../lib/writer.js").Writer} w */ - write(w) { - return w.ts.wrap([ - w.token(this.tokens.base), - w.token(this.tokens.name), - w.token(this.tokens.termination), - ]); - }, - }; - }; + ret.idlType = + type_with_extended_attributes(tokeniser, "attribute-type") || + tokeniser.error("Attribute lacks a type"); + tokens.name = + tokeniser.consumeKind("identifier") || + tokeniser.error("Attribute lacks a name"); + tokens.termination = + tokeniser.consume(";") || + tokeniser.error("Unterminated attribute, expected `;`"); + return ret.this; + } - const result = parse(customIdl, { - productions: [customProduction], - concrete: true, + get type() { + return "custom attribute"; + } + + write(w) { + const { parent } = this; + return w.ts.definition( + w.ts.wrap([ + this.extAttrs.write(w), + w.token(this.tokens.base), + w.ts.type(this.idlType.write(w)), + w.name_token(this.tokens.name, { data: this, parent }), + w.token(this.tokens.termination), + ]), + { data: this, parent } + ); + } +} + +describe("Parse IDLs using custom productions", () => { + it("Parse and rewrite top-level custom attribute", () => { + const customIdl = "custom long bar;"; + const result = parse(customIdl, { + productions: [CustomAttribute.parse], + concrete: true, + }); + expect(result[0].type).toBe("custom attribute"); + + const rewritten = write(result); + expect(rewritten).toBe(customIdl); }); - expect(result[0].type).toBe("interface"); - expect(result[1].type).toBe("custom"); +}); - const rewritten = write(result); - expect(rewritten).toBe(customIdl); +describe("Parse IDLs using custom extensions", () => { + [ + ["callback interface", "callbackInterface"], + ["dictionary", "dictionary"], + ["interface", "interface"], + ["interface mixin", "mixin"], + ["namespace", "namespace"], + ].forEach(([type, key]) => { + it(`Attribute on ${type}`, () => { + const customIdl = `${type} Foo { + custom long bar; + };`; + const result = parse(customIdl, { + concrete: true, + extensions: { [key]: { extMembers: [[CustomAttribute.parse]] } }, + }); + expect(result[0].type).toBe(type); + expect(result[0].members[0].type).toBe("custom attribute"); + }); + }); });