author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-09-29 16:16:15 +0200
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2022-11-09 10:04:06 +0000
commit     a95a7417ad456115a1ef2da4bb8320531c0821f1 (patch)
tree       edcd59279e486d2fd4a8f88a7ed025bcf925c6e6 /chromium/third_party/devtools-frontend/src/node_modules/webidl2
parent     33fc33aa94d4add0878ec30dc818e34e1dd3cc2a (diff)
download   qtwebengine-chromium-a95a7417ad456115a1ef2da4bb8320531c0821f1.tar.gz
BASELINE: Update Chromium to 106.0.5249.126
Change-Id: Ib0bb21c437a7d1686e21c33f2d329f2ac425b7ab
Reviewed-on: https://codereview.qt-project.org/c/qt/qtwebengine-chromium/+/438936
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/third_party/devtools-frontend/src/node_modules/webidl2')
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js.map  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/error.js  12
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/argument.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/array-base.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/attribute.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/base.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/callback-interface.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/constructor.js  5
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/container.js  3
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/default.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/dictionary.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/enum.js  6
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/extended-attributes.js  33
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/field.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/helpers.js  31
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/includes.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/interface.js  9
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/iterable.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/namespace.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/token.js  8
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/type.js  21
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/typedef.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/supplement.d.ts  118
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validator.js  6
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/helpers.js  2
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/interface.js  48
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/webidl2.js  4
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/writer.js  13
-rw-r--r--  chromium/third_party/devtools-frontend/src/node_modules/webidl2/package.json  22
30 files changed, 270 insertions, 111 deletions
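
For reference, the minified bundle updated in the diff below exposes the public webidl2 entry points named in its export table (`parse`, `validate`, `write`, and `WebIDLParseError`), and `parse` accepts a `sourceName` option as visible in the bundled source. The following is a minimal usage sketch only, assuming a Node environment with the package on the module path; the IDL snippet and the `example.webidl` name are illustrative, not taken from this commit.

// Sketch of the API exported by this bundle; sample inputs are illustrative.
const { parse, validate, write, WebIDLParseError } = require("webidl2");

try {
  // parse() returns an AST; `sourceName` labels the source in error output.
  const ast = parse("[Exposed=Window] interface Foo { undefined bar(); };", {
    sourceName: "example.webidl",
  });

  // validate() returns an array of findings (level, ruleName, bareMessage, ...).
  for (const finding of validate(ast)) {
    console.warn(`${finding.level} [${finding.ruleName}]: ${finding.bareMessage}`);
  }

  // write() serializes the AST back to WebIDL text.
  console.log(write(ast));
} catch (err) {
  if (err instanceof WebIDLParseError) {
    console.error(err.context); // message plus a caret pointing into the bad line
  } else {
    throw err;
  }
}
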
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js
index f4273a7e247..4a64f3e2271 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js
@@ -1,2 +1,2 @@
-!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.WebIDL2=t():e.WebIDL2=t()}(globalThis,(function(){return(()=>{"use strict";var e={d:(t,n)=>{for(var s in n)e.o(n,s)&&!e.o(t,s)&&Object.defineProperty(t,s,{enumerable:!0,get:n[s]})},o:(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r:e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})}},t={};function n(e,t,n,s,r,{level:i="error",autofix:o,ruleName:a}={}){function c(n){return n>0?e.slice(t,t+n):e.slice(Math.max(t+n,0),t)}function u(n,{precedes:s}={}){const r=n.map((e=>e.trivia+e.value)).join(""),i=e[t];return"eof"===i.type?r:s?r+i.trivia:r.slice(i.trivia.length)}const l="eof"!==e[t].type?e[t].line:e.length>1?e[t-1].line:1,p=function(e){const t=e.split("\n");return t[t.length-1]}(u(c(-5),{precedes:!0})),d=c(5),h=u(d),m=p+h.split("\n")[0]+"\n"+(" ".repeat(p.length)+"^"),f="Syntax"===r?"since":"inside",y=`${r} error at line ${l}${e.name?` in ${e.name}`:""}${n&&n.name?`, ${f} \`${n.partial?"partial ":""}${function(e){const t=[e];for(;e&&e.parent;){const{parent:n}=e;t.unshift(n),e=n}return t.map((e=>function(e,t){let n=e;return t&&(n+=` ${t}`),n}(e.type,e.name))).join(" -> ")}(n)}\``:""}:\n${m}`;return{message:`${y} ${s}`,bareMessage:s,context:y,line:l,sourceName:e.name,level:i,ruleName:a,autofix:o,input:h,tokens:d}}function s(e,t,s,r){return n(e,t,s,r,"Syntax")}function r(e,t,s,r,i={}){return i.ruleName=s,n(t.source,e.index,t,r,"Validation",i)}e.r(t),e.d(t,{WebIDLParseError:()=>WebIDLParseError,parse:()=>q,validate:()=>_,write:()=>F});class Base{constructor({source:e,tokens:t}){Object.defineProperties(this,{source:{value:e},tokens:{value:t,writable:!0},parent:{value:null,writable:!0},this:{value:this}})}toJSON(){const e={type:void 0,name:void 0,inheritance:void 0};let t=this;for(;t!==Object.prototype;){const n=Object.getOwnPropertyDescriptors(t);for(const[t,s]of Object.entries(n))(s.enumerable||s.get)&&(e[t]=this[t]);t=Object.getPrototypeOf(t)}return e}}function i(e,t,{useNullableInner:n}={}){if(!e.union){const s=t.unique.get(e.idlType);if(!s)return;if("typedef"===s.type){const{typedefIncludesDictionary:n}=t.cache;if(n.has(s))return n.get(s);t.cache.typedefIncludesDictionary.set(s,void 0);const r=i(s.idlType,t);if(t.cache.typedefIncludesDictionary.set(s,r),r)return{reference:e,dictionary:r.dictionary}}if("dictionary"===s.type&&(n||!e.nullable))return{reference:e,dictionary:s}}for(const n of e.subtype){const e=i(n,t);if(e)return n.union?e:{reference:n,dictionary:e.dictionary}}}function o(e,t){if(t.cache.dictionaryIncludesRequiredField.has(e))return t.cache.dictionaryIncludesRequiredField.get(e);t.cache.dictionaryIncludesRequiredField.set(e,void 0);let n=e.members.some((e=>e.required));if(!n&&e.inheritance){const s=t.unique.get(e.inheritance);s?o(s,t)&&(n=!0):n=!0}return t.cache.dictionaryIncludesRequiredField.set(e,n),n}class ArrayBase extends Array{constructor({source:e,tokens:t}){super(),Object.defineProperties(this,{source:{value:e},tokens:{value:t},parent:{value:null,writable:!0}})}}class WrappedToken extends Base{static parser(e,t){return()=>{const n=e.consumeKind(t);if(n)return new WrappedToken({source:e.source,tokens:{value:n}})}}get value(){return m(this.tokens.value.value)}write(e){return e.ts.wrap([e.token(this.tokens.value),e.token(this.tokens.separator)])}}class Eof extends WrappedToken{static parse(e){const 
t=e.consumeKind("eof");if(t)return new Eof({source:e.source,tokens:{value:t}})}get type(){return"eof"}}function a(e,t){return f(e,{parser:WrappedToken.parser(e,t),listName:t+" list"})}const c=["identifier","decimal","integer","string"],u=new Map([...["NoInterfaceObject","LenientSetter","LenientThis","TreatNonObjectAsNull","Unforgeable"].map((e=>[e,`Legacy${e}`])),["NamedConstructor","LegacyFactoryFunction"],["OverrideBuiltins","LegacyOverrideBuiltIns"],["TreatNullAs","LegacyNullToEmptyString"]]);function l(e){for(const t of c){const n=a(e,t);if(n.length)return n}e.error("Expected identifiers, strings, decimals, or integers but none found")}class ExtendedAttributeParameters extends Base{static parse(e){const t={assign:e.consume("=")},n=N(new ExtendedAttributeParameters({source:e.source,tokens:t}));if(n.list=[],t.assign){if(t.asterisk=e.consume("*"),t.asterisk)return n.this;t.secondaryName=e.consumeKind(...c)}return t.open=e.consume("("),t.open?(n.list=n.rhsIsList?l(e):g(e),t.close=e.consume(")")||e.error("Unexpected token in extended attribute argument list")):n.hasRhs&&!t.secondaryName&&e.error("No right hand side to extended attribute assignment"),n.this}get rhsIsList(){return this.tokens.assign&&!this.tokens.asterisk&&!this.tokens.secondaryName}get rhsType(){return this.rhsIsList?this.list[0].tokens.value.type+"-list":this.tokens.asterisk?"*":this.tokens.secondaryName?this.tokens.secondaryName.type:null}write(e){const{rhsType:t}=this;return e.ts.wrap([e.token(this.tokens.assign),e.token(this.tokens.asterisk),e.reference_token(this.tokens.secondaryName,this.parent),e.token(this.tokens.open),...this.list.map((n=>"identifier-list"===t?e.identifier(n,this.parent):n.write(e))),e.token(this.tokens.close)])}}class SimpleExtendedAttribute extends Base{static parse(e){const t=e.consumeKind("identifier");if(t)return new SimpleExtendedAttribute({source:e.source,tokens:{name:t},params:ExtendedAttributeParameters.parse(e)})}constructor({source:e,tokens:t,params:n}){super({source:e,tokens:t}),n.parent=this,Object.defineProperty(this,"params",{value:n})}get type(){return"extended-attribute"}get name(){return this.tokens.name.value}get rhs(){const{rhsType:e,tokens:t,list:n}=this.params;if(!e)return null;return{type:e,value:this.params.rhsIsList?n:this.params.tokens.secondaryName?m(t.secondaryName.value):null}}get arguments(){const{rhsIsList:e,list:t}=this.params;return!t||e?[]:t}*validate(e){const{name:t}=this;if("LegacyNoInterfaceObject"===t){const e="`[LegacyNoInterfaceObject]` extended attribute is an undesirable feature that may be removed from Web IDL in the future. Refer to the [relevant upstream PR](https://github.com/whatwg/webidl/pull/609) for more information.";yield r(this.tokens.name,this,"no-nointerfaceobject",e,{level:"warning"})}else if(u.has(t)){const e=`\`[${t}]\` extended attribute is a legacy feature that is now renamed to \`[${u.get(t)}]\`. 
Refer to the [relevant upstream PR](https://github.com/whatwg/webidl/pull/870) for more information.`;yield r(this.tokens.name,this,"renamed-legacy",e,{level:"warning",autofix:(n=this,()=>{const{name:e}=n;n.tokens.name.value=u.get(e),"TreatNullAs"===e&&(n.params.tokens={})})})}var n;for(const t of this.arguments)yield*t.validate(e)}write(e){return e.ts.wrap([e.ts.trivia(this.tokens.name.trivia),e.ts.extendedAttribute(e.ts.wrap([e.ts.extendedAttributeReference(this.name),this.params.write(e)])),e.token(this.tokens.separator)])}}class ExtendedAttributes extends ArrayBase{static parse(e){const t={};if(t.open=e.consume("["),!t.open)return new ExtendedAttributes({});const n=new ExtendedAttributes({source:e.source,tokens:t});return n.push(...f(e,{parser:SimpleExtendedAttribute.parse,listName:"extended attribute"})),t.close=e.consume("]")||e.error("Unexpected closing token of extended attribute"),n.length||e.error("Found an empty extended attribute"),e.probe("[")&&e.error("Illegal double extended attribute lists, consider merging them"),n}*validate(e){for(const t of this)yield*t.validate(e)}write(e){return this.length?e.ts.wrap([e.token(this.tokens.open),...this.map((t=>t.write(e))),e.token(this.tokens.close)]):""}}function p(e,t){const n=e.consume("?");n&&(t.tokens.nullable=n),e.probe("?")&&e.error("Can't nullable more than once")}function d(e,t){let n=function(e,t){const n=e.consume("FrozenArray","ObservableArray","Promise","sequence","record");if(!n)return;const s=N(new Type({source:e.source,tokens:{base:n}}));switch(s.tokens.open=e.consume("<")||e.error(`No opening bracket after ${n.value}`),n.value){case"Promise":{e.probe("[")&&e.error("Promise type cannot have extended attribute");const n=x(e,t)||e.error("Missing Promise subtype");s.subtype.push(n);break}case"sequence":case"FrozenArray":case"ObservableArray":{const r=v(e,t)||e.error(`Missing ${n.value} subtype`);s.subtype.push(r);break}case"record":{e.probe("[")&&e.error("Record key cannot have extended attribute");const n=e.consume(...C)||e.error(`Record key must be one of: ${C.join(", ")}`),r=new Type({source:e.source,tokens:{base:n}});r.tokens.separator=e.consume(",")||e.error("Missing comma after record key type"),r.type=t;const i=v(e,t)||e.error("Error parsing generic type record");s.subtype.push(r,i);break}}return s.idlType||e.error(`Error parsing generic type ${n.value}`),s.tokens.close=e.consume(">")||e.error(`Missing closing bracket after ${n.value}`),s.this}(e,t)||b(e);if(!n){const t=e.consumeKind("identifier")||e.consume(...C,...O);if(!t)return;n=new Type({source:e.source,tokens:{base:t}}),e.probe("<")&&e.error(`Unsupported generic type ${t.value}`)}return"Promise"===n.generic&&e.probe("?")&&e.error("Promise type cannot be nullable"),n.type=t||null,p(e,n),n.nullable&&"any"===n.idlType&&e.error("Type `any` cannot be made nullable"),n}class Type extends Base{static parse(e,t){return d(e,t)||function(e,t){const n={};if(n.open=e.consume("("),!n.open)return;const s=N(new Type({source:e.source,tokens:n}));for(s.type=t||null;;){const t=v(e)||e.error("No type after open parenthesis or 'or' in union type");"any"===t.idlType&&e.error("Type `any` cannot be included in a union type"),"Promise"===t.generic&&e.error("Type `Promise` cannot be included in a union type"),s.subtype.push(t);const n=e.consume("or");if(!n)break;t.tokens.separator=n}return s.idlType.length<2&&e.error("At least two types are expected in a union type but found less"),n.close=e.consume(")")||e.error("Unterminated union 
type"),p(e,s),s.this}(e,t)}constructor({source:e,tokens:t}){super({source:e,tokens:t}),Object.defineProperty(this,"subtype",{value:[],writable:!0}),this.extAttrs=new ExtendedAttributes({})}get generic(){return this.subtype.length&&this.tokens.base?this.tokens.base.value:""}get nullable(){return Boolean(this.tokens.nullable)}get union(){return Boolean(this.subtype.length)&&!this.tokens.base}get idlType(){if(this.subtype.length)return this.subtype;return m([this.tokens.prefix,this.tokens.base,this.tokens.postfix].filter((e=>e)).map((e=>e.value)).join(" "))}*validate(e){if(yield*this.extAttrs.validate(e),"void"===this.idlType){const e="`void` is now replaced by `undefined`. Refer to the [relevant GitHub issue](https://github.com/whatwg/webidl/issues/60) for more information.";yield r(this.tokens.base,this,"replace-void",e,{autofix:(t=this,()=>{t.tokens.base.value="undefined"})})}var t;const n=!this.union&&e.unique.get(this.idlType),s=this.union?this:n&&"typedef"===n.type?n.idlType:void 0;if(s&&this.nullable){const{reference:t}=i(s,e)||{};if(t){const e=(this.union?t:this).tokens.base,n="Nullable union cannot include a dictionary type.";yield r(e,this,"no-nullable-union-dict",n)}}else for(const t of this.subtype)yield*t.validate(e)}write(e){return e.ts.wrap([this.extAttrs.write(e),(()=>{if(this.union||this.generic)return e.ts.wrap([e.token(this.tokens.base,e.ts.generic),e.token(this.tokens.open),...this.subtype.map((t=>t.write(e))),e.token(this.tokens.close)]);const t=this.tokens.prefix||this.tokens.base,n=this.tokens.prefix?[this.tokens.prefix.value,e.ts.trivia(this.tokens.base.trivia)]:[],s=e.reference(e.ts.wrap([...n,this.tokens.base.value,e.token(this.tokens.postfix)]),{unescaped:this.idlType,context:this});return e.ts.wrap([e.ts.trivia(t.trivia),s])})(),e.token(this.tokens.nullable),e.token(this.tokens.separator)])}}class Default extends Base{static parse(e){const t=e.consume("=");if(!t)return null;const n=y(e)||e.consumeKind("string")||e.consume("null","[","{")||e.error("No value for default"),s=[n];if("["===n.value){const t=e.consume("]")||e.error("Default sequence value must be empty");s.push(t)}else if("{"===n.value){const t=e.consume("}")||e.error("Default dictionary value must be empty");s.push(t)}return new Default({source:e.source,tokens:{assign:t},expression:s})}constructor({source:e,tokens:t,expression:n}){super({source:e,tokens:t}),n.parent=this,Object.defineProperty(this,"expression",{value:n})}get type(){return k(this.expression[0]).type}get value(){return k(this.expression[0]).value}get negative(){return k(this.expression[0]).negative}write(e){return e.ts.wrap([e.token(this.tokens.assign),...this.expression.map((t=>e.token(t)))])}}class Argument extends Base{static parse(e){const t=e.position,n={},s=N(new Argument({source:e.source,tokens:n}));return s.extAttrs=ExtendedAttributes.parse(e),n.optional=e.consume("optional"),s.idlType=v(e,"argument-type"),s.idlType?(n.optional||(n.variadic=e.consume("...")),n.name=e.consumeKind("identifier")||e.consume(...$),n.name?(s.default=n.optional?Default.parse(e):null,s.this):e.unconsume(t)):e.unconsume(t)}get type(){return"argument"}get optional(){return!!this.tokens.optional}get variadic(){return!!this.tokens.variadic}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.extAttrs.validate(e),yield*this.idlType.validate(e);const t=i(this.idlType,e,{useNullableInner:!0});if(t)if(this.idlType.nullable){const e="Dictionary arguments cannot be nullable.";yield r(this.tokens.name,this,"no-nullable-dict-arg",e)}else 
if(this.optional){if(!this.default){const e="Optional dictionary arguments must have a default value of `{}`.";yield r(this.tokens.name,this,"dict-arg-default",e,{autofix:h(this)})}}else if(this.parent&&!o(t.dictionary,e)&&function(e){const t=e.parent.arguments||e.parent.list,n=t.indexOf(e);return!t.slice(n+1).some((e=>!e.optional))}(this)){const e="Dictionary argument must be optional if it has no required fields";yield r(this.tokens.name,this,"dict-arg-optional",e,{autofix:(n=this,()=>{const e=I(n.idlType);n.tokens.optional={...e,type:"optional",value:"optional"},e.trivia=" ",h(n)()})})}var n}write(e){return e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.optional),e.ts.type(this.idlType.write(e)),e.token(this.tokens.variadic),e.name_token(this.tokens.name,{data:this}),this.default?this.default.write(e):"",e.token(this.tokens.separator)])}}function h(e){return()=>{e.default=Default.parse(new Tokeniser(" = {}"))}}class Operation extends Base{static parse(e,{special:t,regular:n}={}){const s={special:t},r=N(new Operation({source:e.source,tokens:s}));return t&&"stringifier"===t.value&&(s.termination=e.consume(";"),s.termination)?(r.arguments=[],r):(t||n||(s.special=e.consume("getter","setter","deleter")),r.idlType=x(e)||e.error("Missing return type"),s.name=e.consumeKind("identifier")||e.consume("includes"),s.open=e.consume("(")||e.error("Invalid operation"),r.arguments=g(e),s.close=e.consume(")")||e.error("Unterminated operation"),s.termination=e.consume(";")||e.error("Unterminated operation, expected `;`"),r.this)}get type(){return"operation"}get name(){const{name:e}=this.tokens;return e?m(e.value):""}get special(){return this.tokens.special?this.tokens.special.value:""}*validate(e){if(yield*this.extAttrs.validate(e),!this.name&&["","static"].includes(this.special)){const e="Regular or static operations must have both a return type and an identifier.";yield r(this.tokens.open,this,"incomplete-op",e)}this.idlType&&(yield*this.idlType.validate(e));for(const t of this.arguments)yield*t.validate(e)}write(e){const{parent:t}=this,n=this.idlType?[e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.open),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.close)]:[];return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),this.tokens.name?e.token(this.tokens.special):e.token(this.tokens.special,e.ts.nameless,{data:this,parent:t}),...n,e.token(this.tokens.termination)]),{data:this,parent:t})}}class Attribute extends Base{static parse(e,{special:t,noInherit:n=!1,readonly:s=!1}={}){const r=e.position,i={special:t},o=N(new Attribute({source:e.source,tokens:i}));if(t||n||(i.special=e.consume("inherit")),"inherit"===o.special&&e.probe("readonly")&&e.error("Inherited attributes cannot be read-only"),i.readonly=e.consume("readonly"),s&&!i.readonly&&e.probe("attribute")&&e.error("Attributes must be readonly in this context"),i.base=e.consume("attribute"),i.base)return o.idlType=v(e,"attribute-type")||e.error("Attribute lacks a type"),i.name=e.consumeKind("identifier")||e.consume("async","required")||e.error("Attribute lacks a name"),i.termination=e.consume(";")||e.error("Unterminated attribute, expected `;`"),o.this;e.unconsume(r)}get type(){return"attribute"}get special(){return this.tokens.special?this.tokens.special.value:""}get readonly(){return!!this.tokens.readonly}get name(){return 
m(this.tokens.name.value)}*validate(e){switch(yield*this.extAttrs.validate(e),yield*this.idlType.validate(e),this.idlType.generic){case"sequence":case"record":{const e=`Attributes cannot accept ${this.idlType.generic} types.`;yield r(this.tokens.name,this,"attr-invalid-type",e);break}default:{const{reference:t}=i(this.idlType,e)||{};if(t){const e=(this.idlType.union?t:this.idlType).tokens.base,n="Attributes cannot accept dictionary types.";yield r(e,this,"attr-invalid-type",n)}}}}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.special),e.token(this.tokens.readonly),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.termination)]),{data:this,parent:t})}}function m(e){return e.startsWith("_")?e.slice(1):e}function f(e,{parser:t,allowDangler:n,listName:s="list"}){const r=t(e);if(!r)return[];r.tokens.separator=e.consume(",");const i=[r];for(;r.tokens.separator;){const r=t(e);if(!r){n||e.error(`Trailing comma in ${s}`);break}if(r.tokens.separator=e.consume(","),i.push(r),!r.tokens.separator)break}return i}function y(e){return e.consumeKind("decimal","integer")||e.consume("true","false","Infinity","-Infinity","NaN")}function k({type:e,value:t}){switch(e){case"decimal":case"integer":return{type:"number",value:t};case"string":return{type:"string",value:t.slice(1,-1)}}switch(t){case"true":case"false":return{type:"boolean",value:"true"===t};case"Infinity":case"-Infinity":return{type:"Infinity",negative:t.startsWith("-")};case"[":return{type:"sequence",value:[]};case"{":return{type:"dictionary"};default:return{type:t}}}function b(e){const{source:t}=e,n=function(){const n=e.consume("unsigned"),s=e.consume("short","long");if(s){const r=e.consume("long");return new Type({source:t,tokens:{prefix:n,base:s,postfix:r}})}n&&e.error("Failed to parse integer type")}()||function(){const n=e.consume("unrestricted"),s=e.consume("float","double");if(s)return new Type({source:t,tokens:{prefix:n,base:s}});n&&e.error("Failed to parse float type")}();if(n)return n;const s=e.consume("bigint","boolean","byte","octet","undefined");return s?new Type({source:t,tokens:{base:s}}):void 0}function g(e){return f(e,{parser:Argument.parse,listName:"arguments list"})}function v(e,t){const n=ExtendedAttributes.parse(e),s=Type.parse(e,t);return s&&(N(s).extAttrs=n),s}function x(e,t){const n=Type.parse(e,t||"return-type");if(n)return n;const s=e.consume("void");if(s){const t=new Type({source:e.source,tokens:{base:s}});return t.type="return-type",t}}function w(e){const t=e.consume("stringifier");if(!t)return;return Attribute.parse(e,{special:t})||Operation.parse(e,{special:t})||e.error("Unterminated stringifier")}function A(e){const t=e.split("\n");if(t.length){const e=t[t.length-1].match(/^\s+/);if(e)return e[0]}return""}function T(e){return()=>{if(e.extAttrs.length){const t=new Tokeniser("Exposed=Window,"),n=SimpleExtendedAttribute.parse(t);n.tokens.separator=t.consume(",");const s=e.extAttrs[0];/^\s/.test(s.tokens.name.trivia)||(s.tokens.name.trivia=` ${s.tokens.name.trivia}`),e.extAttrs.unshift(n)}else{N(e).extAttrs=ExtendedAttributes.parse(new Tokeniser("[Exposed=Window]"));const t=e.tokens.base.trivia;e.extAttrs.tokens.open.trivia=t,e.tokens.base.trivia=`\n${A(t)}`}}}function I(e){if(e.extAttrs.length)return e.extAttrs.tokens.open;if("operation"===e.type&&!e.special)return I(e.idlType);return Object.values(e.tokens).sort(((e,t)=>e.index-t.index))[0]}function N(e,t){return t||(t=e),e?new 
Proxy(e,{get(e,t){const n=e[t];return Array.isArray(n)?N(n,e):n},set(e,n,s){if(e[n]=s,!s)return!0;if(Array.isArray(s))for(const e of s)void 0!==e.parent&&(e.parent=t);else void 0!==s.parent&&(s.parent=t);return!0}}):e}const E={decimal:/-?(?=[0-9]*\.|[0-9]+[eE])(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,integer:/-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,identifier:/[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,string:/"[^"]*"/y,whitespace:/[\t\n\r ]+/y,comment:/\/\/.*|\/\*[\s\S]*?\*\//y,other:/[^\t\n\r 0-9A-Za-z]/y},O=["ArrayBuffer","DataView","Int8Array","Int16Array","Int32Array","Uint8Array","Uint16Array","Uint32Array","Uint8ClampedArray","BigInt64Array","BigUint64Array","Float32Array","Float64Array","any","object","symbol"],C=["ByteString","DOMString","USVString"],$=["async","attribute","callback","const","constructor","deleter","dictionary","enum","getter","includes","inherit","interface","iterable","maplike","namespace","partial","required","setlike","setter","static","stringifier","typedef","unrestricted"],D=["-Infinity","FrozenArray","Infinity","NaN","ObservableArray","Promise","bigint","boolean","byte","double","false","float","long","mixin","null","octet","optional","or","readonly","record","sequence","short","true","undefined","unsigned","void"].concat($,C,O),M=["(",")",",","...",":",";","<","=",">","?","*","[","]","{","}"],P=["_constructor","toString","_toString"];class Tokeniser{constructor(e){this.source=function(e){const t=[];let n=0,r="",i=1,o=0;for(;n<e.length;){const c=e.charAt(n);let u=-1;if(/[\t\n\r ]/.test(c)?u=a("whitespace",{noFlushTrivia:!0}):"/"===c&&(u=a("comment",{noFlushTrivia:!0})),-1!==u){const e=t.pop().value;i+=(e.match(/\n/g)||[]).length,r+=e,o-=1}else if(/[-0-9.A-Z_a-z]/.test(c)){if(u=a("decimal"),-1===u&&(u=a("integer")),-1===u){u=a("identifier");const e=t.length-1,n=t[e];if(-1!==u){if(P.includes(n.value)){const r=`${m(n.value)} is a reserved identifier and must not be used.`;throw new WebIDLParseError(s(t,e,null,r))}D.includes(n.value)&&(n.type="inline")}}}else'"'===c&&(u=a("string"));for(const s of M)if(e.startsWith(s,n)){t.push({type:"inline",value:s,trivia:r,line:i,index:o}),r="",n+=s.length,u=n;break}if(-1===u&&(u=a("other")),-1===u)throw new Error("Token stream not progressing");n=u,o+=1}return t.push({type:"eof",value:"",trivia:r,line:i,index:o}),t;function a(s,{noFlushTrivia:a}={}){const c=E[s];c.lastIndex=n;const u=c.exec(e);return u?(t.push({type:s,value:u[0],trivia:r,line:i,index:o}),a||(r=""),c.lastIndex):-1}}(e),this.position=0}error(e){throw new WebIDLParseError(s(this.source,this.position,this.current,e))}probeKind(e){return this.source.length>this.position&&this.source[this.position].type===e}probe(e){return this.probeKind("inline")&&this.source[this.position].value===e}consumeKind(...e){for(const t of e){if(!this.probeKind(t))continue;const e=this.source[this.position];return this.position++,e}}consume(...e){if(!this.probeKind("inline"))return;const t=this.source[this.position];for(const n of e)if(t.value===n)return this.position++,t}consumeIdentifier(e){if(this.probeKind("identifier")&&this.source[this.position].value===e)return this.consumeKind("identifier")}unconsume(e){this.position=e}}class WebIDLParseError extends Error{constructor({message:e,bareMessage:t,context:n,line:s,sourceName:r,input:i,tokens:o}){super(e),this.name="WebIDLParseError",this.bareMessage=t,this.context=n,this.line=s,this.sourceName=r,this.input=i,this.tokens=o}}class EnumValue extends WrappedToken{static parse(e){const 
t=e.consumeKind("string");if(t)return new EnumValue({source:e.source,tokens:{value:t}})}get type(){return"enum-value"}get value(){return super.value.slice(1,-1)}write(e){const{parent:t}=this;return e.ts.wrap([e.ts.trivia(this.tokens.value.trivia),e.ts.definition(e.ts.wrap(['"',e.ts.name(this.value,{data:this,parent:t}),'"']),{data:this,parent:t}),e.token(this.tokens.separator)])}}class Enum extends Base{static parse(e){const t={};if(t.base=e.consume("enum"),!t.base)return;t.name=e.consumeKind("identifier")||e.error("No name for enum");const n=N(new Enum({source:e.source,tokens:t}));return e.current=n.this,t.open=e.consume("{")||e.error("Bodyless enum"),n.values=f(e,{parser:EnumValue.parse,allowDangler:!0,listName:"enumeration"}),e.probeKind("string")&&e.error("No comma between enum values"),t.close=e.consume("}")||e.error("Unexpected value in enum"),n.values.length||e.error("No value in enum"),t.termination=e.consume(";")||e.error("No semicolon after enum"),n.this}get type(){return"enum"}get name(){return m(this.tokens.name.value)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.open),e.ts.wrap(this.values.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Includes extends Base{static parse(e){const t=e.consumeKind("identifier");if(!t)return;const n={target:t};if(n.includes=e.consume("includes"),n.includes)return n.mixin=e.consumeKind("identifier")||e.error("Incomplete includes statement"),n.termination=e.consume(";")||e.error("No terminating ; for includes statement"),new Includes({source:e.source,tokens:n});e.unconsume(t.index)}get type(){return"includes"}get target(){return m(this.tokens.target.value)}get includes(){return m(this.tokens.mixin.value)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.reference_token(this.tokens.target,this),e.token(this.tokens.includes),e.reference_token(this.tokens.mixin,this),e.token(this.tokens.termination)]),{data:this})}}class Typedef extends Base{static parse(e){const t={},n=N(new Typedef({source:e.source,tokens:t}));if(t.base=e.consume("typedef"),t.base)return n.idlType=v(e,"typedef-type")||e.error("Typedef lacks a type"),t.name=e.consumeKind("identifier")||e.error("Typedef lacks a name"),e.current=n.this,t.termination=e.consume(";")||e.error("Unterminated typedef, expected `;`"),n.this}get type(){return"typedef"}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.idlType.validate(e)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.termination)]),{data:this})}}class CallbackFunction extends Base{static parse(e,t){const n={base:t},s=N(new CallbackFunction({source:e.source,tokens:n}));return n.name=e.consumeKind("identifier")||e.error("Callback lacks a name"),e.current=s.this,n.assign=e.consume("=")||e.error("Callback lacks an assignment"),s.idlType=x(e)||e.error("Callback lacks a return type"),n.open=e.consume("(")||e.error("Callback lacks parentheses for arguments"),s.arguments=g(e),n.close=e.consume(")")||e.error("Unterminated callback"),n.termination=e.consume(";")||e.error("Unterminated callback, expected `;`"),s.this}get type(){return"callback"}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.extAttrs.validate(e),yield*this.idlType.validate(e)}write(e){return 
e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.assign),e.ts.type(this.idlType.write(e)),e.token(this.tokens.open),...this.arguments.map((t=>t.write(e))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Container extends Base{static parse(e,t,{inheritable:n,allowedMembers:s}){const{tokens:r,type:i}=t;for(r.name=e.consumeKind("identifier")||e.error(`Missing name in ${i}`),e.current=t,t=N(t),n&&Object.assign(r,function(e){const t=e.consume(":");return t?{colon:t,inheritance:e.consumeKind("identifier")||e.error("Inheritance lacks a type")}:{}}(e)),r.open=e.consume("{")||e.error(`Bodyless ${i}`),t.members=[];;){if(r.close=e.consume("}"),r.close)return r.termination=e.consume(";")||e.error(`Missing semicolon after ${i}`),t.this;const n=ExtendedAttributes.parse(e);let o;for(const[t,...n]of s)if(o=N(t(e,...n)),o)break;o||e.error("Unknown member"),o.extAttrs=n,t.members.push(o.this)}}get partial(){return!!this.tokens.partial}get name(){return m(this.tokens.name.value)}get inheritance(){return this.tokens.inheritance?m(this.tokens.inheritance.value):null}*validate(e){for(const t of this.members)t.validate&&(yield*t.validate(e))}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.callback),e.token(this.tokens.partial),e.token(this.tokens.base),e.token(this.tokens.mixin),e.name_token(this.tokens.name,{data:this}),(()=>this.tokens.inheritance?e.ts.wrap([e.token(this.tokens.colon),e.ts.trivia(this.tokens.inheritance.trivia),e.ts.inheritance(e.reference(this.tokens.inheritance.value,{context:this}))]):"")(),e.token(this.tokens.open),e.ts.wrap(this.members.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Constant extends Base{static parse(e){const t={};if(t.base=e.consume("const"),!t.base)return;let n=b(e);if(!n){const t=e.consumeKind("identifier")||e.error("Const lacks a type");n=new Type({source:e.source,tokens:{base:t}})}e.probe("?")&&e.error("Unexpected nullable constant type"),n.type="const-type",t.name=e.consumeKind("identifier")||e.error("Const lacks a name"),t.assign=e.consume("=")||e.error("Const lacks value assignment"),t.value=y(e)||e.error("Const lacks a value"),t.termination=e.consume(";")||e.error("Unterminated const, expected `;`");const s=new Constant({source:e.source,tokens:t});return N(s).idlType=n,s}get type(){return"const"}get name(){return m(this.tokens.name.value)}get value(){return k(this.tokens.value)}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.assign),e.token(this.tokens.value),e.token(this.tokens.termination)]),{data:this,parent:t})}}class IterableLike extends Base{static parse(e){const t=e.position,n={},s=N(new IterableLike({source:e.source,tokens:n}));if(n.readonly=e.consume("readonly"),n.readonly||(n.async=e.consume("async")),n.base=n.readonly?e.consume("maplike","setlike"):n.async?e.consume("iterable"):e.consume("iterable","maplike","setlike"),!n.base)return void e.unconsume(t);const{type:r}=s,i="maplike"===r,o=i||"iterable"===r,a=s.async&&"iterable"===r;n.open=e.consume("<")||e.error(`Missing less-than sign \`<\` in ${r} declaration`);const c=v(e)||e.error(`Missing a type argument in ${r} declaration`);return 
s.idlType=[c],s.arguments=[],o&&(c.tokens.separator=e.consume(","),c.tokens.separator?s.idlType.push(v(e)):i&&e.error(`Missing second type argument in ${r} declaration`)),n.close=e.consume(">")||e.error(`Missing greater-than sign \`>\` in ${r} declaration`),e.probe("(")&&(a?(n.argsOpen=e.consume("("),s.arguments.push(...g(e)),n.argsClose=e.consume(")")||e.error("Unterminated async iterable argument list")):e.error("Arguments are only allowed for `async iterable`")),n.termination=e.consume(";")||e.error(`Missing semicolon after ${r} declaration`),s.this}get type(){return this.tokens.base.value}get readonly(){return!!this.tokens.readonly}get async(){return!!this.tokens.async}*validate(e){for(const t of this.idlType)yield*t.validate(e);for(const t of this.arguments)yield*t.validate(e)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.readonly),e.token(this.tokens.async),e.token(this.tokens.base,e.ts.generic),e.token(this.tokens.open),e.ts.wrap(this.idlType.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.argsOpen),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.argsClose),e.token(this.tokens.termination)]),{data:this,parent:this.parent})}}class Constructor extends Base{static parse(e){const t=e.consume("constructor");if(!t)return;const n={base:t};n.open=e.consume("(")||e.error("No argument list in constructor");const s=g(e);n.close=e.consume(")")||e.error("Unterminated constructor"),n.termination=e.consume(";")||e.error("No semicolon after constructor");const r=new Constructor({source:e.source,tokens:n});return N(r).arguments=s,r}get type(){return"constructor"}*validate(e){this.idlType&&(yield*this.idlType.validate(e));for(const t of this.arguments)yield*t.validate(e)}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base,e.ts.nameless,{data:this,parent:t}),e.token(this.tokens.open),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this,parent:t})}}function j(e){const t=e.consume("static");if(!t)return;return Attribute.parse(e,{special:t})||Operation.parse(e,{special:t})||e.error("No body in static member")}class Interface extends Container{static parse(e,t,{partial:n=null}={}){const s={partial:n,base:t};return Container.parse(e,new Interface({source:e.source,tokens:s}),{inheritable:!n,allowedMembers:[[Constant.parse],[Constructor.parse],[j],[w],[IterableLike.parse],[Attribute.parse],[Operation.parse]]})}get type(){return"interface"}*validate(e){if(yield*this.extAttrs.validate(e),!this.partial&&this.extAttrs.every((e=>"Exposed"!==e.name))){const e="Interfaces must have `[Exposed]` extended attribute. To fix, add, for example, `[Exposed=Window]`. Please also consider carefully if your interface should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield r(this.tokens.name,this,"require-exposed",e,{autofix:T(this)})}const t=this.extAttrs.filter((e=>"Constructor"===e.name));for(const e of t){const t="Constructors should now be represented as a `constructor()` operation on the interface instead of `[Constructor]` extended attribute. 
Refer to the [WebIDL spec section on constructor operations](https://heycam.github.io/webidl/#idl-constructors) for more information.";yield r(e.tokens.name,this,"constructor-member",t,{autofix:W(this,e)})}if(this.extAttrs.some((e=>"Global"===e.name))){const e=this.extAttrs.filter((e=>"LegacyFactoryFunction"===e.name));for(const t of e){const e="Interfaces marked as `[Global]` cannot have factory functions.";yield r(t.tokens.name,this,"no-constructible-global",e)}const t=this.members.filter((e=>"constructor"===e.type));for(const e of t){const t="Interfaces marked as `[Global]` cannot have constructors.";yield r(e.tokens.base,this,"no-constructible-global",t)}}yield*super.validate(e),this.partial||(yield*function*(e,t){const n=new Set(a(t).map((e=>e.name))),s=e.partials.get(t.name)||[],i=e.mixinMap.get(t.name)||[];for(const e of[...s,...i]){const s=a(e);yield*o(s,n,e,t);for(const e of s)n.add(e.name)}function*o(e,t,n,s){for(const i of e){const{name:e}=i;if(e&&t.has(e)){const t=`The operation "${e}" has already been defined for the base interface "${s.name}" either in itself or in a mixin`;yield r(i.tokens.name,n,"no-cross-overload",t)}}}function a(e){return e.members.filter((({type:e})=>"operation"===e))}}(e,this))}}function W(e,t){return e=N(e),()=>{const n=A(e.extAttrs.tokens.open.trivia),s=e.members.length?A(I(e.members[0]).trivia):function(e){const t=A(e),n=t.includes("\t")?"\t":" ";return t+n}(n),r=Constructor.parse(new Tokeniser(`\n${s}constructor();`));r.extAttrs=new ExtendedAttributes({}),N(r).arguments=t.arguments;const i=function(e,t){const n=e.slice().reverse().findIndex(t);return-1===n?n:e.length-n-1}(e.members,(e=>"constructor"===e.type));e.members.splice(i+1,0,r);const{close:o}=e.tokens;o.trivia.includes("\n")||(o.trivia+=`\n${n}`);const{extAttrs:a}=e,c=a.indexOf(t),u=a.splice(c,1);a.length?a.length===c?a[c-1].tokens.separator=void 0:a[c].tokens.name.trivia.trim()||(a[c].tokens.name.trivia=u[0].tokens.name.trivia):a.tokens.open=a.tokens.close=void 0}}class Mixin extends Container{static parse(e,t,{partial:n}={}){const s={partial:n,base:t};if(s.mixin=e.consume("mixin"),s.mixin)return Container.parse(e,new Mixin({source:e.source,tokens:s}),{allowedMembers:[[Constant.parse],[w],[Attribute.parse,{noInherit:!0}],[Operation.parse,{regular:!0}]]})}get type(){return"interface mixin"}}class Field extends Base{static parse(e){const t={},n=N(new Field({source:e.source,tokens:t}));return n.extAttrs=ExtendedAttributes.parse(e),t.required=e.consume("required"),n.idlType=v(e,"dictionary-type")||e.error("Dictionary member lacks a type"),t.name=e.consumeKind("identifier")||e.error("Dictionary member lacks a name"),n.default=Default.parse(e),t.required&&n.default&&e.error("Required member must not have a default"),t.termination=e.consume(";")||e.error("Unterminated dictionary member, expected `;`"),n.this}get type(){return"field"}get name(){return m(this.tokens.name.value)}get required(){return!!this.tokens.required}*validate(e){yield*this.idlType.validate(e)}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.required),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),this.default?this.default.write(e):"",e.token(this.tokens.termination)]),{data:this,parent:t})}}class Dictionary extends Container{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("dictionary"),n.base)return Container.parse(e,new Dictionary({source:e.source,tokens:n}),{inheritable:!t,allowedMembers:[[Field.parse]]})}get 
type(){return"dictionary"}}class Namespace extends Container{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("namespace"),n.base)return Container.parse(e,new Namespace({source:e.source,tokens:n}),{allowedMembers:[[Attribute.parse,{noInherit:!0,readonly:!0}],[Constant.parse],[Operation.parse,{regular:!0}]]})}get type(){return"namespace"}*validate(e){if(!this.partial&&this.extAttrs.every((e=>"Exposed"!==e.name))){const e="Namespaces must have [Exposed] extended attribute. To fix, add, for example, [Exposed=Window]. Please also consider carefully if your namespace should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield r(this.tokens.name,this,"require-exposed",e,{autofix:T(this)})}yield*super.validate(e)}}class CallbackInterface extends Container{static parse(e,t,{partial:n=null}={}){const s={callback:t};if(s.base=e.consume("interface"),s.base)return Container.parse(e,new CallbackInterface({source:e.source,tokens:s}),{inheritable:!n,allowedMembers:[[Constant.parse],[Operation.parse,{regular:!0}]]})}get type(){return"callback interface"}}function B(e,t){const n=e.source;function s(t){e.error(t)}function r(...t){return e.consume(...t)}function i(t){const n=r("interface");if(!n)return;return Mixin.parse(e,n,t)||Interface.parse(e,n,t)||s("Interface has no proper body")}function o(){if(t.productions)for(const n of t.productions){const t=n(e);if(t)return t}return function(){const t=r("callback");if(t)return e.probe("interface")?CallbackInterface.parse(e,t):CallbackFunction.parse(e,t)}()||i()||function(){const t=r("partial");if(t)return Dictionary.parse(e,{partial:t})||i({partial:t})||Namespace.parse(e,{partial:t})||s("Partial doesn't apply to anything")}()||Dictionary.parse(e)||Enum.parse(e)||Typedef.parse(e)||Includes.parse(e)||Namespace.parse(e)}const a=function(){if(!n.length)return[];const r=[];for(;;){const t=ExtendedAttributes.parse(e),n=o();if(!n){t.length&&s("Stray extended attributes");break}N(n).extAttrs=t,r.push(n)}const i=Eof.parse(e);return t.concrete&&r.push(i),r}();return e.position<n.length&&s("Unrecognised tokens"),a}function q(e,t={}){const n=new Tokeniser(e);return void 0!==t.sourceName&&(n.source.name=t.sourceName),B(n,t)}function K(e){return e}const L={wrap:e=>e.join(""),trivia:K,name:K,reference:K,type:K,generic:K,nameless:K,inheritance:K,definition:K,extendedAttribute:K,extendedAttributeReference:K};class Writer{constructor(e){this.ts=Object.assign({},L,e)}reference(e,{unescaped:t,context:n}){return t||(t=e.startsWith("_")?e.slice(1):e),this.ts.reference(e,t,n)}token(e,t=K,...n){if(!e)return"";const s=t(e.value,...n);return this.ts.wrap([this.ts.trivia(e.trivia),s])}reference_token(e,t){return this.token(e,this.reference.bind(this),{context:t})}name_token(e,t){return this.token(e,this.ts.name,t)}identifier(e,t){return this.ts.wrap([this.reference_token(e.tokens.value,t),this.token(e.tokens.separator)])}}function F(e,{templates:t=L}={}){t=Object.assign({},L,t);const n=new Writer(t);return t.wrap(e.map((e=>e.write(n))))}function U(e,t){const n=new Map,s=e.filter((e=>"includes"===e.type));for(const e of s){const s=t.get(e.includes);if(!s)continue;const r=n.get(e.target);r?r.push(s):n.set(e.target,[s])}return n}function*S(e){const t=function(e){const t=new Map,n=new Set,s=new Map;for(const r of e)if(r.partial){const e=s.get(r.name);e?e.push(r):s.set(r.name,[r])}else 
r.name&&(t.has(r.name)?n.add(r):t.set(r.name,r));return{all:e,unique:t,partials:s,duplicates:n,mixinMap:U(e,t),cache:{typedefIncludesDictionary:new WeakMap,dictionaryIncludesRequiredField:new WeakMap}}}(e);for(const e of t.all)e.validate&&(yield*e.validate(t));yield*function*({unique:e,duplicates:t}){for(const n of t){const{name:t}=n,s=`The name "${t}" of type "${e.get(t).type}" was already seen`;yield r(n.tokens.name,n,"no-duplicate",s)}}(t)}function _(e){return[...S((t=e,t.flat?t.flat():[].concat(...t)))];var t}return t})()}));
+!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.WebIDL2=t():e.WebIDL2=t()}(globalThis,(()=>(()=>{"use strict";var e={d:(t,n)=>{for(var s in n)e.o(n,s)&&!e.o(t,s)&&Object.defineProperty(t,s,{enumerable:!0,get:n[s]})},o:(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r:e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})}},t={};function n(e,t,n,s,r,{level:i="error",autofix:o,ruleName:a}={}){function c(n){return n>0?e.slice(t,t+n):e.slice(Math.max(t+n,0),t)}function u(n,{precedes:s}={}){const r=n.map((e=>e.trivia+e.value)).join(""),i=e[t];return"eof"===i.type?r:s?r+i.trivia:r.slice(i.trivia.length)}const l="eof"!==e[t].type?e[t].line:e.length>1?e[t-1].line:1,p=function(e){const t=e.split("\n");return t[t.length-1]}(u(c(-5),{precedes:!0})),d=c(5),h=u(d),m=p+h.split("\n")[0]+"\n"+(" ".repeat(p.length)+"^"),f="Syntax"===r?"since":"inside",y=`${r} error at line ${l}${e.name?` in ${e.name}`:""}${n&&n.name?`, ${f} \`${n.partial?"partial ":""}${function(e){const t=[e];for(;e&&e.parent;){const{parent:n}=e;t.unshift(n),e=n}return t.map((e=>function(e,t){let n=e;return t&&(n+=` ${t}`),n}(e.type,e.name))).join(" -> ")}(n)}\``:""}:\n${m}`;return{message:`${y} ${s}`,bareMessage:s,context:y,line:l,sourceName:e.name,level:i,ruleName:a,autofix:o,input:h,tokens:d}}function s(e,t,s,r){return n(e,t,s,r,"Syntax")}function r(e,t,s,r,i={}){return i.ruleName=s,n(t.source,e.index,t,r,"Validation",i)}e.r(t),e.d(t,{WebIDLParseError:()=>WebIDLParseError,parse:()=>q,validate:()=>_,write:()=>F});class Base{constructor({source:e,tokens:t}){Object.defineProperties(this,{source:{value:e},tokens:{value:t,writable:!0},parent:{value:null,writable:!0},this:{value:this}})}toJSON(){const e={type:void 0,name:void 0,inheritance:void 0};let t=this;for(;t!==Object.prototype;){const n=Object.getOwnPropertyDescriptors(t);for(const[t,s]of Object.entries(n))(s.enumerable||s.get)&&(e[t]=this[t]);t=Object.getPrototypeOf(t)}return e}}function i(e,t,{useNullableInner:n}={}){if(!e.union){const s=t.unique.get(e.idlType);if(!s)return;if("typedef"===s.type){const{typedefIncludesDictionary:n}=t.cache;if(n.has(s))return n.get(s);t.cache.typedefIncludesDictionary.set(s,void 0);const r=i(s.idlType,t);if(t.cache.typedefIncludesDictionary.set(s,r),r)return{reference:e,dictionary:r.dictionary}}if("dictionary"===s.type&&(n||!e.nullable))return{reference:e,dictionary:s}}for(const n of e.subtype){const e=i(n,t);if(e)return n.union?e:{reference:n,dictionary:e.dictionary}}}function o(e,t){if(t.cache.dictionaryIncludesRequiredField.has(e))return t.cache.dictionaryIncludesRequiredField.get(e);t.cache.dictionaryIncludesRequiredField.set(e,void 0);let n=e.members.some((e=>e.required));if(!n&&e.inheritance){const s=t.unique.get(e.inheritance);s?o(s,t)&&(n=!0):n=!0}return t.cache.dictionaryIncludesRequiredField.set(e,n),n}class ArrayBase extends Array{constructor({source:e,tokens:t}){super(),Object.defineProperties(this,{source:{value:e},tokens:{value:t},parent:{value:null,writable:!0}})}}class WrappedToken extends Base{static parser(e,t){return()=>{const n=e.consumeKind(t);if(n)return new WrappedToken({source:e.source,tokens:{value:n}})}}get value(){return m(this.tokens.value.value)}write(e){return e.ts.wrap([e.token(this.tokens.value),e.token(this.tokens.separator)])}}class Eof extends WrappedToken{static parse(e){const 
t=e.consumeKind("eof");if(t)return new Eof({source:e.source,tokens:{value:t}})}get type(){return"eof"}}function a(e,t){return f(e,{parser:WrappedToken.parser(e,t),listName:t+" list"})}const c=["identifier","decimal","integer","string"],u=new Map([...["NoInterfaceObject","LenientSetter","LenientThis","TreatNonObjectAsNull","Unforgeable"].map((e=>[e,`Legacy${e}`])),["NamedConstructor","LegacyFactoryFunction"],["OverrideBuiltins","LegacyOverrideBuiltIns"],["TreatNullAs","LegacyNullToEmptyString"]]);function l(e){for(const t of c){const n=a(e,t);if(n.length)return n}e.error("Expected identifiers, strings, decimals, or integers but none found")}class ExtendedAttributeParameters extends Base{static parse(e){const t={assign:e.consume("=")},n=E(new ExtendedAttributeParameters({source:e.source,tokens:t}));if(n.list=[],t.assign){if(t.asterisk=e.consume("*"),t.asterisk)return n.this;t.secondaryName=e.consumeKind(...c)}return t.open=e.consume("("),t.open?(n.list=n.rhsIsList?l(e):g(e),t.close=e.consume(")")||e.error("Unexpected token in extended attribute argument list")):t.assign&&!t.secondaryName&&e.error("No right hand side to extended attribute assignment"),n.this}get rhsIsList(){return this.tokens.assign&&!this.tokens.asterisk&&!this.tokens.secondaryName}get rhsType(){return this.rhsIsList?this.list[0].tokens.value.type+"-list":this.tokens.asterisk?"*":this.tokens.secondaryName?this.tokens.secondaryName.type:null}write(e){const{rhsType:t}=this;return e.ts.wrap([e.token(this.tokens.assign),e.token(this.tokens.asterisk),e.reference_token(this.tokens.secondaryName,this.parent),e.token(this.tokens.open),...this.list.map((n=>"identifier-list"===t?e.identifier(n,this.parent):n.write(e))),e.token(this.tokens.close)])}}class SimpleExtendedAttribute extends Base{static parse(e){const t=e.consumeKind("identifier");if(t)return new SimpleExtendedAttribute({source:e.source,tokens:{name:t},params:ExtendedAttributeParameters.parse(e)})}constructor({source:e,tokens:t,params:n}){super({source:e,tokens:t}),n.parent=this,Object.defineProperty(this,"params",{value:n})}get type(){return"extended-attribute"}get name(){return this.tokens.name.value}get rhs(){const{rhsType:e,tokens:t,list:n}=this.params;if(!e)return null;return{type:e,value:this.params.rhsIsList?n:this.params.tokens.secondaryName?m(t.secondaryName.value):null}}get arguments(){const{rhsIsList:e,list:t}=this.params;return!t||e?[]:t}*validate(e){const{name:t}=this;if("LegacyNoInterfaceObject"===t){const e="`[LegacyNoInterfaceObject]` extended attribute is an undesirable feature that may be removed from Web IDL in the future. Refer to the [relevant upstream PR](https://github.com/whatwg/webidl/pull/609) for more information.";yield r(this.tokens.name,this,"no-nointerfaceobject",e,{level:"warning"})}else if(u.has(t)){const e=`\`[${t}]\` extended attribute is a legacy feature that is now renamed to \`[${u.get(t)}]\`. 
Refer to the [relevant upstream PR](https://github.com/whatwg/webidl/pull/870) for more information.`;yield r(this.tokens.name,this,"renamed-legacy",e,{level:"warning",autofix:(n=this,()=>{const{name:e}=n;n.tokens.name.value=u.get(e),"TreatNullAs"===e&&(n.params.tokens={})})})}var n;for(const t of this.arguments)yield*t.validate(e)}write(e){return e.ts.wrap([e.ts.trivia(this.tokens.name.trivia),e.ts.extendedAttribute(e.ts.wrap([e.ts.extendedAttributeReference(this.name),this.params.write(e)])),e.token(this.tokens.separator)])}}class ExtendedAttributes extends ArrayBase{static parse(e){const t={};t.open=e.consume("[");const n=new ExtendedAttributes({source:e.source,tokens:t});return t.open?(n.push(...f(e,{parser:SimpleExtendedAttribute.parse,listName:"extended attribute"})),t.close=e.consume("]")||e.error("Expected a closing token for the extended attribute list"),n.length||(e.unconsume(t.close.index),e.error("An extended attribute list must not be empty")),e.probe("[")&&e.error("Illegal double extended attribute lists, consider merging them"),n):n}*validate(e){for(const t of this)yield*t.validate(e)}write(e){return this.length?e.ts.wrap([e.token(this.tokens.open),...this.map((t=>t.write(e))),e.token(this.tokens.close)]):""}}function p(e,t){const n=e.consume("?");n&&(t.tokens.nullable=n),e.probe("?")&&e.error("Can't nullable more than once")}function d(e,t){let n=function(e,t){const n=e.consume("FrozenArray","ObservableArray","Promise","sequence","record");if(!n)return;const s=E(new Type({source:e.source,tokens:{base:n}}));switch(s.tokens.open=e.consume("<")||e.error(`No opening bracket after ${n.value}`),n.value){case"Promise":{e.probe("[")&&e.error("Promise type cannot have extended attribute");const n=x(e,t)||e.error("Missing Promise subtype");s.subtype.push(n);break}case"sequence":case"FrozenArray":case"ObservableArray":{const r=v(e,t)||e.error(`Missing ${n.value} subtype`);s.subtype.push(r);break}case"record":{e.probe("[")&&e.error("Record key cannot have extended attribute");const n=e.consume(...C)||e.error(`Record key must be one of: ${C.join(", ")}`),r=new Type({source:e.source,tokens:{base:n}});r.tokens.separator=e.consume(",")||e.error("Missing comma after record key type"),r.type=t;const i=v(e,t)||e.error("Error parsing generic type record");s.subtype.push(r,i);break}}return s.idlType||e.error(`Error parsing generic type ${n.value}`),s.tokens.close=e.consume(">")||e.error(`Missing closing bracket after ${n.value}`),s.this}(e,t)||b(e);if(!n){const t=e.consumeKind("identifier")||e.consume(...C,...O);if(!t)return;n=new Type({source:e.source,tokens:{base:t}}),e.probe("<")&&e.error(`Unsupported generic type ${t.value}`)}return"Promise"===n.generic&&e.probe("?")&&e.error("Promise type cannot be nullable"),n.type=t||null,p(e,n),n.nullable&&"any"===n.idlType&&e.error("Type `any` cannot be made nullable"),n}class Type extends Base{static parse(e,t){return d(e,t)||function(e,t){const n={};if(n.open=e.consume("("),!n.open)return;const s=E(new Type({source:e.source,tokens:n}));for(s.type=t||null;;){const t=v(e)||e.error("No type after open parenthesis or 'or' in union type");"any"===t.idlType&&e.error("Type `any` cannot be included in a union type"),"Promise"===t.generic&&e.error("Type `Promise` cannot be included in a union type"),s.subtype.push(t);const n=e.consume("or");if(!n)break;t.tokens.separator=n}return s.idlType.length<2&&e.error("At least two types are expected in a union type but found less"),n.close=e.consume(")")||e.error("Unterminated union 
type"),p(e,s),s.this}(e,t)}constructor({source:e,tokens:t}){super({source:e,tokens:t}),Object.defineProperty(this,"subtype",{value:[],writable:!0}),this.extAttrs=new ExtendedAttributes({source:e,tokens:{}})}get generic(){return this.subtype.length&&this.tokens.base?this.tokens.base.value:""}get nullable(){return Boolean(this.tokens.nullable)}get union(){return Boolean(this.subtype.length)&&!this.tokens.base}get idlType(){if(this.subtype.length)return this.subtype;return m([this.tokens.prefix,this.tokens.base,this.tokens.postfix].filter((e=>e)).map((e=>e.value)).join(" "))}*validate(e){if(yield*this.extAttrs.validate(e),"void"===this.idlType){const e="`void` is now replaced by `undefined`. Refer to the [relevant GitHub issue](https://github.com/whatwg/webidl/issues/60) for more information.";yield r(this.tokens.base,this,"replace-void",e,{autofix:(t=this,()=>{t.tokens.base.value="undefined"})})}var t;const n=!this.union&&e.unique.get(this.idlType),s=this.union?this:n&&"typedef"===n.type?n.idlType:void 0;if(s&&this.nullable){const{reference:t}=i(s,e)||{};if(t){const e=(this.union?t:this).tokens.base,n="Nullable union cannot include a dictionary type.";yield r(e,this,"no-nullable-union-dict",n)}}else for(const t of this.subtype)yield*t.validate(e)}write(e){return e.ts.wrap([this.extAttrs.write(e),(()=>{if(this.union||this.generic)return e.ts.wrap([e.token(this.tokens.base,e.ts.generic),e.token(this.tokens.open),...this.subtype.map((t=>t.write(e))),e.token(this.tokens.close)]);const t=this.tokens.prefix||this.tokens.base,n=this.tokens.prefix?[this.tokens.prefix.value,e.ts.trivia(this.tokens.base.trivia)]:[],s=e.reference(e.ts.wrap([...n,this.tokens.base.value,e.token(this.tokens.postfix)]),{unescaped:this.idlType,context:this});return e.ts.wrap([e.ts.trivia(t.trivia),s])})(),e.token(this.tokens.nullable),e.token(this.tokens.separator)])}}class Default extends Base{static parse(e){const t=e.consume("=");if(!t)return null;const n=y(e)||e.consumeKind("string")||e.consume("null","[","{")||e.error("No value for default"),s=[n];if("["===n.value){const t=e.consume("]")||e.error("Default sequence value must be empty");s.push(t)}else if("{"===n.value){const t=e.consume("}")||e.error("Default dictionary value must be empty");s.push(t)}return new Default({source:e.source,tokens:{assign:t},expression:s})}constructor({source:e,tokens:t,expression:n}){super({source:e,tokens:t}),n.parent=this,Object.defineProperty(this,"expression",{value:n})}get type(){return k(this.expression[0]).type}get value(){return k(this.expression[0]).value}get negative(){return k(this.expression[0]).negative}write(e){return e.ts.wrap([e.token(this.tokens.assign),...this.expression.map((t=>e.token(t)))])}}class Argument extends Base{static parse(e){const t=e.position,n={},s=E(new Argument({source:e.source,tokens:n}));return s.extAttrs=ExtendedAttributes.parse(e),n.optional=e.consume("optional"),s.idlType=v(e,"argument-type"),s.idlType?(n.optional||(n.variadic=e.consume("...")),n.name=e.consumeKind("identifier")||e.consume(...$),n.name?(s.default=n.optional?Default.parse(e):null,s.this):e.unconsume(t)):e.unconsume(t)}get type(){return"argument"}get optional(){return!!this.tokens.optional}get variadic(){return!!this.tokens.variadic}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.extAttrs.validate(e),yield*this.idlType.validate(e);const t=i(this.idlType,e,{useNullableInner:!0});if(t)if(this.idlType.nullable){const e="Dictionary arguments cannot be nullable.";yield 
r(this.tokens.name,this,"no-nullable-dict-arg",e)}else if(this.optional){if(!this.default){const e="Optional dictionary arguments must have a default value of `{}`.";yield r(this.tokens.name,this,"dict-arg-default",e,{autofix:h(this)})}}else if(this.parent&&!o(t.dictionary,e)&&function(e){const t=e.parent.arguments||e.parent.list,n=t.indexOf(e);return!t.slice(n+1).some((e=>!e.optional))}(this)){const e="Dictionary argument must be optional if it has no required fields";yield r(this.tokens.name,this,"dict-arg-optional",e,{autofix:(n=this,()=>{const e=I(n.idlType);n.tokens.optional={...e,type:"optional",value:"optional"},e.trivia=" ",h(n)()})})}var n}write(e){return e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.optional),e.ts.type(this.idlType.write(e)),e.token(this.tokens.variadic),e.name_token(this.tokens.name,{data:this}),this.default?this.default.write(e):"",e.token(this.tokens.separator)])}}function h(e){return()=>{e.default=Default.parse(new Tokeniser(" = {}"))}}class Operation extends Base{static parse(e,{special:t,regular:n}={}){const s={special:t},r=E(new Operation({source:e.source,tokens:s}));return t&&"stringifier"===t.value&&(s.termination=e.consume(";"),s.termination)?(r.arguments=[],r):(t||n||(s.special=e.consume("getter","setter","deleter")),r.idlType=x(e)||e.error("Missing return type"),s.name=e.consumeKind("identifier")||e.consume("includes"),s.open=e.consume("(")||e.error("Invalid operation"),r.arguments=g(e),s.close=e.consume(")")||e.error("Unterminated operation"),s.termination=e.consume(";")||e.error("Unterminated operation, expected `;`"),r.this)}get type(){return"operation"}get name(){const{name:e}=this.tokens;return e?m(e.value):""}get special(){return this.tokens.special?this.tokens.special.value:""}*validate(e){if(yield*this.extAttrs.validate(e),!this.name&&["","static"].includes(this.special)){const e="Regular or static operations must have both a return type and an identifier.";yield r(this.tokens.open,this,"incomplete-op",e)}this.idlType&&(yield*this.idlType.validate(e));for(const t of this.arguments)yield*t.validate(e)}write(e){const{parent:t}=this,n=this.idlType?[e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.open),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.close)]:[];return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),this.tokens.name?e.token(this.tokens.special):e.token(this.tokens.special,e.ts.nameless,{data:this,parent:t}),...n,e.token(this.tokens.termination)]),{data:this,parent:t})}}class Attribute extends Base{static parse(e,{special:t,noInherit:n=!1,readonly:s=!1}={}){const r=e.position,i={special:t},o=E(new Attribute({source:e.source,tokens:i}));if(t||n||(i.special=e.consume("inherit")),"inherit"===o.special&&e.probe("readonly")&&e.error("Inherited attributes cannot be read-only"),i.readonly=e.consume("readonly"),s&&!i.readonly&&e.probe("attribute")&&e.error("Attributes must be readonly in this context"),i.base=e.consume("attribute"),i.base)return o.idlType=v(e,"attribute-type")||e.error("Attribute lacks a type"),i.name=e.consumeKind("identifier")||e.consume("async","required")||e.error("Attribute lacks a name"),i.termination=e.consume(";")||e.error("Unterminated attribute, expected `;`"),o.this;e.unconsume(r)}get type(){return"attribute"}get special(){return this.tokens.special?this.tokens.special.value:""}get readonly(){return!!this.tokens.readonly}get name(){return 
m(this.tokens.name.value)}*validate(e){switch(yield*this.extAttrs.validate(e),yield*this.idlType.validate(e),this.idlType.generic){case"sequence":case"record":{const e=`Attributes cannot accept ${this.idlType.generic} types.`;yield r(this.tokens.name,this,"attr-invalid-type",e);break}default:{const{reference:t}=i(this.idlType,e)||{};if(t){const e=(this.idlType.union?t:this.idlType).tokens.base,n="Attributes cannot accept dictionary types.";yield r(e,this,"attr-invalid-type",n)}}}}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.special),e.token(this.tokens.readonly),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.termination)]),{data:this,parent:t})}}function m(e){return e.startsWith("_")?e.slice(1):e}function f(e,{parser:t,allowDangler:n,listName:s="list"}){const r=t(e);if(!r)return[];r.tokens.separator=e.consume(",");const i=[r];for(;r.tokens.separator;){const r=t(e);if(!r){n||e.error(`Trailing comma in ${s}`);break}if(r.tokens.separator=e.consume(","),i.push(r),!r.tokens.separator)break}return i}function y(e){return e.consumeKind("decimal","integer")||e.consume("true","false","Infinity","-Infinity","NaN")}function k({type:e,value:t}){switch(e){case"decimal":case"integer":return{type:"number",value:t};case"string":return{type:"string",value:t.slice(1,-1)}}switch(t){case"true":case"false":return{type:"boolean",value:"true"===t};case"Infinity":case"-Infinity":return{type:"Infinity",negative:t.startsWith("-")};case"[":return{type:"sequence",value:[]};case"{":return{type:"dictionary"};default:return{type:t}}}function b(e){const{source:t}=e,n=function(){const n=e.consume("unsigned"),s=e.consume("short","long");if(s){const r=e.consume("long");return new Type({source:t,tokens:{prefix:n,base:s,postfix:r}})}n&&e.error("Failed to parse integer type")}()||function(){const n=e.consume("unrestricted"),s=e.consume("float","double");if(s)return new Type({source:t,tokens:{prefix:n,base:s}});n&&e.error("Failed to parse float type")}();if(n)return n;const s=e.consume("bigint","boolean","byte","octet","undefined");return s?new Type({source:t,tokens:{base:s}}):void 0}function g(e){return f(e,{parser:Argument.parse,listName:"arguments list"})}function v(e,t){const n=ExtendedAttributes.parse(e),s=Type.parse(e,t);return s&&(E(s).extAttrs=n),s}function x(e,t){const n=Type.parse(e,t||"return-type");if(n)return n;const s=e.consume("void");if(s){const t=new Type({source:e.source,tokens:{base:s}});return t.type="return-type",t}}function w(e){const t=e.consume("stringifier");if(!t)return;return Attribute.parse(e,{special:t})||Operation.parse(e,{special:t})||e.error("Unterminated stringifier")}function A(e){const t=e.split("\n");if(t.length){const e=t[t.length-1].match(/^\s+/);if(e)return e[0]}return""}function T(e){return()=>{if(e.extAttrs.length){const t=new Tokeniser("Exposed=Window,"),n=SimpleExtendedAttribute.parse(t);n.tokens.separator=t.consume(",");const s=e.extAttrs[0];/^\s/.test(s.tokens.name.trivia)||(s.tokens.name.trivia=` ${s.tokens.name.trivia}`),e.extAttrs.unshift(n)}else{E(e).extAttrs=ExtendedAttributes.parse(new Tokeniser("[Exposed=Window]"));const t=e.tokens.base.trivia;e.extAttrs.tokens.open.trivia=t,e.tokens.base.trivia=`\n${A(t)}`}}}function I(e){if(e.extAttrs.length)return e.extAttrs.tokens.open;if("operation"===e.type&&!e.special)return I(e.idlType);return Object.values(e.tokens).sort(((e,t)=>e.index-t.index))[0]}function E(e,t){if(t||(t=e),!e)return 
e;return new Proxy(e,{get(e,t){const n=e[t];return Array.isArray(n)&&"source"!==t?E(n,e):n},set(e,n,s){if(e[n]=s,!s)return!0;if(Array.isArray(s))for(const e of s)void 0!==e.parent&&(e.parent=t);else void 0!==s.parent&&(s.parent=t);return!0}})}const N={decimal:/-?(?=[0-9]*\.|[0-9]+[eE])(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,integer:/-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,identifier:/[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,string:/"[^"]*"/y,whitespace:/[\t\n\r ]+/y,comment:/\/\/.*|\/\*[\s\S]*?\*\//y,other:/[^\t\n\r 0-9A-Za-z]/y},O=["ArrayBuffer","DataView","Int8Array","Int16Array","Int32Array","Uint8Array","Uint16Array","Uint32Array","Uint8ClampedArray","BigInt64Array","BigUint64Array","Float32Array","Float64Array","any","object","symbol"],C=["ByteString","DOMString","USVString"],$=["async","attribute","callback","const","constructor","deleter","dictionary","enum","getter","includes","inherit","interface","iterable","maplike","namespace","partial","required","setlike","setter","static","stringifier","typedef","unrestricted"],D=["-Infinity","FrozenArray","Infinity","NaN","ObservableArray","Promise","bigint","boolean","byte","double","false","float","long","mixin","null","octet","optional","or","readonly","record","sequence","short","true","undefined","unsigned","void"].concat($,C,O),M=["(",")",",","...",":",";","<","=",">","?","*","[","]","{","}"],P=["_constructor","toString","_toString"];class Tokeniser{constructor(e){this.source=function(e){const t=[];let n=0,r="",i=1,o=0;for(;n<e.length;){const c=e.charAt(n);let u=-1;if(/[\t\n\r ]/.test(c)?u=a("whitespace",{noFlushTrivia:!0}):"/"===c&&(u=a("comment",{noFlushTrivia:!0})),-1!==u){const e=t.pop().value;i+=(e.match(/\n/g)||[]).length,r+=e,o-=1}else if(/[-0-9.A-Z_a-z]/.test(c)){if(u=a("decimal"),-1===u&&(u=a("integer")),-1===u){u=a("identifier");const e=t.length-1,n=t[e];if(-1!==u){if(P.includes(n.value)){const r=`${m(n.value)} is a reserved identifier and must not be used.`;throw new WebIDLParseError(s(t,e,null,r))}D.includes(n.value)&&(n.type="inline")}}}else'"'===c&&(u=a("string"));for(const s of M)if(e.startsWith(s,n)){t.push({type:"inline",value:s,trivia:r,line:i,index:o}),r="",n+=s.length,u=n;break}if(-1===u&&(u=a("other")),-1===u)throw new Error("Token stream not progressing");n=u,o+=1}return t.push({type:"eof",value:"",trivia:r,line:i,index:o}),t;function a(s,{noFlushTrivia:a}={}){const c=N[s];c.lastIndex=n;const u=c.exec(e);return u?(t.push({type:s,value:u[0],trivia:r,line:i,index:o}),a||(r=""),c.lastIndex):-1}}(e),this.position=0}error(e){throw new WebIDLParseError(s(this.source,this.position,this.current,e))}probeKind(e){return this.source.length>this.position&&this.source[this.position].type===e}probe(e){return this.probeKind("inline")&&this.source[this.position].value===e}consumeKind(...e){for(const t of e){if(!this.probeKind(t))continue;const e=this.source[this.position];return this.position++,e}}consume(...e){if(!this.probeKind("inline"))return;const t=this.source[this.position];for(const n of e)if(t.value===n)return this.position++,t}consumeIdentifier(e){if(this.probeKind("identifier")&&this.source[this.position].value===e)return this.consumeKind("identifier")}unconsume(e){this.position=e}}class WebIDLParseError extends Error{constructor({message:e,bareMessage:t,context:n,line:s,sourceName:r,input:i,tokens:o}){super(e),this.name="WebIDLParseError",this.bareMessage=t,this.context=n,this.line=s,this.sourceName=r,this.input=i,this.tokens=o}}class EnumValue extends WrappedToken{static parse(e){const 
t=e.consumeKind("string");if(t)return new EnumValue({source:e.source,tokens:{value:t}})}get type(){return"enum-value"}get value(){return super.value.slice(1,-1)}write(e){const{parent:t}=this;return e.ts.wrap([e.ts.trivia(this.tokens.value.trivia),e.ts.definition(e.ts.wrap(['"',e.ts.name(this.value,{data:this,parent:t}),'"']),{data:this,parent:t}),e.token(this.tokens.separator)])}}class Enum extends Base{static parse(e){const t={};if(t.base=e.consume("enum"),!t.base)return;t.name=e.consumeKind("identifier")||e.error("No name for enum");const n=E(new Enum({source:e.source,tokens:t}));return e.current=n.this,t.open=e.consume("{")||e.error("Bodyless enum"),n.values=f(e,{parser:EnumValue.parse,allowDangler:!0,listName:"enumeration"}),e.probeKind("string")&&e.error("No comma between enum values"),t.close=e.consume("}")||e.error("Unexpected value in enum"),n.values.length||e.error("No value in enum"),t.termination=e.consume(";")||e.error("No semicolon after enum"),n.this}get type(){return"enum"}get name(){return m(this.tokens.name.value)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.open),e.ts.wrap(this.values.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Includes extends Base{static parse(e){const t=e.consumeKind("identifier");if(!t)return;const n={target:t};if(n.includes=e.consume("includes"),n.includes)return n.mixin=e.consumeKind("identifier")||e.error("Incomplete includes statement"),n.termination=e.consume(";")||e.error("No terminating ; for includes statement"),new Includes({source:e.source,tokens:n});e.unconsume(t.index)}get type(){return"includes"}get target(){return m(this.tokens.target.value)}get includes(){return m(this.tokens.mixin.value)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.reference_token(this.tokens.target,this),e.token(this.tokens.includes),e.reference_token(this.tokens.mixin,this),e.token(this.tokens.termination)]),{data:this})}}class Typedef extends Base{static parse(e){const t={},n=E(new Typedef({source:e.source,tokens:t}));if(t.base=e.consume("typedef"),t.base)return n.idlType=v(e,"typedef-type")||e.error("Typedef lacks a type"),t.name=e.consumeKind("identifier")||e.error("Typedef lacks a name"),e.current=n.this,t.termination=e.consume(";")||e.error("Unterminated typedef, expected `;`"),n.this}get type(){return"typedef"}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.idlType.validate(e)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.termination)]),{data:this})}}class CallbackFunction extends Base{static parse(e,t){const n={base:t},s=E(new CallbackFunction({source:e.source,tokens:n}));return n.name=e.consumeKind("identifier")||e.error("Callback lacks a name"),e.current=s.this,n.assign=e.consume("=")||e.error("Callback lacks an assignment"),s.idlType=x(e)||e.error("Callback lacks a return type"),n.open=e.consume("(")||e.error("Callback lacks parentheses for arguments"),s.arguments=g(e),n.close=e.consume(")")||e.error("Unterminated callback"),n.termination=e.consume(";")||e.error("Unterminated callback, expected `;`"),s.this}get type(){return"callback"}get name(){return m(this.tokens.name.value)}*validate(e){yield*this.extAttrs.validate(e),yield*this.idlType.validate(e)}write(e){return 
e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.name_token(this.tokens.name,{data:this}),e.token(this.tokens.assign),e.ts.type(this.idlType.write(e)),e.token(this.tokens.open),...this.arguments.map((t=>t.write(e))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Container extends Base{static parse(e,t,{inheritable:n,allowedMembers:s}){const{tokens:r,type:i}=t;for(r.name=e.consumeKind("identifier")||e.error(`Missing name in ${i}`),e.current=t,t=E(t),n&&Object.assign(r,function(e){const t=e.consume(":");return t?{colon:t,inheritance:e.consumeKind("identifier")||e.error("Inheritance lacks a type")}:{}}(e)),r.open=e.consume("{")||e.error(`Bodyless ${i}`),t.members=[];;){if(r.close=e.consume("}"),r.close)return r.termination=e.consume(";")||e.error(`Missing semicolon after ${i}`),t.this;const n=ExtendedAttributes.parse(e);let o;for(const[t,...n]of s)if(o=E(t(e,...n)),o)break;o||e.error("Unknown member"),o.extAttrs=n,t.members.push(o.this)}}get partial(){return!!this.tokens.partial}get name(){return m(this.tokens.name.value)}get inheritance(){return this.tokens.inheritance?m(this.tokens.inheritance.value):null}*validate(e){for(const t of this.members)t.validate&&(yield*t.validate(e))}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.callback),e.token(this.tokens.partial),e.token(this.tokens.base),e.token(this.tokens.mixin),e.name_token(this.tokens.name,{data:this}),(()=>this.tokens.inheritance?e.ts.wrap([e.token(this.tokens.colon),e.ts.trivia(this.tokens.inheritance.trivia),e.ts.inheritance(e.reference(this.tokens.inheritance.value,{context:this}))]):"")(),e.token(this.tokens.open),e.ts.wrap(this.members.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this})}}class Constant extends Base{static parse(e){const t={};if(t.base=e.consume("const"),!t.base)return;let n=b(e);if(!n){const t=e.consumeKind("identifier")||e.error("Const lacks a type");n=new Type({source:e.source,tokens:{base:t}})}e.probe("?")&&e.error("Unexpected nullable constant type"),n.type="const-type",t.name=e.consumeKind("identifier")||e.error("Const lacks a name"),t.assign=e.consume("=")||e.error("Const lacks value assignment"),t.value=y(e)||e.error("Const lacks a value"),t.termination=e.consume(";")||e.error("Unterminated const, expected `;`");const s=new Constant({source:e.source,tokens:t});return E(s).idlType=n,s}get type(){return"const"}get name(){return m(this.tokens.name.value)}get value(){return k(this.tokens.value)}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),e.token(this.tokens.assign),e.token(this.tokens.value),e.token(this.tokens.termination)]),{data:this,parent:t})}}class IterableLike extends Base{static parse(e){const t=e.position,n=E(new IterableLike({source:e.source,tokens:{}})),{tokens:s}=n;if(s.readonly=e.consume("readonly"),s.readonly||(s.async=e.consume("async")),s.base=s.readonly?e.consume("maplike","setlike"):s.async?e.consume("iterable"):e.consume("iterable","maplike","setlike"),!s.base)return void e.unconsume(t);const{type:r}=n,i="maplike"===r,o=i||"iterable"===r,a=n.async&&"iterable"===r;s.open=e.consume("<")||e.error(`Missing less-than sign \`<\` in ${r} declaration`);const c=v(e)||e.error(`Missing a type argument in ${r} declaration`);return 
n.idlType=[c],n.arguments=[],o&&(c.tokens.separator=e.consume(","),c.tokens.separator?n.idlType.push(v(e)):i&&e.error(`Missing second type argument in ${r} declaration`)),s.close=e.consume(">")||e.error(`Missing greater-than sign \`>\` in ${r} declaration`),e.probe("(")&&(a?(s.argsOpen=e.consume("("),n.arguments.push(...g(e)),s.argsClose=e.consume(")")||e.error("Unterminated async iterable argument list")):e.error("Arguments are only allowed for `async iterable`")),s.termination=e.consume(";")||e.error(`Missing semicolon after ${r} declaration`),n.this}get type(){return this.tokens.base.value}get readonly(){return!!this.tokens.readonly}get async(){return!!this.tokens.async}*validate(e){for(const t of this.idlType)yield*t.validate(e);for(const t of this.arguments)yield*t.validate(e)}write(e){return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.readonly),e.token(this.tokens.async),e.token(this.tokens.base,e.ts.generic),e.token(this.tokens.open),e.ts.wrap(this.idlType.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.argsOpen),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.argsClose),e.token(this.tokens.termination)]),{data:this,parent:this.parent})}}class Constructor extends Base{static parse(e){const t=e.consume("constructor");if(!t)return;const n={base:t};n.open=e.consume("(")||e.error("No argument list in constructor");const s=g(e);n.close=e.consume(")")||e.error("Unterminated constructor"),n.termination=e.consume(";")||e.error("No semicolon after constructor");const r=new Constructor({source:e.source,tokens:n});return E(r).arguments=s,r}get type(){return"constructor"}*validate(e){for(const t of this.arguments)yield*t.validate(e)}write(e){const{parent:t}=this;return e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.base,e.ts.nameless,{data:this,parent:t}),e.token(this.tokens.open),e.ts.wrap(this.arguments.map((t=>t.write(e)))),e.token(this.tokens.close),e.token(this.tokens.termination)]),{data:this,parent:t})}}function j(e){const t=e.consume("static");if(!t)return;return Attribute.parse(e,{special:t})||Operation.parse(e,{special:t})||e.error("No body in static member")}class Interface extends Container{static parse(e,t,{partial:n=null}={}){const s={partial:n,base:t};return Container.parse(e,new Interface({source:e.source,tokens:s}),{inheritable:!n,allowedMembers:[[Constant.parse],[Constructor.parse],[j],[w],[IterableLike.parse],[Attribute.parse],[Operation.parse]]})}get type(){return"interface"}*validate(e){if(yield*this.extAttrs.validate(e),!this.partial&&this.extAttrs.every((e=>"Exposed"!==e.name))){const e="Interfaces must have `[Exposed]` extended attribute. To fix, add, for example, `[Exposed=Window]`. Please also consider carefully if your interface should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield r(this.tokens.name,this,"require-exposed",e,{autofix:T(this)})}const t=this.extAttrs.filter((e=>"Constructor"===e.name));for(const e of t){const t="Constructors should now be represented as a `constructor()` operation on the interface instead of `[Constructor]` extended attribute. 
Refer to the [WebIDL spec section on constructor operations](https://heycam.github.io/webidl/#idl-constructors) for more information.";yield r(e.tokens.name,this,"constructor-member",t,{autofix:W(this,e)})}if(this.extAttrs.some((e=>"Global"===e.name))){const e=this.extAttrs.filter((e=>"LegacyFactoryFunction"===e.name));for(const t of e){const e="Interfaces marked as `[Global]` cannot have factory functions.";yield r(t.tokens.name,this,"no-constructible-global",e)}const t=this.members.filter((e=>"constructor"===e.type));for(const e of t){const t="Interfaces marked as `[Global]` cannot have constructors.";yield r(e.tokens.base,this,"no-constructible-global",t)}}yield*super.validate(e),this.partial||(yield*function*(e,t){const n=function(e){const t=a(e);return{statics:new Set(t.filter((e=>"static"===e.special)).map((e=>e.name))),nonstatics:new Set(t.filter((e=>"static"!==e.special)).map((e=>e.name)))}}(t),s=e.partials.get(t.name)||[],i=e.mixinMap.get(t.name)||[];for(const e of[...s,...i]){const s=a(e),r=s.filter((e=>"static"===e.special)),i=s.filter((e=>"static"!==e.special));yield*o(r,n.statics,e,t),yield*o(i,n.nonstatics,e,t),r.forEach((e=>n.statics.add(e.name))),i.forEach((e=>n.nonstatics.add(e.name)))}function*o(e,t,n,s){for(const i of e){const{name:e}=i;if(e&&t.has(e)){const t=`The ${"static"===i.special?"static ":""}operation "${e}" has already been defined for the base interface "${s.name}" either in itself or in a mixin`;yield r(i.tokens.name,n,"no-cross-overload",t)}}}function a(e){return e.members.filter((({type:e})=>"operation"===e))}}(e,this))}}function W(e,t){return e=E(e),()=>{const n=A(e.extAttrs.tokens.open.trivia),s=e.members.length?A(I(e.members[0]).trivia):function(e){const t=A(e),n=t.includes("\t")?"\t":" ";return t+n}(n),r=Constructor.parse(new Tokeniser(`\n${s}constructor();`));r.extAttrs=new ExtendedAttributes({source:e.source,tokens:{}}),E(r).arguments=t.arguments;const i=function(e,t){const n=e.slice().reverse().findIndex(t);return-1===n?n:e.length-n-1}(e.members,(e=>"constructor"===e.type));e.members.splice(i+1,0,r);const{close:o}=e.tokens;o.trivia.includes("\n")||(o.trivia+=`\n${n}`);const{extAttrs:a}=e,c=a.indexOf(t),u=a.splice(c,1);a.length?a.length===c?a[c-1].tokens.separator=void 0:a[c].tokens.name.trivia.trim()||(a[c].tokens.name.trivia=u[0].tokens.name.trivia):a.tokens.open=a.tokens.close=void 0}}class Mixin extends Container{static parse(e,t,{partial:n}={}){const s={partial:n,base:t};if(s.mixin=e.consume("mixin"),s.mixin)return Container.parse(e,new Mixin({source:e.source,tokens:s}),{allowedMembers:[[Constant.parse],[w],[Attribute.parse,{noInherit:!0}],[Operation.parse,{regular:!0}]]})}get type(){return"interface mixin"}}class Field extends Base{static parse(e){const t={},n=E(new Field({source:e.source,tokens:t}));return n.extAttrs=ExtendedAttributes.parse(e),t.required=e.consume("required"),n.idlType=v(e,"dictionary-type")||e.error("Dictionary member lacks a type"),t.name=e.consumeKind("identifier")||e.error("Dictionary member lacks a name"),n.default=Default.parse(e),t.required&&n.default&&e.error("Required member must not have a default"),t.termination=e.consume(";")||e.error("Unterminated dictionary member, expected `;`"),n.this}get type(){return"field"}get name(){return m(this.tokens.name.value)}get required(){return!!this.tokens.required}*validate(e){yield*this.idlType.validate(e)}write(e){const{parent:t}=this;return 
e.ts.definition(e.ts.wrap([this.extAttrs.write(e),e.token(this.tokens.required),e.ts.type(this.idlType.write(e)),e.name_token(this.tokens.name,{data:this,parent:t}),this.default?this.default.write(e):"",e.token(this.tokens.termination)]),{data:this,parent:t})}}class Dictionary extends Container{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("dictionary"),n.base)return Container.parse(e,new Dictionary({source:e.source,tokens:n}),{inheritable:!t,allowedMembers:[[Field.parse]]})}get type(){return"dictionary"}}class Namespace extends Container{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("namespace"),n.base)return Container.parse(e,new Namespace({source:e.source,tokens:n}),{allowedMembers:[[Attribute.parse,{noInherit:!0,readonly:!0}],[Constant.parse],[Operation.parse,{regular:!0}]]})}get type(){return"namespace"}*validate(e){if(!this.partial&&this.extAttrs.every((e=>"Exposed"!==e.name))){const e="Namespaces must have [Exposed] extended attribute. To fix, add, for example, [Exposed=Window]. Please also consider carefully if your namespace should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield r(this.tokens.name,this,"require-exposed",e,{autofix:T(this)})}yield*super.validate(e)}}class CallbackInterface extends Container{static parse(e,t,{partial:n=null}={}){const s={callback:t};if(s.base=e.consume("interface"),s.base)return Container.parse(e,new CallbackInterface({source:e.source,tokens:s}),{inheritable:!n,allowedMembers:[[Constant.parse],[Operation.parse,{regular:!0}]]})}get type(){return"callback interface"}}function B(e,t){const n=e.source;function s(t){e.error(t)}function r(...t){return e.consume(...t)}function i(t){const n=r("interface");if(!n)return;return Mixin.parse(e,n,t)||Interface.parse(e,n,t)||s("Interface has no proper body")}function o(){if(t.productions)for(const n of t.productions){const t=n(e);if(t)return t}return function(){const t=r("callback");if(t)return e.probe("interface")?CallbackInterface.parse(e,t):CallbackFunction.parse(e,t)}()||i()||function(){const t=r("partial");if(t)return Dictionary.parse(e,{partial:t})||i({partial:t})||Namespace.parse(e,{partial:t})||s("Partial doesn't apply to anything")}()||Dictionary.parse(e)||Enum.parse(e)||Typedef.parse(e)||Includes.parse(e)||Namespace.parse(e)}const a=function(){if(!n.length)return[];const r=[];for(;;){const t=ExtendedAttributes.parse(e),n=o();if(!n){t.length&&s("Stray extended attributes");break}E(n).extAttrs=t,r.push(n)}const i=Eof.parse(e);return t.concrete&&r.push(i),r}();return e.position<n.length&&s("Unrecognised tokens"),a}function q(e,t={}){const n=new Tokeniser(e);return void 0!==t.sourceName&&(n.source.name=t.sourceName),B(n,t)}function K(e){return e}const L={wrap:e=>e.join(""),trivia:K,name:K,reference:K,type:K,generic:K,nameless:K,inheritance:K,definition:K,extendedAttribute:K,extendedAttributeReference:K};class Writer{constructor(e){this.ts=Object.assign({},L,e)}reference(e,{unescaped:t,context:n}){return t||(t=e.startsWith("_")?e.slice(1):e),this.ts.reference(e,t,n)}token(e,t=K,...n){if(!e)return"";const s=t(e.value,...n);return this.ts.wrap([this.ts.trivia(e.trivia),s])}reference_token(e,t){return this.token(e,this.reference.bind(this),{context:t})}name_token(e,t){return this.token(e,this.ts.name,t)}identifier(e,t){return this.ts.wrap([this.reference_token(e.tokens.value,t),this.token(e.tokens.separator)])}}function 
F(e,{templates:t=L}={}){t=Object.assign({},L,t);const n=new Writer(t);return t.wrap(e.map((e=>e.write(n))))}function S(e,t){const n=new Map,s=e.filter((e=>"includes"===e.type));for(const e of s){const s=t.get(e.includes);if(!s)continue;const r=n.get(e.target);r?r.push(s):n.set(e.target,[s])}return n}function*U(e){const t=function(e){const t=new Map,n=new Set,s=new Map;for(const r of e)if(r.partial){const e=s.get(r.name);e?e.push(r):s.set(r.name,[r])}else r.name&&(t.has(r.name)?n.add(r):t.set(r.name,r));return{all:e,unique:t,partials:s,duplicates:n,mixinMap:S(e,t),cache:{typedefIncludesDictionary:new WeakMap,dictionaryIncludesRequiredField:new WeakMap}}}(e);for(const e of t.all)e.validate&&(yield*e.validate(t));yield*function*({unique:e,duplicates:t}){for(const n of t){const{name:t}=n,s=`The name "${t}" of type "${e.get(t).type}" was already seen`;yield r(n.tokens.name,n,"no-duplicate",s)}}(t)}function _(e){return[...U((t=e,t.flat?t.flat():[].concat(...t)))];var t}return t})()));
//# sourceMappingURL=webidl2.js.map
\ No newline at end of file
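For orientation, a minimal usage sketch of the webidl2 module whose bundle appears above (an illustrative aside, not part of the diff; in the minified code, `q`, `F` and `_` are the `parse`, `write` and `validate` entry points):

// Hypothetical consumer code, assuming the standard webidl2 API.
const { parse, write, validate } = require("webidl2");

// parse() turns Web IDL text into an AST array; it throws WebIDLParseError
// on syntactically invalid input.
const ast = parse("[Exposed=Window] interface Foo { undefined bar(); };");

// validate() yields linting diagnostics, e.g. the "replace-void" and
// "require-exposed" rules seen in the bundle above.
for (const diagnostic of validate(ast)) {
  console.log(diagnostic.message);
}

// write() serializes the AST back to Web IDL source text.
console.log(write(ast));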
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js.map b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js.map
index 1c09b9c2e22..ba690c82326 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js.map
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/dist/webidl2.js.map
@@ -1 +1 @@
-{"version":3,"file":"webidl2.js","mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAiB,QAAID,IAErBD,EAAc,QAAIC,IARpB,CASGK,YAAY,WACf,M,mBCTA,IAAIC,EAAsB,CCA1B,EAAwB,CAACL,EAASM,KACjC,IAAI,IAAIC,KAAOD,EACXD,EAAoBG,EAAEF,EAAYC,KAASF,EAAoBG,EAAER,EAASO,IAC5EE,OAAOC,eAAeV,EAASO,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,MCJ3E,EAAwB,CAACM,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClF,EAAyBd,IACH,oBAAXkB,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeV,EAASkB,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeV,EAAS,aAAc,CAAEoB,OAAO,M,KCgCvD,SAASC,EACPC,EACAC,EACAC,EACAC,EACAC,GACA,MAAEC,EAAQ,QAAO,QAAEC,EAAO,SAAEC,GAAa,IAKzC,SAASC,EAAYC,GACnB,OAAOA,EAAQ,EACXT,EAAOU,MAAMT,EAAUA,EAAWQ,GAClCT,EAAOU,MAAMC,KAAKC,IAAIX,EAAWQ,EAAO,GAAIR,GAGlD,SAASY,EAAaC,GAAQ,SAAEC,GAAa,IAC3C,MAAMC,EAAOF,EAAOG,KAAKC,GAAMA,EAAEC,OAASD,EAAEpB,QAAOsB,KAAK,IAClDC,EAAYrB,EAAOC,GACzB,MAAuB,QAAnBoB,EAAUC,KACLN,EAELD,EACKC,EAAOK,EAAUF,OAEnBH,EAAKN,MAAMW,EAAUF,OAAOI,QAGrC,MACMC,EACsB,QAA1BxB,EAAOC,GAAUqB,KACbtB,EAAOC,GAAUuB,KACjBxB,EAAOuB,OAAS,EAChBvB,EAAOC,EAAW,GAAGuB,KACrB,EAEAC,EAvER,SAAkBT,GAChB,MAAMU,EAAWV,EAAKW,MAAM,MAC5B,OAAOD,EAASA,EAASH,OAAS,GAqERK,CACxBf,EAAaL,GATG,GASsB,CAAEO,UAAU,KAG9Cc,EAAmBrB,EAZP,GAaZsB,EAAiBjB,EAAagB,GAI9BE,EAAgBN,EAHMK,EAAeH,MAAM,MAAM,GAGS,MADjD,IAAIK,OAAOP,EAAkBF,QAAU,KAGhDU,EAAuB,WAAT7B,EAAoB,QAAU,SAQ5C8B,EAAU,GAAG9B,mBAAsBoB,IAPpBxB,EAAOmC,KAAO,OAAOnC,EAAOmC,OAAS,KAExDjC,GAAWA,EAAQiC,KACf,KAAKF,OAAiB/B,EAAQkC,QAAU,WAAa,KAzE7D,SAAuBC,GACrB,MAAMC,EAAY,CAACD,GACnB,KAAOA,GAAQA,EAAKE,QAAQ,CAC1B,MAAM,OAAEA,GAAWF,EACnBC,EAAUE,QAAQD,GAClBF,EAAOE,EAET,OAAOD,EAAUrB,KAAKwB,GAfxB,SAAuBC,EAAMC,GAC3B,IAAIC,EAASF,EAIb,OAHIC,IACFC,GAAU,IAAID,KAETC,EAUqBC,CAAcJ,EAAEnB,KAAMmB,EAAEN,QAAOf,KAAK,QAkEA0B,CACxD5C,OAEF,QACiF6B,IACvF,MAAO,CACL5B,QAAS,GAAG+B,KAAW/B,IACvB4C,YAAa5C,EACb+B,QAAAA,EACAV,KAAAA,EACAwB,WAAYhD,EAAOmC,KACnB9B,MAAAA,EACAE,SAAAA,EACAD,QAAAA,EACA2C,MAAOnB,EACPoB,OAAQrB,GAOL,SAASsB,EAAYnD,EAAQC,EAAUC,EAASC,GACrD,OAAOJ,EAAMC,EAAQC,EAAUC,EAASC,EAAS,UAO5C,SAASiD,EACdC,EACAnD,EACAK,EACAJ,EACAmD,EAAU,IAGV,OADAA,EAAQ/C,SAAWA,EACZR,EACLG,EAAQF,OACRqD,EAAME,MACNrD,EACAC,EACA,aACAmD,G,6FCnIG,MAAME,KAMXC,aAAY,OAAEzD,EAAM,OAAEkD,IACpB/D,OAAOuE,iBAAiBC,KAAM,CAC5B3D,OAAQ,CAAEF,MAAOE,GACjBkD,OAAQ,CAAEpD,MAAOoD,EAAQU,UAAU,GACnCrB,OAAQ,CAAEzC,MAAO,KAAM8D,UAAU,GACjCD,KAAM,CAAE7D,MAAO6D,QAInBE,SACE,MAAMC,EAAO,CAAExC,UAAMyC,EAAW5B,UAAM4B,EAAWC,iBAAaD,GAC9D,IAAIE,EAAQN,KACZ,KAAOM,IAAU9E,OAAOM,WAAW,CACjC,MAAMyE,EAAU/E,OAAOgF,0BAA0BF,GACjD,IAAK,MAAOhF,EAAKa,KAAUX,OAAOiF,QAAQF,IACpCpE,EAAMT,YAAcS,EAAMR,OAE5BwE,EAAK7E,GAAO0E,KAAK1E,IAGrBgF,EAAQ9E,OAAOkF,eAAeJ,GAEhC,OAAOH,GCnBJ,SAASQ,EACdC,EACAC,GACA,iBAAEC,GAAqB,IAEvB,IAAKF,EAAQG,MAAO,CAClB,MAAMC,EAAMH,EAAKI,OAAOtF,IAAIiF,EAAQA,SACpC,IAAKI,EACH,OAEF,GAAiB,YAAbA,EAAIrD,KAAoB,CAC1B,MAAM,0BAAEuD,GAA8BL,EAAKM,MAC3C,GAAID,EAA0BE,IAAIJ,GAGhC,OAAOE,EAA0BvF,IAAIqF,GAEvCH,EAAKM,MAAMD,0BAA0BG,IAAIL,OAAKZ,GAC9C,MAAMnB,EAAS0B,EAA0BK,EAAIJ,QAASC,GAEtD,GADAA,EAAKM,MAAMD,0BAA0BG,IAAIL,EAAK/B,GAC1CA,EACF,MAAO,CACLqC,UAAWV,EACXW,WAAYtC,EAAOsC,YAIzB,GAAiB,eAAbP,EAAIrD,OAA0BmD,IAAqBF,EAAQY,UAC7D,MAAO,CACLF,UAAWV,EACXW,WAAYP,GAIlB,IAAK,MAAMS,KAAWb,EAAQa,QAAS,CACrC,MAAMxC,EAAS0B,EAA0Bc,EAASZ,GAClD,GAAI5B,EACF,OAAIwC,EAAQV,MACH9B,EAEF,CACLqC,UAAWG,EACXF,WAAYtC,EAAOsC,aAWpB,SAASG,EAAgCC,EAAMd,GACpD,GAAIA,EAAKM,MAAMO,gCAAgCN,IAAIO,GACjD,OAAOd,EAAKM,MAAMO,gCAAgC/F,IAAIgG,GAIxDd,EAAKM,MAAMO,gCAAgCL,IAAIM,OAAMvB,GACrD,IAAInB,EAAS0C,EAAKC,QAAQC,MAAMC,GAAUA,EAAMC,WAChD,IAAK9C,GAAU0C,EAAKtB,YAAa,CAC/B,MAAM2B,EAAYnB,EAAKI,OAAOtF,IAAIgG,EAAKtB,aAClC2B
,EAGMN,EAAgCM,EAAWnB,KACpD5B,GAAS,GAFTA,GAAS,EAMb,OADA4B,EAAKM,MAAMO,gCAAgCL,IAAIM,EAAM1C,GAC9CA,EChFF,MAAMgD,kBAAkBC,MAC7BpC,aAAY,OAAEzD,EAAM,OAAEkD,IACpB4C,QACA3G,OAAOuE,iBAAiBC,KAAM,CAC5B3D,OAAQ,CAAEF,MAAOE,GACjBkD,OAAQ,CAAEpD,MAAOoD,GACjBX,OAAQ,CAAEzC,MAAO,KAAM8D,UAAU,MCHhC,MAAMmC,qBAAqBvC,KAKhC,cAAcwC,EAAW1E,GACvB,MAAO,KACL,MAAMxB,EAAQkG,EAAUC,YAAY3E,GACpC,GAAIxB,EACF,OAAO,IAAIiG,aAAa,CACtB/F,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEpD,MAAAA,MAMlB,YACE,OAAO,EAAS6D,KAAKT,OAAOpD,MAAMA,OAIpCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOpD,OACpBqG,EAAE9C,MAAMM,KAAKT,OAAOoD,cAKnB,MAAMC,YAAYR,aAIvB,aAAaC,GACX,MAAMlG,EAAQkG,EAAUC,YAAY,OACpC,GAAInG,EACF,OAAO,IAAIyG,IAAI,CAAEvG,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAEpD,MAAAA,KAIzD,WACE,MAAO,OCrCX,SAASoD,EAAO8C,EAAWQ,GACzB,OAAOC,EAAKT,EAAW,CACrBU,OAAQX,aAAaW,OAAOV,EAAWQ,GACvCG,SAAUH,EAAY,UAI1B,MAAMI,EAAqB,CAAC,aAAc,UAAW,UAAW,UAU1DC,EAAkB,IAAIC,IAAI,IARD,CAC7B,oBACA,gBACA,cACA,uBACA,eAI0B7F,KAAKkB,GAAS,CAACA,EAAM,SAASA,OACxD,CAAC,mBAAoB,yBACrB,CAAC,mBAAoB,0BACrB,CAAC,cAAe,6BAOlB,SAAS4E,EAAiBf,GACxB,IAAK,MAAMgB,KAAUJ,EAAoB,CACvC,MAAMK,EAAO/D,EAAO8C,EAAWgB,GAC/B,GAAIC,EAAK1F,OACP,OAAO0F,EAGXjB,EAAUjG,MACR,uEAIJ,MAAMmH,oCAAoC1D,KAIxC,aAAawC,GACX,MAAM9C,EAAS,CAAEiE,OAAQnB,EAAUoB,QAAQ,MACrCC,EAAMC,EACV,IAAIJ,4BAA4B,CAAElH,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAG9D,GADAmE,EAAIZ,KAAO,GACPvD,EAAOiE,OAAQ,CAEjB,GADAjE,EAAOqE,SAAWvB,EAAUoB,QAAQ,KAChClE,EAAOqE,SACT,OAAOF,EAAI1D,KAEbT,EAAOsE,cAAgBxB,EAAUC,eAAeW,GAelD,OAbA1D,EAAOuE,KAAOzB,EAAUoB,QAAQ,KAC5BlE,EAAOuE,MACTJ,EAAIZ,KAAOY,EAAIK,UAEXX,EAAiBf,GAEjB2B,EAAc3B,GAClB9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,yDACTsH,EAAIQ,SAAW3E,EAAOsE,eAC/BxB,EAAUjG,MAAM,uDAEXsH,EAAI1D,KAGb,gBACE,OACEA,KAAKT,OAAOiE,SAAWxD,KAAKT,OAAOqE,WAAa5D,KAAKT,OAAOsE,cAIhE,cACE,OAAI7D,KAAK+D,UACA/D,KAAK8C,KAAK,GAAGvD,OAAOpD,MAAMwB,KAAO,QAEtCqC,KAAKT,OAAOqE,SACP,IAEL5D,KAAKT,OAAOsE,cACP7D,KAAKT,OAAOsE,cAAclG,KAE5B,KAIT4E,MAAMC,GACJ,MAAM,QAAE2B,GAAYnE,KACpB,OAAOwC,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAE9C,MAAMM,KAAKT,OAAOqE,UACpBpB,EAAE4B,gBAAgBpE,KAAKT,OAAOsE,cAAe7D,KAAKpB,QAClD4D,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK8C,KAAKxF,KAAK+G,GACG,oBAAZF,EACH3B,EAAE8B,WAAWD,EAAGrE,KAAKpB,QACrByF,EAAE9B,MAAMC,KAEdA,EAAE9C,MAAMM,KAAKT,OAAO0E,UAKnB,MAAMM,gCAAgC1E,KAI3C,aAAawC,GACX,MAAM7D,EAAO6D,EAAUC,YAAY,cACnC,GAAI9D,EACF,OAAO,IAAI+F,wBAAwB,CACjClI,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEf,KAAAA,GACVgG,OAAQjB,4BAA4BkB,MAAMpC,KAKhDvC,aAAY,OAAEzD,EAAM,OAAEkD,EAAM,OAAEiF,IAC5BrC,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChBiF,EAAO5F,OAASoB,KAChBxE,OAAOC,eAAeuE,KAAM,SAAU,CAAE7D,MAAOqI,IAGjD,WACE,MAAO,qBAET,WACE,OAAOxE,KAAKT,OAAOf,KAAKrC,MAE1B,UACE,MAAQgI,QAASxG,EAAI,OAAE4B,EAAM,KAAEuD,GAAS9C,KAAKwE,OAC7C,IAAK7G,EACH,OAAO,KAOT,MAAO,CAAEA,KAAAA,EAAMxB,MALD6D,KAAKwE,OAAOT,UACtBjB,EACA9C,KAAKwE,OAAOjF,OAAOsE,cACnB,EAAStE,EAAOsE,cAAc1H,OAC9B,MAGN,gBACE,MAAM,UAAE4H,EAAS,KAAEjB,GAAS9C,KAAKwE,OACjC,OAAK1B,GAAQiB,EACJ,GAEFjB,EAGT,UAAUjC,GACR,MAAM,KAAErC,GAASwB,KACjB,GAAa,4BAATxB,EAAoC,CACtC,MAAMhC,EAAU,sOAIViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,uBACAxD,EACA,CAAEE,MAAO,iBAEN,GAAIwG,EAAgB9B,IAAI5C,GAAO,CACpC,MAAMhC,EAAU,MAAMgC,yEACA0E,EAAgBvH,IAAI6C,oHAGpCiB,EAAgBO,KAAKT,OAAOf,KAAMwB,KAAM,iBAAkBxD,EAAS,CACvEE,MAAO,UACPC,SA0B+B+H,EA1BQ1E,KA2BtC,KACL,MAAM,KAAExB,GAASkG,EACjBA,EAAQnF,OAAOf,KAAKrC,MAAQ+G,EAAgBvH,IAAI6C,GACnC,gBAATA,IACFkG,EAAQF,OAAOjF,OAAS,QAL9B,IAAuCmF,EAvBnC,IAAK,MAAMC,KAAO3E,KAAK4E,gBACdD,EAAIE,SAAShE,GAKxB0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOf,KAAKhB,QAC7BgF,EAAEC,GAAGqC,kBACHtC,EAAEC,GAAGC,KAAK,CACRF,EAAEC,GAA
GsC,2BAA2B/E,KAAKxB,MACrCwB,KAAKwE,OAAOjC,MAAMC,MAGtBA,EAAE9C,MAAMM,KAAKT,OAAOoD,cAoBnB,MAAMqC,2BAA2B/C,UAItC,aAAaI,GACX,MAAM9C,EAAS,GAEf,GADAA,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAC3BlE,EAAOuE,KAAM,OAAO,IAAIkB,mBAAmB,IAChD,MAAMtB,EAAM,IAAIsB,mBAAmB,CAAE3I,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAkB/D,OAjBAmE,EAAIuB,QACCnC,EAAKT,EAAW,CACjBU,OAAQwB,wBAAwBE,MAChCzB,SAAU,wBAGdzD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,kDACbsH,EAAI9F,QACPyE,EAAUjG,MAAM,qCAEdiG,EAAU6C,MAAM,MAClB7C,EAAUjG,MACR,kEAGGsH,EAGT,UAAU7C,GACR,IAAK,MAAM6D,KAAW1E,WACb0E,EAAQG,SAAShE,GAK5B0B,MAAMC,GACJ,OAAKxC,KAAKpC,OACH4E,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK1C,KAAK6H,GAAOA,EAAG5C,MAAMC,KAC7BA,EAAE9C,MAAMM,KAAKT,OAAO0E,SAJG,ICnL7B,SAASmB,EAAY/C,EAAWzG,GAC9B,MAAM4F,EAAWa,EAAUoB,QAAQ,KAC/BjC,IACF5F,EAAI2D,OAAOiC,SAAWA,GAEpBa,EAAU6C,MAAM,MAAM7C,EAAUjG,MAAM,iCAO5C,SAASiJ,EAAYhD,EAAWiD,GAC9B,IAAI5B,EAhFN,SAAsBrB,EAAWiD,GAC/B,MAAMvG,EAAOsD,EAAUoB,QACrB,cACA,kBACA,UACA,WACA,UAEF,IAAK1E,EACH,OAEF,MAAM2E,EAAMC,EACV,IAAI4B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,MAKjD,OAHA2E,EAAInE,OAAOuE,KACTzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,4BAA4B2C,EAAK5C,SAC3C4C,EAAK5C,OACX,IAAK,UAAW,CACVkG,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,+CAClB,MAAMqF,EACJ+D,EAAYnD,EAAWiD,IACvBjD,EAAUjG,MAAM,2BAClBsH,EAAIjC,QAAQwD,KAAKxD,GACjB,MAEF,IAAK,WACL,IAAK,cACL,IAAK,kBAAmB,CACtB,MAAMA,EACJgE,EAA8BpD,EAAWiD,IACzCjD,EAAUjG,MAAM,WAAW2C,EAAK5C,iBAClCuH,EAAIjC,QAAQwD,KAAKxD,GACjB,MAEF,IAAK,SAAU,CACTY,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,6CAClB,MAAMsJ,EACJrD,EAAUoB,WAAWkC,IACrBtD,EAAUjG,MAAM,8BAA8BuJ,EAAYlI,KAAK,SAC3DmI,EAAa,IAAIL,KAAK,CAC1BlJ,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAER,KAAM2G,KAElBE,EAAWrG,OAAOoD,UAChBN,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,uCAClBwJ,EAAWjI,KAAO2H,EAClB,MAAMO,EACJJ,EAA8BpD,EAAWiD,IACzCjD,EAAUjG,MAAM,qCAClBsH,EAAIjC,QAAQwD,KAAKW,EAAYC,GAC7B,OAOJ,OAJKnC,EAAI9C,SAASyB,EAAUjG,MAAM,8BAA8B2C,EAAK5C,SACrEuH,EAAInE,OAAO0E,MACT5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,iCAAiC2C,EAAK5C,SACjDuH,EAAI1D,KAmBD8F,CAAazD,EAAWiD,IAAaS,EAAe1D,GAC9D,IAAKqB,EAAK,CACR,MAAM3E,EACJsD,EAAUC,YAAY,eACtBD,EAAUoB,WAAWkC,KAAgBK,GACvC,IAAKjH,EACH,OAEF2E,EAAM,IAAI6B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,KACjDsD,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,4BAA4B2C,EAAK5C,SASrD,MAPoB,YAAhBuH,EAAIuC,SAAyB5D,EAAU6C,MAAM,MAC/C7C,EAAUjG,MAAM,mCAElBsH,EAAI/F,KAAO2H,GAAY,KACvBF,EAAY/C,EAAWqB,GACnBA,EAAIlC,UAA4B,QAAhBkC,EAAI9C,SACtByB,EAAUjG,MAAM,sCACXsH,EAsCF,MAAM6B,aAAa1F,KAKxB,aAAawC,EAAWiD,GACtB,OAAOD,EAAYhD,EAAWiD,IArClC,SAAoBjD,EAAW1E,GAC7B,MAAM4B,EAAS,GAEf,GADAA,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAC3BlE,EAAOuE,KAAM,OAClB,MAAMJ,EAAMC,EAAa,IAAI4B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAE9D,IADAmE,EAAI/F,KAAOA,GAAQ,OACN,CACX,MAAMuI,EACJT,EAA8BpD,IAC9BA,EAAUjG,MAAM,wDACE,QAAhB8J,EAAItF,SACNyB,EAAUjG,MAAM,iDACE,YAAhB8J,EAAID,SACN5D,EAAUjG,MAAM,qDAClBsH,EAAIjC,QAAQwD,KAAKiB,GACjB,MAAMC,EAAK9D,EAAUoB,QAAQ,MAC7B,IAAI0C,EAEG,MADLD,EAAI3G,OAAOoD,UAAYwD,EAW3B,OARIzC,EAAI9C,QAAQhD,OAAS,GACvByE,EAAUjG,MACR,kEAGJmD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,2BAC5CgJ,EAAY/C,EAAWqB,GAChBA,EAAI1D,KASkCoG,CAAW/D,EAAWiD,GAGnExF,aAAY,OAAEzD,EAAM,OAAEkD,IACpB4C,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChB/D,OAAOC,eAAeuE,KAAM,UAAW,CAAE7D,MAAO,GAAI8D,UAAU,IAC9DD,KAAKqG,SAAW,IAAIrB,mBAAmB,IAGzC,cACE,OAAIhF,KAAKyB,QAAQ7D,QAAUoC,KAAKT,OAAOR,KAC9BiB,KAAKT,OAAOR,KAAK5C,MAEnB,GAET,eACE,OAAOmK,QAAQtG,KAAKT,OAAOiC,UAE7B,YACE,OAAO8E,QAAQtG,KAAKyB,QAAQ7D,UAAYoC,KAAKT,OAAOR,KAEtD,cACE,GAAIiB,KAAKyB,QAAQ7D,OACf,OAAOoC,KAAKyB,QAOd,OAAO,EAJM,CAACzB,KAAKT,OAAOgH,OAAQvG,KAAKT,OAAOR,KAAMiB,KAAKT,OAAOiH,SAC7DC,QAAQlJ,GAAMA,IACdD,KAAKC,GAA
MA,EAAEpB,QACbsB,KAAK,MAIV,UAAUoD,GAGR,SAFOb,KAAKqG,SAASxB,SAAShE,GAET,SAAjBb,KAAKY,QAAoB,CAC3B,MAAMpE,EAAU,sJAGViD,EAAgBO,KAAKT,OAAOR,KAAMiB,KAAM,eAAgBxD,EAAS,CACrEG,SAwEagB,EAxEQqC,KAyEpB,KACLrC,EAAK4B,OAAOR,KAAK5C,MAAQ,gBAF7B,IAAqBwB,EAhEjB,MAAM+I,GAAW1G,KAAKe,OAASF,EAAKI,OAAOtF,IAAIqE,KAAKY,SAC9C5B,EAASgB,KAAKe,MAChBf,KACA0G,GAA4B,YAAjBA,EAAQ/I,KACnB+I,EAAQ9F,aACRR,EACJ,GAAIpB,GAAUgB,KAAKwB,SAAU,CAE3B,MAAM,UAAEF,GAAcX,EAA0B3B,EAAQ6B,IAAS,GACjE,GAAIS,EAAW,CACb,MAAMqF,GAAe3G,KAAKe,MAAQO,EAAYtB,MAAMT,OAAOR,KACrDvC,EAAU,yDACViD,EACJkH,EACA3G,KACA,yBACAxD,SAKJ,IAAK,MAAMiF,KAAWzB,KAAKyB,cAClBA,EAAQoD,SAAShE,GAM9B0B,MAAMC,GAwBJ,OAAOA,EAAEC,GAAGC,KAAK,CACf1C,KAAKqG,SAAS9D,MAAMC,GAxBJ,MAChB,GAAIxC,KAAKe,OAASf,KAAKiG,QACrB,OAAOzD,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGwD,SAC/BzD,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAKyB,QAAQnE,KAAKC,GAAMA,EAAEgF,MAAMC,KACnCA,EAAE9C,MAAMM,KAAKT,OAAO0E,SAGxB,MAAM2C,EAAa5G,KAAKT,OAAOgH,QAAUvG,KAAKT,OAAOR,KAC/CwH,EAASvG,KAAKT,OAAOgH,OACvB,CAACvG,KAAKT,OAAOgH,OAAOpK,MAAOqG,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOR,KAAKvB,SACxD,GACEqJ,EAAMrE,EAAElB,UACZkB,EAAEC,GAAGC,KAAK,IACL6D,EACHvG,KAAKT,OAAOR,KAAK5C,MACjBqG,EAAE9C,MAAMM,KAAKT,OAAOiH,WAEtB,CAAEM,UAAW9G,KAAKY,QAASrC,QAASyB,OAEtC,OAAOwC,EAAEC,GAAGC,KAAK,CAACF,EAAEC,GAAGjF,OAAOoJ,EAAWpJ,QAASqJ,KAIlDE,GACAvE,EAAE9C,MAAMM,KAAKT,OAAOiC,UACpBgB,EAAE9C,MAAMM,KAAKT,OAAOoD,cCtQnB,MAAMqE,gBAAgBnH,KAI3B,aAAawC,GACX,MAAMmB,EAASnB,EAAUoB,QAAQ,KACjC,IAAKD,EACH,OAAO,KAET,MAAMxC,EACJiG,EAAY5E,IACZA,EAAUC,YAAY,WACtBD,EAAUoB,QAAQ,OAAQ,IAAK,MAC/BpB,EAAUjG,MAAM,wBACZ8K,EAAa,CAAClG,GACpB,GAAkB,MAAdA,EAAI7E,MAAe,CACrB,MAAM8H,EACJ5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCAClB8K,EAAWjC,KAAKhB,QACX,GAAkB,MAAdjD,EAAI7E,MAAe,CAC5B,MAAM8H,EACJ5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,0CAClB8K,EAAWjC,KAAKhB,GAElB,OAAO,IAAI+C,QAAQ,CACjB3K,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEiE,OAAAA,GACV0D,WAAAA,IAIJpH,aAAY,OAAEzD,EAAM,OAAEkD,EAAM,WAAE2H,IAC5B/E,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChB2H,EAAWtI,OAASoB,KACpBxE,OAAOC,eAAeuE,KAAM,aAAc,CAAE7D,MAAO+K,IAGrD,WACE,OAAOC,EAAWnH,KAAKkH,WAAW,IAAIvJ,KAExC,YACE,OAAOwJ,EAAWnH,KAAKkH,WAAW,IAAI/K,MAExC,eACE,OAAOgL,EAAWnH,KAAKkH,WAAW,IAAIE,SAIxC7E,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOiE,WACjBxD,KAAKkH,WAAW5J,KAAKC,GAAMiF,EAAE9C,MAAMnC,QCtCrC,MAAM8J,iBAAiBxH,KAI5B,aAAawC,GACX,MAAMiF,EAAiBjF,EAAU/F,SAE3BiD,EAAS,GACTmE,EAAMC,EACV,IAAI0D,SAAS,CAAEhL,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAK3C,OAHAmE,EAAI2C,SAAWrB,mBAAmBP,MAAMpC,GACxC9C,EAAOgI,SAAWlF,EAAUoB,QAAQ,YACpCC,EAAI9C,QAAU6E,EAA8BpD,EAAW,iBAClDqB,EAAI9C,SAGJrB,EAAOgI,WACVhI,EAAOiI,SAAWnF,EAAUoB,QAAQ,QAEtClE,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUoB,WAAWgE,GAClBlI,EAAOf,MAGZkF,EAAIgE,QAAUnI,EAAOgI,SAAWP,QAAQvC,MAAMpC,GAAa,KACpDqB,EAAI1D,MAHFqC,EAAUsF,UAAUL,IATpBjF,EAAUsF,UAAUL,GAe/B,WACE,MAAO,WAET,eACE,QAAStH,KAAKT,OAAOgI,SAEvB,eACE,QAASvH,KAAKT,OAAOiI,SAEvB,WACE,OAAO,EAASxH,KAAKT,OAAOf,KAAKrC,OAMnC,UAAU0E,SACDb,KAAKqG,SAASxB,SAAShE,SACvBb,KAAKY,QAAQiE,SAAShE,GAC7B,MAAM5B,EAAS0B,EAA0BX,KAAKY,QAASC,EAAM,CAC3DC,kBAAkB,IAEpB,GAAI7B,EACF,GAAIe,KAAKY,QAAQY,SAAU,CACzB,MAAMhF,EAAU,iDACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,uBACAxD,QAEG,GAAKwD,KAAKuH,UAiBV,IAAKvH,KAAK0H,QAAS,CACxB,MAAMlL,EAAU,yEACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,mBACAxD,EACA,CACEG,QAASiL,EAAsC5H,cAxBnD,GACEA,KAAKpB,SACJ8C,EAAgCzC,EAAOsC,WAAYV,IA8C9D,SAAgC8D,GAC9B,MAAM7B,EAAO6B,EAAI/F,OAAOgG,WAAaD,EAAI/F,OAAOkE,KAC1ClD,EAAQkD,EAAK+E,QAAQlD,GAE3B,OADuB7B,EAAK/F,MAAM6C,EAAQ,GAAGiC,MAAMiG,IAAOA,EAAEP,WAhDpDQ,CAAuB/H,MACvB,CACA,MAAMxD,EAAU,0EACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,oBACAxD,EACA,CACEG,SA8CgCgI,
EA9Cc3E,KA+CnD,KACL,MAAM4G,EAAaoB,EAAcrD,EAAI/D,SACrC+D,EAAIpF,OAAOgI,SAAW,IACjBX,EACHjJ,KAAM,WACNxB,MAAO,YAETyK,EAAWpJ,OAAS,IACpBoK,EAAsCjD,EAAtCiD,OATJ,IAA8CjD,EA1B5CpC,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACf1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOgI,UACpB/E,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAE9C,MAAMM,KAAKT,OAAOiI,UACpBhF,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,OACvCA,KAAK0H,QAAU1H,KAAK0H,QAAQnF,MAAMC,GAAK,GACvCA,EAAE9C,MAAMM,KAAKT,OAAOoD,cAkC1B,SAASiF,EAAsCjD,GAC7C,MAAO,KACLA,EAAI+C,QAAUV,QAAQvC,MAAM,IAAI0D,UAAU,WCnJvC,MAAMC,kBAAkBvI,KAS7B,aAAawC,GAAW,QAAEgG,EAAO,QAAEC,GAAY,IAC7C,MAAM/I,EAAS,CAAE8I,QAAAA,GACX3E,EAAMC,EACV,IAAIyE,UAAU,CAAE/L,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAE5C,OAAI8I,GAA6B,gBAAlBA,EAAQlM,QACrBoD,EAAOgJ,YAAclG,EAAUoB,QAAQ,KACnClE,EAAOgJ,cACT7E,EAAIkB,UAAY,GACTlB,IAGN2E,GAAYC,IACf/I,EAAO8I,QAAUhG,EAAUoB,QAAQ,SAAU,SAAU,YAEzDC,EAAI9C,QACF4E,EAAYnD,IAAcA,EAAUjG,MAAM,uBAC5CmD,EAAOf,KACL6D,EAAUC,YAAY,eAAiBD,EAAUoB,QAAQ,YAC3DlE,EAAOuE,KACLzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,qBAC5CsH,EAAIkB,UAAYZ,EAAc3B,GAC9B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,0BAC5CmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCACXsH,EAAI1D,MAGb,WACE,MAAO,YAET,WACE,MAAM,KAAExB,GAASwB,KAAKT,OACtB,OAAKf,EAGE,EAASA,EAAKrC,OAFZ,GAIX,cACE,OAAK6D,KAAKT,OAAO8I,QAGVrI,KAAKT,OAAO8I,QAAQlM,MAFlB,GAKX,UAAU0E,GAER,SADOb,KAAKqG,SAASxB,SAAShE,IACzBb,KAAKxB,MAAQ,CAAC,GAAI,UAAUgK,SAASxI,KAAKqI,SAAU,CACvD,MAAM7L,EAAU,qFACViD,EAAgBO,KAAKT,OAAOuE,KAAM9D,KAAM,gBAAiBxD,GAE7DwD,KAAKY,gBACAZ,KAAKY,QAAQiE,SAAShE,IAE/B,IAAK,MAAM4H,KAAYzI,KAAK4E,gBACnB6D,EAAS5D,SAAShE,GAK7B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACb0I,EAAO1I,KAAKY,QACd,CACE4B,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK4E,UAAUtH,KAAKqH,GAAQA,EAAIpC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAO0E,QAEtB,GACJ,OAAOzB,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBxC,KAAKT,OAAOf,KACRgE,EAAE9C,MAAMM,KAAKT,OAAO8I,SACpB7F,EAAE9C,MAAMM,KAAKT,OAAO8I,QAAS7F,EAAEC,GAAGkG,SAAU,CAAET,KAAMlI,KAAMpB,OAAAA,OAC3D8J,EACHlG,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAAA,KCzFb,MAAMgK,kBAAkB/I,KAQ7B,aACEwC,GACA,QAAEgG,EAAO,UAAEQ,GAAY,EAAK,SAAEC,GAAW,GAAU,IAEnD,MAAMxB,EAAiBjF,EAAU/F,SAC3BiD,EAAS,CAAE8I,QAAAA,GACX3E,EAAMC,EACV,IAAIiF,UAAU,CAAEvM,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAa5C,GAXK8I,GAAYQ,IACftJ,EAAO8I,QAAUhG,EAAUoB,QAAQ,YAEjB,YAAhBC,EAAI2E,SAAyBhG,EAAU6C,MAAM,aAC/C7C,EAAUjG,MAAM,4CAElBmD,EAAOuJ,SAAWzG,EAAUoB,QAAQ,YAChCqF,IAAavJ,EAAOuJ,UAAYzG,EAAU6C,MAAM,cAClD7C,EAAUjG,MAAM,+CAElBmD,EAAOR,KAAOsD,EAAUoB,QAAQ,aAC3BlE,EAAOR,KAcZ,OAVA2E,EAAI9C,QACF6E,EAA8BpD,EAAW,mBACzCA,EAAUjG,MAAM,0BAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUoB,QAAQ,QAAS,aAC3BpB,EAAUjG,MAAM,0BAClBmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCACXsH,EAAI1D,KAbTqC,EAAUsF,UAAUL,GAgBxB,WACE,MAAO,YAET,cACE,OAAKtH,KAAKT,OAAO8I,QAGVrI,KAAKT,OAAO8I,QAAQlM,MAFlB,GAIX,eACE,QAAS6D,KAAKT,OAAOuJ,SAEvB,WACE,OAAO,EAAS9I,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,GAIR,aAHOb,KAAKqG,SAASxB,SAAShE,SACvBb,KAAKY,QAAQiE,SAAShE,GAErBb,KAAKY,QAAQqF,SACnB,IAAK,WACL,IAAK,SAAU,CACb,MAAMzJ,EAAU,4BAA4BwD,KAAKY,QAAQqF,uBACnDxG,EACJO,KAAKT,OAAOf,KACZwB,KACA,oBACAxD,GAEF,MAEF,QAAS,CACP,MAAM,UAAE8E,GACNX,EAA0BX,KAAKY,QAASC,IAAS,GACnD,GAAIS,EAAW,CACb,MAAMqF,GAAe3G,KAAKY,QAAQG,MAAQO,EAAYtB,KAAKY,SACxDrB,OAAOR,KACJvC,EAAU,mDACViD,EACJkH,EACA3G,KACA,oBACAxD,MAQV+F,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,K
AAKT,OAAO8I,SACpB7F,EAAE9C,MAAMM,KAAKT,OAAOuJ,UACpBtG,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAAA,KC1Gb,SAAS,EAAS0F,GACvB,OAAOA,EAAWyE,WAAW,KAAOzE,EAAWvH,MAAM,GAAKuH,EAWrD,SAASxB,EAAKT,GAAW,OAAEU,EAAM,aAAEiG,EAAY,SAAEhG,EAAW,SACjE,MAAMiG,EAAQlG,EAAOV,GACrB,IAAK4G,EACH,MAAO,GAETA,EAAM1J,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC3C,MAAMyF,EAAQ,CAACD,GACf,KAAOA,EAAM1J,OAAOoD,WAAW,CAC7B,MAAMwG,EAAOpG,EAAOV,GACpB,IAAK8G,EAAM,CACJH,GACH3G,EAAUjG,MAAM,qBAAqB4G,KAEvC,MAIF,GAFAmG,EAAK5J,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC1CyF,EAAMjE,KAAKkE,IACNA,EAAK5J,OAAOoD,UAAW,MAE9B,OAAOuG,EAMF,SAASjC,EAAY5E,GAC1B,OACEA,EAAUC,YAAY,UAAW,YACjCD,EAAUoB,QAAQ,OAAQ,QAAS,WAAY,YAAa,OASzD,SAAS0D,GAAW,KAAExJ,EAAI,MAAExB,IACjC,OAAQwB,GACN,IAAK,UACL,IAAK,UACH,MAAO,CAAEA,KAAM,SAAUxB,MAAAA,GAC3B,IAAK,SACH,MAAO,CAAEwB,KAAM,SAAUxB,MAAOA,EAAMY,MAAM,GAAI,IAGpD,OAAQZ,GACN,IAAK,OACL,IAAK,QACH,MAAO,CAAEwB,KAAM,UAAWxB,MAAiB,SAAVA,GACnC,IAAK,WACL,IAAK,YACH,MAAO,CAAEwB,KAAM,WAAYyJ,SAAUjL,EAAM4M,WAAW,MACxD,IAAK,IACH,MAAO,CAAEpL,KAAM,WAAYxB,MAAO,IACpC,IAAK,IACH,MAAO,CAAEwB,KAAM,cACjB,QACE,MAAO,CAAEA,KAAMxB,IAOd,SAAS4J,EAAe1D,GAoB7B,MAAM,OAAEhG,GAAWgG,EACb+G,EApBN,WACE,MAAM7C,EAASlE,EAAUoB,QAAQ,YAC3B1E,EAAOsD,EAAUoB,QAAQ,QAAS,QACxC,GAAI1E,EAAM,CACR,MAAMyH,EAAUnE,EAAUoB,QAAQ,QAClC,OAAO,IAAI8B,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAEgH,OAAAA,EAAQxH,KAAAA,EAAMyH,QAAAA,KAEhDD,GAAQlE,EAAUjG,MAAM,gCAabiN,IAVjB,WACE,MAAM9C,EAASlE,EAAUoB,QAAQ,gBAC3B1E,EAAOsD,EAAUoB,QAAQ,QAAS,UACxC,GAAI1E,EACF,OAAO,IAAIwG,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAEgH,OAAAA,EAAQxH,KAAAA,KAE1CwH,GAAQlE,EAAUjG,MAAM,8BAIckN,GAC5C,GAAIF,EAAU,OAAOA,EACrB,MAAMrK,EAAOsD,EAAUoB,QACrB,SACA,UACA,OACA,QACA,aAEF,OAAI1E,EACK,IAAIwG,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAER,KAAAA,UADtC,EAQK,SAASiF,EAAc3B,GAC5B,OAAOS,EAAKT,EAAW,CACrBU,OAAQsE,SAAS5C,MACjBzB,SAAU,mBAQP,SAASyC,EAA8BpD,EAAWiD,GACvD,MAAMe,EAAWrB,mBAAmBP,MAAMpC,GACpCqB,EAAM6B,KAAKd,MAAMpC,EAAWiD,GAElC,OADI5B,IAAKC,EAAaD,GAAK2C,SAAWA,GAC/B3C,EAOF,SAAS8B,EAAYnD,EAAWiD,GACrC,MAAMY,EAAMX,KAAKd,MAAMpC,EAAWiD,GAAY,eAC9C,GAAIY,EACF,OAAOA,EAET,MAAMqD,EAAYlH,EAAUoB,QAAQ,QACpC,GAAI8F,EAAW,CACb,MAAM7F,EAAM,IAAI6B,KAAK,CACnBlJ,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAER,KAAMwK,KAGlB,OADA7F,EAAI/F,KAAO,cACJ+F,GAOJ,SAAS8F,EAAYnH,GAC1B,MAAMgG,EAAUhG,EAAUoB,QAAQ,eAClC,IAAK4E,EAAS,OAKd,OAHEO,UAAUnE,MAAMpC,EAAW,CAAEgG,QAAAA,KAC7BD,UAAU3D,MAAMpC,EAAW,CAAEgG,QAAAA,KAC7BhG,EAAUjG,MAAM,4BAOb,SAASqN,EAAmBC,GACjC,MAAMC,EAAQD,EAAI1L,MAAM,MAExB,GAAI2L,EAAM/L,OAAQ,CAChB,MAAMgM,EAAQD,EAAMA,EAAM/L,OAAS,GAAGgM,MAAM,QAC5C,GAAIA,EACF,OAAOA,EAAM,GAGjB,MAAO,GAgBF,SAASC,EAAwB7I,GACtC,MAAO,KACL,GAAIA,EAAIqF,SAASzI,OAAQ,CACvB,MAAMyE,EAAY,IAAI8F,UAAU,mBAC1B2B,EAAUvF,wBAAwBE,MAAMpC,GAC9CyH,EAAQvK,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC7C,MAAMsG,EAAW/I,EAAIqF,SAAS,GACzB,MAAM2D,KAAKD,EAASxK,OAAOf,KAAKhB,UACnCuM,EAASxK,OAAOf,KAAKhB,OAAS,IAAIuM,EAASxK,OAAOf,KAAKhB,UAEzDwD,EAAIqF,SAASxH,QAAQiL,OAChB,CACLnG,EAAa3C,GAAKqF,SAAWrB,mBAAmBP,MAC9C,IAAI0D,UAAU,qBAEhB,MAAM3K,EAASwD,EAAIzB,OAAOR,KAAKvB,OAC/BwD,EAAIqF,SAAS9G,OAAOuE,KAAKtG,OAASA,EAClCwD,EAAIzB,OAAOR,KAAKvB,OAAS,KAAKiM,EAAmBjM,OAShD,SAASwK,EAAcE,GAC5B,GAAIA,EAAK7B,SAASzI,OAChB,OAAOsK,EAAK7B,SAAS9G,OAAOuE,KAE9B,GAAkB,cAAdoE,EAAKvK,OAAyBuK,EAAKG,QACrC,OAAOL,EAAcE,EAAKtH,SAG5B,OADepF,OAAOyO,OAAO/B,EAAK3I,QAAQ2K,MAAK,CAACC,EAAGC,IAAMD,EAAEvK,MAAQwK,EAAExK,QACvD,GAwBT,SAAS+D,EAAauE,EAAMtJ,GAKjC,OAJKA,IAEHA,EAASsJ,GAENA,EAKE,IAAImC,MAAMnC,EAAM,CACrBvM,IAAIqD,EAAQqF,GACV,MAAMlI,EAAQ6C,EAAOqF,GACrB,OAAInC,MAAMoI,QAAQnO,GAG
TwH,EAAaxH,EAAO6C,GAEtB7C,GAETkF,IAAIrC,EAAQqF,EAAGlI,GAEb,GADA6C,EAAOqF,GAAKlI,GACPA,EACH,OAAO,EACF,GAAI+F,MAAMoI,QAAQnO,GAEvB,IAAK,MAAMgN,KAAQhN,OACU,IAAhBgN,EAAKvK,SACduK,EAAKvK,OAASA,aAGe,IAAjBzC,EAAMyC,SACtBzC,EAAMyC,OAASA,GAEjB,OAAO,KA1BFsJ,EC5QX,MAAMqC,EAAU,CAGdC,QACE,sGACFC,QAAS,8CACTnG,WAAY,+BACZoG,OAAQ,WACRC,WAAY,cACZC,QAAS,2BACTC,MAAO,wBAGI7E,EAAmB,CAC9B,cACA,WACA,YACA,aACA,aACA,aACA,cACA,cACA,oBACA,gBACA,iBACA,eACA,eACA,MACA,SACA,UAGWL,EAAc,CAAC,aAAc,YAAa,aAE1C8B,EAAuB,CAClC,QACA,YACA,WACA,QACA,cACA,UACA,aACA,OACA,SACA,WACA,UACA,YACA,WACA,UACA,YACA,UACA,WACA,UACA,SACA,SACA,cACA,UACA,gBAGIqD,EAAoB,CACxB,YACA,cACA,WACA,MACA,kBACA,UACA,SACA,UACA,OACA,SACA,QACA,QACA,OACA,QACA,OACA,QACA,WACA,KACA,WACA,SACA,WACA,QACA,OACA,YACA,WACA,QACAC,OAAOtD,EAAsB9B,EAAaK,GAEtCgF,EAAe,CACnB,IACA,IACA,IACA,MACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,KAGIC,EAAW,CAEf,eACA,WACA,aAgHK,MAAM9C,UAIXrI,YAAYoL,GACVlL,KAAK3D,OA9GT,SAAkBqN,GAChB,MAAMnK,EAAS,GACf,IAAI4L,EAAgB,EAChB3N,EAAS,GACTK,EAAO,EACP+B,EAAQ,EACZ,KAAOuL,EAAgBzB,EAAI9L,QAAQ,CACjC,MAAMwN,EAAW1B,EAAI2B,OAAOF,GAC5B,IAAIlM,GAAU,EAQd,GANI,YAAY+K,KAAKoB,GACnBnM,EAASqM,EAAkB,aAAc,CAAEC,eAAe,IACpC,MAAbH,IACTnM,EAASqM,EAAkB,UAAW,CAAEC,eAAe,MAGzC,IAAZtM,EAAe,CACjB,MAAMuM,EAAgBjM,EAAOkM,MAAMtP,MACnC0B,IAAS2N,EAAc5B,MAAM,QAAU,IAAIhM,OAC3CJ,GAAUgO,EACV5L,GAAS,OACJ,GAAI,iBAAiBoK,KAAKoB,IAK/B,GAJAnM,EAASqM,EAAkB,YACX,IAAZrM,IACFA,EAASqM,EAAkB,aAEb,IAAZrM,EAAe,CACjBA,EAASqM,EAAkB,cAC3B,MAAMI,EAAYnM,EAAO3B,OAAS,EAC5B8B,EAAQH,EAAOmM,GACrB,IAAgB,IAAZzM,EAAe,CACjB,GAAIgM,EAASzC,SAAS9I,EAAMvD,OAAQ,CAClC,MAAMK,EAAU,GAAG,EACjBkD,EAAMvD,wDAER,MAAM,IAAIwP,iBACRnM,EAAYD,EAAQmM,EAAW,KAAMlP,IAE9BsO,EAAkBtC,SAAS9I,EAAMvD,SAC1CuD,EAAM/B,KAAO,gBAIG,MAAbyN,IACTnM,EAASqM,EAAkB,WAG7B,IAAK,MAAMM,KAAeZ,EACxB,GAAItB,EAAIX,WAAW6C,EAAaT,GAAgB,CAC9C5L,EAAO0F,KAAK,CACVtH,KAAM,SACNxB,MAAOyP,EACPpO,OAAAA,EACAK,KAAAA,EACA+B,MAAAA,IAEFpC,EAAS,GACT2N,GAAiBS,EAAYhO,OAC7BqB,EAASkM,EACT,MAQJ,IAHgB,IAAZlM,IACFA,EAASqM,EAAkB,WAEb,IAAZrM,EACF,MAAM,IAAI4M,MAAM,gCAElBV,EAAgBlM,EAChBW,GAAS,EAYX,OARAL,EAAO0F,KAAK,CACVtH,KAAM,MACNxB,MAAO,GACPqB,OAAAA,EACAK,KAAAA,EACA+B,MAAAA,IAGKL,EAOP,SAAS+L,EAAkB3N,GAAM,cAAE4N,GAAkB,IACnD,MAAMO,EAAKvB,EAAQ5M,GACnBmO,EAAGJ,UAAYP,EACf,MAAMlM,EAAS6M,EAAGC,KAAKrC,GACvB,OAAIzK,GACFM,EAAO0F,KAAK,CAAEtH,KAAAA,EAAMxB,MAAO8C,EAAO,GAAIzB,OAAAA,EAAQK,KAAAA,EAAM+B,MAAAA,IAC/C2L,IACH/N,EAAS,IAEJsO,EAAGJ,YAEJ,GASMM,CAASd,GACvBlL,KAAK1D,SAAW,EAOlBF,MAAMI,GACJ,MAAM,IAAImP,iBACRnM,EAAYQ,KAAK3D,OAAQ2D,KAAK1D,SAAU0D,KAAKzD,QAASC,IAO1DyP,UAAUtO,GACR,OACEqC,KAAK3D,OAAOuB,OAASoC,KAAK1D,UAC1B0D,KAAK3D,OAAO2D,KAAK1D,UAAUqB,OAASA,EAOxCuH,MAAM/I,GACJ,OACE6D,KAAKiM,UAAU,WAAajM,KAAK3D,OAAO2D,KAAK1D,UAAUH,QAAUA,EAOrEmG,eAAe4J,GACb,IAAK,MAAMvO,KAAQuO,EAAY,CAC7B,IAAKlM,KAAKiM,UAAUtO,GAAO,SAC3B,MAAM+B,EAAQM,KAAK3D,OAAO2D,KAAK1D,UAE/B,OADA0D,KAAK1D,WACEoD,GAOX+D,WAAWyI,GACT,IAAKlM,KAAKiM,UAAU,UAAW,OAC/B,MAAMvM,EAAQM,KAAK3D,OAAO2D,KAAK1D,UAC/B,IAAK,MAAMH,KAAS+P,EAClB,GAAIxM,EAAMvD,QAAUA,EAEpB,OADA6D,KAAK1D,WACEoD,EAOXyM,kBAAkBhQ,GAChB,GAAK6D,KAAKiM,UAAU,eAGhBjM,KAAK3D,OAAO2D,KAAK1D,UAAUH,QAAUA,EAGzC,OAAO6D,KAAKsC,YAAY,cAM1BqF,UAAUrL,GACR0D,KAAK1D,SAAWA,GAIb,MAAMqP,yBAAyBE,MAWpC/L,aAAY,QACVtD,EAAO,YACP4C,EAAW,QACXb,EAAO,KACPV,EAAI,WACJwB,EAAU,MACVC,EAAK,OACLC,IAEA4C,MAAM3F,GAENwD,KAAKxB,KAAO,mBACZwB,KAAKZ,YAAcA,EACnBY,KAAKzB,QAAUA,EACfyB,KAAKnC,KAAOA,EACZmC,KAAKX,WAAaA,EAClBW,KAAKV,MAAQA,EACbU,KAAKT,OAASA,GChVlB,MAAM6M,kBAAkBhK,aAItB,aAAaC,GACX,MAAMlG,EAAQkG,EAAUC,YAAY,UACpC,GAAInG,EACF,OAAO,IAAIiQ,UAAU,CAAE/P,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAEpD,MAAAA,KAI/D,WACE,MAAO,aAET,YACE,OAAOgG,MAAMh
G,MAAMY,MAAM,GAAI,GAI/BwF,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGC,KAAK,CACfF,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOpD,MAAMqB,QAC9BgF,EAAEC,GAAGpH,WACHmH,EAAEC,GAAGC,KAAK,CAAC,IAAKF,EAAEC,GAAGjE,KAAKwB,KAAK7D,MAAO,CAAE+L,KAAMlI,KAAMpB,OAAAA,IAAW,MAC/D,CAAEsJ,KAAMlI,KAAMpB,OAAAA,IAEhB4D,EAAE9C,MAAMM,KAAKT,OAAOoD,cAKnB,MAAM0J,aAAaxM,KAIxB,aAAawC,GAEX,MAAM9C,EAAS,GAEf,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,SAC3BlE,EAAOR,KACV,OAEFQ,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,oBAClB,MAAMsH,EAAMC,EAAa,IAAI0I,KAAK,CAAEhQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAkB9D,OAjBA8C,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,iBACxDsH,EAAIuG,OAASnH,EAAKT,EAAW,CAC3BU,OAAQqJ,UAAU3H,MAClBuE,cAAc,EACdhG,SAAU,gBAERX,EAAU4J,UAAU,WACtB5J,EAAUjG,MAAM,gCAElBmD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,4BACvCsH,EAAIuG,OAAOrM,QACdyE,EAAUjG,MAAM,oBAElBmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,2BACrCsH,EAAI1D,KAGb,WACE,MAAO,OAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAInCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAKiK,OAAO3M,KAAKgP,GAAMA,EAAE/J,MAAMC,MACzCA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,QCrFP,MAAMuM,iBAAiB1M,KAI5B,aAAawC,GACX,MAAMrD,EAASqD,EAAUC,YAAY,cACrC,IAAKtD,EACH,OAEF,MAAMO,EAAS,CAAEP,OAAAA,GAEjB,GADAO,EAAOiJ,SAAWnG,EAAUoB,QAAQ,YAC/BlE,EAAOiJ,SAUZ,OANAjJ,EAAOiN,MACLnK,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,iCAClBmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2CACX,IAAImQ,SAAS,CAAElQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAT9C8C,EAAUsF,UAAU3I,EAAOY,OAY/B,WACE,MAAO,WAET,aACE,OAAO,EAASI,KAAKT,OAAOP,OAAO7C,OAErC,eACE,OAAO,EAAS6D,KAAKT,OAAOiN,MAAMrQ,OAIpCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE4B,gBAAgBpE,KAAKT,OAAOP,OAAQgB,MACtCwC,EAAE9C,MAAMM,KAAKT,OAAOiJ,UACpBhG,EAAE4B,gBAAgBpE,KAAKT,OAAOiN,MAAOxM,MACrCwC,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,QC1CP,MAAMyM,gBAAgB5M,KAI3B,aAAawC,GAEX,MAAM9C,EAAS,GACTmE,EAAMC,EAAa,IAAI8I,QAAQ,CAAEpQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAEjE,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,WAC3BlE,EAAOR,KAaZ,OAVA2E,EAAI9C,QACF6E,EAA8BpD,EAAW,iBACzCA,EAAUjG,MAAM,wBAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,wBAClBiG,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,sCACXsH,EAAI1D,KAGb,WACE,MAAO,UAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,SACDb,KAAKY,QAAQiE,SAAShE,GAI/B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,QC7CP,MAAM0M,yBAAyB7M,KAIpC,aAAawC,EAAWtD,GACtB,MAAMQ,EAAS,CAAER,KAAAA,GACX2E,EAAMC,EACV,IAAI+I,iBAAiB,CAAErQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAmBnD,OAjBAA,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,yBAClBiG,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAOiE,OACLnB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,gCAC5CsH,EAAI9C,QACF4E,EAAYnD,IAAcA,EAAUjG,MAAM,gCAC5CmD,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,4CAClBsH,EAAIkB,UAAYZ,EAAc3B,GAC9B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,yBAC5CmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,uCACXsH,EAAI1D,KAGb,WACE,MAAO,WAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,SACDb,KAAKqG,SAASxB,SAAShE,SACvBb,KAAKY,QAAQiE,SAAShE,GAI/B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,K
AAKT,OAAOR,MACpByD,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK4E,UAAUtH,KAAKqH,GAAQA,EAAIpC,MAAMC,KACzCA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,QC7CP,MAAM2M,kBAAkB9M,KAO7B,aAAawC,EAAWuK,GAAU,YAAEC,EAAW,eAAEC,IAC/C,MAAM,OAAEvN,EAAM,KAAE5B,GAASiP,EAWzB,IAVArN,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,mBAAmBuB,KACrC0E,EAAU9F,QAAUqQ,EACpBA,EAAWjJ,EAAaiJ,GACpBC,GACFrR,OAAOgI,OAAOjE,EA1BpB,SAAqB8C,GACnB,MAAM0K,EAAQ1K,EAAUoB,QAAQ,KAChC,OAAKsJ,EAME,CAAEA,MAAAA,EAAO1M,YAFdgC,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,6BAJT,GAuBiBiE,CAAYgC,IAEpC9C,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,YAAYuB,KACpEiP,EAAShL,QAAU,KACN,CAEX,GADArC,EAAO0E,MAAQ5B,EAAUoB,QAAQ,KAC7BlE,EAAO0E,MAIT,OAHA1E,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2BAA2BuB,KACtCiP,EAAS5M,KAElB,MAAMmF,EAAKH,mBAAmBP,MAAMpC,GACpC,IAAI2K,EACJ,IAAK,MAAOjK,KAAWkK,KAASH,EAE9B,GADAE,EAAMrJ,EAAaZ,EAAOV,KAAc4K,IACpCD,EACF,MAGCA,GACH3K,EAAUjG,MAAM,kBAElB4Q,EAAI3G,SAAWlB,EACfyH,EAAShL,QAAQqD,KAAK+H,EAAIhN,OAI9B,cACE,QAASA,KAAKT,OAAOd,QAEvB,WACE,OAAO,EAASuB,KAAKT,OAAOf,KAAKrC,OAEnC,kBACE,OAAK6D,KAAKT,OAAOc,YAGV,EAASL,KAAKT,OAAOc,YAAYlE,OAF/B,KAKX,UAAU0E,GACR,IAAK,MAAMqM,KAAUlN,KAAK4B,QACpBsL,EAAOrI,iBACFqI,EAAOrI,SAAShE,IAM7B0B,MAAMC,GAcJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAO4N,UACpB3K,EAAE9C,MAAMM,KAAKT,OAAOd,SACpB+D,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAE9C,MAAMM,KAAKT,OAAOiN,OACpBhK,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,OApBvB,KACbA,KAAKT,OAAOc,YAGVmC,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOwN,OACpBvK,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOc,YAAY7C,QACpCgF,EAAEC,GAAGpC,YACHmC,EAAElB,UAAUtB,KAAKT,OAAOc,YAAYlE,MAAO,CAAEoC,QAASyB,UANjD,GAmBPK,GACAmC,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK4B,QAAQtE,KAAK8P,GAAMA,EAAE7K,MAAMC,MAC1CA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,QCrGP,MAAMqN,iBAAiBxN,KAI5B,aAAawC,GAEX,MAAM9C,EAAS,GAEf,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,UAC3BlE,EAAOR,KACV,OAEF,IAAI6B,EAAUmF,EAAe1D,GAC7B,IAAKzB,EAAS,CACZ,MAAM7B,EACJsD,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,sBAClBwE,EAAU,IAAI2E,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,KAEvDsD,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,qCAElBwE,EAAQjD,KAAO,aACf4B,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,sBAClBmD,EAAOiE,OACLnB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,gCAC5CmD,EAAOpD,MACL8K,EAAY5E,IAAcA,EAAUjG,MAAM,uBAC5CmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,oCAClB,MAAMsH,EAAM,IAAI2J,SAAS,CAAEhR,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAErD,OADAoE,EAAaD,GAAK9C,QAAUA,EACrB8C,EAGT,WACE,MAAO,QAET,WACE,OAAO,EAAS1D,KAAKT,OAAOf,KAAKrC,OAEnC,YACE,OAAOgL,EAAWnH,KAAKT,OAAOpD,OAIhCoG,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAE9C,MAAMM,KAAKT,OAAOpD,OACpBqG,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAAA,KC/Db,MAAM0O,qBAAqBzN,KAIhC,aAAawC,GACX,MAAMiF,EAAiBjF,EAAU/F,SAC3BiD,EAAS,GACTmE,EAAMC,EACV,IAAI2J,aAAa,CAAEjR,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAW/C,GATAA,EAAOuJ,SAAWzG,EAAUoB,QAAQ,YAC/BlE,EAAOuJ,WACVvJ,EAAOgO,MAAQlL,EAAUoB,QAAQ,UAEnClE,EAAOR,KAAOQ,EAAOuJ,SACjBzG,EAAUoB,QAAQ,UAAW,WAC7BlE,EAAOgO,MACPlL,EAAUoB,QAAQ,YAClBpB,EAAUoB,QAAQ,WAAY,UAAW,YACxClE,EAAOR,KAEV,YADAsD,EAAUsF,UAAUL,GAItB,MAAM,KAAE3J,GAAS
+F,EACX8J,EAA8B,YAAT7P,EACrB8P,EAAoBD,GAA+B,aAAT7P,EAC1C+P,EAAkBhK,EAAI6J,OAAkB,aAAT5P,EAErC4B,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,mCAAmCuB,iBACrD,MAAMsL,EACJxD,EAA8BpD,IAC9BA,EAAUjG,MAAM,8BAA8BuB,iBAiChD,OAhCA+F,EAAI9C,QAAU,CAACqI,GACfvF,EAAIkB,UAAY,GAEZ6I,IACFxE,EAAM1J,OAAOoD,UAAYN,EAAUoB,QAAQ,KACvCwF,EAAM1J,OAAOoD,UACfe,EAAI9C,QAAQqE,KAAKQ,EAA8BpD,IACtCmL,GACTnL,EAAUjG,MAAM,mCAAmCuB,kBAIvD4B,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,sCAAsCuB,iBAEpD0E,EAAU6C,MAAM,OACdwI,GACFnO,EAAOoO,SAAWtL,EAAUoB,QAAQ,KACpCC,EAAIkB,UAAUK,QAAQjB,EAAc3B,IACpC9C,EAAOqO,UACLvL,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,8CAElBiG,EAAUjG,MAAM,oDAIpBmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2BAA2BuB,iBAEtC+F,EAAI1D,KAGb,WACE,OAAOA,KAAKT,OAAOR,KAAK5C,MAE1B,eACE,QAAS6D,KAAKT,OAAOuJ,SAEvB,YACE,QAAS9I,KAAKT,OAAOgO,MAGvB,UAAU1M,GACR,IAAK,MAAMlD,KAAQqC,KAAKY,cACfjD,EAAKkH,SAAShE,GAEvB,IAAK,MAAM4H,KAAYzI,KAAK4E,gBACnB6D,EAAS5D,SAAShE,GAK7B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOuJ,UACpBtG,EAAE9C,MAAMM,KAAKT,OAAOgO,OACpB/K,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGwD,SAC/BzD,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAKY,QAAQtD,KAAKC,GAAMA,EAAEgF,MAAMC,MAC1CA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOoO,UACpBnL,EAAEC,GAAGC,KAAK1C,KAAK4E,UAAUtH,KAAKqH,GAAQA,EAAIpC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAOqO,WACpBpL,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAQoB,KAAKpB,UC7G1B,MAAMiP,oBAAoBhO,KAI/B,aAAawC,GACX,MAAMtD,EAAOsD,EAAUoB,QAAQ,eAC/B,IAAK1E,EACH,OAGF,MAAMQ,EAAS,CAAER,KAAAA,GACjBQ,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,mCAClB,MAAM6Q,EAAOjJ,EAAc3B,GAC3B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,4BAC5CmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,kCAClB,MAAMsH,EAAM,IAAImK,YAAY,CAAExR,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAExD,OADAoE,EAAaD,GAAKkB,UAAYqI,EACvBvJ,EAGT,WACE,MAAO,cAGT,UAAU7C,GACJb,KAAKY,gBACAZ,KAAKY,QAAQiE,SAAShE,IAE/B,IAAK,MAAM4H,KAAYzI,KAAK4E,gBACnB6D,EAAS5D,SAAShE,GAK7B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGkG,SAAU,CAAET,KAAMlI,KAAMpB,OAAAA,IACvD4D,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK4E,UAAUtH,KAAKqH,GAAQA,EAAIpC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAAA,KC9BpB,SAASkP,EAAczL,GACrB,MAAMgG,EAAUhG,EAAUoB,QAAQ,UAClC,IAAK4E,EAAS,OAKd,OAHEO,UAAUnE,MAAMpC,EAAW,CAAEgG,QAAAA,KAC7BD,UAAU3D,MAAMpC,EAAW,CAAEgG,QAAAA,KAC7BhG,EAAUjG,MAAM,4BAIb,MAAM2R,kBAAkBpB,UAI7B,aAAatK,EAAWtD,GAAM,QAAEN,EAAU,MAAS,IACjD,MAAMc,EAAS,CAAEd,QAAAA,EAASM,KAAAA,GAC1B,OAAO4N,UAAUlI,MACfpC,EACA,IAAI0L,UAAU,CAAE1R,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC1C,CACEsN,aAAcpO,EACdqO,eAAgB,CACd,CAACO,SAAS5I,OACV,CAACoJ,YAAYpJ,OACb,CAACqJ,GACD,CAACtE,GACD,CAAC8D,aAAa7I,OACd,CAACmE,UAAUnE,OACX,CAAC2D,UAAU3D,UAMnB,WACE,MAAO,YAGT,UAAU5D,GAER,SADOb,KAAKqG,SAASxB,SAAShE,IAE3Bb,KAAKvB,SACNuB,KAAKqG,SAAS2H,OAAOtJ,GAA6B,YAAjBA,EAAQlG,OACzC,CACA,MAAMhC,EAAU,oTAKViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,kBACAxD,EACA,CACEG,QAASkN,EAAwB7J,QAIvC,MAAMiO,EAAkBjO,KAAKqG,SAASI,QACnC/B,GAA6B,gBAAjBA,EAAQlG,OAEvB,IAAK,MAAMsB,KAAemO,EAAiB,CACzC,MAAMzR,EAAU,oRAIViD,EACJK,EAAYP,OAAOf,KACnBwB,KACA,qBACAxD,EACA,CACEG,QAASuR,EAAmBlO,KAAMF,KAMxC,GADiBE,KAAKqG,SAASxE,MAAM6C,GAA6B,WAAjBA,EAAQlG,OAC3C,CACZ,MAAM2P,EAAmBnO,KAAKqG,SAASI,QACpC/B,GAA6B,0BAAjBA,EAAQlG,OAEvB,IAAK,MAAM4P,KAASD,EAAkB,CACpC,MAAM3R,EAAU,uEACViD,EACJ2O,EAAM7O,OAAOf,KACbwB,KACA,0BACAxD,GAIJ,MAAM6R,EAAerO
,KAAK4B,QAAQ6E,QAC/ByG,GAA2B,gBAAhBA,EAAOvP,OAErB,IAAK,MAAMyQ,KAASC,EAAc,CAChC,MAAM7R,EAAU,kEACViD,EACJ2O,EAAM7O,OAAOR,KACbiB,KACA,0BACAxD,UAKC2F,MAAM0C,SAAShE,GACjBb,KAAKvB,gBC/HP,UAA0CoC,EAAMyN,GACrD,MAAMC,EAAU,IAAIC,IAAIC,EAAcH,GAAGhR,KAAKoR,GAAOA,EAAGlQ,QAClDmQ,EAAW9N,EAAK8N,SAAShT,IAAI2S,EAAE9P,OAAS,GACxCoQ,EAAS/N,EAAKgO,SAASlT,IAAI2S,EAAE9P,OAAS,GAC5C,IAAK,MAAMsQ,IAAO,IAAIH,KAAaC,GAAS,CAC1C,MAAMG,EAAYN,EAAcK,SACzBE,EAAiBD,EAAWR,EAASO,EAAKR,GACjD,IAAK,MAAMW,KAAYF,EACrBR,EAAQW,IAAID,EAASzQ,MAIzB,SAAUwQ,EAAiBD,EAAWI,EAAWL,EAAK/P,GACpD,IAAK,MAAMkQ,KAAYF,EAAW,CAChC,MAAM,KAAEvQ,GAASyQ,EACjB,GAAIzQ,GAAQ2Q,EAAU/N,IAAI5C,GAAO,CAC/B,MAAMhC,EAAU,kBAAkBgC,uDAA0DO,EAAKP,6CAC3FiB,EACJwP,EAAS1P,OAAOf,KAChBsQ,EACA,oBACAtS,KAMR,SAASiS,EAAcH,GACrB,OAAOA,EAAE1M,QAAQ6E,QAAO,EAAG9I,KAAAA,KAAoB,cAATA,KDoG7ByR,CAAgCvO,EAAMb,QAKnD,SAASkO,EAAmBmB,EAAcC,GAExC,OADAD,EAAe1L,EAAa0L,GACrB,KACL,MAAME,EAAc9F,EAClB4F,EAAahJ,SAAS9G,OAAOuE,KAAKtG,QAE9BgS,EAAeH,EAAazN,QAAQhE,OACtC6L,EAAmBzB,EAAcqH,EAAazN,QAAQ,IAAIpE,QVqD3D,SAA8BiS,GACnC,MAAMF,EAAc9F,EAAmBgG,GACjCC,EAAWH,EAAY/G,SAAS,MAAQ,KAAO,KACrD,OAAO+G,EAAcG,EUvDfC,CAAqBJ,GACnBK,EAAgB/B,YAAYpJ,MAChC,IAAI0D,UAAU,KAAKqH,oBAErBI,EAAcvJ,SAAW,IAAIrB,mBAAmB,IAChDrB,EAAaiM,GAAehL,UAAY0K,EAAmB1K,UAE3D,MAAMiL,EViGH,SAAuBC,EAAOC,GACnC,MAAMnQ,EAAQkQ,EAAM/S,QAAQiT,UAAUC,UAAUF,GAChD,OAAe,IAAXnQ,EACKA,EAEFkQ,EAAMlS,OAASgC,EAAQ,EUtGNsQ,CACpBb,EAAazN,SACZwL,GAAiB,gBAAXA,EAAEzP,OAEX0R,EAAazN,QAAQuO,OAAON,EAAgB,EAAG,EAAGD,GAElD,MAAM,MAAE3L,GAAUoL,EAAa9P,OAC1B0E,EAAMzG,OAAOgL,SAAS,QACzBvE,EAAMzG,QAAU,KAAK+R,KAGvB,MAAM,SAAElJ,GAAagJ,EACfzP,EAAQyG,EAASwB,QAAQyH,GACzBc,EAAU/J,EAAS8J,OAAOvQ,EAAO,GAClCyG,EAASzI,OAEHyI,EAASzI,SAAWgC,EAC7ByG,EAASzG,EAAQ,GAAGL,OAAOoD,eAAYvC,EAC7BiG,EAASzG,GAAOL,OAAOf,KAAKhB,OAAO6S,SAC7ChK,EAASzG,GAAOL,OAAOf,KAAKhB,OAAS4S,EAAQ,GAAG7Q,OAAOf,KAAKhB,QAJ5D6I,EAAS9G,OAAOuE,KAAOuC,EAAS9G,OAAO0E,WAAQ7D,GEjK9C,MAAMkQ,cAAc3D,UASzB,aAAatK,EAAWtD,GAAM,QAAEN,GAAY,IAC1C,MAAMc,EAAS,CAAEd,QAAAA,EAASM,KAAAA,GAE1B,GADAQ,EAAOiN,MAAQnK,EAAUoB,QAAQ,SAC5BlE,EAAOiN,MAGZ,OAAOG,UAAUlI,MACfpC,EACA,IAAIiO,MAAM,CAAEjU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IACtC,CACEuN,eAAgB,CACd,CAACO,SAAS5I,OACV,CAAC+E,GACD,CAACZ,UAAUnE,MAAO,CAAEoE,WAAW,IAC/B,CAACT,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,mBC3BJ,MAAMiI,cAAc1Q,KAIzB,aAAawC,GAEX,MAAM9C,EAAS,GACTmE,EAAMC,EAAa,IAAI4M,MAAM,CAAElU,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAe/D,OAdAmE,EAAI2C,SAAWrB,mBAAmBP,MAAMpC,GACxC9C,EAAOwC,SAAWM,EAAUoB,QAAQ,YACpCC,EAAI9C,QACF6E,EAA8BpD,EAAW,oBACzCA,EAAUjG,MAAM,kCAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,kCAClBsH,EAAIgE,QAAUV,QAAQvC,MAAMpC,GACxB9C,EAAOwC,UAAY2B,EAAIgE,SACzBrF,EAAUjG,MAAM,2CAClBmD,EAAOgJ,YACLlG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,gDACXsH,EAAI1D,KAGb,WACE,MAAO,QAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAEnC,eACE,QAAS6D,KAAKT,OAAOwC,SAGvB,UAAUlB,SACDb,KAAKY,QAAQiE,SAAShE,GAI/B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOwC,UACpBS,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEyF,WAAWjI,KAAKT,OAAOf,KAAM,CAAE0J,KAAMlI,KAAMpB,OAAAA,IAC7CoB,KAAK0H,QAAU1H,KAAK0H,QAAQnF,MAAMC,GAAK,GACvCA,EAAE9C,MAAMM,KAAKT,OAAOgJ,eAEtB,CAAEL,KAAMlI,KAAMpB,OAAAA,KCvDb,MAAM4R,mBAAmB7D,UAM9B,aAAatK,GAAW,QAAE5D,GAAY,IACpC,MAAMc,EAAS,CAAEd,QAAAA,GAEjB,GADAc,EAAOR,KAAOsD,EAAUoB,QAAQ,cAC3BlE,EAAOR,KAGZ,OAAO4N,UAAUlI,MACfpC,EACA,IAAImO,WAAW,CAAEnU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC3C,CACEsN,aAAcpO,EACdqO,eAAgB,CAAC,CAACyD,MAAM9L,UAK9B,WACE,MAAO,cCrBJ,MAAMgM,kBAAkB9D,UAM7B,aAAatK,GAAW,QAAE5D,GAAY,IACpC,MAAMc,EAAS,CAAEd,QAAAA,GAEjB,GADAc,EAAOR,KAAOsD,EAAUoB
,QAAQ,aAC3BlE,EAAOR,KAGZ,OAAO4N,UAAUlI,MACfpC,EACA,IAAIoO,UAAU,CAAEpU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC1C,CACEuN,eAAgB,CACd,CAAClE,UAAUnE,MAAO,CAAEoE,WAAW,EAAMC,UAAU,IAC/C,CAACuE,SAAS5I,OACV,CAAC2D,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,YAGT,UAAUzH,GACR,IACGb,KAAKvB,SACNuB,KAAKqG,SAAS2H,OAAOtJ,GAA6B,YAAjBA,EAAQlG,OACzC,CACA,MAAMhC,EAAU,gTAKViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,kBACAxD,EACA,CACEG,QAASkN,EAAwB7J,cAIhCmC,MAAM0C,SAAShE,IClDnB,MAAM6P,0BAA0B/D,UAIrC,aAAatK,EAAW8K,GAAU,QAAE1O,EAAU,MAAS,IACrD,MAAMc,EAAS,CAAE4N,SAAAA,GAEjB,GADA5N,EAAOR,KAAOsD,EAAUoB,QAAQ,aAC3BlE,EAAOR,KAGZ,OAAO4N,UAAUlI,MACfpC,EACA,IAAIqO,kBAAkB,CAAErU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAClD,CACEsN,aAAcpO,EACdqO,eAAgB,CACd,CAACO,SAAS5I,OACV,CAAC2D,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,sBCXX,SAASqI,EAActO,EAAW1C,GAChC,MAAMtD,EAASgG,EAAUhG,OAEzB,SAASD,EAAMsN,GACbrH,EAAUjG,MAAMsN,GAGlB,SAASjG,KAAWyI,GAClB,OAAO7J,EAAUoB,WAAWyI,GAY9B,SAAS0E,EAAWC,GAClB,MAAM9R,EAAO0E,EAAQ,aACrB,IAAK1E,EAAM,OAKX,OAHEuR,MAAM7L,MAAMpC,EAAWtD,EAAM8R,IAC7B9C,UAAUtJ,MAAMpC,EAAWtD,EAAM8R,IACjCzU,EAAM,gCAeV,SAASf,IACP,GAAIsE,EAAQmR,YACV,IAAK,MAAMC,KAAcpR,EAAQmR,YAAa,CAC5C,MAAM7R,EAAS8R,EAAW1O,GAC1B,GAAIpD,EACF,OAAOA,EAKb,OAxCF,WACE,MAAMkO,EAAW1J,EAAQ,YACzB,GAAK0J,EACL,OAAI9K,EAAU6C,MAAM,aACXwL,kBAAkBjM,MAAMpC,EAAW8K,GAErCT,iBAAiBjI,MAAMpC,EAAW8K,GAmCvCA,IACAyD,KAvBJ,WACE,MAAMnS,EAAUgF,EAAQ,WACxB,GAAKhF,EACL,OACE+R,WAAW/L,MAAMpC,EAAW,CAAE5D,QAAAA,KAC9BmS,EAAW,CAAEnS,QAAAA,KACbgS,UAAUhM,MAAMpC,EAAW,CAAE5D,QAAAA,KAC7BrC,EAAM,qCAiBNqC,IACA+R,WAAW/L,MAAMpC,IACjBgK,KAAK5H,MAAMpC,IACXoK,QAAQhI,MAAMpC,IACdkK,SAAS9H,MAAMpC,IACfoO,UAAUhM,MAAMpC,GAuBpB,MAAM2O,EAnBN,WACE,IAAK3U,EAAOuB,OAAQ,MAAO,GAC3B,MAAMiD,EAAO,GACb,OAAa,CACX,MAAMsE,EAAKH,mBAAmBP,MAAMpC,GAC9BrB,EAAM3F,IACZ,IAAK2F,EAAK,CACJmE,EAAGvH,QAAQxB,EAAM,6BACrB,MAEFuH,EAAa3C,GAAKqF,SAAWlB,EAC7BtE,EAAKoE,KAAKjE,GAEZ,MAAMiQ,EAAMrO,IAAI6B,MAAMpC,GAItB,OAHI1C,EAAQuR,UACVrQ,EAAKoE,KAAKgM,GAELpQ,EAEGsQ,GAEZ,OADI9O,EAAU/F,SAAWD,EAAOuB,QAAQxB,EAAM,uBACvC4U,EAWF,SAASvM,EAAMiF,EAAK/J,EAAU,IACnC,MAAM0C,EAAY,IAAI8F,UAAUuB,GAIhC,YAHkC,IAAvB/J,EAAQN,aACjBgD,EAAUhG,OAAOmC,KAAOmB,EAAQN,YAE3BsR,EAActO,EAAW1C,GCvHlC,SAASyR,EAAKzM,GACZ,OAAOA,EAGT,MAAM0M,EAAY,CAChB3O,KAAOwG,GAAUA,EAAMzL,KAAK,IAC5BD,OAAQ4T,EACR5S,KAAM4S,EACN9P,UAAW8P,EACXzT,KAAMyT,EACNnL,QAASmL,EACTzI,SAAUyI,EACV/Q,YAAa+Q,EACb/V,WAAY+V,EACZtM,kBAAmBsM,EACnBrM,2BAA4BqM,GAGvB,MAAME,OACXxR,YAAY2C,GACVzC,KAAKyC,GAAKjH,OAAOgI,OAAO,GAAI6N,EAAW5O,GAGzCnB,UAAUiQ,GAAK,UAAEzK,EAAS,QAAEvI,IAI1B,OAHKuI,IACHA,EAAYyK,EAAIxI,WAAW,KAAOwI,EAAIxU,MAAM,GAAKwU,GAE5CvR,KAAKyC,GAAGnB,UAAUiQ,EAAKzK,EAAWvI,GAG3CmB,MAAMnC,EAAGiU,EAAUJ,KAASnE,GAC1B,IAAK1P,EACH,MAAO,GAET,MAAMpB,EAAQqV,EAAQjU,EAAEpB,SAAU8Q,GAClC,OAAOjN,KAAKyC,GAAGC,KAAK,CAAC1C,KAAKyC,GAAGjF,OAAOD,EAAEC,QAASrB,IAGjDiI,gBAAgB7G,EAAGgB,GACjB,OAAOyB,KAAKN,MAAMnC,EAAGyC,KAAKsB,UAAUmQ,KAAKzR,MAAO,CAAEzB,QAAAA,IAGpD0J,WAAW1K,EAAGoH,GACZ,OAAO3E,KAAKN,MAAMnC,EAAGyC,KAAKyC,GAAGjE,KAAMmG,GAGrCL,WAAWoN,EAAInT,GACb,OAAOyB,KAAKyC,GAAGC,KAAK,CAClB1C,KAAKoE,gBAAgBsN,EAAGnS,OAAOpD,MAAOoC,GACtCyB,KAAKN,MAAMgS,EAAGnS,OAAOoD,cAKpB,SAASJ,EAAMoP,GAAON,UAAW5O,EAAK4O,GAAc,IACzD5O,EAAKjH,OAAOgI,OAAO,GAAI6N,EAAW5O,GAElC,MAAMD,EAAI,IAAI8O,OAAO7O,GAErB,OAAOA,EAAGC,KAAKiP,EAAIrU,KAAKsU,GAAOA,EAAGrP,MAAMC,MCvD1C,SAASqP,EAAYC,EAAK7Q,GACxB,MAAM3D,EAAM,IAAI6F,IACVqF,EAAWsJ,EAAIrL,QAAQzF,GAAqB,aAAbA,EAAIrD,OACzC,IAAK,MAAMoU,KAAWvJ,EAAU,CAC9B,MAAMgE,EAAQvL,EAAOtF,IAAIoW,EAAQvJ,UACjC,IAAKgE,EACH,SAEF,MAAMsD,EAAQxS,EAAI3B,IAAIoW,EAAQ/S,QAC1B8Q,EACFA,EAAM7K,KAAKuH,GAEXlP,EAAI+D,IAAI0Q,EAAQ/S,OAAQ,CAACwN,IAG7B,OAAOlP,EAoDT,SAAU0U,EAAiBL,GACzB,MAAM9Q,EA/CR,SAA0BiR,G
ACxB,MAAM7Q,EAAS,IAAIkC,IACb8O,EAAa,IAAIzD,IACjBG,EAAW,IAAIxL,IACrB,IAAK,MAAMnC,KAAO8Q,EAChB,GAAI9Q,EAAIvC,QAAR,CACE,MAAMqR,EAAQnB,EAAShT,IAAIqF,EAAIxC,MAC3BsR,EACFA,EAAM7K,KAAKjE,GAEX2N,EAAStN,IAAIL,EAAIxC,KAAM,CAACwC,SAIvBA,EAAIxC,OAGJyC,EAAOG,IAAIJ,EAAIxC,MAGlByT,EAAW/C,IAAIlO,GAFfC,EAAOI,IAAIL,EAAIxC,KAAMwC,IAKzB,MAAO,CACL8Q,IAAAA,EACA7Q,OAAAA,EACA0N,SAAAA,EACAsD,WAAAA,EACApD,SAAUgD,EAAYC,EAAK7Q,GAC3BE,MAAO,CACLD,0BAA2B,IAAIgR,QAC/BxQ,gCAAiC,IAAIwQ,UAgB5BC,CAAiBR,GAC9B,IAAK,MAAM3Q,KAAOH,EAAKiR,IACjB9Q,EAAI6D,iBACC7D,EAAI6D,SAAShE,UAd1B,WAA+B,OAAEI,EAAM,WAAEgR,IACvC,IAAK,MAAMG,KAAOH,EAAY,CAC5B,MAAM,KAAEzT,GAAS4T,EACX5V,EAAU,aAAagC,eAC3ByC,EAAOtF,IAAI6C,GAAMb,+BAEb,EAAMyU,EAAI7S,OAAOf,KAAM4T,EAAK,eAAgB5V,IAW7C6V,CAAqBxR,GAevB,SAASgE,EAAS8M,GACvB,MAAO,IAAIK,GAZIlC,EAYqB6B,EAXhC7B,EAAMwC,KACDxC,EAAMwC,OAER,GAAGvH,UAAU+E,MAJtB,IAAiBA,E","sources":["webpack://WebIDL2/webpack/universalModuleDefinition","webpack://WebIDL2/webpack/bootstrap","webpack://WebIDL2/webpack/runtime/define property getters","webpack://WebIDL2/webpack/runtime/hasOwnProperty shorthand","webpack://WebIDL2/webpack/runtime/make namespace object","webpack://WebIDL2/./lib/error.js","webpack://WebIDL2/./lib/productions/base.js","webpack://WebIDL2/./lib/validators/helpers.js","webpack://WebIDL2/./lib/productions/array-base.js","webpack://WebIDL2/./lib/productions/token.js","webpack://WebIDL2/./lib/productions/extended-attributes.js","webpack://WebIDL2/./lib/productions/type.js","webpack://WebIDL2/./lib/productions/default.js","webpack://WebIDL2/./lib/productions/argument.js","webpack://WebIDL2/./lib/productions/operation.js","webpack://WebIDL2/./lib/productions/attribute.js","webpack://WebIDL2/./lib/productions/helpers.js","webpack://WebIDL2/./lib/tokeniser.js","webpack://WebIDL2/./lib/productions/enum.js","webpack://WebIDL2/./lib/productions/includes.js","webpack://WebIDL2/./lib/productions/typedef.js","webpack://WebIDL2/./lib/productions/callback.js","webpack://WebIDL2/./lib/productions/container.js","webpack://WebIDL2/./lib/productions/constant.js","webpack://WebIDL2/./lib/productions/iterable.js","webpack://WebIDL2/./lib/productions/constructor.js","webpack://WebIDL2/./lib/productions/interface.js","webpack://WebIDL2/./lib/validators/interface.js","webpack://WebIDL2/./lib/productions/mixin.js","webpack://WebIDL2/./lib/productions/field.js","webpack://WebIDL2/./lib/productions/dictionary.js","webpack://WebIDL2/./lib/productions/namespace.js","webpack://WebIDL2/./lib/productions/callback-interface.js","webpack://WebIDL2/./lib/webidl2.js","webpack://WebIDL2/./lib/writer.js","webpack://WebIDL2/./lib/validator.js"],"sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"WebIDL2\"] = factory();\n\telse\n\t\troot[\"WebIDL2\"] = factory();\n})(globalThis, function() {\nreturn ","// The require scope\nvar __webpack_require__ = {};\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on 
exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","/**\n * @param {string} text\n */\nfunction lastLine(text) {\n const splitted = text.split(\"\\n\");\n return splitted[splitted.length - 1];\n}\n\nfunction appendIfExist(base, target) {\n let result = base;\n if (target) {\n result += ` ${target}`;\n }\n return result;\n}\n\nfunction contextAsText(node) {\n const hierarchy = [node];\n while (node && node.parent) {\n const { parent } = node;\n hierarchy.unshift(parent);\n node = parent;\n }\n return hierarchy.map((n) => appendIfExist(n.type, n.name)).join(\" -> \");\n}\n\n/**\n * @typedef {object} WebIDL2ErrorOptions\n * @property {\"error\" | \"warning\"} [level]\n * @property {Function} [autofix]\n *\n * @typedef {ReturnType<typeof error>} WebIDLErrorData\n *\n * @param {string} message error message\n * @param {\"Syntax\" | \"Validation\"} kind error type\n * @param {WebIDL2ErrorOptions} [options]\n */\nfunction error(\n source,\n position,\n current,\n message,\n kind,\n { level = \"error\", autofix, ruleName } = {}\n) {\n /**\n * @param {number} count\n */\n function sliceTokens(count) {\n return count > 0\n ? source.slice(position, position + count)\n : source.slice(Math.max(position + count, 0), position);\n }\n\n function tokensToText(inputs, { precedes } = {}) {\n const text = inputs.map((t) => t.trivia + t.value).join(\"\");\n const nextToken = source[position];\n if (nextToken.type === \"eof\") {\n return text;\n }\n if (precedes) {\n return text + nextToken.trivia;\n }\n return text.slice(nextToken.trivia.length);\n }\n\n const maxTokens = 5; // arbitrary but works well enough\n const line =\n source[position].type !== \"eof\"\n ? source[position].line\n : source.length > 1\n ? source[position - 1].line\n : 1;\n\n const precedingLastLine = lastLine(\n tokensToText(sliceTokens(-maxTokens), { precedes: true })\n );\n\n const subsequentTokens = sliceTokens(maxTokens);\n const subsequentText = tokensToText(subsequentTokens);\n const subsequentFirstLine = subsequentText.split(\"\\n\")[0];\n\n const spaced = \" \".repeat(precedingLastLine.length) + \"^\";\n const sourceContext = precedingLastLine + subsequentFirstLine + \"\\n\" + spaced;\n\n const contextType = kind === \"Syntax\" ? \"since\" : \"inside\";\n const inSourceName = source.name ? ` in ${source.name}` : \"\";\n const grammaticalContext =\n current && current.name\n ? `, ${contextType} \\`${current.partial ? 
\"partial \" : \"\"}${contextAsText(\n current\n )}\\``\n : \"\";\n const context = `${kind} error at line ${line}${inSourceName}${grammaticalContext}:\\n${sourceContext}`;\n return {\n message: `${context} ${message}`,\n bareMessage: message,\n context,\n line,\n sourceName: source.name,\n level,\n ruleName,\n autofix,\n input: subsequentText,\n tokens: subsequentTokens,\n };\n}\n\n/**\n * @param {string} message error message\n */\nexport function syntaxError(source, position, current, message) {\n return error(source, position, current, message, \"Syntax\");\n}\n\n/**\n * @param {string} message error message\n * @param {WebIDL2ErrorOptions} [options]\n */\nexport function validationError(\n token,\n current,\n ruleName,\n message,\n options = {}\n) {\n options.ruleName = ruleName;\n return error(\n current.source,\n token.index,\n current,\n message,\n \"Validation\",\n options\n );\n}\n","// @ts-check\n\nexport class Base {\n /**\n * @param {object} initializer\n * @param {Base[\"source\"]} initializer.source\n * @param {Base[\"tokens\"]} initializer.tokens\n */\n constructor({ source, tokens }) {\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens, writable: true },\n parent: { value: null, writable: true },\n this: { value: this }, // useful when escaping from proxy\n });\n }\n\n toJSON() {\n const json = { type: undefined, name: undefined, inheritance: undefined };\n let proto = this;\n while (proto !== Object.prototype) {\n const descMap = Object.getOwnPropertyDescriptors(proto);\n for (const [key, value] of Object.entries(descMap)) {\n if (value.enumerable || value.get) {\n // @ts-ignore - allow indexing here\n json[key] = this[key];\n }\n }\n proto = Object.getPrototypeOf(proto);\n }\n return json;\n }\n}\n","// @ts-check\n\n/**\n * @typedef {import(\"../productions/dictionary.js\").Dictionary} Dictionary\n *\n * @param {*} idlType\n * @param {import(\"../validator.js\").Definitions} defs\n * @param {object} [options]\n * @param {boolean} [options.useNullableInner] use when the input idlType is nullable and you want to use its inner type\n * @return {{ reference: *, dictionary: Dictionary }} the type reference that ultimately includes dictionary.\n */\nexport function idlTypeIncludesDictionary(\n idlType,\n defs,\n { useNullableInner } = {}\n) {\n if (!idlType.union) {\n const def = defs.unique.get(idlType.idlType);\n if (!def) {\n return;\n }\n if (def.type === \"typedef\") {\n const { typedefIncludesDictionary } = defs.cache;\n if (typedefIncludesDictionary.has(def)) {\n // Note that this also halts when it met indeterminate state\n // to prevent infinite recursion\n return typedefIncludesDictionary.get(def);\n }\n defs.cache.typedefIncludesDictionary.set(def, undefined); // indeterminate state\n const result = idlTypeIncludesDictionary(def.idlType, defs);\n defs.cache.typedefIncludesDictionary.set(def, result);\n if (result) {\n return {\n reference: idlType,\n dictionary: result.dictionary,\n };\n }\n }\n if (def.type === \"dictionary\" && (useNullableInner || !idlType.nullable)) {\n return {\n reference: idlType,\n dictionary: def,\n };\n }\n }\n for (const subtype of idlType.subtype) {\n const result = idlTypeIncludesDictionary(subtype, defs);\n if (result) {\n if (subtype.union) {\n return result;\n }\n return {\n reference: subtype,\n dictionary: result.dictionary,\n };\n }\n }\n}\n\n/**\n * @param {*} dict dictionary type\n * @param {import(\"../validator.js\").Definitions} defs\n * @return {boolean}\n */\nexport function 
dictionaryIncludesRequiredField(dict, defs) {\n if (defs.cache.dictionaryIncludesRequiredField.has(dict)) {\n return defs.cache.dictionaryIncludesRequiredField.get(dict);\n }\n // Set cached result to indeterminate to short-circuit circular definitions.\n // The final result will be updated to true or false.\n defs.cache.dictionaryIncludesRequiredField.set(dict, undefined);\n let result = dict.members.some((field) => field.required);\n if (!result && dict.inheritance) {\n const superdict = defs.unique.get(dict.inheritance);\n if (!superdict) {\n // Assume required members in the supertype if it is unknown.\n result = true;\n } else if (dictionaryIncludesRequiredField(superdict, defs)) {\n result = true;\n }\n }\n defs.cache.dictionaryIncludesRequiredField.set(dict, result);\n return result;\n}\n","// @ts-check\n\nexport class ArrayBase extends Array {\n constructor({ source, tokens }) {\n super();\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens },\n parent: { value: null, writable: true },\n });\n }\n}\n","// @ts-check\n\nimport { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class WrappedToken extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} type\n */\n static parser(tokeniser, type) {\n return () => {\n const value = tokeniser.consumeKind(type);\n if (value) {\n return new WrappedToken({\n source: tokeniser.source,\n tokens: { value },\n });\n }\n };\n }\n\n get value() {\n return unescape(this.tokens.value.value);\n }\n\n /** @param {import(\"../writer\").Writer} w */\n write(w) {\n return w.ts.wrap([\n w.token(this.tokens.value),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\nexport class Eof extends WrappedToken {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consumeKind(\"eof\");\n if (value) {\n return new Eof({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"eof\";\n }\n}\n","import { Base } from \"./base.js\";\nimport { ArrayBase } from \"./array-base.js\";\nimport { WrappedToken } from \"./token.js\";\nimport { list, argument_list, autoParenter, unescape } from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} tokenName\n */\nfunction tokens(tokeniser, tokenName) {\n return list(tokeniser, {\n parser: WrappedToken.parser(tokeniser, tokenName),\n listName: tokenName + \" list\",\n });\n}\n\nconst extAttrValueSyntax = [\"identifier\", \"decimal\", \"integer\", \"string\"];\n\nconst shouldBeLegacyPrefixed = [\n \"NoInterfaceObject\",\n \"LenientSetter\",\n \"LenientThis\",\n \"TreatNonObjectAsNull\",\n \"Unforgeable\",\n];\n\nconst renamedLegacies = new Map([\n ...shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`]),\n [\"NamedConstructor\", \"LegacyFactoryFunction\"],\n [\"OverrideBuiltins\", \"LegacyOverrideBuiltIns\"],\n [\"TreatNullAs\", \"LegacyNullToEmptyString\"],\n]);\n\n/**\n * This will allow a set of extended attribute values to be parsed.\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction extAttrListItems(tokeniser) {\n for (const syntax of extAttrValueSyntax) {\n const toks = tokens(tokeniser, syntax);\n if (toks.length) {\n return toks;\n }\n }\n tokeniser.error(\n `Expected identifiers, strings, decimals, or integers but none found`\n );\n}\n\nclass ExtendedAttributeParameters extends Base {\n /**\n 
* @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = { assign: tokeniser.consume(\"=\") };\n const ret = autoParenter(\n new ExtendedAttributeParameters({ source: tokeniser.source, tokens })\n );\n ret.list = [];\n if (tokens.assign) {\n tokens.asterisk = tokeniser.consume(\"*\");\n if (tokens.asterisk) {\n return ret.this;\n }\n tokens.secondaryName = tokeniser.consumeKind(...extAttrValueSyntax);\n }\n tokens.open = tokeniser.consume(\"(\");\n if (tokens.open) {\n ret.list = ret.rhsIsList\n ? // [Exposed=(Window,Worker)]\n extAttrListItems(tokeniser)\n : // [LegacyFactoryFunction=Audio(DOMString src)] or [Constructor(DOMString str)]\n argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") ||\n tokeniser.error(\"Unexpected token in extended attribute argument list\");\n } else if (ret.hasRhs && !tokens.secondaryName) {\n tokeniser.error(\"No right hand side to extended attribute assignment\");\n }\n return ret.this;\n }\n\n get rhsIsList() {\n return (\n this.tokens.assign && !this.tokens.asterisk && !this.tokens.secondaryName\n );\n }\n\n get rhsType() {\n if (this.rhsIsList) {\n return this.list[0].tokens.value.type + \"-list\";\n }\n if (this.tokens.asterisk) {\n return \"*\";\n }\n if (this.tokens.secondaryName) {\n return this.tokens.secondaryName.type;\n }\n return null;\n }\n\n /** @param {import(\"../writer.js\").Writer)} w */\n write(w) {\n const { rhsType } = this;\n return w.ts.wrap([\n w.token(this.tokens.assign),\n w.token(this.tokens.asterisk),\n w.reference_token(this.tokens.secondaryName, this.parent),\n w.token(this.tokens.open),\n ...this.list.map((p) => {\n return rhsType === \"identifier-list\"\n ? w.identifier(p, this.parent)\n : p.write(w);\n }),\n w.token(this.tokens.close),\n ]);\n }\n}\n\nexport class SimpleExtendedAttribute extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const name = tokeniser.consumeKind(\"identifier\");\n if (name) {\n return new SimpleExtendedAttribute({\n source: tokeniser.source,\n tokens: { name },\n params: ExtendedAttributeParameters.parse(tokeniser),\n });\n }\n }\n\n constructor({ source, tokens, params }) {\n super({ source, tokens });\n params.parent = this;\n Object.defineProperty(this, \"params\", { value: params });\n }\n\n get type() {\n return \"extended-attribute\";\n }\n get name() {\n return this.tokens.name.value;\n }\n get rhs() {\n const { rhsType: type, tokens, list } = this.params;\n if (!type) {\n return null;\n }\n const value = this.params.rhsIsList\n ? list\n : this.params.tokens.secondaryName\n ? unescape(tokens.secondaryName.value)\n : null;\n return { type, value };\n }\n get arguments() {\n const { rhsIsList, list } = this.params;\n if (!list || rhsIsList) {\n return [];\n }\n return list;\n }\n\n *validate(defs) {\n const { name } = this;\n if (name === \"LegacyNoInterfaceObject\") {\n const message = `\\`[LegacyNoInterfaceObject]\\` extended attribute is an \\\nundesirable feature that may be removed from Web IDL in the future. Refer to the \\\n[relevant upstream PR](https://github.com/whatwg/webidl/pull/609) for more \\\ninformation.`;\n yield validationError(\n this.tokens.name,\n this,\n \"no-nointerfaceobject\",\n message,\n { level: \"warning\" }\n );\n } else if (renamedLegacies.has(name)) {\n const message = `\\`[${name}]\\` extended attribute is a legacy feature \\\nthat is now renamed to \\`[${renamedLegacies.get(name)}]\\`. 
Refer to the \\\n[relevant upstream PR](https://github.com/whatwg/webidl/pull/870) for more \\\ninformation.`;\n yield validationError(this.tokens.name, this, \"renamed-legacy\", message, {\n level: \"warning\",\n autofix: renameLegacyExtendedAttribute(this),\n });\n }\n for (const arg of this.arguments) {\n yield* arg.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer)} w */\n write(w) {\n return w.ts.wrap([\n w.ts.trivia(this.tokens.name.trivia),\n w.ts.extendedAttribute(\n w.ts.wrap([\n w.ts.extendedAttributeReference(this.name),\n this.params.write(w),\n ])\n ),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {SimpleExtendedAttribute} extAttr\n */\nfunction renameLegacyExtendedAttribute(extAttr) {\n return () => {\n const { name } = extAttr;\n extAttr.tokens.name.value = renamedLegacies.get(name);\n if (name === \"TreatNullAs\") {\n extAttr.params.tokens = {};\n }\n };\n}\n\n// Note: we parse something simpler than the official syntax. It's all that ever\n// seems to be used\nexport class ExtendedAttributes extends ArrayBase {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"[\");\n if (!tokens.open) return new ExtendedAttributes({});\n const ret = new ExtendedAttributes({ source: tokeniser.source, tokens });\n ret.push(\n ...list(tokeniser, {\n parser: SimpleExtendedAttribute.parse,\n listName: \"extended attribute\",\n })\n );\n tokens.close =\n tokeniser.consume(\"]\") ||\n tokeniser.error(\"Unexpected closing token of extended attribute\");\n if (!ret.length) {\n tokeniser.error(\"Found an empty extended attribute\");\n }\n if (tokeniser.probe(\"[\")) {\n tokeniser.error(\n \"Illegal double extended attribute lists, consider merging them\"\n );\n }\n return ret;\n }\n\n *validate(defs) {\n for (const extAttr of this) {\n yield* extAttr.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer)} w */\n write(w) {\n if (!this.length) return \"\";\n return w.ts.wrap([\n w.token(this.tokens.open),\n ...this.map((ea) => ea.write(w)),\n w.token(this.tokens.close),\n ]);\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n return_type,\n primitive_type,\n autoParenter,\n} from \"./helpers.js\";\nimport { stringTypes, typeNameKeywords } from \"../tokeniser.js\";\nimport { validationError } from \"../error.js\";\nimport { idlTypeIncludesDictionary } from \"../validators/helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction generic_type(tokeniser, typeName) {\n const base = tokeniser.consume(\n \"FrozenArray\",\n \"ObservableArray\",\n \"Promise\",\n \"sequence\",\n \"record\"\n );\n if (!base) {\n return;\n }\n const ret = autoParenter(\n new Type({ source: tokeniser.source, tokens: { base } })\n );\n ret.tokens.open =\n tokeniser.consume(\"<\") ||\n tokeniser.error(`No opening bracket after ${base.value}`);\n switch (base.value) {\n case \"Promise\": {\n if (tokeniser.probe(\"[\"))\n tokeniser.error(\"Promise type cannot have extended attribute\");\n const subtype =\n return_type(tokeniser, typeName) ||\n tokeniser.error(\"Missing Promise subtype\");\n ret.subtype.push(subtype);\n break;\n }\n case \"sequence\":\n case \"FrozenArray\":\n case \"ObservableArray\": {\n const subtype =\n type_with_extended_attributes(tokeniser, typeName) ||\n 
tokeniser.error(`Missing ${base.value} subtype`);\n ret.subtype.push(subtype);\n break;\n }\n case \"record\": {\n if (tokeniser.probe(\"[\"))\n tokeniser.error(\"Record key cannot have extended attribute\");\n const keyType =\n tokeniser.consume(...stringTypes) ||\n tokeniser.error(`Record key must be one of: ${stringTypes.join(\", \")}`);\n const keyIdlType = new Type({\n source: tokeniser.source,\n tokens: { base: keyType },\n });\n keyIdlType.tokens.separator =\n tokeniser.consume(\",\") ||\n tokeniser.error(\"Missing comma after record key type\");\n keyIdlType.type = typeName;\n const valueType =\n type_with_extended_attributes(tokeniser, typeName) ||\n tokeniser.error(\"Error parsing generic type record\");\n ret.subtype.push(keyIdlType, valueType);\n break;\n }\n }\n if (!ret.idlType) tokeniser.error(`Error parsing generic type ${base.value}`);\n ret.tokens.close =\n tokeniser.consume(\">\") ||\n tokeniser.error(`Missing closing bracket after ${base.value}`);\n return ret.this;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction type_suffix(tokeniser, obj) {\n const nullable = tokeniser.consume(\"?\");\n if (nullable) {\n obj.tokens.nullable = nullable;\n }\n if (tokeniser.probe(\"?\")) tokeniser.error(\"Can't nullable more than once\");\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction single_type(tokeniser, typeName) {\n let ret = generic_type(tokeniser, typeName) || primitive_type(tokeniser);\n if (!ret) {\n const base =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(...stringTypes, ...typeNameKeywords);\n if (!base) {\n return;\n }\n ret = new Type({ source: tokeniser.source, tokens: { base } });\n if (tokeniser.probe(\"<\"))\n tokeniser.error(`Unsupported generic type ${base.value}`);\n }\n if (ret.generic === \"Promise\" && tokeniser.probe(\"?\")) {\n tokeniser.error(\"Promise type cannot be nullable\");\n }\n ret.type = typeName || null;\n type_suffix(tokeniser, ret);\n if (ret.nullable && ret.idlType === \"any\")\n tokeniser.error(\"Type `any` cannot be made nullable\");\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} type\n */\nfunction union_type(tokeniser, type) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"(\");\n if (!tokens.open) return;\n const ret = autoParenter(new Type({ source: tokeniser.source, tokens }));\n ret.type = type || null;\n while (true) {\n const typ =\n type_with_extended_attributes(tokeniser) ||\n tokeniser.error(\"No type after open parenthesis or 'or' in union type\");\n if (typ.idlType === \"any\")\n tokeniser.error(\"Type `any` cannot be included in a union type\");\n if (typ.generic === \"Promise\")\n tokeniser.error(\"Type `Promise` cannot be included in a union type\");\n ret.subtype.push(typ);\n const or = tokeniser.consume(\"or\");\n if (or) {\n typ.tokens.separator = or;\n } else break;\n }\n if (ret.idlType.length < 2) {\n tokeniser.error(\n \"At least two types are expected in a union type but found less\"\n );\n }\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated union type\");\n type_suffix(tokeniser, ret);\n return ret.this;\n}\n\nexport class Type extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\n static parse(tokeniser, typeName) {\n return single_type(tokeniser, typeName) || union_type(tokeniser, typeName);\n }\n\n constructor({ source, tokens }) {\n super({ source, 
tokens });\n Object.defineProperty(this, \"subtype\", { value: [], writable: true });\n this.extAttrs = new ExtendedAttributes({});\n }\n\n get generic() {\n if (this.subtype.length && this.tokens.base) {\n return this.tokens.base.value;\n }\n return \"\";\n }\n get nullable() {\n return Boolean(this.tokens.nullable);\n }\n get union() {\n return Boolean(this.subtype.length) && !this.tokens.base;\n }\n get idlType() {\n if (this.subtype.length) {\n return this.subtype;\n }\n // Adding prefixes/postfixes for \"unrestricted float\", etc.\n const name = [this.tokens.prefix, this.tokens.base, this.tokens.postfix]\n .filter((t) => t)\n .map((t) => t.value)\n .join(\" \");\n return unescape(name);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n\n if (this.idlType === \"void\") {\n const message = `\\`void\\` is now replaced by \\`undefined\\`. Refer to the \\\n[relevant GitHub issue](https://github.com/whatwg/webidl/issues/60) \\\nfor more information.`;\n yield validationError(this.tokens.base, this, \"replace-void\", message, {\n autofix: replaceVoid(this),\n });\n }\n\n /*\n * If a union is nullable, its subunions cannot include a dictionary\n * If not, subunions may include dictionaries if each union is not nullable\n */\n const typedef = !this.union && defs.unique.get(this.idlType);\n const target = this.union\n ? this\n : typedef && typedef.type === \"typedef\"\n ? typedef.idlType\n : undefined;\n if (target && this.nullable) {\n // do not allow any dictionary\n const { reference } = idlTypeIncludesDictionary(target, defs) || {};\n if (reference) {\n const targetToken = (this.union ? reference : this).tokens.base;\n const message = \"Nullable union cannot include a dictionary type.\";\n yield validationError(\n targetToken,\n this,\n \"no-nullable-union-dict\",\n message\n );\n }\n } else {\n // allow some dictionary\n for (const subtype of this.subtype) {\n yield* subtype.validate(defs);\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer)} w */\n write(w) {\n const type_body = () => {\n if (this.union || this.generic) {\n return w.ts.wrap([\n w.token(this.tokens.base, w.ts.generic),\n w.token(this.tokens.open),\n ...this.subtype.map((t) => t.write(w)),\n w.token(this.tokens.close),\n ]);\n }\n const firstToken = this.tokens.prefix || this.tokens.base;\n const prefix = this.tokens.prefix\n ? 
[this.tokens.prefix.value, w.ts.trivia(this.tokens.base.trivia)]\n : [];\n const ref = w.reference(\n w.ts.wrap([\n ...prefix,\n this.tokens.base.value,\n w.token(this.tokens.postfix),\n ]),\n { unescaped: this.idlType, context: this }\n );\n return w.ts.wrap([w.ts.trivia(firstToken.trivia), ref]);\n };\n return w.ts.wrap([\n this.extAttrs.write(w),\n type_body(),\n w.token(this.tokens.nullable),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {Type} type\n */\nfunction replaceVoid(type) {\n return () => {\n type.tokens.base.value = \"undefined\";\n };\n}\n","import { Base } from \"./base.js\";\nimport { const_data, const_value } from \"./helpers.js\";\n\nexport class Default extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const assign = tokeniser.consume(\"=\");\n if (!assign) {\n return null;\n }\n const def =\n const_value(tokeniser) ||\n tokeniser.consumeKind(\"string\") ||\n tokeniser.consume(\"null\", \"[\", \"{\") ||\n tokeniser.error(\"No value for default\");\n const expression = [def];\n if (def.value === \"[\") {\n const close =\n tokeniser.consume(\"]\") ||\n tokeniser.error(\"Default sequence value must be empty\");\n expression.push(close);\n } else if (def.value === \"{\") {\n const close =\n tokeniser.consume(\"}\") ||\n tokeniser.error(\"Default dictionary value must be empty\");\n expression.push(close);\n }\n return new Default({\n source: tokeniser.source,\n tokens: { assign },\n expression,\n });\n }\n\n constructor({ source, tokens, expression }) {\n super({ source, tokens });\n expression.parent = this;\n Object.defineProperty(this, \"expression\", { value: expression });\n }\n\n get type() {\n return const_data(this.expression[0]).type;\n }\n get value() {\n return const_data(this.expression[0]).value;\n }\n get negative() {\n return const_data(this.expression[0]).negative;\n }\n\n /** @param {import(\"../writer.js\").Writer)} w */\n write(w) {\n return w.ts.wrap([\n w.token(this.tokens.assign),\n ...this.expression.map((t) => w.token(t)),\n ]);\n }\n}\n","// @ts-check\n\nimport { Base } from \"./base.js\";\nimport { Default } from \"./default.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n autoParenter,\n getFirstToken,\n} from \"./helpers.js\";\nimport { argumentNameKeywords, Tokeniser } from \"../tokeniser.js\";\nimport { validationError } from \"../error.js\";\nimport {\n idlTypeIncludesDictionary,\n dictionaryIncludesRequiredField,\n} from \"../validators/helpers.js\";\n\nexport class Argument extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(\n new Argument({ source: tokeniser.source, tokens })\n );\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.optional = tokeniser.consume(\"optional\");\n ret.idlType = type_with_extended_attributes(tokeniser, \"argument-type\");\n if (!ret.idlType) {\n return tokeniser.unconsume(start_position);\n }\n if (!tokens.optional) {\n tokens.variadic = tokeniser.consume(\"...\");\n }\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(...argumentNameKeywords);\n if (!tokens.name) {\n return tokeniser.unconsume(start_position);\n }\n ret.default = tokens.optional ? 
Default.parse(tokeniser) : null;\n return ret.this;\n }\n\n get type() {\n return \"argument\";\n }\n get optional() {\n return !!this.tokens.optional;\n }\n get variadic() {\n return !!this.tokens.variadic;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n /**\n * @param {import(\"../validator.js\").Definitions} defs\n */\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n const result = idlTypeIncludesDictionary(this.idlType, defs, {\n useNullableInner: true,\n });\n if (result) {\n if (this.idlType.nullable) {\n const message = `Dictionary arguments cannot be nullable.`;\n yield validationError(\n this.tokens.name,\n this,\n \"no-nullable-dict-arg\",\n message\n );\n } else if (!this.optional) {\n if (\n this.parent &&\n !dictionaryIncludesRequiredField(result.dictionary, defs) &&\n isLastRequiredArgument(this)\n ) {\n const message = `Dictionary argument must be optional if it has no required fields`;\n yield validationError(\n this.tokens.name,\n this,\n \"dict-arg-optional\",\n message,\n {\n autofix: autofixDictionaryArgumentOptionality(this),\n }\n );\n }\n } else if (!this.default) {\n const message = `Optional dictionary arguments must have a default value of \\`{}\\`.`;\n yield validationError(\n this.tokens.name,\n this,\n \"dict-arg-default\",\n message,\n {\n autofix: autofixOptionalDictionaryDefaultValue(this),\n }\n );\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.optional),\n w.ts.type(this.idlType.write(w)),\n w.token(this.tokens.variadic),\n w.name_token(this.tokens.name, { data: this }),\n this.default ? this.default.write(w) : \"\",\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {Argument} arg\n */\nfunction isLastRequiredArgument(arg) {\n const list = arg.parent.arguments || arg.parent.list;\n const index = list.indexOf(arg);\n const requiredExists = list.slice(index + 1).some((a) => !a.optional);\n return !requiredExists;\n}\n\n/**\n * @param {Argument} arg\n */\nfunction autofixDictionaryArgumentOptionality(arg) {\n return () => {\n const firstToken = getFirstToken(arg.idlType);\n arg.tokens.optional = {\n ...firstToken,\n type: \"optional\",\n value: \"optional\",\n };\n firstToken.trivia = \" \";\n autofixOptionalDictionaryDefaultValue(arg)();\n };\n}\n\n/**\n * @param {Argument} arg\n */\nfunction autofixOptionalDictionaryDefaultValue(arg) {\n return () => {\n arg.default = Default.parse(new Tokeniser(\" = {}\"));\n };\n}\n","import { Base } from \"./base.js\";\nimport {\n return_type,\n argument_list,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\n\nexport class Operation extends Base {\n /**\n * @typedef {import(\"../tokeniser.js\").Token} Token\n *\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {Token} [options.special]\n * @param {Token} [options.regular]\n */\n static parse(tokeniser, { special, regular } = {}) {\n const tokens = { special };\n const ret = autoParenter(\n new Operation({ source: tokeniser.source, tokens })\n );\n if (special && special.value === \"stringifier\") {\n tokens.termination = tokeniser.consume(\";\");\n if (tokens.termination) {\n ret.arguments = [];\n return ret;\n }\n }\n if (!special && !regular) {\n tokens.special = tokeniser.consume(\"getter\", \"setter\", \"deleter\");\n }\n ret.idlType =\n return_type(tokeniser) || 
tokeniser.error(\"Missing return type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") || tokeniser.consume(\"includes\");\n tokens.open =\n tokeniser.consume(\"(\") || tokeniser.error(\"Invalid operation\");\n ret.arguments = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated operation\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated operation, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"operation\";\n }\n get name() {\n const { name } = this.tokens;\n if (!name) {\n return \"\";\n }\n return unescape(name.value);\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n if (!this.name && [\"\", \"static\"].includes(this.special)) {\n const message = `Regular or static operations must have both a return type and an identifier.`;\n yield validationError(this.tokens.open, this, \"incomplete-op\", message);\n }\n if (this.idlType) {\n yield* this.idlType.validate(defs);\n }\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n const body = this.idlType\n ? [\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.open),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.close),\n ]\n : [];\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n this.tokens.name\n ? w.token(this.tokens.special)\n : w.token(this.tokens.special, w.ts.nameless, { data: this, parent }),\n ...body,\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","// @ts-check\n\nimport { validationError } from \"../error.js\";\nimport { idlTypeIncludesDictionary } from \"../validators/helpers.js\";\nimport { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class Attribute extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.special]\n * @param {boolean} [options.noInherit]\n * @param {boolean} [options.readonly]\n */\n static parse(\n tokeniser,\n { special, noInherit = false, readonly = false } = {}\n ) {\n const start_position = tokeniser.position;\n const tokens = { special };\n const ret = autoParenter(\n new Attribute({ source: tokeniser.source, tokens })\n );\n if (!special && !noInherit) {\n tokens.special = tokeniser.consume(\"inherit\");\n }\n if (ret.special === \"inherit\" && tokeniser.probe(\"readonly\")) {\n tokeniser.error(\"Inherited attributes cannot be read-only\");\n }\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (readonly && !tokens.readonly && tokeniser.probe(\"attribute\")) {\n tokeniser.error(\"Attributes must be readonly in this context\");\n }\n tokens.base = tokeniser.consume(\"attribute\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"attribute-type\") ||\n tokeniser.error(\"Attribute lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(\"async\", \"required\") ||\n tokeniser.error(\"Attribute lacks a name\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n 
tokeniser.error(\"Unterminated attribute, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"attribute\";\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n\n switch (this.idlType.generic) {\n case \"sequence\":\n case \"record\": {\n const message = `Attributes cannot accept ${this.idlType.generic} types.`;\n yield validationError(\n this.tokens.name,\n this,\n \"attr-invalid-type\",\n message\n );\n break;\n }\n default: {\n const { reference } =\n idlTypeIncludesDictionary(this.idlType, defs) || {};\n if (reference) {\n const targetToken = (this.idlType.union ? reference : this.idlType)\n .tokens.base;\n const message = \"Attributes cannot accept dictionary types.\";\n yield validationError(\n targetToken,\n this,\n \"attr-invalid-type\",\n message\n );\n }\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.special),\n w.token(this.tokens.readonly),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Type } from \"./type.js\";\nimport { Argument } from \"./argument.js\";\nimport {\n ExtendedAttributes,\n SimpleExtendedAttribute,\n} from \"./extended-attributes.js\";\nimport { Operation } from \"./operation.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Tokeniser } from \"../tokeniser.js\";\n\n/**\n * @param {string} identifier\n */\nexport function unescape(identifier) {\n return identifier.startsWith(\"_\") ? 
identifier.slice(1) : identifier;\n}\n\n/**\n * Parses comma-separated list\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {object} args\n * @param {Function} args.parser parser function for each item\n * @param {boolean} [args.allowDangler] whether to allow dangling comma\n * @param {string} [args.listName] the name to be shown on error messages\n */\nexport function list(tokeniser, { parser, allowDangler, listName = \"list\" }) {\n const first = parser(tokeniser);\n if (!first) {\n return [];\n }\n first.tokens.separator = tokeniser.consume(\",\");\n const items = [first];\n while (first.tokens.separator) {\n const item = parser(tokeniser);\n if (!item) {\n if (!allowDangler) {\n tokeniser.error(`Trailing comma in ${listName}`);\n }\n break;\n }\n item.tokens.separator = tokeniser.consume(\",\");\n items.push(item);\n if (!item.tokens.separator) break;\n }\n return items;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function const_value(tokeniser) {\n return (\n tokeniser.consumeKind(\"decimal\", \"integer\") ||\n tokeniser.consume(\"true\", \"false\", \"Infinity\", \"-Infinity\", \"NaN\")\n );\n}\n\n/**\n * @param {object} token\n * @param {string} token.type\n * @param {string} token.value\n */\nexport function const_data({ type, value }) {\n switch (type) {\n case \"decimal\":\n case \"integer\":\n return { type: \"number\", value };\n case \"string\":\n return { type: \"string\", value: value.slice(1, -1) };\n }\n\n switch (value) {\n case \"true\":\n case \"false\":\n return { type: \"boolean\", value: value === \"true\" };\n case \"Infinity\":\n case \"-Infinity\":\n return { type: \"Infinity\", negative: value.startsWith(\"-\") };\n case \"[\":\n return { type: \"sequence\", value: [] };\n case \"{\":\n return { type: \"dictionary\" };\n default:\n return { type: value };\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function primitive_type(tokeniser) {\n function integer_type() {\n const prefix = tokeniser.consume(\"unsigned\");\n const base = tokeniser.consume(\"short\", \"long\");\n if (base) {\n const postfix = tokeniser.consume(\"long\");\n return new Type({ source, tokens: { prefix, base, postfix } });\n }\n if (prefix) tokeniser.error(\"Failed to parse integer type\");\n }\n\n function decimal_type() {\n const prefix = tokeniser.consume(\"unrestricted\");\n const base = tokeniser.consume(\"float\", \"double\");\n if (base) {\n return new Type({ source, tokens: { prefix, base } });\n }\n if (prefix) tokeniser.error(\"Failed to parse float type\");\n }\n\n const { source } = tokeniser;\n const num_type = integer_type(tokeniser) || decimal_type(tokeniser);\n if (num_type) return num_type;\n const base = tokeniser.consume(\n \"bigint\",\n \"boolean\",\n \"byte\",\n \"octet\",\n \"undefined\"\n );\n if (base) {\n return new Type({ source, tokens: { base } });\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function argument_list(tokeniser) {\n return list(tokeniser, {\n parser: Argument.parse,\n listName: \"arguments list\",\n });\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nexport function type_with_extended_attributes(tokeniser, typeName) {\n const extAttrs = ExtendedAttributes.parse(tokeniser);\n const ret = Type.parse(tokeniser, typeName);\n if (ret) autoParenter(ret).extAttrs = extAttrs;\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param 
{string} typeName\n */\nexport function return_type(tokeniser, typeName) {\n const typ = Type.parse(tokeniser, typeName || \"return-type\");\n if (typ) {\n return typ;\n }\n const voidToken = tokeniser.consume(\"void\");\n if (voidToken) {\n const ret = new Type({\n source: tokeniser.source,\n tokens: { base: voidToken },\n });\n ret.type = \"return-type\";\n return ret;\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function stringifier(tokeniser) {\n const special = tokeniser.consume(\"stringifier\");\n if (!special) return;\n const member =\n Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"Unterminated stringifier\");\n return member;\n}\n\n/**\n * @param {string} str\n */\nexport function getLastIndentation(str) {\n const lines = str.split(\"\\n\");\n // the first line visually binds to the preceding token\n if (lines.length) {\n const match = lines[lines.length - 1].match(/^\\s+/);\n if (match) {\n return match[0];\n }\n }\n return \"\";\n}\n\n/**\n * @param {string} parentTrivia\n */\nexport function getMemberIndentation(parentTrivia) {\n const indentation = getLastIndentation(parentTrivia);\n const indentCh = indentation.includes(\"\\t\") ? \"\\t\" : \" \";\n return indentation + indentCh;\n}\n\n/**\n * @param {object} def\n * @param {import(\"./extended-attributes.js\").ExtendedAttributes} def.extAttrs\n */\nexport function autofixAddExposedWindow(def) {\n return () => {\n if (def.extAttrs.length) {\n const tokeniser = new Tokeniser(\"Exposed=Window,\");\n const exposed = SimpleExtendedAttribute.parse(tokeniser);\n exposed.tokens.separator = tokeniser.consume(\",\");\n const existing = def.extAttrs[0];\n if (!/^\\s/.test(existing.tokens.name.trivia)) {\n existing.tokens.name.trivia = ` ${existing.tokens.name.trivia}`;\n }\n def.extAttrs.unshift(exposed);\n } else {\n autoParenter(def).extAttrs = ExtendedAttributes.parse(\n new Tokeniser(\"[Exposed=Window]\")\n );\n const trivia = def.tokens.base.trivia;\n def.extAttrs.tokens.open.trivia = trivia;\n def.tokens.base.trivia = `\\n${getLastIndentation(trivia)}`;\n }\n };\n}\n\n/**\n * Get the first syntax token for the given IDL object.\n * @param {*} data\n */\nexport function getFirstToken(data) {\n if (data.extAttrs.length) {\n return data.extAttrs.tokens.open;\n }\n if (data.type === \"operation\" && !data.special) {\n return getFirstToken(data.idlType);\n }\n const tokens = Object.values(data.tokens).sort((x, y) => x.index - y.index);\n return tokens[0];\n}\n\n/**\n * @template T\n * @param {T[]} array\n * @param {(item: T) => boolean} predicate\n */\nexport function findLastIndex(array, predicate) {\n const index = array.slice().reverse().findIndex(predicate);\n if (index === -1) {\n return index;\n }\n return array.length - index - 1;\n}\n\n/**\n * Returns a proxy that auto-assign `parent` field.\n * @template T\n * @param {T} data\n * @param {*} [parent] The object that will be assigned to `parent`.\n * If absent, it will be `data` by default.\n * @return {T}\n */\nexport function autoParenter(data, parent) {\n if (!parent) {\n // Defaults to `data` unless specified otherwise.\n parent = data;\n }\n if (!data) {\n // This allows `autoParenter(undefined)` which again allows\n // `autoParenter(parse())` where the function may return nothing.\n return data;\n }\n return new Proxy(data, {\n get(target, p) {\n const value = target[p];\n if (Array.isArray(value)) {\n // Wraps the array so that any added items will also automatically\n // 
get their `parent` values.\n return autoParenter(value, target);\n }\n return value;\n },\n set(target, p, value) {\n target[p] = value;\n if (!value) {\n return true;\n } else if (Array.isArray(value)) {\n // Assigning an array will add `parent` to its items.\n for (const item of value) {\n if (typeof item.parent !== \"undefined\") {\n item.parent = parent;\n }\n }\n } else if (typeof value.parent !== \"undefined\") {\n value.parent = parent;\n }\n return true;\n },\n });\n}\n","import { syntaxError } from \"./error.js\";\nimport { unescape } from \"./productions/helpers.js\";\n\n// These regular expressions use the sticky flag so they will only match at\n// the current location (ie. the offset of lastIndex).\nconst tokenRe = {\n // This expression uses a lookahead assertion to catch false matches\n // against integers early.\n decimal:\n /-?(?=[0-9]*\\.|[0-9]+[eE])(([0-9]+\\.[0-9]*|[0-9]*\\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,\n integer: /-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,\n identifier: /[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,\n string: /\"[^\"]*\"/y,\n whitespace: /[\\t\\n\\r ]+/y,\n comment: /\\/\\/.*|\\/\\*[\\s\\S]*?\\*\\//y,\n other: /[^\\t\\n\\r 0-9A-Za-z]/y,\n};\n\nexport const typeNameKeywords = [\n \"ArrayBuffer\",\n \"DataView\",\n \"Int8Array\",\n \"Int16Array\",\n \"Int32Array\",\n \"Uint8Array\",\n \"Uint16Array\",\n \"Uint32Array\",\n \"Uint8ClampedArray\",\n \"BigInt64Array\",\n \"BigUint64Array\",\n \"Float32Array\",\n \"Float64Array\",\n \"any\",\n \"object\",\n \"symbol\",\n];\n\nexport const stringTypes = [\"ByteString\", \"DOMString\", \"USVString\"];\n\nexport const argumentNameKeywords = [\n \"async\",\n \"attribute\",\n \"callback\",\n \"const\",\n \"constructor\",\n \"deleter\",\n \"dictionary\",\n \"enum\",\n \"getter\",\n \"includes\",\n \"inherit\",\n \"interface\",\n \"iterable\",\n \"maplike\",\n \"namespace\",\n \"partial\",\n \"required\",\n \"setlike\",\n \"setter\",\n \"static\",\n \"stringifier\",\n \"typedef\",\n \"unrestricted\",\n];\n\nconst nonRegexTerminals = [\n \"-Infinity\",\n \"FrozenArray\",\n \"Infinity\",\n \"NaN\",\n \"ObservableArray\",\n \"Promise\",\n \"bigint\",\n \"boolean\",\n \"byte\",\n \"double\",\n \"false\",\n \"float\",\n \"long\",\n \"mixin\",\n \"null\",\n \"octet\",\n \"optional\",\n \"or\",\n \"readonly\",\n \"record\",\n \"sequence\",\n \"short\",\n \"true\",\n \"undefined\",\n \"unsigned\",\n \"void\",\n].concat(argumentNameKeywords, stringTypes, typeNameKeywords);\n\nconst punctuations = [\n \"(\",\n \")\",\n \",\",\n \"...\",\n \":\",\n \";\",\n \"<\",\n \"=\",\n \">\",\n \"?\",\n \"*\",\n \"[\",\n \"]\",\n \"{\",\n \"}\",\n];\n\nconst reserved = [\n // \"constructor\" is now a keyword\n \"_constructor\",\n \"toString\",\n \"_toString\",\n];\n\n/**\n * @typedef {ArrayItemType<ReturnType<typeof tokenise>>} Token\n * @param {string} str\n */\nfunction tokenise(str) {\n const tokens = [];\n let lastCharIndex = 0;\n let trivia = \"\";\n let line = 1;\n let index = 0;\n while (lastCharIndex < str.length) {\n const nextChar = str.charAt(lastCharIndex);\n let result = -1;\n\n if (/[\\t\\n\\r ]/.test(nextChar)) {\n result = attemptTokenMatch(\"whitespace\", { noFlushTrivia: true });\n } else if (nextChar === \"/\") {\n result = attemptTokenMatch(\"comment\", { noFlushTrivia: true });\n }\n\n if (result !== -1) {\n const currentTrivia = tokens.pop().value;\n line += (currentTrivia.match(/\\n/g) || []).length;\n trivia += currentTrivia;\n index -= 1;\n } else if (/[-0-9.A-Z_a-z]/.test(nextChar)) {\n result = 
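The token regexes above all carry the sticky `y` flag, so each one matches only at the exact offset stored in `lastIndex` instead of scanning ahead. A minimal sketch of that technique, independent of webidl2 (the sample input is illustrative):

// A sticky regex is anchored at lastIndex: it either matches right there
// or returns null, which lets the tokeniser try one rule after another in place.
const identifier = /[_-]?[A-Za-z][0-9A-Z_a-z-]*/y;
const input = "interface Foo";

identifier.lastIndex = 10; // points at "Foo"
console.log(identifier.exec(input)[0]); // "Foo"

identifier.lastIndex = 9; // points at the space: no match, no scanning ahead
console.log(identifier.exec(input)); // null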
attemptTokenMatch(\"decimal\");\n if (result === -1) {\n result = attemptTokenMatch(\"integer\");\n }\n if (result === -1) {\n result = attemptTokenMatch(\"identifier\");\n const lastIndex = tokens.length - 1;\n const token = tokens[lastIndex];\n if (result !== -1) {\n if (reserved.includes(token.value)) {\n const message = `${unescape(\n token.value\n )} is a reserved identifier and must not be used.`;\n throw new WebIDLParseError(\n syntaxError(tokens, lastIndex, null, message)\n );\n } else if (nonRegexTerminals.includes(token.value)) {\n token.type = \"inline\";\n }\n }\n }\n } else if (nextChar === '\"') {\n result = attemptTokenMatch(\"string\");\n }\n\n for (const punctuation of punctuations) {\n if (str.startsWith(punctuation, lastCharIndex)) {\n tokens.push({\n type: \"inline\",\n value: punctuation,\n trivia,\n line,\n index,\n });\n trivia = \"\";\n lastCharIndex += punctuation.length;\n result = lastCharIndex;\n break;\n }\n }\n\n // other as the last try\n if (result === -1) {\n result = attemptTokenMatch(\"other\");\n }\n if (result === -1) {\n throw new Error(\"Token stream not progressing\");\n }\n lastCharIndex = result;\n index += 1;\n }\n\n // remaining trivia as eof\n tokens.push({\n type: \"eof\",\n value: \"\",\n trivia,\n line,\n index,\n });\n\n return tokens;\n\n /**\n * @param {keyof typeof tokenRe} type\n * @param {object} options\n * @param {boolean} [options.noFlushTrivia]\n */\n function attemptTokenMatch(type, { noFlushTrivia } = {}) {\n const re = tokenRe[type];\n re.lastIndex = lastCharIndex;\n const result = re.exec(str);\n if (result) {\n tokens.push({ type, value: result[0], trivia, line, index });\n if (!noFlushTrivia) {\n trivia = \"\";\n }\n return re.lastIndex;\n }\n return -1;\n }\n}\n\nexport class Tokeniser {\n /**\n * @param {string} idl\n */\n constructor(idl) {\n this.source = tokenise(idl);\n this.position = 0;\n }\n\n /**\n * @param {string} message\n * @return {never}\n */\n error(message) {\n throw new WebIDLParseError(\n syntaxError(this.source, this.position, this.current, message)\n );\n }\n\n /**\n * @param {string} type\n */\n probeKind(type) {\n return (\n this.source.length > this.position &&\n this.source[this.position].type === type\n );\n }\n\n /**\n * @param {string} value\n */\n probe(value) {\n return (\n this.probeKind(\"inline\") && this.source[this.position].value === value\n );\n }\n\n /**\n * @param {...string} candidates\n */\n consumeKind(...candidates) {\n for (const type of candidates) {\n if (!this.probeKind(type)) continue;\n const token = this.source[this.position];\n this.position++;\n return token;\n }\n }\n\n /**\n * @param {...string} candidates\n */\n consume(...candidates) {\n if (!this.probeKind(\"inline\")) return;\n const token = this.source[this.position];\n for (const value of candidates) {\n if (token.value !== value) continue;\n this.position++;\n return token;\n }\n }\n\n /**\n * @param {string} value\n */\n consumeIdentifier(value) {\n if (!this.probeKind(\"identifier\")) {\n return;\n }\n if (this.source[this.position].value !== value) {\n return;\n }\n return this.consumeKind(\"identifier\");\n }\n\n /**\n * @param {number} position\n */\n unconsume(position) {\n this.position = position;\n }\n}\n\nexport class WebIDLParseError extends Error {\n /**\n * @param {object} options\n * @param {string} options.message\n * @param {string} options.bareMessage\n * @param {string} options.context\n * @param {number} options.line\n * @param {*} options.sourceName\n * @param {string} options.input\n * @param 
{*[]} options.tokens\n */\n constructor({\n message,\n bareMessage,\n context,\n line,\n sourceName,\n input,\n tokens,\n }) {\n super(message);\n\n this.name = \"WebIDLParseError\"; // not to be mangled\n this.bareMessage = bareMessage;\n this.context = context;\n this.line = line;\n this.sourceName = sourceName;\n this.input = input;\n this.tokens = tokens;\n }\n}\n","import { list, unescape, autoParenter } from \"./helpers.js\";\nimport { WrappedToken } from \"./token.js\";\nimport { Base } from \"./base.js\";\n\nclass EnumValue extends WrappedToken {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consumeKind(\"string\");\n if (value) {\n return new EnumValue({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"enum-value\";\n }\n get value() {\n return super.value.slice(1, -1);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.wrap([\n w.ts.trivia(this.tokens.value.trivia),\n w.ts.definition(\n w.ts.wrap(['\"', w.ts.name(this.value, { data: this, parent }), '\"']),\n { data: this, parent }\n ),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\nexport class Enum extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n tokens.base = tokeniser.consume(\"enum\");\n if (!tokens.base) {\n return;\n }\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"No name for enum\");\n const ret = autoParenter(new Enum({ source: tokeniser.source, tokens }));\n tokeniser.current = ret.this;\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(\"Bodyless enum\");\n ret.values = list(tokeniser, {\n parser: EnumValue.parse,\n allowDangler: true,\n listName: \"enumeration\",\n });\n if (tokeniser.probeKind(\"string\")) {\n tokeniser.error(\"No comma between enum values\");\n }\n tokens.close =\n tokeniser.consume(\"}\") || tokeniser.error(\"Unexpected value in enum\");\n if (!ret.values.length) {\n tokeniser.error(\"No value in enum\");\n }\n tokens.termination =\n tokeniser.consume(\";\") || tokeniser.error(\"No semicolon after enum\");\n return ret.this;\n }\n\n get type() {\n return \"enum\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.open),\n w.ts.wrap(this.values.map((v) => v.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","// @ts-check\n\nimport { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class Includes extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const target = tokeniser.consumeKind(\"identifier\");\n if (!target) {\n return;\n }\n const tokens = { target };\n tokens.includes = tokeniser.consume(\"includes\");\n if (!tokens.includes) {\n tokeniser.unconsume(target.index);\n return;\n }\n tokens.mixin =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Incomplete includes statement\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"No terminating ; for includes statement\");\n return new Includes({ source: tokeniser.source, 
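Going by the Enum production above, a minimal round trip through the parser; the bare "webidl2" import path assumes the package entry point re-exports `parse` from these lib sources:

import { parse } from "webidl2";

const [enumDef] = parse(`enum Mode { "open", "closed" };`);
console.log(enumDef.type); // "enum"
console.log(enumDef.name); // "Mode"
console.log(enumDef.values.map((v) => v.value)); // ["open", "closed"] - the getter strips the quotes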
tokens });\n }\n\n get type() {\n return \"includes\";\n }\n get target() {\n return unescape(this.tokens.target.value);\n }\n get includes() {\n return unescape(this.tokens.mixin.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.reference_token(this.tokens.target, this),\n w.token(this.tokens.includes),\n w.reference_token(this.tokens.mixin, this),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class Typedef extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(new Typedef({ source: tokeniser.source, tokens }));\n tokens.base = tokeniser.consume(\"typedef\");\n if (!tokens.base) {\n return;\n }\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"typedef-type\") ||\n tokeniser.error(\"Typedef lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Typedef lacks a name\");\n tokeniser.current = ret.this;\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated typedef, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"typedef\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n return_type,\n argument_list,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class CallbackFunction extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base) {\n const tokens = { base };\n const ret = autoParenter(\n new CallbackFunction({ source: tokeniser.source, tokens })\n );\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Callback lacks a name\");\n tokeniser.current = ret.this;\n tokens.assign =\n tokeniser.consume(\"=\") || tokeniser.error(\"Callback lacks an assignment\");\n ret.idlType =\n return_type(tokeniser) || tokeniser.error(\"Callback lacks a return type\");\n tokens.open =\n tokeniser.consume(\"(\") ||\n tokeniser.error(\"Callback lacks parentheses for arguments\");\n ret.arguments = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated callback\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated callback, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"callback\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.assign),\n w.ts.type(this.idlType.write(w)),\n w.token(this.tokens.open),\n 
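A sketch of what the CallbackFunction production above accepts (import path assumed as before; the `undefined` return type assumes this webidl2 vintage already accepts it in place of the deprecated `void`):

import { parse } from "webidl2";

const [cb] = parse(`callback Done = undefined (DOMString reason);`);
console.log(cb.type); // "callback"
console.log(cb.name); // "Done"
console.log(cb.arguments.length); // 1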
...this.arguments.map((arg) => arg.write(w)),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { unescape, autoParenter } from \"./helpers.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction inheritance(tokeniser) {\n const colon = tokeniser.consume(\":\");\n if (!colon) {\n return {};\n }\n const inheritance =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Inheritance lacks a type\");\n return { colon, inheritance };\n}\n\nexport class Container extends Base {\n /**\n * @template T\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {T} instance\n * @param {*} args\n */\n static parse(tokeniser, instance, { inheritable, allowedMembers }) {\n const { tokens, type } = instance;\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(`Missing name in ${type}`);\n tokeniser.current = instance;\n instance = autoParenter(instance);\n if (inheritable) {\n Object.assign(tokens, inheritance(tokeniser));\n }\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(`Bodyless ${type}`);\n instance.members = [];\n while (true) {\n tokens.close = tokeniser.consume(\"}\");\n if (tokens.close) {\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(`Missing semicolon after ${type}`);\n return instance.this;\n }\n const ea = ExtendedAttributes.parse(tokeniser);\n let mem;\n for (const [parser, ...args] of allowedMembers) {\n mem = autoParenter(parser(tokeniser, ...args));\n if (mem) {\n break;\n }\n }\n if (!mem) {\n tokeniser.error(\"Unknown member\");\n }\n mem.extAttrs = ea;\n instance.members.push(mem.this);\n }\n }\n\n get partial() {\n return !!this.tokens.partial;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get inheritance() {\n if (!this.tokens.inheritance) {\n return null;\n }\n return unescape(this.tokens.inheritance.value);\n }\n\n *validate(defs) {\n for (const member of this.members) {\n if (member.validate) {\n yield* member.validate(defs);\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const inheritance = () => {\n if (!this.tokens.inheritance) {\n return \"\";\n }\n return w.ts.wrap([\n w.token(this.tokens.colon),\n w.ts.trivia(this.tokens.inheritance.trivia),\n w.ts.inheritance(\n w.reference(this.tokens.inheritance.value, { context: this })\n ),\n ]);\n };\n\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.callback),\n w.token(this.tokens.partial),\n w.token(this.tokens.base),\n w.token(this.tokens.mixin),\n w.name_token(this.tokens.name, { data: this }),\n inheritance(),\n w.token(this.tokens.open),\n w.ts.wrap(this.members.map((m) => m.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { Type } from \"./type.js\";\nimport {\n const_data,\n const_value,\n primitive_type,\n autoParenter,\n unescape,\n} from \"./helpers.js\";\n\nexport class Constant extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n tokens.base = tokeniser.consume(\"const\");\n if (!tokens.base) {\n return;\n }\n let idlType = primitive_type(tokeniser);\n if (!idlType) {\n const base =\n tokeniser.consumeKind(\"identifier\") 
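Container is the shared plumbing for every named, braced definition: it reads the name, the optional `: Parent` clause via `inheritance()`, then loops over `allowedMembers` until the closing brace. A sketch of what that yields for a simple interface (import path assumed as before):

import { parse } from "webidl2";

const [iface] = parse(`[Exposed=Window] interface Child : Parent {};`);
console.log(iface.inheritance); // "Parent"
console.log(iface.partial); // false
console.log(iface.members); // []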
||\n tokeniser.error(\"Const lacks a type\");\n idlType = new Type({ source: tokeniser.source, tokens: { base } });\n }\n if (tokeniser.probe(\"?\")) {\n tokeniser.error(\"Unexpected nullable constant type\");\n }\n idlType.type = \"const-type\";\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Const lacks a name\");\n tokens.assign =\n tokeniser.consume(\"=\") || tokeniser.error(\"Const lacks value assignment\");\n tokens.value =\n const_value(tokeniser) || tokeniser.error(\"Const lacks a value\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated const, expected `;`\");\n const ret = new Constant({ source: tokeniser.source, tokens });\n autoParenter(ret).idlType = idlType;\n return ret;\n }\n\n get type() {\n return \"const\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get value() {\n return const_data(this.tokens.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.assign),\n w.token(this.tokens.value),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n autoParenter,\n argument_list,\n} from \"./helpers.js\";\n\nexport class IterableLike extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n const tokens = {};\n const ret = autoParenter(\n new IterableLike({ source: tokeniser.source, tokens })\n );\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (!tokens.readonly) {\n tokens.async = tokeniser.consume(\"async\");\n }\n tokens.base = tokens.readonly\n ? tokeniser.consume(\"maplike\", \"setlike\")\n : tokens.async\n ? 
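What the Constant production above yields for a typical member (import path assumed as before; the value shape follows the `const_data` helper):

import { parse } from "webidl2";

const [iface] = parse(
  `[Exposed=Window] interface Flags { const unsigned long MAX = 64; };`
);
const [konst] = iface.members;
console.log(konst.type); // "const"
console.log(konst.name); // "MAX"
console.log(konst.value); // { type: "number", value: "64" }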
tokeniser.consume(\"iterable\")\n : tokeniser.consume(\"iterable\", \"maplike\", \"setlike\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n\n const { type } = ret;\n const secondTypeRequired = type === \"maplike\";\n const secondTypeAllowed = secondTypeRequired || type === \"iterable\";\n const argumentAllowed = ret.async && type === \"iterable\";\n\n tokens.open =\n tokeniser.consume(\"<\") ||\n tokeniser.error(`Missing less-than sign \\`<\\` in ${type} declaration`);\n const first =\n type_with_extended_attributes(tokeniser) ||\n tokeniser.error(`Missing a type argument in ${type} declaration`);\n ret.idlType = [first];\n ret.arguments = [];\n\n if (secondTypeAllowed) {\n first.tokens.separator = tokeniser.consume(\",\");\n if (first.tokens.separator) {\n ret.idlType.push(type_with_extended_attributes(tokeniser));\n } else if (secondTypeRequired) {\n tokeniser.error(`Missing second type argument in ${type} declaration`);\n }\n }\n\n tokens.close =\n tokeniser.consume(\">\") ||\n tokeniser.error(`Missing greater-than sign \\`>\\` in ${type} declaration`);\n\n if (tokeniser.probe(\"(\")) {\n if (argumentAllowed) {\n tokens.argsOpen = tokeniser.consume(\"(\");\n ret.arguments.push(...argument_list(tokeniser));\n tokens.argsClose =\n tokeniser.consume(\")\") ||\n tokeniser.error(\"Unterminated async iterable argument list\");\n } else {\n tokeniser.error(`Arguments are only allowed for \\`async iterable\\``);\n }\n }\n\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(`Missing semicolon after ${type} declaration`);\n\n return ret.this;\n }\n\n get type() {\n return this.tokens.base.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get async() {\n return !!this.tokens.async;\n }\n\n *validate(defs) {\n for (const type of this.idlType) {\n yield* type.validate(defs);\n }\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.readonly),\n w.token(this.tokens.async),\n w.token(this.tokens.base, w.ts.generic),\n w.token(this.tokens.open),\n w.ts.wrap(this.idlType.map((t) => t.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.argsOpen),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.argsClose),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent: this.parent }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { argument_list, autoParenter } from \"./helpers.js\";\n\nexport class Constructor extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const base = tokeniser.consume(\"constructor\");\n if (!base) {\n return;\n }\n /** @type {Base[\"tokens\"]} */\n const tokens = { base };\n tokens.open =\n tokeniser.consume(\"(\") ||\n tokeniser.error(\"No argument list in constructor\");\n const args = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated constructor\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"No semicolon after constructor\");\n const ret = new Constructor({ source: tokeniser.source, tokens });\n autoParenter(ret).arguments = args;\n return ret;\n }\n\n get type() {\n return \"constructor\";\n }\n\n *validate(defs) {\n if (this.idlType) {\n yield* this.idlType.validate(defs);\n }\n for (const argument of this.arguments) {\n 
yield* argument.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base, w.ts.nameless, { data: this, parent }),\n w.token(this.tokens.open),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\nimport { IterableLike } from \"./iterable.js\";\nimport {\n stringifier,\n autofixAddExposedWindow,\n getMemberIndentation,\n getLastIndentation,\n getFirstToken,\n findLastIndex,\n autoParenter,\n} from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\nimport { checkInterfaceMemberDuplication } from \"../validators/interface.js\";\nimport { Constructor } from \"./constructor.js\";\nimport { Tokeniser } from \"../tokeniser.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction static_member(tokeniser) {\n const special = tokeniser.consume(\"static\");\n if (!special) return;\n const member =\n Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"No body in static member\");\n return member;\n}\n\nexport class Interface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial = null } = {}) {\n const tokens = { partial, base };\n return Container.parse(\n tokeniser,\n new Interface({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Constructor.parse],\n [static_member],\n [stringifier],\n [IterableLike.parse],\n [Attribute.parse],\n [Operation.parse],\n ],\n }\n );\n }\n\n get type() {\n return \"interface\";\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n if (\n !this.partial &&\n this.extAttrs.every((extAttr) => extAttr.name !== \"Exposed\")\n ) {\n const message = `Interfaces must have \\`[Exposed]\\` extended attribute. \\\nTo fix, add, for example, \\`[Exposed=Window]\\`. Please also consider carefully \\\nif your interface should also be exposed in a Worker scope. Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(\n this.tokens.name,\n this,\n \"require-exposed\",\n message,\n {\n autofix: autofixAddExposedWindow(this),\n }\n );\n }\n const oldConstructors = this.extAttrs.filter(\n (extAttr) => extAttr.name === \"Constructor\"\n );\n for (const constructor of oldConstructors) {\n const message = `Constructors should now be represented as a \\`constructor()\\` operation on the interface \\\ninstead of \\`[Constructor]\\` extended attribute. 
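The `require-exposed` rule above ships an autofix. A sketch of driving it by hand, assuming the package entry also re-exports `validate` and `write` from these lib sources:

import { parse, validate, write } from "webidl2";

const ast = parse(`interface NotExposed {};`);
for (const err of validate(ast)) {
  if (err.autofix) err.autofix(); // the require-exposed fix prepends [Exposed=Window]
}
console.log(write(ast)); // "[Exposed=Window]\ninterface NotExposed {};"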
Refer to the \\\n[WebIDL spec section on constructor operations](https://heycam.github.io/webidl/#idl-constructors) \\\nfor more information.`;\n yield validationError(\n constructor.tokens.name,\n this,\n \"constructor-member\",\n message,\n {\n autofix: autofixConstructor(this, constructor),\n }\n );\n }\n\n const isGlobal = this.extAttrs.some((extAttr) => extAttr.name === \"Global\");\n if (isGlobal) {\n const factoryFunctions = this.extAttrs.filter(\n (extAttr) => extAttr.name === \"LegacyFactoryFunction\"\n );\n for (const named of factoryFunctions) {\n const message = `Interfaces marked as \\`[Global]\\` cannot have factory functions.`;\n yield validationError(\n named.tokens.name,\n this,\n \"no-constructible-global\",\n message\n );\n }\n\n const constructors = this.members.filter(\n (member) => member.type === \"constructor\"\n );\n for (const named of constructors) {\n const message = `Interfaces marked as \\`[Global]\\` cannot have constructors.`;\n yield validationError(\n named.tokens.base,\n this,\n \"no-constructible-global\",\n message\n );\n }\n }\n\n yield* super.validate(defs);\n if (!this.partial) {\n yield* checkInterfaceMemberDuplication(defs, this);\n }\n }\n}\n\nfunction autofixConstructor(interfaceDef, constructorExtAttr) {\n interfaceDef = autoParenter(interfaceDef);\n return () => {\n const indentation = getLastIndentation(\n interfaceDef.extAttrs.tokens.open.trivia\n );\n const memberIndent = interfaceDef.members.length\n ? getLastIndentation(getFirstToken(interfaceDef.members[0]).trivia)\n : getMemberIndentation(indentation);\n const constructorOp = Constructor.parse(\n new Tokeniser(`\\n${memberIndent}constructor();`)\n );\n constructorOp.extAttrs = new ExtendedAttributes({});\n autoParenter(constructorOp).arguments = constructorExtAttr.arguments;\n\n const existingIndex = findLastIndex(\n interfaceDef.members,\n (m) => m.type === \"constructor\"\n );\n interfaceDef.members.splice(existingIndex + 1, 0, constructorOp);\n\n const { close } = interfaceDef.tokens;\n if (!close.trivia.includes(\"\\n\")) {\n close.trivia += `\\n${indentation}`;\n }\n\n const { extAttrs } = interfaceDef;\n const index = extAttrs.indexOf(constructorExtAttr);\n const removed = extAttrs.splice(index, 1);\n if (!extAttrs.length) {\n extAttrs.tokens.open = extAttrs.tokens.close = undefined;\n } else if (extAttrs.length === index) {\n extAttrs[index - 1].tokens.separator = undefined;\n } else if (!extAttrs[index].tokens.name.trivia.trim()) {\n extAttrs[index].tokens.name.trivia = removed[0].tokens.name.trivia;\n }\n };\n}\n","// @ts-check\n\nimport { validationError } from \"../error.js\";\n\nexport function* checkInterfaceMemberDuplication(defs, i) {\n const opNames = new Set(getOperations(i).map((op) => op.name));\n const partials = defs.partials.get(i.name) || [];\n const mixins = defs.mixinMap.get(i.name) || [];\n for (const ext of [...partials, ...mixins]) {\n const additions = getOperations(ext);\n yield* forEachExtension(additions, opNames, ext, i);\n for (const addition of additions) {\n opNames.add(addition.name);\n }\n }\n\n function* forEachExtension(additions, existings, ext, base) {\n for (const addition of additions) {\n const { name } = addition;\n if (name && existings.has(name)) {\n const message = `The operation \"${name}\" has already been defined for the base interface \"${base.name}\" either in itself or in a mixin`;\n yield validationError(\n addition.tokens.name,\n ext,\n \"no-cross-overload\",\n message\n );\n }\n }\n }\n\n function getOperations(i) {\n return 
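The `no-cross-overload` check above rejects an operation name that appears both on a base interface and on one of its mixins. Sketch (imports assumed as before):

import { parse, validate } from "webidl2";

const ast = parse(`
  [Exposed=Window] interface A { undefined run(); };
  interface mixin B { undefined run(); };
  A includes B;
`);
console.log(validate(ast).map((e) => e.ruleName)); // ["no-cross-overload"]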
i.members.filter(({ type }) => type === \"operation\");\n }\n}\n","import { Container } from \"./container.js\";\nimport { Constant } from \"./constant.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { stringifier } from \"./helpers.js\";\n\nexport class Mixin extends Container {\n /**\n * @typedef {import(\"../tokeniser.js\").Token} Token\n *\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {Token} base\n * @param {object} [options]\n * @param {Token} [options.partial]\n */\n static parse(tokeniser, base, { partial } = {}) {\n const tokens = { partial, base };\n tokens.mixin = tokeniser.consume(\"mixin\");\n if (!tokens.mixin) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Mixin({ source: tokeniser.source, tokens }),\n {\n allowedMembers: [\n [Constant.parse],\n [stringifier],\n [Attribute.parse, { noInherit: true }],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"interface mixin\";\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n autoParenter,\n} from \"./helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { Default } from \"./default.js\";\n\nexport class Field extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(new Field({ source: tokeniser.source, tokens }));\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.required = tokeniser.consume(\"required\");\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"dictionary-type\") ||\n tokeniser.error(\"Dictionary member lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Dictionary member lacks a name\");\n ret.default = Default.parse(tokeniser);\n if (tokens.required && ret.default)\n tokeniser.error(\"Required member must not have a default\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated dictionary member, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"field\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get required() {\n return !!this.tokens.required;\n }\n\n *validate(defs) {\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.required),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n this.default ? 
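The Field production above rejects a `required` dictionary member that also carries a default. A sketch of both sides (import path assumed as before):

import { parse } from "webidl2";

parse(`dictionary D { required long id; long retries = 3; };`); // fine

try {
  parse(`dictionary D { required long id = 1; };`);
} catch (err) {
  console.log(err.bareMessage); // "Required member must not have a default"
}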
this.default.write(w) : \"\",\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","// @ts-check\n\nimport { Container } from \"./container.js\";\nimport { Field } from \"./field.js\";\n\nexport class Dictionary extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.partial]\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"dictionary\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Dictionary({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [[Field.parse]],\n }\n );\n }\n\n get type() {\n return \"dictionary\";\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { validationError } from \"../error.js\";\nimport { autofixAddExposedWindow } from \"./helpers.js\";\nimport { Constant } from \"./constant.js\";\n\nexport class Namespace extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.partial]\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"namespace\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Namespace({ source: tokeniser.source, tokens }),\n {\n allowedMembers: [\n [Attribute.parse, { noInherit: true, readonly: true }],\n [Constant.parse],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"namespace\";\n }\n\n *validate(defs) {\n if (\n !this.partial &&\n this.extAttrs.every((extAttr) => extAttr.name !== \"Exposed\")\n ) {\n const message = `Namespaces must have [Exposed] extended attribute. \\\nTo fix, add, for example, [Exposed=Window]. Please also consider carefully \\\nif your namespace should also be exposed in a Worker scope. 
Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(\n this.tokens.name,\n this,\n \"require-exposed\",\n message,\n {\n autofix: autofixAddExposedWindow(this),\n }\n );\n }\n yield* super.validate(defs);\n }\n}\n","// @ts-check\n\nimport { Container } from \"./container.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\n\nexport class CallbackInterface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, callback, { partial = null } = {}) {\n const tokens = { callback };\n tokens.base = tokeniser.consume(\"interface\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new CallbackInterface({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"callback interface\";\n }\n}\n","import { Tokeniser } from \"./tokeniser.js\";\nimport { Enum } from \"./productions/enum.js\";\nimport { Includes } from \"./productions/includes.js\";\nimport { ExtendedAttributes } from \"./productions/extended-attributes.js\";\nimport { Typedef } from \"./productions/typedef.js\";\nimport { CallbackFunction } from \"./productions/callback.js\";\nimport { Interface } from \"./productions/interface.js\";\nimport { Mixin } from \"./productions/mixin.js\";\nimport { Dictionary } from \"./productions/dictionary.js\";\nimport { Namespace } from \"./productions/namespace.js\";\nimport { CallbackInterface } from \"./productions/callback-interface.js\";\nimport { autoParenter } from \"./productions/helpers.js\";\nimport { Eof } from \"./productions/token.js\";\n\n/**\n * @param {Tokeniser} tokeniser\n * @param {object} options\n * @param {boolean} [options.concrete]\n */\nfunction parseByTokens(tokeniser, options) {\n const source = tokeniser.source;\n\n function error(str) {\n tokeniser.error(str);\n }\n\n function consume(...candidates) {\n return tokeniser.consume(...candidates);\n }\n\n function callback() {\n const callback = consume(\"callback\");\n if (!callback) return;\n if (tokeniser.probe(\"interface\")) {\n return CallbackInterface.parse(tokeniser, callback);\n }\n return CallbackFunction.parse(tokeniser, callback);\n }\n\n function interface_(opts) {\n const base = consume(\"interface\");\n if (!base) return;\n const ret =\n Mixin.parse(tokeniser, base, opts) ||\n Interface.parse(tokeniser, base, opts) ||\n error(\"Interface has no proper body\");\n return ret;\n }\n\n function partial() {\n const partial = consume(\"partial\");\n if (!partial) return;\n return (\n Dictionary.parse(tokeniser, { partial }) ||\n interface_({ partial }) ||\n Namespace.parse(tokeniser, { partial }) ||\n error(\"Partial doesn't apply to anything\")\n );\n }\n\n function definition() {\n if (options.productions) {\n for (const production of options.productions) {\n const result = production(tokeniser);\n if (result) {\n return result;\n }\n }\n }\n\n return (\n callback() ||\n interface_() ||\n partial() ||\n Dictionary.parse(tokeniser) ||\n Enum.parse(tokeniser) ||\n Typedef.parse(tokeniser) ||\n Includes.parse(tokeniser) ||\n Namespace.parse(tokeniser)\n );\n }\n\n function definitions() {\n if (!source.length) return [];\n const defs = [];\n while (true) {\n const ea = ExtendedAttributes.parse(tokeniser);\n const def = definition();\n if (!def) {\n if (ea.length) 
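`definitions()` keeps pairing extended attributes with the next definition until the source runs out, and with `options.concrete` the EOF token (which holds the trailing trivia) is kept in the AST. If the round-trip property of these sources holds, `write` then reproduces the input byte for byte; a sketch under that assumption:

import { parse, write } from "webidl2";

const idl = "[Exposed=Window]\ninterface Foo {\n  attribute DOMString bar;\n};\n";
const ast = parse(idl, { concrete: true });
console.log(write(ast) === idl); // true - trivia and EOF survive the trip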
error(\"Stray extended attributes\");\n break;\n }\n autoParenter(def).extAttrs = ea;\n defs.push(def);\n }\n const eof = Eof.parse(tokeniser);\n if (options.concrete) {\n defs.push(eof);\n }\n return defs;\n }\n const res = definitions();\n if (tokeniser.position < source.length) error(\"Unrecognised tokens\");\n return res;\n}\n\n/**\n * @param {string} str\n * @param {object} [options]\n * @param {*} [options.sourceName]\n * @param {boolean} [options.concrete]\n * @param {Function[]} [options.productions]\n * @return {import(\"./productions/base\").Base[]}\n */\nexport function parse(str, options = {}) {\n const tokeniser = new Tokeniser(str);\n if (typeof options.sourceName !== \"undefined\") {\n tokeniser.source.name = options.sourceName;\n }\n return parseByTokens(tokeniser, options);\n}\n","function noop(arg) {\n return arg;\n}\n\nconst templates = {\n wrap: (items) => items.join(\"\"),\n trivia: noop,\n name: noop,\n reference: noop,\n type: noop,\n generic: noop,\n nameless: noop,\n inheritance: noop,\n definition: noop,\n extendedAttribute: noop,\n extendedAttributeReference: noop,\n};\n\nexport class Writer {\n constructor(ts) {\n this.ts = Object.assign({}, templates, ts);\n }\n\n reference(raw, { unescaped, context }) {\n if (!unescaped) {\n unescaped = raw.startsWith(\"_\") ? raw.slice(1) : raw;\n }\n return this.ts.reference(raw, unescaped, context);\n }\n\n token(t, wrapper = noop, ...args) {\n if (!t) {\n return \"\";\n }\n const value = wrapper(t.value, ...args);\n return this.ts.wrap([this.ts.trivia(t.trivia), value]);\n }\n\n reference_token(t, context) {\n return this.token(t, this.reference.bind(this), { context });\n }\n\n name_token(t, arg) {\n return this.token(t, this.ts.name, arg);\n }\n\n identifier(id, context) {\n return this.ts.wrap([\n this.reference_token(id.tokens.value, context),\n this.token(id.tokens.separator),\n ]);\n }\n}\n\nexport function write(ast, { templates: ts = templates } = {}) {\n ts = Object.assign({}, templates, ts);\n\n const w = new Writer(ts);\n\n return ts.wrap(ast.map((it) => it.write(w)));\n}\n","// @ts-check\n\nimport { validationError as error } from \"./error.js\";\n\nfunction getMixinMap(all, unique) {\n const map = new Map();\n const includes = all.filter((def) => def.type === \"includes\");\n for (const include of includes) {\n const mixin = unique.get(include.includes);\n if (!mixin) {\n continue;\n }\n const array = map.get(include.target);\n if (array) {\n array.push(mixin);\n } else {\n map.set(include.target, [mixin]);\n }\n }\n return map;\n}\n\n/**\n * @typedef {ReturnType<typeof groupDefinitions>} Definitions\n */\nfunction groupDefinitions(all) {\n const unique = new Map();\n const duplicates = new Set();\n const partials = new Map();\n for (const def of all) {\n if (def.partial) {\n const array = partials.get(def.name);\n if (array) {\n array.push(def);\n } else {\n partials.set(def.name, [def]);\n }\n continue;\n }\n if (!def.name) {\n continue;\n }\n if (!unique.has(def.name)) {\n unique.set(def.name, def);\n } else {\n duplicates.add(def);\n }\n }\n return {\n all,\n unique,\n partials,\n duplicates,\n mixinMap: getMixinMap(all, unique),\n cache: {\n typedefIncludesDictionary: new WeakMap(),\n dictionaryIncludesRequiredField: new WeakMap(),\n },\n };\n}\n\nfunction* checkDuplicatedNames({ unique, duplicates }) {\n for (const dup of duplicates) {\n const { name } = dup;\n const message = `The name \"${name}\" of type \"${\n unique.get(name).type\n }\" was already seen`;\n yield error(dup.tokens.name, dup, 
\"no-duplicate\", message);\n }\n}\n\nfunction* validateIterable(ast) {\n const defs = groupDefinitions(ast);\n for (const def of defs.all) {\n if (def.validate) {\n yield* def.validate(defs);\n }\n }\n yield* checkDuplicatedNames(defs);\n}\n\n// Remove this once all of our support targets expose `.flat()` by default\nfunction flatten(array) {\n if (array.flat) {\n return array.flat();\n }\n return [].concat(...array);\n}\n\n/**\n * @param {import(\"./productions/base\").Base[]} ast\n * @return {import(\"./error\").WebIDLErrorData[]} validation errors\n */\nexport function validate(ast) {\n return [...validateIterable(flatten(ast))];\n}\n"],"names":["root","factory","exports","module","define","amd","globalThis","__webpack_require__","definition","key","o","Object","defineProperty","enumerable","get","obj","prop","prototype","hasOwnProperty","call","Symbol","toStringTag","value","error","source","position","current","message","kind","level","autofix","ruleName","sliceTokens","count","slice","Math","max","tokensToText","inputs","precedes","text","map","t","trivia","join","nextToken","type","length","line","precedingLastLine","splitted","split","lastLine","subsequentTokens","subsequentText","sourceContext","repeat","contextType","context","name","partial","node","hierarchy","parent","unshift","n","base","target","result","appendIfExist","contextAsText","bareMessage","sourceName","input","tokens","syntaxError","validationError","token","options","index","Base","constructor","defineProperties","this","writable","toJSON","json","undefined","inheritance","proto","descMap","getOwnPropertyDescriptors","entries","getPrototypeOf","idlTypeIncludesDictionary","idlType","defs","useNullableInner","union","def","unique","typedefIncludesDictionary","cache","has","set","reference","dictionary","nullable","subtype","dictionaryIncludesRequiredField","dict","members","some","field","required","superdict","ArrayBase","Array","super","WrappedToken","tokeniser","consumeKind","write","w","ts","wrap","separator","Eof","tokenName","list","parser","listName","extAttrValueSyntax","renamedLegacies","Map","extAttrListItems","syntax","toks","ExtendedAttributeParameters","assign","consume","ret","autoParenter","asterisk","secondaryName","open","rhsIsList","argument_list","close","hasRhs","rhsType","reference_token","p","identifier","SimpleExtendedAttribute","params","parse","extAttr","arg","arguments","validate","extendedAttribute","extendedAttributeReference","ExtendedAttributes","push","probe","ea","type_suffix","single_type","typeName","Type","return_type","type_with_extended_attributes","keyType","stringTypes","keyIdlType","valueType","generic_type","primitive_type","typeNameKeywords","generic","typ","or","union_type","extAttrs","Boolean","prefix","postfix","filter","typedef","targetToken","firstToken","ref","unescaped","type_body","Default","const_value","expression","const_data","negative","Argument","start_position","optional","variadic","argumentNameKeywords","default","unconsume","autofixOptionalDictionaryDefaultValue","indexOf","a","isLastRequiredArgument","getFirstToken","name_token","data","Tokeniser","Operation","special","regular","termination","includes","argument","body","nameless","Attribute","noInherit","readonly","startsWith","allowDangler","first","items","item","num_type","integer_type","decimal_type","voidToken","stringifier","getLastIndentation","str","lines","match","autofixAddExposedWindow","exposed","existing","test","values","sort","x","y","Proxy","isArray","tokenRe","decimal","integer","string"
,"whitespace","comment","other","nonRegexTerminals","concat","punctuations","reserved","idl","lastCharIndex","nextChar","charAt","attemptTokenMatch","noFlushTrivia","currentTrivia","pop","lastIndex","WebIDLParseError","punctuation","Error","re","exec","tokenise","probeKind","candidates","consumeIdentifier","EnumValue","Enum","v","Includes","mixin","Typedef","CallbackFunction","Container","instance","inheritable","allowedMembers","colon","mem","args","member","callback","m","Constant","IterableLike","async","secondTypeRequired","secondTypeAllowed","argumentAllowed","argsOpen","argsClose","Constructor","static_member","Interface","every","oldConstructors","autofixConstructor","factoryFunctions","named","constructors","i","opNames","Set","getOperations","op","partials","mixins","mixinMap","ext","additions","forEachExtension","addition","add","existings","checkInterfaceMemberDuplication","interfaceDef","constructorExtAttr","indentation","memberIndent","parentTrivia","indentCh","getMemberIndentation","constructorOp","existingIndex","array","predicate","reverse","findIndex","findLastIndex","splice","removed","trim","Mixin","Field","Dictionary","Namespace","CallbackInterface","parseByTokens","interface_","opts","productions","production","res","eof","concrete","definitions","noop","templates","Writer","raw","wrapper","bind","id","ast","it","getMixinMap","all","include","validateIterable","duplicates","WeakMap","groupDefinitions","dup","checkDuplicatedNames","flat"],"sourceRoot":""} \ No newline at end of file
+{"version":3,"file":"webidl2.js","mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAiB,QAAID,IAErBD,EAAc,QAAIC,IARpB,CASGK,YAAY,I,mBCRf,IAAIC,EAAsB,CCA1B,EAAwB,CAACL,EAASM,KACjC,IAAI,IAAIC,KAAOD,EACXD,EAAoBG,EAAEF,EAAYC,KAASF,EAAoBG,EAAER,EAASO,IAC5EE,OAAOC,eAAeV,EAASO,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,MCJ3E,EAAwB,CAACM,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,GCClF,EAAyBd,IACH,oBAAXkB,QAA0BA,OAAOC,aAC1CV,OAAOC,eAAeV,EAASkB,OAAOC,YAAa,CAAEC,MAAO,WAE7DX,OAAOC,eAAeV,EAAS,aAAc,CAAEoB,OAAO,M,KCoCvD,SAASC,EACPC,EACAC,EACAC,EACAC,EACAC,GACA,MAAEC,EAAQ,QAAO,QAAEC,EAAO,SAAEC,GAAa,IAKzC,SAASC,EAAYC,GACnB,OAAOA,EAAQ,EACXT,EAAOU,MAAMT,EAAUA,EAAWQ,GAClCT,EAAOU,MAAMC,KAAKC,IAAIX,EAAWQ,EAAO,GAAIR,GASlD,SAASY,EAAaC,GAAQ,SAAEC,GAAa,IAC3C,MAAMC,EAAOF,EAAOG,KAAKC,GAAMA,EAAEC,OAASD,EAAEpB,QAAOsB,KAAK,IAClDC,EAAYrB,EAAOC,GACzB,MAAuB,QAAnBoB,EAAUC,KACLN,EAELD,EACKC,EAAOK,EAAUF,OAEnBH,EAAKN,MAAMW,EAAUF,OAAOI,QAGrC,MACMC,EACsB,QAA1BxB,EAAOC,GAAUqB,KACbtB,EAAOC,GAAUuB,KACjBxB,EAAOuB,OAAS,EAChBvB,EAAOC,EAAW,GAAGuB,KACrB,EAEAC,EAjFR,SAAkBT,GAChB,MAAMU,EAAWV,EAAKW,MAAM,MAC5B,OAAOD,EAASA,EAASH,OAAS,GA+ERK,CACxBf,EAAaL,GATG,GASsB,CAAEO,UAAU,KAG9Cc,EAAmBrB,EAZP,GAaZsB,EAAiBjB,EAAagB,GAI9BE,EAAgBN,EAHMK,EAAeH,MAAM,MAAM,GAGS,MADjD,IAAIK,OAAOP,EAAkBF,QAAU,KAGhDU,EAAuB,WAAT7B,EAAoB,QAAU,SAQ5C8B,EAAU,GAAG9B,mBAAsBoB,IAPpBxB,EAAOmC,KAAO,OAAOnC,EAAOmC,OAAS,KAExDjC,GAAWA,EAAQiC,KACf,KAAKF,OAAiB/B,EAAQkC,QAAU,WAAa,KAnF7D,SAAuBC,GACrB,MAAMC,EAAY,CAACD,GACnB,KAAOA,GAAQA,EAAKE,QAAQ,CAC1B,MAAM,OAAEA,GAAWF,EACnBC,EAAUE,QAAQD,GAClBF,EAAOE,EAET,OAAOD,EAAUrB,KAAKwB,GAfxB,SAAuBC,EAAMC,GAC3B,IAAIC,EAASF,EAIb,OAHIC,IACFC,GAAU,IAAID,KAETC,EAUqBC,CAAcJ,EAAEnB,KAAMmB,EAAEN,QAAOf,KAAK,QA4EA0B,CACxD5C,OAEF,QACiF6B,IACvF,MAAO,CACL5B,QAAS,GAAG+B,KAAW/B,IACvB4C,YAAa5C,EACb+B,QAAAA,EACAV,KAAAA,EACAwB,WAAYhD,EAAOmC,KACnB9B,MAAAA,EACAE,SAAAA,EACAD,QAAAA,EACA2C,MAAOnB,EACPoB,OAAQrB,GAOL,SAASsB,EAAYnD,EAAQC,EAAUC,EAASC,GACrD,OAAOJ,EAAMC,EAAQC,EAAUC,EAASC,EAAS,UAO5C,SAASiD,EACdC,EACAnD,EACAK,EACAJ,EACAmD,EAAU,IAGV,OADAA,EAAQ/C,SAAWA,EACZR,EACLG,EAAQF,OACRqD,EAAME,MACNrD,EACAC,EACA,aACAmD,G,6FC/IG,MAAME,KAMXC,aAAY,OAAEzD,EAAM,OAAEkD,IACpB/D,OAAOuE,iBAAiBC,KAAM,CAC5B3D,OAAQ,CAAEF,MAAOE,GACjBkD,OAAQ,CAAEpD,MAAOoD,EAAQU,UAAU,GACnCrB,OAAQ,CAAEzC,MAAO,KAAM8D,UAAU,GACjCD,KAAM,CAAE7D,MAAO6D,QAInBE,SACE,MAAMC,EAAO,CAAExC,UAAMyC,EAAW5B,UAAM4B,EAAWC,iBAAaD,GAC9D,IAAIE,EAAQN,KACZ,KAAOM,IAAU9E,OAAOM,WAAW,CACjC,MAAMyE,EAAU/E,OAAOgF,0BAA0BF,GACjD,IAAK,MAAOhF,EAAKa,KAAUX,OAAOiF,QAAQF,IACpCpE,EAAMT,YAAcS,EAAMR,OAE5BwE,EAAK7E,GAAO0E,KAAK1E,IAGrBgF,EAAQ9E,OAAOkF,eAAeJ,GAEhC,OAAOH,GCnBJ,SAASQ,EACdC,EACAC,GACA,iBAAEC,GAAqB,IAEvB,IAAKF,EAAQG,MAAO,CAClB,MAAMC,EAAMH,EAAKI,OAAOtF,IAAIiF,EAAQA,SACpC,IAAKI,EACH,OAEF,GAAiB,YAAbA,EAAIrD,KAAoB,CAC1B,MAAM,0BAAEuD,GAA8BL,EAAKM,MAC3C,GAAID,EAA0BE,IAAIJ,GAGhC,OAAOE,EAA0BvF,IAAIqF,GAEvCH,EAAKM,MAAMD,0BAA0BG,IAAIL,OAAKZ,GAC9C,MAAMnB,EAAS0B,EAA0BK,EAAIJ,QAASC,GAEtD,GADAA,EAAKM,MAAMD,0BAA0BG,IAAIL,EAAK/B,GAC1CA,EACF,MAAO,CACLqC,UAAWV,EACXW,WAAYtC,EAAOsC,YAIzB,GAAiB,eAAbP,EAAIrD,OAA0BmD,IAAqBF,EAAQY,UAC7D,MAAO,CACLF,UAAWV,EACXW,WAAYP,GAIlB,IAAK,MAAMS,KAAWb,EAAQa,QAAS,CACrC,MAAMxC,EAAS0B,EAA0Bc,EAASZ,GAClD,GAAI5B,EACF,OAAIwC,EAAQV,MACH9B,EAEF,CACLqC,UAAWG,EACXF,WAAYtC,EAAOsC,aAWpB,SAASG,EAAgCC,EAAMd,GACpD,GAAIA,EAAKM,MAAMO,gCAAgCN,IAAIO,GACjD,OAAOd,EAAKM,MAAMO,gCAAgC/F,IAAIgG,GAIxDd,EAAKM,MAAMO,gCAAgCL,IAAIM,OAAMvB,GACrD,IAAInB,EAAS0C,EAAKC,QAAQC,MAAMC,GAAUA,EAAMC,WAChD,IAAK9C,GAAU0C,EAAKtB,YAAa,CAC/B,MAAM2B,EAAYnB,EAAKI,OAAOtF,IAAIgG,EAAKtB,aAClC2B,EAGM
N,EAAgCM,EAAWnB,KACpD5B,GAAS,GAFTA,GAAS,EAMb,OADA4B,EAAKM,MAAMO,gCAAgCL,IAAIM,EAAM1C,GAC9CA,EChFF,MAAMgD,kBAAkBC,MAC7BpC,aAAY,OAAEzD,EAAM,OAAEkD,IACpB4C,QACA3G,OAAOuE,iBAAiBC,KAAM,CAC5B3D,OAAQ,CAAEF,MAAOE,GACjBkD,OAAQ,CAAEpD,MAAOoD,GACjBX,OAAQ,CAAEzC,MAAO,KAAM8D,UAAU,MCHhC,MAAMmC,qBAAqBvC,KAKhC,cAAcwC,EAAW1E,GACvB,MAAO,KACL,MAAMxB,EAAQkG,EAAUC,YAAY3E,GACpC,GAAIxB,EACF,OAAO,IAAIiG,aAAa,CACtB/F,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEpD,MAAAA,MAMlB,YACE,OAAO,EAAS6D,KAAKT,OAAOpD,MAAMA,OAIpCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOpD,OACpBqG,EAAE9C,MAAMM,KAAKT,OAAOoD,cAKnB,MAAMC,YAAYR,aAIvB,aAAaC,GACX,MAAMlG,EAAQkG,EAAUC,YAAY,OACpC,GAAInG,EACF,OAAO,IAAIyG,IAAI,CAAEvG,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAEpD,MAAAA,KAIzD,WACE,MAAO,OCnCX,SAASoD,EAAO8C,EAAWQ,GACzB,OAAOC,EAAKT,EAAW,CACrBU,OAAQX,aAAaW,OAAOV,EAAWQ,GACvCG,SAAUH,EAAY,UAI1B,MAAMI,EAAqB,CAAC,aAAc,UAAW,UAAW,UAU1DC,EAAkB,IAAIC,IAAI,IARD,CAC7B,oBACA,gBACA,cACA,uBACA,eAKyB7F,KAAKkB,GAAS,CAACA,EAAM,SAASA,OAEvD,CAAC,mBAAoB,yBACrB,CAAC,mBAAoB,0BACrB,CAAC,cAAe,6BAOlB,SAAS4E,EAAiBf,GACxB,IAAK,MAAMgB,KAAUJ,EAAoB,CACvC,MAAMK,EAAO/D,EAAO8C,EAAWgB,GAC/B,GAAIC,EAAK1F,OACP,OAAO0F,EAGXjB,EAAUjG,MACR,uEAIG,MAAMmH,oCAAoC1D,KAI/C,aAAawC,GACX,MAAM9C,EAAS,CAAEiE,OAAQnB,EAAUoB,QAAQ,MACrCC,EAAMC,EACV,IAAIJ,4BAA4B,CAAElH,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAG9D,GADAmE,EAAIZ,KAAO,GACPvD,EAAOiE,OAAQ,CAEjB,GADAjE,EAAOqE,SAAWvB,EAAUoB,QAAQ,KAChClE,EAAOqE,SACT,OAAOF,EAAI1D,KAEbT,EAAOsE,cAAgBxB,EAAUC,eAAeW,GAelD,OAbA1D,EAAOuE,KAAOzB,EAAUoB,QAAQ,KAC5BlE,EAAOuE,MACTJ,EAAIZ,KAAOY,EAAIK,UAEXX,EAAiBf,GAEjB2B,EAAc3B,GAClB9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,yDACTmD,EAAOiE,SAAWjE,EAAOsE,eAClCxB,EAAUjG,MAAM,uDAEXsH,EAAI1D,KAGb,gBACE,OACEA,KAAKT,OAAOiE,SAAWxD,KAAKT,OAAOqE,WAAa5D,KAAKT,OAAOsE,cAIhE,cACE,OAAI7D,KAAK+D,UACA/D,KAAK8C,KAAK,GAAGvD,OAAOpD,MAAMwB,KAAO,QAEtCqC,KAAKT,OAAOqE,SACP,IAEL5D,KAAKT,OAAOsE,cACP7D,KAAKT,OAAOsE,cAAclG,KAE5B,KAIT4E,MAAMC,GACJ,MAAM,QAAE0B,GAAYlE,KACpB,OAAOwC,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAE9C,MAAMM,KAAKT,OAAOqE,UACpBpB,EAAE2B,gBAAgBnE,KAAKT,OAAOsE,cAAe7D,KAAKpB,QAClD4D,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK8C,KAAKxF,KAAK8G,GACG,oBAAZF,EACH1B,EAAE6B,WAAWD,EAAGpE,KAAKpB,QACrBwF,EAAE7B,MAAMC,KAEdA,EAAE9C,MAAMM,KAAKT,OAAO0E,UAKnB,MAAMK,gCAAgCzE,KAI3C,aAAawC,GACX,MAAM7D,EAAO6D,EAAUC,YAAY,cACnC,GAAI9D,EACF,OAAO,IAAI8F,wBAAwB,CACjCjI,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEf,KAAAA,GACV+F,OAAQhB,4BAA4BiB,MAAMnC,KAKhDvC,aAAY,OAAEzD,EAAM,OAAEkD,EAAM,OAAEgF,IAC5BpC,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChBgF,EAAO3F,OAASoB,KAChBxE,OAAOC,eAAeuE,KAAM,SAAU,CAAE7D,MAAOoI,IAGjD,WACE,MAAO,qBAET,WACE,OAAOvE,KAAKT,OAAOf,KAAKrC,MAE1B,UACE,MAAQ+H,QAASvG,EAAI,OAAE4B,EAAM,KAAEuD,GAAS9C,KAAKuE,OAC7C,IAAK5G,EACH,OAAO,KAOT,MAAO,CAAEA,KAAAA,EAAMxB,MALD6D,KAAKuE,OAAOR,UACtBjB,EACA9C,KAAKuE,OAAOhF,OAAOsE,cACnB,EAAStE,EAAOsE,cAAc1H,OAC9B,MAGN,gBACE,MAAM,UAAE4H,EAAS,KAAEjB,GAAS9C,KAAKuE,OACjC,OAAKzB,GAAQiB,EACJ,GAEFjB,EAGT,UAAUjC,GACR,MAAM,KAAErC,GAASwB,KACjB,GAAa,4BAATxB,EAAoC,CACtC,MAAMhC,EAAU,sOAIViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,uBACAxD,EACA,CAAEE,MAAO,iBAEN,GAAIwG,EAAgB9B,IAAI5C,GAAO,CACpC,MAAMhC,EAAU,MAAMgC,yEACA0E,EAAgBvH,IAAI6C,oHAGpCiB,EAAgBO,KAAKT,OAAOf,KAAMwB,KAAM,iBAAkBxD,EAAS,CACvEE,MAAO,UACPC,SA0B+B8H,EA1BQzE,KA2BtC,KACL,MAAM,KAAExB,GAASiG,EACjBA,EAAQlF,OAAOf,KAAKrC,MAAQ+G,EAAgBvH,IAAI6C,GACnC,gBAATA,IACFiG,EAAQF,OAAOhF,OAAS,QAL9B,IAAuCkF,EAvBnC,IAAK,MAAMC,KAAO1E,KAAK2E,gBACdD,EAAIE,SAAS/D,GAKxB0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOf,KAAKhB,QAC7BgF,EAAEC,GAAGoC,kBACHrC,EAAEC,GAAGC,KAAK,CACRF,EAAEC,GAAGqC,
2BAA2B9E,KAAKxB,MACrCwB,KAAKuE,OAAOhC,MAAMC,MAGtBA,EAAE9C,MAAMM,KAAKT,OAAOoD,cAoBnB,MAAMoC,2BAA2B9C,UAItC,aAAaI,GACX,MAAM9C,EAAS,GACfA,EAAOuE,KAAOzB,EAAUoB,QAAQ,KAChC,MAAMC,EAAM,IAAIqB,mBAAmB,CAAE1I,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC/D,OAAKA,EAAOuE,MACZJ,EAAIsB,QACClC,EAAKT,EAAW,CACjBU,OAAQuB,wBAAwBE,MAChCxB,SAAU,wBAGdzD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MACR,4DAECsH,EAAI9F,SACPyE,EAAU4C,UAAU1F,EAAO0E,MAAMrE,OACjCyC,EAAUjG,MAAM,iDAEdiG,EAAU6C,MAAM,MAClB7C,EAAUjG,MACR,kEAGGsH,GArBkBA,EAwB3B,UAAU7C,GACR,IAAK,MAAM4D,KAAWzE,WACbyE,EAAQG,SAAS/D,GAK5B0B,MAAMC,GACJ,OAAKxC,KAAKpC,OACH4E,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK1C,KAAK6H,GAAOA,EAAG5C,MAAMC,KAC7BA,EAAE9C,MAAMM,KAAKT,OAAO0E,SAJG,ICxL7B,SAASmB,EAAY/C,EAAWzG,GAC9B,MAAM4F,EAAWa,EAAUoB,QAAQ,KAC/BjC,IACF5F,EAAI2D,OAAOiC,SAAWA,GAEpBa,EAAU6C,MAAM,MAAM7C,EAAUjG,MAAM,iCAO5C,SAASiJ,EAAYhD,EAAWiD,GAC9B,IAAI5B,EAhFN,SAAsBrB,EAAWiD,GAC/B,MAAMvG,EAAOsD,EAAUoB,QACrB,cACA,kBACA,UACA,WACA,UAEF,IAAK1E,EACH,OAEF,MAAM2E,EAAMC,EACV,IAAI4B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,MAKjD,OAHA2E,EAAInE,OAAOuE,KACTzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,4BAA4B2C,EAAK5C,SAC3C4C,EAAK5C,OACX,IAAK,UAAW,CACVkG,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,+CAClB,MAAMqF,EACJ+D,EAAYnD,EAAWiD,IACvBjD,EAAUjG,MAAM,2BAClBsH,EAAIjC,QAAQuD,KAAKvD,GACjB,MAEF,IAAK,WACL,IAAK,cACL,IAAK,kBAAmB,CACtB,MAAMA,EACJgE,EAA8BpD,EAAWiD,IACzCjD,EAAUjG,MAAM,WAAW2C,EAAK5C,iBAClCuH,EAAIjC,QAAQuD,KAAKvD,GACjB,MAEF,IAAK,SAAU,CACTY,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,6CAClB,MAAMsJ,EACJrD,EAAUoB,WAAWkC,IACrBtD,EAAUjG,MAAM,8BAA8BuJ,EAAYlI,KAAK,SAC3DmI,EAAa,IAAIL,KAAK,CAC1BlJ,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAER,KAAM2G,KAElBE,EAAWrG,OAAOoD,UAChBN,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,uCAClBwJ,EAAWjI,KAAO2H,EAClB,MAAMO,EACJJ,EAA8BpD,EAAWiD,IACzCjD,EAAUjG,MAAM,qCAClBsH,EAAIjC,QAAQuD,KAAKY,EAAYC,GAC7B,OAOJ,OAJKnC,EAAI9C,SAASyB,EAAUjG,MAAM,8BAA8B2C,EAAK5C,SACrEuH,EAAInE,OAAO0E,MACT5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,iCAAiC2C,EAAK5C,SACjDuH,EAAI1D,KAmBD8F,CAAazD,EAAWiD,IAAaS,EAAe1D,GAC9D,IAAKqB,EAAK,CACR,MAAM3E,EACJsD,EAAUC,YAAY,eACtBD,EAAUoB,WAAWkC,KAAgBK,GACvC,IAAKjH,EACH,OAEF2E,EAAM,IAAI6B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,KACjDsD,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,4BAA4B2C,EAAK5C,SASrD,MAPoB,YAAhBuH,EAAIuC,SAAyB5D,EAAU6C,MAAM,MAC/C7C,EAAUjG,MAAM,mCAElBsH,EAAI/F,KAAO2H,GAAY,KACvBF,EAAY/C,EAAWqB,GACnBA,EAAIlC,UAA4B,QAAhBkC,EAAI9C,SACtByB,EAAUjG,MAAM,sCACXsH,EAsCF,MAAM6B,aAAa1F,KAKxB,aAAawC,EAAWiD,GACtB,OAAOD,EAAYhD,EAAWiD,IArClC,SAAoBjD,EAAW1E,GAC7B,MAAM4B,EAAS,GAEf,GADAA,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAC3BlE,EAAOuE,KAAM,OAClB,MAAMJ,EAAMC,EAAa,IAAI4B,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAE9D,IADAmE,EAAI/F,KAAOA,GAAQ,OACN,CACX,MAAMuI,EACJT,EAA8BpD,IAC9BA,EAAUjG,MAAM,wDACE,QAAhB8J,EAAItF,SACNyB,EAAUjG,MAAM,iDACE,YAAhB8J,EAAID,SACN5D,EAAUjG,MAAM,qDAClBsH,EAAIjC,QAAQuD,KAAKkB,GACjB,MAAMC,EAAK9D,EAAUoB,QAAQ,MAC7B,IAAI0C,EAEG,MADLD,EAAI3G,OAAOoD,UAAYwD,EAW3B,OARIzC,EAAI9C,QAAQhD,OAAS,GACvByE,EAAUjG,MACR,kEAGJmD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,2BAC5CgJ,EAAY/C,EAAWqB,GAChBA,EAAI1D,KASkCoG,CAAW/D,EAAWiD,GAGnExF,aAAY,OAAEzD,EAAM,OAAEkD,IACpB4C,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChB/D,OAAOC,eAAeuE,KAAM,UAAW,CAAE7D,MAAO,GAAI8D,UAAU,IAC9DD,KAAKqG,SAAW,IAAItB,mBAAmB,CAAE1I,OAAAA,EAAQkD,OAAQ,KAG3D,cACE,OAAIS,KAAKyB,QAAQ7D,QAAUoC,KAAKT,OAAOR,KAC9BiB,KAAKT,OAAOR,KAAK5C,MAEnB,GAET,eACE,OAAOmK,QAAQtG,KAAKT,OAAOiC,UAE7B,YACE,OAAO8E,QAAQtG,KAAKyB,QAAQ7D,UAAYoC,KAAKT,OAAOR,KAEtD,cACE,GAAIiB,KAAKyB,QAAQ7D,OACf,OAAOoC,KAAKyB,QAOd,OAAO,EAJM,CAACzB,KAAKT,OAAOgH,OAAQvG,KAAKT,OAAOR,KAAMiB,KAAKT,OAAOiH,SAC7
DC,QAAQlJ,GAAMA,IACdD,KAAKC,GAAMA,EAAEpB,QACbsB,KAAK,MAIV,UAAUoD,GAGR,SAFOb,KAAKqG,SAASzB,SAAS/D,GAET,SAAjBb,KAAKY,QAAoB,CAC3B,MAAMpE,EAAU,sJAGViD,EAAgBO,KAAKT,OAAOR,KAAMiB,KAAM,eAAgBxD,EAAS,CACrEG,SA6EagB,EA7EQqC,KA8EpB,KACLrC,EAAK4B,OAAOR,KAAK5C,MAAQ,gBAF7B,IAAqBwB,EArEjB,MAAM+I,GAAW1G,KAAKe,OAASF,EAAKI,OAAOtF,IAAIqE,KAAKY,SAC9C5B,EAASgB,KAAKe,MAChBf,KACA0G,GAA4B,YAAjBA,EAAQ/I,KACnB+I,EAAQ9F,aACRR,EACJ,GAAIpB,GAAUgB,KAAKwB,SAAU,CAE3B,MAAM,UAAEF,GAAcX,EAA0B3B,EAAQ6B,IAAS,GACjE,GAAIS,EAAW,CACb,MAAMqF,GAAe3G,KAAKe,MAAQO,EAAYtB,MAAMT,OAAOR,KACrDvC,EAAU,yDACViD,EACJkH,EACA3G,KACA,yBACAxD,SAKJ,IAAK,MAAMiF,KAAWzB,KAAKyB,cAClBA,EAAQmD,SAAS/D,GAM9B0B,MAAMC,GA6BJ,OAAOA,EAAEC,GAAGC,KAAK,CACf1C,KAAKqG,SAAS9D,MAAMC,GA7BJ,MAChB,GAAIxC,KAAKe,OAASf,KAAKiG,QACrB,OAAOzD,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGwD,SAC/BzD,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAKyB,QAAQnE,KAAKC,GAAMA,EAAEgF,MAAMC,KACnCA,EAAE9C,MAAMM,KAAKT,OAAO0E,SAGxB,MAAM2C,EAAa5G,KAAKT,OAAOgH,QAAUvG,KAAKT,OAAOR,KAC/CwH,EAASvG,KAAKT,OAAOgH,OACvB,CAACvG,KAAKT,OAAOgH,OAAOpK,MAAOqG,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOR,KAAKvB,SACxD,GACEqJ,EAAMrE,EAAElB,UACZkB,EAAEC,GAAGC,KAAK,IACL6D,EACHvG,KAAKT,OAAOR,KAAK5C,MACjBqG,EAAE9C,MAAMM,KAAKT,OAAOiH,WAEtB,CACEM,UACE9G,KACF,QACAzB,QAASyB,OAGb,OAAOwC,EAAEC,GAAGC,KAAK,CAACF,EAAEC,GAAGjF,OAAOoJ,EAAWpJ,QAASqJ,KAIlDE,GACAvE,EAAE9C,MAAMM,KAAKT,OAAOiC,UACpBgB,EAAE9C,MAAMM,KAAKT,OAAOoD,cC3QnB,MAAMqE,gBAAgBnH,KAI3B,aAAawC,GACX,MAAMmB,EAASnB,EAAUoB,QAAQ,KACjC,IAAKD,EACH,OAAO,KAET,MAAMxC,EACJiG,EAAY5E,IACZA,EAAUC,YAAY,WACtBD,EAAUoB,QAAQ,OAAQ,IAAK,MAC/BpB,EAAUjG,MAAM,wBACZ8K,EAAa,CAAClG,GACpB,GAAkB,MAAdA,EAAI7E,MAAe,CACrB,MAAM8H,EACJ5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCAClB8K,EAAWlC,KAAKf,QACX,GAAkB,MAAdjD,EAAI7E,MAAe,CAC5B,MAAM8H,EACJ5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,0CAClB8K,EAAWlC,KAAKf,GAElB,OAAO,IAAI+C,QAAQ,CACjB3K,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAEiE,OAAAA,GACV0D,WAAAA,IAIJpH,aAAY,OAAEzD,EAAM,OAAEkD,EAAM,WAAE2H,IAC5B/E,MAAM,CAAE9F,OAAAA,EAAQkD,OAAAA,IAChB2H,EAAWtI,OAASoB,KACpBxE,OAAOC,eAAeuE,KAAM,aAAc,CAAE7D,MAAO+K,IAGrD,WACE,OAAOC,EAAWnH,KAAKkH,WAAW,IAAIvJ,KAExC,YACE,OAAOwJ,EAAWnH,KAAKkH,WAAW,IAAI/K,MAExC,eACE,OAAOgL,EAAWnH,KAAKkH,WAAW,IAAIE,SAIxC7E,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOiE,WACjBxD,KAAKkH,WAAW5J,KAAKC,GAAMiF,EAAE9C,MAAMnC,QCxCrC,MAAM8J,iBAAiBxH,KAI5B,aAAawC,GACX,MAAMiF,EAAiBjF,EAAU/F,SAE3BiD,EAAS,GACTmE,EAAMC,EACV,IAAI0D,SAAS,CAAEhL,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAK3C,OAHAmE,EAAI2C,SAAWtB,mBAAmBP,MAAMnC,GACxC9C,EAAOgI,SAAWlF,EAAUoB,QAAQ,YACpCC,EAAI9C,QAAU6E,EAA8BpD,EAAW,iBAClDqB,EAAI9C,SAGJrB,EAAOgI,WACVhI,EAAOiI,SAAWnF,EAAUoB,QAAQ,QAEtClE,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUoB,WAAWgE,GAClBlI,EAAOf,MAGZkF,EAAIgE,QAAUnI,EAAOgI,SAAWP,QAAQxC,MAAMnC,GAAa,KACpDqB,EAAI1D,MAHFqC,EAAU4C,UAAUqC,IATpBjF,EAAU4C,UAAUqC,GAe/B,WACE,MAAO,WAET,eACE,QAAStH,KAAKT,OAAOgI,SAEvB,eACE,QAASvH,KAAKT,OAAOiI,SAEvB,WACE,OAAO,EAASxH,KAAKT,OAAOf,KAAKrC,OAMnC,UAAU0E,SACDb,KAAKqG,SAASzB,SAAS/D,SACvBb,KAAKY,QAAQgE,SAAS/D,GAC7B,MAAM5B,EAAS0B,EAA0BX,KAAKY,QAASC,EAAM,CAC3DC,kBAAkB,IAEpB,GAAI7B,EACF,GAAIe,KAAKY,QAAQY,SAAU,CACzB,MAAMhF,EAAU,iDACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,uBACAxD,QAEG,GAAKwD,KAAKuH,UAiBV,IAAKvH,KAAK0H,QAAS,CACxB,MAAMlL,EAAU,yEACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,mBACAxD,EACA,CACEG,QAASgL,EAAsC3H,cAxBnD,GACEA,KAAKpB,SACJ8C,EAAgCzC,EAAOsC,WAAYV,IA8C9D,SAAgC6D,GAC9B,MAAM5B,EAAO4B,EAAI9F,OAAO+F,WAAaD,EAAI9F,OAAOkE,KAC1ClD,EAAQkD,EAAK8E,QAAQlD,GAE3B,OADuB5B,EAAK/F,MAAM6C,EAAQ,GAAGiC,MAAMgG,IAAOA,EAAEN,WAhDpDO,CAAuB9H,MACvB,CACA,MAAMxD,EAAU,0EACViD,EACJO,KAAKT,OAAOf,KACZwB,KACA
,oBACAxD,EACA,CACEG,SA8CgC+H,EA9Cc1E,KA+CnD,KACL,MAAM4G,EAAamB,EAAcrD,EAAI9D,SACrC8D,EAAInF,OAAOgI,SAAW,IACjBX,EACHjJ,KAAM,WACNxB,MAAO,YAETyK,EAAWpJ,OAAS,IACpBmK,EAAsCjD,EAAtCiD,OATJ,IAA8CjD,EA1B5CnC,MAAMC,GACJ,OAAOA,EAAEC,GAAGC,KAAK,CACf1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOgI,UACpB/E,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAE9C,MAAMM,KAAKT,OAAOiI,UACpBhF,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,OACvCA,KAAK0H,QAAU1H,KAAK0H,QAAQnF,MAAMC,GAAK,GACvCA,EAAE9C,MAAMM,KAAKT,OAAOoD,cAkC1B,SAASgF,EAAsCjD,GAC7C,MAAO,KACLA,EAAIgD,QAAUV,QAAQxC,MAAM,IAAI0D,UAAU,WCjJvC,MAAMC,kBAAkBtI,KAS7B,aAAawC,GAAW,QAAE+F,EAAO,QAAEC,GAAY,IAC7C,MAAM9I,EAAS,CAAE6I,QAAAA,GACX1E,EAAMC,EACV,IAAIwE,UAAU,CAAE9L,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAE5C,OAAI6I,GAA6B,gBAAlBA,EAAQjM,QACrBoD,EAAO+I,YAAcjG,EAAUoB,QAAQ,KACnClE,EAAO+I,cACT5E,EAAIiB,UAAY,GACTjB,IAGN0E,GAAYC,IACf9I,EAAO6I,QAAU/F,EAAUoB,QAAQ,SAAU,SAAU,YAEzDC,EAAI9C,QACF4E,EAAYnD,IAAcA,EAAUjG,MAAM,uBAC5CmD,EAAOf,KACL6D,EAAUC,YAAY,eAAiBD,EAAUoB,QAAQ,YAC3DlE,EAAOuE,KACLzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,qBAC5CsH,EAAIiB,UAAYX,EAAc3B,GAC9B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,0BAC5CmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCACXsH,EAAI1D,MAGb,WACE,MAAO,YAET,WACE,MAAM,KAAExB,GAASwB,KAAKT,OACtB,OAAKf,EAGE,EAASA,EAAKrC,OAFZ,GAIX,cACE,OAAK6D,KAAKT,OAAO6I,QAGVpI,KAAKT,OAAO6I,QAAQjM,MAFlB,GAKX,UAAU0E,GAER,SADOb,KAAKqG,SAASzB,SAAS/D,IACzBb,KAAKxB,MAAQ,CAAC,GAAI,UAAU+J,SAASvI,KAAKoI,SAAU,CACvD,MAAM5L,EAAU,qFACViD,EAAgBO,KAAKT,OAAOuE,KAAM9D,KAAM,gBAAiBxD,GAE7DwD,KAAKY,gBACAZ,KAAKY,QAAQgE,SAAS/D,IAE/B,IAAK,MAAM2H,KAAYxI,KAAK2E,gBACnB6D,EAAS5D,SAAS/D,GAK7B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACbyI,EAAOzI,KAAKY,QACd,CACE4B,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK2E,UAAUrH,KAAKoH,GAAQA,EAAInC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAO0E,QAEtB,GACJ,OAAOzB,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBxC,KAAKT,OAAOf,KACRgE,EAAE9C,MAAMM,KAAKT,OAAO6I,SACpB5F,EAAE9C,MAAMM,KAAKT,OAAO6I,QAAS5F,EAAEC,GAAGiG,SAAU,CAAET,KAAMjI,KAAMpB,OAAAA,OAC3D6J,EACHjG,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAAA,KC3Fb,MAAM+J,kBAAkB9I,KAQ7B,aACEwC,GACA,QAAE+F,EAAO,UAAEQ,GAAY,EAAK,SAAEC,GAAW,GAAU,IAEnD,MAAMvB,EAAiBjF,EAAU/F,SAC3BiD,EAAS,CAAE6I,QAAAA,GACX1E,EAAMC,EACV,IAAIgF,UAAU,CAAEtM,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAa5C,GAXK6I,GAAYQ,IACfrJ,EAAO6I,QAAU/F,EAAUoB,QAAQ,YAEjB,YAAhBC,EAAI0E,SAAyB/F,EAAU6C,MAAM,aAC/C7C,EAAUjG,MAAM,4CAElBmD,EAAOsJ,SAAWxG,EAAUoB,QAAQ,YAChCoF,IAAatJ,EAAOsJ,UAAYxG,EAAU6C,MAAM,cAClD7C,EAAUjG,MAAM,+CAElBmD,EAAOR,KAAOsD,EAAUoB,QAAQ,aAC3BlE,EAAOR,KAcZ,OAVA2E,EAAI9C,QACF6E,EAA8BpD,EAAW,mBACzCA,EAAUjG,MAAM,0BAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUoB,QAAQ,QAAS,aAC3BpB,EAAUjG,MAAM,0BAClBmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,wCACXsH,EAAI1D,KAbTqC,EAAU4C,UAAUqC,GAgBxB,WACE,MAAO,YAET,cACE,OAAKtH,KAAKT,OAAO6I,QAGVpI,KAAKT,OAAO6I,QAAQjM,MAFlB,GAIX,eACE,QAAS6D,KAAKT,OAAOsJ,SAEvB,WACE,OAAO,EAAS7I,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,GAIR,aAHOb,KAAKqG,SAASzB,SAAS/D,SACvBb,KAAKY,QAAQgE,SAAS/D,GAErBb,KAAKY,QAAQqF,SACnB,IAAK,WACL,IAAK,SAAU,CACb,MAAMzJ,EAAU,4BAA4BwD,KAAKY,QAAQqF,uBACnDxG,EACJO,KAAKT,OAAOf,KACZwB,KACA,oBACAxD,GAEF,MAEF,QAAS,CACP,MAAM,UAAE8E,GACNX,EAA0BX,KAAKY,QAASC,IAAS,GACnD,GAAIS,EAAW,CACb,MAAMqF,GAAe3G,KAAKY,QAAQG,MAAQO,EAAYtB,KAAKY,SACxDrB,OAAOR,KACJvC,EAAU,mDACViD,EACJkH,EACA3G,KACA,oBACAxD,MAQV+F,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS
9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAO6I,SACpB5F,EAAE9C,MAAMM,KAAKT,OAAOsJ,UACpBrG,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAAA,KCxGb,SAAS,EAASyF,GACvB,OAAOA,EAAWyE,WAAW,KAAOzE,EAAWtH,MAAM,GAAKsH,EAWrD,SAASvB,EAAKT,GAAW,OAAEU,EAAM,aAAEgG,EAAY,SAAE/F,EAAW,SACjE,MAAMgG,EAAQjG,EAAOV,GACrB,IAAK2G,EACH,MAAO,GAETA,EAAMzJ,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC3C,MAAMwF,EAAQ,CAACD,GACf,KAAOA,EAAMzJ,OAAOoD,WAAW,CAC7B,MAAMuG,EAAOnG,EAAOV,GACpB,IAAK6G,EAAM,CACJH,GACH1G,EAAUjG,MAAM,qBAAqB4G,KAEvC,MAIF,GAFAkG,EAAK3J,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC1CwF,EAAMjE,KAAKkE,IACNA,EAAK3J,OAAOoD,UAAW,MAE9B,OAAOsG,EAMF,SAAShC,EAAY5E,GAC1B,OACEA,EAAUC,YAAY,UAAW,YACjCD,EAAUoB,QAAQ,OAAQ,QAAS,WAAY,YAAa,OASzD,SAAS0D,GAAW,KAAExJ,EAAI,MAAExB,IACjC,OAAQwB,GACN,IAAK,UACL,IAAK,UACH,MAAO,CAAEA,KAAM,SAAUxB,MAAAA,GAC3B,IAAK,SACH,MAAO,CAAEwB,KAAM,SAAUxB,MAAOA,EAAMY,MAAM,GAAI,IAGpD,OAAQZ,GACN,IAAK,OACL,IAAK,QACH,MAAO,CAAEwB,KAAM,UAAWxB,MAAiB,SAAVA,GACnC,IAAK,WACL,IAAK,YACH,MAAO,CAAEwB,KAAM,WAAYyJ,SAAUjL,EAAM2M,WAAW,MACxD,IAAK,IACH,MAAO,CAAEnL,KAAM,WAAYxB,MAAO,IACpC,IAAK,IACH,MAAO,CAAEwB,KAAM,cACjB,QACE,MAAO,CAAEA,KAAMxB,IAOd,SAAS4J,EAAe1D,GAoB7B,MAAM,OAAEhG,GAAWgG,EACb8G,EApBN,WACE,MAAM5C,EAASlE,EAAUoB,QAAQ,YAC3B1E,EAAOsD,EAAUoB,QAAQ,QAAS,QACxC,GAAI1E,EAAM,CACR,MAAMyH,EAAUnE,EAAUoB,QAAQ,QAClC,OAAO,IAAI8B,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAEgH,OAAAA,EAAQxH,KAAAA,EAAMyH,QAAAA,KAEhDD,GAAQlE,EAAUjG,MAAM,gCAabgN,IAVjB,WACE,MAAM7C,EAASlE,EAAUoB,QAAQ,gBAC3B1E,EAAOsD,EAAUoB,QAAQ,QAAS,UACxC,GAAI1E,EACF,OAAO,IAAIwG,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAEgH,OAAAA,EAAQxH,KAAAA,KAE1CwH,GAAQlE,EAAUjG,MAAM,8BAIKiN,GACnC,GAAIF,EAAU,OAAOA,EACrB,MAAMpK,EAAOsD,EAAUoB,QACrB,SACA,UACA,OACA,QACA,aAEF,OAAI1E,EACK,IAAIwG,KAAK,CAAElJ,OAAAA,EAAQkD,OAAQ,CAAER,KAAAA,UADtC,EAQK,SAASiF,EAAc3B,GAC5B,OAAOS,EAAKT,EAAW,CACrBU,OAAQsE,SAAS7C,MACjBxB,SAAU,mBAQP,SAASyC,EAA8BpD,EAAWiD,GACvD,MAAMe,EAAWtB,mBAAmBP,MAAMnC,GACpCqB,EAAM6B,KAAKf,MAAMnC,EAAWiD,GAElC,OADI5B,IAAKC,EAAaD,GAAK2C,SAAWA,GAC/B3C,EAOF,SAAS8B,EAAYnD,EAAWiD,GACrC,MAAMY,EAAMX,KAAKf,MAAMnC,EAAWiD,GAAY,eAC9C,GAAIY,EACF,OAAOA,EAET,MAAMoD,EAAYjH,EAAUoB,QAAQ,QACpC,GAAI6F,EAAW,CACb,MAAM5F,EAAM,IAAI6B,KAAK,CACnBlJ,OAAQgG,EAAUhG,OAClBkD,OAAQ,CAAER,KAAMuK,KAGlB,OADA5F,EAAI/F,KAAO,cACJ+F,GAOJ,SAAS6F,EAAYlH,GAC1B,MAAM+F,EAAU/F,EAAUoB,QAAQ,eAClC,IAAK2E,EAAS,OAKd,OAHEO,UAAUnE,MAAMnC,EAAW,CAAE+F,QAAAA,KAC7BD,UAAU3D,MAAMnC,EAAW,CAAE+F,QAAAA,KAC7B/F,EAAUjG,MAAM,4BAOb,SAASoN,EAAmBC,GACjC,MAAMC,EAAQD,EAAIzL,MAAM,MAExB,GAAI0L,EAAM9L,OAAQ,CAChB,MAAM+L,EAAQD,EAAMA,EAAM9L,OAAS,GAAG+L,MAAM,QAC5C,GAAIA,EACF,OAAOA,EAAM,GAGjB,MAAO,GAeF,SAASC,EAAwB5I,GACtC,MAAO,KACL,GAAIA,EAAIqF,SAASzI,OAAQ,CACvB,MAAMyE,EAAY,IAAI6F,UAAU,mBAC1B2B,EAAUvF,wBAAwBE,MAAMnC,GAC9CwH,EAAQtK,OAAOoD,UAAYN,EAAUoB,QAAQ,KAC7C,MAAMqG,EAAW9I,EAAIqF,SAAS,GACzB,MAAM0D,KAAKD,EAASvK,OAAOf,KAAKhB,UACnCsM,EAASvK,OAAOf,KAAKhB,OAAS,IAAIsM,EAASvK,OAAOf,KAAKhB,UAEzDwD,EAAIqF,SAASxH,QAAQgL,OAChB,CACLlG,EAAa3C,GAAKqF,SAAWtB,mBAAmBP,MAC9C,IAAI0D,UAAU,qBAEhB,MAAM1K,EAASwD,EAAIzB,OAAOR,KAAKvB,OAC/BwD,EAAIqF,SAAS9G,OAAOuE,KAAKtG,OAASA,EAClCwD,EAAIzB,OAAOR,KAAKvB,OAAS,KAAKgM,EAAmBhM,OAShD,SAASuK,EAAcE,GAC5B,GAAIA,EAAK5B,SAASzI,OAChB,OAAOqK,EAAK5B,SAAS9G,OAAOuE,KAE9B,GAAkB,cAAdmE,EAAKtK,OAAyBsK,EAAKG,QACrC,OAAOL,EAAcE,EAAKrH,SAG5B,OADepF,OAAOwO,OAAO/B,EAAK1I,QAAQ0K,MAAK,CAACC,EAAGC,IAAMD,EAAEtK,MAAQuK,EAAEvK,QACvD,GAwBT,SAAS+D,EAAasE,EAAMrJ,GAKjC,GAJKA,IAEHA,EAASqJ,IAENA,EAGH,OAAOA,EA8BT,OA5Bc,IAAImC,MAAMnC,EAAM,CAC5BtM,IAAIqD,EAAQoF,GACV,MAAM
jI,EAAQ6C,EAAOoF,GACrB,OAAIlC,MAAMmI,QAAQlO,IAAgB,WAANiI,EAGnBT,EAAaxH,EAAO6C,GAEtB7C,GAETkF,IAAIrC,EAAQoF,EAAGjI,GAGb,GADA6C,EAAOoF,GAAKjI,GACPA,EACH,OAAO,EACF,GAAI+F,MAAMmI,QAAQlO,GAEvB,IAAK,MAAM+M,KAAQ/M,OACU,IAAhB+M,EAAKtK,SACdsK,EAAKtK,OAASA,aAGe,IAAjBzC,EAAMyC,SACtBzC,EAAMyC,OAASA,GAEjB,OAAO,KCtSb,MAAM0L,EAAU,CAGdC,QACE,sGACFC,QAAS,8CACTnG,WAAY,+BACZoG,OAAQ,WACRC,WAAY,cACZC,QAAS,2BACTC,MAAO,wBAGI5E,EAAmB,CAC9B,cACA,WACA,YACA,aACA,aACA,aACA,cACA,cACA,oBACA,gBACA,iBACA,eACA,eACA,MACA,SACA,UAGWL,EAAc,CAAC,aAAc,YAAa,aAE1C8B,EAAuB,CAClC,QACA,YACA,WACA,QACA,cACA,UACA,aACA,OACA,SACA,WACA,UACA,YACA,WACA,UACA,YACA,UACA,WACA,UACA,SACA,SACA,cACA,UACA,gBAGIoD,EAAoB,CACxB,YACA,cACA,WACA,MACA,kBACA,UACA,SACA,UACA,OACA,SACA,QACA,QACA,OACA,QACA,OACA,QACA,WACA,KACA,WACA,SACA,WACA,QACA,OACA,YACA,WACA,QACAC,OAAOrD,EAAsB9B,EAAaK,GAEtC+E,EAAe,CACnB,IACA,IACA,IACA,MACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,KAGIC,EAAW,CAEf,eACA,WACA,aAgHK,MAAM9C,UAIXpI,YAAYmL,GACVjL,KAAK3D,OA9GT,SAAkBoN,GAChB,MAAMlK,EAAS,GACf,IAAI2L,EAAgB,EAChB1N,EAAS,GACTK,EAAO,EACP+B,EAAQ,EACZ,KAAOsL,EAAgBzB,EAAI7L,QAAQ,CACjC,MAAMuN,EAAW1B,EAAI2B,OAAOF,GAC5B,IAAIjM,GAAU,EAQd,GANI,YAAY8K,KAAKoB,GACnBlM,EAASoM,EAAkB,aAAc,CAAEC,eAAe,IACpC,MAAbH,IACTlM,EAASoM,EAAkB,UAAW,CAAEC,eAAe,MAGzC,IAAZrM,EAAe,CACjB,MAAMsM,EAAgBhM,EAAOiM,MAAMrP,MACnC0B,IAAS0N,EAAc5B,MAAM,QAAU,IAAI/L,OAC3CJ,GAAU+N,EACV3L,GAAS,OACJ,GAAI,iBAAiBmK,KAAKoB,IAK/B,GAJAlM,EAASoM,EAAkB,YACX,IAAZpM,IACFA,EAASoM,EAAkB,aAEb,IAAZpM,EAAe,CACjBA,EAASoM,EAAkB,cAC3B,MAAMI,EAAYlM,EAAO3B,OAAS,EAC5B8B,EAAQH,EAAOkM,GACrB,IAAgB,IAAZxM,EAAe,CACjB,GAAI+L,EAASzC,SAAS7I,EAAMvD,OAAQ,CAClC,MAAMK,EAAU,GAAG,EACjBkD,EAAMvD,wDAER,MAAM,IAAIuP,iBACRlM,EAAYD,EAAQkM,EAAW,KAAMjP,IAE9BqO,EAAkBtC,SAAS7I,EAAMvD,SAC1CuD,EAAM/B,KAAO,gBAIG,MAAbwN,IACTlM,EAASoM,EAAkB,WAG7B,IAAK,MAAMM,KAAeZ,EACxB,GAAItB,EAAIX,WAAW6C,EAAaT,GAAgB,CAC9C3L,EAAOyF,KAAK,CACVrH,KAAM,SACNxB,MAAOwP,EACPnO,OAAAA,EACAK,KAAAA,EACA+B,MAAAA,IAEFpC,EAAS,GACT0N,GAAiBS,EAAY/N,OAC7BqB,EAASiM,EACT,MAQJ,IAHgB,IAAZjM,IACFA,EAASoM,EAAkB,WAEb,IAAZpM,EACF,MAAM,IAAI2M,MAAM,gCAElBV,EAAgBjM,EAChBW,GAAS,EAYX,OARAL,EAAOyF,KAAK,CACVrH,KAAM,MACNxB,MAAO,GACPqB,OAAAA,EACAK,KAAAA,EACA+B,MAAAA,IAGKL,EAOP,SAAS8L,EAAkB1N,GAAM,cAAE2N,GAAkB,IACnD,MAAMO,EAAKvB,EAAQ3M,GACnBkO,EAAGJ,UAAYP,EACf,MAAMjM,EAAS4M,EAAGC,KAAKrC,GACvB,OAAIxK,GACFM,EAAOyF,KAAK,CAAErH,KAAAA,EAAMxB,MAAO8C,EAAO,GAAIzB,OAAAA,EAAQK,KAAAA,EAAM+B,MAAAA,IAC/C0L,IACH9N,EAAS,IAEJqO,EAAGJ,YAEJ,GASMM,CAASd,GACvBjL,KAAK1D,SAAW,EAOlBF,MAAMI,GACJ,MAAM,IAAIkP,iBACRlM,EAAYQ,KAAK3D,OAAQ2D,KAAK1D,SAAU0D,KAAKzD,QAASC,IAO1DwP,UAAUrO,GACR,OACEqC,KAAK3D,OAAOuB,OAASoC,KAAK1D,UAC1B0D,KAAK3D,OAAO2D,KAAK1D,UAAUqB,OAASA,EAOxCuH,MAAM/I,GACJ,OACE6D,KAAKgM,UAAU,WAAahM,KAAK3D,OAAO2D,KAAK1D,UAAUH,QAAUA,EAOrEmG,eAAe2J,GACb,IAAK,MAAMtO,KAAQsO,EAAY,CAC7B,IAAKjM,KAAKgM,UAAUrO,GAAO,SAC3B,MAAM+B,EAAQM,KAAK3D,OAAO2D,KAAK1D,UAE/B,OADA0D,KAAK1D,WACEoD,GAOX+D,WAAWwI,GACT,IAAKjM,KAAKgM,UAAU,UAAW,OAC/B,MAAMtM,EAAQM,KAAK3D,OAAO2D,KAAK1D,UAC/B,IAAK,MAAMH,KAAS8P,EAClB,GAAIvM,EAAMvD,QAAUA,EAEpB,OADA6D,KAAK1D,WACEoD,EAOXwM,kBAAkB/P,GAChB,GAAK6D,KAAKgM,UAAU,eAGhBhM,KAAK3D,OAAO2D,KAAK1D,UAAUH,QAAUA,EAGzC,OAAO6D,KAAKsC,YAAY,cAM1B2C,UAAU3I,GACR0D,KAAK1D,SAAWA,GAIb,MAAMoP,yBAAyBE,MAWpC9L,aAAY,QACVtD,EAAO,YACP4C,EAAW,QACXb,EAAO,KACPV,EAAI,WACJwB,EAAU,MACVC,EAAK,OACLC,IAEA4C,MAAM3F,GAENwD,KAAKxB,KAAO,mBACZwB,KAAKZ,YAAcA,EACnBY,KAAKzB,QAAUA,EACfyB,KAAKnC,KAAOA,EACZmC,KAAKX,WAAaA,EAClBW,KAAKV,MAAQA,EACbU,KAAKT,OAASA,GChVX,MAAM4M,kBAAkB/J,aAI7B,aAAaC,GACX,MAAMlG,EAAQkG,EAAUC,YAAY,UACpC,GAAInG,EACF,OAAO,IAAIgQ,UAAU,CAAE9P,OAAQgG,EAAUhG,OAAQkD,OAAQ,
CAAEpD,MAAAA,KAI/D,WACE,MAAO,aAET,YACE,OAAOgG,MAAMhG,MAAMY,MAAM,GAAI,GAI/BwF,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGC,KAAK,CACfF,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOpD,MAAMqB,QAC9BgF,EAAEC,GAAGpH,WACHmH,EAAEC,GAAGC,KAAK,CAAC,IAAKF,EAAEC,GAAGjE,KAAKwB,KAAK7D,MAAO,CAAE8L,KAAMjI,KAAMpB,OAAAA,IAAW,MAC/D,CAAEqJ,KAAMjI,KAAMpB,OAAAA,IAEhB4D,EAAE9C,MAAMM,KAAKT,OAAOoD,cAKnB,MAAMyJ,aAAavM,KAIxB,aAAawC,GAEX,MAAM9C,EAAS,GAEf,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,SAC3BlE,EAAOR,KACV,OAEFQ,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,oBAClB,MAAMsH,EAAMC,EAAa,IAAIyI,KAAK,CAAE/P,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAkB9D,OAjBA8C,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,iBACxDsH,EAAIsG,OAASlH,EAAKT,EAAW,CAC3BU,OAAQoJ,UAAU3H,MAClBuE,cAAc,EACd/F,SAAU,gBAERX,EAAU2J,UAAU,WACtB3J,EAAUjG,MAAM,gCAElBmD,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,4BACvCsH,EAAIsG,OAAOpM,QACdyE,EAAUjG,MAAM,oBAElBmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,2BACrCsH,EAAI1D,KAGb,WACE,MAAO,OAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAInCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAKgK,OAAO1M,KAAK+O,GAAMA,EAAE9J,MAAMC,MACzCA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,QCvFP,MAAMsM,iBAAiBzM,KAI5B,aAAawC,GACX,MAAMrD,EAASqD,EAAUC,YAAY,cACrC,IAAKtD,EACH,OAEF,MAAMO,EAAS,CAAEP,OAAAA,GAEjB,GADAO,EAAOgJ,SAAWlG,EAAUoB,QAAQ,YAC/BlE,EAAOgJ,SAUZ,OANAhJ,EAAOgN,MACLlK,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,iCAClBmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2CACX,IAAIkQ,SAAS,CAAEjQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAT9C8C,EAAU4C,UAAUjG,EAAOY,OAY/B,WACE,MAAO,WAET,aACE,OAAO,EAASI,KAAKT,OAAOP,OAAO7C,OAErC,eACE,OAAO,EAAS6D,KAAKT,OAAOgN,MAAMpQ,OAIpCoG,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE2B,gBAAgBnE,KAAKT,OAAOP,OAAQgB,MACtCwC,EAAE9C,MAAMM,KAAKT,OAAOgJ,UACpB/F,EAAE2B,gBAAgBnE,KAAKT,OAAOgN,MAAOvM,MACrCwC,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,QCxCP,MAAMwM,gBAAgB3M,KAI3B,aAAawC,GAEX,MAAM9C,EAAS,GACTmE,EAAMC,EAAa,IAAI6I,QAAQ,CAAEnQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAEjE,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,WAC3BlE,EAAOR,KAaZ,OAVA2E,EAAI9C,QACF6E,EAA8BpD,EAAW,iBACzCA,EAAUjG,MAAM,wBAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,wBAClBiG,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,sCACXsH,EAAI1D,KAGb,WACE,MAAO,UAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,SACDb,KAAKY,QAAQgE,SAAS/D,GAI/B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,QC7CP,MAAMyM,yBAAyB5M,KAIpC,aAAawC,EAAWtD,GACtB,MAAMQ,EAAS,CAAER,KAAAA,GACX2E,EAAMC,EACV,IAAI8I,iBAAiB,CAAEpQ,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAmBnD,OAjBAA,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,yBAClBiG,EAAU9F,QAAUmH,EAAI1D,KACxBT,EAAOiE,OACLnB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,gCAC5CsH,EAAI9C,QACF4E,EAAYnD,IAAcA,EAAUjG,MAAM,gCAC5CmD,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,4CAClBsH,EAAIiB,UAAYX,EAAc3B,GAC9B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,yBAC5CmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,uCACXsH,EAAI1D,KAGb,WACE,MAAO,WAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAGnC,UAAU0E,SACDb,KAAKqG,SAASzB,SAAS/D,SACvBb,KAAKY,QAAQgE,SAAS/D,GAI/B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KA
AK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,OACvCwC,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAE9C,MAAMM,KAAKT,OAAOuE,SACjB9D,KAAK2E,UAAUrH,KAAKoH,GAAQA,EAAInC,MAAMC,KACzCA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,QC7CP,MAAM0M,kBAAkB7M,KAM7B,aAAawC,EAAWsK,GAAU,YAAEC,EAAW,eAAEC,IAC/C,MAAM,OAAEtN,EAAM,KAAE5B,GAASgP,EAWzB,IAVApN,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,mBAAmBuB,KACrC0E,EAAU9F,QAAUoQ,EACpBA,EAAWhJ,EAAagJ,GACpBC,GACFpR,OAAOgI,OAAOjE,EAzBpB,SAAqB8C,GACnB,MAAMyK,EAAQzK,EAAUoB,QAAQ,KAChC,OAAKqJ,EAME,CAAEA,MAAAA,EAAOzM,YAFdgC,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,6BAJT,GAsBiBiE,CAAYgC,IAEpC9C,EAAOuE,KAAOzB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,YAAYuB,KACpEgP,EAAS/K,QAAU,KACN,CAEX,GADArC,EAAO0E,MAAQ5B,EAAUoB,QAAQ,KAC7BlE,EAAO0E,MAIT,OAHA1E,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2BAA2BuB,KACtCgP,EAAS3M,KAElB,MAAMmF,EAAKJ,mBAAmBP,MAAMnC,GACpC,IAAI0K,EACJ,IAAK,MAAOhK,KAAWiK,KAASH,EAE9B,GADAE,EAAMpJ,EAAaZ,EAAOV,KAAc2K,IACpCD,EACF,MAGCA,GACH1K,EAAUjG,MAAM,kBAElB2Q,EAAI1G,SAAWlB,EACfwH,EAAS/K,QAAQoD,KAAK+H,EAAI/M,OAI9B,cACE,QAASA,KAAKT,OAAOd,QAEvB,WACE,OAAO,EAASuB,KAAKT,OAAOf,KAAKrC,OAEnC,kBACE,OAAK6D,KAAKT,OAAOc,YAGV,EAASL,KAAKT,OAAOc,YAAYlE,OAF/B,KAKX,UAAU0E,GACR,IAAK,MAAMoM,KAAUjN,KAAK4B,QACpBqL,EAAOrI,iBACFqI,EAAOrI,SAAS/D,IAM7B0B,MAAMC,GAcJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAO2N,UACpB1K,EAAE9C,MAAMM,KAAKT,OAAOd,SACpB+D,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAE9C,MAAMM,KAAKT,OAAOgN,OACpB/J,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,OApBvB,KACbA,KAAKT,OAAOc,YAGVmC,EAAEC,GAAGC,KAAK,CACfF,EAAE9C,MAAMM,KAAKT,OAAOuN,OACpBtK,EAAEC,GAAGjF,OAAOwC,KAAKT,OAAOc,YAAY7C,QACpCgF,EAAEC,GAAGpC,YACHmC,EAAElB,UAAUtB,KAAKT,OAAOc,YAAYlE,MAAO,CAAEoC,QAASyB,UANjD,GAmBPK,GACAmC,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK4B,QAAQtE,KAAK6P,GAAMA,EAAE5K,MAAMC,MAC1CA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,QCpGP,MAAMoN,iBAAiBvN,KAI5B,aAAawC,GAEX,MAAM9C,EAAS,GAEf,GADAA,EAAOR,KAAOsD,EAAUoB,QAAQ,UAC3BlE,EAAOR,KACV,OAEF,IAAI6B,EAAUmF,EAAe1D,GAC7B,IAAKzB,EAAS,CACZ,MAAM7B,EACJsD,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,sBAClBwE,EAAU,IAAI2E,KAAK,CAAElJ,OAAQgG,EAAUhG,OAAQkD,OAAQ,CAAER,KAAAA,KAEvDsD,EAAU6C,MAAM,MAClB7C,EAAUjG,MAAM,qCAElBwE,EAAQjD,KAAO,aACf4B,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,sBAClBmD,EAAOiE,OACLnB,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,gCAC5CmD,EAAOpD,MACL8K,EAAY5E,IAAcA,EAAUjG,MAAM,uBAC5CmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,oCAClB,MAAMsH,EAAM,IAAI0J,SAAS,CAAE/Q,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAErD,OADAoE,EAAaD,GAAK9C,QAAUA,EACrB8C,EAGT,WACE,MAAO,QAET,WACE,OAAO,EAAS1D,KAAKT,OAAOf,KAAKrC,OAEnC,YACE,OAAOgL,EAAWnH,KAAKT,OAAOpD,OAIhCoG,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,MACpByD,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,KAAMpB,OAAAA,IAC7C4D,EAAE9C,MAAMM,KAAKT,OAAOiE,QACpBhB,EAAE9C,MAAMM,KAAKT,OAAOpD,OACpBqG,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAAA,KC/Db,MAAMyO,qBAAqBxN,KAIhC,aAAawC,GACX,MAAMiF,EAAiBjF,EAAU/F,SAC3BoH,EAAMC,EACV,IAAI0J,aAAa,CAAEhR,OAAQgG,EAAUhG,OAAQkD,OAAQ,OAEjD,OAAEA,GAAWmE,EAUnB,GATAnE,EAAOsJ,SAAWxG,EAAUoB,QAAQ,YAC/BlE,EAAOsJ,WACVtJ,EAAO+N,MAAQjL,EAAUoB,QAAQ,UAEnClE,EAAOR,KAAOQ,EAAOsJ,SACjBxG,EAAUoB,QAAQ,UAAW,WAC7BlE,EAAO+N,MACPjL,EAAUoB,QAAQ,YAClBpB,EAAUoB,QAAQ,WAAY,UAAW,YAC
xClE,EAAOR,KAEV,YADAsD,EAAU4C,UAAUqC,GAItB,MAAM,KAAE3J,GAAS+F,EACX6J,EAA8B,YAAT5P,EACrB6P,EAAoBD,GAA+B,aAAT5P,EAC1C8P,EAAkB/J,EAAI4J,OAAkB,aAAT3P,EAErC4B,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,mCAAmCuB,iBACrD,MAAMqL,EACJvD,EAA8BpD,IAC9BA,EAAUjG,MAAM,8BAA8BuB,iBAiChD,OAhCA+F,EAAI9C,QAAU,CAACoI,GACftF,EAAIiB,UAAY,GAEZ6I,IACFxE,EAAMzJ,OAAOoD,UAAYN,EAAUoB,QAAQ,KACvCuF,EAAMzJ,OAAOoD,UACfe,EAAI9C,QAAQoE,KAAKS,EAA8BpD,IACtCkL,GACTlL,EAAUjG,MAAM,mCAAmCuB,kBAIvD4B,EAAO0E,MACL5B,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,sCAAsCuB,iBAEpD0E,EAAU6C,MAAM,OACduI,GACFlO,EAAOmO,SAAWrL,EAAUoB,QAAQ,KACpCC,EAAIiB,UAAUK,QAAQhB,EAAc3B,IACpC9C,EAAOoO,UACLtL,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,8CAElBiG,EAAUjG,MAAM,oDAIpBmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,2BAA2BuB,iBAEtC+F,EAAI1D,KAGb,WACE,OAAOA,KAAKT,OAAOR,KAAK5C,MAE1B,eACE,QAAS6D,KAAKT,OAAOsJ,SAEvB,YACE,QAAS7I,KAAKT,OAAO+N,MAGvB,UAAUzM,GACR,IAAK,MAAMlD,KAAQqC,KAAKY,cACfjD,EAAKiH,SAAS/D,GAEvB,IAAK,MAAM2H,KAAYxI,KAAK2E,gBACnB6D,EAAS5D,SAAS/D,GAK7B0B,MAAMC,GACJ,OAAOA,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOsJ,UACpBrG,EAAE9C,MAAMM,KAAKT,OAAO+N,OACpB9K,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGwD,SAC/BzD,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAKY,QAAQtD,KAAKC,GAAMA,EAAEgF,MAAMC,MAC1CA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAOmO,UACpBlL,EAAEC,GAAGC,KAAK1C,KAAK2E,UAAUrH,KAAKoH,GAAQA,EAAInC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAOoO,WACpBnL,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAQoB,KAAKpB,UC7G1B,MAAMgP,oBAAoB/N,KAI/B,aAAawC,GACX,MAAMtD,EAAOsD,EAAUoB,QAAQ,eAC/B,IAAK1E,EACH,OAGF,MAAMQ,EAAS,CAAER,KAAAA,GACjBQ,EAAOuE,KACLzB,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,mCAClB,MAAM4Q,EAAOhJ,EAAc3B,GAC3B9C,EAAO0E,MACL5B,EAAUoB,QAAQ,MAAQpB,EAAUjG,MAAM,4BAC5CmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,kCAClB,MAAMsH,EAAM,IAAIkK,YAAY,CAAEvR,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAExD,OADAoE,EAAaD,GAAKiB,UAAYqI,EACvBtJ,EAGT,WACE,MAAO,cAGT,UAAU7C,GACR,IAAK,MAAM2H,KAAYxI,KAAK2E,gBACnB6D,EAAS5D,SAAS/D,GAK7B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOR,KAAMyD,EAAEC,GAAGiG,SAAU,CAAET,KAAMjI,KAAMpB,OAAAA,IACvD4D,EAAE9C,MAAMM,KAAKT,OAAOuE,MACpBtB,EAAEC,GAAGC,KAAK1C,KAAK2E,UAAUrH,KAAKoH,GAAQA,EAAInC,MAAMC,MAChDA,EAAE9C,MAAMM,KAAKT,OAAO0E,OACpBzB,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAAA,KC3BpB,SAASiP,EAAcxL,GACrB,MAAM+F,EAAU/F,EAAUoB,QAAQ,UAClC,IAAK2E,EAAS,OAKd,OAHEO,UAAUnE,MAAMnC,EAAW,CAAE+F,QAAAA,KAC7BD,UAAU3D,MAAMnC,EAAW,CAAE+F,QAAAA,KAC7B/F,EAAUjG,MAAM,4BAIb,MAAM0R,kBAAkBpB,UAI7B,aAAarK,EAAWtD,GAAM,QAAEN,EAAU,MAAS,IACjD,MAAMc,EAAS,CAAEd,QAAAA,EAASM,KAAAA,GAC1B,OAAO2N,UAAUlI,MACfnC,EACA,IAAIyL,UAAU,CAAEzR,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC1C,CACEqN,aAAcnO,EACdoO,eAAgB,CACd,CAACO,SAAS5I,OACV,CAACoJ,YAAYpJ,OACb,CAACqJ,GACD,CAACtE,GACD,CAAC8D,aAAa7I,OACd,CAACmE,UAAUnE,OACX,CAAC2D,UAAU3D,UAMnB,WACE,MAAO,YAGT,UAAU3D,GAER,SADOb,KAAKqG,SAASzB,SAAS/D,IAE3Bb,KAAKvB,SACNuB,KAAKqG,SAAS0H,OAAOtJ,GAA6B,YAAjBA,EAAQjG,OACzC,CACA,MAAMhC,EAAU,oTAKViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,kBACAxD,EACA,CACEG,QAASiN,EAAwB5J,QAIvC,MAAMgO,EAAkBhO,KAAKqG,SAASI,QACnChC,GAA6B,gBAAjBA,EAAQjG,OAEvB,IAAK,MAAMsB,KAAekO,EAAiB,CACzC,MAAMxR,EAAU,oRAIViD,EACJK,EAAYP,OAAOf,KACnBwB,KACA,qBACAxD,EACA,CACEG,QAASsR,EAAmBjO,KAAMF,KAMxC,GADiBE,KAAKqG,SAASxE,MAAM4C,GAA6B,WAAjBA,EAAQjG,OAC3C,CACZ,MAAM0P,EAAmBlO,KAAKqG,SAASI,QACpChC,GAA6B,0BAAjBA,EAAQjG,OAEvB,IAAK,MAAM2P,KAASD,EAAkB,CACpC,MAAM1R,EAAU,uEACViD,EACJ0O,EAAM5O,OAAOf,KACbwB,KACA,0BACAxD
,GAIJ,MAAM4R,EAAepO,KAAK4B,QAAQ6E,QAC/BwG,GAA2B,gBAAhBA,EAAOtP,OAErB,IAAK,MAAMwQ,KAASC,EAAc,CAChC,MAAM5R,EAAU,kEACViD,EACJ0O,EAAM5O,OAAOR,KACbiB,KACA,0BACAxD,UAKC2F,MAAMyC,SAAS/D,GACjBb,KAAKvB,gBC7HP,UAA0CoC,EAAMwN,GACrD,MAAMC,EA8CN,SAA6BD,GAC3B,MAAME,EAAMC,EAAcH,GAC1B,MAAO,CACLI,QAAS,IAAIC,IACXH,EAAI9H,QAAQkI,GAAsB,WAAfA,EAAGvG,UAAsB9K,KAAKqR,GAAOA,EAAGnQ,QAE7DoQ,WAAY,IAAIF,IACdH,EAAI9H,QAAQkI,GAAsB,WAAfA,EAAGvG,UAAsB9K,KAAKqR,GAAOA,EAAGnQ,SArDjDqQ,CAAoBR,GAC9BS,EAAWjO,EAAKiO,SAASnT,IAAI0S,EAAE7P,OAAS,GACxCuQ,EAASlO,EAAKmO,SAASrT,IAAI0S,EAAE7P,OAAS,GAC5C,IAAK,MAAMyQ,IAAO,IAAIH,KAAaC,GAAS,CAC1C,MAAMG,EAAYV,EAAcS,GAC1BR,EAAUS,EAAUzI,QAAQoB,GAAoB,WAAdA,EAAEO,UACpCwG,EAAaM,EAAUzI,QAAQoB,GAAoB,WAAdA,EAAEO,gBACtC+G,EAAeV,EAASH,EAAQG,QAASQ,EAAKZ,SAC9Cc,EAAeP,EAAYN,EAAQM,WAAYK,EAAKZ,GAC3DI,EAAQW,SAAST,GAAOL,EAAQG,QAAQY,IAAIV,EAAGnQ,QAC/CoQ,EAAWQ,SAAST,GAAOL,EAAQM,WAAWS,IAAIV,EAAGnQ,QASvD,SAAU2Q,EAAeD,EAAWI,EAAWL,EAAKlQ,GAClD,IAAK,MAAMwQ,KAAYL,EAAW,CAChC,MAAM,KAAE1Q,GAAS+Q,EACjB,GAAI/Q,GAAQ8Q,EAAUlO,IAAI5C,GAAO,CAC/B,MACMhC,EAAU,OADsB,WAArB+S,EAASnH,QAAuB,UAAY,gBAChB5J,uDAA0DO,EAAKP,6CACtGiB,EACJ8P,EAAShQ,OAAOf,KAChByQ,EACA,oBACAzS,KAUR,SAASgS,EAAcH,GACrB,OAAOA,EAAEzM,QAAQ6E,QAAO,EAAG9I,KAAAA,KAAoB,cAATA,KDqF7B6R,CAAgC3O,EAAMb,QAKnD,SAASiO,EAAmBwB,EAAcC,GAExC,OADAD,EAAe9L,EAAa8L,GACrB,KACL,MAAME,EAAcnG,EAClBiG,EAAapJ,SAAS9G,OAAOuE,KAAKtG,QAE9BoS,EAAeH,EAAa7N,QAAQhE,OACtC4L,EAAmBzB,EAAc0H,EAAa7N,QAAQ,IAAIpE,QVqD3D,SAA8BqS,GACnC,MAAMF,EAAcnG,EAAmBqG,GACjCC,EAAWH,EAAYpH,SAAS,MAAQ,KAAO,KACrD,OAAOoH,EAAcG,EUvDfC,CAAqBJ,GACnBK,EAAgBpC,YAAYpJ,MAChC,IAAI0D,UAAU,KAAK0H,oBAErBI,EAAc3J,SAAW,IAAItB,mBAAmB,CAC9C1I,OAAQoT,EAAapT,OACrBkD,OAAQ,KAEVoE,EAAaqM,GAAerL,UAAY+K,EAAmB/K,UAE3D,MAAMsL,EV6FH,SAAuBC,EAAOC,GACnC,MAAMvQ,EAAQsQ,EAAMnT,QAAQqT,UAAUC,UAAUF,GAChD,OAAe,IAAXvQ,EACKA,EAEFsQ,EAAMtS,OAASgC,EAAQ,EUlGN0Q,CACpBb,EAAa7N,SACZuL,GAAiB,gBAAXA,EAAExP,OAEX8R,EAAa7N,QAAQ2O,OAAON,EAAgB,EAAG,EAAGD,GAElD,MAAM,MAAE/L,GAAUwL,EAAalQ,OAC1B0E,EAAMzG,OAAO+K,SAAS,QACzBtE,EAAMzG,QAAU,KAAKmS,KAGvB,MAAM,SAAEtJ,GAAaoJ,EACf7P,EAAQyG,EAASuB,QAAQ8H,GACzBc,EAAUnK,EAASkK,OAAO3Q,EAAO,GAClCyG,EAASzI,OAEHyI,EAASzI,SAAWgC,EAC7ByG,EAASzG,EAAQ,GAAGL,OAAOoD,eAAYvC,EAC7BiG,EAASzG,GAAOL,OAAOf,KAAKhB,OAAOiT,SAC7CpK,EAASzG,GAAOL,OAAOf,KAAKhB,OAASgT,EAAQ,GAAGjR,OAAOf,KAAKhB,QAJ5D6I,EAAS9G,OAAOuE,KAAOuC,EAAS9G,OAAO0E,WAAQ7D,GEpK9C,MAAMsQ,cAAchE,UASzB,aAAarK,EAAWtD,GAAM,QAAEN,GAAY,IAC1C,MAAMc,EAAS,CAAEd,QAAAA,EAASM,KAAAA,GAE1B,GADAQ,EAAOgN,MAAQlK,EAAUoB,QAAQ,SAC5BlE,EAAOgN,MAGZ,OAAOG,UAAUlI,MACfnC,EACA,IAAIqO,MAAM,CAAErU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IACtC,CACEsN,eAAgB,CACd,CAACO,SAAS5I,OACV,CAAC+E,GACD,CAACZ,UAAUnE,MAAO,CAAEoE,WAAW,IAC/B,CAACT,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,mBC3BJ,MAAMsI,cAAc9Q,KAIzB,aAAawC,GAEX,MAAM9C,EAAS,GACTmE,EAAMC,EAAa,IAAIgN,MAAM,CAAEtU,OAAQgG,EAAUhG,OAAQkD,OAAAA,KAe/D,OAdAmE,EAAI2C,SAAWtB,mBAAmBP,MAAMnC,GACxC9C,EAAOwC,SAAWM,EAAUoB,QAAQ,YACpCC,EAAI9C,QACF6E,EAA8BpD,EAAW,oBACzCA,EAAUjG,MAAM,kCAClBmD,EAAOf,KACL6D,EAAUC,YAAY,eACtBD,EAAUjG,MAAM,kCAClBsH,EAAIgE,QAAUV,QAAQxC,MAAMnC,GACxB9C,EAAOwC,UAAY2B,EAAIgE,SACzBrF,EAAUjG,MAAM,2CAClBmD,EAAO+I,YACLjG,EAAUoB,QAAQ,MAClBpB,EAAUjG,MAAM,gDACXsH,EAAI1D,KAGb,WACE,MAAO,QAET,WACE,OAAO,EAASA,KAAKT,OAAOf,KAAKrC,OAEnC,eACE,QAAS6D,KAAKT,OAAOwC,SAGvB,UAAUlB,SACDb,KAAKY,QAAQgE,SAAS/D,GAI/B0B,MAAMC,GACJ,MAAM,OAAE5D,GAAWoB,KACnB,OAAOwC,EAAEC,GAAGpH,WACVmH,EAAEC,GAAGC,KAAK,CACR1C,KAAKqG,SAAS9D,MAAMC,GACpBA,EAAE9C,MAAMM,KAAKT,OAAOwC,UACpBS,EAAEC,GAAG9E,KAAKqC,KAAKY,QAAQ2B,MAAMC,IAC7BA,EAAEwF,WAAWhI,KAAKT,OAAOf,KAAM,CAAEyJ,KAAMjI,KAAMpB,OAAAA,IAC7CoB,KAAK0H,QAAU1H,KAAK0H,QAAQnF,MAAMC,GAAK,
GACvCA,EAAE9C,MAAMM,KAAKT,OAAO+I,eAEtB,CAAEL,KAAMjI,KAAMpB,OAAAA,KCzDb,MAAMgS,mBAAmBlE,UAM9B,aAAarK,GAAW,QAAE5D,GAAY,IACpC,MAAMc,EAAS,CAAEd,QAAAA,GAEjB,GADAc,EAAOR,KAAOsD,EAAUoB,QAAQ,cAC3BlE,EAAOR,KAGZ,OAAO2N,UAAUlI,MACfnC,EACA,IAAIuO,WAAW,CAAEvU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC3C,CACEqN,aAAcnO,EACdoO,eAAgB,CAAC,CAAC8D,MAAMnM,UAK9B,WACE,MAAO,cCnBJ,MAAMqM,kBAAkBnE,UAM7B,aAAarK,GAAW,QAAE5D,GAAY,IACpC,MAAMc,EAAS,CAAEd,QAAAA,GAEjB,GADAc,EAAOR,KAAOsD,EAAUoB,QAAQ,aAC3BlE,EAAOR,KAGZ,OAAO2N,UAAUlI,MACfnC,EACA,IAAIwO,UAAU,CAAExU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAC1C,CACEsN,eAAgB,CACd,CAAClE,UAAUnE,MAAO,CAAEoE,WAAW,EAAMC,UAAU,IAC/C,CAACuE,SAAS5I,OACV,CAAC2D,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,YAGT,UAAUxH,GACR,IACGb,KAAKvB,SACNuB,KAAKqG,SAAS0H,OAAOtJ,GAA6B,YAAjBA,EAAQjG,OACzC,CACA,MAAMhC,EAAU,gTAKViD,EACJO,KAAKT,OAAOf,KACZwB,KACA,kBACAxD,EACA,CACEG,QAASiN,EAAwB5J,cAIhCmC,MAAMyC,SAAS/D,ICpDnB,MAAMiQ,0BAA0BpE,UAIrC,aAAarK,EAAW6K,GAAU,QAAEzO,EAAU,MAAS,IACrD,MAAMc,EAAS,CAAE2N,SAAAA,GAEjB,GADA3N,EAAOR,KAAOsD,EAAUoB,QAAQ,aAC3BlE,EAAOR,KAGZ,OAAO2N,UAAUlI,MACfnC,EACA,IAAIyO,kBAAkB,CAAEzU,OAAQgG,EAAUhG,OAAQkD,OAAAA,IAClD,CACEqN,aAAcnO,EACdoO,eAAgB,CACd,CAACO,SAAS5I,OACV,CAAC2D,UAAU3D,MAAO,CAAE6D,SAAS,OAMrC,WACE,MAAO,sBCRX,SAAS0I,EAAc1O,EAAW1C,GAChC,MAAMtD,EAASgG,EAAUhG,OAEzB,SAASD,EAAMqN,GACbpH,EAAUjG,MAAMqN,GAGlB,SAAShG,KAAWwI,GAClB,OAAO5J,EAAUoB,WAAWwI,GAY9B,SAAS+E,EAAWC,GAClB,MAAMlS,EAAO0E,EAAQ,aACrB,IAAK1E,EAAM,OAKX,OAHE2R,MAAMlM,MAAMnC,EAAWtD,EAAMkS,IAC7BnD,UAAUtJ,MAAMnC,EAAWtD,EAAMkS,IACjC7U,EAAM,gCAeV,SAASf,IACP,GAAIsE,EAAQuR,YACV,IAAK,MAAMC,KAAcxR,EAAQuR,YAAa,CAC5C,MAAMjS,EAASkS,EAAW9O,GAC1B,GAAIpD,EACF,OAAOA,EAKb,OAxCF,WACE,MAAMiO,EAAWzJ,EAAQ,YACzB,GAAKyJ,EACL,OAAI7K,EAAU6C,MAAM,aACX4L,kBAAkBtM,MAAMnC,EAAW6K,GAErCT,iBAAiBjI,MAAMnC,EAAW6K,GAmCvCA,IACA8D,KAvBJ,WACE,MAAMvS,EAAUgF,EAAQ,WACxB,GAAKhF,EACL,OACEmS,WAAWpM,MAAMnC,EAAW,CAAE5D,QAAAA,KAC9BuS,EAAW,CAAEvS,QAAAA,KACboS,UAAUrM,MAAMnC,EAAW,CAAE5D,QAAAA,KAC7BrC,EAAM,qCAiBNqC,IACAmS,WAAWpM,MAAMnC,IACjB+J,KAAK5H,MAAMnC,IACXmK,QAAQhI,MAAMnC,IACdiK,SAAS9H,MAAMnC,IACfwO,UAAUrM,MAAMnC,GAuBpB,MAAM+O,EAnBN,WACE,IAAK/U,EAAOuB,OAAQ,MAAO,GAC3B,MAAMiD,EAAO,GACb,OAAa,CACX,MAAMsE,EAAKJ,mBAAmBP,MAAMnC,GAC9BrB,EAAM3F,IACZ,IAAK2F,EAAK,CACJmE,EAAGvH,QAAQxB,EAAM,6BACrB,MAEFuH,EAAa3C,GAAKqF,SAAWlB,EAC7BtE,EAAKmE,KAAKhE,GAEZ,MAAMqQ,EAAMzO,IAAI4B,MAAMnC,GAItB,OAHI1C,EAAQ2R,UACVzQ,EAAKmE,KAAKqM,GAELxQ,EAEG0Q,GAEZ,OADIlP,EAAU/F,SAAWD,EAAOuB,QAAQxB,EAAM,uBACvCgV,EAWF,SAAS5M,EAAMiF,EAAK9J,EAAU,IACnC,MAAM0C,EAAY,IAAI6F,UAAUuB,GAKhC,YAJkC,IAAvB9J,EAAQN,aAEjBgD,EAAUhG,OAAOmC,KAAOmB,EAAQN,YAE3B0R,EAAc1O,EAAW1C,GCzHlC,SAAS6R,EAAK9M,GACZ,OAAOA,EAGT,MAAM+M,EAAY,CAChB/O,KAAOuG,GAAUA,EAAMxL,KAAK,IAC5BD,OAAQgU,EACRhT,KAAMgT,EACNlQ,UAAWkQ,EACX7T,KAAM6T,EACNvL,QAASuL,EACT9I,SAAU8I,EACVnR,YAAamR,EACbnW,WAAYmW,EACZ3M,kBAAmB2M,EACnB1M,2BAA4B0M,GAGvB,MAAME,OACX5R,YAAY2C,GACVzC,KAAKyC,GAAKjH,OAAOgI,OAAO,GAAIiO,EAAWhP,GAUzCnB,UAAUqQ,GAAK,UAAE7K,EAAS,QAAEvI,IAI1B,OAHKuI,IACHA,EAAY6K,EAAI7I,WAAW,KAAO6I,EAAI5U,MAAM,GAAK4U,GAE5C3R,KAAKyC,GAAGnB,UAAUqQ,EAAK7K,EAAWvI,GAS3CmB,MAAMnC,EAAGqU,EAAUJ,KAASxE,GAC1B,IAAKzP,EACH,MAAO,GAET,MAAMpB,EAAQyV,EAAQrU,EAAEpB,SAAU6Q,GAClC,OAAOhN,KAAKyC,GAAGC,KAAK,CAAC1C,KAAKyC,GAAGjF,OAAOD,EAAEC,QAASrB,IAGjDgI,gBAAgB5G,EAAGgB,GACjB,OAAOyB,KAAKN,MAAMnC,EAAGyC,KAAKsB,UAAUuQ,KAAK7R,MAAO,CAAEzB,QAAAA,IAGpDyJ,WAAWzK,EAAGmH,GACZ,OAAO1E,KAAKN,MAAMnC,EAAGyC,KAAKyC,GAAGjE,KAAMkG,GAGrCL,WAAWyN,EAAIvT,GACb,OAAOyB,KAAKyC,GAAGC,KAAK,CAClB1C,KAAKmE,gBAAgB2N,EAAGvS,OAAOpD,MAAOoC,GACtCyB,KAAKN,MAAMoS,EAAGvS,OAAOoD,cAKpB,SAASJ,EAAMwP,GAAON,UAAWhP,EAAKgP,GAAc,IACzDhP,EAAKjH,OAAOgI,OAAO,GAAIiO
,EAAWhP,GAElC,MAAMD,EAAI,IAAIkP,OAAOjP,GAErB,OAAOA,EAAGC,KAAKqP,EAAIzU,KAAK0U,GAAOA,EAAGzP,MAAMC,MCtE1C,SAASyP,EAAYC,EAAKjR,GACxB,MAAM3D,EAAM,IAAI6F,IACVoF,EAAW2J,EAAIzL,QAAQzF,GAAqB,aAAbA,EAAIrD,OACzC,IAAK,MAAMwU,KAAW5J,EAAU,CAC9B,MAAMgE,EAAQtL,EAAOtF,IAAIwW,EAAQ5J,UACjC,IAAKgE,EACH,SAEF,MAAM2D,EAAQ5S,EAAI3B,IAAIwW,EAAQnT,QAC1BkR,EACFA,EAAMlL,KAAKuH,GAEXjP,EAAI+D,IAAI8Q,EAAQnT,OAAQ,CAACuN,IAG7B,OAAOjP,EAoDT,SAAU8U,EAAiBL,GACzB,MAAMlR,EA/CR,SAA0BqR,GACxB,MAAMjR,EAAS,IAAIkC,IACbkP,EAAa,IAAI3D,IACjBI,EAAW,IAAI3L,IACrB,IAAK,MAAMnC,KAAOkR,EAChB,GAAIlR,EAAIvC,QAAR,CACE,MAAMyR,EAAQpB,EAASnT,IAAIqF,EAAIxC,MAC3B0R,EACFA,EAAMlL,KAAKhE,GAEX8N,EAASzN,IAAIL,EAAIxC,KAAM,CAACwC,SAIvBA,EAAIxC,OAGJyC,EAAOG,IAAIJ,EAAIxC,MAGlB6T,EAAWhD,IAAIrO,GAFfC,EAAOI,IAAIL,EAAIxC,KAAMwC,IAKzB,MAAO,CACLkR,IAAAA,EACAjR,OAAAA,EACA6N,SAAAA,EACAuD,WAAAA,EACArD,SAAUiD,EAAYC,EAAKjR,GAC3BE,MAAO,CACLD,0BAA2B,IAAIoR,QAC/B5Q,gCAAiC,IAAI4Q,UAgB5BC,CAAiBR,GAC9B,IAAK,MAAM/Q,KAAOH,EAAKqR,IACjBlR,EAAI4D,iBACC5D,EAAI4D,SAAS/D,UAd1B,WAA+B,OAAEI,EAAM,WAAEoR,IACvC,IAAK,MAAMG,KAAOH,EAAY,CAC5B,MAAM,KAAE7T,GAASgU,EACXhW,EAAU,aAAagC,eAC3ByC,EAAOtF,IAAI6C,GAAMb,+BAEb,EAAM6U,EAAIjT,OAAOf,KAAMgU,EAAK,eAAgBhW,IAW7CiW,CAAqB5R,GAevB,SAAS+D,EAASmN,GACvB,MAAO,IAAIK,GAZIlC,EAYqB6B,EAXhC7B,EAAMwC,KACDxC,EAAMwC,OAER,GAAG5H,UAAUoF,MAJtB,IAAiBA,E","sources":["webpack://WebIDL2/webpack/universalModuleDefinition","webpack://WebIDL2/webpack/bootstrap","webpack://WebIDL2/webpack/runtime/define property getters","webpack://WebIDL2/webpack/runtime/hasOwnProperty shorthand","webpack://WebIDL2/webpack/runtime/make namespace object","webpack://WebIDL2/./lib/error.js","webpack://WebIDL2/./lib/productions/base.js","webpack://WebIDL2/./lib/validators/helpers.js","webpack://WebIDL2/./lib/productions/array-base.js","webpack://WebIDL2/./lib/productions/token.js","webpack://WebIDL2/./lib/productions/extended-attributes.js","webpack://WebIDL2/./lib/productions/type.js","webpack://WebIDL2/./lib/productions/default.js","webpack://WebIDL2/./lib/productions/argument.js","webpack://WebIDL2/./lib/productions/operation.js","webpack://WebIDL2/./lib/productions/attribute.js","webpack://WebIDL2/./lib/productions/helpers.js","webpack://WebIDL2/./lib/tokeniser.js","webpack://WebIDL2/./lib/productions/enum.js","webpack://WebIDL2/./lib/productions/includes.js","webpack://WebIDL2/./lib/productions/typedef.js","webpack://WebIDL2/./lib/productions/callback.js","webpack://WebIDL2/./lib/productions/container.js","webpack://WebIDL2/./lib/productions/constant.js","webpack://WebIDL2/./lib/productions/iterable.js","webpack://WebIDL2/./lib/productions/constructor.js","webpack://WebIDL2/./lib/productions/interface.js","webpack://WebIDL2/./lib/validators/interface.js","webpack://WebIDL2/./lib/productions/mixin.js","webpack://WebIDL2/./lib/productions/field.js","webpack://WebIDL2/./lib/productions/dictionary.js","webpack://WebIDL2/./lib/productions/namespace.js","webpack://WebIDL2/./lib/productions/callback-interface.js","webpack://WebIDL2/./lib/webidl2.js","webpack://WebIDL2/./lib/writer.js","webpack://WebIDL2/./lib/validator.js"],"sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"WebIDL2\"] = factory();\n\telse\n\t\troot[\"WebIDL2\"] = factory();\n})(globalThis, () => {\nreturn ","// The require scope\nvar __webpack_require__ = {};\n\n","// 
define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","/**\n * @param {string} text\n */\nfunction lastLine(text) {\n const splitted = text.split(\"\\n\");\n return splitted[splitted.length - 1];\n}\n\nfunction appendIfExist(base, target) {\n let result = base;\n if (target) {\n result += ` ${target}`;\n }\n return result;\n}\n\nfunction contextAsText(node) {\n const hierarchy = [node];\n while (node && node.parent) {\n const { parent } = node;\n hierarchy.unshift(parent);\n node = parent;\n }\n return hierarchy.map((n) => appendIfExist(n.type, n.name)).join(\" -> \");\n}\n\n/**\n * @typedef {object} WebIDL2ErrorOptions\n * @property {\"error\" | \"warning\"} [level]\n * @property {Function} [autofix]\n * @property {string} [ruleName]\n *\n * @typedef {ReturnType<typeof error>} WebIDLErrorData\n *\n * @param {string} message error message\n * @param {*} position\n * @param {*} current\n * @param {*} message\n * @param {\"Syntax\" | \"Validation\"} kind error type\n * @param {WebIDL2ErrorOptions=} options\n */\nfunction error(\n source,\n position,\n current,\n message,\n kind,\n { level = \"error\", autofix, ruleName } = {}\n) {\n /**\n * @param {number} count\n */\n function sliceTokens(count) {\n return count > 0\n ? source.slice(position, position + count)\n : source.slice(Math.max(position + count, 0), position);\n }\n\n /**\n * @param {import(\"./tokeniser.js\").Token[]} inputs\n * @param {object} [options]\n * @param {boolean} [options.precedes]\n * @returns\n */\n function tokensToText(inputs, { precedes } = {}) {\n const text = inputs.map((t) => t.trivia + t.value).join(\"\");\n const nextToken = source[position];\n if (nextToken.type === \"eof\") {\n return text;\n }\n if (precedes) {\n return text + nextToken.trivia;\n }\n return text.slice(nextToken.trivia.length);\n }\n\n const maxTokens = 5; // arbitrary but works well enough\n const line =\n source[position].type !== \"eof\"\n ? source[position].line\n : source.length > 1\n ? source[position - 1].line\n : 1;\n\n const precedingLastLine = lastLine(\n tokensToText(sliceTokens(-maxTokens), { precedes: true })\n );\n\n const subsequentTokens = sliceTokens(maxTokens);\n const subsequentText = tokensToText(subsequentTokens);\n const subsequentFirstLine = subsequentText.split(\"\\n\")[0];\n\n const spaced = \" \".repeat(precedingLastLine.length) + \"^\";\n const sourceContext = precedingLastLine + subsequentFirstLine + \"\\n\" + spaced;\n\n const contextType = kind === \"Syntax\" ? \"since\" : \"inside\";\n const inSourceName = source.name ? ` in ${source.name}` : \"\";\n const grammaticalContext =\n current && current.name\n ? `, ${contextType} \\`${current.partial ? 
\"partial \" : \"\"}${contextAsText(\n current\n )}\\``\n : \"\";\n const context = `${kind} error at line ${line}${inSourceName}${grammaticalContext}:\\n${sourceContext}`;\n return {\n message: `${context} ${message}`,\n bareMessage: message,\n context,\n line,\n sourceName: source.name,\n level,\n ruleName,\n autofix,\n input: subsequentText,\n tokens: subsequentTokens,\n };\n}\n\n/**\n * @param {string} message error message\n */\nexport function syntaxError(source, position, current, message) {\n return error(source, position, current, message, \"Syntax\");\n}\n\n/**\n * @param {string} message error message\n * @param {WebIDL2ErrorOptions} [options]\n */\nexport function validationError(\n token,\n current,\n ruleName,\n message,\n options = {}\n) {\n options.ruleName = ruleName;\n return error(\n current.source,\n token.index,\n current,\n message,\n \"Validation\",\n options\n );\n}\n","export class Base {\n /**\n * @param {object} initializer\n * @param {Base[\"source\"]} initializer.source\n * @param {Base[\"tokens\"]} initializer.tokens\n */\n constructor({ source, tokens }) {\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens, writable: true },\n parent: { value: null, writable: true },\n this: { value: this }, // useful when escaping from proxy\n });\n }\n\n toJSON() {\n const json = { type: undefined, name: undefined, inheritance: undefined };\n let proto = this;\n while (proto !== Object.prototype) {\n const descMap = Object.getOwnPropertyDescriptors(proto);\n for (const [key, value] of Object.entries(descMap)) {\n if (value.enumerable || value.get) {\n // @ts-ignore - allow indexing here\n json[key] = this[key];\n }\n }\n proto = Object.getPrototypeOf(proto);\n }\n return json;\n }\n}\n","/**\n * @typedef {import(\"../productions/dictionary.js\").Dictionary} Dictionary\n *\n * @param {*} idlType\n * @param {import(\"../validator.js\").Definitions} defs\n * @param {object} [options]\n * @param {boolean} [options.useNullableInner] use when the input idlType is nullable and you want to use its inner type\n * @return {{ reference: *, dictionary: Dictionary }} the type reference that ultimately includes dictionary.\n */\nexport function idlTypeIncludesDictionary(\n idlType,\n defs,\n { useNullableInner } = {}\n) {\n if (!idlType.union) {\n const def = defs.unique.get(idlType.idlType);\n if (!def) {\n return;\n }\n if (def.type === \"typedef\") {\n const { typedefIncludesDictionary } = defs.cache;\n if (typedefIncludesDictionary.has(def)) {\n // Note that this also halts when it met indeterminate state\n // to prevent infinite recursion\n return typedefIncludesDictionary.get(def);\n }\n defs.cache.typedefIncludesDictionary.set(def, undefined); // indeterminate state\n const result = idlTypeIncludesDictionary(def.idlType, defs);\n defs.cache.typedefIncludesDictionary.set(def, result);\n if (result) {\n return {\n reference: idlType,\n dictionary: result.dictionary,\n };\n }\n }\n if (def.type === \"dictionary\" && (useNullableInner || !idlType.nullable)) {\n return {\n reference: idlType,\n dictionary: def,\n };\n }\n }\n for (const subtype of idlType.subtype) {\n const result = idlTypeIncludesDictionary(subtype, defs);\n if (result) {\n if (subtype.union) {\n return result;\n }\n return {\n reference: subtype,\n dictionary: result.dictionary,\n };\n }\n }\n}\n\n/**\n * @param {*} dict dictionary type\n * @param {import(\"../validator.js\").Definitions} defs\n * @return {boolean}\n */\nexport function dictionaryIncludesRequiredField(dict, defs) 
{\n if (defs.cache.dictionaryIncludesRequiredField.has(dict)) {\n return defs.cache.dictionaryIncludesRequiredField.get(dict);\n }\n // Set cached result to indeterminate to short-circuit circular definitions.\n // The final result will be updated to true or false.\n defs.cache.dictionaryIncludesRequiredField.set(dict, undefined);\n let result = dict.members.some((field) => field.required);\n if (!result && dict.inheritance) {\n const superdict = defs.unique.get(dict.inheritance);\n if (!superdict) {\n // Assume required members in the supertype if it is unknown.\n result = true;\n } else if (dictionaryIncludesRequiredField(superdict, defs)) {\n result = true;\n }\n }\n defs.cache.dictionaryIncludesRequiredField.set(dict, result);\n return result;\n}\n","export class ArrayBase extends Array {\n constructor({ source, tokens }) {\n super();\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens },\n parent: { value: null, writable: true },\n });\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class WrappedToken extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} type\n */\n static parser(tokeniser, type) {\n return () => {\n const value = tokeniser.consumeKind(type);\n if (value) {\n return new WrappedToken({\n source: tokeniser.source,\n tokens: { value },\n });\n }\n };\n }\n\n get value() {\n return unescape(this.tokens.value.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.wrap([\n w.token(this.tokens.value),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\nexport class Eof extends WrappedToken {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consumeKind(\"eof\");\n if (value) {\n return new Eof({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"eof\";\n }\n}\n","import { Base } from \"./base.js\";\nimport { ArrayBase } from \"./array-base.js\";\nimport { WrappedToken } from \"./token.js\";\nimport { list, argument_list, autoParenter, unescape } from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} tokenName\n */\nfunction tokens(tokeniser, tokenName) {\n return list(tokeniser, {\n parser: WrappedToken.parser(tokeniser, tokenName),\n listName: tokenName + \" list\",\n });\n}\n\nconst extAttrValueSyntax = [\"identifier\", \"decimal\", \"integer\", \"string\"];\n\nconst shouldBeLegacyPrefixed = [\n \"NoInterfaceObject\",\n \"LenientSetter\",\n \"LenientThis\",\n \"TreatNonObjectAsNull\",\n \"Unforgeable\",\n];\n\nconst renamedLegacies = new Map([\n .../** @type {[string, string][]} */ (\n shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`])\n ),\n [\"NamedConstructor\", \"LegacyFactoryFunction\"],\n [\"OverrideBuiltins\", \"LegacyOverrideBuiltIns\"],\n [\"TreatNullAs\", \"LegacyNullToEmptyString\"],\n]);\n\n/**\n * This will allow a set of extended attribute values to be parsed.\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction extAttrListItems(tokeniser) {\n for (const syntax of extAttrValueSyntax) {\n const toks = tokens(tokeniser, syntax);\n if (toks.length) {\n return toks;\n }\n }\n tokeniser.error(\n `Expected identifiers, strings, decimals, or integers but none found`\n );\n}\n\nexport class ExtendedAttributeParameters extends Base {\n /**\n * @param 
{import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = { assign: tokeniser.consume(\"=\") };\n const ret = autoParenter(\n new ExtendedAttributeParameters({ source: tokeniser.source, tokens })\n );\n ret.list = [];\n if (tokens.assign) {\n tokens.asterisk = tokeniser.consume(\"*\");\n if (tokens.asterisk) {\n return ret.this;\n }\n tokens.secondaryName = tokeniser.consumeKind(...extAttrValueSyntax);\n }\n tokens.open = tokeniser.consume(\"(\");\n if (tokens.open) {\n ret.list = ret.rhsIsList\n ? // [Exposed=(Window,Worker)]\n extAttrListItems(tokeniser)\n : // [LegacyFactoryFunction=Audio(DOMString src)] or [Constructor(DOMString str)]\n argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") ||\n tokeniser.error(\"Unexpected token in extended attribute argument list\");\n } else if (tokens.assign && !tokens.secondaryName) {\n tokeniser.error(\"No right hand side to extended attribute assignment\");\n }\n return ret.this;\n }\n\n get rhsIsList() {\n return (\n this.tokens.assign && !this.tokens.asterisk && !this.tokens.secondaryName\n );\n }\n\n get rhsType() {\n if (this.rhsIsList) {\n return this.list[0].tokens.value.type + \"-list\";\n }\n if (this.tokens.asterisk) {\n return \"*\";\n }\n if (this.tokens.secondaryName) {\n return this.tokens.secondaryName.type;\n }\n return null;\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { rhsType } = this;\n return w.ts.wrap([\n w.token(this.tokens.assign),\n w.token(this.tokens.asterisk),\n w.reference_token(this.tokens.secondaryName, this.parent),\n w.token(this.tokens.open),\n ...this.list.map((p) => {\n return rhsType === \"identifier-list\"\n ? w.identifier(p, this.parent)\n : p.write(w);\n }),\n w.token(this.tokens.close),\n ]);\n }\n}\n\nexport class SimpleExtendedAttribute extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const name = tokeniser.consumeKind(\"identifier\");\n if (name) {\n return new SimpleExtendedAttribute({\n source: tokeniser.source,\n tokens: { name },\n params: ExtendedAttributeParameters.parse(tokeniser),\n });\n }\n }\n\n constructor({ source, tokens, params }) {\n super({ source, tokens });\n params.parent = this;\n Object.defineProperty(this, \"params\", { value: params });\n }\n\n get type() {\n return \"extended-attribute\";\n }\n get name() {\n return this.tokens.name.value;\n }\n get rhs() {\n const { rhsType: type, tokens, list } = this.params;\n if (!type) {\n return null;\n }\n const value = this.params.rhsIsList\n ? list\n : this.params.tokens.secondaryName\n ? unescape(tokens.secondaryName.value)\n : null;\n return { type, value };\n }\n get arguments() {\n const { rhsIsList, list } = this.params;\n if (!list || rhsIsList) {\n return [];\n }\n return list;\n }\n\n *validate(defs) {\n const { name } = this;\n if (name === \"LegacyNoInterfaceObject\") {\n const message = `\\`[LegacyNoInterfaceObject]\\` extended attribute is an \\\nundesirable feature that may be removed from Web IDL in the future. Refer to the \\\n[relevant upstream PR](https://github.com/whatwg/webidl/pull/609) for more \\\ninformation.`;\n yield validationError(\n this.tokens.name,\n this,\n \"no-nointerfaceobject\",\n message,\n { level: \"warning\" }\n );\n } else if (renamedLegacies.has(name)) {\n const message = `\\`[${name}]\\` extended attribute is a legacy feature \\\nthat is now renamed to \\`[${renamedLegacies.get(name)}]\\`. 
Refer to the \\\n[relevant upstream PR](https://github.com/whatwg/webidl/pull/870) for more \\\ninformation.`;\n yield validationError(this.tokens.name, this, \"renamed-legacy\", message, {\n level: \"warning\",\n autofix: renameLegacyExtendedAttribute(this),\n });\n }\n for (const arg of this.arguments) {\n yield* arg.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.wrap([\n w.ts.trivia(this.tokens.name.trivia),\n w.ts.extendedAttribute(\n w.ts.wrap([\n w.ts.extendedAttributeReference(this.name),\n this.params.write(w),\n ])\n ),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {SimpleExtendedAttribute} extAttr\n */\nfunction renameLegacyExtendedAttribute(extAttr) {\n return () => {\n const { name } = extAttr;\n extAttr.tokens.name.value = renamedLegacies.get(name);\n if (name === \"TreatNullAs\") {\n extAttr.params.tokens = {};\n }\n };\n}\n\n// Note: we parse something simpler than the official syntax. It's all that ever\n// seems to be used\nexport class ExtendedAttributes extends ArrayBase {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"[\");\n const ret = new ExtendedAttributes({ source: tokeniser.source, tokens });\n if (!tokens.open) return ret;\n ret.push(\n ...list(tokeniser, {\n parser: SimpleExtendedAttribute.parse,\n listName: \"extended attribute\",\n })\n );\n tokens.close =\n tokeniser.consume(\"]\") ||\n tokeniser.error(\n \"Expected a closing token for the extended attribute list\"\n );\n if (!ret.length) {\n tokeniser.unconsume(tokens.close.index);\n tokeniser.error(\"An extended attribute list must not be empty\");\n }\n if (tokeniser.probe(\"[\")) {\n tokeniser.error(\n \"Illegal double extended attribute lists, consider merging them\"\n );\n }\n return ret;\n }\n\n *validate(defs) {\n for (const extAttr of this) {\n yield* extAttr.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n if (!this.length) return \"\";\n return w.ts.wrap([\n w.token(this.tokens.open),\n ...this.map((ea) => ea.write(w)),\n w.token(this.tokens.close),\n ]);\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n return_type,\n primitive_type,\n autoParenter,\n} from \"./helpers.js\";\nimport { stringTypes, typeNameKeywords } from \"../tokeniser.js\";\nimport { validationError } from \"../error.js\";\nimport { idlTypeIncludesDictionary } from \"../validators/helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction generic_type(tokeniser, typeName) {\n const base = tokeniser.consume(\n \"FrozenArray\",\n \"ObservableArray\",\n \"Promise\",\n \"sequence\",\n \"record\"\n );\n if (!base) {\n return;\n }\n const ret = autoParenter(\n new Type({ source: tokeniser.source, tokens: { base } })\n );\n ret.tokens.open =\n tokeniser.consume(\"<\") ||\n tokeniser.error(`No opening bracket after ${base.value}`);\n switch (base.value) {\n case \"Promise\": {\n if (tokeniser.probe(\"[\"))\n tokeniser.error(\"Promise type cannot have extended attribute\");\n const subtype =\n return_type(tokeniser, typeName) ||\n tokeniser.error(\"Missing Promise subtype\");\n ret.subtype.push(subtype);\n break;\n }\n case \"sequence\":\n case \"FrozenArray\":\n case \"ObservableArray\": {\n const subtype =\n 
type_with_extended_attributes(tokeniser, typeName) ||\n tokeniser.error(`Missing ${base.value} subtype`);\n ret.subtype.push(subtype);\n break;\n }\n case \"record\": {\n if (tokeniser.probe(\"[\"))\n tokeniser.error(\"Record key cannot have extended attribute\");\n const keyType =\n tokeniser.consume(...stringTypes) ||\n tokeniser.error(`Record key must be one of: ${stringTypes.join(\", \")}`);\n const keyIdlType = new Type({\n source: tokeniser.source,\n tokens: { base: keyType },\n });\n keyIdlType.tokens.separator =\n tokeniser.consume(\",\") ||\n tokeniser.error(\"Missing comma after record key type\");\n keyIdlType.type = typeName;\n const valueType =\n type_with_extended_attributes(tokeniser, typeName) ||\n tokeniser.error(\"Error parsing generic type record\");\n ret.subtype.push(keyIdlType, valueType);\n break;\n }\n }\n if (!ret.idlType) tokeniser.error(`Error parsing generic type ${base.value}`);\n ret.tokens.close =\n tokeniser.consume(\">\") ||\n tokeniser.error(`Missing closing bracket after ${base.value}`);\n return ret.this;\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction type_suffix(tokeniser, obj) {\n const nullable = tokeniser.consume(\"?\");\n if (nullable) {\n obj.tokens.nullable = nullable;\n }\n if (tokeniser.probe(\"?\")) tokeniser.error(\"Can't nullable more than once\");\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction single_type(tokeniser, typeName) {\n let ret = generic_type(tokeniser, typeName) || primitive_type(tokeniser);\n if (!ret) {\n const base =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(...stringTypes, ...typeNameKeywords);\n if (!base) {\n return;\n }\n ret = new Type({ source: tokeniser.source, tokens: { base } });\n if (tokeniser.probe(\"<\"))\n tokeniser.error(`Unsupported generic type ${base.value}`);\n }\n if (ret.generic === \"Promise\" && tokeniser.probe(\"?\")) {\n tokeniser.error(\"Promise type cannot be nullable\");\n }\n ret.type = typeName || null;\n type_suffix(tokeniser, ret);\n if (ret.nullable && ret.idlType === \"any\")\n tokeniser.error(\"Type `any` cannot be made nullable\");\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} type\n */\nfunction union_type(tokeniser, type) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"(\");\n if (!tokens.open) return;\n const ret = autoParenter(new Type({ source: tokeniser.source, tokens }));\n ret.type = type || null;\n while (true) {\n const typ =\n type_with_extended_attributes(tokeniser) ||\n tokeniser.error(\"No type after open parenthesis or 'or' in union type\");\n if (typ.idlType === \"any\")\n tokeniser.error(\"Type `any` cannot be included in a union type\");\n if (typ.generic === \"Promise\")\n tokeniser.error(\"Type `Promise` cannot be included in a union type\");\n ret.subtype.push(typ);\n const or = tokeniser.consume(\"or\");\n if (or) {\n typ.tokens.separator = or;\n } else break;\n }\n if (ret.idlType.length < 2) {\n tokeniser.error(\n \"At least two types are expected in a union type but found less\"\n );\n }\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated union type\");\n type_suffix(tokeniser, ret);\n return ret.this;\n}\n\nexport class Type extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string} typeName\n */\n static parse(tokeniser, typeName) {\n return single_type(tokeniser, typeName) || union_type(tokeniser, 
typeName);\n }\n\n constructor({ source, tokens }) {\n super({ source, tokens });\n Object.defineProperty(this, \"subtype\", { value: [], writable: true });\n this.extAttrs = new ExtendedAttributes({ source, tokens: {} });\n }\n\n get generic() {\n if (this.subtype.length && this.tokens.base) {\n return this.tokens.base.value;\n }\n return \"\";\n }\n get nullable() {\n return Boolean(this.tokens.nullable);\n }\n get union() {\n return Boolean(this.subtype.length) && !this.tokens.base;\n }\n get idlType() {\n if (this.subtype.length) {\n return this.subtype;\n }\n // Adding prefixes/postfixes for \"unrestricted float\", etc.\n const name = [this.tokens.prefix, this.tokens.base, this.tokens.postfix]\n .filter((t) => t)\n .map((t) => t.value)\n .join(\" \");\n return unescape(name);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n\n if (this.idlType === \"void\") {\n const message = `\\`void\\` is now replaced by \\`undefined\\`. Refer to the \\\n[relevant GitHub issue](https://github.com/whatwg/webidl/issues/60) \\\nfor more information.`;\n yield validationError(this.tokens.base, this, \"replace-void\", message, {\n autofix: replaceVoid(this),\n });\n }\n\n /*\n * If a union is nullable, its subunions cannot include a dictionary\n * If not, subunions may include dictionaries if each union is not nullable\n */\n const typedef = !this.union && defs.unique.get(this.idlType);\n const target = this.union\n ? this\n : typedef && typedef.type === \"typedef\"\n ? typedef.idlType\n : undefined;\n if (target && this.nullable) {\n // do not allow any dictionary\n const { reference } = idlTypeIncludesDictionary(target, defs) || {};\n if (reference) {\n const targetToken = (this.union ? reference : this).tokens.base;\n const message = \"Nullable union cannot include a dictionary type.\";\n yield validationError(\n targetToken,\n this,\n \"no-nullable-union-dict\",\n message\n );\n }\n } else {\n // allow some dictionary\n for (const subtype of this.subtype) {\n yield* subtype.validate(defs);\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const type_body = () => {\n if (this.union || this.generic) {\n return w.ts.wrap([\n w.token(this.tokens.base, w.ts.generic),\n w.token(this.tokens.open),\n ...this.subtype.map((t) => t.write(w)),\n w.token(this.tokens.close),\n ]);\n }\n const firstToken = this.tokens.prefix || this.tokens.base;\n const prefix = this.tokens.prefix\n ? 
[this.tokens.prefix.value, w.ts.trivia(this.tokens.base.trivia)]\n : [];\n const ref = w.reference(\n w.ts.wrap([\n ...prefix,\n this.tokens.base.value,\n w.token(this.tokens.postfix),\n ]),\n {\n unescaped: /** @type {string} (because it's not union) */ (\n this.idlType\n ),\n context: this,\n }\n );\n return w.ts.wrap([w.ts.trivia(firstToken.trivia), ref]);\n };\n return w.ts.wrap([\n this.extAttrs.write(w),\n type_body(),\n w.token(this.tokens.nullable),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {Type} type\n */\nfunction replaceVoid(type) {\n return () => {\n type.tokens.base.value = \"undefined\";\n };\n}\n","import { Base } from \"./base.js\";\nimport { const_data, const_value } from \"./helpers.js\";\n\nexport class Default extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const assign = tokeniser.consume(\"=\");\n if (!assign) {\n return null;\n }\n const def =\n const_value(tokeniser) ||\n tokeniser.consumeKind(\"string\") ||\n tokeniser.consume(\"null\", \"[\", \"{\") ||\n tokeniser.error(\"No value for default\");\n const expression = [def];\n if (def.value === \"[\") {\n const close =\n tokeniser.consume(\"]\") ||\n tokeniser.error(\"Default sequence value must be empty\");\n expression.push(close);\n } else if (def.value === \"{\") {\n const close =\n tokeniser.consume(\"}\") ||\n tokeniser.error(\"Default dictionary value must be empty\");\n expression.push(close);\n }\n return new Default({\n source: tokeniser.source,\n tokens: { assign },\n expression,\n });\n }\n\n constructor({ source, tokens, expression }) {\n super({ source, tokens });\n expression.parent = this;\n Object.defineProperty(this, \"expression\", { value: expression });\n }\n\n get type() {\n return const_data(this.expression[0]).type;\n }\n get value() {\n return const_data(this.expression[0]).value;\n }\n get negative() {\n return const_data(this.expression[0]).negative;\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.wrap([\n w.token(this.tokens.assign),\n ...this.expression.map((t) => w.token(t)),\n ]);\n }\n}\n","import { Base } from \"./base.js\";\nimport { Default } from \"./default.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n autoParenter,\n getFirstToken,\n} from \"./helpers.js\";\nimport { argumentNameKeywords, Tokeniser } from \"../tokeniser.js\";\nimport { validationError } from \"../error.js\";\nimport {\n idlTypeIncludesDictionary,\n dictionaryIncludesRequiredField,\n} from \"../validators/helpers.js\";\n\nexport class Argument extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(\n new Argument({ source: tokeniser.source, tokens })\n );\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.optional = tokeniser.consume(\"optional\");\n ret.idlType = type_with_extended_attributes(tokeniser, \"argument-type\");\n if (!ret.idlType) {\n return tokeniser.unconsume(start_position);\n }\n if (!tokens.optional) {\n tokens.variadic = tokeniser.consume(\"...\");\n }\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(...argumentNameKeywords);\n if (!tokens.name) {\n return tokeniser.unconsume(start_position);\n }\n ret.default = tokens.optional ? 
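/**
 * A minimal sketch of Default.parse above: only constants, strings, `null`,
 * and empty `[]` / `{}` literals are accepted. Assumes a package-level
 * `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * const [dict] = parse("dictionary D { sequence<long> a = []; };");
 * dict.members[0].default.type;  // "sequence"
 * dict.members[0].default.value; // []
 * // "sequence<long> a = [1]" would throw:
 * //   "Default sequence value must be empty"
 */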
Default.parse(tokeniser) : null;\n return ret.this;\n }\n\n get type() {\n return \"argument\";\n }\n get optional() {\n return !!this.tokens.optional;\n }\n get variadic() {\n return !!this.tokens.variadic;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n /**\n * @param {import(\"../validator.js\").Definitions} defs\n */\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n const result = idlTypeIncludesDictionary(this.idlType, defs, {\n useNullableInner: true,\n });\n if (result) {\n if (this.idlType.nullable) {\n const message = `Dictionary arguments cannot be nullable.`;\n yield validationError(\n this.tokens.name,\n this,\n \"no-nullable-dict-arg\",\n message\n );\n } else if (!this.optional) {\n if (\n this.parent &&\n !dictionaryIncludesRequiredField(result.dictionary, defs) &&\n isLastRequiredArgument(this)\n ) {\n const message = `Dictionary argument must be optional if it has no required fields`;\n yield validationError(\n this.tokens.name,\n this,\n \"dict-arg-optional\",\n message,\n {\n autofix: autofixDictionaryArgumentOptionality(this),\n }\n );\n }\n } else if (!this.default) {\n const message = `Optional dictionary arguments must have a default value of \\`{}\\`.`;\n yield validationError(\n this.tokens.name,\n this,\n \"dict-arg-default\",\n message,\n {\n autofix: autofixOptionalDictionaryDefaultValue(this),\n }\n );\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.optional),\n w.ts.type(this.idlType.write(w)),\n w.token(this.tokens.variadic),\n w.name_token(this.tokens.name, { data: this }),\n this.default ? this.default.write(w) : \"\",\n w.token(this.tokens.separator),\n ]);\n }\n}\n\n/**\n * @param {Argument} arg\n */\nfunction isLastRequiredArgument(arg) {\n const list = arg.parent.arguments || arg.parent.list;\n const index = list.indexOf(arg);\n const requiredExists = list.slice(index + 1).some((a) => !a.optional);\n return !requiredExists;\n}\n\n/**\n * @param {Argument} arg\n */\nfunction autofixDictionaryArgumentOptionality(arg) {\n return () => {\n const firstToken = getFirstToken(arg.idlType);\n arg.tokens.optional = {\n ...firstToken,\n type: \"optional\",\n value: \"optional\",\n };\n firstToken.trivia = \" \";\n autofixOptionalDictionaryDefaultValue(arg)();\n };\n}\n\n/**\n * @param {Argument} arg\n */\nfunction autofixOptionalDictionaryDefaultValue(arg) {\n return () => {\n arg.default = Default.parse(new Tokeniser(\" = {}\"));\n };\n}\n","import { Base } from \"./base.js\";\nimport {\n return_type,\n argument_list,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\n\nexport class Operation extends Base {\n /**\n * @typedef {import(\"../tokeniser.js\").Token} Token\n *\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {Token} [options.special]\n * @param {Token} [options.regular]\n */\n static parse(tokeniser, { special, regular } = {}) {\n const tokens = { special };\n const ret = autoParenter(\n new Operation({ source: tokeniser.source, tokens })\n );\n if (special && special.value === \"stringifier\") {\n tokens.termination = tokeniser.consume(\";\");\n if (tokens.termination) {\n ret.arguments = [];\n return ret;\n }\n }\n if (!special && !regular) {\n tokens.special = tokeniser.consume(\"getter\", \"setter\", \"deleter\");\n }\n ret.idlType =\n return_type(tokeniser) || 
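/**
 * A minimal sketch of the dictionary-argument rules validated above;
 * "dict-arg-optional" and "dict-arg-default" both carry autofixes.
 * Assumes package-level `parse` and `validate` exports.
 *
 * @example
 * import { parse, validate } from "webidl2";
 * const ast = parse(`
 *   dictionary Opts { boolean flag; };
 *   [Exposed=Window] interface A { undefined f(optional Opts o); };
 * `);
 * validate(ast).map((v) => v.ruleName);
 * // ["dict-arg-default"]: optional dictionary args must default to {}
 */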
tokeniser.error(\"Missing return type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") || tokeniser.consume(\"includes\");\n tokens.open =\n tokeniser.consume(\"(\") || tokeniser.error(\"Invalid operation\");\n ret.arguments = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated operation\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated operation, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"operation\";\n }\n get name() {\n const { name } = this.tokens;\n if (!name) {\n return \"\";\n }\n return unescape(name.value);\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n if (!this.name && [\"\", \"static\"].includes(this.special)) {\n const message = `Regular or static operations must have both a return type and an identifier.`;\n yield validationError(this.tokens.open, this, \"incomplete-op\", message);\n }\n if (this.idlType) {\n yield* this.idlType.validate(defs);\n }\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n const body = this.idlType\n ? [\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.open),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.close),\n ]\n : [];\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n this.tokens.name\n ? w.token(this.tokens.special)\n : w.token(this.tokens.special, w.ts.nameless, { data: this, parent }),\n ...body,\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { validationError } from \"../error.js\";\nimport { idlTypeIncludesDictionary } from \"../validators/helpers.js\";\nimport { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class Attribute extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.special]\n * @param {boolean} [options.noInherit]\n * @param {boolean} [options.readonly]\n */\n static parse(\n tokeniser,\n { special, noInherit = false, readonly = false } = {}\n ) {\n const start_position = tokeniser.position;\n const tokens = { special };\n const ret = autoParenter(\n new Attribute({ source: tokeniser.source, tokens })\n );\n if (!special && !noInherit) {\n tokens.special = tokeniser.consume(\"inherit\");\n }\n if (ret.special === \"inherit\" && tokeniser.probe(\"readonly\")) {\n tokeniser.error(\"Inherited attributes cannot be read-only\");\n }\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (readonly && !tokens.readonly && tokeniser.probe(\"attribute\")) {\n tokeniser.error(\"Attributes must be readonly in this context\");\n }\n tokens.base = tokeniser.consume(\"attribute\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"attribute-type\") ||\n tokeniser.error(\"Attribute lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.consume(\"async\", \"required\") ||\n tokeniser.error(\"Attribute lacks a name\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n 
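/**
 * A minimal sketch of the operation forms handled by Operation.parse above:
 * regular, special (getter/setter/deleter), and stringifier. Assumes a
 * package-level `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * const [iface] = parse(
 *   "[Exposed=Window] interface A { getter DOMString (unsigned long i); };"
 * );
 * const op = iface.members[0];
 * op.type;    // "operation"
 * op.special; // "getter"
 * op.name;    // "", special operations may be nameless
 */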
tokeniser.error(\"Unterminated attribute, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"attribute\";\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n\n switch (this.idlType.generic) {\n case \"sequence\":\n case \"record\": {\n const message = `Attributes cannot accept ${this.idlType.generic} types.`;\n yield validationError(\n this.tokens.name,\n this,\n \"attr-invalid-type\",\n message\n );\n break;\n }\n default: {\n const { reference } =\n idlTypeIncludesDictionary(this.idlType, defs) || {};\n if (reference) {\n const targetToken = (this.idlType.union ? reference : this.idlType)\n .tokens.base;\n const message = \"Attributes cannot accept dictionary types.\";\n yield validationError(\n targetToken,\n this,\n \"attr-invalid-type\",\n message\n );\n }\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.special),\n w.token(this.tokens.readonly),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Type } from \"./type.js\";\nimport { Argument } from \"./argument.js\";\nimport {\n ExtendedAttributes,\n SimpleExtendedAttribute,\n} from \"./extended-attributes.js\";\nimport { Operation } from \"./operation.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Tokeniser } from \"../tokeniser.js\";\n\n/**\n * @param {string} identifier\n */\nexport function unescape(identifier) {\n return identifier.startsWith(\"_\") ? 
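/**
 * A minimal sketch of Attribute.validate above, which rejects sequence,
 * record, and dictionary-typed attributes. Assumes package-level `parse`
 * and `validate` exports.
 *
 * @example
 * import { parse, validate } from "webidl2";
 * const ast = parse(
 *   "[Exposed=Window] interface A { attribute sequence<long> xs; };"
 * );
 * validate(ast)[0].ruleName; // "attr-invalid-type"
 */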
identifier.slice(1) : identifier;\n}\n\n/**\n * Parses comma-separated list\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} args\n * @param {Function} args.parser parser function for each item\n * @param {boolean} [args.allowDangler] whether to allow dangling comma\n * @param {string} [args.listName] the name to be shown on error messages\n */\nexport function list(tokeniser, { parser, allowDangler, listName = \"list\" }) {\n const first = parser(tokeniser);\n if (!first) {\n return [];\n }\n first.tokens.separator = tokeniser.consume(\",\");\n const items = [first];\n while (first.tokens.separator) {\n const item = parser(tokeniser);\n if (!item) {\n if (!allowDangler) {\n tokeniser.error(`Trailing comma in ${listName}`);\n }\n break;\n }\n item.tokens.separator = tokeniser.consume(\",\");\n items.push(item);\n if (!item.tokens.separator) break;\n }\n return items;\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nexport function const_value(tokeniser) {\n return (\n tokeniser.consumeKind(\"decimal\", \"integer\") ||\n tokeniser.consume(\"true\", \"false\", \"Infinity\", \"-Infinity\", \"NaN\")\n );\n}\n\n/**\n * @param {object} token\n * @param {string} token.type\n * @param {string} token.value\n */\nexport function const_data({ type, value }) {\n switch (type) {\n case \"decimal\":\n case \"integer\":\n return { type: \"number\", value };\n case \"string\":\n return { type: \"string\", value: value.slice(1, -1) };\n }\n\n switch (value) {\n case \"true\":\n case \"false\":\n return { type: \"boolean\", value: value === \"true\" };\n case \"Infinity\":\n case \"-Infinity\":\n return { type: \"Infinity\", negative: value.startsWith(\"-\") };\n case \"[\":\n return { type: \"sequence\", value: [] };\n case \"{\":\n return { type: \"dictionary\" };\n default:\n return { type: value };\n }\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nexport function primitive_type(tokeniser) {\n function integer_type() {\n const prefix = tokeniser.consume(\"unsigned\");\n const base = tokeniser.consume(\"short\", \"long\");\n if (base) {\n const postfix = tokeniser.consume(\"long\");\n return new Type({ source, tokens: { prefix, base, postfix } });\n }\n if (prefix) tokeniser.error(\"Failed to parse integer type\");\n }\n\n function decimal_type() {\n const prefix = tokeniser.consume(\"unrestricted\");\n const base = tokeniser.consume(\"float\", \"double\");\n if (base) {\n return new Type({ source, tokens: { prefix, base } });\n }\n if (prefix) tokeniser.error(\"Failed to parse float type\");\n }\n\n const { source } = tokeniser;\n const num_type = integer_type() || decimal_type();\n if (num_type) return num_type;\n const base = tokeniser.consume(\n \"bigint\",\n \"boolean\",\n \"byte\",\n \"octet\",\n \"undefined\"\n );\n if (base) {\n return new Type({ source, tokens: { base } });\n }\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nexport function argument_list(tokeniser) {\n return list(tokeniser, {\n parser: Argument.parse,\n listName: \"arguments list\",\n });\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string=} typeName (TODO: See Type.type for more details)\n */\nexport function type_with_extended_attributes(tokeniser, typeName) {\n const extAttrs = ExtendedAttributes.parse(tokeniser);\n const ret = Type.parse(tokeniser, typeName);\n if (ret) autoParenter(ret).extAttrs = extAttrs;\n return ret;\n}\n\n/**\n * @param 
{import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {string=} typeName (TODO: See Type.type for more details)\n */\nexport function return_type(tokeniser, typeName) {\n const typ = Type.parse(tokeniser, typeName || \"return-type\");\n if (typ) {\n return typ;\n }\n const voidToken = tokeniser.consume(\"void\");\n if (voidToken) {\n const ret = new Type({\n source: tokeniser.source,\n tokens: { base: voidToken },\n });\n ret.type = \"return-type\";\n return ret;\n }\n}\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nexport function stringifier(tokeniser) {\n const special = tokeniser.consume(\"stringifier\");\n if (!special) return;\n const member =\n Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"Unterminated stringifier\");\n return member;\n}\n\n/**\n * @param {string} str\n */\nexport function getLastIndentation(str) {\n const lines = str.split(\"\\n\");\n // the first line visually binds to the preceding token\n if (lines.length) {\n const match = lines[lines.length - 1].match(/^\\s+/);\n if (match) {\n return match[0];\n }\n }\n return \"\";\n}\n\n/**\n * @param {string} parentTrivia\n */\nexport function getMemberIndentation(parentTrivia) {\n const indentation = getLastIndentation(parentTrivia);\n const indentCh = indentation.includes(\"\\t\") ? \"\\t\" : \" \";\n return indentation + indentCh;\n}\n\n/**\n * @param {import(\"./interface.js\").Interface} def\n */\nexport function autofixAddExposedWindow(def) {\n return () => {\n if (def.extAttrs.length) {\n const tokeniser = new Tokeniser(\"Exposed=Window,\");\n const exposed = SimpleExtendedAttribute.parse(tokeniser);\n exposed.tokens.separator = tokeniser.consume(\",\");\n const existing = def.extAttrs[0];\n if (!/^\\s/.test(existing.tokens.name.trivia)) {\n existing.tokens.name.trivia = ` ${existing.tokens.name.trivia}`;\n }\n def.extAttrs.unshift(exposed);\n } else {\n autoParenter(def).extAttrs = ExtendedAttributes.parse(\n new Tokeniser(\"[Exposed=Window]\")\n );\n const trivia = def.tokens.base.trivia;\n def.extAttrs.tokens.open.trivia = trivia;\n def.tokens.base.trivia = `\\n${getLastIndentation(trivia)}`;\n }\n };\n}\n\n/**\n * Get the first syntax token for the given IDL object.\n * @param {*} data\n */\nexport function getFirstToken(data) {\n if (data.extAttrs.length) {\n return data.extAttrs.tokens.open;\n }\n if (data.type === \"operation\" && !data.special) {\n return getFirstToken(data.idlType);\n }\n const tokens = Object.values(data.tokens).sort((x, y) => x.index - y.index);\n return tokens[0];\n}\n\n/**\n * @template T\n * @param {T[]} array\n * @param {(item: T) => boolean} predicate\n */\nexport function findLastIndex(array, predicate) {\n const index = array.slice().reverse().findIndex(predicate);\n if (index === -1) {\n return index;\n }\n return array.length - index - 1;\n}\n\n/**\n * Returns a proxy that auto-assign `parent` field.\n * @template {Record<string | symbol, any>} T\n * @param {T} data\n * @param {*} [parent] The object that will be assigned to `parent`.\n * If absent, it will be `data` by default.\n * @return {T}\n */\nexport function autoParenter(data, parent) {\n if (!parent) {\n // Defaults to `data` unless specified otherwise.\n parent = data;\n }\n if (!data) {\n // This allows `autoParenter(undefined)` which again allows\n // `autoParenter(parse())` where the function may return nothing.\n return data;\n }\n const proxy = new Proxy(data, {\n get(target, p) {\n const value = target[p];\n if 
(Array.isArray(value) && p !== \"source\") {\n // Wraps the array so that any added items will also automatically\n // get their `parent` values.\n return autoParenter(value, target);\n }\n return value;\n },\n set(target, p, value) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/47357\n target[p] = value;\n if (!value) {\n return true;\n } else if (Array.isArray(value)) {\n // Assigning an array will add `parent` to its items.\n for (const item of value) {\n if (typeof item.parent !== \"undefined\") {\n item.parent = parent;\n }\n }\n } else if (typeof value.parent !== \"undefined\") {\n value.parent = parent;\n }\n return true;\n },\n });\n return proxy;\n}\n","import { syntaxError } from \"./error.js\";\nimport { unescape } from \"./productions/helpers.js\";\n\n// These regular expressions use the sticky flag so they will only match at\n// the current location (ie. the offset of lastIndex).\nconst tokenRe = {\n // This expression uses a lookahead assertion to catch false matches\n // against integers early.\n decimal:\n /-?(?=[0-9]*\\.|[0-9]+[eE])(([0-9]+\\.[0-9]*|[0-9]*\\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,\n integer: /-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,\n identifier: /[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,\n string: /\"[^\"]*\"/y,\n whitespace: /[\\t\\n\\r ]+/y,\n comment: /\\/\\/.*|\\/\\*[\\s\\S]*?\\*\\//y,\n other: /[^\\t\\n\\r 0-9A-Za-z]/y,\n};\n\nexport const typeNameKeywords = [\n \"ArrayBuffer\",\n \"DataView\",\n \"Int8Array\",\n \"Int16Array\",\n \"Int32Array\",\n \"Uint8Array\",\n \"Uint16Array\",\n \"Uint32Array\",\n \"Uint8ClampedArray\",\n \"BigInt64Array\",\n \"BigUint64Array\",\n \"Float32Array\",\n \"Float64Array\",\n \"any\",\n \"object\",\n \"symbol\",\n];\n\nexport const stringTypes = [\"ByteString\", \"DOMString\", \"USVString\"];\n\nexport const argumentNameKeywords = [\n \"async\",\n \"attribute\",\n \"callback\",\n \"const\",\n \"constructor\",\n \"deleter\",\n \"dictionary\",\n \"enum\",\n \"getter\",\n \"includes\",\n \"inherit\",\n \"interface\",\n \"iterable\",\n \"maplike\",\n \"namespace\",\n \"partial\",\n \"required\",\n \"setlike\",\n \"setter\",\n \"static\",\n \"stringifier\",\n \"typedef\",\n \"unrestricted\",\n];\n\nconst nonRegexTerminals = [\n \"-Infinity\",\n \"FrozenArray\",\n \"Infinity\",\n \"NaN\",\n \"ObservableArray\",\n \"Promise\",\n \"bigint\",\n \"boolean\",\n \"byte\",\n \"double\",\n \"false\",\n \"float\",\n \"long\",\n \"mixin\",\n \"null\",\n \"octet\",\n \"optional\",\n \"or\",\n \"readonly\",\n \"record\",\n \"sequence\",\n \"short\",\n \"true\",\n \"undefined\",\n \"unsigned\",\n \"void\",\n].concat(argumentNameKeywords, stringTypes, typeNameKeywords);\n\nconst punctuations = [\n \"(\",\n \")\",\n \",\",\n \"...\",\n \":\",\n \";\",\n \"<\",\n \"=\",\n \">\",\n \"?\",\n \"*\",\n \"[\",\n \"]\",\n \"{\",\n \"}\",\n];\n\nconst reserved = [\n // \"constructor\" is now a keyword\n \"_constructor\",\n \"toString\",\n \"_toString\",\n];\n\n/**\n * @typedef {ArrayItemType<ReturnType<typeof tokenise>>} Token\n * @param {string} str\n */\nfunction tokenise(str) {\n const tokens = [];\n let lastCharIndex = 0;\n let trivia = \"\";\n let line = 1;\n let index = 0;\n while (lastCharIndex < str.length) {\n const nextChar = str.charAt(lastCharIndex);\n let result = -1;\n\n if (/[\\t\\n\\r ]/.test(nextChar)) {\n result = attemptTokenMatch(\"whitespace\", { noFlushTrivia: true });\n } else if (nextChar === \"/\") {\n result = attemptTokenMatch(\"comment\", { noFlushTrivia: true });\n }\n\n if (result !== -1) {\n 
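/**
 * A minimal sketch of the tokeniser tables above: a leading "_" escapes an
 * identifier that collides with a keyword (later stripped by unescape()),
 * while names on the `reserved` list are rejected outright. Assumes a
 * package-level `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * parse('enum _interface { "a" };')[0].name; // "interface"
 * parse('enum toString { "a" };');
 * // throws WebIDLParseError: toString is a reserved identifier
 * // and must not be used.
 */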
const currentTrivia = tokens.pop().value;\n line += (currentTrivia.match(/\\n/g) || []).length;\n trivia += currentTrivia;\n index -= 1;\n } else if (/[-0-9.A-Z_a-z]/.test(nextChar)) {\n result = attemptTokenMatch(\"decimal\");\n if (result === -1) {\n result = attemptTokenMatch(\"integer\");\n }\n if (result === -1) {\n result = attemptTokenMatch(\"identifier\");\n const lastIndex = tokens.length - 1;\n const token = tokens[lastIndex];\n if (result !== -1) {\n if (reserved.includes(token.value)) {\n const message = `${unescape(\n token.value\n )} is a reserved identifier and must not be used.`;\n throw new WebIDLParseError(\n syntaxError(tokens, lastIndex, null, message)\n );\n } else if (nonRegexTerminals.includes(token.value)) {\n token.type = \"inline\";\n }\n }\n }\n } else if (nextChar === '\"') {\n result = attemptTokenMatch(\"string\");\n }\n\n for (const punctuation of punctuations) {\n if (str.startsWith(punctuation, lastCharIndex)) {\n tokens.push({\n type: \"inline\",\n value: punctuation,\n trivia,\n line,\n index,\n });\n trivia = \"\";\n lastCharIndex += punctuation.length;\n result = lastCharIndex;\n break;\n }\n }\n\n // other as the last try\n if (result === -1) {\n result = attemptTokenMatch(\"other\");\n }\n if (result === -1) {\n throw new Error(\"Token stream not progressing\");\n }\n lastCharIndex = result;\n index += 1;\n }\n\n // remaining trivia as eof\n tokens.push({\n type: \"eof\",\n value: \"\",\n trivia,\n line,\n index,\n });\n\n return tokens;\n\n /**\n * @param {keyof typeof tokenRe} type\n * @param {object} options\n * @param {boolean} [options.noFlushTrivia]\n */\n function attemptTokenMatch(type, { noFlushTrivia } = {}) {\n const re = tokenRe[type];\n re.lastIndex = lastCharIndex;\n const result = re.exec(str);\n if (result) {\n tokens.push({ type, value: result[0], trivia, line, index });\n if (!noFlushTrivia) {\n trivia = \"\";\n }\n return re.lastIndex;\n }\n return -1;\n }\n}\n\nexport class Tokeniser {\n /**\n * @param {string} idl\n */\n constructor(idl) {\n this.source = tokenise(idl);\n this.position = 0;\n }\n\n /**\n * @param {string} message\n * @return {never}\n */\n error(message) {\n throw new WebIDLParseError(\n syntaxError(this.source, this.position, this.current, message)\n );\n }\n\n /**\n * @param {string} type\n */\n probeKind(type) {\n return (\n this.source.length > this.position &&\n this.source[this.position].type === type\n );\n }\n\n /**\n * @param {string} value\n */\n probe(value) {\n return (\n this.probeKind(\"inline\") && this.source[this.position].value === value\n );\n }\n\n /**\n * @param {...string} candidates\n */\n consumeKind(...candidates) {\n for (const type of candidates) {\n if (!this.probeKind(type)) continue;\n const token = this.source[this.position];\n this.position++;\n return token;\n }\n }\n\n /**\n * @param {...string} candidates\n */\n consume(...candidates) {\n if (!this.probeKind(\"inline\")) return;\n const token = this.source[this.position];\n for (const value of candidates) {\n if (token.value !== value) continue;\n this.position++;\n return token;\n }\n }\n\n /**\n * @param {string} value\n */\n consumeIdentifier(value) {\n if (!this.probeKind(\"identifier\")) {\n return;\n }\n if (this.source[this.position].value !== value) {\n return;\n }\n return this.consumeKind(\"identifier\");\n }\n\n /**\n * @param {number} position\n */\n unconsume(position) {\n this.position = position;\n }\n}\n\nexport class WebIDLParseError extends Error {\n /**\n * @param {object} options\n * @param {string} 
options.message\n * @param {string} options.bareMessage\n * @param {string} options.context\n * @param {number} options.line\n * @param {*} options.sourceName\n * @param {string} options.input\n * @param {*[]} options.tokens\n */\n constructor({\n message,\n bareMessage,\n context,\n line,\n sourceName,\n input,\n tokens,\n }) {\n super(message);\n\n this.name = \"WebIDLParseError\"; // not to be mangled\n this.bareMessage = bareMessage;\n this.context = context;\n this.line = line;\n this.sourceName = sourceName;\n this.input = input;\n this.tokens = tokens;\n }\n}\n","import { list, unescape, autoParenter } from \"./helpers.js\";\nimport { WrappedToken } from \"./token.js\";\nimport { Base } from \"./base.js\";\n\nexport class EnumValue extends WrappedToken {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consumeKind(\"string\");\n if (value) {\n return new EnumValue({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"enum-value\";\n }\n get value() {\n return super.value.slice(1, -1);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.wrap([\n w.ts.trivia(this.tokens.value.trivia),\n w.ts.definition(\n w.ts.wrap(['\"', w.ts.name(this.value, { data: this, parent }), '\"']),\n { data: this, parent }\n ),\n w.token(this.tokens.separator),\n ]);\n }\n}\n\nexport class Enum extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n tokens.base = tokeniser.consume(\"enum\");\n if (!tokens.base) {\n return;\n }\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"No name for enum\");\n const ret = autoParenter(new Enum({ source: tokeniser.source, tokens }));\n tokeniser.current = ret.this;\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(\"Bodyless enum\");\n ret.values = list(tokeniser, {\n parser: EnumValue.parse,\n allowDangler: true,\n listName: \"enumeration\",\n });\n if (tokeniser.probeKind(\"string\")) {\n tokeniser.error(\"No comma between enum values\");\n }\n tokens.close =\n tokeniser.consume(\"}\") || tokeniser.error(\"Unexpected value in enum\");\n if (!ret.values.length) {\n tokeniser.error(\"No value in enum\");\n }\n tokens.termination =\n tokeniser.consume(\";\") || tokeniser.error(\"No semicolon after enum\");\n return ret.this;\n }\n\n get type() {\n return \"enum\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.open),\n w.ts.wrap(this.values.map((v) => v.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class Includes extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const target = tokeniser.consumeKind(\"identifier\");\n if (!target) {\n return;\n }\n const tokens = { target };\n tokens.includes = tokeniser.consume(\"includes\");\n if (!tokens.includes) {\n tokeniser.unconsume(target.index);\n return;\n }\n tokens.mixin =\n tokeniser.consumeKind(\"identifier\") ||\n 
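/**
 * A minimal sketch of Enum.parse above: a dangling comma is allowed
 * (allowDangler), but at least one value is required. Assumes a
 * package-level `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * const [en] = parse('enum Mode { "open", "closed", };'); // trailing comma OK
 * en.values.map((v) => v.value); // ["open", "closed"]
 * // parse("enum Mode { };") would throw: "No value in enum"
 */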
tokeniser.error(\"Incomplete includes statement\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"No terminating ; for includes statement\");\n return new Includes({ source: tokeniser.source, tokens });\n }\n\n get type() {\n return \"includes\";\n }\n get target() {\n return unescape(this.tokens.target.value);\n }\n get includes() {\n return unescape(this.tokens.mixin.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.reference_token(this.tokens.target, this),\n w.token(this.tokens.includes),\n w.reference_token(this.tokens.mixin, this),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class Typedef extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(new Typedef({ source: tokeniser.source, tokens }));\n tokens.base = tokeniser.consume(\"typedef\");\n if (!tokens.base) {\n return;\n }\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"typedef-type\") ||\n tokeniser.error(\"Typedef lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Typedef lacks a name\");\n tokeniser.current = ret.this;\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated typedef, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"typedef\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n return_type,\n argument_list,\n unescape,\n autoParenter,\n} from \"./helpers.js\";\n\nexport class CallbackFunction extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base) {\n const tokens = { base };\n const ret = autoParenter(\n new CallbackFunction({ source: tokeniser.source, tokens })\n );\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Callback lacks a name\");\n tokeniser.current = ret.this;\n tokens.assign =\n tokeniser.consume(\"=\") || tokeniser.error(\"Callback lacks an assignment\");\n ret.idlType =\n return_type(tokeniser) || tokeniser.error(\"Callback lacks a return type\");\n tokens.open =\n tokeniser.consume(\"(\") ||\n tokeniser.error(\"Callback lacks parentheses for arguments\");\n ret.arguments = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated callback\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated callback, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"callback\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n 
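/**
 * A minimal sketch of the three statement forms parsed above: typedef,
 * callback function, and includes. Assumes a package-level `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * const ast = parse(`
 *   typedef sequence<DOMString> Names;
 *   callback Logger = undefined (DOMString line);
 *   A includes B;
 * `);
 * ast.map((d) => d.type); // ["typedef", "callback", "includes"]
 */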
w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.name_token(this.tokens.name, { data: this }),\n w.token(this.tokens.assign),\n w.ts.type(this.idlType.write(w)),\n w.token(this.tokens.open),\n ...this.arguments.map((arg) => arg.write(w)),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { unescape, autoParenter } from \"./helpers.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction inheritance(tokeniser) {\n const colon = tokeniser.consume(\":\");\n if (!colon) {\n return {};\n }\n const inheritance =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Inheritance lacks a type\");\n return { colon, inheritance };\n}\n\nexport class Container extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {*} instance TODO: This should be {T extends Container}, but see https://github.com/microsoft/TypeScript/issues/4628\n * @param {*} args\n */\n static parse(tokeniser, instance, { inheritable, allowedMembers }) {\n const { tokens, type } = instance;\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(`Missing name in ${type}`);\n tokeniser.current = instance;\n instance = autoParenter(instance);\n if (inheritable) {\n Object.assign(tokens, inheritance(tokeniser));\n }\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(`Bodyless ${type}`);\n instance.members = [];\n while (true) {\n tokens.close = tokeniser.consume(\"}\");\n if (tokens.close) {\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(`Missing semicolon after ${type}`);\n return instance.this;\n }\n const ea = ExtendedAttributes.parse(tokeniser);\n let mem;\n for (const [parser, ...args] of allowedMembers) {\n mem = autoParenter(parser(tokeniser, ...args));\n if (mem) {\n break;\n }\n }\n if (!mem) {\n tokeniser.error(\"Unknown member\");\n }\n mem.extAttrs = ea;\n instance.members.push(mem.this);\n }\n }\n\n get partial() {\n return !!this.tokens.partial;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get inheritance() {\n if (!this.tokens.inheritance) {\n return null;\n }\n return unescape(this.tokens.inheritance.value);\n }\n\n *validate(defs) {\n for (const member of this.members) {\n if (member.validate) {\n yield* member.validate(defs);\n }\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const inheritance = () => {\n if (!this.tokens.inheritance) {\n return \"\";\n }\n return w.ts.wrap([\n w.token(this.tokens.colon),\n w.ts.trivia(this.tokens.inheritance.trivia),\n w.ts.inheritance(\n w.reference(this.tokens.inheritance.value, { context: this })\n ),\n ]);\n };\n\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.callback),\n w.token(this.tokens.partial),\n w.token(this.tokens.base),\n w.token(this.tokens.mixin),\n w.name_token(this.tokens.name, { data: this }),\n inheritance(),\n w.token(this.tokens.open),\n w.ts.wrap(this.members.map((m) => m.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { Type } from \"./type.js\";\nimport {\n const_data,\n const_value,\n primitive_type,\n autoParenter,\n unescape,\n} from \"./helpers.js\";\n\nexport class Constant extends Base {\n /**\n * @param 
{import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n tokens.base = tokeniser.consume(\"const\");\n if (!tokens.base) {\n return;\n }\n let idlType = primitive_type(tokeniser);\n if (!idlType) {\n const base =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Const lacks a type\");\n idlType = new Type({ source: tokeniser.source, tokens: { base } });\n }\n if (tokeniser.probe(\"?\")) {\n tokeniser.error(\"Unexpected nullable constant type\");\n }\n idlType.type = \"const-type\";\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Const lacks a name\");\n tokens.assign =\n tokeniser.consume(\"=\") || tokeniser.error(\"Const lacks value assignment\");\n tokens.value =\n const_value(tokeniser) || tokeniser.error(\"Const lacks a value\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated const, expected `;`\");\n const ret = new Constant({ source: tokeniser.source, tokens });\n autoParenter(ret).idlType = idlType;\n return ret;\n }\n\n get type() {\n return \"const\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get value() {\n return const_data(this.tokens.value);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n w.token(this.tokens.assign),\n w.token(this.tokens.value),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n type_with_extended_attributes,\n autoParenter,\n argument_list,\n} from \"./helpers.js\";\n\nexport class IterableLike extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n const ret = autoParenter(\n new IterableLike({ source: tokeniser.source, tokens: {} })\n );\n const { tokens } = ret;\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (!tokens.readonly) {\n tokens.async = tokeniser.consume(\"async\");\n }\n tokens.base = tokens.readonly\n ? tokeniser.consume(\"maplike\", \"setlike\")\n : tokens.async\n ? 
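/**
 * A minimal sketch of Constant.parse above: primitive or identifier types
 * are accepted, nullable ones are not. Assumes a package-level `parse`
 * export.
 *
 * @example
 * import { parse } from "webidl2";
 * const [iface] = parse(
 *   "[Exposed=Window] interface A { const unsigned short MAX = 65535; };"
 * );
 * const c = iface.members[0];
 * c.value;           // { type: "number", value: "65535" }
 * c.idlType.idlType; // "unsigned short"
 * // "const long? X = 0;" would throw: "Unexpected nullable constant type"
 */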
tokeniser.consume(\"iterable\")\n : tokeniser.consume(\"iterable\", \"maplike\", \"setlike\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n\n const { type } = ret;\n const secondTypeRequired = type === \"maplike\";\n const secondTypeAllowed = secondTypeRequired || type === \"iterable\";\n const argumentAllowed = ret.async && type === \"iterable\";\n\n tokens.open =\n tokeniser.consume(\"<\") ||\n tokeniser.error(`Missing less-than sign \\`<\\` in ${type} declaration`);\n const first =\n type_with_extended_attributes(tokeniser) ||\n tokeniser.error(`Missing a type argument in ${type} declaration`);\n ret.idlType = [first];\n ret.arguments = [];\n\n if (secondTypeAllowed) {\n first.tokens.separator = tokeniser.consume(\",\");\n if (first.tokens.separator) {\n ret.idlType.push(type_with_extended_attributes(tokeniser));\n } else if (secondTypeRequired) {\n tokeniser.error(`Missing second type argument in ${type} declaration`);\n }\n }\n\n tokens.close =\n tokeniser.consume(\">\") ||\n tokeniser.error(`Missing greater-than sign \\`>\\` in ${type} declaration`);\n\n if (tokeniser.probe(\"(\")) {\n if (argumentAllowed) {\n tokens.argsOpen = tokeniser.consume(\"(\");\n ret.arguments.push(...argument_list(tokeniser));\n tokens.argsClose =\n tokeniser.consume(\")\") ||\n tokeniser.error(\"Unterminated async iterable argument list\");\n } else {\n tokeniser.error(`Arguments are only allowed for \\`async iterable\\``);\n }\n }\n\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(`Missing semicolon after ${type} declaration`);\n\n return ret.this;\n }\n\n get type() {\n return this.tokens.base.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get async() {\n return !!this.tokens.async;\n }\n\n *validate(defs) {\n for (const type of this.idlType) {\n yield* type.validate(defs);\n }\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.readonly),\n w.token(this.tokens.async),\n w.token(this.tokens.base, w.ts.generic),\n w.token(this.tokens.open),\n w.ts.wrap(this.idlType.map((t) => t.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.argsOpen),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.argsClose),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent: this.parent }\n );\n }\n}\n","import { Base } from \"./base.js\";\nimport { argument_list, autoParenter } from \"./helpers.js\";\n\nexport class Constructor extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const base = tokeniser.consume(\"constructor\");\n if (!base) {\n return;\n }\n /** @type {Base[\"tokens\"]} */\n const tokens = { base };\n tokens.open =\n tokeniser.consume(\"(\") ||\n tokeniser.error(\"No argument list in constructor\");\n const args = argument_list(tokeniser);\n tokens.close =\n tokeniser.consume(\")\") || tokeniser.error(\"Unterminated constructor\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"No semicolon after constructor\");\n const ret = new Constructor({ source: tokeniser.source, tokens });\n autoParenter(ret).arguments = args;\n return ret;\n }\n\n get type() {\n return \"constructor\";\n }\n\n *validate(defs) {\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n\n /** @param 
{import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.base, w.ts.nameless, { data: this, parent }),\n w.token(this.tokens.open),\n w.ts.wrap(this.arguments.map((arg) => arg.write(w))),\n w.token(this.tokens.close),\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\nimport { IterableLike } from \"./iterable.js\";\nimport {\n stringifier,\n autofixAddExposedWindow,\n getMemberIndentation,\n getLastIndentation,\n getFirstToken,\n findLastIndex,\n autoParenter,\n} from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\nimport { checkInterfaceMemberDuplication } from \"../validators/interface.js\";\nimport { Constructor } from \"./constructor.js\";\nimport { Tokeniser } from \"../tokeniser.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction static_member(tokeniser) {\n const special = tokeniser.consume(\"static\");\n if (!special) return;\n const member =\n Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"No body in static member\");\n return member;\n}\n\nexport class Interface extends Container {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial = null } = {}) {\n const tokens = { partial, base };\n return Container.parse(\n tokeniser,\n new Interface({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Constructor.parse],\n [static_member],\n [stringifier],\n [IterableLike.parse],\n [Attribute.parse],\n [Operation.parse],\n ],\n }\n );\n }\n\n get type() {\n return \"interface\";\n }\n\n *validate(defs) {\n yield* this.extAttrs.validate(defs);\n if (\n !this.partial &&\n this.extAttrs.every((extAttr) => extAttr.name !== \"Exposed\")\n ) {\n const message = `Interfaces must have \\`[Exposed]\\` extended attribute. \\\nTo fix, add, for example, \\`[Exposed=Window]\\`. Please also consider carefully \\\nif your interface should also be exposed in a Worker scope. Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(\n this.tokens.name,\n this,\n \"require-exposed\",\n message,\n {\n autofix: autofixAddExposedWindow(this),\n }\n );\n }\n const oldConstructors = this.extAttrs.filter(\n (extAttr) => extAttr.name === \"Constructor\"\n );\n for (const constructor of oldConstructors) {\n const message = `Constructors should now be represented as a \\`constructor()\\` operation on the interface \\\ninstead of \\`[Constructor]\\` extended attribute. 
Refer to the \\\n[WebIDL spec section on constructor operations](https://heycam.github.io/webidl/#idl-constructors) \\\nfor more information.`;\n yield validationError(\n constructor.tokens.name,\n this,\n \"constructor-member\",\n message,\n {\n autofix: autofixConstructor(this, constructor),\n }\n );\n }\n\n const isGlobal = this.extAttrs.some((extAttr) => extAttr.name === \"Global\");\n if (isGlobal) {\n const factoryFunctions = this.extAttrs.filter(\n (extAttr) => extAttr.name === \"LegacyFactoryFunction\"\n );\n for (const named of factoryFunctions) {\n const message = `Interfaces marked as \\`[Global]\\` cannot have factory functions.`;\n yield validationError(\n named.tokens.name,\n this,\n \"no-constructible-global\",\n message\n );\n }\n\n const constructors = this.members.filter(\n (member) => member.type === \"constructor\"\n );\n for (const named of constructors) {\n const message = `Interfaces marked as \\`[Global]\\` cannot have constructors.`;\n yield validationError(\n named.tokens.base,\n this,\n \"no-constructible-global\",\n message\n );\n }\n }\n\n yield* super.validate(defs);\n if (!this.partial) {\n yield* checkInterfaceMemberDuplication(defs, this);\n }\n }\n}\n\nfunction autofixConstructor(interfaceDef, constructorExtAttr) {\n interfaceDef = autoParenter(interfaceDef);\n return () => {\n const indentation = getLastIndentation(\n interfaceDef.extAttrs.tokens.open.trivia\n );\n const memberIndent = interfaceDef.members.length\n ? getLastIndentation(getFirstToken(interfaceDef.members[0]).trivia)\n : getMemberIndentation(indentation);\n const constructorOp = Constructor.parse(\n new Tokeniser(`\\n${memberIndent}constructor();`)\n );\n constructorOp.extAttrs = new ExtendedAttributes({\n source: interfaceDef.source,\n tokens: {},\n });\n autoParenter(constructorOp).arguments = constructorExtAttr.arguments;\n\n const existingIndex = findLastIndex(\n interfaceDef.members,\n (m) => m.type === \"constructor\"\n );\n interfaceDef.members.splice(existingIndex + 1, 0, constructorOp);\n\n const { close } = interfaceDef.tokens;\n if (!close.trivia.includes(\"\\n\")) {\n close.trivia += `\\n${indentation}`;\n }\n\n const { extAttrs } = interfaceDef;\n const index = extAttrs.indexOf(constructorExtAttr);\n const removed = extAttrs.splice(index, 1);\n if (!extAttrs.length) {\n extAttrs.tokens.open = extAttrs.tokens.close = undefined;\n } else if (extAttrs.length === index) {\n extAttrs[index - 1].tokens.separator = undefined;\n } else if (!extAttrs[index].tokens.name.trivia.trim()) {\n extAttrs[index].tokens.name.trivia = removed[0].tokens.name.trivia;\n }\n };\n}\n","import { validationError } from \"../error.js\";\n\n/**\n * @param {import(\"../validator.js\").Definitions} defs\n * @param {import(\"../productions/container.js\").Container} i\n */\nexport function* checkInterfaceMemberDuplication(defs, i) {\n const opNames = groupOperationNames(i);\n const partials = defs.partials.get(i.name) || [];\n const mixins = defs.mixinMap.get(i.name) || [];\n for (const ext of [...partials, ...mixins]) {\n const additions = getOperations(ext);\n const statics = additions.filter((a) => a.special === \"static\");\n const nonstatics = additions.filter((a) => a.special !== \"static\");\n yield* checkAdditions(statics, opNames.statics, ext, i);\n yield* checkAdditions(nonstatics, opNames.nonstatics, ext, i);\n statics.forEach((op) => opNames.statics.add(op.name));\n nonstatics.forEach((op) => opNames.nonstatics.add(op.name));\n }\n\n /**\n * @param 
{import(\"../productions/operation.js\").Operation[]} additions\n * @param {Set<string>} existings\n * @param {import(\"../productions/container.js\").Container} ext\n * @param {import(\"../productions/container.js\").Container} base\n */\n function* checkAdditions(additions, existings, ext, base) {\n for (const addition of additions) {\n const { name } = addition;\n if (name && existings.has(name)) {\n const isStatic = addition.special === \"static\" ? \"static \" : \"\";\n const message = `The ${isStatic}operation \"${name}\" has already been defined for the base interface \"${base.name}\" either in itself or in a mixin`;\n yield validationError(\n addition.tokens.name,\n ext,\n \"no-cross-overload\",\n message\n );\n }\n }\n }\n\n /**\n * @param {import(\"../productions/container.js\").Container} i\n * @returns {import(\"../productions/operation.js\").Operation[]}\n */\n function getOperations(i) {\n return i.members.filter(({ type }) => type === \"operation\");\n }\n\n /**\n * @param {import(\"../productions/container.js\").Container} i\n */\n function groupOperationNames(i) {\n const ops = getOperations(i);\n return {\n statics: new Set(\n ops.filter((op) => op.special === \"static\").map((op) => op.name)\n ),\n nonstatics: new Set(\n ops.filter((op) => op.special !== \"static\").map((op) => op.name)\n ),\n };\n }\n}\n","import { Container } from \"./container.js\";\nimport { Constant } from \"./constant.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { stringifier } from \"./helpers.js\";\n\nexport class Mixin extends Container {\n /**\n * @typedef {import(\"../tokeniser.js\").Token} Token\n *\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {Token} base\n * @param {object} [options]\n * @param {Token} [options.partial]\n */\n static parse(tokeniser, base, { partial } = {}) {\n const tokens = { partial, base };\n tokens.mixin = tokeniser.consume(\"mixin\");\n if (!tokens.mixin) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Mixin({ source: tokeniser.source, tokens }),\n {\n allowedMembers: [\n [Constant.parse],\n [stringifier],\n [Attribute.parse, { noInherit: true }],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"interface mixin\";\n }\n}\n","import { Base } from \"./base.js\";\nimport {\n unescape,\n type_with_extended_attributes,\n autoParenter,\n} from \"./helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { Default } from \"./default.js\";\n\nexport class Field extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n /** @type {Base[\"tokens\"]} */\n const tokens = {};\n const ret = autoParenter(new Field({ source: tokeniser.source, tokens }));\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.required = tokeniser.consume(\"required\");\n ret.idlType =\n type_with_extended_attributes(tokeniser, \"dictionary-type\") ||\n tokeniser.error(\"Dictionary member lacks a type\");\n tokens.name =\n tokeniser.consumeKind(\"identifier\") ||\n tokeniser.error(\"Dictionary member lacks a name\");\n ret.default = Default.parse(tokeniser);\n if (tokens.required && ret.default)\n tokeniser.error(\"Required member must not have a default\");\n tokens.termination =\n tokeniser.consume(\";\") ||\n tokeniser.error(\"Unterminated dictionary member, expected `;`\");\n return ret.this;\n }\n\n get type() {\n return \"field\";\n }\n get name() {\n return 
unescape(this.tokens.name.value);\n }\n get required() {\n return !!this.tokens.required;\n }\n\n *validate(defs) {\n yield* this.idlType.validate(defs);\n }\n\n /** @param {import(\"../writer.js\").Writer} w */\n write(w) {\n const { parent } = this;\n return w.ts.definition(\n w.ts.wrap([\n this.extAttrs.write(w),\n w.token(this.tokens.required),\n w.ts.type(this.idlType.write(w)),\n w.name_token(this.tokens.name, { data: this, parent }),\n this.default ? this.default.write(w) : \"\",\n w.token(this.tokens.termination),\n ]),\n { data: this, parent }\n );\n }\n}\n","import { Container } from \"./container.js\";\nimport { Field } from \"./field.js\";\n\nexport class Dictionary extends Container {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.partial]\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"dictionary\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Dictionary({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [[Field.parse]],\n }\n );\n }\n\n get type() {\n return \"dictionary\";\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { validationError } from \"../error.js\";\nimport { autofixAddExposedWindow } from \"./helpers.js\";\nimport { Constant } from \"./constant.js\";\n\nexport class Namespace extends Container {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {object} [options]\n * @param {import(\"../tokeniser.js\").Token} [options.partial]\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"namespace\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new Namespace({ source: tokeniser.source, tokens }),\n {\n allowedMembers: [\n [Attribute.parse, { noInherit: true, readonly: true }],\n [Constant.parse],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"namespace\";\n }\n\n *validate(defs) {\n if (\n !this.partial &&\n this.extAttrs.every((extAttr) => extAttr.name !== \"Exposed\")\n ) {\n const message = `Namespaces must have [Exposed] extended attribute. \\\nTo fix, add, for example, [Exposed=Window]. Please also consider carefully \\\nif your namespace should also be exposed in a Worker scope. 
Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(\n this.tokens.name,\n this,\n \"require-exposed\",\n message,\n {\n autofix: autofixAddExposedWindow(this),\n }\n );\n }\n yield* super.validate(defs);\n }\n}\n","import { Container } from \"./container.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\n\nexport class CallbackInterface extends Container {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, callback, { partial = null } = {}) {\n const tokens = { callback };\n tokens.base = tokeniser.consume(\"interface\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(\n tokeniser,\n new CallbackInterface({ source: tokeniser.source, tokens }),\n {\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Operation.parse, { regular: true }],\n ],\n }\n );\n }\n\n get type() {\n return \"callback interface\";\n }\n}\n","import { Tokeniser } from \"./tokeniser.js\";\nimport { Enum } from \"./productions/enum.js\";\nimport { Includes } from \"./productions/includes.js\";\nimport { ExtendedAttributes } from \"./productions/extended-attributes.js\";\nimport { Typedef } from \"./productions/typedef.js\";\nimport { CallbackFunction } from \"./productions/callback.js\";\nimport { Interface } from \"./productions/interface.js\";\nimport { Mixin } from \"./productions/mixin.js\";\nimport { Dictionary } from \"./productions/dictionary.js\";\nimport { Namespace } from \"./productions/namespace.js\";\nimport { CallbackInterface } from \"./productions/callback-interface.js\";\nimport { autoParenter } from \"./productions/helpers.js\";\nimport { Eof } from \"./productions/token.js\";\n\n/**\n * @param {Tokeniser} tokeniser\n * @param {object} options\n * @param {boolean} [options.concrete]\n * @param {Function[]} [options.productions]\n */\nfunction parseByTokens(tokeniser, options) {\n const source = tokeniser.source;\n\n function error(str) {\n tokeniser.error(str);\n }\n\n function consume(...candidates) {\n return tokeniser.consume(...candidates);\n }\n\n function callback() {\n const callback = consume(\"callback\");\n if (!callback) return;\n if (tokeniser.probe(\"interface\")) {\n return CallbackInterface.parse(tokeniser, callback);\n }\n return CallbackFunction.parse(tokeniser, callback);\n }\n\n function interface_(opts) {\n const base = consume(\"interface\");\n if (!base) return;\n const ret =\n Mixin.parse(tokeniser, base, opts) ||\n Interface.parse(tokeniser, base, opts) ||\n error(\"Interface has no proper body\");\n return ret;\n }\n\n function partial() {\n const partial = consume(\"partial\");\n if (!partial) return;\n return (\n Dictionary.parse(tokeniser, { partial }) ||\n interface_({ partial }) ||\n Namespace.parse(tokeniser, { partial }) ||\n error(\"Partial doesn't apply to anything\")\n );\n }\n\n function definition() {\n if (options.productions) {\n for (const production of options.productions) {\n const result = production(tokeniser);\n if (result) {\n return result;\n }\n }\n }\n\n return (\n callback() ||\n interface_() ||\n partial() ||\n Dictionary.parse(tokeniser) ||\n Enum.parse(tokeniser) ||\n Typedef.parse(tokeniser) ||\n Includes.parse(tokeniser) ||\n Namespace.parse(tokeniser)\n );\n }\n\n function definitions() {\n if (!source.length) return [];\n const defs = [];\n while (true) {\n const ea = ExtendedAttributes.parse(tokeniser);\n const def = definition();\n 
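/**
 * A minimal sketch of the top-level dispatch in definition() above, which
 * tries callback, interface, partial, dictionary, enum, typedef, includes,
 * and namespace in order. Assumes a package-level `parse` export.
 *
 * @example
 * import { parse } from "webidl2";
 * const ast = parse(`
 *   callback interface Listener { undefined handle(DOMString ev); };
 *   namespace Console { undefined log(DOMString msg); };
 * `);
 * ast.map((d) => d.type); // ["callback interface", "namespace"]
 */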
if (!def) {\n if (ea.length) error(\"Stray extended attributes\");\n break;\n }\n autoParenter(def).extAttrs = ea;\n defs.push(def);\n }\n const eof = Eof.parse(tokeniser);\n if (options.concrete) {\n defs.push(eof);\n }\n return defs;\n }\n const res = definitions();\n if (tokeniser.position < source.length) error(\"Unrecognised tokens\");\n return res;\n}\n\n/**\n * @param {string} str\n * @param {object} [options]\n * @param {*} [options.sourceName]\n * @param {boolean} [options.concrete]\n * @param {Function[]} [options.productions]\n * @return {import(\"./productions/base.js\").Base[]}\n */\nexport function parse(str, options = {}) {\n const tokeniser = new Tokeniser(str);\n if (typeof options.sourceName !== \"undefined\") {\n // @ts-ignore (See Tokeniser.source in supplement.d.ts)\n tokeniser.source.name = options.sourceName;\n }\n return parseByTokens(tokeniser, options);\n}\n","function noop(arg) {\n return arg;\n}\n\nconst templates = {\n wrap: (items) => items.join(\"\"),\n trivia: noop,\n name: noop,\n reference: noop,\n type: noop,\n generic: noop,\n nameless: noop,\n inheritance: noop,\n definition: noop,\n extendedAttribute: noop,\n extendedAttributeReference: noop,\n};\n\nexport class Writer {\n constructor(ts) {\n this.ts = Object.assign({}, templates, ts);\n }\n\n /**\n * @param {string} raw\n * @param {object} options\n * @param {string} [options.unescaped]\n * @param {import(\"./productions/base.js\").Base} [options.context]\n * @returns\n */\n reference(raw, { unescaped, context }) {\n if (!unescaped) {\n unescaped = raw.startsWith(\"_\") ? raw.slice(1) : raw;\n }\n return this.ts.reference(raw, unescaped, context);\n }\n\n /**\n * @param {import(\"./tokeniser.js\").Token} t\n * @param {Function} wrapper\n * @param {...any} args\n * @returns\n */\n token(t, wrapper = noop, ...args) {\n if (!t) {\n return \"\";\n }\n const value = wrapper(t.value, ...args);\n return this.ts.wrap([this.ts.trivia(t.trivia), value]);\n }\n\n reference_token(t, context) {\n return this.token(t, this.reference.bind(this), { context });\n }\n\n name_token(t, arg) {\n return this.token(t, this.ts.name, arg);\n }\n\n identifier(id, context) {\n return this.ts.wrap([\n this.reference_token(id.tokens.value, context),\n this.token(id.tokens.separator),\n ]);\n }\n}\n\nexport function write(ast, { templates: ts = templates } = {}) {\n ts = Object.assign({}, templates, ts);\n\n const w = new Writer(ts);\n\n return ts.wrap(ast.map((it) => it.write(w)));\n}\n","import { validationError as error } from \"./error.js\";\n\nfunction getMixinMap(all, unique) {\n const map = new Map();\n const includes = all.filter((def) => def.type === \"includes\");\n for (const include of includes) {\n const mixin = unique.get(include.includes);\n if (!mixin) {\n continue;\n }\n const array = map.get(include.target);\n if (array) {\n array.push(mixin);\n } else {\n map.set(include.target, [mixin]);\n }\n }\n return map;\n}\n\n/**\n * @typedef {ReturnType<typeof groupDefinitions>} Definitions\n */\nfunction groupDefinitions(all) {\n const unique = new Map();\n const duplicates = new Set();\n const partials = new Map();\n for (const def of all) {\n if (def.partial) {\n const array = partials.get(def.name);\n if (array) {\n array.push(def);\n } else {\n partials.set(def.name, [def]);\n }\n continue;\n }\n if (!def.name) {\n continue;\n }\n if (!unique.has(def.name)) {\n unique.set(def.name, def);\n } else {\n duplicates.add(def);\n }\n }\n return {\n all,\n unique,\n partials,\n duplicates,\n mixinMap: getMixinMap(all, 
unique),\n cache: {\n typedefIncludesDictionary: new WeakMap(),\n dictionaryIncludesRequiredField: new WeakMap(),\n },\n };\n}\n\nfunction* checkDuplicatedNames({ unique, duplicates }) {\n for (const dup of duplicates) {\n const { name } = dup;\n const message = `The name \"${name}\" of type \"${\n unique.get(name).type\n }\" was already seen`;\n yield error(dup.tokens.name, dup, \"no-duplicate\", message);\n }\n}\n\nfunction* validateIterable(ast) {\n const defs = groupDefinitions(ast);\n for (const def of defs.all) {\n if (def.validate) {\n yield* def.validate(defs);\n }\n }\n yield* checkDuplicatedNames(defs);\n}\n\n// Remove this once all of our support targets expose `.flat()` by default\nfunction flatten(array) {\n if (array.flat) {\n return array.flat();\n }\n return [].concat(...array);\n}\n\n/**\n * @param {import(\"./productions/base.js\").Base[]} ast\n * @return {import(\"./error.js\").WebIDLErrorData[]} validation errors\n */\nexport function validate(ast) {\n return [...validateIterable(flatten(ast))];\n}\n"],"names":["root","factory","exports","module","define","amd","globalThis","__webpack_require__","definition","key","o","Object","defineProperty","enumerable","get","obj","prop","prototype","hasOwnProperty","call","Symbol","toStringTag","value","error","source","position","current","message","kind","level","autofix","ruleName","sliceTokens","count","slice","Math","max","tokensToText","inputs","precedes","text","map","t","trivia","join","nextToken","type","length","line","precedingLastLine","splitted","split","lastLine","subsequentTokens","subsequentText","sourceContext","repeat","contextType","context","name","partial","node","hierarchy","parent","unshift","n","base","target","result","appendIfExist","contextAsText","bareMessage","sourceName","input","tokens","syntaxError","validationError","token","options","index","Base","constructor","defineProperties","this","writable","toJSON","json","undefined","inheritance","proto","descMap","getOwnPropertyDescriptors","entries","getPrototypeOf","idlTypeIncludesDictionary","idlType","defs","useNullableInner","union","def","unique","typedefIncludesDictionary","cache","has","set","reference","dictionary","nullable","subtype","dictionaryIncludesRequiredField","dict","members","some","field","required","superdict","ArrayBase","Array","super","WrappedToken","tokeniser","consumeKind","write","w","ts","wrap","separator","Eof","tokenName","list","parser","listName","extAttrValueSyntax","renamedLegacies","Map","extAttrListItems","syntax","toks","ExtendedAttributeParameters","assign","consume","ret","autoParenter","asterisk","secondaryName","open","rhsIsList","argument_list","close","rhsType","reference_token","p","identifier","SimpleExtendedAttribute","params","parse","extAttr","arg","arguments","validate","extendedAttribute","extendedAttributeReference","ExtendedAttributes","push","unconsume","probe","ea","type_suffix","single_type","typeName","Type","return_type","type_with_extended_attributes","keyType","stringTypes","keyIdlType","valueType","generic_type","primitive_type","typeNameKeywords","generic","typ","or","union_type","extAttrs","Boolean","prefix","postfix","filter","typedef","targetToken","firstToken","ref","unescaped","type_body","Default","const_value","expression","const_data","negative","Argument","start_position","optional","variadic","argumentNameKeywords","default","autofixOptionalDictionaryDefaultValue","indexOf","a","isLastRequiredArgument","getFirstToken","name_token","data","Tokeniser","Operation","special","regular","ter
mination","includes","argument","body","nameless","Attribute","noInherit","readonly","startsWith","allowDangler","first","items","item","num_type","integer_type","decimal_type","voidToken","stringifier","getLastIndentation","str","lines","match","autofixAddExposedWindow","exposed","existing","test","values","sort","x","y","Proxy","isArray","tokenRe","decimal","integer","string","whitespace","comment","other","nonRegexTerminals","concat","punctuations","reserved","idl","lastCharIndex","nextChar","charAt","attemptTokenMatch","noFlushTrivia","currentTrivia","pop","lastIndex","WebIDLParseError","punctuation","Error","re","exec","tokenise","probeKind","candidates","consumeIdentifier","EnumValue","Enum","v","Includes","mixin","Typedef","CallbackFunction","Container","instance","inheritable","allowedMembers","colon","mem","args","member","callback","m","Constant","IterableLike","async","secondTypeRequired","secondTypeAllowed","argumentAllowed","argsOpen","argsClose","Constructor","static_member","Interface","every","oldConstructors","autofixConstructor","factoryFunctions","named","constructors","i","opNames","ops","getOperations","statics","Set","op","nonstatics","groupOperationNames","partials","mixins","mixinMap","ext","additions","checkAdditions","forEach","add","existings","addition","checkInterfaceMemberDuplication","interfaceDef","constructorExtAttr","indentation","memberIndent","parentTrivia","indentCh","getMemberIndentation","constructorOp","existingIndex","array","predicate","reverse","findIndex","findLastIndex","splice","removed","trim","Mixin","Field","Dictionary","Namespace","CallbackInterface","parseByTokens","interface_","opts","productions","production","res","eof","concrete","definitions","noop","templates","Writer","raw","wrapper","bind","id","ast","it","getMixinMap","all","include","validateIterable","duplicates","WeakMap","groupDefinitions","dup","checkDuplicatedNames","flat"],"sourceRoot":""} \ No newline at end of file
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/error.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/error.js
index 5b99fd3f984..3c9bc96b307 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/error.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/error.js
@@ -28,12 +28,16 @@ function contextAsText(node) {
* @typedef {object} WebIDL2ErrorOptions
* @property {"error" | "warning"} [level]
* @property {Function} [autofix]
+ * @property {string} [ruleName]
*
* @typedef {ReturnType<typeof error>} WebIDLErrorData
*
* @param {string} message error message
+ * @param {*} position
+ * @param {*} current
+ * @param {*} message
* @param {"Syntax" | "Validation"} kind error type
- * @param {WebIDL2ErrorOptions} [options]
+ * @param {WebIDL2ErrorOptions=} options
*/
function error(
source,
@@ -52,6 +56,12 @@ function error(
: source.slice(Math.max(position + count, 0), position);
}
+ /**
+ * @param {import("./tokeniser.js").Token[]} inputs
+ * @param {object} [options]
+ * @param {boolean} [options.precedes]
+ * @returns
+ */
function tokensToText(inputs, { precedes } = {}) {
const text = inputs.map((t) => t.trivia + t.value).join("");
const nextToken = source[position];
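
Note on the hunks above: `ruleName` is not only a JSDoc addition, it rides along on every error object the validator yields. A minimal sketch of reading it, assuming the package's usual `parse`/`validate` exports (the IDL string is made up):

    import { parse, validate } from "webidl2";

    const errors = validate(parse("interface A {}; interface A {};"));
    for (const e of errors) {
      // among others, ruleName "no-duplicate" with bareMessage
      // 'The name "A" of type "interface" was already seen'
      console.log(e.ruleName, e.bareMessage);
    }
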
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/argument.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/argument.js
index 4fc57c0cbe5..f6b5425377e 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/argument.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/argument.js
@@ -1,5 +1,3 @@
-// @ts-check
-
import { Base } from "./base.js";
import { Default } from "./default.js";
import { ExtendedAttributes } from "./extended-attributes.js";
@@ -18,7 +16,7 @@ import {
export class Argument extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const start_position = tokeniser.position;
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/array-base.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/array-base.js
index 7256e0ee775..18304a004e1 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/array-base.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/array-base.js
@@ -1,5 +1,3 @@
-// @ts-check
-
export class ArrayBase extends Array {
constructor({ source, tokens }) {
super();
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/attribute.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/attribute.js
index 9801103d8db..70cc784c41e 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/attribute.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/attribute.js
@@ -1,5 +1,3 @@
-// @ts-check
-
import { validationError } from "../error.js";
import { idlTypeIncludesDictionary } from "../validators/helpers.js";
import { Base } from "./base.js";
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/base.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/base.js
index c266fb2e837..c3dc73c58b7 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/base.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/base.js
@@ -1,5 +1,3 @@
-// @ts-check
-
export class Base {
/**
* @param {object} initializer
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/callback-interface.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/callback-interface.js
index 10f8c137406..da9dc96fa9e 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/callback-interface.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/callback-interface.js
@@ -1,12 +1,10 @@
-// @ts-check
-
import { Container } from "./container.js";
import { Operation } from "./operation.js";
import { Constant } from "./constant.js";
export class CallbackInterface extends Container {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser, callback, { partial = null } = {}) {
const tokens = { callback };
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/constructor.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/constructor.js
index 022aa34b59a..6b5a95ffc0c 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/constructor.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/constructor.js
@@ -3,7 +3,7 @@ import { argument_list, autoParenter } from "./helpers.js";
export class Constructor extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const base = tokeniser.consume("constructor");
@@ -31,9 +31,6 @@ export class Constructor extends Base {
}
*validate(defs) {
- if (this.idlType) {
- yield* this.idlType.validate(defs);
- }
for (const argument of this.arguments) {
yield* argument.validate(defs);
}
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/container.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/container.js
index 8b3c16e6022..d52dc50966a 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/container.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/container.js
@@ -18,9 +18,8 @@ function inheritance(tokeniser) {
export class Container extends Base {
/**
- * @template T
* @param {import("../tokeniser.js").Tokeniser} tokeniser
- * @param {T} instance
+ * @param {*} instance TODO: This should be {T extends Container}, but see https://github.com/microsoft/TypeScript/issues/4628
* @param {*} args
*/
static parse(tokeniser, instance, { inheritable, allowedMembers }) {
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/default.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/default.js
index a4a704366de..ae5adf52735 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/default.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/default.js
@@ -3,7 +3,7 @@ import { const_data, const_value } from "./helpers.js";
export class Default extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const assign = tokeniser.consume("=");
@@ -50,7 +50,7 @@ export class Default extends Base {
return const_data(this.expression[0]).negative;
}
- /** @param {import("../writer.js").Writer)} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.token(this.tokens.assign),
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/dictionary.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/dictionary.js
index 9d1eb3eaccc..fa55d979dfa 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/dictionary.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/dictionary.js
@@ -1,11 +1,9 @@
-// @ts-check
-
import { Container } from "./container.js";
import { Field } from "./field.js";
export class Dictionary extends Container {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/enum.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/enum.js
index 3625405c857..9f425ccf4bb 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/enum.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/enum.js
@@ -2,9 +2,9 @@ import { list, unescape, autoParenter } from "./helpers.js";
import { WrappedToken } from "./token.js";
import { Base } from "./base.js";
-class EnumValue extends WrappedToken {
+export class EnumValue extends WrappedToken {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const value = tokeniser.consumeKind("string");
@@ -36,7 +36,7 @@ class EnumValue extends WrappedToken {
export class Enum extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
/** @type {Base["tokens"]} */
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/extended-attributes.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/extended-attributes.js
index f55262ca589..4c0b3c7a1f5 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/extended-attributes.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/extended-attributes.js
@@ -5,7 +5,7 @@ import { list, argument_list, autoParenter, unescape } from "./helpers.js";
import { validationError } from "../error.js";
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} tokenName
*/
function tokens(tokeniser, tokenName) {
@@ -26,7 +26,9 @@ const shouldBeLegacyPrefixed = [
];
const renamedLegacies = new Map([
- ...shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`]),
+ .../** @type {[string, string][]} */ (
+ shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`])
+ ),
["NamedConstructor", "LegacyFactoryFunction"],
["OverrideBuiltins", "LegacyOverrideBuiltIns"],
["TreatNullAs", "LegacyNullToEmptyString"],
@@ -34,7 +36,7 @@ const renamedLegacies = new Map([
/**
* This will allow a set of extended attribute values to be parsed.
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
function extAttrListItems(tokeniser) {
for (const syntax of extAttrValueSyntax) {
@@ -48,9 +50,9 @@ function extAttrListItems(tokeniser) {
);
}
-class ExtendedAttributeParameters extends Base {
+export class ExtendedAttributeParameters extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const tokens = { assign: tokeniser.consume("=") };
@@ -75,7 +77,7 @@ class ExtendedAttributeParameters extends Base {
tokens.close =
tokeniser.consume(")") ||
tokeniser.error("Unexpected token in extended attribute argument list");
- } else if (ret.hasRhs && !tokens.secondaryName) {
+ } else if (tokens.assign && !tokens.secondaryName) {
tokeniser.error("No right hand side to extended attribute assignment");
}
return ret.this;
@@ -100,7 +102,7 @@ class ExtendedAttributeParameters extends Base {
return null;
}
- /** @param {import("../writer.js").Writer)} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
const { rhsType } = this;
return w.ts.wrap([
@@ -120,7 +122,7 @@ class ExtendedAttributeParameters extends Base {
export class SimpleExtendedAttribute extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const name = tokeniser.consumeKind("identifier");
@@ -194,7 +196,7 @@ information.`;
}
}
- /** @param {import("../writer.js").Writer)} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.ts.trivia(this.tokens.name.trivia),
@@ -226,13 +228,13 @@ function renameLegacyExtendedAttribute(extAttr) {
// seems to be used
export class ExtendedAttributes extends ArrayBase {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const tokens = {};
tokens.open = tokeniser.consume("[");
- if (!tokens.open) return new ExtendedAttributes({});
const ret = new ExtendedAttributes({ source: tokeniser.source, tokens });
+ if (!tokens.open) return ret;
ret.push(
...list(tokeniser, {
parser: SimpleExtendedAttribute.parse,
@@ -241,9 +243,12 @@ export class ExtendedAttributes extends ArrayBase {
);
tokens.close =
tokeniser.consume("]") ||
- tokeniser.error("Unexpected closing token of extended attribute");
+ tokeniser.error(
+ "Expected a closing token for the extended attribute list"
+ );
if (!ret.length) {
- tokeniser.error("Found an empty extended attribute");
+ tokeniser.unconsume(tokens.close.index);
+ tokeniser.error("An extended attribute list must not be empty");
}
if (tokeniser.probe("[")) {
tokeniser.error(
@@ -259,7 +264,7 @@ export class ExtendedAttributes extends ArrayBase {
}
}
- /** @param {import("../writer.js").Writer)} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
if (!this.length) return "";
return w.ts.wrap([
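
Two behavioural fixes sit in the extended-attributes hunks above: a missing "]" gets a clearer message, and an empty "[]" now unconsumes the close token before erroring so the report points at the empty list itself. A quick sketch of the second case (assuming the thrown error's `bareMessage` carries the short text in this version):

    import { parse } from "webidl2";

    try {
      parse("[] interface Empty {};");
    } catch (e) {
      console.log(e.bareMessage);
      // "An extended attribute list must not be empty"
    }
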
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/field.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/field.js
index 0fb527912a6..1dab953cc97 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/field.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/field.js
@@ -9,7 +9,7 @@ import { Default } from "./default.js";
export class Field extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
/** @type {Base["tokens"]} */
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/helpers.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/helpers.js
index 896bcc75957..dd252a0da6d 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/helpers.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/helpers.js
@@ -17,7 +17,7 @@ export function unescape(identifier) {
/**
* Parses comma-separated list
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} args
* @param {Function} args.parser parser function for each item
* @param {boolean} [args.allowDangler] whether to allow dangling comma
@@ -46,7 +46,7 @@ export function list(tokeniser, { parser, allowDangler, listName = "list" }) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function const_value(tokeniser) {
return (
@@ -86,7 +86,7 @@ export function const_data({ type, value }) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function primitive_type(tokeniser) {
function integer_type() {
@@ -109,7 +109,7 @@ export function primitive_type(tokeniser) {
}
const { source } = tokeniser;
- const num_type = integer_type(tokeniser) || decimal_type(tokeniser);
+ const num_type = integer_type() || decimal_type();
if (num_type) return num_type;
const base = tokeniser.consume(
"bigint",
@@ -124,7 +124,7 @@ export function primitive_type(tokeniser) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function argument_list(tokeniser) {
return list(tokeniser, {
@@ -134,8 +134,8 @@ export function argument_list(tokeniser) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
- * @param {string} typeName
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
+ * @param {string=} typeName (TODO: See Type.type for more details)
*/
export function type_with_extended_attributes(tokeniser, typeName) {
const extAttrs = ExtendedAttributes.parse(tokeniser);
@@ -145,8 +145,8 @@ export function type_with_extended_attributes(tokeniser, typeName) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
- * @param {string} typeName
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
+ * @param {string=} typeName (TODO: See Type.type for more details)
*/
export function return_type(tokeniser, typeName) {
const typ = Type.parse(tokeniser, typeName || "return-type");
@@ -165,7 +165,7 @@ export function return_type(tokeniser, typeName) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function stringifier(tokeniser) {
const special = tokeniser.consume("stringifier");
@@ -202,8 +202,7 @@ export function getMemberIndentation(parentTrivia) {
}
/**
- * @param {object} def
- * @param {import("./extended-attributes.js").ExtendedAttributes} def.extAttrs
+ * @param {import("./interface.js").Interface} def
*/
export function autofixAddExposedWindow(def) {
return () => {
@@ -257,7 +256,7 @@ export function findLastIndex(array, predicate) {
/**
* Returns a proxy that auto-assign `parent` field.
- * @template T
+ * @template {Record<string | symbol, any>} T
* @param {T} data
* @param {*} [parent] The object that will be assigned to `parent`.
* If absent, it will be `data` by default.
@@ -273,10 +272,10 @@ export function autoParenter(data, parent) {
// `autoParenter(parse())` where the function may return nothing.
return data;
}
- return new Proxy(data, {
+ const proxy = new Proxy(data, {
get(target, p) {
const value = target[p];
- if (Array.isArray(value)) {
+ if (Array.isArray(value) && p !== "source") {
// Wraps the array so that any added items will also automatically
// get their `parent` values.
return autoParenter(value, target);
@@ -284,6 +283,7 @@ export function autoParenter(data, parent) {
return value;
},
set(target, p, value) {
+ // @ts-ignore https://github.com/microsoft/TypeScript/issues/47357
target[p] = value;
if (!value) {
return true;
@@ -300,4 +300,5 @@ export function autoParenter(data, parent) {
return true;
},
});
+ return proxy;
}
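
For context, `autoParenter` returns a Proxy whose set trap assigns `parent` to whatever node is stored through it; the new `p !== "source"` guard keeps the shared source-token array from being wrapped as if it were a child list. A rough sketch of the observable behaviour, assuming direct access to the internal module and nodes that predeclare a writable `parent` slot the way `Base` does:

    import { autoParenter } from "webidl2/lib/productions/helpers.js";

    const node = { tokens: {}, parent: null };
    const child = { tokens: {}, parent: null };
    autoParenter(node).extAttrs = child; // set trap fires here
    console.log(child.parent === node);  // true
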
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/includes.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/includes.js
index 89f7d1beb1e..adc7962f691 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/includes.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/includes.js
@@ -1,11 +1,9 @@
-// @ts-check
-
import { Base } from "./base.js";
import { unescape } from "./helpers.js";
export class Includes extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const target = tokeniser.consumeKind("identifier");
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/interface.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/interface.js
index fab87604241..e22b54a2840 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/interface.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/interface.js
@@ -19,7 +19,7 @@ import { Tokeniser } from "../tokeniser.js";
import { ExtendedAttributes } from "./extended-attributes.js";
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
function static_member(tokeniser) {
const special = tokeniser.consume("static");
@@ -33,7 +33,7 @@ function static_member(tokeniser) {
export class Interface extends Container {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser, base, { partial = null } = {}) {
const tokens = { partial, base };
@@ -147,7 +147,10 @@ function autofixConstructor(interfaceDef, constructorExtAttr) {
const constructorOp = Constructor.parse(
new Tokeniser(`\n${memberIndent}constructor();`)
);
- constructorOp.extAttrs = new ExtendedAttributes({});
+ constructorOp.extAttrs = new ExtendedAttributes({
+ source: interfaceDef.source,
+ tokens: {},
+ });
autoParenter(constructorOp).arguments = constructorExtAttr.arguments;
const existingIndex = findLastIndex(
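
The `autofixConstructor` change matters because the synthesized ExtendedAttributes previously had no `source`, which could break later writes; it now borrows the interface's token array. The autofix itself is reachable through validation, roughly like this (rule name and output shape per this version; the IDL is made up):

    import { parse, validate, write } from "webidl2";

    const ast = parse("[Exposed=Window, Constructor] interface Foo {};");
    for (const err of validate(ast)) {
      if (err.ruleName === "constructor-member" && err.autofix) {
        err.autofix(); // rewrites [Constructor] into a constructor() member
      }
    }
    write(ast); // roughly "[Exposed=Window] interface Foo {\n  constructor();\n};"
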
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/iterable.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/iterable.js
index 655ba4e14da..4a7e510ee55 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/iterable.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/iterable.js
@@ -11,10 +11,10 @@ export class IterableLike extends Base {
*/
static parse(tokeniser) {
const start_position = tokeniser.position;
- const tokens = {};
const ret = autoParenter(
- new IterableLike({ source: tokeniser.source, tokens })
+ new IterableLike({ source: tokeniser.source, tokens: {} })
);
+ const { tokens } = ret;
tokens.readonly = tokeniser.consume("readonly");
if (!tokens.readonly) {
tokens.async = tokeniser.consume("async");
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/namespace.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/namespace.js
index a5e48009777..ef7c35f50f9 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/namespace.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/namespace.js
@@ -7,7 +7,7 @@ import { Constant } from "./constant.js";
export class Namespace extends Container {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/token.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/token.js
index 1458f95a73d..6b59d6b192b 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/token.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/token.js
@@ -1,11 +1,9 @@
-// @ts-check
-
import { Base } from "./base.js";
import { unescape } from "./helpers.js";
export class WrappedToken extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} type
*/
static parser(tokeniser, type) {
@@ -24,7 +22,7 @@ export class WrappedToken extends Base {
return unescape(this.tokens.value.value);
}
- /** @param {import("../writer").Writer} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.token(this.tokens.value),
@@ -35,7 +33,7 @@ export class WrappedToken extends Base {
export class Eof extends WrappedToken {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const value = tokeniser.consumeKind("eof");
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/type.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/type.js
index c9fa9f34ccf..654a52cf75f 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/type.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/type.js
@@ -12,7 +12,7 @@ import { idlTypeIncludesDictionary } from "../validators/helpers.js";
import { ExtendedAttributes } from "./extended-attributes.js";
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} typeName
*/
function generic_type(tokeniser, typeName) {
@@ -80,7 +80,7 @@ function generic_type(tokeniser, typeName) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
function type_suffix(tokeniser, obj) {
const nullable = tokeniser.consume("?");
@@ -91,7 +91,7 @@ function type_suffix(tokeniser, obj) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} typeName
*/
function single_type(tokeniser, typeName) {
@@ -118,7 +118,7 @@ function single_type(tokeniser, typeName) {
}
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} type
*/
function union_type(tokeniser, type) {
@@ -154,7 +154,7 @@ function union_type(tokeniser, type) {
export class Type extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} typeName
*/
static parse(tokeniser, typeName) {
@@ -164,7 +164,7 @@ export class Type extends Base {
constructor({ source, tokens }) {
super({ source, tokens });
Object.defineProperty(this, "subtype", { value: [], writable: true });
- this.extAttrs = new ExtendedAttributes({});
+ this.extAttrs = new ExtendedAttributes({ source, tokens: {} });
}
get generic() {
@@ -234,7 +234,7 @@ for more information.`;
}
}
- /** @param {import("../writer.js").Writer)} w */
+ /** @param {import("../writer.js").Writer} w */
write(w) {
const type_body = () => {
if (this.union || this.generic) {
@@ -255,7 +255,12 @@ for more information.`;
this.tokens.base.value,
w.token(this.tokens.postfix),
]),
- { unescaped: this.idlType, context: this }
+ {
+ unescaped: /** @type {string} (because it's not union) */ (
+ this.idlType
+ ),
+ context: this,
+ }
);
return w.ts.wrap([w.ts.trivia(firstToken.trivia), ref]);
};
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/typedef.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/typedef.js
index d61ec126a00..e6a22c1bb18 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/typedef.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/productions/typedef.js
@@ -7,7 +7,7 @@ import {
export class Typedef extends Base {
/**
- * @param {import("../tokeniser").Tokeniser} tokeniser
+ * @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
/** @type {Base["tokens"]} */
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/supplement.d.ts b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/supplement.d.ts
index b2f60220eec..52c70254b70 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/supplement.d.ts
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/supplement.d.ts
@@ -1,11 +1,18 @@
import { ExtendedAttributes } from "./productions/extended-attributes.js";
import { Type } from "./productions/type.js";
-import { Default } from "./productions/default.js"
+import { Default } from "./productions/default.js";
import { Token } from "./tokeniser.js";
+import { Argument } from "./productions/argument.js";
+import { WrappedToken } from "./productions/token.js";
+import { Base } from "./productions/base.js";
+import { Definitions } from "./validator.js";
+import { Writer } from "./writer.js";
declare module "./tokeniser.js" {
interface Tokeniser {
current: any;
+ // TODO: This somehow causes fatal error on typescript
+ // source: Token[] & { name?: string };
}
}
@@ -23,13 +30,120 @@ declare module "./productions/attribute.js" {
}
}
+declare module "./productions/attribute.js" {
+ interface Attribute {
+ idlType: Type;
+ default: Default | null;
+ }
+}
+
+declare module "./productions/callback.js" {
+ interface CallbackFunction {
+ idlType: Type;
+ arguments: Argument[];
+ }
+}
+
+declare module "./productions/constant.js" {
+ interface Constant {
+ idlType: Type;
+ }
+}
+
+
+declare module "./productions/constructor.js" {
+ interface Constructor {
+ arguments: Argument[];
+ }
+}
+
+declare module "./productions/container.js" {
+ interface TypedBase extends Base {
+ type: string
+ }
+ interface Container {
+ type: string;
+ members: RootType[];
+ }
+}
+
+declare module "./productions/default.js" {
+ interface Default {
+ expression: Token[];
+ }
+}
+
+declare module "./productions/enum.js" {
+ interface Enum {
+ values: EnumValue[];
+ }
+}
+
+declare module "./productions/field.js" {
+ interface Field {
+ idlType: Type;
+ default: Default | null;
+ }
+}
+
+declare module "./productions/iterable.js" {
+ interface IterableLike {
+ idlType: Type[];
+ arguments: Argument[];
+ }
+}
+
+declare module "./productions/operation.js" {
+ interface Operation {
+ idlType: Type;
+ arguments: Argument[];
+ }
+}
+
+declare module "./productions/typedef.js" {
+ interface Typedef {
+ idlType: Type;
+ }
+}
+
+declare module "./productions/extended-attributes.js" {
+ interface ExtendedAttributeParameters {
+ list: WrappedToken[] | Argument[];
+ }
+ interface SimpleExtendedAttribute {
+ params: ExtendedAttributeParameters;
+ }
+}
+
declare module "./productions/base.js" {
interface Base {
tokens: Record<string, Token | undefined>;
source: Token[];
extAttrs: ExtendedAttributes | undefined;
this: this;
- parent: any;
+ parent?: any;
+
+ validate?(defs: Definitions): IterableIterator<any>;
+ write(w: Writer): any;
+ }
+}
+
+declare module "./productions/array-base.js" {
+ interface ArrayBase {
+ tokens: Record<string, Token | undefined>;
+ source: Token[];
+ parent?: any;
+ }
+}
+
+declare module "./productions/type.js" {
+ interface Type {
+ /**
+ * TODO: This kind of type check should ultimately be replaced by exposed constructors.
+ * See https://github.com/w3c/webidl2.js/issues/537 and https://github.com/w3c/webidl2.js/issues/297.
+ */
+ type: string | null;
+ subtype: Type[];
}
}
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validator.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validator.js
index 522557ce60b..0d99da8248b 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validator.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validator.js
@@ -1,5 +1,3 @@
-// @ts-check
-
import { validationError as error } from "./error.js";
function getMixinMap(all, unique) {
@@ -88,8 +86,8 @@ function flatten(array) {
}
/**
- * @param {import("./productions/base").Base[]} ast
- * @return {import("./error").WebIDLErrorData[]} validation errors
+ * @param {import("./productions/base.js").Base[]} ast
+ * @return {import("./error.js").WebIDLErrorData[]} validation errors
*/
export function validate(ast) {
return [...validateIterable(flatten(ast))];
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/helpers.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/helpers.js
index 779acf9485c..59308840b9e 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/helpers.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/helpers.js
@@ -1,5 +1,3 @@
-// @ts-check
-
/**
* @typedef {import("../productions/dictionary.js").Dictionary} Dictionary
*
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/interface.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/interface.js
index 4feb322a8a6..988a9effb69 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/interface.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/validators/interface.js
@@ -1,24 +1,35 @@
-// @ts-check
-
import { validationError } from "../error.js";
+/**
+ * @param {import("../validator.js").Definitions} defs
+ * @param {import("../productions/container.js").Container} i
+ */
export function* checkInterfaceMemberDuplication(defs, i) {
- const opNames = new Set(getOperations(i).map((op) => op.name));
+ const opNames = groupOperationNames(i);
const partials = defs.partials.get(i.name) || [];
const mixins = defs.mixinMap.get(i.name) || [];
for (const ext of [...partials, ...mixins]) {
const additions = getOperations(ext);
- yield* forEachExtension(additions, opNames, ext, i);
- for (const addition of additions) {
- opNames.add(addition.name);
- }
+ const statics = additions.filter((a) => a.special === "static");
+ const nonstatics = additions.filter((a) => a.special !== "static");
+ yield* checkAdditions(statics, opNames.statics, ext, i);
+ yield* checkAdditions(nonstatics, opNames.nonstatics, ext, i);
+ statics.forEach((op) => opNames.statics.add(op.name));
+ nonstatics.forEach((op) => opNames.nonstatics.add(op.name));
}
- function* forEachExtension(additions, existings, ext, base) {
+ /**
+ * @param {import("../productions/operation.js").Operation[]} additions
+ * @param {Set<string>} existings
+ * @param {import("../productions/container.js").Container} ext
+ * @param {import("../productions/container.js").Container} base
+ */
+ function* checkAdditions(additions, existings, ext, base) {
for (const addition of additions) {
const { name } = addition;
if (name && existings.has(name)) {
- const message = `The operation "${name}" has already been defined for the base interface "${base.name}" either in itself or in a mixin`;
+ const isStatic = addition.special === "static" ? "static " : "";
+ const message = `The ${isStatic}operation "${name}" has already been defined for the base interface "${base.name}" either in itself or in a mixin`;
yield validationError(
addition.tokens.name,
ext,
@@ -29,7 +40,26 @@ export function* checkInterfaceMemberDuplication(defs, i) {
}
}
+ /**
+ * @param {import("../productions/container.js").Container} i
+ * @returns {import("../productions/operation.js").Operation[]}
+ */
function getOperations(i) {
return i.members.filter(({ type }) => type === "operation");
}
+
+ /**
+ * @param {import("../productions/container.js").Container} i
+ */
+ function groupOperationNames(i) {
+ const ops = getOperations(i);
+ return {
+ statics: new Set(
+ ops.filter((op) => op.special === "static").map((op) => op.name)
+ ),
+ nonstatics: new Set(
+ ops.filter((op) => op.special !== "static").map((op) => op.name)
+ ),
+ };
+ }
}
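
The rewritten duplication check tracks static and regular operation names in separate sets, so a static and a non-static operation may legitimately share a name across an interface and its partials/mixins, while real clashes now say "static " in the message. Sketch (IDL made up):

    import { parse, validate } from "webidl2";

    const defs = parse(`
      [Exposed=Window] interface Host { static undefined f(); };
      interface mixin M { undefined f(); };
      Host includes M;
    `);
    validate(defs); // no duplicate-operation error: one "f" is static, the other is not
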
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/webidl2.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/webidl2.js
index 303f5e1aac0..6129879b435 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/webidl2.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/webidl2.js
@@ -16,6 +16,7 @@ import { Eof } from "./productions/token.js";
* @param {Tokeniser} tokeniser
* @param {object} options
* @param {boolean} [options.concrete]
+ * @param {Function[]} [options.productions]
*/
function parseByTokens(tokeniser, options) {
const source = tokeniser.source;
@@ -110,11 +111,12 @@ function parseByTokens(tokeniser, options) {
* @param {*} [options.sourceName]
* @param {boolean} [options.concrete]
* @param {Function[]} [options.productions]
- * @return {import("./productions/base").Base[]}
+ * @return {import("./productions/base.js").Base[]}
*/
export function parse(str, options = {}) {
const tokeniser = new Tokeniser(str);
if (typeof options.sourceName !== "undefined") {
+ // @ts-ignore (See Tokeniser.source in supplement.d.ts)
tokeniser.source.name = options.sourceName;
}
return parseByTokens(tokeniser, options);
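
The newly documented `options.productions` hook runs before the built-in definitions: each entry gets the tokeniser, and the first truthy return value wins. A sketch of plugging one in (`myCustomDefinition` is hypothetical):

    import { parse } from "webidl2";

    // A production takes the tokeniser and returns a node, or
    // undefined to fall through to the built-in definitions.
    function myCustomDefinition(tokeniser) {
      return undefined;
    }

    const ast = parse("interface X {};", {
      concrete: true,
      productions: [myCustomDefinition],
    });
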
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/writer.js b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/writer.js
index 443e1b0bbc0..90ab8dd82ef 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/writer.js
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/lib/writer.js
@@ -21,6 +21,13 @@ export class Writer {
this.ts = Object.assign({}, templates, ts);
}
+ /**
+ * @param {string} raw
+ * @param {object} options
+ * @param {string} [options.unescaped]
+ * @param {import("./productions/base.js").Base} [options.context]
+ * @returns
+ */
reference(raw, { unescaped, context }) {
if (!unescaped) {
unescaped = raw.startsWith("_") ? raw.slice(1) : raw;
@@ -28,6 +35,12 @@ export class Writer {
return this.ts.reference(raw, unescaped, context);
}
+ /**
+ * @param {import("./tokeniser.js").Token} t
+ * @param {Function} wrapper
+ * @param {...any} args
+ * @returns
+ */
token(t, wrapper = noop, ...args) {
if (!t) {
return "";
diff --git a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/package.json b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/package.json
index 3743c970307..1aec561c9d8 100644
--- a/chromium/third_party/devtools-frontend/src/node_modules/webidl2/package.json
+++ b/chromium/third_party/devtools-frontend/src/node_modules/webidl2/package.json
@@ -1,7 +1,7 @@
{
"name": "webidl2",
"description": "A WebIDL Parser",
- "version": "24.2.0",
+ "version": "24.2.2",
"contributors": [
"Robin Berjon <robin@berjon.com> (https://berjon.com)",
"Marcos Caceres <marcos@marcosc.com> (https://marcosc.com)",
@@ -10,18 +10,18 @@
],
"license": "W3C",
"devDependencies": {
- "eslint": "^8.1.0",
- "eslint-config-prettier": "^8.3.0",
- "eslint-plugin-import": "^2.25.2",
+ "eslint": "^8.14.0",
+ "eslint-config-prettier": "^8.5.0",
+ "eslint-plugin-import": "^2.26.0",
"eslint-plugin-prettier": "^4.0.0",
- "expect": "^27.3.1",
+ "expect": "^28.0.2",
"jsondiffpatch": "^0.4.1",
- "mocha": "^9.1.3",
- "prettier": "^2.4.1",
- "terser-webpack-plugin": "^5.2.4",
- "typescript": "^4.4.4",
- "webpack": "^5.61.0",
- "webpack-cli": "^4.9.1"
+ "mocha": "^10.0.0",
+ "prettier": "^2.6.2",
+ "terser-webpack-plugin": "^5.3.1",
+ "typescript": "^4.6.4",
+ "webpack": "^5.72.0",
+ "webpack-cli": "^4.9.2"
},
"scripts": {
"eslint": "eslint lib test",