
package.dist.chunks.mermaid.esm.min.chunk-MHJHRR6Z.mjs.map


Markdown-ish syntax for generating flowcharts, mindmaps, sequence diagrams, class diagrams, Gantt charts, Git graphs and more.
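
Judging by the `sources` list further down, this chunk bundles the Langium/Chevrotain-based parsing machinery used by mermaid's ESM build. As a hedged illustration of the "Markdown-ish syntax" mentioned above (the element id, diagram text, and browser assumption are placeholders of mine, not anything taken from this file), a diagram can be rendered through the public API roughly like this:

```typescript
// Hedged sketch, not taken from this file: rendering a diagram with the
// public mermaid ESM API. Assumes a browser (or jsdom) environment; the
// element id and diagram text are illustrative placeholders.
import mermaid from 'mermaid';

const diagram = `
flowchart TD
  A[Start] --> B{Valid syntax?}
  B -- yes --> C[Render SVG]
  B -- no  --> D[Show parse error]
`;

async function renderExample(): Promise<string> {
  // startOnLoad: false because render() is called explicitly instead of
  // letting mermaid scan the page for elements with the "mermaid" class.
  mermaid.initialize({ startOnLoad: false });
  const { svg } = await mermaid.render('example-diagram', diagram);
  return svg; // an SVG string that can be injected into the page
}

renderExample().then(svg => console.log(svg.length, 'characters of SVG'));
```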

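The remainder of the page is the raw source map itself: `"version": 3`, a `sources` list pointing into the pnpm store (vscode-jsonrpc, langium, chevrotain, vscode-uri, and the mermaid parser chunk), and the inlined `sourcesContent`. As a hedged sketch of how such a map is typically consumed (the `source-map` npm package, the local file path, and the queried position below are assumptions for illustration, not anything this page uses), one could trace a position in the minified chunk back to its original source:

```typescript
// Hedged sketch: reading this map with the "source-map" npm package (v0.7+)
// to map a position in the minified chunk back to an original file.
// The file path and the queried position are placeholders.
import { readFile } from 'node:fs/promises';
import { SourceMapConsumer } from 'source-map';

async function whereDidThisComeFrom(): Promise<void> {
  const raw = await readFile('chunk-MHJHRR6Z.mjs.map', 'utf8');
  // In source-map 0.7+, the constructor returns a Promise.
  const consumer = await new SourceMapConsumer(JSON.parse(raw));
  try {
    // Ask which original source produced line 1, column 1000 of the minified
    // chunk; the answer points at one of the "sources" entries below.
    const pos = consumer.originalPositionFor({ line: 1, column: 1000 });
    console.log(pos.source, pos.line, pos.column, pos.name);
  } finally {
    consumer.destroy();
  }
}

whereDidThisComeFrom();
```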
{
  "version": 3,
  "sources": ["../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/ral.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/is.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/events.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/cancellation.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/index.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/cst-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/syntax-tree.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/stream.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/grammar-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/errors.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/languages/generated/ast.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/ast-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/regexp-utils.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/utils.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/character-classes.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/regexp-parser.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/base-regexp-visitor.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/languages/grammar-config.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/print.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/timer.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/to-fast-properties.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/model.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/visitor.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/helpers.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/rest.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/first.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/constants.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/follow.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/reg_exp_parser.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/reg_exp.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/tokens.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer_errors_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer_public.ts", 
"../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/tokens_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/errors_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/resolver.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/interpreter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/checks.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/exceptions_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recoverable.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/keys.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/llk_lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/looksahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/cst/cst.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/lang/lang_extensions.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/cst/cst_visitor.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/tree_builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/lexer_adapter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recognizer_api.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recognizer_engine.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/error_handler.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/context_assist.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/gast_recorder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/perf_tracer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/utils/apply_mixins.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/parser.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/atn.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/dfa.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/all-star-lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-languageserver-types/lib/esm/main.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/cst-node-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/langium-parser.ts", 
"../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/parser-builder-base.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/completion-parser-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/langium-parser-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/token-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/value-converter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/cancellation.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/promise-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-languageserver-textdocument/lib/esm/main.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/node_modules/path-browserify/index.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/bootstrap", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/define property getters", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/hasOwnProperty shorthand", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/make namespace object", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/platform.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/uri.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/uri-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/documents.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/linker.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/name-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/references.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/collections.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope-computation.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/caching.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/serializer/json-serializer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/service-registry.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/validation/validation-registry.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/validation/document-validator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/ast-descriptions.ts", "../../../../../node_modules/.pnpm/[email 
protected]/node_modules/langium/src/workspace/ast-node-locator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/configuration.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/disposable.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/document-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/index-manager.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/workspace-manager.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/lexer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/jsdoc.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/documentation-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/comment-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/event.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/async-parser.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/workspace-lock.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/serializer/hydrator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/default-module.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/dependency-injection.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/index.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/file-system-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/grammar-loader.ts", "../../../../parser/dist/chunks/mermaid-parser.core/chunk-Y27MQZ3U.mjs"],
  "sourcesContent": ["\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nlet _ral;\nfunction RAL() {\n    if (_ral === undefined) {\n        throw new Error(`No runtime abstraction layer installed`);\n    }\n    return _ral;\n}\n(function (RAL) {\n    function install(ral) {\n        if (ral === undefined) {\n            throw new Error(`No runtime abstraction layer provided`);\n        }\n        _ral = ral;\n    }\n    RAL.install = install;\n})(RAL || (RAL = {}));\nexports.default = RAL;\n", "\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.stringArray = exports.array = exports.func = exports.error = exports.number = exports.string = exports.boolean = void 0;\nfunction boolean(value) {\n    return value === true || value === false;\n}\nexports.boolean = boolean;\nfunction string(value) {\n    return typeof value === 'string' || value instanceof String;\n}\nexports.string = string;\nfunction number(value) {\n    return typeof value === 'number' || value instanceof Number;\n}\nexports.number = number;\nfunction error(value) {\n    return value instanceof Error;\n}\nexports.error = error;\nfunction func(value) {\n    return typeof value === 'function';\n}\nexports.func = func;\nfunction array(value) {\n    return Array.isArray(value);\n}\nexports.array = array;\nfunction stringArray(value) {\n    return array(value) && value.every(elem => string(elem));\n}\nexports.stringArray = stringArray;\n", "\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Emitter = exports.Event = void 0;\nconst ral_1 = require(\"./ral\");\nvar Event;\n(function (Event) {\n    const _disposable = { dispose() { } };\n    Event.None = function () { return _disposable; };\n})(Event || (exports.Event = Event = {}));\nclass CallbackList {\n    add(callback, context = null, bucket) {\n        if (!this._callbacks) {\n            this._callbacks = [];\n            this._contexts = [];\n        }\n        this._callbacks.push(callback);\n        this._contexts.push(context);\n        if (Array.isArray(bucket)) {\n            bucket.push({ dispose: () => this.remove(callback, context) });\n        }\n    }\n    remove(callback, context = null) {\n        if (!this._callbacks) {\n            return;\n        }\n        let foundCallbackWithDifferentContext = false;\n        for (let i = 0, len = this._callbacks.length; i < len; i++) {\n            if (this._callbacks[i] === callback) {\n                if (this._contexts[i] === context) {\n                    // callback & context match => remove it\n                    this._callbacks.splice(i, 1);\n                    this._contexts.splice(i, 1);\n                    return;\n                }\n                else {\n                    foundCallbackWithDifferentContext = true;\n                }\n            }\n        }\n        if (foundCallbackWithDifferentContext) {\n            throw new Error('When adding a listener with a context, you should remove it with the same context');\n        }\n    }\n    invoke(...args) {\n        if (!this._callbacks) {\n            return [];\n        }\n        const ret = [], callbacks = this._callbacks.slice(0), contexts = this._contexts.slice(0);\n        for (let i = 0, len = callbacks.length; i < len; i++) {\n            try {\n                ret.push(callbacks[i].apply(contexts[i], args));\n            }\n            catch (e) {\n                // eslint-disable-next-line no-console\n                (0, ral_1.default)().console.error(e);\n            }\n        }\n        return ret;\n    }\n    isEmpty() {\n        return !this._callbacks || this._callbacks.length === 0;\n    }\n    dispose() {\n        this._callbacks = undefined;\n        this._contexts = undefined;\n    }\n}\nclass Emitter {\n    constructor(_options) {\n        this._options = _options;\n    }\n    /**\n     * For the public to allow to subscribe\n     * to events from this Emitter\n     */\n    get event() {\n        if (!this._event) {\n            this._event = (listener, thisArgs, disposables) => {\n                if (!this._callbacks) {\n                    this._callbacks = new CallbackList();\n                }\n                if (this._options && this._options.onFirstListenerAdd && this._callbacks.isEmpty()) {\n                    this._options.onFirstListenerAdd(this);\n                }\n                this._callbacks.add(listener, thisArgs);\n                const result = {\n                    dispose: () => {\n                        if (!this._callbacks) {\n                            // disposable is disposed after emitter is disposed.\n                            return;\n                        }\n                        this._callbacks.remove(listener, thisArgs);\n                        result.dispose = Emitter._noop;\n                        
if (this._options && this._options.onLastListenerRemove && this._callbacks.isEmpty()) {\n                            this._options.onLastListenerRemove(this);\n                        }\n                    }\n                };\n                if (Array.isArray(disposables)) {\n                    disposables.push(result);\n                }\n                return result;\n            };\n        }\n        return this._event;\n    }\n    /**\n     * To be kept private to fire an event to\n     * subscribers\n     */\n    fire(event) {\n        if (this._callbacks) {\n            this._callbacks.invoke.call(this._callbacks, event);\n        }\n    }\n    dispose() {\n        if (this._callbacks) {\n            this._callbacks.dispose();\n            this._callbacks = undefined;\n        }\n    }\n}\nexports.Emitter = Emitter;\nEmitter._noop = function () { };\n", "\"use strict\";\n/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CancellationTokenSource = exports.CancellationToken = void 0;\nconst ral_1 = require(\"./ral\");\nconst Is = require(\"./is\");\nconst events_1 = require(\"./events\");\nvar CancellationToken;\n(function (CancellationToken) {\n    CancellationToken.None = Object.freeze({\n        isCancellationRequested: false,\n        onCancellationRequested: events_1.Event.None\n    });\n    CancellationToken.Cancelled = Object.freeze({\n        isCancellationRequested: true,\n        onCancellationRequested: events_1.Event.None\n    });\n    function is(value) {\n        const candidate = value;\n        return candidate && (candidate === CancellationToken.None\n            || candidate === CancellationToken.Cancelled\n            || (Is.boolean(candidate.isCancellationRequested) && !!candidate.onCancellationRequested));\n    }\n    CancellationToken.is = is;\n})(CancellationToken || (exports.CancellationToken = CancellationToken = {}));\nconst shortcutEvent = Object.freeze(function (callback, context) {\n    const handle = (0, ral_1.default)().timer.setTimeout(callback.bind(context), 0);\n    return { dispose() { handle.dispose(); } };\n});\nclass MutableToken {\n    constructor() {\n        this._isCancelled = false;\n    }\n    cancel() {\n        if (!this._isCancelled) {\n            this._isCancelled = true;\n            if (this._emitter) {\n                this._emitter.fire(undefined);\n                this.dispose();\n            }\n        }\n    }\n    get isCancellationRequested() {\n        return this._isCancelled;\n    }\n    get onCancellationRequested() {\n        if (this._isCancelled) {\n            return shortcutEvent;\n        }\n        if (!this._emitter) {\n            this._emitter = new events_1.Emitter();\n        }\n        return this._emitter.event;\n    }\n    dispose() {\n        if (this._emitter) {\n            this._emitter.dispose();\n            this._emitter = undefined;\n        }\n    }\n}\nclass CancellationTokenSource {\n    get token() {\n        if (!this._token) {\n            // be lazy and create the token only when\n            // actually needed\n            this._token = new MutableToken();\n        }\n        return this._token;\n    }\n    cancel() 
{\n        if (!this._token) {\n            // save an object by returning the default\n            // cancelled token when cancellation happens\n            // before someone asks for the token\n            this._token = CancellationToken.Cancelled;\n        }\n        else {\n            this._token.cancel();\n        }\n    }\n    dispose() {\n        if (!this._token) {\n            // ensure to initialize with an empty token if we had none\n            this._token = CancellationToken.None;\n        }\n        else if (this._token instanceof MutableToken) {\n            // actually dispose\n            this._token.dispose();\n        }\n    }\n}\nexports.CancellationTokenSource = CancellationTokenSource;\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport * from './default-module.js';\nexport * from './dependency-injection.js';\nexport * from './service-registry.js';\nexport * from './services.js';\nexport * from './syntax-tree.js';\nexport * from './documentation/index.js';\nexport * from './languages/index.js';\nexport * from './parser/index.js';\nexport * from './references/index.js';\nexport * from './serializer/index.js';\nexport * from './utils/index.js';\nexport * from './validation/index.js';\nexport * from './workspace/index.js';\n\n// Export the Langium Grammar AST definitions in the `GrammarAST` namespace\nimport * as GrammarAST from './languages/generated/ast.js';\nimport type { Grammar } from './languages/generated/ast.js';\nexport { Grammar, GrammarAST };\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IToken } from '@chevrotain/types';\nimport type { Range } from 'vscode-languageserver-types';\nimport type { CstNode, CompositeCstNode, LeafCstNode } from '../syntax-tree.js';\nimport type { DocumentSegment } from '../workspace/documents.js';\nimport type { Stream, TreeStream } from './stream.js';\nimport { isCompositeCstNode, isLeafCstNode, isRootCstNode } from '../syntax-tree.js';\nimport { TreeStreamImpl } from './stream.js';\n\n/**\n * Create a stream of all CST nodes that are directly and indirectly contained in the given root node,\n * including the root node itself.\n */\nexport function streamCst(node: CstNode): TreeStream {\n    return new TreeStreamImpl(node, element => {\n        if (isCompositeCstNode(element)) {\n            return element.content;\n        } else {\n            return [];\n        }\n    }, { includeRoot: true });\n}\n\n/**\n * Create a stream of all leaf nodes that are directly and indirectly contained in the given root node.\n */\nexport function flattenCst(node: CstNode): Stream {\n    return streamCst(node).filter(isLeafCstNode);\n}\n\n/**\n * Determines whether the specified cst node is a child of the specified parent node.\n */\nexport function isChildNode(child: CstNode, parent: CstNode): boolean {\n    while (child.container) {\n        child = child.container;\n        if (child === parent) {\n            return true;\n        
}\n    }\n    return false;\n}\n\nexport function tokenToRange(token: IToken): Range {\n    // Chevrotain uses 1-based indices everywhere\n    // So we subtract 1 from every value to align with the LSP\n    return {\n        start: {\n            character: token.startColumn! - 1,\n            line: token.startLine! - 1\n        },\n        end: {\n            character: token.endColumn!, // endColumn uses the correct index\n            line: token.endLine! - 1\n        }\n    };\n}\n\nexport function toDocumentSegment(node: CstNode): DocumentSegment;\nexport function toDocumentSegment(node?: CstNode): DocumentSegment | undefined;\nexport function toDocumentSegment(node?: CstNode): DocumentSegment | undefined {\n    if (!node) {\n        return undefined;\n    }\n    const { offset, end, range } = node;\n    return {\n        range,\n        offset,\n        end,\n        length: end - offset\n    };\n}\n\nexport enum RangeComparison {\n    Before = 0,\n    After = 1,\n    OverlapFront = 2,\n    OverlapBack = 3,\n    Inside = 4\n}\n\nexport function compareRange(range: Range, to: Range): RangeComparison {\n    if (range.end.line < to.start.line || (range.end.line === to.start.line && range.end.character < range.start.character)) {\n        return RangeComparison.Before;\n    } else if (range.start.line > to.end.line || (range.start.line === to.end.line && range.start.character > to.end.character)) {\n        return RangeComparison.After;\n    }\n    const startInside = range.start.line > to.start.line || (range.start.line === to.start.line && range.start.character >= to.start.character);\n    const endInside = range.end.line < to.end.line || (range.end.line === to.end.line && range.end.character <= to.end.character);\n    if (startInside && endInside) {\n        return RangeComparison.Inside;\n    } else if (startInside) {\n        return RangeComparison.OverlapBack;\n    } else {\n        return RangeComparison.OverlapFront;\n    }\n}\n\nexport function inRange(range: Range, to: Range): boolean {\n    const comparison = compareRange(range, to);\n    return comparison > RangeComparison.After;\n}\n\n// The \\p{L} regex matches any unicode letter character, i.e. 
characters from non-english alphabets\n// Together with \\w it matches any kind of character which can commonly appear in IDs\nexport const DefaultNameRegexp = /^[\\w\\p{L}]$/u;\n\n/**\n * Performs `findLeafNodeAtOffset` with a minor difference: When encountering a character that matches the `nameRegexp` argument,\n * it will instead return the leaf node at the `offset - 1` position.\n *\n * For LSP services, users expect that the declaration of an element is available if the cursor is directly after the element.\n */\nexport function findDeclarationNodeAtOffset(cstNode: CstNode | undefined, offset: number, nameRegexp = DefaultNameRegexp): LeafCstNode | undefined {\n    if (cstNode) {\n        if (offset > 0) {\n            const localOffset = offset - cstNode.offset;\n            const textAtOffset = cstNode.text.charAt(localOffset);\n            if (!nameRegexp.test(textAtOffset)) {\n                offset--;\n            }\n        }\n        return findLeafNodeAtOffset(cstNode, offset);\n    }\n    return undefined;\n}\n\nexport function findCommentNode(cstNode: CstNode | undefined, commentNames: string[]): CstNode | undefined {\n    if (cstNode) {\n        const previous = getPreviousNode(cstNode, true);\n        if (previous && isCommentNode(previous, commentNames)) {\n            return previous;\n        }\n        if (isRootCstNode(cstNode)) {\n            // Go from the first non-hidden node through all nodes in reverse order\n            // We do this to find the comment node which directly precedes the root node\n            const endIndex = cstNode.content.findIndex(e => !e.hidden);\n            for (let i = endIndex - 1; i >= 0; i--) {\n                const child = cstNode.content[i];\n                if (isCommentNode(child, commentNames)) {\n                    return child;\n                }\n            }\n        }\n    }\n    return undefined;\n}\n\nexport function isCommentNode(cstNode: CstNode, commentNames: string[]): boolean {\n    return isLeafCstNode(cstNode) && commentNames.includes(cstNode.tokenType.name);\n}\n\n/**\n * Finds the leaf CST node at the specified 0-based string offset.\n * Note that the given offset will be within the range of the returned leaf node.\n *\n * If the offset does not point to a CST node (but just white space), this method will return `undefined`.\n *\n * @param node The CST node to search through.\n * @param offset The specified offset.\n * @returns The CST node at the specified offset.\n */\nexport function findLeafNodeAtOffset(node: CstNode, offset: number): LeafCstNode | undefined {\n    if (isLeafCstNode(node)) {\n        return node;\n    } else if (isCompositeCstNode(node)) {\n        const searchResult = binarySearch(node, offset, false);\n        if (searchResult) {\n            return findLeafNodeAtOffset(searchResult, offset);\n        }\n    }\n    return undefined;\n}\n\n/**\n * Finds the leaf CST node at the specified 0-based string offset.\n * If no CST node exists at the specified position, it will return the leaf node before it.\n *\n * If there is no leaf node before the specified offset, this method will return `undefined`.\n *\n * @param node The CST node to search through.\n * @param offset The specified offset.\n * @returns The CST node closest to the specified offset.\n */\nexport function findLeafNodeBeforeOffset(node: CstNode, offset: number): LeafCstNode | undefined {\n    if (isLeafCstNode(node)) {\n        return node;\n    } else if (isCompositeCstNode(node)) {\n        const searchResult = 
binarySearch(node, offset, true);\n        if (searchResult) {\n            return findLeafNodeBeforeOffset(searchResult, offset);\n        }\n    }\n    return undefined;\n}\n\nfunction binarySearch(node: CompositeCstNode, offset: number, closest: boolean): CstNode | undefined {\n    let left = 0;\n    let right = node.content.length - 1;\n    let closestNode: CstNode | undefined = undefined;\n\n    while (left <= right) {\n        const middle = Math.floor((left + right) / 2);\n        const middleNode = node.content[middle];\n\n        if (middleNode.offset <= offset && middleNode.end > offset) {\n            // Found an exact match\n            return middleNode;\n        }\n\n        if (middleNode.end <= offset) {\n            // Update the closest node (less than offset) and move to the right half\n            closestNode = closest ? middleNode : undefined;\n            left = middle + 1;\n        } else {\n            // Move to the left half\n            right = middle - 1;\n        }\n    }\n\n    return closestNode;\n}\n\nexport function getPreviousNode(node: CstNode, hidden = true): CstNode | undefined {\n    while (node.container) {\n        const parent = node.container;\n        let index = parent.content.indexOf(node);\n        while (index > 0) {\n            index--;\n            const previous = parent.content[index];\n            if (hidden || !previous.hidden) {\n                return previous;\n            }\n        }\n        node = parent;\n    }\n    return undefined;\n}\n\nexport function getNextNode(node: CstNode, hidden = true): CstNode | undefined {\n    while (node.container) {\n        const parent = node.container;\n        let index = parent.content.indexOf(node);\n        const last = parent.content.length - 1;\n        while (index < last) {\n            index++;\n            const next = parent.content[index];\n            if (hidden || !next.hidden) {\n                return next;\n            }\n        }\n        node = parent;\n    }\n    return undefined;\n}\n\nexport function getStartlineNode(node: CstNode): CstNode {\n    if (node.range.start.character === 0) {\n        return node;\n    }\n    const line = node.range.start.line;\n    let last = node;\n    let index: number | undefined;\n    while (node.container) {\n        const parent = node.container;\n        const selfIndex = index ?? 
parent.content.indexOf(node);\n        if (selfIndex === 0) {\n            node = parent;\n            index = undefined;\n        } else {\n            index = selfIndex - 1;\n            node = parent.content[index];\n        }\n        if (node.range.start.line !== line) {\n            break;\n        }\n        last = node;\n    }\n    return last;\n}\n\nexport function getInteriorNodes(start: CstNode, end: CstNode): CstNode[] {\n    const commonParent = getCommonParent(start, end);\n    if (!commonParent) {\n        return [];\n    }\n    return commonParent.parent.content.slice(commonParent.a + 1, commonParent.b);\n}\n\nfunction getCommonParent(a: CstNode, b: CstNode): CommonParent | undefined {\n    const aParents = getParentChain(a);\n    const bParents = getParentChain(b);\n    let current: CommonParent | undefined;\n    for (let i = 0; i < aParents.length && i < bParents.length; i++) {\n        const aParent = aParents[i];\n        const bParent = bParents[i];\n        if (aParent.parent === bParent.parent) {\n            current = {\n                parent: aParent.parent,\n                a: aParent.index,\n                b: bParent.index\n            };\n        } else {\n            break;\n        }\n    }\n    return current;\n}\n\ninterface CommonParent {\n    parent: CompositeCstNode\n    a: number\n    b: number\n}\n\nfunction getParentChain(node: CstNode): ParentLink[] {\n    const chain: ParentLink[] = [];\n    while (node.container) {\n        const parent = node.container;\n        const index = parent.content.indexOf(node);\n        chain.push({\n            parent,\n            index\n        });\n        node = parent;\n    }\n    return chain.reverse();\n}\n\ninterface ParentLink {\n    parent: CompositeCstNode\n    index: number\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { TokenType } from 'chevrotain';\nimport type { URI } from './utils/uri-utils.js';\nimport type { AbstractElement } from './languages/generated/ast.js';\nimport type { DocumentSegment, LangiumDocument } from './workspace/documents.js';\n\n/**\n * A node in the Abstract Syntax Tree (AST).\n */\nexport interface AstNode {\n    /** Every AST node has a type corresponding to what was specified in the grammar declaration. */\n    readonly $type: string;\n    /** The container node in the AST; every node except the root node has a container. */\n    readonly $container?: AstNode;\n    /** The property of the `$container` node that contains this node. This is either a direct reference or an array. */\n    readonly $containerProperty?: string;\n    /** In case `$containerProperty` is an array, the array index is stored here. */\n    readonly $containerIndex?: number;\n    /** The Concrete Syntax Tree (CST) node of the text range from which this node was parsed. */\n    readonly $cstNode?: CstNode;\n    /** The document containing the AST; only the root node has a direct reference to the document. 
*/\n    readonly $document?: LangiumDocument;\n}\n\nexport function isAstNode(obj: unknown): obj is AstNode {\n    return typeof obj === 'object' && obj !== null && typeof (obj as AstNode).$type === 'string';\n}\n\nexport interface GenericAstNode extends AstNode {\n    [key: string]: unknown\n}\n\ntype SpecificNodeProperties = keyof Omit;\n\n/**\n * The property names of a given AST node type.\n */\nexport type Properties = SpecificNodeProperties extends never ? string : SpecificNodeProperties\n\n/**\n * A cross-reference in the AST. Cross-references may or may not be successfully resolved.\n */\nexport interface Reference {\n    /**\n     * The target AST node of this reference. Accessing this property may trigger cross-reference\n     * resolution by the `Linker` in case it has not been done yet. If the reference cannot be resolved,\n     * the value is `undefined`.\n     */\n    readonly ref?: T;\n\n    /** If any problem occurred while resolving the reference, it is described by this property. */\n    readonly error?: LinkingError;\n    /** The CST node from which the reference was parsed */\n    readonly $refNode?: CstNode;\n    /** The actual text used to look up in the surrounding scope */\n    readonly $refText: string;\n    /** The node description for the AstNode returned by `ref`  */\n    readonly $nodeDescription?: AstNodeDescription;\n}\n\nexport function isReference(obj: unknown): obj is Reference {\n    return typeof obj === 'object' && obj !== null && typeof (obj as Reference).$refText === 'string';\n}\n\nexport type ResolvedReference = Reference & {\n    readonly ref: T;\n}\n\n/**\n * A description of an AST node is used when constructing scopes and looking up cross-reference targets.\n */\nexport interface AstNodeDescription {\n    /** The target node; should be present only for local references (linking to the same document). */\n    node?: AstNode;\n    /**\n     * The document segment that represents the range of the name of the AST node.\n     */\n    nameSegment?: DocumentSegment;\n    /**\n     * The document segment that represents the full range of the AST node.\n     */\n    selectionSegment?: DocumentSegment;\n    /** `$type` property value of the AST node */\n    type: string;\n    /** Name of the AST node; this is usually determined by the `NameProvider` service. */\n    name: string;\n    /** URI to the document containing the AST node */\n    documentUri: URI;\n    /** Navigation path inside the document */\n    path: string;\n}\n\nexport function isAstNodeDescription(obj: unknown): obj is AstNodeDescription {\n    return typeof obj === 'object' && obj !== null\n        && typeof (obj as AstNodeDescription).name === 'string'\n        && typeof (obj as AstNodeDescription).type === 'string'\n        && typeof (obj as AstNodeDescription).path === 'string';\n}\n\n/**\n * Information about a cross-reference. 
This is used when traversing references in an AST or to describe\n * unresolved references.\n */\nexport interface ReferenceInfo {\n    reference: Reference\n    container: AstNode\n    property: string\n    index?: number\n}\n\n/**\n * Used to collect information when the `Linker` service fails to resolve a cross-reference.\n */\nexport interface LinkingError extends ReferenceInfo {\n    message: string;\n    targetDescription?: AstNodeDescription;\n}\n\nexport function isLinkingError(obj: unknown): obj is LinkingError {\n    return typeof obj === 'object' && obj !== null\n        && isAstNode((obj as LinkingError).container)\n        && isReference((obj as LinkingError).reference)\n        && typeof (obj as LinkingError).message === 'string';\n}\n\n/**\n * Service used for generic access to the structure of the AST. This service is shared between\n * all involved languages, so it operates on the superset of types of these languages.\n */\nexport interface AstReflection {\n    getAllTypes(): string[]\n    getAllSubTypes(type: string): string[]\n    getReferenceType(refInfo: ReferenceInfo): string\n    getTypeMetaData(type: string): TypeMetaData\n    isInstance(node: unknown, type: string): boolean\n    isSubtype(subtype: string, supertype: string): boolean\n}\n\n/**\n * An abstract implementation of the {@link AstReflection} interface.\n * Serves to cache subtype computation results to improve performance throughout different parts of Langium.\n */\nexport abstract class AbstractAstReflection implements AstReflection {\n\n    protected subtypes: Record> = {};\n    protected allSubtypes: Record = {};\n\n    abstract getAllTypes(): string[];\n    abstract getReferenceType(refInfo: ReferenceInfo): string;\n    abstract getTypeMetaData(type: string): TypeMetaData;\n    protected abstract computeIsSubtype(subtype: string, supertype: string): boolean;\n\n    isInstance(node: unknown, type: string): boolean {\n        return isAstNode(node) && this.isSubtype(node.$type, type);\n    }\n\n    isSubtype(subtype: string, supertype: string): boolean {\n        if (subtype === supertype) {\n            return true;\n        }\n        let nested = this.subtypes[subtype];\n        if (!nested) {\n            nested = this.subtypes[subtype] = {};\n        }\n        const existing = nested[supertype];\n        if (existing !== undefined) {\n            return existing;\n        } else {\n            const result = this.computeIsSubtype(subtype, supertype);\n            nested[supertype] = result;\n            return result;\n        }\n    }\n\n    getAllSubTypes(type: string): string[] {\n        const existing = this.allSubtypes[type];\n        if (existing) {\n            return existing;\n        } else {\n            const allTypes = this.getAllTypes();\n            const types: string[] = [];\n            for (const possibleSubType of allTypes) {\n                if (this.isSubtype(possibleSubType, type)) {\n                    types.push(possibleSubType);\n                }\n            }\n            this.allSubtypes[type] = types;\n            return types;\n        }\n    }\n}\n\n/**\n * Represents runtime meta data about a meta model type.\n */\nexport interface TypeMetaData {\n    /** The name of this meta model type. Corresponds to the `AstNode.$type` value. */\n    name: string\n    /** A list of properties. They can contain default values for their respective property in the AST. 
*/\n    properties: TypeProperty[]\n}\n\n/**\n * Describes the meta data of a property of an AST node.\n *\n * The optional `defaultValue` indicates that the property is mandatory in the AST node.\n * For example, if an AST node contains an array, but no elements of this array have been parsed, we still expect an empty array instead of `undefined`.\n */\nexport interface TypeProperty {\n    name: string\n    defaultValue?: PropertyType\n}\n\n/**\n * Represents a default value for an AST property.\n */\nexport type PropertyType = number | string | boolean | PropertyType[];\n\n/**\n * A node in the Concrete Syntax Tree (CST).\n */\nexport interface CstNode extends DocumentSegment {\n    /** The container node in the CST */\n    readonly container?: CompositeCstNode;\n    /** @deprecated use `container` instead. */\n    readonly parent?: CompositeCstNode;\n    /** The actual text */\n    readonly text: string;\n    /** The root CST node */\n    readonly root: RootCstNode;\n    /** The grammar element from which this node was parsed */\n    readonly grammarSource: AbstractElement;\n    /** @deprecated use `grammarSource` instead. */\n    readonly feature: AbstractElement;\n    /** The AST node created from this CST node */\n    readonly astNode: AstNode;\n    /** @deprecated use `astNode` instead. */\n    readonly element: AstNode;\n    /** Whether the token is hidden, i.e. not explicitly part of the containing grammar rule */\n    readonly hidden: boolean;\n}\n\n/**\n * A composite CST node contains other nodes, but no directly associated token.\n */\nexport interface CompositeCstNode extends CstNode {\n    readonly content: CstNode[];\n    /** @deprecated use `content` instead. */\n    readonly children: CstNode[];\n}\n\nexport function isCompositeCstNode(node: unknown): node is CompositeCstNode {\n    return typeof node === 'object' && node !== null && Array.isArray((node as CompositeCstNode).content);\n}\n\n/**\n * A leaf CST node corresponds to a token in the input token stream.\n */\nexport interface LeafCstNode extends CstNode {\n    readonly tokenType: TokenType;\n}\n\nexport function isLeafCstNode(node: unknown): node is LeafCstNode {\n    return typeof node === 'object' && node !== null && typeof (node as LeafCstNode).tokenType === 'object';\n}\n\nexport interface RootCstNode extends CompositeCstNode {\n    readonly fullText: string\n}\n\nexport function isRootCstNode(node: unknown): node is RootCstNode {\n    return isCompositeCstNode(node) && typeof (node as RootCstNode).fullText === 'string';\n}\n\n/**\n * Returns a type to have only properties names (!) of a type T whose property value is of a certain type K.\n */\ntype ExtractKeysOfValueType = { [I in keyof T]: T[I] extends K ? I : never }[keyof T];\n\n/**\n * Returns the property names (!) 
of an AstNode that are cross-references.\n * Meant to be used during cross-reference resolution in combination with `assertUnreachable(context.property)`.\n */\nexport type CrossReferencesOfAstNodeType = (\n    ExtractKeysOfValueType\n    | ExtractKeysOfValueType|undefined>\n// eslint-disable-next-line @typescript-eslint/ban-types\n) & {};\n\n/**\n * Represents the enumeration-like type, that lists all AstNode types of your grammar.\n */\nexport type AstTypeList = Record;\n\n/**\n * Returns all types that contain cross-references, A is meant to be the interface `XXXAstType` fromm your generated `ast.ts` file.\n * Meant to be used during cross-reference resolution in combination with `assertUnreachable(context.container)`.\n */\nexport type AstNodeTypesWithCrossReferences> = {\n    [T in keyof A]: CrossReferencesOfAstNodeType extends never ? never : A[T]\n}[keyof A];\n\nexport type Mutable = {\n    -readonly [P in keyof T]: T[P]\n};\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/**\n * A stream is a read-only sequence of values. While the contents of an array can be accessed\n * both sequentially and randomly (via index), a stream allows only sequential access.\n *\n * The advantage of this is that a stream can be evaluated lazily, so it does not require\n * to store intermediate values. This can boost performance when a large sequence is\n * processed via filtering, mapping etc. and accessed at most once. However, lazy\n * evaluation means that all processing is repeated when you access the sequence multiple\n * times; in such a case, it may be better to store the resulting sequence into an array.\n */\nexport interface Stream extends Iterable {\n\n    /**\n     * Returns an iterator for this stream. This is the same as calling the `Symbol.iterator` function property.\n     */\n    iterator(): IterableIterator;\n\n    /**\n     * Determines whether this stream contains no elements.\n     */\n    isEmpty(): boolean;\n\n    /**\n     * Determines the number of elements in this stream.\n     */\n    count(): number;\n\n    /**\n     * Collects all elements of this stream into an array.\n     */\n    toArray(): T[];\n\n    /**\n     * Collects all elements of this stream into a Set.\n     */\n    toSet(): Set;\n\n    /**\n     * Collects all elements of this stream into a Map, applying the provided functions to determine keys and values.\n     *\n     * @param keyFn The function to derive map keys. If omitted, the stream elements are used as keys.\n     * @param valueFn The function to derive map values. 
If omitted, the stream elements are used as values.\n     */\n    toMap(keyFn?: (e: T) => K, valueFn?: (e: T) => V): Map;\n\n    /**\n     * Returns a string representation of a stream.\n     */\n    toString(): string;\n\n    /**\n     * Combines two streams by returning a new stream that yields all elements of this stream and the other stream.\n     *\n     * @param other Stream to be concatenated with this one.\n     */\n    concat(other: Iterable): Stream;\n\n    /**\n     * Adds all elements of the stream into a string, separated by the specified separator string.\n     *\n     * @param separator A string used to separate one element of the stream from the next in the resulting string.\n     *        If omitted, the steam elements are separated with a comma.\n     */\n    join(separator?: string): string\n\n    /**\n     * Returns the index of the first occurrence of a value in the stream, or -1 if it is not present.\n     *\n     * @param searchElement The value to locate in the array.\n     * @param fromIndex The stream index at which to begin the search. If fromIndex is omitted, the search\n     *        starts at index 0.\n     */\n    indexOf(searchElement: T, fromIndex?: number): number;\n\n    /**\n     * Determines whether all members of the stream satisfy the specified test.\n     *\n     * @param predicate This method calls the predicate function for each element in the stream until the\n     *        predicate returns a value which is coercible to the Boolean value `false`, or until the end\n     *        of the stream.\n     */\n    every(predicate: (value: T) => value is S): this is Stream;\n    every(predicate: (value: T) => unknown): boolean;\n\n    /**\n     * Determines whether any member of the stream satisfies the specified test.\n     *\n     * @param predicate This method calls the predicate function for each element in the stream until the\n     *        predicate returns a value which is coercible to the Boolean value `true`, or until the end\n     *        of the stream.\n     */\n    some(predicate: (value: T) => unknown): boolean;\n\n    /**\n     * Performs the specified action for each element in the stream.\n     *\n     * @param callbackfn Function called once for each element in the stream.\n     */\n    forEach(callbackfn: (value: T, index: number) => void): void;\n\n    /**\n     * Returns a stream that yields the results of calling the specified callback function on each element\n     * of the stream. 
The function is called when the resulting stream elements are actually accessed, so\n     * accessing the resulting stream multiple times means the function is also called multiple times for\n     * each element of the stream.\n     *\n     * @param callbackfn Lazily evaluated function mapping stream elements.\n     */\n    map(callbackfn: (value: T) => U): Stream;\n\n    /**\n     * Returns the elements of the stream that meet the condition specified in a callback function.\n     * The function is called when the resulting stream elements are actually accessed, so accessing the\n     * resulting stream multiple times means the function is also called multiple times for each element\n     * of the stream.\n     *\n     * @param predicate Lazily evaluated function checking a condition on stream elements.\n     */\n    filter(predicate: (value: T) => value is S): Stream;\n    filter(predicate: (value: T) => unknown): Stream;\n\n    /**\n     * Returns the elements of the stream that are _non-nullable_, which means they are neither `undefined`\n     * nor `null`.\n     */\n    nonNullable(): Stream>;\n\n    /**\n     * Calls the specified callback function for all elements in the stream. The return value of the\n     * callback function is the accumulated result, and is provided as an argument in the next call to\n     * the callback function.\n     *\n     * @param callbackfn This method calls the function once for each element in the stream, providing\n     *        the previous and current values of the reduction.\n     * @param initialValue If specified, `initialValue` is used as the initial value to start the\n     *        accumulation. The first call to the function provides this value as an argument instead\n     *        of a stream value.\n     */\n    reduce(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduce(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n\n    /**\n     * Calls the specified callback function for all elements in the stream, in descending order.\n     * The return value of the callback function is the accumulated result, and is provided as an\n     * argument in the next call to the callback function.\n     *\n     * @param callbackfn This method calls the function once for each element in the stream, providing\n     *        the previous and current values of the reduction.\n     * @param initialValue If specified, `initialValue` is used as the initial value to start the\n     *        accumulation. 
The first call to the function provides this value as an argument instead\n     *        of an array value.\n     */\n    reduceRight(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduceRight(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n\n    /**\n     * Returns the value of the first element in the stream that meets the condition, or `undefined`\n     * if there is no such element.\n     *\n     * @param predicate This method calls `predicate` once for each element of the stream, in ascending\n     *        order, until it finds one where `predicate` returns a value which is coercible to the\n     *        Boolean value `true`.\n     */\n    find(predicate: (value: T) => value is S): S | undefined;\n    find(predicate: (value: T) => unknown): T | undefined;\n\n    /**\n     * Returns the index of the first element in the stream that meets the condition, or `-1`\n     * if there is no such element.\n     *\n     * @param predicate This method calls `predicate` once for each element of the stream, in ascending\n     *        order, until it finds one where `predicate` returns a value which is coercible to the\n     *        Boolean value `true`.\n     */\n    findIndex(predicate: (value: T) => unknown): number;\n\n    /**\n     * Determines whether the stream includes a certain element, returning `true` or `false` as appropriate.\n     *\n     * @param searchElement The element to search for.\n     */\n    includes(searchElement: T): boolean;\n\n    /**\n     * Calls a defined callback function on each element of the stream and then flattens the result into\n     * a new stream. This is identical to a `map` followed by `flat` with depth 1.\n     *\n     * @param callbackfn Lazily evaluated function mapping stream elements.\n     */\n    flatMap(callbackfn: (value: T) => U | Iterable): Stream;\n\n    /**\n     * Returns a new stream with all sub-stream or sub-array elements concatenated into it recursively up\n     * to the specified depth.\n     *\n     * @param depth The maximum recursion depth. Defaults to 1.\n     */\n    flat(depth?: D): FlatStream;\n\n    /**\n     * Returns the first element in the stream, or `undefined` if the stream is empty.\n     */\n    head(): T | undefined;\n\n    /**\n     * Returns a stream that skips the first `skipCount` elements from this stream.\n     *\n     * @param skipCount The number of elements to skip. If this is larger than the number of elements in\n     *        the stream, an empty stream is returned. 
Defaults to 1.\n     */\n    tail(skipCount?: number): Stream;\n\n    /**\n     * Returns a stream consisting of the elements of this stream, truncated to be no longer than `maxSize`\n     * in length.\n     *\n     * @param maxSize The number of elements the stream should be limited to\n     */\n    limit(maxSize: number): Stream;\n\n    /**\n     * Returns a stream containing only the distinct elements from this stream.\n     * Equality is determined with the same rules as a standard `Set`.\n     *\n     * @param by A function returning the key used to check equality with a previous stream element.\n     *        If omitted, the stream elements themselves are used for comparison.\n     */\n    distinct(by?: (element: T) => Key): Stream;\n\n    /**\n     * Returns a stream that contains all elements that don't exist in the {@link other} iterable.\n     * Equality is determined with the same rules as a standard `Set`.\n     * @param other The elements that should be exluded from this stream.\n     * @param key A function returning the key used to check quality.\n     *        If omitted, the stream elements themselves are used for comparison.\n     */\n    exclude(other: Iterable, key?: (element: T) => Key): Stream;\n\n}\n\nexport type FlatStream = {\n    'done': Stream,\n    'recur': T extends Iterable\n        ? FlatStream>\n        : Stream\n}[Depth extends 0 ? 'done' : 'recur'];\n\nexport type MinusOne = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20][N];\n\n/**\n * The default implementation of `Stream` works with two input functions:\n *  - The first function creates the initial state of an iteration.\n *  - The second function gets the current state as argument and returns an `IteratorResult`.\n */\nexport class StreamImpl implements Stream {\n    protected readonly startFn: () => S;\n    protected readonly nextFn: (state: S) => IteratorResult;\n\n    constructor(startFn: () => S, nextFn: (state: S) => IteratorResult) {\n        this.startFn = startFn;\n        this.nextFn = nextFn;\n    }\n\n    iterator(): IterableIterator {\n        const iterator = {\n            state: this.startFn(),\n            next: () => this.nextFn(iterator.state),\n            [Symbol.iterator]: () => iterator\n        };\n        return iterator;\n    }\n\n    [Symbol.iterator](): Iterator {\n        return this.iterator();\n    }\n\n    isEmpty(): boolean {\n        const iterator = this.iterator();\n        return Boolean(iterator.next().done);\n    }\n\n    count(): number {\n        const iterator = this.iterator();\n        let count = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            count++;\n            next = iterator.next();\n        }\n        return count;\n    }\n\n    toArray(): T[] {\n        const result: T[] = [];\n        const iterator = this.iterator();\n        let next: IteratorResult;\n        do {\n            next = iterator.next();\n            if (next.value !== undefined) {\n                result.push(next.value);\n            }\n        } while (!next.done);\n        return result;\n    }\n\n    toSet(): Set {\n        return new Set(this);\n    }\n\n    toMap(keyFn?: (e: T) => K, valueFn?: (e: T) => V): Map {\n        const entryStream = this.map(element => <[K, V]>[\n            keyFn ? keyFn(element) : element,\n            valueFn ? 
valueFn(element) : element\n        ]);\n        return new Map(entryStream);\n    }\n\n    toString(): string {\n        return this.join();\n    }\n\n    concat(other: Iterable): Stream {\n        const iterator = other[Symbol.iterator]();\n        return new StreamImpl<{ first: S, firstDone: boolean }, T | T2>(\n            () => ({ first: this.startFn(), firstDone: false }),\n            state => {\n                let result: IteratorResult;\n                if (!state.firstDone) {\n                    do {\n                        result = this.nextFn(state.first);\n                        if (!result.done) {\n                            return result;\n                        }\n                    } while (!result.done);\n                    state.firstDone = true;\n                }\n                do {\n                    result = iterator.next();\n                    if (!result.done) {\n                        return result;\n                    }\n                } while (!result.done);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    join(separator = ','): string {\n        const iterator = this.iterator();\n        let value = '';\n        let result: IteratorResult;\n        let addSeparator = false;\n        do {\n            result = iterator.next();\n            if (!result.done) {\n                if (addSeparator) {\n                    value += separator;\n                }\n                value += toString(result.value);\n            }\n            addSeparator = true;\n        } while (!result.done);\n        return value;\n    }\n\n    indexOf(searchElement: T, fromIndex = 0): number {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            if (index >= fromIndex && next.value === searchElement) {\n                return index;\n            }\n            next = iterator.next();\n            index++;\n        }\n        return -1;\n    }\n\n    // In the following definition the '& this' part in the return type is important\n    // _and_ the order within 'Stream & this' is crucial!\n    // Otherwise Typescript would infer the type of 'this' as 'StreamImpl & Stream'\n    // (or ' & Stream') and usages like\n    // ```\n    //  const stream = new StreamImpl(...);\n    //  ... stream.every() & stream....\n    // ```\n    // cannot benefit from '', as Typescript would priorize the signatures\n    // of 'StreamImpl' (i.e. 
those of 'Stream') over those of 'Stream'.\n    // With the order of 'Stream & this' the signatures of 'Stream' get precedence.\n    every(predicate: (value: T) => value is U): this is Stream & this;\n    every(predicate: (value: T) => unknown): boolean;\n    every(predicate: (value: T) => unknown): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (!predicate(next.value)) {\n                return false;\n            }\n            next = iterator.next();\n        }\n        return true;\n    }\n\n    some(predicate: (value: T) => unknown): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return true;\n            }\n            next = iterator.next();\n        }\n        return false;\n    }\n\n    forEach(callbackfn: (value: T, index: number) => void): void {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            callbackfn(next.value, index);\n            next = iterator.next();\n            index++;\n        }\n    }\n\n    map(callbackfn: (value: T) => U): Stream {\n        return new StreamImpl(\n            this.startFn,\n            (state) => {\n                const { done, value } = this.nextFn(state);\n                if (done) {\n                    return DONE_RESULT;\n                } else {\n                    return { done: false, value: callbackfn(value) };\n                }\n            }\n        );\n    }\n\n    // for remarks on the return type definition refer to 'every(...)'\n    filter(predicate: (value: T) => value is U): Stream & this;\n    filter(predicate: (value: T) => unknown): Stream & this;\n    filter(predicate: (value: T) => unknown): Stream {\n        return new StreamImpl(\n            this.startFn,\n            state => {\n                let result: IteratorResult;\n                do {\n                    result = this.nextFn(state);\n                    if (!result.done && predicate(result.value)) {\n                        return result;\n                    }\n                } while (!result.done);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    nonNullable(): Stream> {\n        return this.filter(e => e !== undefined && e !== null) as Stream>;\n    }\n\n    reduce(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduce(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n    reduce(callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        const iterator = this.iterator();\n        let previousValue: U | T | undefined = initialValue;\n        let next = iterator.next();\n        while (!next.done) {\n            if (previousValue === undefined) {\n                previousValue = next.value;\n            } else {\n                previousValue = callbackfn(previousValue, next.value);\n            }\n            next = iterator.next();\n        }\n        return previousValue;\n    }\n\n    reduceRight(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduceRight(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n    reduceRight(callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        return 
this.recursiveReduce(this.iterator(), callbackfn, initialValue);\n    }\n\n    protected recursiveReduce(iterator: Iterator, callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        const next = iterator.next();\n        if (next.done) {\n            return initialValue;\n        }\n        const previousValue = this.recursiveReduce(iterator, callbackfn, initialValue);\n        if (previousValue === undefined) {\n            return next.value;\n        }\n        return callbackfn(previousValue, next.value);\n    }\n\n    find(predicate: (value: T) => value is S): S | undefined;\n    find(predicate: (value: T) => unknown): T | undefined;\n    find(predicate: (value: T) => unknown): T | undefined {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return next.value;\n            }\n            next = iterator.next();\n        }\n        return undefined;\n    }\n\n    findIndex(predicate: (value: T) => unknown): number {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return index;\n            }\n            next = iterator.next();\n            index++;\n        }\n        return -1;\n    }\n\n    includes(searchElement: T): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (next.value === searchElement) {\n                return true;\n            }\n            next = iterator.next();\n        }\n        return false;\n    }\n\n    flatMap(callbackfn: (value: T) => U | Iterable): Stream {\n        type FlatMapState = { this: S, iterator?: Iterator }\n        return new StreamImpl(\n            () => ({ this: this.startFn() }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (next.done) {\n                            state.iterator = undefined;\n                        } else {\n                            return next;\n                        }\n                    }\n                    const { done, value } = this.nextFn(state.this);\n                    if (!done) {\n                        const mapped = callbackfn(value);\n                        if (isIterable(mapped)) {\n                            state.iterator = mapped[Symbol.iterator]();\n                        } else {\n                            return { done: false, value: mapped };\n                        }\n                    }\n                } while (state.iterator);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    flat(depth?: D): FlatStream {\n        if (depth === undefined) {\n            depth = 1 as D;\n        }\n        if (depth <= 0) {\n            return this as unknown as FlatStream;\n        }\n        const stream = depth > 1 ? 
this.flat(depth - 1) as unknown as StreamImpl : this;\n        type FlatMapState = { this: S, iterator?: Iterator }\n        return new StreamImpl(\n            () => ({ this: stream.startFn() }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (next.done) {\n                            state.iterator = undefined;\n                        } else {\n                            return next;\n                        }\n                    }\n                    const { done, value } = stream.nextFn(state.this);\n                    if (!done) {\n                        if (isIterable(value)) {\n                            state.iterator = value[Symbol.iterator]() as Iterator;\n                        } else {\n                            return { done: false, value: value };\n                        }\n                    }\n                } while (state.iterator);\n                return DONE_RESULT;\n            }\n        ) as unknown as FlatStream;\n    }\n\n    head(): T | undefined {\n        const iterator = this.iterator();\n        const result = iterator.next();\n        if (result.done) {\n            return undefined;\n        }\n        return result.value;\n    }\n\n    tail(skipCount = 1): Stream {\n        return new StreamImpl(\n            () => {\n                const state = this.startFn();\n                for (let i = 0; i < skipCount; i++) {\n                    const next = this.nextFn(state);\n                    if (next.done) {\n                        return state;\n                    }\n                }\n                return state;\n            },\n            this.nextFn\n        );\n    }\n\n    limit(maxSize: number): Stream {\n        return new StreamImpl<{ size: number, state: S }, T>(\n            () => ({ size: 0, state: this.startFn() }),\n            state => {\n                state.size++;\n                if (state.size > maxSize) {\n                    return DONE_RESULT;\n                }\n                return this.nextFn(state.state);\n            }\n        );\n    }\n\n    distinct(by?: (element: T) => Key): Stream {\n        const set = new Set();\n        return this.filter(e => {\n            const value = by ? by(e) : e;\n            if (set.has(value)) {\n                return false;\n            } else {\n                set.add(value);\n                return true;\n            }\n        });\n    }\n\n    exclude(other: Iterable, key?: (element: T) => Key): Stream {\n        const otherKeySet = new Set();\n        for (const item of other) {\n            const value = key ? key(item) : item;\n            otherKeySet.add(value);\n        }\n        return this.filter(e => {\n            const ownKey = key ? 
key(e) : e;\n            return !otherKeySet.has(ownKey);\n        });\n    }\n}\n\nfunction toString(item: unknown): string {\n    if (typeof item === 'string') {\n        return item as string;\n    }\n    if (typeof item === 'undefined') {\n        return 'undefined';\n    }\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    if (typeof (item as any).toString === 'function') {\n        // eslint-disable-next-line @typescript-eslint/no-explicit-any\n        return (item as any).toString();\n    }\n    return Object.prototype.toString.call(item);\n}\n\nfunction isIterable(obj: unknown): obj is Iterable {\n    return !!obj && typeof (obj as Iterable)[Symbol.iterator] === 'function';\n}\n\n/**\n * An empty stream of any type.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport const EMPTY_STREAM: Stream = new StreamImpl(() => undefined, () => DONE_RESULT);\n\n/**\n * Use this `IteratorResult` when implementing a `StreamImpl` to indicate that there are no more elements in the stream.\n */\nexport const DONE_RESULT: IteratorReturnResult = Object.freeze({ done: true, value: undefined });\n\n/**\n * Create a stream from one or more iterables or array-likes.\n */\nexport function stream(...collections: Array | ArrayLike>): Stream {\n    if (collections.length === 1) {\n        const collection = collections[0];\n        if (collection instanceof StreamImpl) {\n            return collection as Stream;\n        }\n        if (isIterable(collection)) {\n            return new StreamImpl, T>(\n                () => collection[Symbol.iterator](),\n                (iterator) => iterator.next()\n            );\n        }\n        if (typeof collection.length === 'number') {\n            return new StreamImpl<{ index: number }, T>(\n                () => ({ index: 0 }),\n                (state) => {\n                    if (state.index < collection.length) {\n                        return { done: false, value: collection[state.index++] };\n                    } else {\n                        return DONE_RESULT;\n                    }\n                }\n            );\n        }\n    }\n    if (collections.length > 1) {\n        type State = { collIndex: number, iterator?: Iterator, array?: ArrayLike, arrIndex: number };\n        return new StreamImpl(\n            () => ({ collIndex: 0, arrIndex: 0 }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (!next.done) {\n                            return next;\n                        }\n                        state.iterator = undefined;\n                    }\n                    if (state.array) {\n                        if (state.arrIndex < state.array.length) {\n                            return { done: false, value: state.array[state.arrIndex++] };\n                        }\n                        state.array = undefined;\n                        state.arrIndex = 0;\n                    }\n                    if (state.collIndex < collections.length) {\n                        const collection = collections[state.collIndex++];\n                        if (isIterable(collection)) {\n                            state.iterator = collection[Symbol.iterator]();\n                        } else if (collection && typeof collection.length === 'number') {\n                            state.array = collection;\n                        }\n                    }\n                } 
while (state.iterator || state.array || state.collIndex < collections.length);\n                return DONE_RESULT;\n            }\n        );\n    }\n    return EMPTY_STREAM;\n}\n\n/**\n * A tree iterator adds the ability to prune the current iteration.\n */\nexport interface TreeIterator extends IterableIterator {\n    /**\n     * Skip the whole subtree below the last returned element. The iteration continues as if that\n     * element had no children.\n     */\n    prune(): void\n}\n\n/**\n * A tree stream is used to stream the elements of a tree, for example an AST or CST.\n */\nexport interface TreeStream extends Stream {\n    iterator(): TreeIterator\n}\n\n/**\n * The default implementation of `TreeStream` takes a root element and a function that computes the\n * children of its argument. Whether the root node included in the stream is controlled with the\n * `includeRoot` option, which defaults to `false`.\n */\nexport class TreeStreamImpl\n    extends StreamImpl<{ iterators: Array>, pruned: boolean }, T>\n    implements TreeStream {\n\n    constructor(root: T, children: (node: T) => Iterable, options?: { includeRoot?: boolean }) {\n        super(\n            () => ({\n                iterators: options?.includeRoot ? [[root][Symbol.iterator]()] : [children(root)[Symbol.iterator]()],\n                pruned: false\n            }),\n            state => {\n                if (state.pruned) {\n                    state.iterators.pop();\n                    state.pruned = false;\n                }\n                while (state.iterators.length > 0) {\n                    const iterator = state.iterators[state.iterators.length - 1];\n                    const next = iterator.next();\n                    if (next.done) {\n                        state.iterators.pop();\n                    } else {\n                        state.iterators.push(children(next.value)[Symbol.iterator]());\n                        return next;\n                    }\n                }\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    override iterator(): TreeIterator {\n        const iterator = {\n            state: this.startFn(),\n            next: () => this.nextFn(iterator.state),\n            prune: () => {\n                iterator.state.pruned = true;\n            },\n            [Symbol.iterator]: () => iterator\n        };\n        return iterator;\n    }\n}\n\n/**\n * A set of utility functions that reduce a stream to a single value.\n */\nexport namespace Reduction {\n\n    /**\n     * Compute the sum of a number stream.\n     */\n    export function sum(stream: Stream): number {\n        return stream.reduce((a, b) => a + b, 0);\n    }\n\n    /**\n     * Compute the product of a number stream.\n     */\n    export function product(stream: Stream): number {\n        return stream.reduce((a, b) => a * b, 0);\n    }\n\n    /**\n     * Compute the minimum of a number stream. Returns `undefined` if the stream is empty.\n     */\n    export function min(stream: Stream): number | undefined {\n        return stream.reduce((a, b) => Math.min(a, b));\n    }\n\n    /**\n     * Compute the maximum of a number stream. 
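A sketch of `TreeStreamImpl` and the `prune` capability described above. The `Node` shape is a hypothetical example; only the children accessor passed to the constructor matters:

import { TreeStreamImpl } from 'langium'; // path assumed; the class is defined in this file

interface Node { name: string; children: Node[] }
const root: Node = {
    name: 'root',
    children: [{ name: 'a', children: [] }, { name: 'b', children: [] }]
};

const tree = new TreeStreamImpl(root, n => n.children, { includeRoot: true });
tree.map(n => n.name).toArray();  // ['root', 'a', 'b'] (depth-first, pre-order)

const it = tree.iterator();
it.next();   // yields the root element
it.prune();  // skips the whole subtree below root, so the next call ends the iteration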
Returns `undefined` if the stream is empty.\n     */\n    export function max(stream: Stream): number | undefined {\n        return stream.reduce((a, b) => Math.max(a, b));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { assertUnreachable } from '../utils/errors.js';\nimport * as ast from '../languages/generated/ast.js';\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport { isCompositeCstNode } from '../syntax-tree.js';\nimport { getContainerOfType, streamAllContents } from './ast-utils.js';\nimport { streamCst } from './cst-utils.js';\nimport { escapeRegExp } from './regexp-utils.js';\n\n/**\n * Returns the entry rule of the given grammar, if any. If the grammar file does not contain an entry rule,\n * the result is `undefined`.\n */\nexport function getEntryRule(grammar: ast.Grammar): ast.ParserRule | undefined {\n    return grammar.rules.find(e => ast.isParserRule(e) && e.entry) as ast.ParserRule;\n}\n\n/**\n * Returns all hidden terminal rules of the given grammar, if any.\n */\nexport function getHiddenRules(grammar: ast.Grammar) {\n    return grammar.rules.filter((e): e is ast.TerminalRule => ast.isTerminalRule(e) && e.hidden);\n}\n\n/**\n * Returns all rules that can be reached from the topmost rules of the specified grammar (entry and hidden terminal rules).\n *\n * @param grammar The grammar that contains all rules\n * @param allTerminals Whether or not to include terminals that are referenced only by other terminals\n * @returns A list of referenced parser and terminal rules. 
If the grammar contains no entry rule,\n *      this function returns all rules of the specified grammar.\n */\nexport function getAllReachableRules(grammar: ast.Grammar, allTerminals: boolean): Set {\n    const ruleNames = new Set();\n    const entryRule = getEntryRule(grammar);\n    if (!entryRule) {\n        return new Set(grammar.rules);\n    }\n\n    const topMostRules = [entryRule as ast.AbstractRule].concat(getHiddenRules(grammar));\n    for (const rule of topMostRules) {\n        ruleDfs(rule, ruleNames, allTerminals);\n    }\n\n    const rules = new Set();\n    for (const rule of grammar.rules) {\n        if (ruleNames.has(rule.name) || (ast.isTerminalRule(rule) && rule.hidden)) {\n            rules.add(rule);\n        }\n    }\n    return rules;\n}\n\nfunction ruleDfs(rule: ast.AbstractRule, visitedSet: Set, allTerminals: boolean): void {\n    visitedSet.add(rule.name);\n    streamAllContents(rule).forEach(node => {\n        if (ast.isRuleCall(node) || (allTerminals && ast.isTerminalRuleCall(node))) {\n            const refRule = node.rule.ref;\n            if (refRule && !visitedSet.has(refRule.name)) {\n                ruleDfs(refRule, visitedSet, allTerminals);\n            }\n        }\n    });\n}\n\n/**\n * Determines the grammar expression used to parse a cross-reference (usually a reference to a terminal rule).\n * A cross-reference can declare this expression explicitly in the form `[Type : Terminal]`, but if `Terminal`\n * is omitted, this function attempts to infer it from the name of the referenced `Type` (using `findNameAssignment`).\n *\n * Returns the grammar expression used to parse the given cross-reference, or `undefined` if it is not declared\n * and cannot be inferred.\n */\nexport function getCrossReferenceTerminal(crossRef: ast.CrossReference): ast.AbstractElement | undefined {\n    if (crossRef.terminal) {\n        return crossRef.terminal;\n    } else if (crossRef.type.ref) {\n        const nameAssigment = findNameAssignment(crossRef.type.ref);\n        return nameAssigment?.terminal;\n    }\n    return undefined;\n}\n\n/**\n * Determines whether the given terminal rule represents a comment. This is true if the rule is marked\n * as `hidden` and it does not match white space. This means every hidden token (i.e. excluded from the AST)\n * that contains visible characters is considered a comment.\n */\nexport function isCommentTerminal(terminalRule: ast.TerminalRule): boolean {\n    return terminalRule.hidden && !terminalRegex(terminalRule).test(' ');\n}\n\n/**\n * Find all CST nodes within the given node that contribute to the specified property.\n *\n * @param node A CST node in which to look for property assignments. If this is undefined, the result is an empty array.\n * @param property A property name of the constructed AST node. If this is undefined, the result is an empty array.\n */\nexport function findNodesForProperty(node: CstNode | undefined, property: string | undefined): CstNode[] {\n    if (!node || !property) {\n        return [];\n    }\n    return findNodesForPropertyInternal(node, property, node.astNode, true);\n}\n\n/**\n * Find a single CST node within the given node that contributes to the specified property.\n *\n * @param node A CST node in which to look for property assignments. If this is undefined, the result is `undefined`.\n * @param property A property name of the constructed AST node. 
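A sketch of how the reachability helpers defined above compose. `grammar` is an assumed, already parsed `ast.Grammar` instance; the calls themselves match the signatures above:

// `grammar` is assumed to come from a parsed grammar document.
const entry = getEntryRule(grammar);                    // undefined if no rule is marked `entry`
const hidden = getHiddenRules(grammar);                 // hidden terminals such as whitespace and comments
const reachable = getAllReachableRules(grammar, true);  // `true`: also follow terminal-to-terminal references
for (const rule of reachable) {
    console.log(rule.name);
}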
If this is undefined, the result is `undefined`.\n * @param index If no index is specified or the index is less than zero, the first found node is returned. If the\n *        specified index exceeds the number of assignments to the property, the last found node is returned. Otherwise,\n *        the node with the specified index is returned.\n */\nexport function findNodeForProperty(node: CstNode | undefined, property: string | undefined, index?: number): CstNode | undefined {\n    if (!node || !property) {\n        return undefined;\n    }\n    const nodes = findNodesForPropertyInternal(node, property, node.astNode, true);\n    if (nodes.length === 0) {\n        return undefined;\n    }\n    if (index !== undefined) {\n        index = Math.max(0, Math.min(index, nodes.length - 1));\n    } else {\n        index = 0;\n    }\n    return nodes[index];\n}\n\nfunction findNodesForPropertyInternal(node: CstNode, property: string, element: AstNode | undefined, first: boolean): CstNode[] {\n    if (!first) {\n        const nodeFeature = getContainerOfType(node.grammarSource, ast.isAssignment);\n        if (nodeFeature && nodeFeature.feature === property) {\n            return [node];\n        }\n    }\n    if (isCompositeCstNode(node) && node.astNode === element) {\n        return node.content.flatMap(e => findNodesForPropertyInternal(e, property, element, false));\n    }\n    return [];\n}\n\n/**\n * Find all CST nodes within the given node that correspond to the specified keyword.\n *\n * @param node A CST node in which to look for keywords. If this is undefined, the result is an empty array.\n * @param keyword A keyword as specified in the grammar.\n */\nexport function findNodesForKeyword(node: CstNode | undefined, keyword: string): CstNode[] {\n    if (!node) {\n        return [];\n    }\n    return findNodesForKeywordInternal(node, keyword, node?.astNode);\n}\n\n/**\n * Find a single CST node within the given node that corresponds to the specified keyword.\n *\n * @param node A CST node in which to look for keywords. If this is undefined, the result is `undefined`.\n * @param keyword A keyword as specified in the grammar.\n * @param index If no index is specified or the index is less than zero, the first found node is returned. If the\n *        specified index exceeds the number of keyword occurrences, the last found node is returned. 
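A sketch of locating concrete-syntax nodes with `findNodeForProperty` and `findNodesForKeyword` defined above. `astNode` is an assumed AstNode taken from a parsed document, so its `$cstNode` is populated:

const nameNode = findNodeForProperty(astNode.$cstNode, 'name');
if (nameNode) {
    console.log(nameNode.range);  // source range of the assignment to the `name` property
}
const keywordNodes = findNodesForKeyword(astNode.$cstNode, 'grammar');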
Otherwise,\n *        the node with the specified index is returned.\n */\nexport function findNodeForKeyword(node: CstNode | undefined, keyword: string, index?: number): CstNode | undefined {\n    if (!node) {\n        return undefined;\n    }\n    const nodes = findNodesForKeywordInternal(node, keyword, node?.astNode);\n    if (nodes.length === 0) {\n        return undefined;\n    }\n    if (index !== undefined) {\n        index = Math.max(0, Math.min(index, nodes.length - 1));\n    } else {\n        index = 0;\n    }\n    return nodes[index];\n}\n\nexport function findNodesForKeywordInternal(node: CstNode, keyword: string, element: AstNode | undefined): CstNode[] {\n    if (node.astNode !== element) {\n        return [];\n    }\n    if (ast.isKeyword(node.grammarSource) && node.grammarSource.value === keyword) {\n        return [node];\n    }\n    const treeIterator = streamCst(node).iterator();\n    let result: IteratorResult;\n    const keywordNodes: CstNode[] = [];\n    do {\n        result = treeIterator.next();\n        if (!result.done) {\n            const childNode = result.value;\n            if (childNode.astNode === element) {\n                if (ast.isKeyword(childNode.grammarSource) && childNode.grammarSource.value === keyword) {\n                    keywordNodes.push(childNode);\n                }\n            } else {\n                treeIterator.prune();\n            }\n        }\n    } while (!result.done);\n    return keywordNodes;\n}\n\n/**\n * If the given CST node was parsed in the context of a property assignment, the respective `Assignment` grammar\n * node is returned. If no assignment is found, the result is `undefined`.\n *\n * @param cstNode A CST node for which to find a property assignment.\n */\nexport function findAssignment(cstNode: CstNode): ast.Assignment | undefined {\n    const astNode = cstNode.astNode;\n    // Only search until the ast node of the parent cst node is no longer the original ast node\n    // This would make us jump to a preceding rule call, which contains only unrelated assignments\n    while (astNode === cstNode.container?.astNode) {\n        const assignment = getContainerOfType(cstNode.grammarSource, ast.isAssignment);\n        if (assignment) {\n            return assignment;\n        }\n        cstNode = cstNode.container;\n    }\n    return undefined;\n}\n\n/**\n * Find an assignment to the `name` property for the given grammar type. This requires the `type` to be inferred\n * from a parser rule, and that rule must contain an assignment to the `name` property. In all other cases,\n * this function returns `undefined`.\n */\nexport function findNameAssignment(type: ast.AbstractType): ast.Assignment | undefined {\n    let startNode: AstNode = type;\n    if (ast.isInferredType(startNode)) {\n        // for inferred types, the location to start searching for the name-assignment is different\n        if (ast.isAction(startNode.$container)) {\n            // a type which is explicitly inferred by an action: investigate the sibbling of the Action node, i.e. 
start searching at the Action's parent\n            startNode = startNode.$container.$container!;\n        } else if (ast.isParserRule(startNode.$container)) {\n            // investigate the parser rule with the explicitly inferred type\n            startNode = startNode.$container;\n        } else {\n            assertUnreachable(startNode.$container);\n        }\n    }\n    return findNameAssignmentInternal(type, startNode, new Map());\n}\n\nfunction findNameAssignmentInternal(type: ast.AbstractType, startNode: AstNode, cache: Map): ast.Assignment | undefined {\n    // the cache is only required to prevent infinite loops\n    function go(node: AstNode, refType: ast.AbstractType): ast.Assignment | undefined {\n        let childAssignment: ast.Assignment | undefined = undefined;\n        const parentAssignment = getContainerOfType(node, ast.isAssignment);\n        // No parent assignment implies unassigned rule call\n        if (!parentAssignment) {\n            childAssignment = findNameAssignmentInternal(refType, refType, cache);\n        }\n        cache.set(type, childAssignment);\n        return childAssignment;\n    }\n\n    if (cache.has(type)) {\n        return cache.get(type);\n    }\n    cache.set(type, undefined);\n    for (const node of streamAllContents(startNode)) {\n        if (ast.isAssignment(node) && node.feature.toLowerCase() === 'name') {\n            cache.set(type, node);\n            return node;\n        } else if (ast.isRuleCall(node) && ast.isParserRule(node.rule.ref)) {\n            return go(node, node.rule.ref);\n        } else if (ast.isSimpleType(node) && node.typeRef?.ref) {\n            return go(node, node.typeRef.ref);\n        }\n    }\n    return undefined;\n}\n\nexport function getActionAtElement(element: ast.AbstractElement): ast.Action | undefined {\n    const parent = element.$container;\n    if (ast.isGroup(parent)) {\n        const elements = parent.elements;\n        const index = elements.indexOf(element);\n        for (let i = index - 1; i >= 0; i--) {\n            const item = elements[i];\n            if (ast.isAction(item)) {\n                return item;\n            } else {\n                const action = streamAllContents(elements[i]).find(ast.isAction);\n                if (action) {\n                    return action;\n                }\n            }\n        }\n    }\n    if (ast.isAbstractElement(parent)) {\n        return getActionAtElement(parent);\n    } else {\n        return undefined;\n    }\n}\n\nexport type Cardinality = '?' | '*' | '+' | undefined;\nexport type Operator = '=' | '+=' | '?=' | undefined;\n\nexport function isOptionalCardinality(cardinality?: Cardinality, element?: ast.AbstractElement): boolean {\n    return cardinality === '?' 
|| cardinality === '*' || (ast.isGroup(element) && Boolean(element.guardCondition));\n}\n\nexport function isArrayCardinality(cardinality?: Cardinality): boolean {\n    return cardinality === '*' || cardinality === '+';\n}\n\nexport function isArrayOperator(operator?: Operator): boolean {\n    return operator === '+=';\n}\n\n/**\n * Determines whether the given parser rule is a _data type rule_, meaning that it has a\n * primitive return type like `number`, `boolean`, etc.\n */\nexport function isDataTypeRule(rule: ast.ParserRule): boolean {\n    return isDataTypeRuleInternal(rule, new Set());\n}\n\nfunction isDataTypeRuleInternal(rule: ast.ParserRule, visited: Set): boolean {\n    if (visited.has(rule)) {\n        return true;\n    } else {\n        visited.add(rule);\n    }\n    for (const node of streamAllContents(rule)) {\n        if (ast.isRuleCall(node)) {\n            if (!node.rule.ref) {\n                // RuleCall to unresolved rule. Don't assume `rule` is a DataType rule.\n                return false;\n            }\n            if (ast.isParserRule(node.rule.ref) && !isDataTypeRuleInternal(node.rule.ref, visited)) {\n                return false;\n            }\n        } else if (ast.isAssignment(node)) {\n            return false;\n        } else if (ast.isAction(node)) {\n            return false;\n        }\n    }\n    return Boolean(rule.definition);\n}\n\nexport function isDataType(type: ast.Type): boolean {\n    return isDataTypeInternal(type.type, new Set());\n}\n\nfunction isDataTypeInternal(type: ast.TypeDefinition, visited: Set): boolean {\n    if (visited.has(type)) {\n        return true;\n    } else {\n        visited.add(type);\n    }\n    if (ast.isArrayType(type)) {\n        return false;\n    } else if (ast.isReferenceType(type)) {\n        return false;\n    } else if (ast.isUnionType(type)) {\n        return type.types.every(e => isDataTypeInternal(e, visited));\n    } else if (ast.isSimpleType(type)) {\n        if (type.primitiveType !== undefined) {\n            return true;\n        } else if (type.stringType !== undefined) {\n            return true;\n        } else if (type.typeRef !== undefined) {\n            const ref = type.typeRef.ref;\n            if (ast.isType(ref)) {\n                return isDataTypeInternal(ref.type, visited);\n            } else {\n                return false;\n            }\n        } else {\n            return false;\n        }\n    } else {\n        return false;\n    }\n}\n\nexport function getExplicitRuleType(rule: ast.ParserRule): string | undefined {\n    if (rule.inferredType) {\n        return rule.inferredType.name;\n    } else if (rule.dataType) {\n        return rule.dataType;\n    } else if (rule.returnType) {\n        const refType = rule.returnType.ref;\n        if(refType) {\n            // check if we need to check Action as return type\n            if (ast.isParserRule(refType)) {\n                return refType.name;\n            }  else if(ast.isInterface(refType) || ast.isType(refType)) {\n                return refType.name;\n            }\n        }\n    }\n    return undefined;\n}\n\nexport function getTypeName(type: ast.AbstractType | ast.Action): string {\n    if (ast.isParserRule(type)) {\n        return isDataTypeRule(type) ? type.name : getExplicitRuleType(type) ?? 
type.name;\n    } else if (ast.isInterface(type) || ast.isType(type) || ast.isReturnType(type)) {\n        return type.name;\n    } else if (ast.isAction(type)) {\n        const actionType = getActionType(type);\n        if (actionType) {\n            return actionType;\n        }\n    } else if (ast.isInferredType(type)) {\n        return type.name;\n    }\n    throw new Error('Cannot get name of Unknown Type');\n}\n\nexport function getActionType(action: ast.Action): string | undefined {\n    if (action.inferredType) {\n        return action.inferredType.name;\n    } else if (action.type?.ref) {\n        return getTypeName(action.type.ref);\n    }\n    return undefined; // not inferring and not referencing a valid type\n}\n\nexport function getRuleType(rule: ast.AbstractRule): string {\n    if (ast.isTerminalRule(rule)) {\n        return rule.type?.name ?? 'string';\n    } else {\n        return isDataTypeRule(rule) ? rule.name : getExplicitRuleType(rule) ?? rule.name;\n    }\n}\n\nexport function terminalRegex(terminalRule: ast.TerminalRule): RegExp {\n    const flags: Flags = {\n        s: false,\n        i: false,\n        u: false\n    };\n    const source = abstractElementToRegex(terminalRule.definition, flags);\n    const flagText = Object.entries(flags).filter(([, value]) => value).map(([name]) => name).join('');\n    return new RegExp(source, flagText);\n}\n\n// Using [\\s\\S]* allows to match everything, compared to . which doesn't match line terminators\nconst WILDCARD = /[\\s\\S]/.source;\n\ntype Flags = {\n    s: boolean;\n    i: boolean;\n    u: boolean;\n}\n\nfunction abstractElementToRegex(element: ast.AbstractElement, flags?: Flags): string {\n    if (ast.isTerminalAlternatives(element)) {\n        return terminalAlternativesToRegex(element);\n    } else if (ast.isTerminalGroup(element)) {\n        return terminalGroupToRegex(element);\n    } else if (ast.isCharacterRange(element)) {\n        return characterRangeToRegex(element);\n    } else if (ast.isTerminalRuleCall(element)) {\n        const rule = element.rule.ref;\n        if (!rule) {\n            throw new Error('Missing rule reference.');\n        }\n        return withCardinality(abstractElementToRegex(rule.definition), {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead\n        });\n    } else if (ast.isNegatedToken(element)) {\n        return negateTokenToRegex(element);\n    } else if (ast.isUntilToken(element)) {\n        return untilTokenToRegex(element);\n    } else if (ast.isRegexToken(element)) {\n        const lastSlash = element.regex.lastIndexOf('/');\n        const source = element.regex.substring(1, lastSlash);\n        const regexFlags = element.regex.substring(lastSlash + 1);\n        if (flags) {\n            flags.i = regexFlags.includes('i');\n            flags.s = regexFlags.includes('s');\n            flags.u = regexFlags.includes('u');\n        }\n        return withCardinality(source, {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead,\n            wrap: false\n        });\n    } else if (ast.isWildcard(element)) {\n        return withCardinality(WILDCARD, {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead\n        });\n    } else {\n        throw new Error(`Invalid terminal element: ${element?.$type}`);\n    }\n}\n\nfunction terminalAlternativesToRegex(alternatives: ast.TerminalAlternatives): string {\n    return withCardinality(alternatives.elements.map(e => 
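A sketch of `terminalRegex` defined above, assuming `terminalRule` is an `ast.TerminalRule` taken from a parsed grammar, for example a whitespace terminal such as `terminal WS: /\s+/;`:

const regex = terminalRegex(terminalRule);  // e.g. /\s+/ for the hypothetical WS rule
regex.test('   ');                          // true
isCommentTerminal(terminalRule);            // false: a hidden rule that matches whitespace is not a comment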
abstractElementToRegex(e)).join('|'), {\n        cardinality: alternatives.cardinality,\n        lookahead: alternatives.lookahead\n    });\n}\n\nfunction terminalGroupToRegex(group: ast.TerminalGroup): string {\n    return withCardinality(group.elements.map(e => abstractElementToRegex(e)).join(''), {\n        cardinality: group.cardinality,\n        lookahead: group.lookahead\n    });\n}\n\nfunction untilTokenToRegex(until: ast.UntilToken): string {\n    return withCardinality(`${WILDCARD}*?${abstractElementToRegex(until.terminal)}`, {\n        cardinality: until.cardinality,\n        lookahead: until.lookahead\n    });\n}\n\nfunction negateTokenToRegex(negate: ast.NegatedToken): string {\n    return withCardinality(`(?!${abstractElementToRegex(negate.terminal)})${WILDCARD}*?`, {\n        cardinality: negate.cardinality,\n        lookahead: negate.lookahead\n    });\n}\n\nfunction characterRangeToRegex(range: ast.CharacterRange): string {\n    if (range.right) {\n        return withCardinality(`[${keywordToRegex(range.left)}-${keywordToRegex(range.right)}]`, {\n            cardinality: range.cardinality,\n            lookahead: range.lookahead,\n            wrap: false\n        });\n    }\n    return withCardinality(keywordToRegex(range.left), {\n        cardinality: range.cardinality,\n        lookahead: range.lookahead,\n        wrap: false\n    });\n}\n\nfunction keywordToRegex(keyword: ast.Keyword): string {\n    return escapeRegExp(keyword.value);\n}\n\nfunction withCardinality(regex: string, options: {\n    cardinality?: string\n    wrap?: boolean\n    lookahead?: string\n}): string {\n    if (options.wrap !== false || options.lookahead) {\n        regex = `(${options.lookahead ?? ''}${regex})`;\n    }\n    if (options.cardinality) {\n        return `${regex}${options.cardinality}`;\n    }\n    return regex;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CstNode } from '../syntax-tree.js';\n\nexport class ErrorWithLocation extends Error {\n    constructor(node: CstNode | undefined, message: string) {\n        super(node ? `${message} at ${node.range.start.line}:${node.range.start.character}` : message);\n    }\n}\n\nexport function assertUnreachable(_: never): never {\n    throw new Error('Error! 
The input value was not handled.');\n}\n", "/******************************************************************************\n * This file was generated by langium-cli 3.0.0.\n * DO NOT EDIT MANUALLY!\n ******************************************************************************/\n\n/* eslint-disable */\nimport type { AstNode, Reference, ReferenceInfo, TypeMetaData } from '../../syntax-tree.js';\nimport { AbstractAstReflection } from '../../syntax-tree.js';\n\nexport const LangiumGrammarTerminals = {\n    ID: /\\^?[_a-zA-Z][\\w_]*/,\n    STRING: /\"(\\\\.|[^\"\\\\])*\"|'(\\\\.|[^'\\\\])*'/,\n    NUMBER: /NaN|-?((\\d*\\.\\d+|\\d+)([Ee][+-]?\\d+)?|Infinity)/,\n    RegexLiteral: /\\/(?![*+?])(?:[^\\r\\n\\[/\\\\]|\\\\.|\\[(?:[^\\r\\n\\]\\\\]|\\\\.)*\\])+\\/[a-z]*/,\n    WS: /\\s+/,\n    ML_COMMENT: /\\/\\*[\\s\\S]*?\\*\\//,\n    SL_COMMENT: /\\/\\/[^\\n\\r]*/,\n};\n\nexport type AbstractRule = ParserRule | TerminalRule;\n\nexport const AbstractRule = 'AbstractRule';\n\nexport function isAbstractRule(item: unknown): item is AbstractRule {\n    return reflection.isInstance(item, AbstractRule);\n}\n\nexport type AbstractType = InferredType | Interface | ParserRule | Type;\n\nexport const AbstractType = 'AbstractType';\n\nexport function isAbstractType(item: unknown): item is AbstractType {\n    return reflection.isInstance(item, AbstractType);\n}\n\nexport type Condition = BooleanLiteral | Conjunction | Disjunction | Negation | ParameterReference;\n\nexport const Condition = 'Condition';\n\nexport function isCondition(item: unknown): item is Condition {\n    return reflection.isInstance(item, Condition);\n}\n\nexport type FeatureName = 'current' | 'entry' | 'extends' | 'false' | 'fragment' | 'grammar' | 'hidden' | 'import' | 'infer' | 'infers' | 'interface' | 'returns' | 'terminal' | 'true' | 'type' | 'with' | PrimitiveType | string;\n\nexport function isFeatureName(item: unknown): item is FeatureName {\n    return isPrimitiveType(item) || item === 'current' || item === 'entry' || item === 'extends' || item === 'false' || item === 'fragment' || item === 'grammar' || item === 'hidden' || item === 'import' || item === 'interface' || item === 'returns' || item === 'terminal' || item === 'true' || item === 'type' || item === 'infer' || item === 'infers' || item === 'with' || (typeof item === 'string' && (/\\^?[_a-zA-Z][\\w_]*/.test(item)));\n}\n\nexport type PrimitiveType = 'Date' | 'bigint' | 'boolean' | 'number' | 'string';\n\nexport function isPrimitiveType(item: unknown): item is PrimitiveType {\n    return item === 'string' || item === 'number' || item === 'boolean' || item === 'Date' || item === 'bigint';\n}\n\nexport type TypeDefinition = ArrayType | ReferenceType | SimpleType | UnionType;\n\nexport const TypeDefinition = 'TypeDefinition';\n\nexport function isTypeDefinition(item: unknown): item is TypeDefinition {\n    return reflection.isInstance(item, TypeDefinition);\n}\n\nexport type ValueLiteral = ArrayLiteral | BooleanLiteral | NumberLiteral | StringLiteral;\n\nexport const ValueLiteral = 'ValueLiteral';\n\nexport function isValueLiteral(item: unknown): item is ValueLiteral {\n    return reflection.isInstance(item, ValueLiteral);\n}\n\nexport interface AbstractElement extends AstNode {\n    readonly $type: 'AbstractElement' | 'Action' | 'Alternatives' | 'Assignment' | 'CharacterRange' | 'CrossReference' | 'EndOfFile' | 'Group' | 'Keyword' | 'NegatedToken' | 'RegexToken' | 'RuleCall' | 'TerminalAlternatives' | 'TerminalGroup' | 'TerminalRuleCall' | 'UnorderedGroup' | 'UntilToken' | 
'Wildcard';\n    cardinality?: '*' | '+' | '?';\n    lookahead?: '?!' | '?;\n}\n\nexport const ArrayLiteral = 'ArrayLiteral';\n\nexport function isArrayLiteral(item: unknown): item is ArrayLiteral {\n    return reflection.isInstance(item, ArrayLiteral);\n}\n\nexport interface ArrayType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'ArrayType';\n    elementType: TypeDefinition;\n}\n\nexport const ArrayType = 'ArrayType';\n\nexport function isArrayType(item: unknown): item is ArrayType {\n    return reflection.isInstance(item, ArrayType);\n}\n\nexport interface BooleanLiteral extends AstNode {\n    readonly $container: ArrayLiteral | Conjunction | Disjunction | Group | NamedArgument | Negation | TypeAttribute;\n    readonly $type: 'BooleanLiteral';\n    true: boolean;\n}\n\nexport const BooleanLiteral = 'BooleanLiteral';\n\nexport function isBooleanLiteral(item: unknown): item is BooleanLiteral {\n    return reflection.isInstance(item, BooleanLiteral);\n}\n\nexport interface Conjunction extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Conjunction';\n    left: Condition;\n    right: Condition;\n}\n\nexport const Conjunction = 'Conjunction';\n\nexport function isConjunction(item: unknown): item is Conjunction {\n    return reflection.isInstance(item, Conjunction);\n}\n\nexport interface Disjunction extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Disjunction';\n    left: Condition;\n    right: Condition;\n}\n\nexport const Disjunction = 'Disjunction';\n\nexport function isDisjunction(item: unknown): item is Disjunction {\n    return reflection.isInstance(item, Disjunction);\n}\n\nexport interface Grammar extends AstNode {\n    readonly $type: 'Grammar';\n    definesHiddenTokens: boolean;\n    hiddenTokens: Array>;\n    imports: Array;\n    interfaces: Array;\n    isDeclared: boolean;\n    name?: string;\n    rules: Array;\n    types: Array;\n    usedGrammars: Array>;\n}\n\nexport const Grammar = 'Grammar';\n\nexport function isGrammar(item: unknown): item is Grammar {\n    return reflection.isInstance(item, Grammar);\n}\n\nexport interface GrammarImport extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'GrammarImport';\n    path: string;\n}\n\nexport const GrammarImport = 'GrammarImport';\n\nexport function isGrammarImport(item: unknown): item is GrammarImport {\n    return reflection.isInstance(item, GrammarImport);\n}\n\nexport interface InferredType extends AstNode {\n    readonly $container: Action | ParserRule;\n    readonly $type: 'InferredType';\n    name: string;\n}\n\nexport const InferredType = 'InferredType';\n\nexport function isInferredType(item: unknown): item is InferredType {\n    return reflection.isInstance(item, InferredType);\n}\n\nexport interface Interface extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'Interface';\n    attributes: Array;\n    name: string;\n    superTypes: Array>;\n}\n\nexport const Interface = 'Interface';\n\nexport function isInterface(item: unknown): item is Interface {\n    return reflection.isInstance(item, Interface);\n}\n\nexport interface NamedArgument extends AstNode {\n    readonly $container: RuleCall;\n    readonly $type: 'NamedArgument';\n    calledByName: boolean;\n    parameter?: Reference;\n    value: Condition;\n}\n\nexport const NamedArgument = 
'NamedArgument';\n\nexport function isNamedArgument(item: unknown): item is NamedArgument {\n    return reflection.isInstance(item, NamedArgument);\n}\n\nexport interface Negation extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Negation';\n    value: Condition;\n}\n\nexport const Negation = 'Negation';\n\nexport function isNegation(item: unknown): item is Negation {\n    return reflection.isInstance(item, Negation);\n}\n\nexport interface NumberLiteral extends AstNode {\n    readonly $container: ArrayLiteral | TypeAttribute;\n    readonly $type: 'NumberLiteral';\n    value: number;\n}\n\nexport const NumberLiteral = 'NumberLiteral';\n\nexport function isNumberLiteral(item: unknown): item is NumberLiteral {\n    return reflection.isInstance(item, NumberLiteral);\n}\n\nexport interface Parameter extends AstNode {\n    readonly $container: ParserRule;\n    readonly $type: 'Parameter';\n    name: string;\n}\n\nexport const Parameter = 'Parameter';\n\nexport function isParameter(item: unknown): item is Parameter {\n    return reflection.isInstance(item, Parameter);\n}\n\nexport interface ParameterReference extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'ParameterReference';\n    parameter: Reference;\n}\n\nexport const ParameterReference = 'ParameterReference';\n\nexport function isParameterReference(item: unknown): item is ParameterReference {\n    return reflection.isInstance(item, ParameterReference);\n}\n\nexport interface ParserRule extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'ParserRule';\n    dataType?: PrimitiveType;\n    definesHiddenTokens: boolean;\n    definition: AbstractElement;\n    entry: boolean;\n    fragment: boolean;\n    hiddenTokens: Array>;\n    inferredType?: InferredType;\n    name: string;\n    parameters: Array;\n    returnType?: Reference;\n    wildcard: boolean;\n}\n\nexport const ParserRule = 'ParserRule';\n\nexport function isParserRule(item: unknown): item is ParserRule {\n    return reflection.isInstance(item, ParserRule);\n}\n\nexport interface ReferenceType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'ReferenceType';\n    referenceType: TypeDefinition;\n}\n\nexport const ReferenceType = 'ReferenceType';\n\nexport function isReferenceType(item: unknown): item is ReferenceType {\n    return reflection.isInstance(item, ReferenceType);\n}\n\nexport interface ReturnType extends AstNode {\n    readonly $container: TerminalRule;\n    readonly $type: 'ReturnType';\n    name: PrimitiveType | string;\n}\n\nexport const ReturnType = 'ReturnType';\n\nexport function isReturnType(item: unknown): item is ReturnType {\n    return reflection.isInstance(item, ReturnType);\n}\n\nexport interface SimpleType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'SimpleType';\n    primitiveType?: PrimitiveType;\n    stringType?: string;\n    typeRef?: Reference;\n}\n\nexport const SimpleType = 'SimpleType';\n\nexport function isSimpleType(item: unknown): item is SimpleType {\n    return reflection.isInstance(item, SimpleType);\n}\n\nexport interface StringLiteral extends AstNode {\n    readonly $container: ArrayLiteral | TypeAttribute;\n    readonly $type: 'StringLiteral';\n    value: string;\n}\n\nexport const StringLiteral = 
'StringLiteral';\n\nexport function isStringLiteral(item: unknown): item is StringLiteral {\n    return reflection.isInstance(item, StringLiteral);\n}\n\nexport interface TerminalRule extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'TerminalRule';\n    definition: AbstractElement;\n    fragment: boolean;\n    hidden: boolean;\n    name: string;\n    type?: ReturnType;\n}\n\nexport const TerminalRule = 'TerminalRule';\n\nexport function isTerminalRule(item: unknown): item is TerminalRule {\n    return reflection.isInstance(item, TerminalRule);\n}\n\nexport interface Type extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'Type';\n    name: string;\n    type: TypeDefinition;\n}\n\nexport const Type = 'Type';\n\nexport function isType(item: unknown): item is Type {\n    return reflection.isInstance(item, Type);\n}\n\nexport interface TypeAttribute extends AstNode {\n    readonly $container: Interface;\n    readonly $type: 'TypeAttribute';\n    defaultValue?: ValueLiteral;\n    isOptional: boolean;\n    name: FeatureName;\n    type: TypeDefinition;\n}\n\nexport const TypeAttribute = 'TypeAttribute';\n\nexport function isTypeAttribute(item: unknown): item is TypeAttribute {\n    return reflection.isInstance(item, TypeAttribute);\n}\n\nexport interface UnionType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'UnionType';\n    types: Array;\n}\n\nexport const UnionType = 'UnionType';\n\nexport function isUnionType(item: unknown): item is UnionType {\n    return reflection.isInstance(item, UnionType);\n}\n\nexport interface Action extends AbstractElement {\n    readonly $type: 'Action';\n    feature?: FeatureName;\n    inferredType?: InferredType;\n    operator?: '+=' | '=';\n    type?: Reference;\n}\n\nexport const Action = 'Action';\n\nexport function isAction(item: unknown): item is Action {\n    return reflection.isInstance(item, Action);\n}\n\nexport interface Alternatives extends AbstractElement {\n    readonly $type: 'Alternatives';\n    elements: Array;\n}\n\nexport const Alternatives = 'Alternatives';\n\nexport function isAlternatives(item: unknown): item is Alternatives {\n    return reflection.isInstance(item, Alternatives);\n}\n\nexport interface Assignment extends AbstractElement {\n    readonly $type: 'Assignment';\n    feature: FeatureName;\n    operator: '+=' | '=' | '?=';\n    terminal: AbstractElement;\n}\n\nexport const Assignment = 'Assignment';\n\nexport function isAssignment(item: unknown): item is Assignment {\n    return reflection.isInstance(item, Assignment);\n}\n\nexport interface CharacterRange extends AbstractElement {\n    readonly $type: 'CharacterRange';\n    left: Keyword;\n    right?: Keyword;\n}\n\nexport const CharacterRange = 'CharacterRange';\n\nexport function isCharacterRange(item: unknown): item is CharacterRange {\n    return reflection.isInstance(item, CharacterRange);\n}\n\nexport interface CrossReference extends AbstractElement {\n    readonly $type: 'CrossReference';\n    deprecatedSyntax: boolean;\n    terminal?: AbstractElement;\n    type: Reference;\n}\n\nexport const CrossReference = 'CrossReference';\n\nexport function isCrossReference(item: unknown): item is CrossReference {\n    return reflection.isInstance(item, CrossReference);\n}\n\nexport interface EndOfFile extends AbstractElement {\n    readonly $type: 'EndOfFile';\n}\n\nexport const EndOfFile = 'EndOfFile';\n\nexport function isEndOfFile(item: unknown): item is 
EndOfFile {\n    return reflection.isInstance(item, EndOfFile);\n}\n\nexport interface Group extends AbstractElement {\n    readonly $type: 'Group';\n    elements: Array;\n    guardCondition?: Condition;\n}\n\nexport const Group = 'Group';\n\nexport function isGroup(item: unknown): item is Group {\n    return reflection.isInstance(item, Group);\n}\n\nexport interface Keyword extends AbstractElement {\n    readonly $container: CharacterRange;\n    readonly $type: 'Keyword';\n    value: string;\n}\n\nexport const Keyword = 'Keyword';\n\nexport function isKeyword(item: unknown): item is Keyword {\n    return reflection.isInstance(item, Keyword);\n}\n\nexport interface NegatedToken extends AbstractElement {\n    readonly $type: 'NegatedToken';\n    terminal: AbstractElement;\n}\n\nexport const NegatedToken = 'NegatedToken';\n\nexport function isNegatedToken(item: unknown): item is NegatedToken {\n    return reflection.isInstance(item, NegatedToken);\n}\n\nexport interface RegexToken extends AbstractElement {\n    readonly $type: 'RegexToken';\n    regex: string;\n}\n\nexport const RegexToken = 'RegexToken';\n\nexport function isRegexToken(item: unknown): item is RegexToken {\n    return reflection.isInstance(item, RegexToken);\n}\n\nexport interface RuleCall extends AbstractElement {\n    readonly $type: 'RuleCall';\n    arguments: Array;\n    rule: Reference;\n}\n\nexport const RuleCall = 'RuleCall';\n\nexport function isRuleCall(item: unknown): item is RuleCall {\n    return reflection.isInstance(item, RuleCall);\n}\n\nexport interface TerminalAlternatives extends AbstractElement {\n    readonly $type: 'TerminalAlternatives';\n    elements: Array;\n}\n\nexport const TerminalAlternatives = 'TerminalAlternatives';\n\nexport function isTerminalAlternatives(item: unknown): item is TerminalAlternatives {\n    return reflection.isInstance(item, TerminalAlternatives);\n}\n\nexport interface TerminalGroup extends AbstractElement {\n    readonly $type: 'TerminalGroup';\n    elements: Array;\n}\n\nexport const TerminalGroup = 'TerminalGroup';\n\nexport function isTerminalGroup(item: unknown): item is TerminalGroup {\n    return reflection.isInstance(item, TerminalGroup);\n}\n\nexport interface TerminalRuleCall extends AbstractElement {\n    readonly $type: 'TerminalRuleCall';\n    rule: Reference;\n}\n\nexport const TerminalRuleCall = 'TerminalRuleCall';\n\nexport function isTerminalRuleCall(item: unknown): item is TerminalRuleCall {\n    return reflection.isInstance(item, TerminalRuleCall);\n}\n\nexport interface UnorderedGroup extends AbstractElement {\n    readonly $type: 'UnorderedGroup';\n    elements: Array;\n}\n\nexport const UnorderedGroup = 'UnorderedGroup';\n\nexport function isUnorderedGroup(item: unknown): item is UnorderedGroup {\n    return reflection.isInstance(item, UnorderedGroup);\n}\n\nexport interface UntilToken extends AbstractElement {\n    readonly $type: 'UntilToken';\n    terminal: AbstractElement;\n}\n\nexport const UntilToken = 'UntilToken';\n\nexport function isUntilToken(item: unknown): item is UntilToken {\n    return reflection.isInstance(item, UntilToken);\n}\n\nexport interface Wildcard extends AbstractElement {\n    readonly $type: 'Wildcard';\n}\n\nexport const Wildcard = 'Wildcard';\n\nexport function isWildcard(item: unknown): item is Wildcard {\n    return reflection.isInstance(item, Wildcard);\n}\n\nexport type LangiumGrammarAstType = {\n    AbstractElement: AbstractElement\n    AbstractRule: AbstractRule\n    AbstractType: AbstractType\n    Action: Action\n    
Alternatives: Alternatives\n    ArrayLiteral: ArrayLiteral\n    ArrayType: ArrayType\n    Assignment: Assignment\n    BooleanLiteral: BooleanLiteral\n    CharacterRange: CharacterRange\n    Condition: Condition\n    Conjunction: Conjunction\n    CrossReference: CrossReference\n    Disjunction: Disjunction\n    EndOfFile: EndOfFile\n    Grammar: Grammar\n    GrammarImport: GrammarImport\n    Group: Group\n    InferredType: InferredType\n    Interface: Interface\n    Keyword: Keyword\n    NamedArgument: NamedArgument\n    NegatedToken: NegatedToken\n    Negation: Negation\n    NumberLiteral: NumberLiteral\n    Parameter: Parameter\n    ParameterReference: ParameterReference\n    ParserRule: ParserRule\n    ReferenceType: ReferenceType\n    RegexToken: RegexToken\n    ReturnType: ReturnType\n    RuleCall: RuleCall\n    SimpleType: SimpleType\n    StringLiteral: StringLiteral\n    TerminalAlternatives: TerminalAlternatives\n    TerminalGroup: TerminalGroup\n    TerminalRule: TerminalRule\n    TerminalRuleCall: TerminalRuleCall\n    Type: Type\n    TypeAttribute: TypeAttribute\n    TypeDefinition: TypeDefinition\n    UnionType: UnionType\n    UnorderedGroup: UnorderedGroup\n    UntilToken: UntilToken\n    ValueLiteral: ValueLiteral\n    Wildcard: Wildcard\n}\n\nexport class LangiumGrammarAstReflection extends AbstractAstReflection {\n\n    getAllTypes(): string[] {\n        return ['AbstractElement', 'AbstractRule', 'AbstractType', 'Action', 'Alternatives', 'ArrayLiteral', 'ArrayType', 'Assignment', 'BooleanLiteral', 'CharacterRange', 'Condition', 'Conjunction', 'CrossReference', 'Disjunction', 'EndOfFile', 'Grammar', 'GrammarImport', 'Group', 'InferredType', 'Interface', 'Keyword', 'NamedArgument', 'NegatedToken', 'Negation', 'NumberLiteral', 'Parameter', 'ParameterReference', 'ParserRule', 'ReferenceType', 'RegexToken', 'ReturnType', 'RuleCall', 'SimpleType', 'StringLiteral', 'TerminalAlternatives', 'TerminalGroup', 'TerminalRule', 'TerminalRuleCall', 'Type', 'TypeAttribute', 'TypeDefinition', 'UnionType', 'UnorderedGroup', 'UntilToken', 'ValueLiteral', 'Wildcard'];\n    }\n\n    protected override computeIsSubtype(subtype: string, supertype: string): boolean {\n        switch (subtype) {\n            case Action:\n            case Alternatives:\n            case Assignment:\n            case CharacterRange:\n            case CrossReference:\n            case EndOfFile:\n            case Group:\n            case Keyword:\n            case NegatedToken:\n            case RegexToken:\n            case RuleCall:\n            case TerminalAlternatives:\n            case TerminalGroup:\n            case TerminalRuleCall:\n            case UnorderedGroup:\n            case UntilToken:\n            case Wildcard: {\n                return this.isSubtype(AbstractElement, supertype);\n            }\n            case ArrayLiteral:\n            case NumberLiteral:\n            case StringLiteral: {\n                return this.isSubtype(ValueLiteral, supertype);\n            }\n            case ArrayType:\n            case ReferenceType:\n            case SimpleType:\n            case UnionType: {\n                return this.isSubtype(TypeDefinition, supertype);\n            }\n            case BooleanLiteral: {\n                return this.isSubtype(Condition, supertype) || this.isSubtype(ValueLiteral, supertype);\n            }\n            case Conjunction:\n            case Disjunction:\n            case Negation:\n            case ParameterReference: {\n                return 
this.isSubtype(Condition, supertype);\n            }\n            case InferredType:\n            case Interface:\n            case Type: {\n                return this.isSubtype(AbstractType, supertype);\n            }\n            case ParserRule: {\n                return this.isSubtype(AbstractRule, supertype) || this.isSubtype(AbstractType, supertype);\n            }\n            case TerminalRule: {\n                return this.isSubtype(AbstractRule, supertype);\n            }\n            default: {\n                return false;\n            }\n        }\n    }\n\n    getReferenceType(refInfo: ReferenceInfo): string {\n        const referenceId = `${refInfo.container.$type}:${refInfo.property}`;\n        switch (referenceId) {\n            case 'Action:type':\n            case 'CrossReference:type':\n            case 'Interface:superTypes':\n            case 'ParserRule:returnType':\n            case 'SimpleType:typeRef': {\n                return AbstractType;\n            }\n            case 'Grammar:hiddenTokens':\n            case 'ParserRule:hiddenTokens':\n            case 'RuleCall:rule': {\n                return AbstractRule;\n            }\n            case 'Grammar:usedGrammars': {\n                return Grammar;\n            }\n            case 'NamedArgument:parameter':\n            case 'ParameterReference:parameter': {\n                return Parameter;\n            }\n            case 'TerminalRuleCall:rule': {\n                return TerminalRule;\n            }\n            default: {\n                throw new Error(`${referenceId} is not a valid reference id.`);\n            }\n        }\n    }\n\n    getTypeMetaData(type: string): TypeMetaData {\n        switch (type) {\n            case 'AbstractElement': {\n                return {\n                    name: 'AbstractElement',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'ArrayLiteral': {\n                return {\n                    name: 'ArrayLiteral',\n                    properties: [\n                        { name: 'elements', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'ArrayType': {\n                return {\n                    name: 'ArrayType',\n                    properties: [\n                        { name: 'elementType' }\n                    ]\n                };\n            }\n            case 'BooleanLiteral': {\n                return {\n                    name: 'BooleanLiteral',\n                    properties: [\n                        { name: 'true', defaultValue: false }\n                    ]\n                };\n            }\n            case 'Conjunction': {\n                return {\n                    name: 'Conjunction',\n                    properties: [\n                        { name: 'left' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'Disjunction': {\n                return {\n                    name: 'Disjunction',\n                    properties: [\n                        { name: 'left' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'Grammar': {\n                return {\n                    name: 'Grammar',\n                    properties: [\n                        { name: 'definesHiddenTokens', 
defaultValue: false },\n                        { name: 'hiddenTokens', defaultValue: [] },\n                        { name: 'imports', defaultValue: [] },\n                        { name: 'interfaces', defaultValue: [] },\n                        { name: 'isDeclared', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'rules', defaultValue: [] },\n                        { name: 'types', defaultValue: [] },\n                        { name: 'usedGrammars', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'GrammarImport': {\n                return {\n                    name: 'GrammarImport',\n                    properties: [\n                        { name: 'path' }\n                    ]\n                };\n            }\n            case 'InferredType': {\n                return {\n                    name: 'InferredType',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'Interface': {\n                return {\n                    name: 'Interface',\n                    properties: [\n                        { name: 'attributes', defaultValue: [] },\n                        { name: 'name' },\n                        { name: 'superTypes', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'NamedArgument': {\n                return {\n                    name: 'NamedArgument',\n                    properties: [\n                        { name: 'calledByName', defaultValue: false },\n                        { name: 'parameter' },\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'Negation': {\n                return {\n                    name: 'Negation',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'NumberLiteral': {\n                return {\n                    name: 'NumberLiteral',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'Parameter': {\n                return {\n                    name: 'Parameter',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'ParameterReference': {\n                return {\n                    name: 'ParameterReference',\n                    properties: [\n                        { name: 'parameter' }\n                    ]\n                };\n            }\n            case 'ParserRule': {\n                return {\n                    name: 'ParserRule',\n                    properties: [\n                        { name: 'dataType' },\n                        { name: 'definesHiddenTokens', defaultValue: false },\n                        { name: 'definition' },\n                        { name: 'entry', defaultValue: false },\n                        { name: 'fragment', defaultValue: false },\n                        { name: 'hiddenTokens', defaultValue: [] },\n                        { name: 'inferredType' },\n                        { name: 'name' },\n                        { name: 'parameters', defaultValue: [] },\n                        { name: 'returnType' },\n                        { name: 'wildcard', defaultValue: false 
}\n                    ]\n                };\n            }\n            case 'ReferenceType': {\n                return {\n                    name: 'ReferenceType',\n                    properties: [\n                        { name: 'referenceType' }\n                    ]\n                };\n            }\n            case 'ReturnType': {\n                return {\n                    name: 'ReturnType',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'SimpleType': {\n                return {\n                    name: 'SimpleType',\n                    properties: [\n                        { name: 'primitiveType' },\n                        { name: 'stringType' },\n                        { name: 'typeRef' }\n                    ]\n                };\n            }\n            case 'StringLiteral': {\n                return {\n                    name: 'StringLiteral',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'TerminalRule': {\n                return {\n                    name: 'TerminalRule',\n                    properties: [\n                        { name: 'definition' },\n                        { name: 'fragment', defaultValue: false },\n                        { name: 'hidden', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'Type': {\n                return {\n                    name: 'Type',\n                    properties: [\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'TypeAttribute': {\n                return {\n                    name: 'TypeAttribute',\n                    properties: [\n                        { name: 'defaultValue' },\n                        { name: 'isOptional', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'UnionType': {\n                return {\n                    name: 'UnionType',\n                    properties: [\n                        { name: 'types', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'Action': {\n                return {\n                    name: 'Action',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'feature' },\n                        { name: 'inferredType' },\n                        { name: 'lookahead' },\n                        { name: 'operator' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'Alternatives': {\n                return {\n                    name: 'Alternatives',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Assignment': {\n                return {\n                    name: 'Assignment',\n                    properties: [\n                        { name: 'cardinality' },\n                
        { name: 'feature' },\n                        { name: 'lookahead' },\n                        { name: 'operator' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'CharacterRange': {\n                return {\n                    name: 'CharacterRange',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'left' },\n                        { name: 'lookahead' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'CrossReference': {\n                return {\n                    name: 'CrossReference',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'deprecatedSyntax', defaultValue: false },\n                        { name: 'lookahead' },\n                        { name: 'terminal' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'EndOfFile': {\n                return {\n                    name: 'EndOfFile',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Group': {\n                return {\n                    name: 'Group',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'guardCondition' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Keyword': {\n                return {\n                    name: 'Keyword',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'NegatedToken': {\n                return {\n                    name: 'NegatedToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'RegexToken': {\n                return {\n                    name: 'RegexToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'regex' }\n                    ]\n                };\n            }\n            case 'RuleCall': {\n                return {\n                    name: 'RuleCall',\n                    properties: [\n                        { name: 'arguments', defaultValue: [] },\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'rule' }\n                    ]\n                };\n            }\n            case 'TerminalAlternatives': {\n                return {\n                    name: 'TerminalAlternatives',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n         
   case 'TerminalGroup': {\n                return {\n                    name: 'TerminalGroup',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'TerminalRuleCall': {\n                return {\n                    name: 'TerminalRuleCall',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'rule' }\n                    ]\n                };\n            }\n            case 'UnorderedGroup': {\n                return {\n                    name: 'UnorderedGroup',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'UntilToken': {\n                return {\n                    name: 'UntilToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'Wildcard': {\n                return {\n                    name: 'Wildcard',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            default: {\n                return {\n                    name: type,\n                    properties: []\n                };\n            }\n        }\n    }\n}\n\nexport const reflection = new LangiumGrammarAstReflection();\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Range } from 'vscode-languageserver-types';\nimport type { AstNode, AstReflection, CstNode, GenericAstNode, Mutable, PropertyType, Reference, ReferenceInfo } from '../syntax-tree.js';\nimport type { Stream, TreeStream } from './stream.js';\nimport type { LangiumDocument } from '../workspace/documents.js';\nimport { isAstNode, isReference } from '../syntax-tree.js';\nimport { DONE_RESULT, stream, StreamImpl, TreeStreamImpl } from './stream.js';\nimport { inRange } from './cst-utils.js';\n\n/**\n * Link the `$container` and other related properties of every AST node that is directly contained\n * in the given `node`.\n */\nexport function linkContentToContainer(node: AstNode): void {\n    for (const [name, value] of Object.entries(node)) {\n        if (!name.startsWith('$')) {\n            if (Array.isArray(value)) {\n                value.forEach((item, index) => {\n                    if (isAstNode(item)) {\n                        (item as Mutable).$container = node;\n                        (item as Mutable).$containerProperty = name;\n                        (item as Mutable).$containerIndex = index;\n                    }\n                });\n            } else if (isAstNode(value)) {\n                (value as Mutable).$container = node;\n 
               (value as Mutable).$containerProperty = name;\n            }\n        }\n    }\n}\n\n/**\n * Walk along the hierarchy of containers from the given AST node to the root and return the first\n * node that matches the type predicate. If the start node itself matches, it is returned.\n * If no container matches, `undefined` is returned.\n */\nexport function getContainerOfType(node: AstNode | undefined, typePredicate: (n: AstNode) => n is T): T | undefined {\n    let item = node;\n    while (item) {\n        if (typePredicate(item)) {\n            return item;\n        }\n        item = item.$container;\n    }\n    return undefined;\n}\n\n/**\n * Walk along the hierarchy of containers from the given AST node to the root and check for existence\n * of a container that matches the given predicate. The start node is included in the checks.\n */\nexport function hasContainerOfType(node: AstNode | undefined, predicate: (n: AstNode) => boolean): boolean {\n    let item = node;\n    while (item) {\n        if (predicate(item)) {\n            return true;\n        }\n        item = item.$container;\n    }\n    return false;\n}\n\n/**\n * Retrieve the document in which the given AST node is contained. A reference to the document is\n * usually held by the root node of the AST.\n *\n * @throws an error if the node is not contained in a document.\n */\nexport function getDocument(node: AstNode): LangiumDocument {\n    const rootNode = findRootNode(node);\n    const result = rootNode.$document;\n    if (!result) {\n        throw new Error('AST node has no document.');\n    }\n    return result as LangiumDocument;\n}\n\n/**\n * Returns the root node of the given AST node by following the `$container` references.\n */\nexport function findRootNode(node: AstNode): AstNode {\n    while (node.$container) {\n        node = node.$container;\n    }\n    return node;\n}\n\nexport interface AstStreamOptions {\n    /**\n     * Optional target range that the nodes in the stream need to intersect\n     */\n    range?: Range\n}\n\n/**\n * Create a stream of all AST nodes that are directly contained in the given node. 
This includes\n * single-valued as well as multi-valued (array) properties.\n */\nexport function streamContents(node: AstNode, options?: AstStreamOptions): Stream {\n    if (!node) {\n        throw new Error('Node must be an AstNode.');\n    }\n    const range = options?.range;\n    type State = { keys: string[], keyIndex: number, arrayIndex: number };\n    return new StreamImpl(() => ({\n        keys: Object.keys(node),\n        keyIndex: 0,\n        arrayIndex: 0\n    }), state => {\n        while (state.keyIndex < state.keys.length) {\n            const property = state.keys[state.keyIndex];\n            if (!property.startsWith('$')) {\n                const value = (node as GenericAstNode)[property];\n                if (isAstNode(value)) {\n                    state.keyIndex++;\n                    if (isAstNodeInRange(value, range)) {\n                        return { done: false, value };\n                    }\n                } else if (Array.isArray(value)) {\n                    while (state.arrayIndex < value.length) {\n                        const index = state.arrayIndex++;\n                        const element = value[index];\n                        if (isAstNode(element) && isAstNodeInRange(element, range)) {\n                            return { done: false, value: element };\n                        }\n                    }\n                    state.arrayIndex = 0;\n                }\n            }\n            state.keyIndex++;\n        }\n        return DONE_RESULT;\n    });\n}\n\n/**\n * Create a stream of all AST nodes that are directly and indirectly contained in the given root node.\n * This does not include the root node itself.\n */\nexport function streamAllContents(root: AstNode, options?: AstStreamOptions): TreeStream {\n    if (!root) {\n        throw new Error('Root node must be an AstNode.');\n    }\n    return new TreeStreamImpl(root, node => streamContents(node, options));\n}\n\n/**\n * Create a stream of all AST nodes that are directly and indirectly contained in the given root node,\n * including the root node itself.\n */\nexport function streamAst(root: AstNode, options?: AstStreamOptions): TreeStream {\n    if (!root) {\n        throw new Error('Root node must be an AstNode.');\n    } else if (options?.range && !isAstNodeInRange(root, options.range)) {\n        // Return an empty stream if the root node isn't in range\n        return new TreeStreamImpl(root, () => []);\n    }\n    return new TreeStreamImpl(root, node => streamContents(node, options), { includeRoot: true });\n}\n\nfunction isAstNodeInRange(astNode: AstNode, range?: Range): boolean {\n    if (!range) {\n        return true;\n    }\n    const nodeRange = astNode.$cstNode?.range;\n    if (!nodeRange) {\n        return false;\n    }\n    return inRange(nodeRange, range);\n}\n\n/**\n * Create a stream of all cross-references that are held by the given AST node. 
This includes\n * single-valued as well as multi-valued (array) properties.\n */\nexport function streamReferences(node: AstNode): Stream {\n    type State = { keys: string[], keyIndex: number, arrayIndex: number };\n    return new StreamImpl(() => ({\n        keys: Object.keys(node),\n        keyIndex: 0,\n        arrayIndex: 0\n    }), state => {\n        while (state.keyIndex < state.keys.length) {\n            const property = state.keys[state.keyIndex];\n            if (!property.startsWith('$')) {\n                const value = (node as GenericAstNode)[property];\n                if (isReference(value)) {\n                    state.keyIndex++;\n                    return { done: false, value: { reference: value, container: node, property } };\n                } else if (Array.isArray(value)) {\n                    while (state.arrayIndex < value.length) {\n                        const index = state.arrayIndex++;\n                        const element = value[index];\n                        if (isReference(element)) {\n                            return { done: false, value: { reference: element, container: node, property, index } };\n                        }\n                    }\n                    state.arrayIndex = 0;\n                }\n            }\n            state.keyIndex++;\n        }\n        return DONE_RESULT;\n    });\n}\n\n/**\n * Returns a Stream of references to the target node from the AstNode tree\n *\n * @param targetNode AstNode we are looking for\n * @param lookup AstNode where we search for references. If not provided, the root node of the document is used as the default value\n */\nexport function findLocalReferences(targetNode: AstNode, lookup = getDocument(targetNode).parseResult.value): Stream {\n    const refs: Reference[] = [];\n    streamAst(lookup).forEach(node => {\n        streamReferences(node).forEach(refInfo => {\n            if (refInfo.reference.ref === targetNode) {\n                refs.push(refInfo.reference);\n            }\n        });\n    });\n    return stream(refs);\n}\n\n/**\n * Assigns all mandatory AST properties to the specified node.\n *\n * @param reflection Reflection object used to gather mandatory properties for the node.\n * @param node Specified node is modified in place and properties are directly assigned.\n */\nexport function assignMandatoryProperties(reflection: AstReflection, node: AstNode): void {\n    const typeMetaData = reflection.getTypeMetaData(node.$type);\n    const genericNode = node as GenericAstNode;\n    for (const property of typeMetaData.properties) {\n        // Only set the value if the property is not already set and if it has a default value\n        if (property.defaultValue !== undefined && genericNode[property.name] === undefined) {\n            genericNode[property.name] = copyDefaultValue(property.defaultValue);\n        }\n    }\n}\n\nfunction copyDefaultValue(propertyType: PropertyType): PropertyType {\n    if (Array.isArray(propertyType)) {\n        return [...propertyType.map(copyDefaultValue)];\n    } else {\n        return propertyType;\n    }\n}\n\n/**\n * Creates a deep copy of the specified AST node.\n * The resulting copy will only contain semantically relevant information, such as the `$type` property and AST properties.\n *\n * References are copied without resolved cross reference. 
The specified function is used to rebuild them.\n */\nexport function copyAstNode(node: T, buildReference: (node: AstNode, property: string, refNode: CstNode | undefined, refText: string) => Reference): T {\n    const copy: GenericAstNode = { $type: node.$type };\n\n    for (const [name, value] of Object.entries(node)) {\n        if (!name.startsWith('$')) {\n            if (isAstNode(value)) {\n                copy[name] = copyAstNode(value, buildReference);\n            } else if (isReference(value)) {\n                copy[name] = buildReference(\n                    copy,\n                    name,\n                    value.$refNode,\n                    value.$refText\n                );\n            } else if (Array.isArray(value)) {\n                const copiedArray: unknown[] = [];\n                for (const element of value) {\n                    if (isAstNode(element)) {\n                        copiedArray.push(copyAstNode(element, buildReference));\n                    } else if (isReference(element)) {\n                        copiedArray.push(\n                            buildReference(\n                                copy,\n                                name,\n                                element.$refNode,\n                                element.$refText\n                            )\n                        );\n                    } else {\n                        copiedArray.push(element);\n                    }\n                }\n                copy[name] = copiedArray;\n            } else {\n                copy[name] = value;\n            }\n        }\n    }\n\n    linkContentToContainer(copy);\n    return copy as unknown as T;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Set, Group, Character, IRegExpAST } from '@chevrotain/regexp-to-ast';\nimport { RegExpParser, BaseRegExpVisitor } from '@chevrotain/regexp-to-ast';\n\nexport const NEWLINE_REGEXP = /\\r?\\n/gm;\n\nconst regexpParser = new RegExpParser();\n\n/**\n * This class is in charge of heuristically identifying start/end tokens of terminals.\n *\n * The way this works is by doing the following:\n * 1. Traverse the regular expression in the \"start state\"\n * 2. Add any encountered sets/single characters to the \"start regexp\"\n * 3. Once we encounter any variable-length content (i.e. with quantifiers such as +/?/*), we enter the \"end state\"\n * 4. In the end state, any sets/single characters are added to an \"end stack\".\n * 5. If we re-encounter any variable-length content we reset the end stack\n * 6. 
We continue visiting the regex until the end, reseting the end stack and rebuilding it as necessary\n *\n * After traversing a regular expression the `startRegexp/endRegexp` properties allow access to the stored start/end of the terminal\n */\nclass TerminalRegExpVisitor extends BaseRegExpVisitor {\n\n    private isStarting = true;\n    startRegexp: string;\n    private endRegexpStack: string[] = [];\n    multiline = false;\n    regex: string;\n\n    get endRegex(): string {\n        return this.endRegexpStack.join('');\n    }\n\n    reset(regex: string): void {\n        this.multiline = false;\n        this.regex = regex;\n        this.startRegexp = '';\n        this.isStarting = true;\n        this.endRegexpStack = [];\n    }\n\n    override visitGroup(node: Group) {\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        }\n    }\n\n    override visitCharacter(node: Character): void {\n        const char = String.fromCharCode(node.value);\n        if (!this.multiline && char === '\\n') {\n            this.multiline = true;\n        }\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        } else {\n            const escapedChar = escapeRegExp(char);\n            this.endRegexpStack.push(escapedChar);\n            if (this.isStarting) {\n                this.startRegexp += escapedChar;\n            }\n        }\n    }\n\n    override visitSet(node: Set): void {\n        if (!this.multiline) {\n            const set = this.regex.substring(node.loc.begin, node.loc.end);\n            const regex = new RegExp(set);\n            this.multiline = Boolean('\\n'.match(regex));\n        }\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        } else {\n            const set = this.regex.substring(node.loc.begin, node.loc.end);\n            this.endRegexpStack.push(set);\n            if (this.isStarting) {\n                this.startRegexp += set;\n            }\n        }\n    }\n\n    override visitChildren(node: IRegExpAST): void {\n        if (node.type === 'Group') {\n            // Ignore children of groups with quantifier (+/*/?)\n            // These groups are unrelated to start/end tokens of terminals\n            const group = node as Group;\n            if (group.quantifier) {\n                return;\n            }\n        }\n        super.visitChildren(node);\n    }\n}\n\nconst visitor = new TerminalRegExpVisitor();\n\nexport function getTerminalParts(regexp: RegExp | string): Array<{ start: string, end: string }> {\n    try {\n        if (typeof regexp !== 'string') {\n            regexp = regexp.source;\n        }\n        regexp = `/${regexp}/`;\n        const pattern = regexpParser.pattern(regexp);\n        const parts: Array<{ start: string, end: string }> = [];\n        for (const alternative of pattern.value.value) {\n            visitor.reset(regexp);\n            visitor.visit(alternative);\n            parts.push({\n                start: visitor.startRegexp,\n                end: visitor.endRegex\n            });\n        }\n        return parts;\n    } catch {\n        return [];\n    }\n}\n\nexport function isMultilineComment(regexp: RegExp | string): boolean {\n    try {\n        if (typeof regexp === 'string') {\n            regexp = new RegExp(regexp);\n        }\n        regexp = regexp.toString();\n        visitor.reset(regexp);\n        // Parsing the pattern might fail (since 
it's user code)\n        visitor.visit(regexpParser.pattern(regexp));\n        return visitor.multiline;\n    } catch {\n        return false;\n    }\n}\n\nexport function isWhitespace(value: RegExp | string): boolean {\n    const regexp = typeof value === 'string' ? new RegExp(value) : value;\n    return regexp.test(' ');\n}\n\nexport function escapeRegExp(value: string): string {\n    return value.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n\nexport function getCaseInsensitivePattern(keyword: string): string {\n    return Array.prototype.map.call(keyword, letter =>\n        /\\w/.test(letter) ? `[${letter.toLowerCase()}${letter.toUpperCase()}]` : escapeRegExp(letter)\n    ).join('');\n}\n\n/**\n * Determines whether the given input has a partial match with the specified regex.\n * @param regex The regex to partially match against\n * @param input The input string\n * @returns Whether any match exists.\n */\nexport function partialMatches(regex: RegExp | string, input: string): boolean {\n    const partial = partialRegExp(regex);\n    const match = input.match(partial);\n    return !!match && match[0].length > 0;\n}\n\n/**\n * Builds a partial regex from the input regex. A partial regex is able to match incomplete input strings. E.g.\n * a partial regex constructed from `/ab/` is able to match the string `a` without needing a following `b` character. However it won't match `b` alone.\n * @param regex The input regex to be converted.\n * @returns A partial regex constructed from the input regex.\n */\nexport function partialRegExp(regex: RegExp | string): RegExp {\n    if (typeof regex === 'string') {\n        regex = new RegExp(regex);\n    }\n    const re = regex, source = regex.source;\n    let i = 0;\n\n    function process() {\n        let result = '',\n            tmp;\n\n        function appendRaw(nbChars: number) {\n            result += source.substr(i, nbChars);\n            i += nbChars;\n        }\n\n        function appendOptional(nbChars: number) {\n            result += '(?:' + source.substr(i, nbChars) + '|$)';\n            i += nbChars;\n        }\n\n        while (i < source.length) {\n            switch (source[i]) {\n                case '\\\\':\n                    switch (source[i + 1]) {\n                        case 'c':\n                            appendOptional(3);\n                            break;\n                        case 'x':\n                            appendOptional(4);\n                            break;\n                        case 'u':\n                            if (re.unicode) {\n                                if (source[i + 2] === '{') {\n                                    appendOptional(source.indexOf('}', i) - i + 1);\n                                } else {\n                                    appendOptional(6);\n                                }\n                            } else {\n                                appendOptional(2);\n                            }\n                            break;\n                        case 'p':\n                        case 'P':\n                            if (re.unicode) {\n                                appendOptional(source.indexOf('}', i) - i + 1);\n                            } else {\n                                appendOptional(2);\n                            }\n                            break;\n                        case 'k':\n                            appendOptional(source.indexOf('>', i) - i + 1);\n                            break;\n                        default:\n     
                       appendOptional(2);\n                            break;\n                    }\n                    break;\n\n                case '[':\n                    tmp = /\\[(?:\\\\.|.)*?\\]/g;\n                    tmp.lastIndex = i;\n                    tmp = tmp.exec(source) || [];\n                    appendOptional(tmp[0].length);\n                    break;\n\n                case '|':\n                case '^':\n                case '$':\n                case '*':\n                case '+':\n                case '?':\n                    appendRaw(1);\n                    break;\n                case '{':\n                    tmp = /\\{\\d+,?\\d*\\}/g;\n                    tmp.lastIndex = i;\n                    tmp = tmp.exec(source);\n                    if (tmp) {\n                        appendRaw(tmp[0].length);\n                    } else {\n                        appendOptional(1);\n                    }\n                    break;\n                case '(':\n                    if (source[i + 1] === '?') {\n                        switch (source[i + 2]) {\n                            case ':':\n                                result += '(?:';\n                                i += 3;\n                                result += process() + '|$)';\n                                break;\n                            case '=':\n                                result += '(?=';\n                                i += 3;\n                                result += process() + ')';\n                                break;\n                            case '!':\n                                tmp = i;\n                                i += 3;\n                                process();\n                                result += source.substr(tmp, i - tmp);\n                                break;\n                            case '<':\n                                switch (source[i + 3]) {\n                                    case '=':\n                                    case '!':\n                                        tmp = i;\n                                        i += 4;\n                                        process();\n                                        result += source.substr(tmp, i - tmp);\n                                        break;\n                                    default:\n                                        appendRaw(source.indexOf('>', i) - i + 1);\n                                        result += process() + '|$)';\n                                        break;\n                                }\n                                break;\n                        }\n                    } else {\n                        appendRaw(1);\n                        result += process() + '|$)';\n                    }\n                    break;\n                case ')':\n                    ++i;\n                    return result;\n                default:\n                    appendOptional(1);\n                    break;\n            }\n        }\n\n        return result;\n    }\n\n    return new RegExp(process(), regex.flags);\n}\n", "import type { Character, IRegExpAST, RegExpFlags } from \"../types\";\n\nexport function cc(char: string): number {\n  return char.charCodeAt(0);\n}\n\nexport function insertToSet(item: T | T[], set: T[]) {\n  if (Array.isArray(item)) {\n    item.forEach(function (subItem) {\n      set.push(subItem);\n    });\n  } else {\n    set.push(item);\n  }\n}\n\nexport function addFlag(\n  flagObj: RegExpFlags,\n  flagKey: 
keyof Omit,\n) {\n  if (flagObj[flagKey] === true) {\n    throw \"duplicate flag \" + flagKey;\n  }\n\n  const x: boolean = flagObj[flagKey];\n  flagObj[flagKey] = true;\n}\n\nexport function ASSERT_EXISTS(obj: any): obj is T {\n  // istanbul ignore next\n  if (obj === undefined) {\n    throw Error(\"Internal Error - Should never get here!\");\n  }\n  return true;\n}\n\n// istanbul ignore next\nexport function ASSERT_NEVER_REACH_HERE(): any {\n  throw Error(\"Internal Error - Should never get here!\");\n}\n\nexport function isCharacter(obj: { type: string }): obj is Character {\n  return obj[\"type\"] === \"Character\";\n}\n", "import { cc } from \"./utils.js\";\n\nexport const digitsCharCodes: number[] = [];\nfor (let i = cc(\"0\"); i <= cc(\"9\"); i++) {\n  digitsCharCodes.push(i);\n}\n\nexport const wordCharCodes: number[] = [cc(\"_\")].concat(digitsCharCodes);\nfor (let i = cc(\"a\"); i <= cc(\"z\"); i++) {\n  wordCharCodes.push(i);\n}\n\nfor (let i = cc(\"A\"); i <= cc(\"Z\"); i++) {\n  wordCharCodes.push(i);\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#character-classes\nexport const whitespaceCodes: number[] = [\n  cc(\" \"),\n  cc(\"\\f\"),\n  cc(\"\\n\"),\n  cc(\"\\r\"),\n  cc(\"\\t\"),\n  cc(\"\\v\"),\n  cc(\"\\t\"),\n  cc(\"\\u00a0\"),\n  cc(\"\\u1680\"),\n  cc(\"\\u2000\"),\n  cc(\"\\u2001\"),\n  cc(\"\\u2002\"),\n  cc(\"\\u2003\"),\n  cc(\"\\u2004\"),\n  cc(\"\\u2005\"),\n  cc(\"\\u2006\"),\n  cc(\"\\u2007\"),\n  cc(\"\\u2008\"),\n  cc(\"\\u2009\"),\n  cc(\"\\u200a\"),\n  cc(\"\\u2028\"),\n  cc(\"\\u2029\"),\n  cc(\"\\u202f\"),\n  cc(\"\\u205f\"),\n  cc(\"\\u3000\"),\n  cc(\"\\ufeff\"),\n];\n", "import type {\n  Alternative,\n  Assertion,\n  Atom,\n  Character,\n  Disjunction,\n  Group,\n  GroupBackReference,\n  Location,\n  Quantifier,\n  Range,\n  RegExpFlags,\n  RegExpPattern,\n  Set,\n  Term,\n} from \"../types\";\nimport {\n  addFlag,\n  ASSERT_EXISTS,\n  ASSERT_NEVER_REACH_HERE,\n  cc,\n  insertToSet,\n  isCharacter,\n} from \"./utils.js\";\nimport {\n  digitsCharCodes,\n  whitespaceCodes,\n  wordCharCodes,\n} from \"./character-classes.js\";\n\n// consts and utilities\nconst hexDigitPattern = /[0-9a-fA-F]/;\nconst decimalPattern = /[0-9]/;\nconst decimalPatternNoZero = /[1-9]/;\n\n// https://hackernoon.com/the-madness-of-parsing-real-world-javascript-regexps-d9ee336df983\n// https://www.ecma-international.org/ecma-262/8.0/index.html#prod-Pattern\nexport class RegExpParser {\n  protected idx: number = 0;\n  protected input: string = \"\";\n  protected groupIdx: number = 0;\n\n  protected saveState() {\n    return {\n      idx: this.idx,\n      input: this.input,\n      groupIdx: this.groupIdx,\n    };\n  }\n\n  protected restoreState(newState: {\n    idx: number;\n    input: string;\n    groupIdx: number;\n  }) {\n    this.idx = newState.idx;\n    this.input = newState.input;\n    this.groupIdx = newState.groupIdx;\n  }\n\n  public pattern(input: string): RegExpPattern {\n    // parser state\n    this.idx = 0;\n    this.input = input;\n    this.groupIdx = 0;\n\n    this.consumeChar(\"/\");\n    const value = this.disjunction();\n    this.consumeChar(\"/\");\n\n    const flags: RegExpFlags = {\n      type: \"Flags\",\n      loc: { begin: this.idx, end: input.length },\n      global: false,\n      ignoreCase: false,\n      multiLine: false,\n      unicode: false,\n      sticky: false,\n    };\n\n    while (this.isRegExpFlag()) {\n      switch (this.popChar()) {\n        case \"g\":\n          addFlag(flags, \"global\");\n 
         break;\n        case \"i\":\n          addFlag(flags, \"ignoreCase\");\n          break;\n        case \"m\":\n          addFlag(flags, \"multiLine\");\n          break;\n        case \"u\":\n          addFlag(flags, \"unicode\");\n          break;\n        case \"y\":\n          addFlag(flags, \"sticky\");\n          break;\n      }\n    }\n\n    if (this.idx !== this.input.length) {\n      throw Error(\"Redundant input: \" + this.input.substring(this.idx));\n    }\n    return {\n      type: \"Pattern\",\n      flags: flags,\n      value: value,\n      loc: this.loc(0),\n    };\n  }\n\n  protected disjunction(): Disjunction {\n    const alts = [];\n    const begin = this.idx;\n\n    alts.push(this.alternative());\n\n    while (this.peekChar() === \"|\") {\n      this.consumeChar(\"|\");\n      alts.push(this.alternative());\n    }\n\n    return { type: \"Disjunction\", value: alts, loc: this.loc(begin) };\n  }\n\n  protected alternative(): Alternative {\n    const terms = [];\n    const begin = this.idx;\n\n    while (this.isTerm()) {\n      terms.push(this.term());\n    }\n\n    return { type: \"Alternative\", value: terms, loc: this.loc(begin) };\n  }\n\n  protected term(): Term {\n    if (this.isAssertion()) {\n      return this.assertion();\n    } else {\n      return this.atom();\n    }\n  }\n\n  protected assertion(): Assertion {\n    const begin = this.idx;\n    switch (this.popChar()) {\n      case \"^\":\n        return {\n          type: \"StartAnchor\",\n          loc: this.loc(begin),\n        };\n      case \"$\":\n        return { type: \"EndAnchor\", loc: this.loc(begin) };\n      // '\\b' or '\\B'\n      case \"\\\\\":\n        switch (this.popChar()) {\n          case \"b\":\n            return {\n              type: \"WordBoundary\",\n              loc: this.loc(begin),\n            };\n          case \"B\":\n            return {\n              type: \"NonWordBoundary\",\n              loc: this.loc(begin),\n            };\n        }\n        // istanbul ignore next\n        throw Error(\"Invalid Assertion Escape\");\n      // '(?=' or '(?!'\n      case \"(\":\n        this.consumeChar(\"?\");\n\n        let type: \"Lookahead\" | \"NegativeLookahead\" | undefined;\n        switch (this.popChar()) {\n          case \"=\":\n            type = \"Lookahead\";\n            break;\n          case \"!\":\n            type = \"NegativeLookahead\";\n            break;\n        }\n        ASSERT_EXISTS(type);\n\n        const disjunction = this.disjunction();\n\n        this.consumeChar(\")\");\n\n        return {\n          type: type!,\n          value: disjunction,\n          loc: this.loc(begin),\n        };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected quantifier(\n    isBacktracking: boolean = false,\n  ): Quantifier | undefined {\n    let range: Partial | undefined = undefined;\n    const begin = this.idx;\n    switch (this.popChar()) {\n      case \"*\":\n        range = {\n          atLeast: 0,\n          atMost: Infinity,\n        };\n        break;\n      case \"+\":\n        range = {\n          atLeast: 1,\n          atMost: Infinity,\n        };\n        break;\n      case \"?\":\n        range = {\n          atLeast: 0,\n          atMost: 1,\n        };\n        break;\n      case \"{\":\n        const atLeast = this.integerIncludingZero();\n        switch (this.popChar()) {\n          case \"}\":\n            range = {\n              atLeast: atLeast,\n              atMost: atLeast,\n            };\n         
   break;\n          case \",\":\n            let atMost;\n            if (this.isDigit()) {\n              atMost = this.integerIncludingZero();\n              range = {\n                atLeast: atLeast,\n                atMost: atMost,\n              };\n            } else {\n              range = {\n                atLeast: atLeast,\n                atMost: Infinity,\n              };\n            }\n            this.consumeChar(\"}\");\n            break;\n        }\n        // throwing exceptions from \"ASSERT_EXISTS\" during backtracking\n        // causes severe performance degradations\n        if (isBacktracking === true && range === undefined) {\n          return undefined;\n        }\n        ASSERT_EXISTS(range);\n        break;\n    }\n\n    // throwing exceptions from \"ASSERT_EXISTS\" during backtracking\n    // causes severe performance degradations\n    if (isBacktracking === true && range === undefined) {\n      return undefined;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(range)) {\n      if (this.peekChar(0) === \"?\") {\n        this.consumeChar(\"?\");\n        range.greedy = false;\n      } else {\n        range.greedy = true;\n      }\n\n      range.type = \"Quantifier\";\n      range.loc = this.loc(begin);\n      return range as Quantifier;\n    }\n  }\n\n  protected atom(): Atom {\n    let atom: Omit | undefined;\n    const begin = this.idx;\n    switch (this.peekChar()) {\n      case \".\":\n        atom = this.dotAll();\n        break;\n      case \"\\\\\":\n        atom = this.atomEscape();\n        break;\n      case \"[\":\n        atom = this.characterClass();\n        break;\n      case \"(\":\n        atom = this.group();\n        break;\n    }\n\n    if (atom === undefined && this.isPatternCharacter()) {\n      atom = this.patternCharacter();\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(atom)) {\n      atom.loc = this.loc(begin);\n\n      if (this.isQuantifier()) {\n        atom.quantifier = this.quantifier();\n      }\n\n      return atom;\n    }\n\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected dotAll(): Omit {\n    this.consumeChar(\".\");\n    return {\n      type: \"Set\",\n      complement: true,\n      value: [cc(\"\\n\"), cc(\"\\r\"), cc(\"\\u2028\"), cc(\"\\u2029\")],\n    };\n  }\n\n  protected atomEscape(): Omit {\n    this.consumeChar(\"\\\\\");\n\n    switch (this.peekChar()) {\n      case \"1\":\n      case \"2\":\n      case \"3\":\n      case \"4\":\n      case \"5\":\n      case \"6\":\n      case \"7\":\n      case \"8\":\n      case \"9\":\n        return this.decimalEscapeAtom();\n      case \"d\":\n      case \"D\":\n      case \"s\":\n      case \"S\":\n      case \"w\":\n      case \"W\":\n        return this.characterClassEscape();\n      case \"f\":\n      case \"n\":\n      case \"r\":\n      case \"t\":\n      case \"v\":\n        return this.controlEscapeAtom();\n      case \"c\":\n        return this.controlLetterEscapeAtom();\n      case \"0\":\n        return this.nulCharacterAtom();\n      case \"x\":\n        return this.hexEscapeSequenceAtom();\n      case \"u\":\n        return this.regExpUnicodeEscapeSequenceAtom();\n      default:\n        return this.identityEscapeAtom();\n    }\n  }\n\n  protected decimalEscapeAtom(): Omit {\n    const value = this.positiveInteger();\n\n    return { type: \"GroupBackReference\", value: value };\n  }\n\n  protected characterClassEscape(): Omit {\n    let set: (number | Range)[] | undefined;\n    let complement = 
false;\n    switch (this.popChar()) {\n      case \"d\":\n        set = digitsCharCodes;\n        break;\n      case \"D\":\n        set = digitsCharCodes;\n        complement = true;\n        break;\n      case \"s\":\n        set = whitespaceCodes;\n        break;\n      case \"S\":\n        set = whitespaceCodes;\n        complement = true;\n        break;\n      case \"w\":\n        set = wordCharCodes;\n        break;\n      case \"W\":\n        set = wordCharCodes;\n        complement = true;\n        break;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(set)) {\n      return { type: \"Set\", value: set, complement: complement };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected controlEscapeAtom(): Omit {\n    let escapeCode;\n    switch (this.popChar()) {\n      case \"f\":\n        escapeCode = cc(\"\\f\");\n        break;\n      case \"n\":\n        escapeCode = cc(\"\\n\");\n        break;\n      case \"r\":\n        escapeCode = cc(\"\\r\");\n        break;\n      case \"t\":\n        escapeCode = cc(\"\\t\");\n        break;\n      case \"v\":\n        escapeCode = cc(\"\\v\");\n        break;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(escapeCode)) {\n      return { type: \"Character\", value: escapeCode };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected controlLetterEscapeAtom(): Omit {\n    this.consumeChar(\"c\");\n    const letter = this.popChar();\n    if (/[a-zA-Z]/.test(letter) === false) {\n      throw Error(\"Invalid \");\n    }\n\n    const letterCode = letter.toUpperCase().charCodeAt(0) - 64;\n    return { type: \"Character\", value: letterCode };\n  }\n\n  protected nulCharacterAtom(): Omit {\n    // TODO implement '[lookahead \u2209 DecimalDigit]'\n    // TODO: for the deprecated octal escape sequence\n    this.consumeChar(\"0\");\n    return { type: \"Character\", value: cc(\"\\0\") };\n  }\n\n  protected hexEscapeSequenceAtom(): Omit {\n    this.consumeChar(\"x\");\n    return this.parseHexDigits(2);\n  }\n\n  protected regExpUnicodeEscapeSequenceAtom(): Omit {\n    this.consumeChar(\"u\");\n    return this.parseHexDigits(4);\n  }\n\n  protected identityEscapeAtom(): Omit {\n    // TODO: implement \"SourceCharacter but not UnicodeIDContinue\"\n    // // http://unicode.org/reports/tr31/#Specific_Character_Adjustments\n    const escapedChar = this.popChar();\n    return { type: \"Character\", value: cc(escapedChar) };\n  }\n\n  protected classPatternCharacterAtom(): Omit {\n    switch (this.peekChar()) {\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n      // istanbul ignore next\n      case \"\\\\\":\n      // istanbul ignore next\n      case \"]\":\n        throw Error(\"TBD\");\n      default:\n        const nextChar = this.popChar();\n        return { type: \"Character\", value: cc(nextChar) };\n    }\n  }\n\n  protected characterClass(): Omit {\n    const set: (number | Range)[] = [];\n    let complement = false;\n    this.consumeChar(\"[\");\n    if (this.peekChar(0) === \"^\") {\n      this.consumeChar(\"^\");\n      complement = true;\n    }\n\n    while (this.isClassAtom()) {\n      const from = this.classAtom();\n      const isFromSingleChar = from.type === \"Character\";\n      if (isCharacter(from) && this.isRangeDash()) {\n        this.consumeChar(\"-\");\n       
 const to = this.classAtom();\n        const isToSingleChar = to.type === \"Character\";\n\n        // a range can only be used when both sides are single characters\n        if (isCharacter(to)) {\n          if (to.value < from.value) {\n            throw Error(\"Range out of order in character class\");\n          }\n          set.push({ from: from.value, to: to.value });\n        } else {\n          // literal dash\n          insertToSet(from.value, set);\n          set.push(cc(\"-\"));\n          insertToSet(to.value, set);\n        }\n      } else {\n        insertToSet(from.value, set);\n      }\n    }\n\n    this.consumeChar(\"]\");\n\n    return { type: \"Set\", complement: complement, value: set };\n  }\n\n  protected classAtom(): Omit {\n    switch (this.peekChar()) {\n      // istanbul ignore next\n      case \"]\":\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n        throw Error(\"TBD\");\n      case \"\\\\\":\n        return this.classEscape();\n      default:\n        return this.classPatternCharacterAtom();\n    }\n  }\n\n  protected classEscape(): Omit {\n    this.consumeChar(\"\\\\\");\n    switch (this.peekChar()) {\n      // Matches a backspace.\n      // (Not to be confused with \\b word boundary outside characterClass)\n      case \"b\":\n        this.consumeChar(\"b\");\n        return { type: \"Character\", value: cc(\"\\u0008\") };\n      case \"d\":\n      case \"D\":\n      case \"s\":\n      case \"S\":\n      case \"w\":\n      case \"W\":\n        return this.characterClassEscape();\n      case \"f\":\n      case \"n\":\n      case \"r\":\n      case \"t\":\n      case \"v\":\n        return this.controlEscapeAtom();\n      case \"c\":\n        return this.controlLetterEscapeAtom();\n      case \"0\":\n        return this.nulCharacterAtom();\n      case \"x\":\n        return this.hexEscapeSequenceAtom();\n      case \"u\":\n        return this.regExpUnicodeEscapeSequenceAtom();\n      default:\n        return this.identityEscapeAtom();\n    }\n  }\n\n  protected group(): Omit {\n    let capturing = true;\n    this.consumeChar(\"(\");\n    switch (this.peekChar(0)) {\n      case \"?\":\n        this.consumeChar(\"?\");\n        this.consumeChar(\":\");\n        capturing = false;\n        break;\n      default:\n        this.groupIdx++;\n        break;\n    }\n    const value = this.disjunction();\n    this.consumeChar(\")\");\n\n    const groupAst: Omit = {\n      type: \"Group\",\n      capturing: capturing,\n      value: value,\n    };\n\n    if (capturing) {\n      groupAst[\"idx\"] = this.groupIdx;\n    }\n\n    return groupAst;\n  }\n\n  protected positiveInteger(): number {\n    let number = this.popChar();\n\n    // istanbul ignore next - can't ever get here due to previous lookahead checks\n    // still implementing this error checking in case this ever changes.\n    if (decimalPatternNoZero.test(number) === false) {\n      throw Error(\"Expecting a positive integer\");\n    }\n\n    while (decimalPattern.test(this.peekChar(0))) {\n      number += this.popChar();\n    }\n\n    return parseInt(number, 10);\n  }\n\n  protected integerIncludingZero(): number {\n    let number = this.popChar();\n    if (decimalPattern.test(number) === false) {\n      throw Error(\"Expecting an integer\");\n    }\n\n    while (decimalPattern.test(this.peekChar(0))) {\n      number += this.popChar();\n    
}\n\n    return parseInt(number, 10);\n  }\n\n  protected patternCharacter(): Omit {\n    const nextChar = this.popChar();\n    switch (nextChar) {\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n      // istanbul ignore next\n      case \"^\":\n      // istanbul ignore next\n      case \"$\":\n      // istanbul ignore next\n      case \"\\\\\":\n      // istanbul ignore next\n      case \".\":\n      // istanbul ignore next\n      case \"*\":\n      // istanbul ignore next\n      case \"+\":\n      // istanbul ignore next\n      case \"?\":\n      // istanbul ignore next\n      case \"(\":\n      // istanbul ignore next\n      case \")\":\n      // istanbul ignore next\n      case \"[\":\n      // istanbul ignore next\n      case \"|\":\n        // istanbul ignore next\n        throw Error(\"TBD\");\n      default:\n        return { type: \"Character\", value: cc(nextChar) };\n    }\n  }\n  protected isRegExpFlag(): boolean {\n    switch (this.peekChar(0)) {\n      case \"g\":\n      case \"i\":\n      case \"m\":\n      case \"u\":\n      case \"y\":\n        return true;\n      default:\n        return false;\n    }\n  }\n\n  protected isRangeDash(): boolean {\n    return this.peekChar() === \"-\" && this.isClassAtom(1);\n  }\n\n  protected isDigit(): boolean {\n    return decimalPattern.test(this.peekChar(0));\n  }\n\n  protected isClassAtom(howMuch = 0): boolean {\n    switch (this.peekChar(howMuch)) {\n      case \"]\":\n      case \"\\n\":\n      case \"\\r\":\n      case \"\\u2028\":\n      case \"\\u2029\":\n        return false;\n      default:\n        return true;\n    }\n  }\n\n  protected isTerm() {\n    return this.isAtom() || this.isAssertion();\n  }\n\n  protected isAtom(): boolean {\n    if (this.isPatternCharacter()) {\n      return true;\n    }\n\n    switch (this.peekChar(0)) {\n      case \".\":\n      case \"\\\\\": // atomEscape\n      case \"[\": // characterClass\n      // TODO: isAtom must be called before isAssertion - disambiguate\n      case \"(\": // group\n        return true;\n      default:\n        return false;\n    }\n  }\n\n  protected isAssertion(): boolean {\n    switch (this.peekChar(0)) {\n      case \"^\":\n      case \"$\":\n        return true;\n      // '\\b' or '\\B'\n      case \"\\\\\":\n        switch (this.peekChar(1)) {\n          case \"b\":\n          case \"B\":\n            return true;\n          default:\n            return false;\n        }\n      // '(?=' or '(?!'\n      case \"(\":\n        return (\n          this.peekChar(1) === \"?\" &&\n          (this.peekChar(2) === \"=\" || this.peekChar(2) === \"!\")\n        );\n      default:\n        return false;\n    }\n  }\n\n  protected isQuantifier(): boolean {\n    const prevState = this.saveState();\n    try {\n      return this.quantifier(true) !== undefined;\n    } catch (e) {\n      return false;\n    } finally {\n      this.restoreState(prevState);\n    }\n  }\n\n  protected isPatternCharacter(): boolean {\n    switch (this.peekChar()) {\n      case \"^\":\n      case \"$\":\n      case \"\\\\\":\n      case \".\":\n      case \"*\":\n      case \"+\":\n      case \"?\":\n      case \"(\":\n      case \")\":\n      case \"[\":\n      case \"|\":\n      case \"/\":\n      case \"\\n\":\n      case \"\\r\":\n      case \"\\u2028\":\n      case \"\\u2029\":\n        return false;\n      default:\n        return 
true;\n    }\n  }\n\n  protected parseHexDigits(howMany: number): Omit {\n    let hexString = \"\";\n    for (let i = 0; i < howMany; i++) {\n      const hexChar = this.popChar();\n      if (hexDigitPattern.test(hexChar) === false) {\n        throw Error(\"Expecting a HexDecimal digits\");\n      }\n      hexString += hexChar;\n    }\n    const charCode = parseInt(hexString, 16);\n    return { type: \"Character\", value: charCode };\n  }\n\n  protected peekChar(howMuch = 0): string {\n    return this.input[this.idx + howMuch];\n  }\n\n  protected popChar(): string {\n    const nextChar = this.peekChar(0);\n    this.consumeChar(undefined);\n    return nextChar;\n  }\n\n  protected consumeChar(char: string | undefined): void {\n    if (char !== undefined && this.input[this.idx] !== char) {\n      throw Error(\n        \"Expected: '\" +\n          char +\n          \"' but found: '\" +\n          this.input[this.idx] +\n          \"' at offset: \" +\n          this.idx,\n      );\n    }\n\n    if (this.idx >= this.input.length) {\n      throw Error(\"Unexpected end of input\");\n    }\n    this.idx++;\n  }\n\n  protected loc(begin: number): Location {\n    return { begin: begin, end: this.idx };\n  }\n}\n", "import type {\n  Alternative,\n  Assertion,\n  Character,\n  Disjunction,\n  Group,\n  GroupBackReference,\n  IRegExpAST,\n  Quantifier,\n  RegExpAstPart,\n  RegExpFlags,\n  RegExpPattern,\n  Set,\n} from \"../types\";\n\nexport class BaseRegExpVisitor {\n  public visitChildren(node: IRegExpAST) {\n    for (const key in node) {\n      const child = (node as any)[key];\n      /* istanbul ignore else */\n      if (node.hasOwnProperty(key)) {\n        if (child.type !== undefined) {\n          this.visit(child);\n        } else if (Array.isArray(child)) {\n          child.forEach((subChild) => {\n            this.visit(subChild);\n          }, this);\n        }\n      }\n    }\n  }\n\n  public visit(node: RegExpAstPart): void {\n    switch (node.type) {\n      case \"Pattern\":\n        this.visitPattern(node);\n        break;\n      case \"Flags\":\n        this.visitFlags(node);\n        break;\n      case \"Disjunction\":\n        this.visitDisjunction(node);\n        break;\n      case \"Alternative\":\n        this.visitAlternative(node);\n        break;\n      case \"StartAnchor\":\n        this.visitStartAnchor(node);\n        break;\n      case \"EndAnchor\":\n        this.visitEndAnchor(node);\n        break;\n      case \"WordBoundary\":\n        this.visitWordBoundary(node);\n        break;\n      case \"NonWordBoundary\":\n        this.visitNonWordBoundary(node);\n        break;\n      case \"Lookahead\":\n        this.visitLookahead(node);\n        break;\n      case \"NegativeLookahead\":\n        this.visitNegativeLookahead(node);\n        break;\n      case \"Character\":\n        this.visitCharacter(node);\n        break;\n      case \"Set\":\n        this.visitSet(node);\n        break;\n      case \"Group\":\n        this.visitGroup(node);\n        break;\n      case \"GroupBackReference\":\n        this.visitGroupBackReference(node);\n        break;\n      case \"Quantifier\":\n        this.visitQuantifier(node);\n        break;\n    }\n\n    this.visitChildren(node);\n  }\n\n  public visitPattern(node: RegExpPattern): void {}\n\n  public visitFlags(node: RegExpFlags): void {}\n\n  public visitDisjunction(node: Disjunction): void {}\n\n  public visitAlternative(node: Alternative): void {}\n\n  // Assertion\n  public visitStartAnchor(node: Assertion): void {}\n\n  public 
visitEndAnchor(node: Assertion): void {}\n\n  public visitWordBoundary(node: Assertion): void {}\n\n  public visitNonWordBoundary(node: Assertion): void {}\n\n  public visitLookahead(node: Assertion): void {}\n\n  public visitNegativeLookahead(node: Assertion): void {}\n\n  // atoms\n  public visitCharacter(node: Character): void {}\n\n  public visitSet(node: Set): void {}\n\n  public visitGroup(node: Group): void {}\n\n  public visitGroupBackReference(node: GroupBackReference): void {}\n\n  public visitQuantifier(node: Quantifier): void {}\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { DefaultNameRegexp } from '../utils/cst-utils.js';\nimport { isCommentTerminal, terminalRegex } from '../utils/grammar-utils.js';\nimport { isMultilineComment } from '../utils/regexp-utils.js';\nimport { isTerminalRule } from './generated/ast.js';\n\nexport interface GrammarConfig {\n    /**\n     * Lists all rule names which are classified as multiline comment rules\n     */\n    multilineCommentRules: string[]\n    /**\n     * A regular expression which matches characters of names\n     */\n    nameRegexp: RegExp\n}\n\n/**\n * Create the default grammar configuration (used by `createDefaultModule`). This can be overridden in a\n * language-specific module.\n */\nexport function createGrammarConfig(services: LangiumCoreServices): GrammarConfig {\n    const rules: string[] = [];\n    const grammar = services.Grammar;\n    for (const rule of grammar.rules) {\n        if (isTerminalRule(rule) && isCommentTerminal(rule) && isMultilineComment(terminalRegex(rule))) {\n            rules.push(rule.name);\n        }\n    }\n    return {\n        multilineCommentRules: rules,\n        nameRegexp: DefaultNameRegexp\n    };\n}\n", "export function PRINT_ERROR(msg: string) {\n  /* istanbul ignore else - can't override global.console in node.js */\n  if (console && console.error) {\n    console.error(`Error: ${msg}`);\n  }\n}\n\nexport function PRINT_WARNING(msg: string) {\n  /* istanbul ignore else - can't override global.console in node.js*/\n  if (console && console.warn) {\n    // TODO: modify docs accordingly\n    console.warn(`Warning: ${msg}`);\n  }\n}\n", "export function timer(func: () => T): { time: number; value: T } {\n  const start = new Date().getTime();\n  const val = func();\n  const end = new Date().getTime();\n  const total = end - start;\n  return { time: total, value: val };\n}\n", "// based on: https://github.com/petkaantonov/bluebird/blob/b97c0d2d487e8c5076e8bd897e0dcd4622d31846/src/util.js#L201-L216\nexport function toFastProperties(toBecomeFast: any) {\n  function FakeConstructor() {}\n\n  // If our object is used as a constructor, it would receive\n  FakeConstructor.prototype = toBecomeFast;\n  const fakeInstance = new (FakeConstructor as any)();\n\n  function fakeAccess() {\n    return typeof fakeInstance.bar;\n  }\n\n  // help V8 understand this is a \"real\" prototype by actually using\n  // the fake instance.\n  fakeAccess();\n  fakeAccess();\n\n  // Always true condition to suppress the Firefox warning of unreachable\n  // code after a return statement.\n  if (1) return toBecomeFast;\n\n  // Eval prevents optimization of 
this method (even though this is dead code)\n  // - https://esbuild.github.io/content-types/#direct-eval\n  /* istanbul ignore next */\n  // tslint:disable-next-line\n  (0, eval)(toBecomeFast);\n}\n", "import { assign, forEach, isRegExp, isString, map, pickBy } from \"lodash-es\";\nimport type {\n  IGASTVisitor,\n  IProduction,\n  IProductionWithOccurrence,\n  ISerializedGast,\n  TokenType,\n} from \"@chevrotain/types\";\n\n// TODO: duplicated code to avoid extracting another sub-package -- how to avoid?\nfunction tokenLabel(tokType: TokenType): string {\n  if (hasTokenLabel(tokType)) {\n    return tokType.LABEL;\n  } else {\n    return tokType.name;\n  }\n}\n\n// TODO: duplicated code to avoid extracting another sub-package -- how to avoid?\nfunction hasTokenLabel(\n  obj: TokenType,\n): obj is TokenType & Pick, \"LABEL\"> {\n  return isString(obj.LABEL) && obj.LABEL !== \"\";\n}\n\nexport abstract class AbstractProduction\n  implements IProduction\n{\n  public get definition(): T[] {\n    return this._definition;\n  }\n  public set definition(value: T[]) {\n    this._definition = value;\n  }\n\n  constructor(protected _definition: T[]) {}\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n    forEach(this.definition, (prod) => {\n      prod.accept(visitor);\n    });\n  }\n}\n\nexport class NonTerminal\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public nonTerminalName!: string;\n  public label?: string;\n  public referencedRule!: Rule;\n  public idx: number = 1;\n\n  constructor(options: {\n    nonTerminalName: string;\n    label?: string;\n    referencedRule?: Rule;\n    idx?: number;\n  }) {\n    super([]);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n\n  set definition(definition: IProduction[]) {\n    // immutable\n  }\n\n  get definition(): IProduction[] {\n    if (this.referencedRule !== undefined) {\n      return this.referencedRule.definition;\n    }\n    return [];\n  }\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n    // don't visit children of a reference, we will get cyclic infinite loops if we do so\n  }\n}\n\nexport class Rule extends AbstractProduction {\n  public name!: string;\n  public orgText: string = \"\";\n\n  constructor(options: {\n    name: string;\n    definition: IProduction[];\n    orgText?: string;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Alternative extends AbstractProduction {\n  public ignoreAmbiguities: boolean = false;\n\n  constructor(options: {\n    definition: IProduction[];\n    ignoreAmbiguities?: boolean;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Option\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionMandatory\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n  
  assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionMandatoryWithSeparator\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    separator: TokenType;\n    idx?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Repetition\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionWithSeparator\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    separator: TokenType;\n    idx?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Alternation\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public ignoreAmbiguities: boolean = false;\n  public hasPredicates: boolean = false;\n  public maxLookahead?: number;\n\n  public get definition(): Alternative[] {\n    return this._definition;\n  }\n  public set definition(value: Alternative[]) {\n    this._definition = value;\n  }\n\n  constructor(options: {\n    definition: Alternative[];\n    idx?: number;\n    ignoreAmbiguities?: boolean;\n    hasPredicates?: boolean;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Terminal implements IProductionWithOccurrence {\n  public terminalType!: TokenType;\n  public label?: string;\n  public idx: number = 1;\n\n  constructor(options: {\n    terminalType: TokenType;\n    label?: string;\n    idx?: number;\n  }) {\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n  }\n}\n\nexport interface ISerializedBasic extends ISerializedGast {\n  type:\n    | \"Alternative\"\n    | \"Option\"\n    | \"RepetitionMandatory\"\n    | \"Repetition\"\n    | \"Alternation\";\n  idx?: number;\n}\n\nexport interface ISerializedGastRule extends ISerializedGast {\n  type: \"Rule\";\n  name: string;\n  orgText: string;\n}\n\nexport interface ISerializedNonTerminal extends ISerializedGast {\n  type: \"NonTerminal\";\n  name: string;\n  label?: string;\n  idx: number;\n}\n\nexport interface ISerializedTerminal extends ISerializedGast {\n  type: \"Terminal\";\n  name: string;\n  terminalLabel?: string;\n  label?: string;\n  pattern?: string;\n  idx: number;\n}\n\nexport interface ISerializedTerminalWithSeparator extends ISerializedGast {\n  type: \"RepetitionMandatoryWithSeparator\" | \"RepetitionWithSeparator\";\n  idx: number;\n  separator: ISerializedTerminal;\n}\n\nexport type ISerializedGastAny =\n  | ISerializedBasic\n  | ISerializedGastRule\n  | ISerializedNonTerminal\n  | 
ISerializedTerminal\n  | ISerializedTerminalWithSeparator;\n\nexport function serializeGrammar(topRules: Rule[]): ISerializedGast[] {\n  return map(topRules, serializeProduction);\n}\n\nexport function serializeProduction(node: IProduction): ISerializedGast {\n  function convertDefinition(definition: IProduction[]): ISerializedGast[] {\n    return map(definition, serializeProduction);\n  }\n  /* istanbul ignore else */\n  if (node instanceof NonTerminal) {\n    const serializedNonTerminal: ISerializedNonTerminal = {\n      type: \"NonTerminal\",\n      name: node.nonTerminalName,\n      idx: node.idx,\n    };\n\n    if (isString(node.label)) {\n      serializedNonTerminal.label = node.label;\n    }\n\n    return serializedNonTerminal;\n  } else if (node instanceof Alternative) {\n    return {\n      type: \"Alternative\",\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Option) {\n    return {\n      type: \"Option\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionMandatory) {\n    return {\n      type: \"RepetitionMandatory\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionMandatoryWithSeparator) {\n    return {\n      type: \"RepetitionMandatoryWithSeparator\",\n      idx: node.idx,\n      separator: (\n        serializeProduction(new Terminal({ terminalType: node.separator }))\n      ),\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionWithSeparator) {\n    return {\n      type: \"RepetitionWithSeparator\",\n      idx: node.idx,\n      separator: (\n        serializeProduction(new Terminal({ terminalType: node.separator }))\n      ),\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Repetition) {\n    return {\n      type: \"Repetition\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Alternation) {\n    return {\n      type: \"Alternation\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Terminal) {\n    const serializedTerminal = {\n      type: \"Terminal\",\n      name: node.terminalType.name,\n      label: tokenLabel(node.terminalType),\n      idx: node.idx,\n    };\n\n    if (isString(node.label)) {\n      serializedTerminal.terminalLabel = node.label;\n    }\n\n    const pattern = node.terminalType.PATTERN;\n    if (node.terminalType.PATTERN) {\n      serializedTerminal.pattern = isRegExp(pattern)\n        ? 
(pattern).source\n        : pattern;\n    }\n\n    return serializedTerminal;\n  } else if (node instanceof Rule) {\n    return {\n      type: \"Rule\",\n      name: node.name,\n      orgText: node.orgText,\n      definition: convertDefinition(node.definition),\n    };\n    /* c8 ignore next 3 */\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n", "import {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"./model.js\";\nimport type { IProduction } from \"@chevrotain/types\";\n\nexport abstract class GAstVisitor {\n  public visit(node: IProduction): any {\n    const nodeAny: any = node;\n    switch (nodeAny.constructor) {\n      case NonTerminal:\n        return this.visitNonTerminal(nodeAny);\n      case Alternative:\n        return this.visitAlternative(nodeAny);\n      case Option:\n        return this.visitOption(nodeAny);\n      case RepetitionMandatory:\n        return this.visitRepetitionMandatory(nodeAny);\n      case RepetitionMandatoryWithSeparator:\n        return this.visitRepetitionMandatoryWithSeparator(nodeAny);\n      case RepetitionWithSeparator:\n        return this.visitRepetitionWithSeparator(nodeAny);\n      case Repetition:\n        return this.visitRepetition(nodeAny);\n      case Alternation:\n        return this.visitAlternation(nodeAny);\n      case Terminal:\n        return this.visitTerminal(nodeAny);\n      case Rule:\n        return this.visitRule(nodeAny);\n      /* c8 ignore next 2 */\n      default:\n        throw Error(\"non exhaustive match\");\n    }\n  }\n\n  /* c8 ignore next */\n  public visitNonTerminal(node: NonTerminal): any {}\n\n  /* c8 ignore next */\n  public visitAlternative(node: Alternative): any {}\n\n  /* c8 ignore next */\n  public visitOption(node: Option): any {}\n\n  /* c8 ignore next */\n  public visitRepetition(node: Repetition): any {}\n\n  /* c8 ignore next */\n  public visitRepetitionMandatory(node: RepetitionMandatory): any {}\n\n  /* c8 ignore next 3 */\n  public visitRepetitionMandatoryWithSeparator(\n    node: RepetitionMandatoryWithSeparator,\n  ): any {}\n\n  /* c8 ignore next */\n  public visitRepetitionWithSeparator(node: RepetitionWithSeparator): any {}\n\n  /* c8 ignore next */\n  public visitAlternation(node: Alternation): any {}\n\n  /* c8 ignore next */\n  public visitTerminal(node: Terminal): any {}\n\n  /* c8 ignore next */\n  public visitRule(node: Rule): any {}\n}\n", "import { every, includes, some } from \"lodash-es\";\nimport {\n  AbstractProduction,\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"./model.js\";\nimport type { IProduction, IProductionWithOccurrence } from \"@chevrotain/types\";\n\nexport function isSequenceProd(\n  prod: IProduction,\n): prod is { definition: IProduction[] } & IProduction {\n  return (\n    prod instanceof Alternative ||\n    prod instanceof Option ||\n    prod instanceof Repetition ||\n    prod instanceof RepetitionMandatory ||\n    prod instanceof RepetitionMandatoryWithSeparator ||\n    prod instanceof RepetitionWithSeparator ||\n    prod instanceof Terminal ||\n    prod instanceof Rule\n  );\n}\n\nexport function isOptionalProd(\n  prod: IProduction,\n  alreadyVisited: NonTerminal[] = [],\n): boolean {\n  const isDirectlyOptional =\n    prod instanceof Option ||\n    prod 
instanceof Repetition ||\n    prod instanceof RepetitionWithSeparator;\n  if (isDirectlyOptional) {\n    return true;\n  }\n\n  // note that this can cause infinite loop if one optional empty TOP production has a cyclic dependency with another\n  // empty optional top rule\n  // may be indirectly optional ((A?B?C?) | (D?E?F?))\n  if (prod instanceof Alternation) {\n    // for OR its enough for just one of the alternatives to be optional\n    return some((prod).definition, (subProd: IProduction) => {\n      return isOptionalProd(subProd, alreadyVisited);\n    });\n  } else if (prod instanceof NonTerminal && includes(alreadyVisited, prod)) {\n    // avoiding stack overflow due to infinite recursion\n    return false;\n  } else if (prod instanceof AbstractProduction) {\n    if (prod instanceof NonTerminal) {\n      alreadyVisited.push(prod);\n    }\n    return every(\n      (prod).definition,\n      (subProd: IProduction) => {\n        return isOptionalProd(subProd, alreadyVisited);\n      },\n    );\n  } else {\n    return false;\n  }\n}\n\nexport function isBranchingProd(\n  prod: IProduction,\n): prod is { definition: IProduction[] } & IProduction {\n  return prod instanceof Alternation;\n}\n\nexport function getProductionDslName(prod: IProductionWithOccurrence): string {\n  /* istanbul ignore else */\n  if (prod instanceof NonTerminal) {\n    return \"SUBRULE\";\n  } else if (prod instanceof Option) {\n    return \"OPTION\";\n  } else if (prod instanceof Alternation) {\n    return \"OR\";\n  } else if (prod instanceof RepetitionMandatory) {\n    return \"AT_LEAST_ONE\";\n  } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n    return \"AT_LEAST_ONE_SEP\";\n  } else if (prod instanceof RepetitionWithSeparator) {\n    return \"MANY_SEP\";\n  } else if (prod instanceof Repetition) {\n    return \"MANY\";\n  } else if (prod instanceof Terminal) {\n    return \"CONSUME\";\n    /* c8 ignore next 3 */\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n", "import { drop, forEach } from \"lodash-es\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { IProduction } from \"@chevrotain/types\";\n\n/**\n *  A Grammar Walker that computes the \"remaining\" grammar \"after\" a productions in the grammar.\n */\nexport abstract class RestWalker {\n  walk(prod: { definition: IProduction[] }, prevRest: any[] = []): void {\n    forEach(prod.definition, (subProd: IProduction, index) => {\n      const currRest = drop(prod.definition, index + 1);\n      /* istanbul ignore else */\n      if (subProd instanceof NonTerminal) {\n        this.walkProdRef(subProd, currRest, prevRest);\n      } else if (subProd instanceof Terminal) {\n        this.walkTerminal(subProd, currRest, prevRest);\n      } else if (subProd instanceof Alternative) {\n        this.walkFlat(subProd, currRest, prevRest);\n      } else if (subProd instanceof Option) {\n        this.walkOption(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionMandatory) {\n        this.walkAtLeastOne(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionMandatoryWithSeparator) {\n        this.walkAtLeastOneSep(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionWithSeparator) {\n        this.walkManySep(subProd, currRest, prevRest);\n      } else if (subProd instanceof Repetition) {\n        
this.walkMany(subProd, currRest, prevRest);\n      } else if (subProd instanceof Alternation) {\n        this.walkOr(subProd, currRest, prevRest);\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    });\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {}\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {}\n\n  walkFlat(\n    flatProd: Alternative,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABCDEF => after the D the rest is EF\n    const fullOrRest = currRest.concat(prevRest);\n    this.walk(flatProd, fullOrRest);\n  }\n\n  walkOption(\n    optionProd: Option,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)?F => after the (DE)? the rest is F\n    const fullOrRest = currRest.concat(prevRest);\n    this.walk(optionProd, fullOrRest);\n  }\n\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)+F => after the (DE)+ the rest is (DE)?F\n    const fullAtLeastOneRest: IProduction[] = [\n      new Option({ definition: atLeastOneProd.definition }),\n    ].concat(currRest, prevRest);\n    this.walk(atLeastOneProd, fullAtLeastOneRest);\n  }\n\n  walkAtLeastOneSep(\n    atLeastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC DE(,DE)* F => after the (,DE)+ the rest is (,DE)?F\n    const fullAtLeastOneSepRest = restForRepetitionWithSeparator(\n      atLeastOneSepProd,\n      currRest,\n      prevRest,\n    );\n    this.walk(atLeastOneSepProd, fullAtLeastOneSepRest);\n  }\n\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)*F => after the (DE)* the rest is (DE)?F\n    const fullManyRest: IProduction[] = [\n      new Option({ definition: manyProd.definition }),\n    ].concat(currRest, prevRest);\n    this.walk(manyProd, fullManyRest);\n  }\n\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC (DE(,DE)*)? 
F => after the (,DE)* the rest is (,DE)?F\n    const fullManySepRest = restForRepetitionWithSeparator(\n      manySepProd,\n      currRest,\n      prevRest,\n    );\n    this.walk(manySepProd, fullManySepRest);\n  }\n\n  walkOr(\n    orProd: Alternation,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(D|E|F)G => when finding the (D|E|F) the rest is G\n    const fullOrRest = currRest.concat(prevRest);\n    // walk all different alternatives\n    forEach(orProd.definition, (alt) => {\n      // wrapping each alternative in a single definition wrapper\n      // to avoid errors in computing the rest of that alternative in the invocation to computeInProdFollows\n      // (otherwise for OR([alt1,alt2]) alt2 will be considered in 'rest' of alt1\n      const prodWrapper = new Alternative({ definition: [alt] });\n      this.walk(prodWrapper, fullOrRest);\n    });\n  }\n}\n\nfunction restForRepetitionWithSeparator(\n  repSepProd: RepetitionWithSeparator,\n  currRest: IProduction[],\n  prevRest: IProduction[],\n) {\n  const repSepRest = [\n    new Option({\n      definition: [\n        new Terminal({ terminalType: repSepProd.separator }) as IProduction,\n      ].concat(repSepProd.definition),\n    }) as IProduction,\n  ];\n  const fullRepSepRest: IProduction[] = repSepRest.concat(currRest, prevRest);\n  return fullRepSepRest;\n}\n", "import { flatten, map, uniq } from \"lodash-es\";\nimport {\n  isBranchingProd,\n  isOptionalProd,\n  isSequenceProd,\n  NonTerminal,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { IProduction, TokenType } from \"@chevrotain/types\";\n\nexport function first(prod: IProduction): TokenType[] {\n  /* istanbul ignore else */\n  if (prod instanceof NonTerminal) {\n    // this could in theory cause infinite loops if\n    // (1) prod A refs prod B.\n    // (2) prod B refs prod A\n    // (3) AB can match the empty set\n    // in other words a cycle where everything is optional so the first will keep\n    // looking ahead for the next optional part and will never exit\n    // currently there is no safeguard for this unique edge case because\n    // (1) not sure a grammar in which this can happen is useful for anything (productive)\n    return first((prod).referencedRule);\n  } else if (prod instanceof Terminal) {\n    return firstForTerminal(prod);\n  } else if (isSequenceProd(prod)) {\n    return firstForSequence(prod);\n  } else if (isBranchingProd(prod)) {\n    return firstForBranching(prod);\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nexport function firstForSequence(prod: {\n  definition: IProduction[];\n}): TokenType[] {\n  let firstSet: TokenType[] = [];\n  const seq = prod.definition;\n  let nextSubProdIdx = 0;\n  let hasInnerProdsRemaining = seq.length > nextSubProdIdx;\n  let currSubProd;\n  // so we enter the loop at least once (if the definition is not empty\n  let isLastInnerProdOptional = true;\n  // scan a sequence until it's end or until we have found a NONE optional production in it\n  while (hasInnerProdsRemaining && isLastInnerProdOptional) {\n    currSubProd = seq[nextSubProdIdx];\n    isLastInnerProdOptional = isOptionalProd(currSubProd);\n    firstSet = firstSet.concat(first(currSubProd));\n    nextSubProdIdx = nextSubProdIdx + 1;\n    hasInnerProdsRemaining = seq.length > nextSubProdIdx;\n  }\n\n  return uniq(firstSet);\n}\n\nexport function firstForBranching(prod: {\n  definition: IProduction[];\n}): TokenType[] {\n  const allAlternativesFirsts: TokenType[][] = map(\n    prod.definition,\n    
(innerProd) => {\n      return first(innerProd);\n    },\n  );\n  return uniq(flatten(allAlternativesFirsts));\n}\n\nexport function firstForTerminal(terminal: Terminal): TokenType[] {\n  return [terminal.terminalType];\n}\n", "// TODO: can this be removed? where is it used?\nexport const IN = \"_~IN~_\";\n", "import { RestWalker } from \"./rest.js\";\nimport { first } from \"./first.js\";\nimport { assign, forEach } from \"lodash-es\";\nimport { IN } from \"../constants.js\";\nimport { Alternative, NonTerminal, Rule, Terminal } from \"@chevrotain/gast\";\nimport { IProduction, TokenType } from \"@chevrotain/types\";\n\n// This ResyncFollowsWalker computes all of the follows required for RESYNC\n// (skipping reference production).\nexport class ResyncFollowsWalker extends RestWalker {\n  public follows: Record = {};\n\n  constructor(private topProd: Rule) {\n    super();\n  }\n\n  startWalking(): Record {\n    this.walk(this.topProd);\n    return this.follows;\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // do nothing! just like in the public sector after 13:00\n  }\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    const followName =\n      buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.idx) +\n      this.topProd.name;\n    const fullRest: IProduction[] = currRest.concat(prevRest);\n    const restProd = new Alternative({ definition: fullRest });\n    const t_in_topProd_follows = first(restProd);\n    this.follows[followName] = t_in_topProd_follows;\n  }\n}\n\nexport function computeAllProdsFollows(\n  topProductions: Rule[],\n): Record {\n  const reSyncFollows = {};\n\n  forEach(topProductions, (topProd) => {\n    const currRefsFollow = new ResyncFollowsWalker(topProd).startWalking();\n    assign(reSyncFollows, currRefsFollow);\n  });\n  return reSyncFollows;\n}\n\nexport function buildBetweenProdsFollowPrefix(\n  inner: Rule,\n  occurenceInParent: number,\n): string {\n  return inner.name + occurenceInParent + IN;\n}\n\nexport function buildInProdFollowPrefix(terminal: Terminal): string {\n  const terminalName = terminal.terminalType.name;\n  return terminalName + terminal.idx + IN;\n}\n", "import {\n  Alternative,\n  Assertion,\n  Atom,\n  Disjunction,\n  RegExpParser,\n  RegExpPattern,\n} from \"@chevrotain/regexp-to-ast\";\n\nlet regExpAstCache: { [regex: string]: RegExpPattern } = {};\nconst regExpParser = new RegExpParser();\n\n// this should be moved to regexp-to-ast\nexport type ASTNode =\n  | RegExpPattern\n  | Disjunction\n  | Alternative\n  | Assertion\n  | Atom;\n\nexport function getRegExpAst(regExp: RegExp): RegExpPattern {\n  const regExpStr = regExp.toString();\n  if (regExpAstCache.hasOwnProperty(regExpStr)) {\n    return regExpAstCache[regExpStr];\n  } else {\n    const regExpAst = regExpParser.pattern(regExpStr);\n    regExpAstCache[regExpStr] = regExpAst;\n    return regExpAst;\n  }\n}\n\nexport function clearRegExpParserCache() {\n  regExpAstCache = {};\n}\n", "import {\n  Alternative,\n  Atom,\n  BaseRegExpVisitor,\n  Character,\n  Disjunction,\n  Group,\n  Set,\n} from \"@chevrotain/regexp-to-ast\";\nimport { every, find, forEach, includes, isArray, values } from \"lodash-es\";\nimport { PRINT_ERROR, PRINT_WARNING } from \"@chevrotain/utils\";\nimport { ASTNode, getRegExpAst } from \"./reg_exp_parser.js\";\nimport { charCodeToOptimizedIndex, minOptimizationVal } from \"./lexer.js\";\n\nconst 
complementErrorMessage =\n  \"Complement Sets are not supported for first char optimization\";\nexport const failedOptimizationPrefixMsg =\n  'Unable to use \"first char\" lexer optimizations:\\n';\n\nexport function getOptimizedStartCodesIndices(\n  regExp: RegExp,\n  ensureOptimizations = false,\n): number[] {\n  try {\n    const ast = getRegExpAst(regExp);\n    const firstChars = firstCharOptimizedIndices(\n      ast.value,\n      {},\n      ast.flags.ignoreCase,\n    );\n    return firstChars;\n  } catch (e) {\n    /* istanbul ignore next */\n    // Testing this relies on the regexp-to-ast library having a bug... */\n    // TODO: only the else branch needs to be ignored, try to fix with newer prettier / tsc\n    if (e.message === complementErrorMessage) {\n      if (ensureOptimizations) {\n        PRINT_WARNING(\n          `${failedOptimizationPrefixMsg}` +\n            `\\tUnable to optimize: < ${regExp.toString()} >\\n` +\n            \"\\tComplement Sets cannot be automatically optimized.\\n\" +\n            \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n            \"\\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#COMPLEMENT for details.\",\n        );\n      }\n    } else {\n      let msgSuffix = \"\";\n      if (ensureOptimizations) {\n        msgSuffix =\n          \"\\n\\tThis will disable the lexer's first char optimizations.\\n\" +\n          \"\\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#REGEXP_PARSING for details.\";\n      }\n      PRINT_ERROR(\n        `${failedOptimizationPrefixMsg}\\n` +\n          `\\tFailed parsing: < ${regExp.toString()} >\\n` +\n          `\\tUsing the @chevrotain/regexp-to-ast library\\n` +\n          \"\\tPlease open an issue at: https://github.com/chevrotain/chevrotain/issues\" +\n          msgSuffix,\n      );\n    }\n  }\n\n  return [];\n}\n\nexport function firstCharOptimizedIndices(\n  ast: ASTNode,\n  result: { [charCode: number]: number },\n  ignoreCase: boolean,\n): number[] {\n  switch (ast.type) {\n    case \"Disjunction\":\n      for (let i = 0; i < ast.value.length; i++) {\n        firstCharOptimizedIndices(ast.value[i], result, ignoreCase);\n      }\n      break;\n    case \"Alternative\":\n      const terms = ast.value;\n      for (let i = 0; i < terms.length; i++) {\n        const term = terms[i];\n\n        // skip terms that cannot effect the first char results\n        switch (term.type) {\n          case \"EndAnchor\":\n          // A group back reference cannot affect potential starting char.\n          // because if a back reference is the first production than automatically\n          // the group being referenced has had to come BEFORE so its codes have already been added\n          case \"GroupBackReference\":\n          // assertions do not affect potential starting codes\n          case \"Lookahead\":\n          case \"NegativeLookahead\":\n          case \"StartAnchor\":\n          case \"WordBoundary\":\n          case \"NonWordBoundary\":\n            continue;\n        }\n\n        const atom = term;\n        switch (atom.type) {\n          case \"Character\":\n            addOptimizedIdxToResult(atom.value, result, ignoreCase);\n            break;\n          case \"Set\":\n            if (atom.complement === true) {\n              throw Error(complementErrorMessage);\n            }\n            forEach(atom.value, (code) => {\n              if (typeof code === \"number\") {\n                addOptimizedIdxToResult(code, result, ignoreCase);\n          
    } else {\n                // range\n                const range = code as any;\n                // cannot optimize when ignoreCase is\n                if (ignoreCase === true) {\n                  for (\n                    let rangeCode = range.from;\n                    rangeCode <= range.to;\n                    rangeCode++\n                  ) {\n                    addOptimizedIdxToResult(rangeCode, result, ignoreCase);\n                  }\n                }\n                // Optimization (2 orders of magnitude less work for very large ranges)\n                else {\n                  // handle unoptimized values\n                  for (\n                    let rangeCode = range.from;\n                    rangeCode <= range.to && rangeCode < minOptimizationVal;\n                    rangeCode++\n                  ) {\n                    addOptimizedIdxToResult(rangeCode, result, ignoreCase);\n                  }\n\n                  // Less common charCode where we optimize for faster init time, by using larger \"buckets\"\n                  if (range.to >= minOptimizationVal) {\n                    const minUnOptVal =\n                      range.from >= minOptimizationVal\n                        ? range.from\n                        : minOptimizationVal;\n                    const maxUnOptVal = range.to;\n                    const minOptIdx = charCodeToOptimizedIndex(minUnOptVal);\n                    const maxOptIdx = charCodeToOptimizedIndex(maxUnOptVal);\n\n                    for (\n                      let currOptIdx = minOptIdx;\n                      currOptIdx <= maxOptIdx;\n                      currOptIdx++\n                    ) {\n                      result[currOptIdx] = currOptIdx;\n                    }\n                  }\n                }\n              }\n            });\n            break;\n          case \"Group\":\n            firstCharOptimizedIndices(atom.value, result, ignoreCase);\n            break;\n          /* istanbul ignore next */\n          default:\n            throw Error(\"Non Exhaustive Match\");\n        }\n\n        // reached a mandatory production, no more **start** codes can be found on this alternative\n        const isOptionalQuantifier =\n          atom.quantifier !== undefined && atom.quantifier.atLeast === 0;\n        if (\n          // A group may be optional due to empty contents /(?:)/\n          // or if everything inside it is optional /((a)?)/\n          (atom.type === \"Group\" && isWholeOptional(atom) === false) ||\n          // If this term is not a group it may only be optional if it has an optional quantifier\n          (atom.type !== \"Group\" && isOptionalQuantifier === false)\n        ) {\n          break;\n        }\n      }\n      break;\n    /* istanbul ignore next */\n    default:\n      throw Error(\"non exhaustive match!\");\n  }\n\n  // console.log(Object.keys(result).length)\n  return values(result);\n}\n\nfunction addOptimizedIdxToResult(\n  code: number,\n  result: { [charCode: number]: number },\n  ignoreCase: boolean,\n) {\n  const optimizedCharIdx = charCodeToOptimizedIndex(code);\n  result[optimizedCharIdx] = optimizedCharIdx;\n\n  if (ignoreCase === true) {\n    handleIgnoreCase(code, result);\n  }\n}\n\nfunction handleIgnoreCase(\n  code: number,\n  result: { [charCode: number]: number },\n) {\n  const char = String.fromCharCode(code);\n  const upperChar = char.toUpperCase();\n  /* istanbul ignore else */\n  if (upperChar !== char) {\n    const optimizedCharIdx = 
charCodeToOptimizedIndex(upperChar.charCodeAt(0));\n    result[optimizedCharIdx] = optimizedCharIdx;\n  } else {\n    const lowerChar = char.toLowerCase();\n    if (lowerChar !== char) {\n      const optimizedCharIdx = charCodeToOptimizedIndex(\n        lowerChar.charCodeAt(0),\n      );\n      result[optimizedCharIdx] = optimizedCharIdx;\n    }\n  }\n}\n\nfunction findCode(setNode: Set, targetCharCodes: number[]) {\n  return find(setNode.value, (codeOrRange) => {\n    if (typeof codeOrRange === \"number\") {\n      return includes(targetCharCodes, codeOrRange);\n    } else {\n      // range\n      const range = codeOrRange;\n      return (\n        find(\n          targetCharCodes,\n          (targetCode) => range.from <= targetCode && targetCode <= range.to,\n        ) !== undefined\n      );\n    }\n  });\n}\n\nfunction isWholeOptional(ast: any): boolean {\n  const quantifier = (ast as Atom).quantifier;\n  if (quantifier && quantifier.atLeast === 0) {\n    return true;\n  }\n\n  if (!ast.value) {\n    return false;\n  }\n\n  return isArray(ast.value)\n    ? every(ast.value, isWholeOptional)\n    : isWholeOptional(ast.value);\n}\n\nclass CharCodeFinder extends BaseRegExpVisitor {\n  found: boolean = false;\n\n  constructor(private targetCharCodes: number[]) {\n    super();\n  }\n\n  visitChildren(node: ASTNode) {\n    // No need to keep looking...\n    if (this.found === true) {\n      return;\n    }\n\n    // switch lookaheads as they do not actually consume any characters thus\n    // finding a charCode at lookahead context does not mean that regexp can actually contain it in a match.\n    switch (node.type) {\n      case \"Lookahead\":\n        this.visitLookahead(node);\n        return;\n      case \"NegativeLookahead\":\n        this.visitNegativeLookahead(node);\n        return;\n    }\n\n    super.visitChildren(node);\n  }\n\n  visitCharacter(node: Character) {\n    if (includes(this.targetCharCodes, node.value)) {\n      this.found = true;\n    }\n  }\n\n  visitSet(node: Set) {\n    if (node.complement) {\n      if (findCode(node, this.targetCharCodes) === undefined) {\n        this.found = true;\n      }\n    } else {\n      if (findCode(node, this.targetCharCodes) !== undefined) {\n        this.found = true;\n      }\n    }\n  }\n}\n\nexport function canMatchCharCode(\n  charCodes: number[],\n  pattern: RegExp | string,\n) {\n  if (pattern instanceof RegExp) {\n    const ast = getRegExpAst(pattern);\n    const charCodeFinder = new CharCodeFinder(charCodes);\n    charCodeFinder.visit(ast);\n    return charCodeFinder.found;\n  } else {\n    return (\n      find(pattern, (char) => {\n        return includes(charCodes, (char).charCodeAt(0));\n      }) !== undefined\n    );\n  }\n}\n", "import { BaseRegExpVisitor } from \"@chevrotain/regexp-to-ast\";\nimport {\n  IRegExpExec,\n  Lexer,\n  LexerDefinitionErrorType,\n} from \"./lexer_public.js\";\nimport {\n  compact,\n  defaults,\n  difference,\n  filter,\n  find,\n  first,\n  flatten,\n  forEach,\n  has,\n  includes,\n  indexOf,\n  isArray,\n  isEmpty,\n  isFunction,\n  isRegExp,\n  isString,\n  isUndefined,\n  keys,\n  map,\n  reduce,\n  reject,\n  values,\n} from \"lodash-es\";\nimport { PRINT_ERROR } from \"@chevrotain/utils\";\nimport {\n  canMatchCharCode,\n  failedOptimizationPrefixMsg,\n  getOptimizedStartCodesIndices,\n} from \"./reg_exp.js\";\nimport {\n  ILexerDefinitionError,\n  ILineTerminatorsTester,\n  IMultiModeLexerDefinition,\n  IToken,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { getRegExpAst } from 
\"./reg_exp_parser.js\";\n\nconst PATTERN = \"PATTERN\";\nexport const DEFAULT_MODE = \"defaultMode\";\nexport const MODES = \"modes\";\n\nexport interface IPatternConfig {\n  pattern: IRegExpExec | string;\n  longerAlt: number[] | undefined;\n  canLineTerminator: boolean;\n  isCustom: boolean;\n  short: number | false;\n  group: string | undefined | false;\n  push: string | undefined;\n  pop: boolean;\n  tokenType: TokenType;\n  tokenTypeIdx: number;\n}\n\nexport interface IAnalyzeResult {\n  patternIdxToConfig: IPatternConfig[];\n  charCodeToPatternIdxToConfig: { [charCode: number]: IPatternConfig[] };\n  emptyGroups: { [groupName: string]: IToken[] };\n  hasCustom: boolean;\n  canBeOptimized: boolean;\n}\n\nexport let SUPPORT_STICKY =\n  typeof (new RegExp(\"(?:)\")).sticky === \"boolean\";\n\nexport function disableSticky() {\n  SUPPORT_STICKY = false;\n}\n\nexport function enableSticky() {\n  SUPPORT_STICKY = true;\n}\n\nexport function analyzeTokenTypes(\n  tokenTypes: TokenType[],\n  options: {\n    positionTracking?: \"full\" | \"onlyStart\" | \"onlyOffset\";\n    ensureOptimizations?: boolean;\n    lineTerminatorCharacters?: (number | string)[];\n    // TODO: should `useSticky` be an argument here?\n    useSticky?: boolean;\n    safeMode?: boolean;\n    tracer?: (msg: string, action: () => void) => void;\n  },\n): IAnalyzeResult {\n  options = defaults(options, {\n    useSticky: SUPPORT_STICKY,\n    debug: false as boolean,\n    safeMode: false as boolean,\n    positionTracking: \"full\",\n    lineTerminatorCharacters: [\"\\r\", \"\\n\"],\n    tracer: (msg: string, action: Function) => action(),\n  });\n\n  const tracer = options.tracer!;\n\n  tracer(\"initCharCodeToOptimizedIndexMap\", () => {\n    initCharCodeToOptimizedIndexMap();\n  });\n\n  let onlyRelevantTypes: TokenType[];\n  tracer(\"Reject Lexer.NA\", () => {\n    onlyRelevantTypes = reject(tokenTypes, (currType) => {\n      return currType[PATTERN] === Lexer.NA;\n    });\n  });\n\n  let hasCustom = false;\n  let allTransformedPatterns: (IRegExpExec | string)[];\n  tracer(\"Transform Patterns\", () => {\n    hasCustom = false;\n    allTransformedPatterns = map(\n      onlyRelevantTypes,\n      (currType): IRegExpExec | string => {\n        const currPattern = currType[PATTERN];\n\n        /* istanbul ignore else */\n        if (isRegExp(currPattern)) {\n          const regExpSource = currPattern.source;\n          if (\n            regExpSource.length === 1 &&\n            // only these regExp meta characters which can appear in a length one regExp\n            regExpSource !== \"^\" &&\n            regExpSource !== \"$\" &&\n            regExpSource !== \".\" &&\n            !currPattern.ignoreCase\n          ) {\n            return regExpSource;\n          } else if (\n            regExpSource.length === 2 &&\n            regExpSource[0] === \"\\\\\" &&\n            // not a meta character\n            !includes(\n              [\n                \"d\",\n                \"D\",\n                \"s\",\n                \"S\",\n                \"t\",\n                \"r\",\n                \"n\",\n                \"t\",\n                \"0\",\n                \"c\",\n                \"b\",\n                \"B\",\n                \"f\",\n                \"v\",\n                \"w\",\n                \"W\",\n              ],\n              regExpSource[1],\n            )\n          ) {\n            // escaped meta Characters: /\\+/ /\\[/\n            // or redundant escaping: /\\a/\n            // without the escaping 
\"\\\"\n            return regExpSource[1];\n          } else {\n            return options.useSticky\n              ? addStickyFlag(currPattern)\n              : addStartOfInput(currPattern);\n          }\n        } else if (isFunction(currPattern)) {\n          hasCustom = true;\n          // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object\n          return { exec: currPattern };\n        } else if (typeof currPattern === \"object\") {\n          hasCustom = true;\n          // ICustomPattern\n          return currPattern;\n        } else if (typeof currPattern === \"string\") {\n          if (currPattern.length === 1) {\n            return currPattern;\n          } else {\n            const escapedRegExpString = currPattern.replace(\n              /[\\\\^$.*+?()[\\]{}|]/g,\n              \"\\\\$&\",\n            );\n            const wrappedRegExp = new RegExp(escapedRegExpString);\n            return options.useSticky\n              ? addStickyFlag(wrappedRegExp)\n              : addStartOfInput(wrappedRegExp);\n          }\n        } else {\n          throw Error(\"non exhaustive match\");\n        }\n      },\n    );\n  });\n\n  let patternIdxToType: number[];\n  let patternIdxToGroup: (string | undefined | false)[];\n  let patternIdxToLongerAltIdxArr: (number[] | undefined)[];\n  let patternIdxToPushMode: (string | undefined)[];\n  let patternIdxToPopMode: boolean[];\n  tracer(\"misc mapping\", () => {\n    patternIdxToType = map(\n      onlyRelevantTypes,\n      (currType) => currType.tokenTypeIdx!,\n    );\n\n    patternIdxToGroup = map(onlyRelevantTypes, (clazz: any) => {\n      const groupName = clazz.GROUP;\n      /* istanbul ignore next */\n      if (groupName === Lexer.SKIPPED) {\n        return undefined;\n      } else if (isString(groupName)) {\n        return groupName;\n      } else if (isUndefined(groupName)) {\n        return false;\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    });\n\n    patternIdxToLongerAltIdxArr = map(onlyRelevantTypes, (clazz: any) => {\n      const longerAltType = clazz.LONGER_ALT;\n\n      if (longerAltType) {\n        const longerAltIdxArr = isArray(longerAltType)\n          ? 
map(longerAltType, (type: any) => indexOf(onlyRelevantTypes, type))\n          : [indexOf(onlyRelevantTypes, longerAltType)];\n        return longerAltIdxArr;\n      }\n    });\n\n    patternIdxToPushMode = map(\n      onlyRelevantTypes,\n      (clazz: any) => clazz.PUSH_MODE,\n    );\n\n    patternIdxToPopMode = map(onlyRelevantTypes, (clazz: any) =>\n      has(clazz, \"POP_MODE\"),\n    );\n  });\n\n  let patternIdxToCanLineTerminator: boolean[];\n  tracer(\"Line Terminator Handling\", () => {\n    const lineTerminatorCharCodes = getCharCodes(\n      options.lineTerminatorCharacters!,\n    );\n    patternIdxToCanLineTerminator = map(onlyRelevantTypes, (tokType) => false);\n    if (options.positionTracking !== \"onlyOffset\") {\n      patternIdxToCanLineTerminator = map(onlyRelevantTypes, (tokType) => {\n        if (has(tokType, \"LINE_BREAKS\")) {\n          return !!tokType.LINE_BREAKS;\n        } else {\n          return (\n            checkLineBreaksIssues(tokType, lineTerminatorCharCodes) === false &&\n            canMatchCharCode(\n              lineTerminatorCharCodes,\n              tokType.PATTERN as RegExp | string,\n            )\n          );\n        }\n      });\n    }\n  });\n\n  let patternIdxToIsCustom: boolean[];\n  let patternIdxToShort: (number | false)[];\n  let emptyGroups!: { [groupName: string]: IToken[] };\n  let patternIdxToConfig!: IPatternConfig[];\n  tracer(\"Misc Mapping #2\", () => {\n    patternIdxToIsCustom = map(onlyRelevantTypes, isCustomPattern);\n    patternIdxToShort = map(allTransformedPatterns, isShortPattern);\n\n    emptyGroups = reduce(\n      onlyRelevantTypes,\n      (acc, clazz: any) => {\n        const groupName = clazz.GROUP;\n        if (isString(groupName) && !(groupName === Lexer.SKIPPED)) {\n          acc[groupName] = [];\n        }\n        return acc;\n      },\n      {} as { [groupName: string]: IToken[] },\n    );\n\n    patternIdxToConfig = map(\n      allTransformedPatterns,\n      (x, idx): IPatternConfig => {\n        return {\n          pattern: allTransformedPatterns[idx],\n          longerAlt: patternIdxToLongerAltIdxArr[idx],\n          canLineTerminator: patternIdxToCanLineTerminator[idx],\n          isCustom: patternIdxToIsCustom[idx],\n          short: patternIdxToShort[idx],\n          group: patternIdxToGroup[idx],\n          push: patternIdxToPushMode[idx],\n          pop: patternIdxToPopMode[idx],\n          tokenTypeIdx: patternIdxToType[idx],\n          tokenType: onlyRelevantTypes[idx],\n        };\n      },\n    );\n  });\n\n  let canBeOptimized = true;\n  let charCodeToPatternIdxToConfig: { [charCode: number]: IPatternConfig[] } =\n    [];\n\n  if (!options.safeMode) {\n    tracer(\"First Char Optimization\", () => {\n      charCodeToPatternIdxToConfig = reduce(\n        onlyRelevantTypes,\n        (result, currTokType, idx) => {\n          if (typeof currTokType.PATTERN === \"string\") {\n            const charCode = currTokType.PATTERN.charCodeAt(0);\n            const optimizedIdx = charCodeToOptimizedIndex(charCode);\n            addToMapOfArrays(result, optimizedIdx, patternIdxToConfig[idx]);\n          } else if (isArray(currTokType.START_CHARS_HINT)) {\n            let lastOptimizedIdx: number;\n            forEach(currTokType.START_CHARS_HINT, (charOrInt) => {\n              const charCode =\n                typeof charOrInt === \"string\"\n                  ? 
charOrInt.charCodeAt(0)\n                  : charOrInt;\n              const currOptimizedIdx = charCodeToOptimizedIndex(charCode);\n              // Avoid adding the config multiple times\n              /* istanbul ignore else */\n              // - Difficult to check this scenario effects as it is only a performance\n              //   optimization that does not change correctness\n              if (lastOptimizedIdx !== currOptimizedIdx) {\n                lastOptimizedIdx = currOptimizedIdx;\n                addToMapOfArrays(\n                  result,\n                  currOptimizedIdx,\n                  patternIdxToConfig[idx],\n                );\n              }\n            });\n          } else if (isRegExp(currTokType.PATTERN)) {\n            if (currTokType.PATTERN.unicode) {\n              canBeOptimized = false;\n              if (options.ensureOptimizations) {\n                PRINT_ERROR(\n                  `${failedOptimizationPrefixMsg}` +\n                    `\\tUnable to analyze < ${currTokType.PATTERN.toString()} > pattern.\\n` +\n                    \"\\tThe regexp unicode flag is not currently supported by the regexp-to-ast library.\\n\" +\n                    \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n                    \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNICODE_OPTIMIZE\",\n                );\n              }\n            } else {\n              const optimizedCodes = getOptimizedStartCodesIndices(\n                currTokType.PATTERN,\n                options.ensureOptimizations,\n              );\n              /* istanbul ignore if */\n              // start code will only be empty given an empty regExp or failure of regexp-to-ast library\n              // the first should be a different validation and the second cannot be tested.\n              if (isEmpty(optimizedCodes)) {\n                // we cannot understand what codes may start possible matches\n                // The optimization correctness requires knowing start codes for ALL patterns.\n                // Not actually sure this is an error, no debug message\n                canBeOptimized = false;\n              }\n              forEach(optimizedCodes, (code) => {\n                addToMapOfArrays(result, code, patternIdxToConfig[idx]);\n              });\n            }\n          } else {\n            if (options.ensureOptimizations) {\n              PRINT_ERROR(\n                `${failedOptimizationPrefixMsg}` +\n                  `\\tTokenType: <${currTokType.name}> is using a custom token pattern without providing  parameter.\\n` +\n                  \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n                  \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE\",\n              );\n            }\n            canBeOptimized = false;\n          }\n\n          return result;\n        },\n        [] as { [charCode: number]: IPatternConfig[] },\n      );\n    });\n  }\n\n  return {\n    emptyGroups: emptyGroups,\n    patternIdxToConfig: patternIdxToConfig,\n    charCodeToPatternIdxToConfig: charCodeToPatternIdxToConfig,\n    hasCustom: hasCustom,\n    canBeOptimized: canBeOptimized,\n  };\n}\n\nexport function validatePatterns(\n  tokenTypes: TokenType[],\n  validModesNames: string[],\n): ILexerDefinitionError[] {\n  let errors: ILexerDefinitionError[] = [];\n\n  const missingResult = findMissingPatterns(tokenTypes);\n  errors = 
errors.concat(missingResult.errors);\n\n  const invalidResult = findInvalidPatterns(missingResult.valid);\n  const validTokenTypes = invalidResult.valid;\n  errors = errors.concat(invalidResult.errors);\n\n  errors = errors.concat(validateRegExpPattern(validTokenTypes));\n\n  errors = errors.concat(findInvalidGroupType(validTokenTypes));\n\n  errors = errors.concat(\n    findModesThatDoNotExist(validTokenTypes, validModesNames),\n  );\n\n  errors = errors.concat(findUnreachablePatterns(validTokenTypes));\n\n  return errors;\n}\n\nfunction validateRegExpPattern(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  let errors: ILexerDefinitionError[] = [];\n  const withRegExpPatterns = filter(tokenTypes, (currTokType) =>\n    isRegExp(currTokType[PATTERN]),\n  );\n\n  errors = errors.concat(findEndOfInputAnchor(withRegExpPatterns));\n\n  errors = errors.concat(findStartOfInputAnchor(withRegExpPatterns));\n\n  errors = errors.concat(findUnsupportedFlags(withRegExpPatterns));\n\n  errors = errors.concat(findDuplicatePatterns(withRegExpPatterns));\n\n  errors = errors.concat(findEmptyMatchRegExps(withRegExpPatterns));\n\n  return errors;\n}\n\nexport interface ILexerFilterResult {\n  errors: ILexerDefinitionError[];\n  valid: TokenType[];\n}\n\nexport function findMissingPatterns(\n  tokenTypes: TokenType[],\n): ILexerFilterResult {\n  const tokenTypesWithMissingPattern = filter(tokenTypes, (currType) => {\n    return !has(currType, PATTERN);\n  });\n\n  const errors = map(tokenTypesWithMissingPattern, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- missing static 'PATTERN' property\",\n      type: LexerDefinitionErrorType.MISSING_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  const valid = difference(tokenTypes, tokenTypesWithMissingPattern);\n  return { errors, valid };\n}\n\nexport function findInvalidPatterns(\n  tokenTypes: TokenType[],\n): ILexerFilterResult {\n  const tokenTypesWithInvalidPattern = filter(tokenTypes, (currType) => {\n    const pattern = currType[PATTERN];\n    return (\n      !isRegExp(pattern) &&\n      !isFunction(pattern) &&\n      !has(pattern, \"exec\") &&\n      !isString(pattern)\n    );\n  });\n\n  const errors = map(tokenTypesWithInvalidPattern, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' can only be a RegExp, a\" +\n        \" Function matching the {CustomPatternMatcherFunc} type or an Object matching the {ICustomPattern} interface.\",\n      type: LexerDefinitionErrorType.INVALID_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  const valid = difference(tokenTypes, tokenTypesWithInvalidPattern);\n  return { errors, valid };\n}\n\nconst end_of_input = /[^\\\\][$]/;\n\nexport function findEndOfInputAnchor(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  class EndAnchorFinder extends BaseRegExpVisitor {\n    found = false;\n\n    visitEndAnchor(node: unknown) {\n      this.found = true;\n    }\n  }\n\n  const invalidRegex = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN;\n\n    try {\n      const regexpAst = getRegExpAst(pattern as RegExp);\n      const endAnchorVisitor = new EndAnchorFinder();\n      endAnchorVisitor.visit(regexpAst);\n\n      return endAnchorVisitor.found;\n    } catch (e) {\n      // old behavior in case of runtime exceptions with regexp-to-ast.\n      /* istanbul ignore next - cannot ensure an error in regexp-to-ast*/\n   
   return end_of_input.test((pattern as RegExp).source);\n    }\n  });\n\n  const errors = map(invalidRegex, (currType) => {\n    return {\n      message:\n        \"Unexpected RegExp Anchor Error:\\n\" +\n        \"\\tToken Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' cannot contain end of input anchor '$'\\n\" +\n        \"\\tSee chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.EOI_ANCHOR_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findEmptyMatchRegExps(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const matchesEmptyString = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN as RegExp;\n    return pattern.test(\"\");\n  });\n\n  const errors = map(matchesEmptyString, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' must not match an empty string\",\n      type: LexerDefinitionErrorType.EMPTY_MATCH_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nconst start_of_input = /[^\\\\[][\\^]|^\\^/;\n\nexport function findStartOfInputAnchor(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  class StartAnchorFinder extends BaseRegExpVisitor {\n    found = false;\n\n    visitStartAnchor(node: unknown) {\n      this.found = true;\n    }\n  }\n\n  const invalidRegex = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN as RegExp;\n    try {\n      const regexpAst = getRegExpAst(pattern);\n      const startAnchorVisitor = new StartAnchorFinder();\n      startAnchorVisitor.visit(regexpAst);\n\n      return startAnchorVisitor.found;\n    } catch (e) {\n      // old behavior in case of runtime exceptions with regexp-to-ast.\n      /* istanbul ignore next - cannot ensure an error in regexp-to-ast*/\n      return start_of_input.test(pattern.source);\n    }\n  });\n\n  const errors = map(invalidRegex, (currType) => {\n    return {\n      message:\n        \"Unexpected RegExp Anchor Error:\\n\" +\n        \"\\tToken Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' cannot contain start of input anchor '^'\\n\" +\n        \"\\tSee https://chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.SOI_ANCHOR_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findUnsupportedFlags(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const invalidFlags = filter(tokenTypes, (currType) => {\n    const pattern = currType[PATTERN];\n    return pattern instanceof RegExp && (pattern.multiline || pattern.global);\n  });\n\n  const errors = map(invalidFlags, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' may NOT contain global('g') or multiline('m')\",\n      type: LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\n// This can only test for identical duplicate RegExps, not semantically equivalent ones.\nexport function findDuplicatePatterns(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const found: TokenType[] = [];\n  let identicalPatterns = map(tokenTypes, (outerType: any) => {\n    return reduce(\n      tokenTypes,\n      (result, innerType) => {\n        if (\n         
 outerType.PATTERN.source === (innerType.PATTERN as RegExp).source &&\n          !includes(found, innerType) &&\n          innerType.PATTERN !== Lexer.NA\n        ) {\n          // this avoids duplicates in the result, each Token Type may only appear in one \"set\"\n          // in essence we are creating Equivalence classes on equality relation.\n          found.push(innerType);\n          result.push(innerType);\n          return result;\n        }\n        return result;\n      },\n      [] as TokenType[],\n    );\n  });\n\n  identicalPatterns = compact(identicalPatterns);\n\n  const duplicatePatterns = filter(identicalPatterns, (currIdenticalSet) => {\n    return currIdenticalSet.length > 1;\n  });\n\n  const errors = map(duplicatePatterns, (setOfIdentical: any) => {\n    const tokenTypeNames = map(setOfIdentical, (currType: any) => {\n      return currType.name;\n    });\n\n    const dupPatternSrc = (first(setOfIdentical)).PATTERN;\n    return {\n      message:\n        `The same RegExp pattern ->${dupPatternSrc}<-` +\n        `has been used in all of the following Token Types: ${tokenTypeNames.join(\n          \", \",\n        )} <-`,\n      type: LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,\n      tokenTypes: setOfIdentical,\n    };\n  });\n\n  return errors;\n}\n\nexport function findInvalidGroupType(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const invalidTypes = filter(tokenTypes, (clazz: any) => {\n    if (!has(clazz, \"GROUP\")) {\n      return false;\n    }\n    const group = clazz.GROUP;\n\n    return group !== Lexer.SKIPPED && group !== Lexer.NA && !isString(group);\n  });\n\n  const errors = map(invalidTypes, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'GROUP' can only be Lexer.SKIPPED/Lexer.NA/A String\",\n      type: LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findModesThatDoNotExist(\n  tokenTypes: TokenType[],\n  validModes: string[],\n): ILexerDefinitionError[] {\n  const invalidModes = filter(tokenTypes, (clazz: any) => {\n    return (\n      clazz.PUSH_MODE !== undefined && !includes(validModes, clazz.PUSH_MODE)\n    );\n  });\n\n  const errors = map(invalidModes, (tokType) => {\n    const msg =\n      `Token Type: ->${tokType.name}<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->${tokType.PUSH_MODE}<-` +\n      `which does not exist`;\n    return {\n      message: msg,\n      type: LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST,\n      tokenTypes: [tokType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findUnreachablePatterns(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const errors: ILexerDefinitionError[] = [];\n\n  const canBeTested = reduce(\n    tokenTypes,\n    (result, tokType, idx) => {\n      const pattern = tokType.PATTERN;\n\n      if (pattern === Lexer.NA) {\n        return result;\n      }\n\n      // a more comprehensive validation for all forms of regExps would require\n      // deeper regExp analysis capabilities\n      if (isString(pattern)) {\n        result.push({ str: pattern, idx, tokenType: tokType });\n      } else if (isRegExp(pattern) && noMetaChar(pattern)) {\n        result.push({ str: pattern.source, idx, tokenType: tokType });\n      }\n      return result;\n    },\n    [] as { str: string; idx: number; tokenType: TokenType }[],\n  );\n\n  forEach(tokenTypes, (tokType, testIdx) => {\n    
forEach(canBeTested, ({ str, idx, tokenType }) => {\n      if (testIdx < idx && testTokenType(str, tokType.PATTERN)) {\n        const msg =\n          `Token: ->${tokenType.name}<- can never be matched.\\n` +\n          `Because it appears AFTER the Token Type ->${tokType.name}<-` +\n          `in the lexer's definition.\\n` +\n          `See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNREACHABLE`;\n        errors.push({\n          message: msg,\n          type: LexerDefinitionErrorType.UNREACHABLE_PATTERN,\n          tokenTypes: [tokType, tokenType],\n        });\n      }\n    });\n  });\n\n  return errors;\n}\n\nfunction testTokenType(str: string, pattern: any): boolean {\n  /* istanbul ignore else */\n  if (isRegExp(pattern)) {\n    const regExpArray = pattern.exec(str);\n    return regExpArray !== null && regExpArray.index === 0;\n  } else if (isFunction(pattern)) {\n    // maintain the API of custom patterns\n    return pattern(str, 0, [], {});\n  } else if (has(pattern, \"exec\")) {\n    // maintain the API of custom patterns\n    return pattern.exec(str, 0, [], {});\n  } else if (typeof pattern === \"string\") {\n    return pattern === str;\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nfunction noMetaChar(regExp: RegExp): boolean {\n  //https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp\n  const metaChars = [\n    \".\",\n    \"\\\\\",\n    \"[\",\n    \"]\",\n    \"|\",\n    \"^\",\n    \"$\",\n    \"(\",\n    \")\",\n    \"?\",\n    \"*\",\n    \"+\",\n    \"{\",\n  ];\n  return (\n    find(metaChars, (char) => regExp.source.indexOf(char) !== -1) === undefined\n  );\n}\n\nexport function addStartOfInput(pattern: RegExp): RegExp {\n  const flags = pattern.ignoreCase ? \"i\" : \"\";\n  // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.\n  // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)\n  return new RegExp(`^(?:${pattern.source})`, flags);\n}\n\nexport function addStickyFlag(pattern: RegExp): RegExp {\n  const flags = pattern.ignoreCase ? 
\"iy\" : \"y\";\n  // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.\n  // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)\n  return new RegExp(`${pattern.source}`, flags);\n}\n\nexport function performRuntimeChecks(\n  lexerDefinition: IMultiModeLexerDefinition,\n  trackLines: boolean,\n  lineTerminatorCharacters: (number | string)[],\n): ILexerDefinitionError[] {\n  const errors: ILexerDefinitionError[] = [];\n\n  // some run time checks to help the end users.\n  if (!has(lexerDefinition, DEFAULT_MODE)) {\n    errors.push({\n      message:\n        \"A MultiMode Lexer cannot be initialized without a <\" +\n        DEFAULT_MODE +\n        \"> property in its definition\\n\",\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE,\n    });\n  }\n  if (!has(lexerDefinition, MODES)) {\n    errors.push({\n      message:\n        \"A MultiMode Lexer cannot be initialized without a <\" +\n        MODES +\n        \"> property in its definition\\n\",\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY,\n    });\n  }\n\n  if (\n    has(lexerDefinition, MODES) &&\n    has(lexerDefinition, DEFAULT_MODE) &&\n    !has(lexerDefinition.modes, lexerDefinition.defaultMode)\n  ) {\n    errors.push({\n      message:\n        `A MultiMode Lexer cannot be initialized with a ${DEFAULT_MODE}: <${lexerDefinition.defaultMode}>` +\n        `which does not exist\\n`,\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST,\n    });\n  }\n\n  if (has(lexerDefinition, MODES)) {\n    forEach(lexerDefinition.modes, (currModeValue, currModeName) => {\n      forEach(currModeValue, (currTokType, currIdx) => {\n        if (isUndefined(currTokType)) {\n          errors.push({\n            message:\n              `A Lexer cannot be initialized using an undefined Token Type. Mode:` +\n              `<${currModeName}> at index: <${currIdx}>\\n`,\n            type: LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED,\n          });\n        } else if (has(currTokType, \"LONGER_ALT\")) {\n          const longerAlt = isArray(currTokType.LONGER_ALT)\n            ? 
currTokType.LONGER_ALT\n            : [currTokType.LONGER_ALT];\n          forEach(longerAlt, (currLongerAlt) => {\n            if (\n              !isUndefined(currLongerAlt) &&\n              !includes(currModeValue, currLongerAlt)\n            ) {\n              errors.push({\n                message: `A MultiMode Lexer cannot be initialized with a longer_alt <${currLongerAlt.name}> on token <${currTokType.name}> outside of mode <${currModeName}>\\n`,\n                type: LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE,\n              });\n            }\n          });\n        }\n      });\n    });\n  }\n\n  return errors;\n}\n\nexport function performWarningRuntimeChecks(\n  lexerDefinition: IMultiModeLexerDefinition,\n  trackLines: boolean,\n  lineTerminatorCharacters: (number | string)[],\n): ILexerDefinitionError[] {\n  const warnings = [];\n  let hasAnyLineBreak = false;\n  const allTokenTypes = compact(flatten(values(lexerDefinition.modes)));\n\n  const concreteTokenTypes = reject(\n    allTokenTypes,\n    (currType) => currType[PATTERN] === Lexer.NA,\n  );\n  const terminatorCharCodes = getCharCodes(lineTerminatorCharacters);\n  if (trackLines) {\n    forEach(concreteTokenTypes, (tokType) => {\n      const currIssue = checkLineBreaksIssues(tokType, terminatorCharCodes);\n      if (currIssue !== false) {\n        const message = buildLineBreakIssueMessage(tokType, currIssue);\n        const warningDescriptor = {\n          message,\n          type: currIssue.issue,\n          tokenType: tokType,\n        };\n        warnings.push(warningDescriptor);\n      } else {\n        // we don't want to attempt to scan if the user explicitly specified the line_breaks option.\n        if (has(tokType, \"LINE_BREAKS\")) {\n          if (tokType.LINE_BREAKS === true) {\n            hasAnyLineBreak = true;\n          }\n        } else {\n          if (\n            canMatchCharCode(terminatorCharCodes, tokType.PATTERN as RegExp)\n          ) {\n            hasAnyLineBreak = true;\n          }\n        }\n      }\n    });\n  }\n\n  if (trackLines && !hasAnyLineBreak) {\n    warnings.push({\n      message:\n        \"Warning: No LINE_BREAKS Found.\\n\" +\n        \"\\tThis Lexer has been defined to track line and column information,\\n\" +\n        \"\\tBut none of the Token Types can be identified as matching a line terminator.\\n\" +\n        \"\\tSee https://chevrotain.io/docs/guide/resolving_lexer_errors.html#LINE_BREAKS \\n\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.NO_LINE_BREAKS_FLAGS,\n    });\n  }\n  return warnings;\n}\n\nexport function cloneEmptyGroups(emptyGroups: {\n  [groupName: string]: IToken;\n}): { [groupName: string]: IToken } {\n  const clonedResult: any = {};\n  const groupKeys = keys(emptyGroups);\n\n  forEach(groupKeys, (currKey) => {\n    const currGroupValue = emptyGroups[currKey];\n\n    /* istanbul ignore else */\n    if (isArray(currGroupValue)) {\n      clonedResult[currKey] = [];\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  });\n\n  return clonedResult;\n}\n\n// TODO: refactor to avoid duplication\nexport function isCustomPattern(tokenType: TokenType): boolean {\n  const pattern = tokenType.PATTERN;\n  /* istanbul ignore else */\n  if (isRegExp(pattern)) {\n    return false;\n  } else if (isFunction(pattern)) {\n    // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object\n    return true;\n  } else if (has(pattern, 
\"exec\")) {\n    // ICustomPattern\n    return true;\n  } else if (isString(pattern)) {\n    return false;\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nexport function isShortPattern(pattern: any): number | false {\n  if (isString(pattern) && pattern.length === 1) {\n    return pattern.charCodeAt(0);\n  } else {\n    return false;\n  }\n}\n\n/**\n * Faster than using a RegExp for default newline detection during lexing.\n */\nexport const LineTerminatorOptimizedTester: ILineTerminatorsTester = {\n  // implements /\\n|\\r\\n?/g.test\n  test: function (text) {\n    const len = text.length;\n    for (let i = this.lastIndex; i < len; i++) {\n      const c = text.charCodeAt(i);\n      if (c === 10) {\n        this.lastIndex = i + 1;\n        return true;\n      } else if (c === 13) {\n        if (text.charCodeAt(i + 1) === 10) {\n          this.lastIndex = i + 2;\n        } else {\n          this.lastIndex = i + 1;\n        }\n        return true;\n      }\n    }\n    return false;\n  },\n\n  lastIndex: 0,\n};\n\nfunction checkLineBreaksIssues(\n  tokType: TokenType,\n  lineTerminatorCharCodes: number[],\n):\n  | {\n      issue:\n        | LexerDefinitionErrorType.IDENTIFY_TERMINATOR\n        | LexerDefinitionErrorType.CUSTOM_LINE_BREAK;\n      errMsg?: string;\n    }\n  | false {\n  if (has(tokType, \"LINE_BREAKS\")) {\n    // if the user explicitly declared the line_breaks option we will respect their choice\n    // and assume it is correct.\n    return false;\n  } else {\n    /* istanbul ignore else */\n    if (isRegExp(tokType.PATTERN)) {\n      try {\n        // TODO: why is the casting suddenly needed?\n        canMatchCharCode(lineTerminatorCharCodes, tokType.PATTERN as RegExp);\n      } catch (e) {\n        /* istanbul ignore next - to test this we would have to mock  to throw an error */\n        return {\n          issue: LexerDefinitionErrorType.IDENTIFY_TERMINATOR,\n          errMsg: (e as Error).message,\n        };\n      }\n      return false;\n    } else if (isString(tokType.PATTERN)) {\n      // string literal patterns can always be analyzed to detect line terminator usage\n      return false;\n    } else if (isCustomPattern(tokType)) {\n      // custom token types\n      return { issue: LexerDefinitionErrorType.CUSTOM_LINE_BREAK };\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  }\n}\n\nexport function buildLineBreakIssueMessage(\n  tokType: TokenType,\n  details: {\n    issue:\n      | LexerDefinitionErrorType.IDENTIFY_TERMINATOR\n      | LexerDefinitionErrorType.CUSTOM_LINE_BREAK;\n    errMsg?: string;\n  },\n): string {\n  /* istanbul ignore else */\n  if (details.issue === LexerDefinitionErrorType.IDENTIFY_TERMINATOR) {\n    return (\n      \"Warning: unable to identify line terminator usage in pattern.\\n\" +\n      `\\tThe problem is in the <${tokType.name}> Token Type\\n` +\n      `\\t Root cause: ${details.errMsg}.\\n` +\n      \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#IDENTIFY_TERMINATOR\"\n    );\n  } else if (details.issue === LexerDefinitionErrorType.CUSTOM_LINE_BREAK) {\n    return (\n      \"Warning: A Custom Token Pattern should specify the  option.\\n\" +\n      `\\tThe problem is in the <${tokType.name}> Token Type\\n` +\n      \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_LINE_BREAK\"\n    );\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nfunction getCharCodes(charsOrCodes: (number | string)[]): number[] {\n  
const charCodes = map(charsOrCodes, (numOrString) => {\n    if (isString(numOrString)) {\n      return numOrString.charCodeAt(0);\n    } else {\n      return numOrString;\n    }\n  });\n\n  return charCodes;\n}\n\nfunction addToMapOfArrays(\n  map: Record,\n  key: number,\n  value: T,\n): void {\n  if (map[key] === undefined) {\n    map[key] = [value];\n  } else {\n    map[key].push(value);\n  }\n}\n\nexport const minOptimizationVal = 256;\n\n/**\n * We are mapping charCode above ASCI (256) into buckets each in the size of 256.\n * This is because ASCI are the most common start chars so each one of those will get its own\n * possible token configs vector.\n *\n * Tokens starting with charCodes \"above\" ASCI are uncommon, so we can \"afford\"\n * to place these into buckets of possible token configs, What we gain from\n * this is avoiding the case of creating an optimization 'charCodeToPatternIdxToConfig'\n * which would contain 10,000+ arrays of small size (e.g unicode Identifiers scenario).\n * Our 'charCodeToPatternIdxToConfig' max size will now be:\n * 256 + (2^16 / 2^8) - 1 === 511\n *\n * note the hack for fast division integer part extraction\n * See: https://stackoverflow.com/a/4228528\n */\nlet charCodeToOptimizedIdxMap: number[] = [];\nexport function charCodeToOptimizedIndex(charCode: number): number {\n  return charCode < minOptimizationVal\n    ? charCode\n    : charCodeToOptimizedIdxMap[charCode];\n}\n\n/**\n * This is a compromise between cold start / hot running performance\n * Creating this array takes ~3ms on a modern machine,\n * But if we perform the computation at runtime as needed the CSS Lexer benchmark\n * performance degrades by ~10%\n *\n * TODO: Perhaps it should be lazy initialized only if a charCode > 255 is used.\n */\nfunction initCharCodeToOptimizedIndexMap() {\n  if (isEmpty(charCodeToOptimizedIdxMap)) {\n    charCodeToOptimizedIdxMap = new Array(65536);\n    for (let i = 0; i < 65536; i++) {\n      charCodeToOptimizedIdxMap[i] = i > 255 ? 
255 + ~~(i / 255) : i;\n    }\n  }\n}\n", "import {\n  clone,\n  compact,\n  difference,\n  flatten,\n  forEach,\n  has,\n  includes,\n  isArray,\n  isEmpty,\n  map,\n} from \"lodash-es\";\nimport { IToken, TokenType } from \"@chevrotain/types\";\n\nexport function tokenStructuredMatcher(\n  tokInstance: IToken,\n  tokConstructor: TokenType,\n) {\n  const instanceType = tokInstance.tokenTypeIdx;\n  if (instanceType === tokConstructor.tokenTypeIdx) {\n    return true;\n  } else {\n    return (\n      tokConstructor.isParent === true &&\n      tokConstructor.categoryMatchesMap![instanceType] === true\n    );\n  }\n}\n\n// Optimized tokenMatcher in case our grammar does not use token categories\n// Being so tiny it is much more likely to be in-lined and this avoid the function call overhead\nexport function tokenStructuredMatcherNoCategories(\n  token: IToken,\n  tokType: TokenType,\n) {\n  return token.tokenTypeIdx === tokType.tokenTypeIdx;\n}\n\nexport let tokenShortNameIdx = 1;\nexport const tokenIdxToClass: { [tokenIdx: number]: TokenType } = {};\n\nexport function augmentTokenTypes(tokenTypes: TokenType[]): void {\n  // collect the parent Token Types as well.\n  const tokenTypesAndParents = expandCategories(tokenTypes);\n\n  // add required tokenType and categoryMatches properties\n  assignTokenDefaultProps(tokenTypesAndParents);\n\n  // fill up the categoryMatches\n  assignCategoriesMapProp(tokenTypesAndParents);\n  assignCategoriesTokensProp(tokenTypesAndParents);\n\n  forEach(tokenTypesAndParents, (tokType) => {\n    tokType.isParent = tokType.categoryMatches!.length > 0;\n  });\n}\n\nexport function expandCategories(tokenTypes: TokenType[]): TokenType[] {\n  let result = clone(tokenTypes);\n\n  let categories = tokenTypes;\n  let searching = true;\n  while (searching) {\n    categories = compact(\n      flatten(map(categories, (currTokType) => currTokType.CATEGORIES)),\n    );\n\n    const newCategories = difference(categories, result);\n\n    result = result.concat(newCategories);\n\n    if (isEmpty(newCategories)) {\n      searching = false;\n    } else {\n      categories = newCategories;\n    }\n  }\n  return result;\n}\n\nexport function assignTokenDefaultProps(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    if (!hasShortKeyProperty(currTokType)) {\n      tokenIdxToClass[tokenShortNameIdx] = currTokType;\n      (currTokType).tokenTypeIdx = tokenShortNameIdx++;\n    }\n\n    // CATEGORIES? 
: TokenType | TokenType[]\n    if (\n      hasCategoriesProperty(currTokType) &&\n      !isArray(currTokType.CATEGORIES)\n      // &&\n      // !isUndefined(currTokType.CATEGORIES.PATTERN)\n    ) {\n      currTokType.CATEGORIES = [currTokType.CATEGORIES as unknown as TokenType];\n    }\n\n    if (!hasCategoriesProperty(currTokType)) {\n      currTokType.CATEGORIES = [];\n    }\n\n    if (!hasExtendingTokensTypesProperty(currTokType)) {\n      currTokType.categoryMatches = [];\n    }\n\n    if (!hasExtendingTokensTypesMapProperty(currTokType)) {\n      currTokType.categoryMatchesMap = {};\n    }\n  });\n}\n\nexport function assignCategoriesTokensProp(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    // avoid duplications\n    currTokType.categoryMatches = [];\n    forEach(currTokType.categoryMatchesMap!, (val, key) => {\n      currTokType.categoryMatches!.push(\n        tokenIdxToClass[key as unknown as number].tokenTypeIdx!,\n      );\n    });\n  });\n}\n\nexport function assignCategoriesMapProp(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    singleAssignCategoriesToksMap([], currTokType);\n  });\n}\n\nexport function singleAssignCategoriesToksMap(\n  path: TokenType[],\n  nextNode: TokenType,\n): void {\n  forEach(path, (pathNode) => {\n    nextNode.categoryMatchesMap![pathNode.tokenTypeIdx!] = true;\n  });\n\n  forEach(nextNode.CATEGORIES, (nextCategory) => {\n    const newPath = path.concat(nextNode);\n    // avoids infinite loops due to cyclic categories.\n    if (!includes(newPath, nextCategory)) {\n      singleAssignCategoriesToksMap(newPath, nextCategory);\n    }\n  });\n}\n\nexport function hasShortKeyProperty(tokType: TokenType): boolean {\n  return has(tokType, \"tokenTypeIdx\");\n}\n\nexport function hasCategoriesProperty(tokType: TokenType): boolean {\n  return has(tokType, \"CATEGORIES\");\n}\n\nexport function hasExtendingTokensTypesProperty(tokType: TokenType): boolean {\n  return has(tokType, \"categoryMatches\");\n}\n\nexport function hasExtendingTokensTypesMapProperty(\n  tokType: TokenType,\n): boolean {\n  return has(tokType, \"categoryMatchesMap\");\n}\n\nexport function isTokenType(tokType: TokenType): boolean {\n  return has(tokType, \"tokenTypeIdx\");\n}\n", "import { ILexerErrorMessageProvider, IToken } from \"@chevrotain/types\";\n\nexport const defaultLexerErrorProvider: ILexerErrorMessageProvider = {\n  buildUnableToPopLexerModeMessage(token: IToken): string {\n    return `Unable to pop Lexer Mode after encountering Token ->${token.image}<- The Mode Stack is empty`;\n  },\n\n  buildUnexpectedCharactersMessage(\n    fullText: string,\n    startOffset: number,\n    length: number,\n    line?: number,\n    column?: number,\n  ): string {\n    return (\n      `unexpected character: ->${fullText.charAt(\n        startOffset,\n      )}<- at offset: ${startOffset},` + ` skipped ${length} characters.`\n    );\n  },\n};\n", "import {\n  analyzeTokenTypes,\n  charCodeToOptimizedIndex,\n  cloneEmptyGroups,\n  DEFAULT_MODE,\n  IAnalyzeResult,\n  IPatternConfig,\n  LineTerminatorOptimizedTester,\n  performRuntimeChecks,\n  performWarningRuntimeChecks,\n  SUPPORT_STICKY,\n  validatePatterns,\n} from \"./lexer.js\";\nimport {\n  assign,\n  clone,\n  forEach,\n  identity,\n  isArray,\n  isEmpty,\n  isUndefined,\n  keys,\n  last,\n  map,\n  noop,\n  reduce,\n  reject,\n} from \"lodash-es\";\nimport { PRINT_WARNING, timer, toFastProperties } from \"@chevrotain/utils\";\nimport { augmentTokenTypes } from 
\"./tokens.js\";\nimport {\n  CustomPatternMatcherFunc,\n  CustomPatternMatcherReturn,\n  ILexerConfig,\n  ILexerDefinitionError,\n  ILexingError,\n  IMultiModeLexerDefinition,\n  IToken,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { defaultLexerErrorProvider } from \"./lexer_errors_public.js\";\nimport { clearRegExpParserCache } from \"./reg_exp_parser.js\";\n\nexport interface ILexingResult {\n  tokens: IToken[];\n  groups: { [groupName: string]: IToken[] };\n  errors: ILexingError[];\n}\n\nexport enum LexerDefinitionErrorType {\n  MISSING_PATTERN,\n  INVALID_PATTERN,\n  EOI_ANCHOR_FOUND,\n  UNSUPPORTED_FLAGS_FOUND,\n  DUPLICATE_PATTERNS_FOUND,\n  INVALID_GROUP_TYPE_FOUND,\n  PUSH_MODE_DOES_NOT_EXIST,\n  MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE,\n  MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY,\n  MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST,\n  LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED,\n  SOI_ANCHOR_FOUND,\n  EMPTY_MATCH_PATTERN,\n  NO_LINE_BREAKS_FLAGS,\n  UNREACHABLE_PATTERN,\n  IDENTIFY_TERMINATOR,\n  CUSTOM_LINE_BREAK,\n  MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE,\n}\n\nexport interface IRegExpExec {\n  exec: CustomPatternMatcherFunc;\n}\n\nconst DEFAULT_LEXER_CONFIG: Required = {\n  deferDefinitionErrorsHandling: false,\n  positionTracking: \"full\",\n  lineTerminatorsPattern: /\\n|\\r\\n?/g,\n  lineTerminatorCharacters: [\"\\n\", \"\\r\"],\n  ensureOptimizations: false,\n  safeMode: false,\n  errorMessageProvider: defaultLexerErrorProvider,\n  traceInitPerf: false,\n  skipValidations: false,\n  recoveryEnabled: true,\n};\n\nObject.freeze(DEFAULT_LEXER_CONFIG);\n\nexport class Lexer {\n  public static SKIPPED =\n    \"This marks a skipped Token pattern, this means each token identified by it will\" +\n    \"be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.\";\n\n  public static NA = /NOT_APPLICABLE/;\n  public lexerDefinitionErrors: ILexerDefinitionError[] = [];\n  public lexerDefinitionWarning: ILexerDefinitionError[] = [];\n\n  protected patternIdxToConfig: Record = {};\n  protected charCodeToPatternIdxToConfig: {\n    [modeName: string]: { [charCode: number]: IPatternConfig[] };\n  } = {};\n\n  protected modes: string[] = [];\n  protected defaultMode!: string;\n  protected emptyGroups: { [groupName: string]: IToken } = {};\n\n  private config: Required;\n  private trackStartLines: boolean = true;\n  private trackEndLines: boolean = true;\n  private hasCustom: boolean = false;\n  private canModeBeOptimized: Record = {};\n\n  private traceInitPerf!: boolean | number;\n  private traceInitMaxIdent!: number;\n  private traceInitIndent: number;\n\n  constructor(\n    protected lexerDefinition: TokenType[] | IMultiModeLexerDefinition,\n    config: ILexerConfig = DEFAULT_LEXER_CONFIG,\n  ) {\n    if (typeof config === \"boolean\") {\n      throw Error(\n        \"The second argument to the Lexer constructor is now an ILexerConfig Object.\\n\" +\n          \"a boolean 2nd argument is no longer supported\",\n      );\n    }\n\n    // todo: defaults func?\n    this.config = assign({}, DEFAULT_LEXER_CONFIG, config) as any;\n\n    const traceInitVal = this.config.traceInitPerf;\n    if (traceInitVal === true) {\n      this.traceInitMaxIdent = Infinity;\n      this.traceInitPerf = true;\n    } else if (typeof traceInitVal === \"number\") {\n      this.traceInitMaxIdent = traceInitVal;\n      this.traceInitPerf = true;\n    }\n    this.traceInitIndent = -1;\n\n    this.TRACE_INIT(\"Lexer Constructor\", () => {\n      let 
actualDefinition!: IMultiModeLexerDefinition;\n      let hasOnlySingleMode = true;\n      this.TRACE_INIT(\"Lexer Config handling\", () => {\n        if (\n          this.config.lineTerminatorsPattern ===\n          DEFAULT_LEXER_CONFIG.lineTerminatorsPattern\n        ) {\n          // optimized built-in implementation for the defaults definition of lineTerminators\n          this.config.lineTerminatorsPattern = LineTerminatorOptimizedTester;\n        } else {\n          if (\n            this.config.lineTerminatorCharacters ===\n            DEFAULT_LEXER_CONFIG.lineTerminatorCharacters\n          ) {\n            throw Error(\n              \"Error: Missing  property on the Lexer config.\\n\" +\n                \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#MISSING_LINE_TERM_CHARS\",\n            );\n          }\n        }\n\n        if (config.safeMode && config.ensureOptimizations) {\n          throw Error(\n            '\"safeMode\" and \"ensureOptimizations\" flags are mutually exclusive.',\n          );\n        }\n\n        this.trackStartLines = /full|onlyStart/i.test(\n          this.config.positionTracking,\n        );\n        this.trackEndLines = /full/i.test(this.config.positionTracking);\n\n        // Convert SingleModeLexerDefinition into a IMultiModeLexerDefinition.\n        if (isArray(lexerDefinition)) {\n          actualDefinition = {\n            modes: { defaultMode: clone(lexerDefinition) },\n            defaultMode: DEFAULT_MODE,\n          };\n        } else {\n          // no conversion needed, input should already be a IMultiModeLexerDefinition\n          hasOnlySingleMode = false;\n          actualDefinition = clone(lexerDefinition);\n        }\n      });\n\n      if (this.config.skipValidations === false) {\n        this.TRACE_INIT(\"performRuntimeChecks\", () => {\n          this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat(\n            performRuntimeChecks(\n              actualDefinition,\n              this.trackStartLines,\n              this.config.lineTerminatorCharacters,\n            ),\n          );\n        });\n\n        this.TRACE_INIT(\"performWarningRuntimeChecks\", () => {\n          this.lexerDefinitionWarning = this.lexerDefinitionWarning.concat(\n            performWarningRuntimeChecks(\n              actualDefinition,\n              this.trackStartLines,\n              this.config.lineTerminatorCharacters,\n            ),\n          );\n        });\n      }\n\n      // for extra robustness to avoid throwing an none informative error message\n      actualDefinition.modes = actualDefinition.modes\n        ? 
actualDefinition.modes\n        : {};\n\n      // an error of undefined TokenTypes will be detected in \"performRuntimeChecks\" above.\n      // this transformation is to increase robustness in the case of partially invalid lexer definition.\n      forEach(actualDefinition.modes, (currModeValue, currModeName) => {\n        actualDefinition.modes[currModeName] = reject(\n          currModeValue,\n          (currTokType) => isUndefined(currTokType),\n        );\n      });\n\n      const allModeNames = keys(actualDefinition.modes);\n\n      forEach(\n        actualDefinition.modes,\n        (currModDef: TokenType[], currModName) => {\n          this.TRACE_INIT(`Mode: <${currModName}> processing`, () => {\n            this.modes.push(currModName);\n\n            if (this.config.skipValidations === false) {\n              this.TRACE_INIT(`validatePatterns`, () => {\n                this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat(\n                  validatePatterns(currModDef, allModeNames),\n                );\n              });\n            }\n\n            // If definition errors were encountered, the analysis phase may fail unexpectedly/\n            // Considering a lexer with definition errors may never be used, there is no point\n            // to performing the analysis anyhow...\n            if (isEmpty(this.lexerDefinitionErrors)) {\n              augmentTokenTypes(currModDef);\n\n              let currAnalyzeResult!: IAnalyzeResult;\n              this.TRACE_INIT(`analyzeTokenTypes`, () => {\n                currAnalyzeResult = analyzeTokenTypes(currModDef, {\n                  lineTerminatorCharacters:\n                    this.config.lineTerminatorCharacters,\n                  positionTracking: config.positionTracking,\n                  ensureOptimizations: config.ensureOptimizations,\n                  safeMode: config.safeMode,\n                  tracer: this.TRACE_INIT,\n                });\n              });\n\n              this.patternIdxToConfig[currModName] =\n                currAnalyzeResult.patternIdxToConfig;\n\n              this.charCodeToPatternIdxToConfig[currModName] =\n                currAnalyzeResult.charCodeToPatternIdxToConfig;\n\n              this.emptyGroups = assign(\n                {},\n                this.emptyGroups,\n                currAnalyzeResult.emptyGroups,\n              ) as any;\n\n              this.hasCustom = currAnalyzeResult.hasCustom || this.hasCustom;\n\n              this.canModeBeOptimized[currModName] =\n                currAnalyzeResult.canBeOptimized;\n            }\n          });\n        },\n      );\n\n      this.defaultMode = actualDefinition.defaultMode;\n\n      if (\n        !isEmpty(this.lexerDefinitionErrors) &&\n        !this.config.deferDefinitionErrorsHandling\n      ) {\n        const allErrMessages = map(this.lexerDefinitionErrors, (error) => {\n          return error.message;\n        });\n        const allErrMessagesString = allErrMessages.join(\n          \"-----------------------\\n\",\n        );\n        throw new Error(\n          \"Errors detected in definition of Lexer:\\n\" + allErrMessagesString,\n        );\n      }\n\n      // Only print warning if there are no errors, This will avoid pl\n      forEach(this.lexerDefinitionWarning, (warningDescriptor) => {\n        PRINT_WARNING(warningDescriptor.message);\n      });\n\n      this.TRACE_INIT(\"Choosing sub-methods implementations\", () => {\n        // Choose the relevant internal implementations for this specific parser.\n        // 
These implementations should be in-lined by the JavaScript engine\n        // to provide optimal performance in each scenario.\n        if (SUPPORT_STICKY) {\n          this.chopInput = identity;\n          this.match = this.matchWithTest;\n        } else {\n          this.updateLastIndex = noop;\n          this.match = this.matchWithExec;\n        }\n\n        if (hasOnlySingleMode) {\n          this.handleModes = noop;\n        }\n\n        if (this.trackStartLines === false) {\n          this.computeNewColumn = identity;\n        }\n\n        if (this.trackEndLines === false) {\n          this.updateTokenEndLineColumnLocation = noop;\n        }\n\n        if (/full/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createFullToken;\n        } else if (/onlyStart/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createStartOnlyToken;\n        } else if (/onlyOffset/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createOffsetOnlyToken;\n        } else {\n          throw Error(\n            `Invalid  config option: \"${this.config.positionTracking}\"`,\n          );\n        }\n\n        if (this.hasCustom) {\n          this.addToken = this.addTokenUsingPush;\n          this.handlePayload = this.handlePayloadWithCustom;\n        } else {\n          this.addToken = this.addTokenUsingMemberAccess;\n          this.handlePayload = this.handlePayloadNoCustom;\n        }\n      });\n\n      this.TRACE_INIT(\"Failed Optimization Warnings\", () => {\n        const unOptimizedModes = reduce(\n          this.canModeBeOptimized,\n          (cannotBeOptimized, canBeOptimized, modeName) => {\n            if (canBeOptimized === false) {\n              cannotBeOptimized.push(modeName);\n            }\n            return cannotBeOptimized;\n          },\n          [] as string[],\n        );\n\n        if (config.ensureOptimizations && !isEmpty(unOptimizedModes)) {\n          throw Error(\n            `Lexer Modes: < ${unOptimizedModes.join(\n              \", \",\n            )} > cannot be optimized.\\n` +\n              '\\t Disable the \"ensureOptimizations\" lexer config flag to silently ignore this and run the lexer in an un-optimized mode.\\n' +\n              \"\\t Or inspect the console log for details on how to resolve these issues.\",\n          );\n        }\n      });\n\n      this.TRACE_INIT(\"clearRegExpParserCache\", () => {\n        clearRegExpParserCache();\n      });\n\n      this.TRACE_INIT(\"toFastProperties\", () => {\n        toFastProperties(this);\n      });\n    });\n  }\n\n  public tokenize(\n    text: string,\n    initialMode: string = this.defaultMode,\n  ): ILexingResult {\n    if (!isEmpty(this.lexerDefinitionErrors)) {\n      const allErrMessages = map(this.lexerDefinitionErrors, (error) => {\n        return error.message;\n      });\n      const allErrMessagesString = allErrMessages.join(\n        \"-----------------------\\n\",\n      );\n      throw new Error(\n        \"Unable to Tokenize because Errors detected in definition of Lexer:\\n\" +\n          allErrMessagesString,\n      );\n    }\n\n    return this.tokenizeInternal(text, initialMode);\n  }\n\n  // There is quite a bit of duplication between this and \"tokenizeInternalLazy\"\n  // This is intentional due to performance considerations.\n  // this method also used quite a bit of `!` none null assertions because it is too optimized\n  // for `tsc` to always understand it is \"safe\"\n  private 
tokenizeInternal(text: string, initialMode: string): ILexingResult {\n    let i,\n      j,\n      k,\n      matchAltImage,\n      longerAlt,\n      matchedImage: string | null,\n      payload,\n      altPayload,\n      imageLength,\n      group,\n      tokType,\n      newToken: IToken,\n      errLength,\n      droppedChar,\n      msg,\n      match;\n    const orgText = text;\n    const orgLength = orgText.length;\n    let offset = 0;\n    let matchedTokensIndex = 0;\n    // initializing the tokensArray to the \"guessed\" size.\n    // guessing too little will still reduce the number of array re-sizes on pushes.\n    // guessing too large (Tested by guessing x4 too large) may cost a bit more of memory\n    // but would still have a faster runtime by avoiding (All but one) array resizing.\n    const guessedNumberOfTokens = this.hasCustom\n      ? 0 // will break custom token pattern APIs the matchedTokens array will contain undefined elements.\n      : Math.floor(text.length / 10);\n    const matchedTokens = new Array(guessedNumberOfTokens);\n    const errors: ILexingError[] = [];\n    let line = this.trackStartLines ? 1 : undefined;\n    let column = this.trackStartLines ? 1 : undefined;\n    const groups: any = cloneEmptyGroups(this.emptyGroups);\n    const trackLines = this.trackStartLines;\n    const lineTerminatorPattern = this.config.lineTerminatorsPattern;\n\n    let currModePatternsLength = 0;\n    let patternIdxToConfig: IPatternConfig[] = [];\n    let currCharCodeToPatternIdxToConfig: {\n      [charCode: number]: IPatternConfig[];\n    } = [];\n\n    const modeStack: string[] = [];\n\n    const emptyArray: IPatternConfig[] = [];\n    Object.freeze(emptyArray);\n    let getPossiblePatterns!: (charCode: number) => IPatternConfig[];\n\n    function getPossiblePatternsSlow() {\n      return patternIdxToConfig;\n    }\n\n    function getPossiblePatternsOptimized(charCode: number): IPatternConfig[] {\n      const optimizedCharIdx = charCodeToOptimizedIndex(charCode);\n      const possiblePatterns =\n        currCharCodeToPatternIdxToConfig[optimizedCharIdx];\n      if (possiblePatterns === undefined) {\n        return emptyArray;\n      } else {\n        return possiblePatterns;\n      }\n    }\n\n    const pop_mode = (popToken: IToken) => {\n      // TODO: perhaps avoid this error in the edge case there is no more input?\n      if (\n        modeStack.length === 1 &&\n        // if we have both a POP_MODE and a PUSH_MODE this is in-fact a \"transition\"\n        // So no error should occur.\n        popToken.tokenType.PUSH_MODE === undefined\n      ) {\n        // if we try to pop the last mode there lexer will no longer have ANY mode.\n        // thus the pop is ignored, an error will be created and the lexer will continue parsing in the previous mode.\n        const msg =\n          this.config.errorMessageProvider.buildUnableToPopLexerModeMessage(\n            popToken,\n          );\n\n        errors.push({\n          offset: popToken.startOffset,\n          line: popToken.startLine,\n          column: popToken.startColumn,\n          length: popToken.image.length,\n          message: msg,\n        });\n      } else {\n        modeStack.pop();\n        const newMode = last(modeStack)!;\n        patternIdxToConfig = this.patternIdxToConfig[newMode];\n        currCharCodeToPatternIdxToConfig =\n          this.charCodeToPatternIdxToConfig[newMode];\n        currModePatternsLength = patternIdxToConfig.length;\n        const modeCanBeOptimized =\n          
this.canModeBeOptimized[newMode] && this.config.safeMode === false;\n\n        if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {\n          getPossiblePatterns = getPossiblePatternsOptimized;\n        } else {\n          getPossiblePatterns = getPossiblePatternsSlow;\n        }\n      }\n    };\n\n    function push_mode(this: Lexer, newMode: string) {\n      modeStack.push(newMode);\n      currCharCodeToPatternIdxToConfig =\n        this.charCodeToPatternIdxToConfig[newMode];\n\n      patternIdxToConfig = this.patternIdxToConfig[newMode];\n      currModePatternsLength = patternIdxToConfig.length;\n\n      currModePatternsLength = patternIdxToConfig.length;\n      const modeCanBeOptimized =\n        this.canModeBeOptimized[newMode] && this.config.safeMode === false;\n\n      if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {\n        getPossiblePatterns = getPossiblePatternsOptimized;\n      } else {\n        getPossiblePatterns = getPossiblePatternsSlow;\n      }\n    }\n\n    // this pattern seems to avoid a V8 de-optimization, although that de-optimization does not\n    // seem to matter performance wise.\n    push_mode.call(this, initialMode);\n\n    let currConfig!: IPatternConfig;\n\n    const recoveryEnabled = this.config.recoveryEnabled;\n\n    while (offset < orgLength) {\n      matchedImage = null;\n\n      const nextCharCode = orgText.charCodeAt(offset);\n      const chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);\n      const chosenPatternsLength = chosenPatternIdxToConfig.length;\n\n      for (i = 0; i < chosenPatternsLength; i++) {\n        currConfig = chosenPatternIdxToConfig[i];\n        const currPattern = currConfig.pattern;\n        payload = null;\n\n        // manually in-lined because > 600 chars won't be in-lined in V8\n        const singleCharCode = currConfig.short;\n        if (singleCharCode !== false) {\n          if (nextCharCode === singleCharCode) {\n            // single character string\n            matchedImage = currPattern as string;\n          }\n        } else if (currConfig.isCustom === true) {\n          match = (currPattern as IRegExpExec).exec(\n            orgText,\n            offset,\n            matchedTokens,\n            groups,\n          );\n          if (match !== null) {\n            matchedImage = match[0];\n            if ((match as CustomPatternMatcherReturn).payload !== undefined) {\n              payload = (match as CustomPatternMatcherReturn).payload;\n            }\n          } else {\n            matchedImage = null;\n          }\n        } else {\n          this.updateLastIndex(currPattern as RegExp, offset);\n          matchedImage = this.match(currPattern as RegExp, text, offset);\n        }\n\n        if (matchedImage !== null) {\n          // even though this pattern matched we must try a another longer alternative.\n          // this can be used to prioritize keywords over identifiers\n          longerAlt = currConfig.longerAlt;\n          if (longerAlt !== undefined) {\n            // TODO: micro optimize, avoid extra prop access\n            // by saving/linking longerAlt on the original config?\n            const longerAltLength = longerAlt.length;\n            for (k = 0; k < longerAltLength; k++) {\n              const longerAltConfig = patternIdxToConfig[longerAlt[k]];\n              const longerAltPattern = longerAltConfig.pattern;\n              altPayload = null;\n\n              // single Char can never be a longer alt so no need to test it.\n              // manually in-lined 
because > 600 chars won't be in-lined in V8\n              if (longerAltConfig.isCustom === true) {\n                match = (longerAltPattern as IRegExpExec).exec(\n                  orgText,\n                  offset,\n                  matchedTokens,\n                  groups,\n                );\n                if (match !== null) {\n                  matchAltImage = match[0];\n                  if (\n                    (match as CustomPatternMatcherReturn).payload !== undefined\n                  ) {\n                    altPayload = (match as CustomPatternMatcherReturn).payload;\n                  }\n                } else {\n                  matchAltImage = null;\n                }\n              } else {\n                this.updateLastIndex(longerAltPattern as RegExp, offset);\n                matchAltImage = this.match(\n                  longerAltPattern as RegExp,\n                  text,\n                  offset,\n                );\n              }\n\n              if (matchAltImage && matchAltImage.length > matchedImage.length) {\n                matchedImage = matchAltImage;\n                payload = altPayload;\n                currConfig = longerAltConfig;\n                // Exit the loop early after matching one of the longer alternatives\n                // The first matched alternative takes precedence\n                break;\n              }\n            }\n          }\n          break;\n        }\n      }\n\n      // successful match\n      if (matchedImage !== null) {\n        imageLength = matchedImage.length;\n        group = currConfig.group;\n        if (group !== undefined) {\n          tokType = currConfig.tokenTypeIdx;\n          // TODO: \"offset + imageLength\" and the new column may be computed twice in case of \"full\" location information inside\n          // createFullToken method\n          newToken = this.createTokenInstance(\n            matchedImage,\n            offset,\n            tokType,\n            currConfig.tokenType,\n            line,\n            column,\n            imageLength,\n          );\n\n          this.handlePayload(newToken, payload);\n\n          // TODO: optimize NOOP in case there are no special groups?\n          if (group === false) {\n            matchedTokensIndex = this.addToken(\n              matchedTokens,\n              matchedTokensIndex,\n              newToken,\n            );\n          } else {\n            groups[group].push(newToken);\n          }\n        }\n        text = this.chopInput(text, imageLength);\n        offset = offset + imageLength;\n\n        // TODO: with newlines the column may be assigned twice\n        column = this.computeNewColumn(column!, imageLength);\n\n        if (trackLines === true && currConfig.canLineTerminator === true) {\n          let numOfLTsInMatch = 0;\n          let foundTerminator;\n          let lastLTEndOffset: number;\n          lineTerminatorPattern.lastIndex = 0;\n          do {\n            foundTerminator = lineTerminatorPattern.test(matchedImage);\n            if (foundTerminator === true) {\n              lastLTEndOffset = lineTerminatorPattern.lastIndex - 1;\n              numOfLTsInMatch++;\n            }\n          } while (foundTerminator === true);\n\n          if (numOfLTsInMatch !== 0) {\n            line = line! 
+ numOfLTsInMatch;\n            column = imageLength - lastLTEndOffset!;\n            this.updateTokenEndLineColumnLocation(\n              newToken!,\n              group!,\n              lastLTEndOffset!,\n              numOfLTsInMatch,\n              line,\n              column,\n              imageLength,\n            );\n          }\n        }\n        // will be NOOP if no modes present\n        this.handleModes(currConfig, pop_mode, push_mode, newToken!);\n      } else {\n        // error recovery, drop characters until we identify a valid token's start point\n        const errorStartOffset = offset;\n        const errorLine = line;\n        const errorColumn = column;\n        let foundResyncPoint = recoveryEnabled === false;\n\n        while (foundResyncPoint === false && offset < orgLength) {\n          // Identity Func (when sticky flag is enabled)\n          text = this.chopInput(text, 1);\n          offset++;\n          for (j = 0; j < currModePatternsLength; j++) {\n            const currConfig = patternIdxToConfig[j];\n            const currPattern = currConfig.pattern;\n\n            // manually in-lined because > 600 chars won't be in-lined in V8\n            const singleCharCode = currConfig.short;\n            if (singleCharCode !== false) {\n              if (orgText.charCodeAt(offset) === singleCharCode) {\n                // single character string\n                foundResyncPoint = true;\n              }\n            } else if (currConfig.isCustom === true) {\n              foundResyncPoint =\n                (currPattern as IRegExpExec).exec(\n                  orgText,\n                  offset,\n                  matchedTokens,\n                  groups,\n                ) !== null;\n            } else {\n              this.updateLastIndex(currPattern as RegExp, offset);\n              foundResyncPoint = (currPattern as RegExp).exec(text) !== null;\n            }\n\n            if (foundResyncPoint === true) {\n              break;\n            }\n          }\n        }\n\n        errLength = offset - errorStartOffset;\n        column = this.computeNewColumn(column!, errLength);\n        // at this point we either re-synced or reached the end of the input text\n        msg = this.config.errorMessageProvider.buildUnexpectedCharactersMessage(\n          orgText,\n          errorStartOffset,\n          errLength,\n          errorLine,\n          errorColumn,\n        );\n        errors.push({\n          offset: errorStartOffset,\n          line: errorLine,\n          column: errorColumn,\n          length: errLength,\n          message: msg,\n        });\n\n        if (recoveryEnabled === false) {\n          break;\n        }\n      }\n    }\n\n    // if we do have custom patterns which push directly into the\n    // TODO: custom tokens should not push directly??\n    if (!this.hasCustom) {\n      // if we guessed a too large size for the tokens array this will shrink it to the right size.\n      matchedTokens.length = matchedTokensIndex;\n    }\n\n    return {\n      tokens: matchedTokens,\n      groups: groups,\n      errors: errors,\n    };\n  }\n\n  private handleModes(\n    config: IPatternConfig,\n    pop_mode: (tok: IToken) => void,\n    push_mode: (this: Lexer, pushMode: string) => void,\n    newToken: IToken,\n  ) {\n    if (config.pop === true) {\n      // need to save the PUSH_MODE property as if the mode is popped\n      // patternIdxToPopMode is updated to reflect the new mode after popping the stack\n      const pushMode = config.push;\n      
pop_mode(newToken);\n      if (pushMode !== undefined) {\n        push_mode.call(this, pushMode);\n      }\n    } else if (config.push !== undefined) {\n      push_mode.call(this, config.push);\n    }\n  }\n\n  private chopInput(text: string, length: number): string {\n    return text.substring(length);\n  }\n\n  private updateLastIndex(regExp: RegExp, newLastIndex: number): void {\n    regExp.lastIndex = newLastIndex;\n  }\n\n  // TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler\n  private updateTokenEndLineColumnLocation(\n    newToken: IToken,\n    group: string | false,\n    lastLTIdx: number,\n    numOfLTsInMatch: number,\n    line: number,\n    column: number,\n    imageLength: number,\n  ): void {\n    let lastCharIsLT, fixForEndingInLT;\n    if (group !== undefined) {\n      // a none skipped multi line Token, need to update endLine/endColumn\n      lastCharIsLT = lastLTIdx === imageLength - 1;\n      fixForEndingInLT = lastCharIsLT ? -1 : 0;\n      if (!(numOfLTsInMatch === 1 && lastCharIsLT === true)) {\n        // if a token ends in a LT that last LT only affects the line numbering of following Tokens\n        newToken.endLine = line + fixForEndingInLT;\n        // the last LT in a token does not affect the endColumn either as the [columnStart ... columnEnd)\n        // inclusive to exclusive range.\n        newToken.endColumn = column - 1 + -fixForEndingInLT;\n      }\n      // else single LT in the last character of a token, no need to modify the endLine/EndColumn\n    }\n  }\n\n  private computeNewColumn(oldColumn: number, imageLength: number) {\n    return oldColumn + imageLength;\n  }\n\n  // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.\n  /* istanbul ignore next - place holder */\n  private createTokenInstance!: (...args: any[]) => IToken;\n\n  private createOffsetOnlyToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n  ) {\n    return {\n      image,\n      startOffset,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  private createStartOnlyToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n    startLine: number,\n    startColumn: number,\n  ) {\n    return {\n      image,\n      startOffset,\n      startLine,\n      startColumn,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  private createFullToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n    startLine: number,\n    startColumn: number,\n    imageLength: number,\n  ): IToken {\n    return {\n      image,\n      startOffset,\n      endOffset: startOffset + imageLength - 1,\n      startLine,\n      endLine: startLine,\n      startColumn,\n      endColumn: startColumn + imageLength - 1,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.\n  /* istanbul ignore next - place holder */\n  private addToken!: (\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ) => number;\n\n  private addTokenUsingPush(\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ): number {\n    tokenVector.push(tokenToAdd);\n    return index;\n  }\n\n  private addTokenUsingMemberAccess(\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ): number {\n    tokenVector[index] = 
tokenToAdd;\n    index++;\n    return index;\n  }\n\n  // Place holder, will be replaced by the correct variant according to the hasCustom flag option at runtime.\n  private handlePayload: (token: IToken, payload: any) => void;\n\n  private handlePayloadNoCustom(token: IToken, payload: any): void {}\n\n  private handlePayloadWithCustom(token: IToken, payload: any): void {\n    if (payload !== null) {\n      token.payload = payload;\n    }\n  }\n\n  // place holder to be replaced with chosen alternative at runtime\n  private match!: (\n    pattern: RegExp,\n    text: string,\n    offset: number,\n  ) => string | null;\n\n  private matchWithTest(\n    pattern: RegExp,\n    text: string,\n    offset: number,\n  ): string | null {\n    const found = pattern.test(text);\n    if (found === true) {\n      return text.substring(offset, pattern.lastIndex);\n    }\n    return null;\n  }\n\n  private matchWithExec(pattern: RegExp, text: string): string | null {\n    const regExpArray = pattern.exec(text);\n    return regExpArray !== null ? regExpArray[0] : null;\n  }\n\n  // Duplicated from the parser's perf trace trait to allow future extraction\n  // of the lexer to a separate package.\n  TRACE_INIT = (phaseDesc: string, phaseImpl: () => T): T => {\n    // No need to optimize this using NOOP pattern because\n    // It is not called in a hot spot...\n    if (this.traceInitPerf === true) {\n      this.traceInitIndent++;\n      const indent = new Array(this.traceInitIndent + 1).join(\"\\t\");\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        console.log(`${indent}--> <${phaseDesc}>`);\n      }\n      const { time, value } = timer(phaseImpl);\n      /* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */\n      const traceMethod = time > 10 ? 
console.warn : console.log;\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);\n      }\n      this.traceInitIndent--;\n      return value;\n    } else {\n      return phaseImpl();\n    }\n  };\n}\n", "import { has, isString, isUndefined } from \"lodash-es\";\nimport { Lexer } from \"./lexer_public.js\";\nimport { augmentTokenTypes, tokenStructuredMatcher } from \"./tokens.js\";\nimport { IToken, ITokenConfig, TokenType } from \"@chevrotain/types\";\n\nexport function tokenLabel(tokType: TokenType): string {\n  if (hasTokenLabel(tokType)) {\n    return tokType.LABEL;\n  } else {\n    return tokType.name;\n  }\n}\n\nexport function tokenName(tokType: TokenType): string {\n  return tokType.name;\n}\n\nexport function hasTokenLabel(\n  obj: TokenType,\n): obj is TokenType & Pick, \"LABEL\"> {\n  return isString(obj.LABEL) && obj.LABEL !== \"\";\n}\n\nconst PARENT = \"parent\";\nconst CATEGORIES = \"categories\";\nconst LABEL = \"label\";\nconst GROUP = \"group\";\nconst PUSH_MODE = \"push_mode\";\nconst POP_MODE = \"pop_mode\";\nconst LONGER_ALT = \"longer_alt\";\nconst LINE_BREAKS = \"line_breaks\";\nconst START_CHARS_HINT = \"start_chars_hint\";\n\nexport function createToken(config: ITokenConfig): TokenType {\n  return createTokenInternal(config);\n}\n\nfunction createTokenInternal(config: ITokenConfig): TokenType {\n  const pattern = config.pattern;\n\n  const tokenType: TokenType = {};\n  tokenType.name = config.name;\n\n  if (!isUndefined(pattern)) {\n    tokenType.PATTERN = pattern;\n  }\n\n  if (has(config, PARENT)) {\n    throw (\n      \"The parent property is no longer supported.\\n\" +\n      \"See: https://github.com/chevrotain/chevrotain/issues/564#issuecomment-349062346 for details.\"\n    );\n  }\n\n  if (has(config, CATEGORIES)) {\n    // casting to ANY as this will be fixed inside `augmentTokenTypes``\n    tokenType.CATEGORIES = config[CATEGORIES];\n  }\n\n  augmentTokenTypes([tokenType]);\n\n  if (has(config, LABEL)) {\n    tokenType.LABEL = config[LABEL];\n  }\n\n  if (has(config, GROUP)) {\n    tokenType.GROUP = config[GROUP];\n  }\n\n  if (has(config, POP_MODE)) {\n    tokenType.POP_MODE = config[POP_MODE];\n  }\n\n  if (has(config, PUSH_MODE)) {\n    tokenType.PUSH_MODE = config[PUSH_MODE];\n  }\n\n  if (has(config, LONGER_ALT)) {\n    tokenType.LONGER_ALT = config[LONGER_ALT];\n  }\n\n  if (has(config, LINE_BREAKS)) {\n    tokenType.LINE_BREAKS = config[LINE_BREAKS];\n  }\n\n  if (has(config, START_CHARS_HINT)) {\n    tokenType.START_CHARS_HINT = config[START_CHARS_HINT];\n  }\n\n  return tokenType;\n}\n\nexport const EOF = createToken({ name: \"EOF\", pattern: Lexer.NA });\naugmentTokenTypes([EOF]);\n\nexport function createTokenInstance(\n  tokType: TokenType,\n  image: string,\n  startOffset: number,\n  endOffset: number,\n  startLine: number,\n  endLine: number,\n  startColumn: number,\n  endColumn: number,\n): IToken {\n  return {\n    image,\n    startOffset,\n    endOffset,\n    startLine,\n    endLine,\n    startColumn,\n    endColumn,\n    tokenTypeIdx: (tokType).tokenTypeIdx,\n    tokenType: tokType,\n  };\n}\n\nexport function tokenMatcher(token: IToken, tokType: TokenType): boolean {\n  return tokenStructuredMatcher(token, tokType);\n}\n", "import { hasTokenLabel, tokenLabel } from \"../scan/tokens_public.js\";\nimport { first, map, reduce } from \"lodash-es\";\nimport {\n  Alternation,\n  getProductionDslName,\n  NonTerminal,\n  Rule,\n  Terminal,\n} from 
\"@chevrotain/gast\";\nimport {\n  IParserErrorMessageProvider,\n  IProductionWithOccurrence,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IGrammarValidatorErrorMessageProvider,\n} from \"./grammar/types.js\";\n\nexport const defaultParserErrorProvider: IParserErrorMessageProvider = {\n  buildMismatchTokenMessage({ expected, actual, previous, ruleName }): string {\n    const hasLabel = hasTokenLabel(expected);\n    const expectedMsg = hasLabel\n      ? `--> ${tokenLabel(expected)} <--`\n      : `token of type --> ${expected.name} <--`;\n\n    const msg = `Expecting ${expectedMsg} but found --> '${actual.image}' <--`;\n\n    return msg;\n  },\n\n  buildNotAllInputParsedMessage({ firstRedundant, ruleName }): string {\n    return \"Redundant input, expecting EOF but found: \" + firstRedundant.image;\n  },\n\n  buildNoViableAltMessage({\n    expectedPathsPerAlt,\n    actual,\n    previous,\n    customUserDescription,\n    ruleName,\n  }): string {\n    const errPrefix = \"Expecting: \";\n    // TODO: issue: No Viable Alternative Error may have incomplete details. #502\n    const actualText = first(actual)!.image;\n    const errSuffix = \"\\nbut found: '\" + actualText + \"'\";\n\n    if (customUserDescription) {\n      return errPrefix + customUserDescription + errSuffix;\n    } else {\n      const allLookAheadPaths = reduce(\n        expectedPathsPerAlt,\n        (result, currAltPaths) => result.concat(currAltPaths),\n        [] as TokenType[][],\n      );\n      const nextValidTokenSequences = map(\n        allLookAheadPaths,\n        (currPath) =>\n          `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join(\n            \", \",\n          )}]`,\n      );\n      const nextValidSequenceItems = map(\n        nextValidTokenSequences,\n        (itemMsg, idx) => `  ${idx + 1}. ${itemMsg}`,\n      );\n      const calculatedDescription = `one of these possible Token sequences:\\n${nextValidSequenceItems.join(\n        \"\\n\",\n      )}`;\n\n      return errPrefix + calculatedDescription + errSuffix;\n    }\n  },\n\n  buildEarlyExitMessage({\n    expectedIterationPaths,\n    actual,\n    customUserDescription,\n    ruleName,\n  }): string {\n    const errPrefix = \"Expecting: \";\n    // TODO: issue: No Viable Alternative Error may have incomplete details. 
#502\n    const actualText = first(actual)!.image;\n    const errSuffix = \"\\nbut found: '\" + actualText + \"'\";\n\n    if (customUserDescription) {\n      return errPrefix + customUserDescription + errSuffix;\n    } else {\n      const nextValidTokenSequences = map(\n        expectedIterationPaths,\n        (currPath) =>\n          `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join(\n            \",\",\n          )}]`,\n      );\n      const calculatedDescription =\n        `expecting at least one iteration which starts with one of these possible Token sequences::\\n  ` +\n        `<${nextValidTokenSequences.join(\" ,\")}>`;\n\n      return errPrefix + calculatedDescription + errSuffix;\n    }\n  },\n};\n\nObject.freeze(defaultParserErrorProvider);\n\nexport const defaultGrammarResolverErrorProvider: IGrammarResolverErrorMessageProvider =\n  {\n    buildRuleNotFoundError(\n      topLevelRule: Rule,\n      undefinedRule: NonTerminal,\n    ): string {\n      const msg =\n        \"Invalid grammar, reference to a rule which is not defined: ->\" +\n        undefinedRule.nonTerminalName +\n        \"<-\\n\" +\n        \"inside top level rule: ->\" +\n        topLevelRule.name +\n        \"<-\";\n      return msg;\n    },\n  };\n\nexport const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessageProvider =\n  {\n    buildDuplicateFoundError(\n      topLevelRule: Rule,\n      duplicateProds: IProductionWithOccurrence[],\n    ): string {\n      function getExtraProductionArgument(\n        prod: IProductionWithOccurrence,\n      ): string {\n        if (prod instanceof Terminal) {\n          return prod.terminalType.name;\n        } else if (prod instanceof NonTerminal) {\n          return prod.nonTerminalName;\n        } else {\n          return \"\";\n        }\n      }\n\n      const topLevelName = topLevelRule.name;\n      const duplicateProd = first(duplicateProds)!;\n      const index = duplicateProd.idx;\n      const dslName = getProductionDslName(duplicateProd);\n      const extraArgument = getExtraProductionArgument(duplicateProd);\n\n      const hasExplicitIndex = index > 0;\n      let msg = `->${dslName}${hasExplicitIndex ? index : \"\"}<- ${\n        extraArgument ? `with argument: ->${extraArgument}<-` : \"\"\n      }\n                  appears more than once (${\n                    duplicateProds.length\n                  } times) in the top level rule: ->${topLevelName}<-.                  \n                  For further details see: https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES \n                  `;\n\n      // white space trimming time! 
better to trim afterwards as it allows to use WELL formatted multi line template strings...\n      msg = msg.replace(/[ \\t]+/g, \" \");\n      msg = msg.replace(/\\s\\s+/g, \"\\n\");\n\n      return msg;\n    },\n\n    buildNamespaceConflictError(rule: Rule): string {\n      const errMsg =\n        `Namespace conflict found in grammar.\\n` +\n        `The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: <${rule.name}>.\\n` +\n        `To resolve this make sure each Terminal and Non-Terminal names are unique\\n` +\n        `This is easy to accomplish by using the convention that Terminal names start with an uppercase letter\\n` +\n        `and Non-Terminal names start with a lower case letter.`;\n\n      return errMsg;\n    },\n\n    buildAlternationPrefixAmbiguityError(options: {\n      topLevelRule: Rule;\n      prefixPath: TokenType[];\n      ambiguityIndices: number[];\n      alternation: Alternation;\n    }): string {\n      const pathMsg = map(options.prefixPath, (currTok) =>\n        tokenLabel(currTok),\n      ).join(\", \");\n      const occurrence =\n        options.alternation.idx === 0 ? \"\" : options.alternation.idx;\n      const errMsg =\n        `Ambiguous alternatives: <${options.ambiguityIndices.join(\n          \" ,\",\n        )}> due to common lookahead prefix\\n` +\n        `in  inside <${options.topLevelRule.name}> Rule,\\n` +\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n` +\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX\\n` +\n        `For Further details.`;\n\n      return errMsg;\n    },\n\n    buildAlternationAmbiguityError(options: {\n      topLevelRule: Rule;\n      prefixPath: TokenType[];\n      ambiguityIndices: number[];\n      alternation: Alternation;\n    }): string {\n      const pathMsg = map(options.prefixPath, (currtok) =>\n        tokenLabel(currtok),\n      ).join(\", \");\n      const occurrence =\n        options.alternation.idx === 0 ? 
\"\" : options.alternation.idx;\n      let currMessage =\n        `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(\n          \" ,\",\n        )}> in ` +\n        ` inside <${options.topLevelRule.name}> Rule,\\n` +\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n`;\n\n      currMessage =\n        currMessage +\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\\n` +\n        `For Further details.`;\n      return currMessage;\n    },\n\n    buildEmptyRepetitionError(options: {\n      topLevelRule: Rule;\n      repetition: IProductionWithOccurrence;\n    }): string {\n      let dslName = getProductionDslName(options.repetition);\n      if (options.repetition.idx !== 0) {\n        dslName += options.repetition.idx;\n      }\n\n      const errMsg =\n        `The repetition <${dslName}> within Rule <${options.topLevelRule.name}> can never consume any tokens.\\n` +\n        `This could lead to an infinite loop.`;\n\n      return errMsg;\n    },\n\n    // TODO: remove - `errors_public` from nyc.config.js exclude\n    //       once this method is fully removed from this file\n    buildTokenNameError(options: {\n      tokenType: TokenType;\n      expectedPattern: RegExp;\n    }): string {\n      /* istanbul ignore next */\n      return \"deprecated\";\n    },\n\n    buildEmptyAlternationError(options: {\n      topLevelRule: Rule;\n      alternation: Alternation;\n      emptyChoiceIdx: number;\n    }): string {\n      const errMsg =\n        `Ambiguous empty alternative: <${options.emptyChoiceIdx + 1}>` +\n        ` in  inside <${options.topLevelRule.name}> Rule.\\n` +\n        `Only the last alternative may be an empty alternative.`;\n\n      return errMsg;\n    },\n\n    buildTooManyAlternativesError(options: {\n      topLevelRule: Rule;\n      alternation: Alternation;\n    }): string {\n      const errMsg =\n        `An Alternation cannot have more than 256 alternatives:\\n` +\n        ` inside <${\n          options.topLevelRule.name\n        }> Rule.\\n has ${\n          options.alternation.definition.length + 1\n        } alternatives.`;\n\n      return errMsg;\n    },\n\n    buildLeftRecursionError(options: {\n      topLevelRule: Rule;\n      leftRecursionPath: Rule[];\n    }): string {\n      const ruleName = options.topLevelRule.name;\n      const pathNames = map(\n        options.leftRecursionPath,\n        (currRule) => currRule.name,\n      );\n      const leftRecursivePath = `${ruleName} --> ${pathNames\n        .concat([ruleName])\n        .join(\" --> \")}`;\n      const errMsg =\n        `Left Recursion found in grammar.\\n` +\n        `rule: <${ruleName}> can be invoked from itself (directly or indirectly)\\n` +\n        `without consuming any Tokens. 
The grammar path that causes this is: \\n ${leftRecursivePath}\\n` +\n        ` To fix this refactor your grammar to remove the left recursion.\\n` +\n        `see: https://en.wikipedia.org/wiki/LL_parser#Left_factoring.`;\n\n      return errMsg;\n    },\n\n    // TODO: remove - `errors_public` from nyc.config.js exclude\n    //       once this method is fully removed from this file\n    buildInvalidRuleNameError(options: {\n      topLevelRule: Rule;\n      expectedPattern: RegExp;\n    }): string {\n      /* istanbul ignore next */\n      return \"deprecated\";\n    },\n\n    buildDuplicateRuleNameError(options: {\n      topLevelRule: Rule | string;\n      grammarName: string;\n    }): string {\n      let ruleName;\n      if (options.topLevelRule instanceof Rule) {\n        ruleName = options.topLevelRule.name;\n      } else {\n        ruleName = options.topLevelRule;\n      }\n\n      const errMsg = `Duplicate definition, rule: ->${ruleName}<- is already defined in the grammar: ->${options.grammarName}<-`;\n\n      return errMsg;\n    },\n  };\n", "import {\n  IParserUnresolvedRefDefinitionError,\n  ParserDefinitionErrorType,\n} from \"../parser/parser.js\";\nimport { forEach, values } from \"lodash-es\";\nimport { GAstVisitor, NonTerminal, Rule } from \"@chevrotain/gast\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IParserDefinitionError,\n} from \"./types.js\";\n\nexport function resolveGrammar(\n  topLevels: Record,\n  errMsgProvider: IGrammarResolverErrorMessageProvider,\n): IParserDefinitionError[] {\n  const refResolver = new GastRefResolverVisitor(topLevels, errMsgProvider);\n  refResolver.resolveRefs();\n  return refResolver.errors;\n}\n\nexport class GastRefResolverVisitor extends GAstVisitor {\n  public errors: IParserUnresolvedRefDefinitionError[] = [];\n  private currTopLevel: Rule;\n\n  constructor(\n    private nameToTopRule: Record,\n    private errMsgProvider: IGrammarResolverErrorMessageProvider,\n  ) {\n    super();\n  }\n\n  public resolveRefs(): void {\n    forEach(values(this.nameToTopRule), (prod) => {\n      this.currTopLevel = prod;\n      prod.accept(this);\n    });\n  }\n\n  public visitNonTerminal(node: NonTerminal): void {\n    const ref = this.nameToTopRule[node.nonTerminalName];\n\n    if (!ref) {\n      const msg = this.errMsgProvider.buildRuleNotFoundError(\n        this.currTopLevel,\n        node,\n      );\n      this.errors.push({\n        message: msg,\n        type: ParserDefinitionErrorType.UNRESOLVED_SUBRULE_REF,\n        ruleName: this.currTopLevel.name,\n        unresolvedRefName: node.nonTerminalName,\n      });\n    } else {\n      node.referencedRule = ref;\n    }\n  }\n}\n", "import {\n  clone,\n  drop,\n  dropRight,\n  first as _first,\n  forEach,\n  isEmpty,\n  last,\n} from \"lodash-es\";\nimport { first } from \"./first.js\";\nimport { RestWalker } from \"./rest.js\";\nimport { TokenMatcher } from \"../parser/parser.js\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport {\n  IGrammarPath,\n  IProduction,\n  ISyntacticContentAssistPath,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\n\nexport abstract class AbstractNextPossibleTokensWalker extends RestWalker {\n  protected possibleTokTypes: TokenType[] = [];\n  protected ruleStack: string[];\n  protected occurrenceStack: number[];\n\n  protected nextProductionName = 
\"\";\n  protected nextProductionOccurrence = 0;\n  protected found = false;\n  protected isAtEndOfPath = false;\n\n  constructor(\n    protected topProd: Rule,\n    protected path: IGrammarPath,\n  ) {\n    super();\n  }\n\n  startWalking(): TokenType[] {\n    this.found = false;\n\n    if (this.path.ruleStack[0] !== this.topProd.name) {\n      throw Error(\"The path does not start with the walker's top Rule!\");\n    }\n\n    // immutable for the win\n    this.ruleStack = clone(this.path.ruleStack).reverse(); // intelij bug requires assertion\n    this.occurrenceStack = clone(this.path.occurrenceStack).reverse(); // intelij bug requires assertion\n\n    // already verified that the first production is valid, we now seek the 2nd production\n    this.ruleStack.pop();\n    this.occurrenceStack.pop();\n\n    this.updateExpectedNext();\n    this.walk(this.topProd);\n\n    return this.possibleTokTypes;\n  }\n\n  walk(\n    prod: { definition: IProduction[] },\n    prevRest: IProduction[] = [],\n  ): void {\n    // stop scanning once we found the path\n    if (!this.found) {\n      super.walk(prod, prevRest);\n    }\n  }\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // found the next production, need to keep walking in it\n    if (\n      refProd.referencedRule.name === this.nextProductionName &&\n      refProd.idx === this.nextProductionOccurrence\n    ) {\n      const fullRest = currRest.concat(prevRest);\n      this.updateExpectedNext();\n      this.walk(refProd.referencedRule, fullRest);\n    }\n  }\n\n  updateExpectedNext(): void {\n    // need to consume the Terminal\n    if (isEmpty(this.ruleStack)) {\n      // must reset nextProductionXXX to avoid walking down another Top Level production while what we are\n      // really seeking is the last Terminal...\n      this.nextProductionName = \"\";\n      this.nextProductionOccurrence = 0;\n      this.isAtEndOfPath = true;\n    } else {\n      this.nextProductionName = this.ruleStack.pop()!;\n      this.nextProductionOccurrence = this.occurrenceStack.pop()!;\n    }\n  }\n}\n\nexport class NextAfterTokenWalker extends AbstractNextPossibleTokensWalker {\n  private nextTerminalName = \"\";\n  private nextTerminalOccurrence = 0;\n\n  constructor(\n    topProd: Rule,\n    protected path: ITokenGrammarPath,\n  ) {\n    super(topProd, path);\n    this.nextTerminalName = this.path.lastTok.name;\n    this.nextTerminalOccurrence = this.path.lastTokOccurrence;\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      this.isAtEndOfPath &&\n      terminal.terminalType.name === this.nextTerminalName &&\n      terminal.idx === this.nextTerminalOccurrence &&\n      !this.found\n    ) {\n      const fullRest = currRest.concat(prevRest);\n      const restProd = new Alternative({ definition: fullRest });\n      this.possibleTokTypes = first(restProd);\n      this.found = true;\n    }\n  }\n}\n\nexport type AlternativesFirstTokens = TokenType[][];\n\nexport interface IFirstAfterRepetition {\n  token: TokenType | undefined;\n  occurrence: number | undefined;\n  isEndOfRule: boolean | undefined;\n}\n\n/**\n * This walker only \"walks\" a single \"TOP\" level in the Grammar Ast, this means\n * it never \"follows\" production refs\n */\nexport class AbstractNextTerminalAfterProductionWalker extends RestWalker {\n  protected result: IFirstAfterRepetition = {\n    token: undefined,\n    occurrence: undefined,\n  
  isEndOfRule: undefined,\n  };\n\n  constructor(\n    protected topRule: Rule,\n    protected occurrence: number,\n  ) {\n    super();\n  }\n\n  startWalking(): IFirstAfterRepetition {\n    this.walk(this.topRule);\n    return this.result;\n  }\n}\n\nexport class NextTerminalAfterManyWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (manyProd.idx === this.occurrence) {\n      const firstAfterMany = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterMany === undefined;\n      if (firstAfterMany instanceof Terminal) {\n        this.result.token = firstAfterMany.terminalType;\n        this.result.occurrence = firstAfterMany.idx;\n      }\n    } else {\n      super.walkMany(manyProd, currRest, prevRest);\n    }\n  }\n}\n\nexport class NextTerminalAfterManySepWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (manySepProd.idx === this.occurrence) {\n      const firstAfterManySep = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterManySep === undefined;\n      if (firstAfterManySep instanceof Terminal) {\n        this.result.token = firstAfterManySep.terminalType;\n        this.result.occurrence = firstAfterManySep.idx;\n      }\n    } else {\n      super.walkManySep(manySepProd, currRest, prevRest);\n    }\n  }\n}\n\nexport class NextTerminalAfterAtLeastOneWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (atLeastOneProd.idx === this.occurrence) {\n      const firstAfterAtLeastOne = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterAtLeastOne === undefined;\n      if (firstAfterAtLeastOne instanceof Terminal) {\n        this.result.token = firstAfterAtLeastOne.terminalType;\n        this.result.occurrence = firstAfterAtLeastOne.idx;\n      }\n    } else {\n      super.walkAtLeastOne(atLeastOneProd, currRest, prevRest);\n    }\n  }\n}\n\n// TODO: reduce code duplication in the AfterWalkers\nexport class NextTerminalAfterAtLeastOneSepWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkAtLeastOneSep(\n    atleastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (atleastOneSepProd.idx === this.occurrence) {\n      const firstAfterfirstAfterAtLeastOneSep = _first(\n        currRest.concat(prevRest),\n      );\n      this.result.isEndOfRule = firstAfterfirstAfterAtLeastOneSep === undefined;\n      if (firstAfterfirstAfterAtLeastOneSep instanceof Terminal) {\n        this.result.token = firstAfterfirstAfterAtLeastOneSep.terminalType;\n        this.result.occurrence = firstAfterfirstAfterAtLeastOneSep.idx;\n      }\n    } else {\n      super.walkAtLeastOneSep(atleastOneSepProd, currRest, prevRest);\n    }\n  }\n}\n\nexport interface PartialPathAndSuffixes {\n  partialPath: TokenType[];\n  suffixDef: IProduction[];\n}\n\nexport function possiblePathsFrom(\n  targetDef: IProduction[],\n  maxLength: number,\n  currPath: TokenType[] = [],\n): PartialPathAndSuffixes[] {\n  // avoid side effects\n  currPath = clone(currPath);\n  let result: PartialPathAndSuffixes[] = [];\n  let i = 0;\n\n  // TODO: avoid inner 
funcs\n  function remainingPathWith(nextDef: IProduction[]) {\n    return nextDef.concat(drop(targetDef, i + 1));\n  }\n\n  // TODO: avoid inner funcs\n  function getAlternativesForProd(definition: IProduction[]) {\n    const alternatives = possiblePathsFrom(\n      remainingPathWith(definition),\n      maxLength,\n      currPath,\n    );\n    return result.concat(alternatives);\n  }\n\n  /**\n   * Mandatory productions will halt the loop as the paths computed from their recursive calls will already contain the\n   * following (rest) of the targetDef.\n   *\n   * For optional productions (Option/Repetition/...) the loop will continue to represent the paths that do not include the\n   * the optional production.\n   */\n  while (currPath.length < maxLength && i < targetDef.length) {\n    const prod = targetDef[i];\n\n    /* istanbul ignore else */\n    if (prod instanceof Alternative) {\n      return getAlternativesForProd(prod.definition);\n    } else if (prod instanceof NonTerminal) {\n      return getAlternativesForProd(prod.definition);\n    } else if (prod instanceof Option) {\n      result = getAlternativesForProd(prod.definition);\n    } else if (prod instanceof RepetitionMandatory) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: prod.definition,\n        }),\n      ]);\n      return getAlternativesForProd(newDef);\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n      const newDef = [\n        new Alternative({ definition: prod.definition }),\n        new Repetition({\n          definition: [new Terminal({ terminalType: prod.separator })].concat(\n            prod.definition,\n          ),\n        }),\n      ];\n      return getAlternativesForProd(newDef);\n    } else if (prod instanceof RepetitionWithSeparator) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: [new Terminal({ terminalType: prod.separator })].concat(\n            prod.definition,\n          ),\n        }),\n      ]);\n      result = getAlternativesForProd(newDef);\n    } else if (prod instanceof Repetition) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: prod.definition,\n        }),\n      ]);\n      result = getAlternativesForProd(newDef);\n    } else if (prod instanceof Alternation) {\n      forEach(prod.definition, (currAlt) => {\n        // TODO: this is a limited check for empty alternatives\n        //   It would prevent a common case of infinite loops during parser initialization.\n        //   However **in-directly** empty alternatives may still cause issues.\n        if (isEmpty(currAlt.definition) === false) {\n          result = getAlternativesForProd(currAlt.definition);\n        }\n      });\n      return result;\n    } else if (prod instanceof Terminal) {\n      currPath.push(prod.terminalType);\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n\n    i++;\n  }\n  result.push({\n    partialPath: currPath,\n    suffixDef: drop(targetDef, i),\n  });\n\n  return result;\n}\n\ninterface IPathToExamine {\n  idx: number;\n  def: IProduction[];\n  ruleStack: string[];\n  occurrenceStack: number[];\n}\n\nexport function nextPossibleTokensAfter(\n  initialDef: IProduction[],\n  tokenVector: IToken[],\n  tokMatcher: TokenMatcher,\n  maxLookAhead: number,\n): ISyntacticContentAssistPath[] {\n  const EXIT_NON_TERMINAL: any = \"EXIT_NONE_TERMINAL\";\n  // to avoid creating a new Array each time.\n  const EXIT_NON_TERMINAL_ARR = 
[EXIT_NON_TERMINAL];\n  const EXIT_ALTERNATIVE: any = \"EXIT_ALTERNATIVE\";\n  let foundCompletePath = false;\n\n  const tokenVectorLength = tokenVector.length;\n  const minimalAlternativesIndex = tokenVectorLength - maxLookAhead - 1;\n\n  const result: ISyntacticContentAssistPath[] = [];\n\n  const possiblePaths: IPathToExamine[] = [];\n  possiblePaths.push({\n    idx: -1,\n    def: initialDef,\n    ruleStack: [],\n    occurrenceStack: [],\n  });\n\n  while (!isEmpty(possiblePaths)) {\n    const currPath = possiblePaths.pop()!;\n\n    // skip alternatives if no more results can be found (assuming deterministic grammar with fixed lookahead)\n    if (currPath === EXIT_ALTERNATIVE) {\n      if (\n        foundCompletePath &&\n        last(possiblePaths)!.idx <= minimalAlternativesIndex\n      ) {\n        // remove irrelevant alternative\n        possiblePaths.pop();\n      }\n      continue;\n    }\n\n    const currDef = currPath.def;\n    const currIdx = currPath.idx;\n    const currRuleStack = currPath.ruleStack;\n    const currOccurrenceStack = currPath.occurrenceStack;\n\n    // For Example: an empty path could exist in a valid grammar in the case of an EMPTY_ALT\n    if (isEmpty(currDef)) {\n      continue;\n    }\n\n    const prod = currDef[0];\n    /* istanbul ignore else */\n    if (prod === EXIT_NON_TERMINAL) {\n      const nextPath = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: dropRight(currRuleStack),\n        occurrenceStack: dropRight(currOccurrenceStack),\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof Terminal) {\n      /* istanbul ignore else */\n      if (currIdx < tokenVectorLength - 1) {\n        const nextIdx = currIdx + 1;\n        const actualToken = tokenVector[nextIdx];\n        if (tokMatcher!(actualToken, prod.terminalType)) {\n          const nextPath = {\n            idx: nextIdx,\n            def: drop(currDef),\n            ruleStack: currRuleStack,\n            occurrenceStack: currOccurrenceStack,\n          };\n          possiblePaths.push(nextPath);\n        }\n        // end of the line\n      } else if (currIdx === tokenVectorLength - 1) {\n        // IGNORE ABOVE ELSE\n        result.push({\n          nextTokenType: prod.terminalType,\n          nextTokenOccurrence: prod.idx,\n          ruleStack: currRuleStack,\n          occurrenceStack: currOccurrenceStack,\n        });\n        foundCompletePath = true;\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    } else if (prod instanceof NonTerminal) {\n      const newRuleStack = clone(currRuleStack);\n      newRuleStack.push(prod.nonTerminalName);\n\n      const newOccurrenceStack = clone(currOccurrenceStack);\n      newOccurrenceStack.push(prod.idx);\n\n      const nextPath = {\n        idx: currIdx,\n        def: prod.definition.concat(EXIT_NON_TERMINAL_ARR, drop(currDef)),\n        ruleStack: newRuleStack,\n        occurrenceStack: newOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof Option) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      const 
nextPathWith = {\n        idx: currIdx,\n        def: prod.definition.concat(drop(currDef)),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof RepetitionMandatory) {\n      // TODO:(THE NEW operators here take a while...) (convert once?)\n      const secondIteration = new Repetition({\n        definition: prod.definition,\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([secondIteration], drop(currDef));\n      const nextPath = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n      // TODO:(THE NEW operators here take a while...) (convert once?)\n      const separatorGast = new Terminal({\n        terminalType: prod.separator,\n      });\n      const secondIteration = new Repetition({\n        definition: [separatorGast].concat(prod.definition),\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([secondIteration], drop(currDef));\n      const nextPath = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof RepetitionWithSeparator) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      const separatorGast = new Terminal({\n        terminalType: prod.separator,\n      });\n      const nthRepetition = new Repetition({\n        definition: [separatorGast].concat(prod.definition),\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([nthRepetition], drop(currDef));\n      const nextPathWith = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof Repetition) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      // TODO: an empty repetition will cause infinite loops here, will the parser detect this in selfAnalysis?\n      const nthRepetition = new Repetition({\n        definition: prod.definition,\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([nthRepetition], drop(currDef));\n      const nextPathWith = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof Alternation) {\n      
// the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      for (let i = prod.definition.length - 1; i >= 0; i--) {\n        const currAlt: any = prod.definition[i];\n        const currAltPath = {\n          idx: currIdx,\n          def: currAlt.definition.concat(drop(currDef)),\n          ruleStack: currRuleStack,\n          occurrenceStack: currOccurrenceStack,\n        };\n        possiblePaths.push(currAltPath);\n        possiblePaths.push(EXIT_ALTERNATIVE);\n      }\n    } else if (prod instanceof Alternative) {\n      possiblePaths.push({\n        idx: currIdx,\n        def: prod.definition.concat(drop(currDef)),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      });\n    } else if (prod instanceof Rule) {\n      // last because we should only encounter at most a single one of these per invocation.\n      possiblePaths.push(\n        expandTopLevelRule(prod, currIdx, currRuleStack, currOccurrenceStack),\n      );\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  }\n  return result;\n}\n\nfunction expandTopLevelRule(\n  topRule: Rule,\n  currIdx: number,\n  currRuleStack: string[],\n  currOccurrenceStack: number[],\n): IPathToExamine {\n  const newRuleStack = clone(currRuleStack);\n  newRuleStack.push(topRule.name);\n\n  const newCurrOccurrenceStack = clone(currOccurrenceStack);\n  // top rule is always assumed to have been called with occurrence index 1\n  newCurrOccurrenceStack.push(1);\n\n  return {\n    idx: currIdx,\n    def: topRule.definition,\n    ruleStack: newRuleStack,\n    occurrenceStack: newCurrOccurrenceStack,\n  };\n}\n", "import { every, flatten, forEach, has, isEmpty, map, reduce } from \"lodash-es\";\nimport { possiblePathsFrom } from \"./interpreter.js\";\nimport { RestWalker } from \"./rest.js\";\nimport { Predicate, TokenMatcher } from \"../parser/parser.js\";\nimport {\n  tokenStructuredMatcher,\n  tokenStructuredMatcherNoCategories,\n} from \"../../scan/tokens.js\";\nimport {\n  Alternation,\n  Alternative as AlternativeGAST,\n  GAstVisitor,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n} from \"@chevrotain/gast\";\nimport {\n  BaseParser,\n  IOrAlt,\n  IProduction,\n  IProductionWithOccurrence,\n  LookaheadProductionType,\n  LookaheadSequence,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\n\nexport enum PROD_TYPE {\n  OPTION,\n  REPETITION,\n  REPETITION_MANDATORY,\n  REPETITION_MANDATORY_WITH_SEPARATOR,\n  REPETITION_WITH_SEPARATOR,\n  ALTERNATION,\n}\n\nexport function getProdType(\n  prod: IProduction | LookaheadProductionType,\n): PROD_TYPE {\n  /* istanbul ignore else */\n  if (prod instanceof Option || prod === \"Option\") {\n    return PROD_TYPE.OPTION;\n  } else if (prod instanceof Repetition || prod === \"Repetition\") {\n    return PROD_TYPE.REPETITION;\n  } else if (\n    prod instanceof RepetitionMandatory ||\n    prod === \"RepetitionMandatory\"\n  ) {\n    return PROD_TYPE.REPETITION_MANDATORY;\n  } else if (\n    prod instanceof RepetitionMandatoryWithSeparator ||\n    prod === \"RepetitionMandatoryWithSeparator\"\n  ) {\n    return PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR;\n  } else if (\n    prod instanceof RepetitionWithSeparator ||\n    prod === \"RepetitionWithSeparator\"\n  ) {\n    return PROD_TYPE.REPETITION_WITH_SEPARATOR;\n  } else if (prod instanceof Alternation || prod === \"Alternation\") {\n    return PROD_TYPE.ALTERNATION;\n  } else {\n    throw Error(\"non 
exhaustive match\");\n  }\n}\n\nexport function getLookaheadPaths(options: {\n  occurrence: number;\n  rule: Rule;\n  prodType: LookaheadProductionType;\n  maxLookahead: number;\n}): LookaheadSequence[] {\n  const { occurrence, rule, prodType, maxLookahead } = options;\n  const type = getProdType(prodType);\n  if (type === PROD_TYPE.ALTERNATION) {\n    return getLookaheadPathsForOr(occurrence, rule, maxLookahead);\n  } else {\n    return getLookaheadPathsForOptionalProd(\n      occurrence,\n      rule,\n      type,\n      maxLookahead,\n    );\n  }\n}\n\nexport function buildLookaheadFuncForOr(\n  occurrence: number,\n  ruleGrammar: Rule,\n  maxLookahead: number,\n  hasPredicates: boolean,\n  dynamicTokensEnabled: boolean,\n  laFuncBuilder: Function,\n): (orAlts?: IOrAlt[]) => number | undefined {\n  const lookAheadPaths = getLookaheadPathsForOr(\n    occurrence,\n    ruleGrammar,\n    maxLookahead,\n  );\n\n  const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)\n    ? tokenStructuredMatcherNoCategories\n    : tokenStructuredMatcher;\n\n  return laFuncBuilder(\n    lookAheadPaths,\n    hasPredicates,\n    tokenMatcher,\n    dynamicTokensEnabled,\n  );\n}\n\n/**\n *  When dealing with an Optional production (OPTION/MANY/2nd iteration of AT_LEAST_ONE/...) we need to compare\n *  the lookahead \"inside\" the production and the lookahead immediately \"after\" it in the same top level rule (context free).\n *\n *  Example: given a production:\n *  ABC(DE)?DF\n *\n *  The optional '(DE)?' should only be entered if we see 'DE'. a single Token 'D' is not sufficient to distinguish between the two\n *  alternatives.\n *\n *  @returns A Lookahead function which will return true IFF the parser should parse the Optional production.\n */\nexport function buildLookaheadFuncForOptionalProd(\n  occurrence: number,\n  ruleGrammar: Rule,\n  k: number,\n  dynamicTokensEnabled: boolean,\n  prodType: PROD_TYPE,\n  lookaheadBuilder: (\n    lookAheadSequence: LookaheadSequence,\n    tokenMatcher: TokenMatcher,\n    dynamicTokensEnabled: boolean,\n  ) => () => boolean,\n): () => boolean {\n  const lookAheadPaths = getLookaheadPathsForOptionalProd(\n    occurrence,\n    ruleGrammar,\n    prodType,\n    k,\n  );\n\n  const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)\n    ? 
tokenStructuredMatcherNoCategories\n    : tokenStructuredMatcher;\n\n  return lookaheadBuilder(\n    lookAheadPaths[0],\n    tokenMatcher,\n    dynamicTokensEnabled,\n  );\n}\n\nexport type Alternative = TokenType[][];\n\nexport function buildAlternativesLookAheadFunc(\n  alts: LookaheadSequence[],\n  hasPredicates: boolean,\n  tokenMatcher: TokenMatcher,\n  dynamicTokensEnabled: boolean,\n): (orAlts: IOrAlt[]) => number | undefined {\n  const numOfAlts = alts.length;\n  const areAllOneTokenLookahead = every(alts, (currAlt) => {\n    return every(currAlt, (currPath) => {\n      return currPath.length === 1;\n    });\n  });\n\n  // This version takes into account the predicates as well.\n  if (hasPredicates) {\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (\n      this: BaseParser,\n      orAlts: IOrAlt[],\n    ): number | undefined {\n      // unfortunately the predicates must be extracted every single time\n      // as they cannot be cached due to references to parameters(vars) which are no longer valid.\n      // note that in the common case of no predicates, no cpu time will be wasted on this (see else block)\n      const predicates: (Predicate | undefined)[] = map(\n        orAlts,\n        (currAlt) => currAlt.GATE,\n      );\n\n      for (let t = 0; t < numOfAlts; t++) {\n        const currAlt = alts[t];\n        const currNumOfPaths = currAlt.length;\n\n        const currPredicate = predicates[t];\n        if (currPredicate !== undefined && currPredicate.call(this) === false) {\n          // if the predicate does not match there is no point in checking the paths\n          continue;\n        }\n        nextPath: for (let j = 0; j < currNumOfPaths; j++) {\n          const currPath = currAlt[j];\n          const currPathLength = currPath.length;\n          for (let i = 0; i < currPathLength; i++) {\n            const nextToken = this.LA(i + 1);\n            if (tokenMatcher(nextToken, currPath[i]) === false) {\n              // mismatch in current path\n              // try the next pth\n              continue nextPath;\n            }\n          }\n          // found a full path that matches.\n          // this will also work for an empty ALT as the loop will be skipped\n          return t;\n        }\n        // none of the paths for the current alternative matched\n        // try the next alternative\n      }\n      // none of the alternatives could be matched\n      return undefined;\n    };\n  } else if (areAllOneTokenLookahead && !dynamicTokensEnabled) {\n    // optimized (common) case of all the lookaheads paths requiring only\n    // a single token lookahead. These Optimizations cannot work if dynamically defined Tokens are used.\n    const singleTokenAlts = map(alts, (currAlt) => {\n      return flatten(currAlt);\n    });\n\n    const choiceToAlt = reduce(\n      singleTokenAlts,\n      (result, currAlt, idx) => {\n        forEach(currAlt, (currTokType) => {\n          if (!has(result, currTokType.tokenTypeIdx!)) {\n            result[currTokType.tokenTypeIdx!] 
= idx;\n          }\n          forEach(currTokType.categoryMatches!, (currExtendingType) => {\n            if (!has(result, currExtendingType)) {\n              result[currExtendingType] = idx;\n            }\n          });\n        });\n        return result;\n      },\n      {} as Record,\n    );\n\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (this: BaseParser): number {\n      const nextToken = this.LA(1);\n      return choiceToAlt[nextToken.tokenTypeIdx];\n    };\n  } else {\n    // optimized lookahead without needing to check the predicates at all.\n    // this causes code duplication which is intentional to improve performance.\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (this: BaseParser): number | undefined {\n      for (let t = 0; t < numOfAlts; t++) {\n        const currAlt = alts[t];\n        const currNumOfPaths = currAlt.length;\n        nextPath: for (let j = 0; j < currNumOfPaths; j++) {\n          const currPath = currAlt[j];\n          const currPathLength = currPath.length;\n          for (let i = 0; i < currPathLength; i++) {\n            const nextToken = this.LA(i + 1);\n            if (tokenMatcher(nextToken, currPath[i]) === false) {\n              // mismatch in current path\n              // try the next pth\n              continue nextPath;\n            }\n          }\n          // found a full path that matches.\n          // this will also work for an empty ALT as the loop will be skipped\n          return t;\n        }\n        // none of the paths for the current alternative matched\n        // try the next alternative\n      }\n      // none of the alternatives could be matched\n      return undefined;\n    };\n  }\n}\n\nexport function buildSingleAlternativeLookaheadFunction(\n  alt: LookaheadSequence,\n  tokenMatcher: TokenMatcher,\n  dynamicTokensEnabled: boolean,\n): () => boolean {\n  const areAllOneTokenLookahead = every(alt, (currPath) => {\n    return currPath.length === 1;\n  });\n\n  const numOfPaths = alt.length;\n\n  // optimized (common) case of all the lookaheads paths requiring only\n  // a single token lookahead.\n  if (areAllOneTokenLookahead && !dynamicTokensEnabled) {\n    const singleTokensTypes = flatten(alt);\n\n    if (\n      singleTokensTypes.length === 1 &&\n      isEmpty((singleTokensTypes[0]).categoryMatches)\n    ) {\n      const expectedTokenType = singleTokensTypes[0];\n      const expectedTokenUniqueKey = (expectedTokenType).tokenTypeIdx;\n\n      return function (this: BaseParser): boolean {\n        return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey;\n      };\n    } else {\n      const choiceToAlt = reduce(\n        singleTokensTypes,\n        (result, currTokType, idx) => {\n          result[currTokType.tokenTypeIdx!] 
= true;\n          forEach(currTokType.categoryMatches!, (currExtendingType) => {\n            result[currExtendingType] = true;\n          });\n          return result;\n        },\n        [] as boolean[],\n      );\n\n      return function (this: BaseParser): boolean {\n        const nextToken = this.LA(1);\n        return choiceToAlt[nextToken.tokenTypeIdx] === true;\n      };\n    }\n  } else {\n    return function (this: BaseParser): boolean {\n      nextPath: for (let j = 0; j < numOfPaths; j++) {\n        const currPath = alt[j];\n        const currPathLength = currPath.length;\n        for (let i = 0; i < currPathLength; i++) {\n          const nextToken = this.LA(i + 1);\n          if (tokenMatcher(nextToken, currPath[i]) === false) {\n            // mismatch in current path\n            // try the next pth\n            continue nextPath;\n          }\n        }\n        // found a full path that matches.\n        return true;\n      }\n\n      // none of the paths matched\n      return false;\n    };\n  }\n}\n\nclass RestDefinitionFinderWalker extends RestWalker {\n  private restDef: IProduction[];\n\n  constructor(\n    private topProd: Rule,\n    private targetOccurrence: number,\n    private targetProdType: PROD_TYPE,\n  ) {\n    super();\n  }\n\n  startWalking(): IProduction[] {\n    this.walk(this.topProd);\n    return this.restDef;\n  }\n\n  private checkIsTarget(\n    node: IProductionWithOccurrence,\n    expectedProdType: PROD_TYPE,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): boolean {\n    if (\n      node.idx === this.targetOccurrence &&\n      this.targetProdType === expectedProdType\n    ) {\n      this.restDef = currRest.concat(prevRest);\n      return true;\n    }\n    // performance optimization, do not iterate over the entire Grammar ast after we have found the target\n    return false;\n  }\n\n  walkOption(\n    optionProd: Option,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (!this.checkIsTarget(optionProd, PROD_TYPE.OPTION, currRest, prevRest)) {\n      super.walkOption(optionProd, currRest, prevRest);\n    }\n  }\n\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        atLeastOneProd,\n        PROD_TYPE.REPETITION_MANDATORY,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(atLeastOneProd, currRest, prevRest);\n    }\n  }\n\n  walkAtLeastOneSep(\n    atLeastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        atLeastOneSepProd,\n        PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(atLeastOneSepProd, currRest, prevRest);\n    }\n  }\n\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(manyProd, PROD_TYPE.REPETITION, currRest, prevRest)\n    ) {\n      super.walkOption(manyProd, currRest, prevRest);\n    }\n  }\n\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        manySepProd,\n        PROD_TYPE.REPETITION_WITH_SEPARATOR,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(manySepProd, currRest, prevRest);\n    
}\n  }\n}\n\n/**\n * Returns the definition of a target production in a top level level rule.\n */\nclass InsideDefinitionFinderVisitor extends GAstVisitor {\n  public result: IProduction[] = [];\n\n  constructor(\n    private targetOccurrence: number,\n    private targetProdType: PROD_TYPE,\n    private targetRef?: any,\n  ) {\n    super();\n  }\n\n  private checkIsTarget(\n    node: { definition: IProduction[] } & IProductionWithOccurrence,\n    expectedProdName: PROD_TYPE,\n  ): void {\n    if (\n      node.idx === this.targetOccurrence &&\n      this.targetProdType === expectedProdName &&\n      (this.targetRef === undefined || node === this.targetRef)\n    ) {\n      this.result = node.definition;\n    }\n  }\n\n  public visitOption(node: Option): void {\n    this.checkIsTarget(node, PROD_TYPE.OPTION);\n  }\n\n  public visitRepetition(node: Repetition): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION);\n  }\n\n  public visitRepetitionMandatory(node: RepetitionMandatory): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    node: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);\n  }\n\n  public visitRepetitionWithSeparator(node: RepetitionWithSeparator): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_WITH_SEPARATOR);\n  }\n\n  public visitAlternation(node: Alternation): void {\n    this.checkIsTarget(node, PROD_TYPE.ALTERNATION);\n  }\n}\n\nfunction initializeArrayOfArrays(size: number): any[][] {\n  const result = new Array(size);\n  for (let i = 0; i < size; i++) {\n    result[i] = [];\n  }\n  return result;\n}\n\n/**\n * A sort of hash function between a Path in the grammar and a string.\n * Note that this returns multiple \"hashes\" to support the scenario of token categories.\n * -  A single path with categories may match multiple **actual** paths.\n */\nfunction pathToHashKeys(path: TokenType[]): string[] {\n  let keys = [\"\"];\n  for (let i = 0; i < path.length; i++) {\n    const tokType = path[i];\n    const longerKeys = [];\n    for (let j = 0; j < keys.length; j++) {\n      const currShorterKey = keys[j];\n      longerKeys.push(currShorterKey + \"_\" + tokType.tokenTypeIdx);\n      for (let t = 0; t < tokType.categoryMatches!.length; t++) {\n        const categoriesKeySuffix = \"_\" + tokType.categoryMatches![t];\n        longerKeys.push(currShorterKey + categoriesKeySuffix);\n      }\n    }\n    keys = longerKeys;\n  }\n  return keys;\n}\n\n/**\n * Imperative style due to being called from a hot spot\n */\nfunction isUniquePrefixHash(\n  altKnownPathsKeys: Record[],\n  searchPathKeys: string[],\n  idx: number,\n): boolean {\n  for (\n    let currAltIdx = 0;\n    currAltIdx < altKnownPathsKeys.length;\n    currAltIdx++\n  ) {\n    // We only want to test vs the other alternatives\n    if (currAltIdx === idx) {\n      continue;\n    }\n    const otherAltKnownPathsKeys = altKnownPathsKeys[currAltIdx];\n    for (let searchIdx = 0; searchIdx < searchPathKeys.length; searchIdx++) {\n      const searchKey = searchPathKeys[searchIdx];\n      if (otherAltKnownPathsKeys[searchKey] === true) {\n        return false;\n      }\n    }\n  }\n  // None of the SearchPathKeys were found in any of the other alternatives\n  return true;\n}\n\nexport function lookAheadSequenceFromAlternatives(\n  altsDefs: IProduction[],\n  k: number,\n): LookaheadSequence[] {\n  const partialAlts = map(altsDefs, (currAlt) =>\n    
possiblePathsFrom([currAlt], 1),\n  );\n  const finalResult = initializeArrayOfArrays(partialAlts.length);\n  const altsHashes = map(partialAlts, (currAltPaths) => {\n    const dict: { [key: string]: boolean } = {};\n    forEach(currAltPaths, (item) => {\n      const keys = pathToHashKeys(item.partialPath);\n      forEach(keys, (currKey) => {\n        dict[currKey] = true;\n      });\n    });\n    return dict;\n  });\n  let newData = partialAlts;\n\n  // maxLookahead loop\n  for (let pathLength = 1; pathLength <= k; pathLength++) {\n    const currDataset = newData;\n    newData = initializeArrayOfArrays(currDataset.length);\n\n    // alternatives loop\n    for (let altIdx = 0; altIdx < currDataset.length; altIdx++) {\n      const currAltPathsAndSuffixes = currDataset[altIdx];\n      // paths in current alternative loop\n      for (\n        let currPathIdx = 0;\n        currPathIdx < currAltPathsAndSuffixes.length;\n        currPathIdx++\n      ) {\n        const currPathPrefix = currAltPathsAndSuffixes[currPathIdx].partialPath;\n        const suffixDef = currAltPathsAndSuffixes[currPathIdx].suffixDef;\n        const prefixKeys = pathToHashKeys(currPathPrefix);\n        const isUnique = isUniquePrefixHash(altsHashes, prefixKeys, altIdx);\n        // End of the line for this path.\n        if (isUnique || isEmpty(suffixDef) || currPathPrefix.length === k) {\n          const currAltResult = finalResult[altIdx];\n          // TODO: Can we implement a containsPath using Maps/Dictionaries?\n          if (containsPath(currAltResult, currPathPrefix) === false) {\n            currAltResult.push(currPathPrefix);\n            // Update all new  keys for the current path.\n            for (let j = 0; j < prefixKeys.length; j++) {\n              const currKey = prefixKeys[j];\n              altsHashes[altIdx][currKey] = true;\n            }\n          }\n        }\n        // Expand longer paths\n        else {\n          const newPartialPathsAndSuffixes = possiblePathsFrom(\n            suffixDef,\n            pathLength + 1,\n            currPathPrefix,\n          );\n          newData[altIdx] = newData[altIdx].concat(newPartialPathsAndSuffixes);\n\n          // Update keys for new known paths\n          forEach(newPartialPathsAndSuffixes, (item) => {\n            const prefixKeys = pathToHashKeys(item.partialPath);\n            forEach(prefixKeys, (key) => {\n              altsHashes[altIdx][key] = true;\n            });\n          });\n        }\n      }\n    }\n  }\n\n  return finalResult;\n}\n\nexport function getLookaheadPathsForOr(\n  occurrence: number,\n  ruleGrammar: Rule,\n  k: number,\n  orProd?: Alternation,\n): LookaheadSequence[] {\n  const visitor = new InsideDefinitionFinderVisitor(\n    occurrence,\n    PROD_TYPE.ALTERNATION,\n    orProd,\n  );\n  ruleGrammar.accept(visitor);\n  return lookAheadSequenceFromAlternatives(visitor.result, k);\n}\n\nexport function getLookaheadPathsForOptionalProd(\n  occurrence: number,\n  ruleGrammar: Rule,\n  prodType: PROD_TYPE,\n  k: number,\n): LookaheadSequence[] {\n  const insideDefVisitor = new InsideDefinitionFinderVisitor(\n    occurrence,\n    prodType,\n  );\n  ruleGrammar.accept(insideDefVisitor);\n  const insideDef = insideDefVisitor.result;\n\n  const afterDefWalker = new RestDefinitionFinderWalker(\n    ruleGrammar,\n    occurrence,\n    prodType,\n  );\n  const afterDef = afterDefWalker.startWalking();\n\n  const insideFlat = new AlternativeGAST({ definition: insideDef });\n  const afterFlat = new AlternativeGAST({ definition: afterDef 
});\n\n  return lookAheadSequenceFromAlternatives([insideFlat, afterFlat], k);\n}\n\nexport function containsPath(\n  alternative: Alternative,\n  searchPath: TokenType[],\n): boolean {\n  compareOtherPath: for (let i = 0; i < alternative.length; i++) {\n    const otherPath = alternative[i];\n    if (otherPath.length !== searchPath.length) {\n      continue;\n    }\n    for (let j = 0; j < otherPath.length; j++) {\n      const searchTok = searchPath[j];\n      const otherTok = otherPath[j];\n\n      const matchingTokens =\n        searchTok === otherTok ||\n        otherTok.categoryMatchesMap![searchTok.tokenTypeIdx!] !== undefined;\n      if (matchingTokens === false) {\n        continue compareOtherPath;\n      }\n    }\n    return true;\n  }\n\n  return false;\n}\n\nexport function isStrictPrefixOfPath(\n  prefix: TokenType[],\n  other: TokenType[],\n): boolean {\n  return (\n    prefix.length < other.length &&\n    every(prefix, (tokType, idx) => {\n      const otherTokType = other[idx];\n      return (\n        tokType === otherTokType ||\n        otherTokType.categoryMatchesMap![tokType.tokenTypeIdx!]\n      );\n    })\n  );\n}\n\nexport function areTokenCategoriesNotUsed(\n  lookAheadPaths: LookaheadSequence[],\n): boolean {\n  return every(lookAheadPaths, (singleAltPaths) =>\n    every(singleAltPaths, (singlePath) =>\n      every(singlePath, (token) => isEmpty(token.categoryMatches!)),\n    ),\n  );\n}\n", "import {\n  clone,\n  compact,\n  difference,\n  drop,\n  dropRight,\n  filter,\n  first,\n  flatMap,\n  flatten,\n  forEach,\n  groupBy,\n  includes,\n  isEmpty,\n  map,\n  pickBy,\n  reduce,\n  reject,\n  values,\n} from \"lodash-es\";\nimport {\n  IParserAmbiguousAlternativesDefinitionError,\n  IParserDuplicatesDefinitionError,\n  IParserEmptyAlternativeDefinitionError,\n  ParserDefinitionErrorType,\n} from \"../parser/parser.js\";\nimport {\n  Alternation,\n  Alternative as AlternativeGAST,\n  GAstVisitor,\n  getProductionDslName,\n  isOptionalProd,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport {\n  Alternative,\n  containsPath,\n  getLookaheadPathsForOptionalProd,\n  getLookaheadPathsForOr,\n  getProdType,\n  isStrictPrefixOfPath,\n} from \"./lookahead.js\";\nimport { nextPossibleTokensAfter } from \"./interpreter.js\";\nimport {\n  ILookaheadStrategy,\n  IProduction,\n  IProductionWithOccurrence,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  IGrammarValidatorErrorMessageProvider,\n  IParserDefinitionError,\n} from \"./types.js\";\nimport { tokenStructuredMatcher } from \"../../scan/tokens.js\";\n\nexport function validateLookahead(options: {\n  lookaheadStrategy: ILookaheadStrategy;\n  rules: Rule[];\n  tokenTypes: TokenType[];\n  grammarName: string;\n}): IParserDefinitionError[] {\n  const lookaheadValidationErrorMessages = options.lookaheadStrategy.validate({\n    rules: options.rules,\n    tokenTypes: options.tokenTypes,\n    grammarName: options.grammarName,\n  });\n  return map(lookaheadValidationErrorMessages, (errorMessage) => ({\n    type: ParserDefinitionErrorType.CUSTOM_LOOKAHEAD_VALIDATION,\n    ...errorMessage,\n  }));\n}\n\nexport function validateGrammar(\n  topLevels: Rule[],\n  tokenTypes: TokenType[],\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n  grammarName: string,\n): IParserDefinitionError[] {\n  const duplicateErrors: IParserDefinitionError[] = flatMap(\n    topLevels,\n    
(currTopLevel) =>\n      validateDuplicateProductions(currTopLevel, errMsgProvider),\n  );\n\n  const termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace(\n    topLevels,\n    tokenTypes,\n    errMsgProvider,\n  );\n\n  const tooManyAltsErrors = flatMap(topLevels, (curRule) =>\n    validateTooManyAlts(curRule, errMsgProvider),\n  );\n\n  const duplicateRulesError = flatMap(topLevels, (curRule) =>\n    validateRuleDoesNotAlreadyExist(\n      curRule,\n      topLevels,\n      grammarName,\n      errMsgProvider,\n    ),\n  );\n\n  return duplicateErrors.concat(\n    termsNamespaceConflictErrors,\n    tooManyAltsErrors,\n    duplicateRulesError,\n  );\n}\n\nfunction validateDuplicateProductions(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDuplicatesDefinitionError[] {\n  const collectorVisitor = new OccurrenceValidationCollector();\n  topLevelRule.accept(collectorVisitor);\n  const allRuleProductions = collectorVisitor.allProductions;\n\n  const productionGroups = groupBy(\n    allRuleProductions,\n    identifyProductionForDuplicates,\n  );\n\n  const duplicates: any = pickBy(productionGroups, (currGroup) => {\n    return currGroup.length > 1;\n  });\n\n  const errors = map(values(duplicates), (currDuplicates: any) => {\n    const firstProd: any = first(currDuplicates);\n    const msg = errMsgProvider.buildDuplicateFoundError(\n      topLevelRule,\n      currDuplicates,\n    );\n    const dslName = getProductionDslName(firstProd);\n    const defError: IParserDuplicatesDefinitionError = {\n      message: msg,\n      type: ParserDefinitionErrorType.DUPLICATE_PRODUCTIONS,\n      ruleName: topLevelRule.name,\n      dslName: dslName,\n      occurrence: firstProd.idx,\n    };\n\n    const param = getExtraProductionArgument(firstProd);\n    if (param) {\n      defError.parameter = param;\n    }\n\n    return defError;\n  });\n  return errors;\n}\n\nexport function identifyProductionForDuplicates(\n  prod: IProductionWithOccurrence,\n): string {\n  return `${getProductionDslName(prod)}_#_${\n    prod.idx\n  }_#_${getExtraProductionArgument(prod)}`;\n}\n\nfunction getExtraProductionArgument(prod: IProductionWithOccurrence): string {\n  if (prod instanceof Terminal) {\n    return prod.terminalType.name;\n  } else if (prod instanceof NonTerminal) {\n    return prod.nonTerminalName;\n  } else {\n    return \"\";\n  }\n}\n\nexport class OccurrenceValidationCollector extends GAstVisitor {\n  public allProductions: IProductionWithOccurrence[] = [];\n\n  public visitNonTerminal(subrule: NonTerminal): void {\n    this.allProductions.push(subrule);\n  }\n\n  public visitOption(option: Option): void {\n    this.allProductions.push(option);\n  }\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.allProductions.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.allProductions.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.allProductions.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.allProductions.push(many);\n  }\n\n  public visitAlternation(or: Alternation): void {\n    this.allProductions.push(or);\n  }\n\n  public visitTerminal(terminal: Terminal): void {\n    this.allProductions.push(terminal);\n  }\n}\n\nexport function validateRuleDoesNotAlreadyExist(\n  rule: Rule,\n  allRules: Rule[],\n  className: 
string,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors = [];\n  const occurrences = reduce(\n    allRules,\n    (result, curRule) => {\n      if (curRule.name === rule.name) {\n        return result + 1;\n      }\n      return result;\n    },\n    0,\n  );\n  if (occurrences > 1) {\n    const errMsg = errMsgProvider.buildDuplicateRuleNameError({\n      topLevelRule: rule,\n      grammarName: className,\n    });\n    errors.push({\n      message: errMsg,\n      type: ParserDefinitionErrorType.DUPLICATE_RULE_NAME,\n      ruleName: rule.name,\n    });\n  }\n\n  return errors;\n}\n\n// TODO: is there anyway to get only the rule names of rules inherited from the super grammars?\n// This is not part of the IGrammarErrorProvider because the validation cannot be performed on\n// The grammar structure, only at runtime.\nexport function validateRuleIsOverridden(\n  ruleName: string,\n  definedRulesNames: string[],\n  className: string,\n): IParserDefinitionError[] {\n  const errors = [];\n  let errMsg;\n\n  if (!includes(definedRulesNames, ruleName)) {\n    errMsg =\n      `Invalid rule override, rule: ->${ruleName}<- cannot be overridden in the grammar: ->${className}<-` +\n      `as it is not defined in any of the super grammars `;\n    errors.push({\n      message: errMsg,\n      type: ParserDefinitionErrorType.INVALID_RULE_OVERRIDE,\n      ruleName: ruleName,\n    });\n  }\n\n  return errors;\n}\n\nexport function validateNoLeftRecursion(\n  topRule: Rule,\n  currRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n  path: Rule[] = [],\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n  const nextNonTerminals = getFirstNoneTerminal(currRule.definition);\n  if (isEmpty(nextNonTerminals)) {\n    return [];\n  } else {\n    const ruleName = topRule.name;\n    const foundLeftRecursion = includes(nextNonTerminals, topRule);\n    if (foundLeftRecursion) {\n      errors.push({\n        message: errMsgProvider.buildLeftRecursionError({\n          topLevelRule: topRule,\n          leftRecursionPath: path,\n        }),\n        type: ParserDefinitionErrorType.LEFT_RECURSION,\n        ruleName: ruleName,\n      });\n    }\n\n    // we are only looking for cyclic paths leading back to the specific topRule\n    // other cyclic paths are ignored, we still need this difference to avoid infinite loops...\n    const validNextSteps = difference(nextNonTerminals, path.concat([topRule]));\n    const errorsFromNextSteps = flatMap(validNextSteps, (currRefRule) => {\n      const newPath = clone(path);\n      newPath.push(currRefRule);\n      return validateNoLeftRecursion(\n        topRule,\n        currRefRule,\n        errMsgProvider,\n        newPath,\n      );\n    });\n\n    return errors.concat(errorsFromNextSteps);\n  }\n}\n\nexport function getFirstNoneTerminal(definition: IProduction[]): Rule[] {\n  let result: Rule[] = [];\n  if (isEmpty(definition)) {\n    return result;\n  }\n  const firstProd = first(definition);\n\n  /* istanbul ignore else */\n  if (firstProd instanceof NonTerminal) {\n    result.push(firstProd.referencedRule);\n  } else if (\n    firstProd instanceof AlternativeGAST ||\n    firstProd instanceof Option ||\n    firstProd instanceof RepetitionMandatory ||\n    firstProd instanceof RepetitionMandatoryWithSeparator ||\n    firstProd instanceof RepetitionWithSeparator ||\n    firstProd instanceof Repetition\n  ) {\n    result = result.concat(\n      
getFirstNoneTerminal(firstProd.definition),\n    );\n  } else if (firstProd instanceof Alternation) {\n    // each sub definition in alternation is a FLAT\n    result = flatten(\n      map(firstProd.definition, (currSubDef) =>\n        getFirstNoneTerminal((currSubDef).definition),\n      ),\n    );\n  } else if (firstProd instanceof Terminal) {\n    // nothing to see, move along\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n\n  const isFirstOptional = isOptionalProd(firstProd);\n  const hasMore = definition.length > 1;\n  if (isFirstOptional && hasMore) {\n    const rest = drop(definition);\n    return result.concat(getFirstNoneTerminal(rest));\n  } else {\n    return result;\n  }\n}\n\nclass OrCollector extends GAstVisitor {\n  public alternations: Alternation[] = [];\n\n  public visitAlternation(node: Alternation): void {\n    this.alternations.push(node);\n  }\n}\n\nexport function validateEmptyOrAlternative(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserEmptyAlternativeDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  const ors = orCollector.alternations;\n\n  const errors = flatMap(\n    ors,\n    (currOr) => {\n      const exceptLast = dropRight(currOr.definition);\n      return flatMap(exceptLast, (currAlternative, currAltIdx) => {\n        const possibleFirstInAlt = nextPossibleTokensAfter(\n          [currAlternative],\n          [],\n          tokenStructuredMatcher,\n          1,\n        );\n        if (isEmpty(possibleFirstInAlt)) {\n          return [\n            {\n              message: errMsgProvider.buildEmptyAlternationError({\n                topLevelRule: topLevelRule,\n                alternation: currOr,\n                emptyChoiceIdx: currAltIdx,\n              }),\n              type: ParserDefinitionErrorType.NONE_LAST_EMPTY_ALT,\n              ruleName: topLevelRule.name,\n              occurrence: currOr.idx,\n              alternative: currAltIdx + 1,\n            },\n          ];\n        } else {\n          return [];\n        }\n      });\n    },\n  );\n\n  return errors;\n}\n\nexport function validateAmbiguousAlternationAlternatives(\n  topLevelRule: Rule,\n  globalMaxLookahead: number,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  let ors = orCollector.alternations;\n\n  // New Handling of ignoring ambiguities\n  // - https://github.com/chevrotain/chevrotain/issues/869\n  ors = reject(ors, (currOr) => currOr.ignoreAmbiguities === true);\n\n  const errors = flatMap(ors, (currOr: Alternation) => {\n    const currOccurrence = currOr.idx;\n    const actualMaxLookahead = currOr.maxLookahead || globalMaxLookahead;\n    const alternatives = getLookaheadPathsForOr(\n      currOccurrence,\n      topLevelRule,\n      actualMaxLookahead,\n      currOr,\n    );\n    const altsAmbiguityErrors = checkAlternativesAmbiguities(\n      alternatives,\n      currOr,\n      topLevelRule,\n      errMsgProvider,\n    );\n    const altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities(\n      alternatives,\n      currOr,\n      topLevelRule,\n      errMsgProvider,\n    );\n\n    return altsAmbiguityErrors.concat(altsPrefixAmbiguityErrors);\n  });\n\n  return errors;\n}\n\nexport class RepetitionCollector extends GAstVisitor {\n  public allProductions: (IProductionWithOccurrence & {\n    maxLookahead?: number;\n  
})[] = [];\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.allProductions.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.allProductions.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.allProductions.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.allProductions.push(many);\n  }\n}\n\nexport function validateTooManyAlts(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  const ors = orCollector.alternations;\n\n  const errors = flatMap(ors, (currOr) => {\n    if (currOr.definition.length > 255) {\n      return [\n        {\n          message: errMsgProvider.buildTooManyAlternativesError({\n            topLevelRule: topLevelRule,\n            alternation: currOr,\n          }),\n          type: ParserDefinitionErrorType.TOO_MANY_ALTS,\n          ruleName: topLevelRule.name,\n          occurrence: currOr.idx,\n        },\n      ];\n    } else {\n      return [];\n    }\n  });\n\n  return errors;\n}\n\nexport function validateSomeNonEmptyLookaheadPath(\n  topLevelRules: Rule[],\n  maxLookahead: number,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n  forEach(topLevelRules, (currTopRule) => {\n    const collectorVisitor = new RepetitionCollector();\n    currTopRule.accept(collectorVisitor);\n    const allRuleProductions = collectorVisitor.allProductions;\n    forEach(allRuleProductions, (currProd) => {\n      const prodType = getProdType(currProd);\n      const actualMaxLookahead = currProd.maxLookahead || maxLookahead;\n      const currOccurrence = currProd.idx;\n      const paths = getLookaheadPathsForOptionalProd(\n        currOccurrence,\n        currTopRule,\n        prodType,\n        actualMaxLookahead,\n      );\n      const pathsInsideProduction = paths[0];\n      if (isEmpty(flatten(pathsInsideProduction))) {\n        const errMsg = errMsgProvider.buildEmptyRepetitionError({\n          topLevelRule: currTopRule,\n          repetition: currProd,\n        });\n        errors.push({\n          message: errMsg,\n          type: ParserDefinitionErrorType.NO_NON_EMPTY_LOOKAHEAD,\n          ruleName: currTopRule.name,\n        });\n      }\n    });\n  });\n\n  return errors;\n}\n\nexport interface IAmbiguityDescriptor {\n  alts: number[];\n  path: TokenType[];\n}\n\nfunction checkAlternativesAmbiguities(\n  alternatives: Alternative[],\n  alternation: Alternation,\n  rule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  const foundAmbiguousPaths: Alternative = [];\n  const identicalAmbiguities = reduce(\n    alternatives,\n    (result, currAlt, currAltIdx) => {\n      // ignore (skip) ambiguities with this alternative\n      if (alternation.definition[currAltIdx].ignoreAmbiguities === true) {\n        return result;\n      }\n\n      forEach(currAlt, (currPath) => {\n        const altsCurrPathAppearsIn = [currAltIdx];\n        forEach(alternatives, (currOtherAlt, currOtherAltIdx) => {\n          if (\n            currAltIdx !== currOtherAltIdx &&\n            containsPath(currOtherAlt, currPath) &&\n            // ignore (skip) ambiguities with 
this \"other\" alternative\n            alternation.definition[currOtherAltIdx].ignoreAmbiguities !== true\n          ) {\n            altsCurrPathAppearsIn.push(currOtherAltIdx);\n          }\n        });\n\n        if (\n          altsCurrPathAppearsIn.length > 1 &&\n          !containsPath(foundAmbiguousPaths, currPath)\n        ) {\n          foundAmbiguousPaths.push(currPath);\n          result.push({\n            alts: altsCurrPathAppearsIn,\n            path: currPath,\n          });\n        }\n      });\n      return result;\n    },\n    [] as { alts: number[]; path: TokenType[] }[],\n  );\n\n  const currErrors = map(identicalAmbiguities, (currAmbDescriptor) => {\n    const ambgIndices = map(\n      currAmbDescriptor.alts,\n      (currAltIdx) => currAltIdx + 1,\n    );\n\n    const currMessage = errMsgProvider.buildAlternationAmbiguityError({\n      topLevelRule: rule,\n      alternation: alternation,\n      ambiguityIndices: ambgIndices,\n      prefixPath: currAmbDescriptor.path,\n    });\n\n    return {\n      message: currMessage,\n      type: ParserDefinitionErrorType.AMBIGUOUS_ALTS,\n      ruleName: rule.name,\n      occurrence: alternation.idx,\n      alternatives: currAmbDescriptor.alts,\n    };\n  });\n\n  return currErrors;\n}\n\nexport function checkPrefixAlternativesAmbiguities(\n  alternatives: Alternative[],\n  alternation: Alternation,\n  rule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  // flatten\n  const pathsAndIndices = reduce(\n    alternatives,\n    (result, currAlt, idx) => {\n      const currPathsAndIdx = map(currAlt, (currPath) => {\n        return { idx: idx, path: currPath };\n      });\n      return result.concat(currPathsAndIdx);\n    },\n    [] as { idx: number; path: TokenType[] }[],\n  );\n\n  const errors = compact(\n    flatMap(pathsAndIndices, (currPathAndIdx) => {\n      const alternativeGast = alternation.definition[currPathAndIdx.idx];\n      // ignore (skip) ambiguities with this alternative\n      if (alternativeGast.ignoreAmbiguities === true) {\n        return [];\n      }\n      const targetIdx = currPathAndIdx.idx;\n      const targetPath = currPathAndIdx.path;\n\n      const prefixAmbiguitiesPathsAndIndices = filter(\n        pathsAndIndices,\n        (searchPathAndIdx) => {\n          // prefix ambiguity can only be created from lower idx (higher priority) path\n          return (\n            // ignore (skip) ambiguities with this \"other\" alternative\n            alternation.definition[searchPathAndIdx.idx].ignoreAmbiguities !==\n              true &&\n            searchPathAndIdx.idx < targetIdx &&\n            // checking for strict prefix because identical lookaheads\n            // will be be detected using a different validation.\n            isStrictPrefixOfPath(searchPathAndIdx.path, targetPath)\n          );\n        },\n      );\n\n      const currPathPrefixErrors = map(\n        prefixAmbiguitiesPathsAndIndices,\n        (currAmbPathAndIdx): IParserAmbiguousAlternativesDefinitionError => {\n          const ambgIndices = [currAmbPathAndIdx.idx + 1, targetIdx + 1];\n          const occurrence = alternation.idx === 0 ? 
\"\" : alternation.idx;\n\n          const message = errMsgProvider.buildAlternationPrefixAmbiguityError({\n            topLevelRule: rule,\n            alternation: alternation,\n            ambiguityIndices: ambgIndices,\n            prefixPath: currAmbPathAndIdx.path,\n          });\n          return {\n            message: message,\n            type: ParserDefinitionErrorType.AMBIGUOUS_PREFIX_ALTS,\n            ruleName: rule.name,\n            occurrence: occurrence,\n            alternatives: ambgIndices,\n          };\n        },\n      );\n\n      return currPathPrefixErrors;\n    }),\n  );\n\n  return errors;\n}\n\nfunction checkTerminalAndNoneTerminalsNameSpace(\n  topLevels: Rule[],\n  tokenTypes: TokenType[],\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n\n  const tokenNames = map(tokenTypes, (currToken) => currToken.name);\n\n  forEach(topLevels, (currRule) => {\n    const currRuleName = currRule.name;\n    if (includes(tokenNames, currRuleName)) {\n      const errMsg = errMsgProvider.buildNamespaceConflictError(currRule);\n\n      errors.push({\n        message: errMsg,\n        type: ParserDefinitionErrorType.CONFLICT_TOKENS_RULES_NAMESPACE,\n        ruleName: currRuleName,\n      });\n    }\n  });\n\n  return errors;\n}\n", "import { Rule } from \"@chevrotain/gast\";\nimport { defaults, forEach } from \"lodash-es\";\nimport { resolveGrammar as orgResolveGrammar } from \"../resolver.js\";\nimport { validateGrammar as orgValidateGrammar } from \"../checks.js\";\nimport {\n  defaultGrammarResolverErrorProvider,\n  defaultGrammarValidatorErrorProvider,\n} from \"../../errors_public.js\";\nimport { TokenType } from \"@chevrotain/types\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IGrammarValidatorErrorMessageProvider,\n  IParserDefinitionError,\n} from \"../types.js\";\n\ntype ResolveGrammarOpts = {\n  rules: Rule[];\n  errMsgProvider?: IGrammarResolverErrorMessageProvider;\n};\nexport function resolveGrammar(\n  options: ResolveGrammarOpts,\n): IParserDefinitionError[] {\n  const actualOptions: Required = defaults(options, {\n    errMsgProvider: defaultGrammarResolverErrorProvider,\n  });\n\n  const topRulesTable: { [ruleName: string]: Rule } = {};\n  forEach(options.rules, (rule) => {\n    topRulesTable[rule.name] = rule;\n  });\n  return orgResolveGrammar(topRulesTable, actualOptions.errMsgProvider);\n}\n\nexport function validateGrammar(options: {\n  rules: Rule[];\n  tokenTypes: TokenType[];\n  grammarName: string;\n  errMsgProvider: IGrammarValidatorErrorMessageProvider;\n}): IParserDefinitionError[] {\n  options = defaults(options, {\n    errMsgProvider: defaultGrammarValidatorErrorProvider,\n  });\n\n  return orgValidateGrammar(\n    options.rules,\n    options.tokenTypes,\n    options.errMsgProvider,\n    options.grammarName,\n  );\n}\n", "import { includes } from \"lodash-es\";\nimport {\n  IRecognitionException,\n  IRecognizerContext,\n  IToken,\n} from \"@chevrotain/types\";\n\nconst MISMATCHED_TOKEN_EXCEPTION = \"MismatchedTokenException\";\nconst NO_VIABLE_ALT_EXCEPTION = \"NoViableAltException\";\nconst EARLY_EXIT_EXCEPTION = \"EarlyExitException\";\nconst NOT_ALL_INPUT_PARSED_EXCEPTION = \"NotAllInputParsedException\";\n\nconst RECOGNITION_EXCEPTION_NAMES = [\n  MISMATCHED_TOKEN_EXCEPTION,\n  NO_VIABLE_ALT_EXCEPTION,\n  EARLY_EXIT_EXCEPTION,\n  NOT_ALL_INPUT_PARSED_EXCEPTION,\n];\n\nObject.freeze(RECOGNITION_EXCEPTION_NAMES);\n\n// hacks to bypass no support for 
custom Errors in javascript/typescript\nexport function isRecognitionException(error: Error) {\n  // can't do instanceof on hacked custom js exceptions\n  return includes(RECOGNITION_EXCEPTION_NAMES, error.name);\n}\n\nabstract class RecognitionException\n  extends Error\n  implements IRecognitionException\n{\n  context: IRecognizerContext;\n  resyncedTokens: IToken[] = [];\n\n  protected constructor(\n    message: string,\n    public token: IToken,\n  ) {\n    super(message);\n\n    // fix prototype chain when typescript target is ES5\n    Object.setPrototypeOf(this, new.target.prototype);\n\n    /* istanbul ignore next - V8 workaround to remove constructor from stacktrace when typescript target is ES5 */\n    if (Error.captureStackTrace) {\n      Error.captureStackTrace(this, this.constructor);\n    }\n  }\n}\n\nexport class MismatchedTokenException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = MISMATCHED_TOKEN_EXCEPTION;\n  }\n}\n\nexport class NoViableAltException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = NO_VIABLE_ALT_EXCEPTION;\n  }\n}\n\nexport class NotAllInputParsedException extends RecognitionException {\n  constructor(message: string, token: IToken) {\n    super(message, token);\n    this.name = NOT_ALL_INPUT_PARSED_EXCEPTION;\n  }\n}\n\nexport class EarlyExitException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = EARLY_EXIT_EXCEPTION;\n  }\n}\n", "import {\n  createTokenInstance,\n  EOF,\n  tokenMatcher,\n} from \"../../../scan/tokens_public.js\";\nimport {\n  AbstractNextTerminalAfterProductionWalker,\n  IFirstAfterRepetition,\n} from \"../../grammar/interpreter.js\";\nimport {\n  clone,\n  dropRight,\n  find,\n  flatten,\n  has,\n  includes,\n  isEmpty,\n  map,\n} from \"lodash-es\";\nimport {\n  IParserConfig,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { MismatchedTokenException } from \"../../exceptions_public.js\";\nimport { IN } from \"../../constants.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\nexport const EOF_FOLLOW_KEY: any = {};\n\nexport interface IFollowKey {\n  ruleName: string;\n  idxInCallingRule: number;\n  inRule: string;\n}\n\nexport const IN_RULE_RECOVERY_EXCEPTION = \"InRuleRecoveryException\";\n\nexport class InRuleRecoveryException extends Error {\n  constructor(message: string) {\n    super(message);\n    this.name = IN_RULE_RECOVERY_EXCEPTION;\n  }\n}\n\n/**\n * This trait is responsible for the error recovery and fault tolerant logic\n */\nexport class Recoverable {\n  recoveryEnabled: boolean;\n  firstAfterRepMap: Record;\n  resyncFollows: Record;\n\n  initRecoverable(config: IParserConfig) {\n    this.firstAfterRepMap = {};\n    this.resyncFollows = {};\n\n    this.recoveryEnabled = has(config, \"recoveryEnabled\")\n      ? 
(config.recoveryEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.recoveryEnabled;\n\n    // performance optimization, NOOP will be inlined which\n    // effectively means that this optional feature does not exist\n    // when not used.\n    if (this.recoveryEnabled) {\n      this.attemptInRepetitionRecovery = attemptInRepetitionRecovery;\n    }\n  }\n\n  public getTokenToInsert(tokType: TokenType): IToken {\n    const tokToInsert = createTokenInstance(\n      tokType,\n      \"\",\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n    );\n    tokToInsert.isInsertedInRecovery = true;\n    return tokToInsert;\n  }\n\n  public canTokenTypeBeInsertedInRecovery(tokType: TokenType): boolean {\n    return true;\n  }\n\n  public canTokenTypeBeDeletedInRecovery(tokType: TokenType): boolean {\n    return true;\n  }\n\n  tryInRepetitionRecovery(\n    this: MixedInParser,\n    grammarRule: Function,\n    grammarRuleArgs: any[],\n    lookAheadFunc: () => boolean,\n    expectedTokType: TokenType,\n  ): void {\n    // TODO: can the resyncTokenType be cached?\n    const reSyncTokType = this.findReSyncTokenType();\n    const savedLexerState = this.exportLexerState();\n    const resyncedTokens: IToken[] = [];\n    let passedResyncPoint = false;\n\n    const nextTokenWithoutResync = this.LA(1);\n    let currToken = this.LA(1);\n\n    const generateErrorMessage = () => {\n      const previousToken = this.LA(0);\n      // we are preemptively re-syncing before an error has been detected, therefor we must reproduce\n      // the error that would have been thrown\n      const msg = this.errorMessageProvider.buildMismatchTokenMessage({\n        expected: expectedTokType,\n        actual: nextTokenWithoutResync,\n        previous: previousToken,\n        ruleName: this.getCurrRuleFullName(),\n      });\n      const error = new MismatchedTokenException(\n        msg,\n        nextTokenWithoutResync,\n        this.LA(0),\n      );\n      // the first token here will be the original cause of the error, this is not part of the resyncedTokens property.\n      error.resyncedTokens = dropRight(resyncedTokens);\n      this.SAVE_ERROR(error);\n    };\n\n    while (!passedResyncPoint) {\n      // re-synced to a point where we can safely exit the repetition/\n      if (this.tokenMatcher(currToken, expectedTokType)) {\n        generateErrorMessage();\n        return; // must return here to avoid reverting the inputIdx\n      } else if (lookAheadFunc.call(this)) {\n        // we skipped enough tokens so we can resync right back into another iteration of the repetition grammar rule\n        generateErrorMessage();\n        // recursive invocation in other to support multiple re-syncs in the same top level repetition grammar rule\n        grammarRule.apply(this, grammarRuleArgs);\n        return; // must return here to avoid reverting the inputIdx\n      } else if (this.tokenMatcher(currToken, reSyncTokType)) {\n        passedResyncPoint = true;\n      } else {\n        currToken = this.SKIP_TOKEN();\n        this.addToResyncTokens(currToken, resyncedTokens);\n      }\n    }\n\n    // we were unable to find a CLOSER point to resync inside the Repetition, reset the state.\n    // The parsing exception we were trying to prevent will happen in the NEXT parsing step. 
it may be handled by\n    // \"between rules\" resync recovery later in the flow.\n    this.importLexerState(savedLexerState);\n  }\n\n  shouldInRepetitionRecoveryBeTried(\n    this: MixedInParser,\n    expectTokAfterLastMatch: TokenType,\n    nextTokIdx: number,\n    notStuck: boolean | undefined,\n  ): boolean {\n    // Edge case of arriving from a MANY repetition which is stuck\n    // Attempting recovery in this case could cause an infinite loop\n    if (notStuck === false) {\n      return false;\n    }\n\n    // no need to recover, next token is what we expect...\n    if (this.tokenMatcher(this.LA(1), expectTokAfterLastMatch)) {\n      return false;\n    }\n\n    // error recovery is disabled during backtracking as it can make the parser ignore a valid grammar path\n    // and prefer some backtracking path that includes recovered errors.\n    if (this.isBackTracking()) {\n      return false;\n    }\n\n    // if we can perform inRule recovery (single token insertion or deletion) we always prefer that recovery algorithm\n    // because if it works, it makes the least amount of changes to the input stream (greedy algorithm)\n    //noinspection RedundantIfStatementJS\n    if (\n      this.canPerformInRuleRecovery(\n        expectTokAfterLastMatch,\n        this.getFollowsForInRuleRecovery(expectTokAfterLastMatch, nextTokIdx),\n      )\n    ) {\n      return false;\n    }\n\n    return true;\n  }\n\n  // Error Recovery functionality\n  getFollowsForInRuleRecovery(\n    this: MixedInParser,\n    tokType: TokenType,\n    tokIdxInRule: number,\n  ): TokenType[] {\n    const grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);\n    const follows = this.getNextPossibleTokenTypes(grammarPath);\n    return follows;\n  }\n\n  tryInRuleRecovery(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n    follows: TokenType[],\n  ): IToken {\n    if (this.canRecoverWithSingleTokenInsertion(expectedTokType, follows)) {\n      const tokToInsert = this.getTokenToInsert(expectedTokType);\n      return tokToInsert;\n    }\n\n    if (this.canRecoverWithSingleTokenDeletion(expectedTokType)) {\n      const nextTok = this.SKIP_TOKEN();\n      this.consumeToken();\n      return nextTok;\n    }\n\n    throw new InRuleRecoveryException(\"sad sad panda\");\n  }\n\n  canPerformInRuleRecovery(\n    this: MixedInParser,\n    expectedToken: TokenType,\n    follows: TokenType[],\n  ): boolean {\n    return (\n      this.canRecoverWithSingleTokenInsertion(expectedToken, follows) ||\n      this.canRecoverWithSingleTokenDeletion(expectedToken)\n    );\n  }\n\n  canRecoverWithSingleTokenInsertion(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n    follows: TokenType[],\n  ): boolean {\n    if (!this.canTokenTypeBeInsertedInRecovery(expectedTokType)) {\n      return false;\n    }\n\n    // must know the possible following tokens to perform single token insertion\n    if (isEmpty(follows)) {\n      return false;\n    }\n\n    const mismatchedTok = this.LA(1);\n    const isMisMatchedTokInFollows =\n      find(follows, (possibleFollowsTokType: TokenType) => {\n        return this.tokenMatcher(mismatchedTok, possibleFollowsTokType);\n      }) !== undefined;\n\n    return isMisMatchedTokInFollows;\n  }\n\n  canRecoverWithSingleTokenDeletion(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n  ): boolean {\n    if (!this.canTokenTypeBeDeletedInRecovery(expectedTokType)) {\n      return false;\n    }\n\n    const isNextTokenWhatIsExpected = this.tokenMatcher(\n      this.LA(2),\n      
expectedTokType,\n    );\n    return isNextTokenWhatIsExpected;\n  }\n\n  isInCurrentRuleReSyncSet(\n    this: MixedInParser,\n    tokenTypeIdx: TokenType,\n  ): boolean {\n    const followKey = this.getCurrFollowKey();\n    const currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);\n    return includes(currentRuleReSyncSet, tokenTypeIdx);\n  }\n\n  findReSyncTokenType(this: MixedInParser): TokenType {\n    const allPossibleReSyncTokTypes = this.flattenFollowSet();\n    // this loop will always terminate as EOF is always in the follow stack and also always (virtually) in the input\n    let nextToken = this.LA(1);\n    let k = 2;\n    while (true) {\n      const foundMatch = find(allPossibleReSyncTokTypes, (resyncTokType) => {\n        const canMatch = tokenMatcher(nextToken, resyncTokType);\n        return canMatch;\n      });\n      if (foundMatch !== undefined) {\n        return foundMatch;\n      }\n      nextToken = this.LA(k);\n      k++;\n    }\n  }\n\n  getCurrFollowKey(this: MixedInParser): IFollowKey {\n    // the length is at least one as we always add the ruleName to the stack before invoking the rule.\n    if (this.RULE_STACK.length === 1) {\n      return EOF_FOLLOW_KEY;\n    }\n    const currRuleShortName = this.getLastExplicitRuleShortName();\n    const currRuleIdx = this.getLastExplicitRuleOccurrenceIndex();\n    const prevRuleShortName = this.getPreviousExplicitRuleShortName();\n\n    return {\n      ruleName: this.shortRuleNameToFullName(currRuleShortName),\n      idxInCallingRule: currRuleIdx,\n      inRule: this.shortRuleNameToFullName(prevRuleShortName),\n    };\n  }\n\n  buildFullFollowKeyStack(this: MixedInParser): IFollowKey[] {\n    const explicitRuleStack = this.RULE_STACK;\n    const explicitOccurrenceStack = this.RULE_OCCURRENCE_STACK;\n\n    return map(explicitRuleStack, (ruleName, idx) => {\n      if (idx === 0) {\n        return EOF_FOLLOW_KEY;\n      }\n      return {\n        ruleName: this.shortRuleNameToFullName(ruleName),\n        idxInCallingRule: explicitOccurrenceStack[idx],\n        inRule: this.shortRuleNameToFullName(explicitRuleStack[idx - 1]),\n      };\n    });\n  }\n\n  flattenFollowSet(this: MixedInParser): TokenType[] {\n    const followStack = map(this.buildFullFollowKeyStack(), (currKey) => {\n      return this.getFollowSetFromFollowKey(currKey);\n    });\n    return flatten(followStack);\n  }\n\n  getFollowSetFromFollowKey(\n    this: MixedInParser,\n    followKey: IFollowKey,\n  ): TokenType[] {\n    if (followKey === EOF_FOLLOW_KEY) {\n      return [EOF];\n    }\n\n    const followName =\n      followKey.ruleName + followKey.idxInCallingRule + IN + followKey.inRule;\n\n    return this.resyncFollows[followName];\n  }\n\n  // It does not make any sense to include a virtual EOF token in the list of resynced tokens\n  // as EOF does not really exist and thus does not contain any useful information (line/column numbers)\n  addToResyncTokens(\n    this: MixedInParser,\n    token: IToken,\n    resyncTokens: IToken[],\n  ): IToken[] {\n    if (!this.tokenMatcher(token, EOF)) {\n      resyncTokens.push(token);\n    }\n    return resyncTokens;\n  }\n\n  reSyncTo(this: MixedInParser, tokType: TokenType): IToken[] {\n    const resyncedTokens: IToken[] = [];\n    let nextTok = this.LA(1);\n    while (this.tokenMatcher(nextTok, tokType) === false) {\n      nextTok = this.SKIP_TOKEN();\n      this.addToResyncTokens(nextTok, resyncedTokens);\n    }\n    // the last token is not part of the error.\n    return dropRight(resyncedTokens);\n  
}\n\n  attemptInRepetitionRecovery(\n    this: MixedInParser,\n    prodFunc: Function,\n    args: any[],\n    lookaheadFunc: () => boolean,\n    dslMethodIdx: number,\n    prodOccurrence: number,\n    nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,\n    notStuck?: boolean,\n  ): void {\n    // by default this is a NO-OP\n    // The actual implementation is with the function(not method) below\n  }\n\n  getCurrentGrammarPath(\n    this: MixedInParser,\n    tokType: TokenType,\n    tokIdxInRule: number,\n  ): ITokenGrammarPath {\n    const pathRuleStack: string[] = this.getHumanReadableRuleStack();\n    const pathOccurrenceStack: number[] = clone(this.RULE_OCCURRENCE_STACK);\n    const grammarPath: any = {\n      ruleStack: pathRuleStack,\n      occurrenceStack: pathOccurrenceStack,\n      lastTok: tokType,\n      lastTokOccurrence: tokIdxInRule,\n    };\n\n    return grammarPath;\n  }\n  getHumanReadableRuleStack(this: MixedInParser): string[] {\n    return map(this.RULE_STACK, (currShortName) =>\n      this.shortRuleNameToFullName(currShortName),\n    );\n  }\n}\n\nexport function attemptInRepetitionRecovery(\n  this: MixedInParser,\n  prodFunc: Function,\n  args: any[],\n  lookaheadFunc: () => boolean,\n  dslMethodIdx: number,\n  prodOccurrence: number,\n  nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,\n  notStuck?: boolean,\n): void {\n  const key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);\n  let firstAfterRepInfo = this.firstAfterRepMap[key];\n  if (firstAfterRepInfo === undefined) {\n    const currRuleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[currRuleName];\n    const walker: AbstractNextTerminalAfterProductionWalker =\n      new nextToksWalker(ruleGrammar, prodOccurrence);\n    firstAfterRepInfo = walker.startWalking();\n    this.firstAfterRepMap[key] = firstAfterRepInfo;\n  }\n\n  let expectTokAfterLastMatch = firstAfterRepInfo.token;\n  let nextTokIdx = firstAfterRepInfo.occurrence;\n  const isEndOfRule = firstAfterRepInfo.isEndOfRule;\n\n  // special edge case of a TOP most repetition after which the input should END.\n  // this will force an attempt for inRule recovery in that scenario.\n  if (\n    this.RULE_STACK.length === 1 &&\n    isEndOfRule &&\n    expectTokAfterLastMatch === undefined\n  ) {\n    expectTokAfterLastMatch = EOF;\n    nextTokIdx = 1;\n  }\n\n  // We don't have anything to re-sync to...\n  // this condition was extracted from `shouldInRepetitionRecoveryBeTried` to act as a type-guard\n  if (expectTokAfterLastMatch === undefined || nextTokIdx === undefined) {\n    return;\n  }\n\n  if (\n    this.shouldInRepetitionRecoveryBeTried(\n      expectTokAfterLastMatch,\n      nextTokIdx,\n      notStuck,\n    )\n  ) {\n    // TODO: performance optimization: instead of passing the original args here, we modify\n    // the args param (or create a new one) and make sure the lookahead func is explicitly provided\n    // to avoid searching the cache for it once more.\n    this.tryInRepetitionRecovery(\n      prodFunc,\n      args,\n      lookaheadFunc,\n      expectTokAfterLastMatch,\n    );\n  }\n}\n", "// Lookahead keys are 32Bit integers in the form\n// TTTTTTTT-ZZZZZZZZZZZZ-YYYY-XXXXXXXX\n// XXXX -> Occurrence Index bitmap.\n// YYYY -> DSL Method Type bitmap.\n// ZZZZZZZZZZZZZZZ -> Rule short Index bitmap.\n// TTTTTTTTT -> alternation alternative index bitmap\n\nexport const BITS_FOR_METHOD_TYPE = 4;\nexport const BITS_FOR_OCCURRENCE_IDX = 8;\nexport const 
BITS_FOR_RULE_IDX = 12;\n// TODO: validation, this means that there may at most 2^8 --> 256 alternatives for an alternation.\nexport const BITS_FOR_ALT_IDX = 8;\n\n// short string used as part of mapping keys.\n// being short improves the performance when composing KEYS for maps out of these\n// The 5 - 8 bits (16 possible values, are reserved for the DSL method indices)\nexport const OR_IDX = 1 << BITS_FOR_OCCURRENCE_IDX;\nexport const OPTION_IDX = 2 << BITS_FOR_OCCURRENCE_IDX;\nexport const MANY_IDX = 3 << BITS_FOR_OCCURRENCE_IDX;\nexport const AT_LEAST_ONE_IDX = 4 << BITS_FOR_OCCURRENCE_IDX;\nexport const MANY_SEP_IDX = 5 << BITS_FOR_OCCURRENCE_IDX;\nexport const AT_LEAST_ONE_SEP_IDX = 6 << BITS_FOR_OCCURRENCE_IDX;\n\n// this actually returns a number, but it is always used as a string (object prop key)\nexport function getKeyForAutomaticLookahead(\n  ruleIdx: number,\n  dslMethodIdx: number,\n  occurrence: number,\n): number {\n  return occurrence | dslMethodIdx | ruleIdx;\n}\n\nconst BITS_START_FOR_ALT_IDX = 32 - BITS_FOR_ALT_IDX;\n", "import {\n  ILookaheadStrategy,\n  ILookaheadValidationError,\n  IOrAlt,\n  OptionalProductionType,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { flatMap, isEmpty } from \"lodash-es\";\nimport { defaultGrammarValidatorErrorProvider } from \"../errors_public.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser/parser.js\";\nimport {\n  validateAmbiguousAlternationAlternatives,\n  validateEmptyOrAlternative,\n  validateNoLeftRecursion,\n  validateSomeNonEmptyLookaheadPath,\n} from \"./checks.js\";\nimport {\n  buildAlternativesLookAheadFunc,\n  buildLookaheadFuncForOptionalProd,\n  buildLookaheadFuncForOr,\n  buildSingleAlternativeLookaheadFunction,\n  getProdType,\n} from \"./lookahead.js\";\nimport { IParserDefinitionError } from \"./types.js\";\n\nexport class LLkLookaheadStrategy implements ILookaheadStrategy {\n  readonly maxLookahead: number;\n\n  constructor(options?: { maxLookahead?: number }) {\n    this.maxLookahead =\n      options?.maxLookahead ?? 
DEFAULT_PARSER_CONFIG.maxLookahead;\n  }\n\n  validate(options: {\n    rules: Rule[];\n    tokenTypes: TokenType[];\n    grammarName: string;\n  }): ILookaheadValidationError[] {\n    const leftRecursionErrors = this.validateNoLeftRecursion(options.rules);\n\n    if (isEmpty(leftRecursionErrors)) {\n      const emptyAltErrors = this.validateEmptyOrAlternatives(options.rules);\n      const ambiguousAltsErrors = this.validateAmbiguousAlternationAlternatives(\n        options.rules,\n        this.maxLookahead,\n      );\n      const emptyRepetitionErrors = this.validateSomeNonEmptyLookaheadPath(\n        options.rules,\n        this.maxLookahead,\n      );\n      const allErrors = [\n        ...leftRecursionErrors,\n        ...emptyAltErrors,\n        ...ambiguousAltsErrors,\n        ...emptyRepetitionErrors,\n      ];\n      return allErrors;\n    }\n    return leftRecursionErrors;\n  }\n\n  validateNoLeftRecursion(rules: Rule[]): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateNoLeftRecursion(\n        currTopRule,\n        currTopRule,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateEmptyOrAlternatives(rules: Rule[]): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateEmptyOrAlternative(\n        currTopRule,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateAmbiguousAlternationAlternatives(\n    rules: Rule[],\n    maxLookahead: number,\n  ): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateAmbiguousAlternationAlternatives(\n        currTopRule,\n        maxLookahead,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateSomeNonEmptyLookaheadPath(\n    rules: Rule[],\n    maxLookahead: number,\n  ): IParserDefinitionError[] {\n    return validateSomeNonEmptyLookaheadPath(\n      rules,\n      maxLookahead,\n      defaultGrammarValidatorErrorProvider,\n    );\n  }\n\n  buildLookaheadForAlternation(options: {\n    prodOccurrence: number;\n    rule: Rule;\n    maxLookahead: number;\n    hasPredicates: boolean;\n    dynamicTokensEnabled: boolean;\n  }): (orAlts?: IOrAlt[] | undefined) => number | undefined {\n    return buildLookaheadFuncForOr(\n      options.prodOccurrence,\n      options.rule,\n      options.maxLookahead,\n      options.hasPredicates,\n      options.dynamicTokensEnabled,\n      buildAlternativesLookAheadFunc,\n    );\n  }\n\n  buildLookaheadForOptional(options: {\n    prodOccurrence: number;\n    prodType: OptionalProductionType;\n    rule: Rule;\n    maxLookahead: number;\n    dynamicTokensEnabled: boolean;\n  }): () => boolean {\n    return buildLookaheadFuncForOptionalProd(\n      options.prodOccurrence,\n      options.rule,\n      options.maxLookahead,\n      options.dynamicTokensEnabled,\n      getProdType(options.prodType),\n      buildSingleAlternativeLookaheadFunction,\n    );\n  }\n}\n", "import { forEach, has } from \"lodash-es\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\nimport {\n  ILookaheadStrategy,\n  IParserConfig,\n  OptionalProductionType,\n} from \"@chevrotain/types\";\nimport {\n  AT_LEAST_ONE_IDX,\n  AT_LEAST_ONE_SEP_IDX,\n  getKeyForAutomaticLookahead,\n  MANY_IDX,\n  MANY_SEP_IDX,\n  OPTION_IDX,\n  OR_IDX,\n} from \"../../grammar/keys.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  Alternation,\n  GAstVisitor,\n  getProductionDslName,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  
RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n} from \"@chevrotain/gast\";\nimport { LLkLookaheadStrategy } from \"../../grammar/llk_lookahead.js\";\n\n/**\n * Trait responsible for the lookahead related utilities and optimizations.\n */\nexport class LooksAhead {\n  maxLookahead: number;\n  lookAheadFuncsCache: any;\n  dynamicTokensEnabled: boolean;\n  lookaheadStrategy: ILookaheadStrategy;\n\n  initLooksAhead(config: IParserConfig) {\n    this.dynamicTokensEnabled = has(config, \"dynamicTokensEnabled\")\n      ? (config.dynamicTokensEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.dynamicTokensEnabled;\n\n    this.maxLookahead = has(config, \"maxLookahead\")\n      ? (config.maxLookahead as number) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.maxLookahead;\n\n    this.lookaheadStrategy = has(config, \"lookaheadStrategy\")\n      ? (config.lookaheadStrategy as ILookaheadStrategy) // assumes end user provides the correct config value/type\n      : new LLkLookaheadStrategy({ maxLookahead: this.maxLookahead });\n\n    this.lookAheadFuncsCache = new Map();\n  }\n\n  preComputeLookaheadFunctions(this: MixedInParser, rules: Rule[]): void {\n    forEach(rules, (currRule) => {\n      this.TRACE_INIT(`${currRule.name} Rule Lookahead`, () => {\n        const {\n          alternation,\n          repetition,\n          option,\n          repetitionMandatory,\n          repetitionMandatoryWithSeparator,\n          repetitionWithSeparator,\n        } = collectMethods(currRule);\n\n        forEach(alternation, (currProd) => {\n          const prodIdx = currProd.idx === 0 ? \"\" : currProd.idx;\n          this.TRACE_INIT(`${getProductionDslName(currProd)}${prodIdx}`, () => {\n            const laFunc = this.lookaheadStrategy.buildLookaheadForAlternation({\n              prodOccurrence: currProd.idx,\n              rule: currRule,\n              maxLookahead: currProd.maxLookahead || this.maxLookahead,\n              hasPredicates: currProd.hasPredicates,\n              dynamicTokensEnabled: this.dynamicTokensEnabled,\n            });\n\n            const key = getKeyForAutomaticLookahead(\n              this.fullRuleNameToShort[currRule.name],\n              OR_IDX,\n              currProd.idx,\n            );\n            this.setLaFuncCache(key, laFunc);\n          });\n        });\n\n        forEach(repetition, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            MANY_IDX,\n            \"Repetition\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(option, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            OPTION_IDX,\n            \"Option\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionMandatory, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            AT_LEAST_ONE_IDX,\n            \"RepetitionMandatory\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionMandatoryWithSeparator, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            AT_LEAST_ONE_SEP_IDX,\n            
\"RepetitionMandatoryWithSeparator\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionWithSeparator, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            MANY_SEP_IDX,\n            \"RepetitionWithSeparator\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n      });\n    });\n  }\n\n  computeLookaheadFunc(\n    this: MixedInParser,\n    rule: Rule,\n    prodOccurrence: number,\n    prodKey: number,\n    prodType: OptionalProductionType,\n    prodMaxLookahead: number | undefined,\n    dslMethodName: string,\n  ): void {\n    this.TRACE_INIT(\n      `${dslMethodName}${prodOccurrence === 0 ? \"\" : prodOccurrence}`,\n      () => {\n        const laFunc = this.lookaheadStrategy.buildLookaheadForOptional({\n          prodOccurrence,\n          rule,\n          maxLookahead: prodMaxLookahead || this.maxLookahead,\n          dynamicTokensEnabled: this.dynamicTokensEnabled,\n          prodType,\n        });\n        const key = getKeyForAutomaticLookahead(\n          this.fullRuleNameToShort[rule.name],\n          prodKey,\n          prodOccurrence,\n        );\n        this.setLaFuncCache(key, laFunc);\n      },\n    );\n  }\n\n  // this actually returns a number, but it is always used as a string (object prop key)\n  getKeyForAutomaticLookahead(\n    this: MixedInParser,\n    dslMethodIdx: number,\n    occurrence: number,\n  ): number {\n    const currRuleShortName: any = this.getLastExplicitRuleShortName();\n    return getKeyForAutomaticLookahead(\n      currRuleShortName,\n      dslMethodIdx,\n      occurrence,\n    );\n  }\n\n  getLaFuncFromCache(this: MixedInParser, key: number): Function {\n    return this.lookAheadFuncsCache.get(key);\n  }\n\n  /* istanbul ignore next */\n  setLaFuncCache(this: MixedInParser, key: number, value: Function): void {\n    this.lookAheadFuncsCache.set(key, value);\n  }\n}\n\nclass DslMethodsCollectorVisitor extends GAstVisitor {\n  public dslMethods: {\n    option: Option[];\n    alternation: Alternation[];\n    repetition: Repetition[];\n    repetitionWithSeparator: RepetitionWithSeparator[];\n    repetitionMandatory: RepetitionMandatory[];\n    repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator[];\n  } = {\n    option: [],\n    alternation: [],\n    repetition: [],\n    repetitionWithSeparator: [],\n    repetitionMandatory: [],\n    repetitionMandatoryWithSeparator: [],\n  };\n\n  reset() {\n    this.dslMethods = {\n      option: [],\n      alternation: [],\n      repetition: [],\n      repetitionWithSeparator: [],\n      repetitionMandatory: [],\n      repetitionMandatoryWithSeparator: [],\n    };\n  }\n\n  public visitOption(option: Option): void {\n    this.dslMethods.option.push(option);\n  }\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.dslMethods.repetitionWithSeparator.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.dslMethods.repetitionMandatory.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.dslMethods.repetitionMandatoryWithSeparator.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.dslMethods.repetition.push(many);\n  }\n\n  public visitAlternation(or: Alternation): void {\n   
 this.dslMethods.alternation.push(or);\n  }\n}\n\nconst collectorVisitor = new DslMethodsCollectorVisitor();\nexport function collectMethods(rule: Rule): {\n  option: Option[];\n  alternation: Alternation[];\n  repetition: Repetition[];\n  repetitionWithSeparator: RepetitionWithSeparator[];\n  repetitionMandatory: RepetitionMandatory[];\n  repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator[];\n} {\n  collectorVisitor.reset();\n  rule.accept(collectorVisitor);\n  const dslMethods = collectorVisitor.dslMethods;\n  // avoid uncleaned references\n  collectorVisitor.reset();\n  return dslMethods;\n}\n", "import { CstNode, CstNodeLocation, IToken } from \"@chevrotain/types\";\n\n/**\n * This nodeLocation tracking is not efficient and should only be used\n * when error recovery is enabled or the Token Vector contains virtual Tokens\n * (e.g, Python Indent/Outdent)\n * As it executes the calculation for every single terminal/nonTerminal\n * and does not rely on the fact the token vector is **sorted**\n */\nexport function setNodeLocationOnlyOffset(\n  currNodeLocation: CstNodeLocation,\n  newLocationInfo: Required>,\n): void {\n  // First (valid) update for this cst node\n  if (isNaN(currNodeLocation.startOffset) === true) {\n    // assumption1: Token location information is either NaN or a valid number\n    // assumption2: Token location information is fully valid if it exist\n    // (both start/end offsets exist and are numbers).\n    currNodeLocation.startOffset = newLocationInfo.startOffset;\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n  }\n  // Once the startOffset has been updated with a valid number it should never receive\n  // any farther updates as the Token vector is sorted.\n  // We still have to check this this condition for every new possible location info\n  // because with error recovery enabled we may encounter invalid tokens (NaN location props)\n  else if (currNodeLocation.endOffset! 
< newLocationInfo.endOffset === true) {\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n  }\n}\n\n/**\n * This nodeLocation tracking is not efficient and should only be used\n * when error recovery is enabled or the Token Vector contains virtual Tokens\n * (e.g, Python Indent/Outdent)\n * As it executes the calculation for every single terminal/nonTerminal\n * and does not rely on the fact the token vector is **sorted**\n */\nexport function setNodeLocationFull(\n  currNodeLocation: CstNodeLocation,\n  newLocationInfo: CstNodeLocation,\n): void {\n  // First (valid) update for this cst node\n  if (isNaN(currNodeLocation.startOffset) === true) {\n    // assumption1: Token location information is either NaN or a valid number\n    // assumption2: Token location information is fully valid if it exist\n    // (all start/end props exist and are numbers).\n    currNodeLocation.startOffset = newLocationInfo.startOffset;\n    currNodeLocation.startColumn = newLocationInfo.startColumn;\n    currNodeLocation.startLine = newLocationInfo.startLine;\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n    currNodeLocation.endColumn = newLocationInfo.endColumn;\n    currNodeLocation.endLine = newLocationInfo.endLine;\n  }\n  // Once the start props has been updated with a valid number it should never receive\n  // any farther updates as the Token vector is sorted.\n  // We still have to check this this condition for every new possible location info\n  // because with error recovery enabled we may encounter invalid tokens (NaN location props)\n  else if (currNodeLocation.endOffset! < newLocationInfo.endOffset! === true) {\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n    currNodeLocation.endColumn = newLocationInfo.endColumn;\n    currNodeLocation.endLine = newLocationInfo.endLine;\n  }\n}\n\nexport function addTerminalToCst(\n  node: CstNode,\n  token: IToken,\n  tokenTypeName: string,\n): void {\n  if (node.children[tokenTypeName] === undefined) {\n    node.children[tokenTypeName] = [token];\n  } else {\n    node.children[tokenTypeName].push(token);\n  }\n}\n\nexport function addNoneTerminalToCst(\n  node: CstNode,\n  ruleName: string,\n  ruleResult: any,\n): void {\n  if (node.children[ruleName] === undefined) {\n    node.children[ruleName] = [ruleResult];\n  } else {\n    node.children[ruleName].push(ruleResult);\n  }\n}\n", "const NAME = \"name\";\n\nexport function defineNameProp(obj: {}, nameValue: string): void {\n  Object.defineProperty(obj, NAME, {\n    enumerable: false,\n    configurable: true,\n    writable: false,\n    value: nameValue,\n  });\n}\n", "import {\n  compact,\n  filter,\n  forEach,\n  isArray,\n  isEmpty,\n  isFunction,\n  isUndefined,\n  keys,\n  map,\n} from \"lodash-es\";\nimport { defineNameProp } from \"../../lang/lang_extensions.js\";\nimport { CstNode, ICstVisitor } from \"@chevrotain/types\";\n\nexport function defaultVisit(ctx: any, param: IN): void {\n  const childrenNames = keys(ctx);\n  const childrenNamesLength = childrenNames.length;\n  for (let i = 0; i < childrenNamesLength; i++) {\n    const currChildName = childrenNames[i];\n    const currChildArray = ctx[currChildName];\n    const currChildArrayLength = currChildArray.length;\n    for (let j = 0; j < currChildArrayLength; j++) {\n      const currChild: any = currChildArray[j];\n      // distinction between Tokens Children and CstNode children\n      if (currChild.tokenTypeIdx === undefined) {\n        this[currChild.name](currChild.children, param);\n      }\n    }\n  }\n 
 // defaultVisit does not support generic out param\n}\n\nexport function createBaseSemanticVisitorConstructor(\n  grammarName: string,\n  ruleNames: string[],\n): {\n  new (...args: any[]): ICstVisitor;\n} {\n  const derivedConstructor: any = function () {};\n\n  // can be overwritten according to:\n  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/\n  // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname\n  defineNameProp(derivedConstructor, grammarName + \"BaseSemantics\");\n\n  const semanticProto = {\n    visit: function (cstNode: CstNode | CstNode[], param: any) {\n      // enables writing more concise visitor methods when CstNode has only a single child\n      if (isArray(cstNode)) {\n        // A CST Node's children dictionary can never have empty arrays as values\n        // If a key is defined there will be at least one element in the corresponding value array.\n        cstNode = cstNode[0];\n      }\n\n      // enables passing optional CstNodes concisely.\n      if (isUndefined(cstNode)) {\n        return undefined;\n      }\n\n      return this[cstNode.name](cstNode.children, param);\n    },\n\n    validateVisitor: function () {\n      const semanticDefinitionErrors = validateVisitor(this, ruleNames);\n      if (!isEmpty(semanticDefinitionErrors)) {\n        const errorMessages = map(\n          semanticDefinitionErrors,\n          (currDefError) => currDefError.msg,\n        );\n        throw Error(\n          `Errors Detected in CST Visitor <${this.constructor.name}>:\\n\\t` +\n            `${errorMessages.join(\"\\n\\n\").replace(/\\n/g, \"\\n\\t\")}`,\n        );\n      }\n    },\n  };\n\n  derivedConstructor.prototype = semanticProto;\n  derivedConstructor.prototype.constructor = derivedConstructor;\n\n  derivedConstructor._RULE_NAMES = ruleNames;\n\n  return derivedConstructor;\n}\n\nexport function createBaseVisitorConstructorWithDefaults(\n  grammarName: string,\n  ruleNames: string[],\n  baseConstructor: Function,\n): {\n  new (...args: any[]): ICstVisitor;\n} {\n  const derivedConstructor: any = function () {};\n\n  // can be overwritten according to:\n  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/\n  // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname\n  defineNameProp(derivedConstructor, grammarName + \"BaseSemanticsWithDefaults\");\n\n  const withDefaultsProto = Object.create(baseConstructor.prototype);\n  forEach(ruleNames, (ruleName) => {\n    withDefaultsProto[ruleName] = defaultVisit;\n  });\n\n  derivedConstructor.prototype = withDefaultsProto;\n  derivedConstructor.prototype.constructor = derivedConstructor;\n\n  return derivedConstructor;\n}\n\nexport enum CstVisitorDefinitionError {\n  REDUNDANT_METHOD,\n  MISSING_METHOD,\n}\n\nexport interface IVisitorDefinitionError {\n  msg: string;\n  type: CstVisitorDefinitionError;\n  methodName: string;\n}\n\nexport function validateVisitor(\n  visitorInstance: ICstVisitor,\n  ruleNames: string[],\n): IVisitorDefinitionError[] {\n  const missingErrors = validateMissingCstMethods(visitorInstance, ruleNames);\n\n  return missingErrors;\n}\n\nexport function validateMissingCstMethods(\n  visitorInstance: ICstVisitor,\n  ruleNames: string[],\n): IVisitorDefinitionError[] {\n  const missingRuleNames = filter(ruleNames, (currRuleName) => {\n    return isFunction((visitorInstance as any)[currRuleName]) === false;\n  });\n\n  const errors: 
IVisitorDefinitionError[] = map(\n    missingRuleNames,\n    (currRuleName) => {\n      return {\n        msg: `Missing visitor method: <${currRuleName}> on ${(\n          visitorInstance.constructor.name\n        )} CST Visitor.`,\n        type: CstVisitorDefinitionError.MISSING_METHOD,\n        methodName: currRuleName,\n      };\n    },\n  );\n\n  return compact(errors);\n}\n", "import {\n  addNoneTerminalToCst,\n  addTerminalToCst,\n  setNodeLocationFull,\n  setNodeLocationOnlyOffset,\n} from \"../../cst/cst.js\";\nimport { has, isUndefined, keys, noop } from \"lodash-es\";\nimport {\n  createBaseSemanticVisitorConstructor,\n  createBaseVisitorConstructorWithDefaults,\n} from \"../../cst/cst_visitor.js\";\nimport {\n  CstNode,\n  CstNodeLocation,\n  ICstVisitor,\n  IParserConfig,\n  IToken,\n  nodeLocationTrackingOptions,\n} from \"@chevrotain/types\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * This trait is responsible for the CST building logic.\n */\nexport class TreeBuilder {\n  outputCst: boolean;\n  CST_STACK: CstNode[];\n  baseCstVisitorConstructor: Function;\n  baseCstVisitorWithDefaultsConstructor: Function;\n\n  // dynamically assigned Methods\n  setNodeLocationFromNode: (\n    nodeLocation: CstNodeLocation,\n    locationInformation: CstNodeLocation,\n  ) => void;\n  setNodeLocationFromToken: (\n    nodeLocation: CstNodeLocation,\n    locationInformation: CstNodeLocation,\n  ) => void;\n  cstPostRule: (this: MixedInParser, ruleCstNode: CstNode) => void;\n\n  setInitialNodeLocation: (cstNode: CstNode) => void;\n  nodeLocationTracking: nodeLocationTrackingOptions;\n\n  initTreeBuilder(this: MixedInParser, config: IParserConfig) {\n    this.CST_STACK = [];\n\n    // outputCst is no longer exposed/defined in the pubic API\n    this.outputCst = (config as any).outputCst;\n\n    this.nodeLocationTracking = has(config, \"nodeLocationTracking\")\n      ? 
(config.nodeLocationTracking as nodeLocationTrackingOptions) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.nodeLocationTracking;\n\n    if (!this.outputCst) {\n      this.cstInvocationStateUpdate = noop;\n      this.cstFinallyStateUpdate = noop;\n      this.cstPostTerminal = noop;\n      this.cstPostNonTerminal = noop;\n      this.cstPostRule = noop;\n    } else {\n      if (/full/i.test(this.nodeLocationTracking)) {\n        if (this.recoveryEnabled) {\n          this.setNodeLocationFromToken = setNodeLocationFull;\n          this.setNodeLocationFromNode = setNodeLocationFull;\n          this.cstPostRule = noop;\n          this.setInitialNodeLocation = this.setInitialNodeLocationFullRecovery;\n        } else {\n          this.setNodeLocationFromToken = noop;\n          this.setNodeLocationFromNode = noop;\n          this.cstPostRule = this.cstPostRuleFull;\n          this.setInitialNodeLocation = this.setInitialNodeLocationFullRegular;\n        }\n      } else if (/onlyOffset/i.test(this.nodeLocationTracking)) {\n        if (this.recoveryEnabled) {\n          this.setNodeLocationFromToken = setNodeLocationOnlyOffset;\n          this.setNodeLocationFromNode = setNodeLocationOnlyOffset;\n          this.cstPostRule = noop;\n          this.setInitialNodeLocation =\n            this.setInitialNodeLocationOnlyOffsetRecovery;\n        } else {\n          this.setNodeLocationFromToken = noop;\n          this.setNodeLocationFromNode = noop;\n          this.cstPostRule = this.cstPostRuleOnlyOffset;\n          this.setInitialNodeLocation =\n            this.setInitialNodeLocationOnlyOffsetRegular;\n        }\n      } else if (/none/i.test(this.nodeLocationTracking)) {\n        this.setNodeLocationFromToken = noop;\n        this.setNodeLocationFromNode = noop;\n        this.cstPostRule = noop;\n        this.setInitialNodeLocation = noop;\n      } else {\n        throw Error(\n          `Invalid  config option: \"${config.nodeLocationTracking}\"`,\n        );\n      }\n    }\n  }\n\n  setInitialNodeLocationOnlyOffsetRecovery(\n    this: MixedInParser,\n    cstNode: any,\n  ): void {\n    cstNode.location = {\n      startOffset: NaN,\n      endOffset: NaN,\n    };\n  }\n\n  setInitialNodeLocationOnlyOffsetRegular(\n    this: MixedInParser,\n    cstNode: any,\n  ): void {\n    cstNode.location = {\n      // without error recovery the starting Location of a new CstNode is guaranteed\n      // To be the next Token's startOffset (for valid inputs).\n      // For invalid inputs there won't be any CSTOutput so this potential\n      // inaccuracy does not matter\n      startOffset: this.LA(1).startOffset,\n      endOffset: NaN,\n    };\n  }\n\n  setInitialNodeLocationFullRecovery(this: MixedInParser, cstNode: any): void {\n    cstNode.location = {\n      startOffset: NaN,\n      startLine: NaN,\n      startColumn: NaN,\n      endOffset: NaN,\n      endLine: NaN,\n      endColumn: NaN,\n    };\n  }\n\n  /**\n     *  @see setInitialNodeLocationOnlyOffsetRegular for explanation why this work\n\n     * @param cstNode\n     */\n  setInitialNodeLocationFullRegular(this: MixedInParser, cstNode: any): void {\n    const nextToken = this.LA(1);\n    cstNode.location = {\n      startOffset: nextToken.startOffset,\n      startLine: nextToken.startLine,\n      startColumn: nextToken.startColumn,\n      endOffset: NaN,\n      endLine: NaN,\n      endColumn: NaN,\n    };\n  }\n\n  cstInvocationStateUpdate(this: MixedInParser, fullRuleName: string): void {\n    const cstNode: 
CstNode = {\n      name: fullRuleName,\n      children: Object.create(null),\n    };\n\n    this.setInitialNodeLocation(cstNode);\n    this.CST_STACK.push(cstNode);\n  }\n\n  cstFinallyStateUpdate(this: MixedInParser): void {\n    this.CST_STACK.pop();\n  }\n\n  cstPostRuleFull(this: MixedInParser, ruleCstNode: CstNode): void {\n    // casts to `required` are safe because `cstPostRuleFull` should only be invoked when full location is enabled\n    const prevToken = this.LA(0) as Required;\n    const loc = ruleCstNode.location as Required;\n\n    // If this condition is true it means we consumed at least one Token\n    // In this CstNode.\n    if (loc.startOffset <= prevToken.startOffset === true) {\n      loc.endOffset = prevToken.endOffset;\n      loc.endLine = prevToken.endLine;\n      loc.endColumn = prevToken.endColumn;\n    }\n    // \"empty\" CstNode edge case\n    else {\n      loc.startOffset = NaN;\n      loc.startLine = NaN;\n      loc.startColumn = NaN;\n    }\n  }\n\n  cstPostRuleOnlyOffset(this: MixedInParser, ruleCstNode: CstNode): void {\n    const prevToken = this.LA(0);\n    // `location' is not null because `cstPostRuleOnlyOffset` will only be invoked when location tracking is enabled.\n    const loc = ruleCstNode.location!;\n\n    // If this condition is true it means we consumed at least one Token\n    // In this CstNode.\n    if (loc.startOffset <= prevToken.startOffset === true) {\n      loc.endOffset = prevToken.endOffset;\n    }\n    // \"empty\" CstNode edge case\n    else {\n      loc.startOffset = NaN;\n    }\n  }\n\n  cstPostTerminal(\n    this: MixedInParser,\n    key: string,\n    consumedToken: IToken,\n  ): void {\n    const rootCst = this.CST_STACK[this.CST_STACK.length - 1];\n    addTerminalToCst(rootCst, consumedToken, key);\n    // This is only used when **both** error recovery and CST Output are enabled.\n    this.setNodeLocationFromToken(rootCst.location!, consumedToken);\n  }\n\n  cstPostNonTerminal(\n    this: MixedInParser,\n    ruleCstResult: CstNode,\n    ruleName: string,\n  ): void {\n    const preCstNode = this.CST_STACK[this.CST_STACK.length - 1];\n    addNoneTerminalToCst(preCstNode, ruleName, ruleCstResult);\n    // This is only used when **both** error recovery and CST Output are enabled.\n    this.setNodeLocationFromNode(preCstNode.location!, ruleCstResult.location!);\n  }\n\n  getBaseCstVisitorConstructor(\n    this: MixedInParser,\n  ): {\n    new (...args: any[]): ICstVisitor;\n  } {\n    if (isUndefined(this.baseCstVisitorConstructor)) {\n      const newBaseCstVisitorConstructor = createBaseSemanticVisitorConstructor(\n        this.className,\n        keys(this.gastProductionsCache),\n      );\n      this.baseCstVisitorConstructor = newBaseCstVisitorConstructor;\n      return newBaseCstVisitorConstructor;\n    }\n\n    return this.baseCstVisitorConstructor;\n  }\n\n  getBaseCstVisitorConstructorWithDefaults(\n    this: MixedInParser,\n  ): {\n    new (...args: any[]): ICstVisitor;\n  } {\n    if (isUndefined(this.baseCstVisitorWithDefaultsConstructor)) {\n      const newConstructor = createBaseVisitorConstructorWithDefaults(\n        this.className,\n        keys(this.gastProductionsCache),\n        this.getBaseCstVisitorConstructor(),\n      );\n      this.baseCstVisitorWithDefaultsConstructor = newConstructor;\n      return newConstructor;\n    }\n\n    return this.baseCstVisitorWithDefaultsConstructor;\n  }\n\n  getLastExplicitRuleShortName(this: MixedInParser): number {\n    const ruleStack = this.RULE_STACK;\n    return 
ruleStack[ruleStack.length - 1];\n  }\n\n  getPreviousExplicitRuleShortName(this: MixedInParser): number {\n    const ruleStack = this.RULE_STACK;\n    return ruleStack[ruleStack.length - 2];\n  }\n\n  getLastExplicitRuleOccurrenceIndex(this: MixedInParser): number {\n    const occurrenceStack = this.RULE_OCCURRENCE_STACK;\n    return occurrenceStack[occurrenceStack.length - 1];\n  }\n}\n", "import { END_OF_FILE } from \"../parser.js\";\nimport { IToken } from \"@chevrotain/types\";\nimport { MixedInParser } from \"./parser_traits.js\";\n\n/**\n * Trait responsible abstracting over the interaction with Lexer output (Token vector).\n *\n * This could be generalized to support other kinds of lexers, e.g.\n * - Just in Time Lexing / Lexer-Less parsing.\n * - Streaming Lexer.\n */\nexport class LexerAdapter {\n  tokVector: IToken[];\n  tokVectorLength: number;\n  currIdx: number;\n\n  initLexerAdapter() {\n    this.tokVector = [];\n    this.tokVectorLength = 0;\n    this.currIdx = -1;\n  }\n\n  set input(newInput: IToken[]) {\n    // @ts-ignore - `this parameter` not supported in setters/getters\n    //   - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters\n    if (this.selfAnalysisDone !== true) {\n      throw Error(\n        `Missing  invocation at the end of the Parser's constructor.`,\n      );\n    }\n    // @ts-ignore - `this parameter` not supported in setters/getters\n    //   - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters\n    this.reset();\n    this.tokVector = newInput;\n    this.tokVectorLength = newInput.length;\n  }\n\n  get input(): IToken[] {\n    return this.tokVector;\n  }\n\n  // skips a token and returns the next token\n  SKIP_TOKEN(this: MixedInParser): IToken {\n    if (this.currIdx <= this.tokVector.length - 2) {\n      this.consumeToken();\n      return this.LA(1);\n    } else {\n      return END_OF_FILE;\n    }\n  }\n\n  // Lexer (accessing Token vector) related methods which can be overridden to implement lazy lexers\n  // or lexers dependent on parser context.\n  LA(this: MixedInParser, howMuch: number): IToken {\n    const soughtIdx = this.currIdx + howMuch;\n    if (soughtIdx < 0 || this.tokVectorLength <= soughtIdx) {\n      return END_OF_FILE;\n    } else {\n      return this.tokVector[soughtIdx];\n    }\n  }\n\n  consumeToken(this: MixedInParser) {\n    this.currIdx++;\n  }\n\n  exportLexerState(this: MixedInParser): number {\n    return this.currIdx;\n  }\n\n  importLexerState(this: MixedInParser, newState: number) {\n    this.currIdx = newState;\n  }\n\n  resetLexerState(this: MixedInParser): void {\n    this.currIdx = -1;\n  }\n\n  moveToTerminatedState(this: MixedInParser): void {\n    this.currIdx = this.tokVector.length - 1;\n  }\n\n  getLexerPosition(this: MixedInParser): number {\n    return this.exportLexerState();\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IRuleConfig,\n  ISerializedGast,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  SubruleMethodOpts,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { includes, values } from \"lodash-es\";\nimport { isRecognitionException } from \"../../exceptions_public.js\";\nimport { DEFAULT_RULE_CONFIG, ParserDefinitionErrorType } from \"../parser.js\";\nimport { defaultGrammarValidatorErrorProvider } from \"../../errors_public.js\";\nimport { validateRuleIsOverridden } from \"../../grammar/checks.js\";\nimport { MixedInParser } from 
\"./parser_traits.js\";\nimport { Rule, serializeGrammar } from \"@chevrotain/gast\";\nimport { IParserDefinitionError } from \"../../grammar/types.js\";\nimport { ParserMethodInternal } from \"../types.js\";\n\n/**\n * This trait is responsible for implementing the public API\n * for defining Chevrotain parsers, i.e:\n * - CONSUME\n * - RULE\n * - OPTION\n * - ...\n */\nexport class RecognizerApi {\n  ACTION(this: MixedInParser, impl: () => T): T {\n    return impl.call(this);\n  }\n\n  consume(\n    this: MixedInParser,\n    idx: number,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, idx, options);\n  }\n\n  subrule(\n    this: MixedInParser,\n    idx: number,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, idx, options);\n  }\n\n  option(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, idx);\n  }\n\n  or(\n    this: MixedInParser,\n    idx: number,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): any {\n    return this.orInternal(altsOrOpts, idx);\n  }\n\n  many(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    return this.manyInternal(idx, actionORMethodDef);\n  }\n\n  atLeastOne(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    return this.atLeastOneInternal(idx, actionORMethodDef);\n  }\n\n  CONSUME(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 0, options);\n  }\n\n  CONSUME1(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 1, options);\n  }\n\n  CONSUME2(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 2, options);\n  }\n\n  CONSUME3(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 3, options);\n  }\n\n  CONSUME4(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 4, options);\n  }\n\n  CONSUME5(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 5, options);\n  }\n\n  CONSUME6(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 6, options);\n  }\n\n  CONSUME7(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 7, options);\n  }\n\n  CONSUME8(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 8, options);\n  }\n\n  CONSUME9(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 9, options);\n  }\n\n  SUBRULE(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 0, 
options);\n  }\n\n  SUBRULE1(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 1, options);\n  }\n\n  SUBRULE2(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 2, options);\n  }\n\n  SUBRULE3(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 3, options);\n  }\n\n  SUBRULE4(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 4, options);\n  }\n\n  SUBRULE5(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 5, options);\n  }\n\n  SUBRULE6(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 6, options);\n  }\n\n  SUBRULE7(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 7, options);\n  }\n\n  SUBRULE8(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 8, options);\n  }\n\n  SUBRULE9(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 9, options);\n  }\n\n  OPTION(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 0);\n  }\n\n  OPTION1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 1);\n  }\n\n  OPTION2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 2);\n  }\n\n  OPTION3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 3);\n  }\n\n  OPTION4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 4);\n  }\n\n  OPTION5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 5);\n  }\n\n  OPTION6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 6);\n  }\n\n  OPTION7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 7);\n  }\n\n  OPTION8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 8);\n  }\n\n  OPTION9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 9);\n  }\n\n  OR(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | 
OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 0);\n  }\n\n  OR1(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 1);\n  }\n\n  OR2(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 2);\n  }\n\n  OR3(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 3);\n  }\n\n  OR4(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 4);\n  }\n\n  OR5(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 5);\n  }\n\n  OR6(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 6);\n  }\n\n  OR7(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 7);\n  }\n\n  OR8(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 8);\n  }\n\n  OR9(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 9);\n  }\n\n  MANY(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(0, actionORMethodDef);\n  }\n\n  MANY1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(1, actionORMethodDef);\n  }\n\n  MANY2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(2, actionORMethodDef);\n  }\n\n  MANY3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(3, actionORMethodDef);\n  }\n\n  MANY4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(4, actionORMethodDef);\n  }\n\n  MANY5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(5, actionORMethodDef);\n  }\n\n  MANY6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(6, actionORMethodDef);\n  }\n\n  MANY7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(7, actionORMethodDef);\n  }\n\n  MANY8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(8, actionORMethodDef);\n  }\n\n  MANY9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(9, actionORMethodDef);\n  }\n\n  MANY_SEP(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(0, options);\n  }\n\n  MANY_SEP1(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(1, options);\n  }\n\n  MANY_SEP2(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(2, options);\n  }\n\n  MANY_SEP3(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(3, options);\n  }\n\n  MANY_SEP4(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(4, options);\n  }\n\n  MANY_SEP5(this: MixedInParser, options: ManySepMethodOpts): void {\n   
 this.manySepFirstInternal(5, options);\n  }\n\n  MANY_SEP6(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(6, options);\n  }\n\n  MANY_SEP7(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(7, options);\n  }\n\n  MANY_SEP8(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(8, options);\n  }\n\n  MANY_SEP9(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(9, options);\n  }\n\n  AT_LEAST_ONE(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(0, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    return this.atLeastOneInternal(1, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(2, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(3, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(4, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(5, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(6, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(7, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(8, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(9, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE_SEP(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(0, options);\n  }\n\n  AT_LEAST_ONE_SEP1(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(1, options);\n  }\n\n  AT_LEAST_ONE_SEP2(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(2, options);\n  }\n\n  AT_LEAST_ONE_SEP3(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(3, options);\n  }\n\n  AT_LEAST_ONE_SEP4(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(4, options);\n  }\n\n  AT_LEAST_ONE_SEP5(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(5, options);\n  }\n\n  AT_LEAST_ONE_SEP6(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(6, options);\n  }\n\n  AT_LEAST_ONE_SEP7(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(7, 
options);\n  }\n\n  AT_LEAST_ONE_SEP8(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(8, options);\n  }\n\n  AT_LEAST_ONE_SEP9(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(9, options);\n  }\n\n  RULE(\n    this: MixedInParser,\n    name: string,\n    implementation: (...implArgs: any[]) => T,\n    config: IRuleConfig = DEFAULT_RULE_CONFIG,\n  ): (idxInCallingRule?: number, ...args: any[]) => T | any {\n    if (includes(this.definedRulesNames, name)) {\n      const errMsg =\n        defaultGrammarValidatorErrorProvider.buildDuplicateRuleNameError({\n          topLevelRule: name,\n          grammarName: this.className,\n        });\n\n      const error = {\n        message: errMsg,\n        type: ParserDefinitionErrorType.DUPLICATE_RULE_NAME,\n        ruleName: name,\n      };\n      this.definitionErrors.push(error);\n    }\n\n    this.definedRulesNames.push(name);\n\n    const ruleImplementation = this.defineRule(name, implementation, config);\n    (this as any)[name] = ruleImplementation;\n    return ruleImplementation;\n  }\n\n  OVERRIDE_RULE(\n    this: MixedInParser,\n    name: string,\n    impl: (...implArgs: any[]) => T,\n    config: IRuleConfig = DEFAULT_RULE_CONFIG,\n  ): (idxInCallingRule?: number, ...args: any[]) => T {\n    const ruleErrors: IParserDefinitionError[] = validateRuleIsOverridden(\n      name,\n      this.definedRulesNames,\n      this.className,\n    );\n    this.definitionErrors = this.definitionErrors.concat(ruleErrors);\n\n    const ruleImplementation = this.defineRule(name, impl, config);\n    (this as any)[name] = ruleImplementation;\n    return ruleImplementation;\n  }\n\n  BACKTRACK(\n    this: MixedInParser,\n    grammarRule: (...args: any[]) => T,\n    args?: any[],\n  ): () => boolean {\n    return function () {\n      // save org state\n      this.isBackTrackingStack.push(1);\n      const orgState = this.saveRecogState();\n      try {\n        grammarRule.apply(this, args);\n        // if no exception was thrown we have succeed parsing the rule.\n        return true;\n      } catch (e) {\n        if (isRecognitionException(e)) {\n          return false;\n        } else {\n          throw e;\n        }\n      } finally {\n        this.reloadRecogState(orgState);\n        this.isBackTrackingStack.pop();\n      }\n    };\n  }\n\n  // GAST export APIs\n  public getGAstProductions(this: MixedInParser): Record {\n    return this.gastProductionsCache;\n  }\n\n  public getSerializedGastProductions(this: MixedInParser): ISerializedGast[] {\n    return serializeGrammar(values(this.gastProductionsCache));\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IParserConfig,\n  IRuleConfig,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  ParserMethod,\n  SubruleMethodOpts,\n  TokenType,\n  TokenTypeDictionary,\n  TokenVocabulary,\n} from \"@chevrotain/types\";\nimport {\n  clone,\n  every,\n  flatten,\n  has,\n  isArray,\n  isEmpty,\n  isObject,\n  reduce,\n  uniq,\n  values,\n} from \"lodash-es\";\nimport {\n  AT_LEAST_ONE_IDX,\n  AT_LEAST_ONE_SEP_IDX,\n  BITS_FOR_METHOD_TYPE,\n  BITS_FOR_OCCURRENCE_IDX,\n  MANY_IDX,\n  MANY_SEP_IDX,\n  OPTION_IDX,\n  OR_IDX,\n} from \"../../grammar/keys.js\";\nimport {\n  isRecognitionException,\n  MismatchedTokenException,\n  NotAllInputParsedException,\n} from 
\"../../exceptions_public.js\";\nimport { PROD_TYPE } from \"../../grammar/lookahead.js\";\nimport {\n  AbstractNextTerminalAfterProductionWalker,\n  NextTerminalAfterAtLeastOneSepWalker,\n  NextTerminalAfterAtLeastOneWalker,\n  NextTerminalAfterManySepWalker,\n  NextTerminalAfterManyWalker,\n} from \"../../grammar/interpreter.js\";\nimport { DEFAULT_RULE_CONFIG, IParserState, TokenMatcher } from \"../parser.js\";\nimport { IN_RULE_RECOVERY_EXCEPTION } from \"./recoverable.js\";\nimport { EOF } from \"../../../scan/tokens_public.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  augmentTokenTypes,\n  isTokenType,\n  tokenStructuredMatcher,\n  tokenStructuredMatcherNoCategories,\n} from \"../../../scan/tokens.js\";\nimport { Rule } from \"@chevrotain/gast\";\nimport { ParserMethodInternal } from \"../types.js\";\n\n/**\n * This trait is responsible for the runtime parsing engine\n * Used by the official API (recognizer_api.ts)\n */\nexport class RecognizerEngine {\n  isBackTrackingStack: boolean[];\n  className: string;\n  RULE_STACK: number[];\n  RULE_OCCURRENCE_STACK: number[];\n  definedRulesNames: string[];\n  tokensMap: { [fqn: string]: TokenType };\n  gastProductionsCache: Record;\n  shortRuleNameToFull: Record;\n  fullRuleNameToShort: Record;\n  // The shortName Index must be coded \"after\" the first 8bits to enable building unique lookahead keys\n  ruleShortNameIdx: number;\n  tokenMatcher: TokenMatcher;\n  subruleIdx: number;\n\n  initRecognizerEngine(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfig,\n  ) {\n    this.className = this.constructor.name;\n    // TODO: would using an ES6 Map or plain object be faster (CST building scenario)\n    this.shortRuleNameToFull = {};\n    this.fullRuleNameToShort = {};\n    this.ruleShortNameIdx = 256;\n    this.tokenMatcher = tokenStructuredMatcherNoCategories;\n    this.subruleIdx = 0;\n\n    this.definedRulesNames = [];\n    this.tokensMap = {};\n    this.isBackTrackingStack = [];\n    this.RULE_STACK = [];\n    this.RULE_OCCURRENCE_STACK = [];\n    this.gastProductionsCache = {};\n\n    if (has(config, \"serializedGrammar\")) {\n      throw Error(\n        \"The Parser's configuration can no longer contain a  property.\\n\" +\n          \"\\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_6-0-0\\n\" +\n          \"\\tFor Further details.\",\n      );\n    }\n\n    if (isArray(tokenVocabulary)) {\n      // This only checks for Token vocabularies provided as arrays.\n      // That is good enough because the main objective is to detect users of pre-V4.0 APIs\n      // rather than all edge cases of empty Token vocabularies.\n      if (isEmpty(tokenVocabulary as any[])) {\n        throw Error(\n          \"A Token Vocabulary cannot be empty.\\n\" +\n            \"\\tNote that the first argument for the parser constructor\\n\" +\n            \"\\tis no longer a Token vector (since v4.0).\",\n        );\n      }\n\n      if (typeof (tokenVocabulary as any[])[0].startOffset === \"number\") {\n        throw Error(\n          \"The Parser constructor no longer accepts a token vector as the first argument.\\n\" +\n            \"\\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_4-0-0\\n\" +\n            \"\\tFor Further details.\",\n        );\n      }\n    }\n\n    if (isArray(tokenVocabulary)) {\n      this.tokensMap = reduce(\n        tokenVocabulary,\n        (acc, tokType: TokenType) => {\n          acc[tokType.name] = tokType;\n          return acc;\n        },\n        {} 
as { [tokenName: string]: TokenType },\n      );\n    } else if (\n      has(tokenVocabulary, \"modes\") &&\n      every(flatten(values((tokenVocabulary).modes)), isTokenType)\n    ) {\n      const allTokenTypes = flatten(values((tokenVocabulary).modes));\n      const uniqueTokens = uniq(allTokenTypes);\n      this.tokensMap = reduce(\n        uniqueTokens,\n        (acc, tokType: TokenType) => {\n          acc[tokType.name] = tokType;\n          return acc;\n        },\n        {} as { [tokenName: string]: TokenType },\n      );\n    } else if (isObject(tokenVocabulary)) {\n      this.tokensMap = clone(tokenVocabulary as TokenTypeDictionary);\n    } else {\n      throw new Error(\n        \" argument must be An Array of Token constructors,\" +\n          \" A dictionary of Token constructors or an IMultiModeLexerDefinition\",\n      );\n    }\n\n    // always add EOF to the tokenNames -> constructors map. it is useful to assure all the input has been\n    // parsed with a clear error message (\"expecting EOF but found ...\")\n    this.tokensMap[\"EOF\"] = EOF;\n\n    const allTokenTypes = has(tokenVocabulary, \"modes\")\n      ? flatten(values((tokenVocabulary).modes))\n      : values(tokenVocabulary);\n    const noTokenCategoriesUsed = every(allTokenTypes, (tokenConstructor) =>\n      isEmpty(tokenConstructor.categoryMatches),\n    );\n\n    this.tokenMatcher = noTokenCategoriesUsed\n      ? tokenStructuredMatcherNoCategories\n      : tokenStructuredMatcher;\n\n    // Because ES2015+ syntax should be supported for creating Token classes\n    // We cannot assume that the Token classes were created using the \"extendToken\" utilities\n    // Therefore we must augment the Token classes both on Lexer initialization and on Parser initialization\n    augmentTokenTypes(values(this.tokensMap));\n  }\n\n  defineRule(\n    this: MixedInParser,\n    ruleName: string,\n    impl: (...args: ARGS) => R,\n    config: IRuleConfig,\n  ): ParserMethodInternal {\n    if (this.selfAnalysisDone) {\n      throw Error(\n        `Grammar rule <${ruleName}> may not be defined after the 'performSelfAnalysis' method has been called'\\n` +\n          `Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.`,\n      );\n    }\n    const resyncEnabled: boolean = has(config, \"resyncEnabled\")\n      ? (config.resyncEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_RULE_CONFIG.resyncEnabled;\n    const recoveryValueFunc = has(config, \"recoveryValueFunc\")\n      ? 
(config.recoveryValueFunc as () => R) // assumes end user provides the correct config value/type\n      : DEFAULT_RULE_CONFIG.recoveryValueFunc;\n\n    // performance optimization: Use small integers as keys for the longer human readable \"full\" rule names.\n    // this greatly improves Map access time (as much as 8% for some performance benchmarks).\n    const shortName =\n      this.ruleShortNameIdx << (BITS_FOR_METHOD_TYPE + BITS_FOR_OCCURRENCE_IDX);\n\n    this.ruleShortNameIdx++;\n    this.shortRuleNameToFull[shortName] = ruleName;\n    this.fullRuleNameToShort[ruleName] = shortName;\n\n    let invokeRuleWithTry: ParserMethod;\n\n    // Micro optimization, only check the condition **once** on rule definition\n    // instead of **every single** rule invocation.\n    if (this.outputCst === true) {\n      invokeRuleWithTry = function invokeRuleWithTry(\n        this: MixedInParser,\n        ...args: ARGS\n      ): R {\n        try {\n          this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);\n          impl.apply(this, args);\n          const cst = this.CST_STACK[this.CST_STACK.length - 1];\n          this.cstPostRule(cst);\n          return cst as unknown as R;\n        } catch (e) {\n          return this.invokeRuleCatch(e, resyncEnabled, recoveryValueFunc) as R;\n        } finally {\n          this.ruleFinallyStateUpdate();\n        }\n      };\n    } else {\n      invokeRuleWithTry = function invokeRuleWithTryCst(\n        this: MixedInParser,\n        ...args: ARGS\n      ): R {\n        try {\n          this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);\n          return impl.apply(this, args);\n        } catch (e) {\n          return this.invokeRuleCatch(e, resyncEnabled, recoveryValueFunc) as R;\n        } finally {\n          this.ruleFinallyStateUpdate();\n        }\n      };\n    }\n\n    const wrappedGrammarRule: ParserMethodInternal = Object.assign(\n      invokeRuleWithTry as any,\n      { ruleName, originalGrammarAction: impl },\n    );\n\n    return wrappedGrammarRule;\n  }\n\n  invokeRuleCatch(\n    this: MixedInParser,\n    e: Error,\n    resyncEnabledConfig: boolean,\n    recoveryValueFunc: Function,\n  ): unknown {\n    const isFirstInvokedRule = this.RULE_STACK.length === 1;\n    // note the reSync is always enabled for the first rule invocation, because we must always be able to\n    // reSync with EOF and just output some INVALID ParseTree\n    // during backtracking reSync recovery is disabled, otherwise we can't be certain the backtracking\n    // path is really the most valid one\n    const reSyncEnabled =\n      resyncEnabledConfig && !this.isBackTracking() && this.recoveryEnabled;\n\n    if (isRecognitionException(e)) {\n      const recogError: any = e;\n      if (reSyncEnabled) {\n        const reSyncTokType = this.findReSyncTokenType();\n        if (this.isInCurrentRuleReSyncSet(reSyncTokType)) {\n          recogError.resyncedTokens = this.reSyncTo(reSyncTokType);\n          if (this.outputCst) {\n            const partialCstResult: any =\n              this.CST_STACK[this.CST_STACK.length - 1];\n            partialCstResult.recoveredNode = true;\n            return partialCstResult;\n          } else {\n            return recoveryValueFunc(e);\n          }\n        } else {\n          if (this.outputCst) {\n            const partialCstResult: any =\n              this.CST_STACK[this.CST_STACK.length - 1];\n            partialCstResult.recoveredNode = true;\n            recogError.partialCstResult = 
partialCstResult;\n          }\n          // to be handled Further up the call stack\n          throw recogError;\n        }\n      } else if (isFirstInvokedRule) {\n        // otherwise a Redundant input error will be created as well and we cannot guarantee that this is indeed the case\n        this.moveToTerminatedState();\n        // the parser should never throw one of its own errors outside its flow.\n        // even if error recovery is disabled\n        return recoveryValueFunc(e);\n      } else {\n        // to be recovered Further up the call stack\n        throw recogError;\n      }\n    } else {\n      // some other Error type which we don't know how to handle (for example a built in JavaScript Error)\n      throw e;\n    }\n  }\n\n  // Implementation of parsing DSL\n  optionInternal(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n  ): OUT | undefined {\n    const key = this.getKeyForAutomaticLookahead(OPTION_IDX, occurrence);\n    return this.optionInternalLogic(actionORMethodDef, occurrence, key);\n  }\n\n  optionInternalLogic(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n    key: number,\n  ): OUT | undefined {\n    let lookAheadFunc = this.getLaFuncFromCache(key);\n    let action: GrammarAction;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookAheadFunc;\n        lookAheadFunc = () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    if (lookAheadFunc.call(this) === true) {\n      return action.call(this);\n    }\n    return undefined;\n  }\n\n  atLeastOneInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      AT_LEAST_ONE_IDX,\n      prodOccurrence,\n    );\n    return this.atLeastOneInternalLogic(\n      prodOccurrence,\n      actionORMethodDef,\n      laKey,\n    );\n  }\n\n  atLeastOneInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n    key: number,\n  ): void {\n    let lookAheadFunc = this.getLaFuncFromCache(key);\n    let action;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookAheadFunc;\n        lookAheadFunc = () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    if ((lookAheadFunc).call(this) === true) {\n      let notStuck = this.doSingleRepetition(action);\n      while (\n        (lookAheadFunc).call(this) === true &&\n        notStuck === true\n      ) {\n        notStuck = this.doSingleRepetition(action);\n      }\n    } else {\n      throw this.raiseEarlyExitException(\n        prodOccurrence,\n        PROD_TYPE.REPETITION_MANDATORY,\n        (>actionORMethodDef).ERR_MSG,\n      );\n    }\n\n    // note that while it may seem that this can cause an error because by using a recursive call to\n    // 
AT_LEAST_ONE we change the grammar to AT_LEAST_TWO, AT_LEAST_THREE ... , the possible recursive call\n    // from the tryInRepetitionRecovery(...) will only happen IFF there really are TWO/THREE/.... items.\n\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    this.attemptInRepetitionRecovery(\n      this.atLeastOneInternal,\n      [prodOccurrence, actionORMethodDef],\n      lookAheadFunc,\n      AT_LEAST_ONE_IDX,\n      prodOccurrence,\n      NextTerminalAfterAtLeastOneWalker,\n    );\n  }\n\n  atLeastOneSepFirstInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      AT_LEAST_ONE_SEP_IDX,\n      prodOccurrence,\n    );\n    this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, laKey);\n  }\n\n  atLeastOneSepFirstInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: AtLeastOneSepMethodOpts,\n    key: number,\n  ): void {\n    const action = options.DEF;\n    const separator = options.SEP;\n\n    const firstIterationLookaheadFunc = this.getLaFuncFromCache(key);\n\n    // 1st iteration\n    if (firstIterationLookaheadFunc.call(this) === true) {\n      (>action).call(this);\n\n      //  TODO: Optimization can move this function construction into \"attemptInRepetitionRecovery\"\n      //  because it is only needed in error recovery scenarios.\n      const separatorLookAheadFunc = () => {\n        return this.tokenMatcher(this.LA(1), separator);\n      };\n\n      // 2nd..nth iterations\n      while (this.tokenMatcher(this.LA(1), separator) === true) {\n        // note that this CONSUME will never enter recovery because\n        // the separatorLookAheadFunc checks that the separator really does exist.\n        this.CONSUME(separator);\n        // No need for checking infinite loop here due to consuming the separator.\n        (>action).call(this);\n      }\n\n      // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n      this.attemptInRepetitionRecovery(\n        this.repetitionSepSecondInternal,\n        [\n          prodOccurrence,\n          separator,\n          separatorLookAheadFunc,\n          action,\n          NextTerminalAfterAtLeastOneSepWalker,\n        ],\n        separatorLookAheadFunc,\n        AT_LEAST_ONE_SEP_IDX,\n        prodOccurrence,\n        NextTerminalAfterAtLeastOneSepWalker,\n      );\n    } else {\n      throw this.raiseEarlyExitException(\n        prodOccurrence,\n        PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,\n        options.ERR_MSG,\n      );\n    }\n  }\n\n  manyInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(MANY_IDX, prodOccurrence);\n    return this.manyInternalLogic(prodOccurrence, actionORMethodDef, laKey);\n  }\n\n  manyInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    key: number,\n  ) {\n    let lookaheadFunction = this.getLaFuncFromCache(key);\n    let action;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookaheadFunction;\n        lookaheadFunction 
= () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    let notStuck = true;\n    while (lookaheadFunction.call(this) === true && notStuck === true) {\n      notStuck = this.doSingleRepetition(action);\n    }\n\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    this.attemptInRepetitionRecovery(\n      this.manyInternal,\n      [prodOccurrence, actionORMethodDef],\n      lookaheadFunction,\n      MANY_IDX,\n      prodOccurrence,\n      NextTerminalAfterManyWalker,\n      // The notStuck parameter is only relevant when \"attemptInRepetitionRecovery\"\n      // is invoked from manyInternal, in the MANY_SEP case and AT_LEAST_ONE[_SEP]\n      // An infinite loop cannot occur as:\n      // - Either the lookahead is guaranteed to consume something (Single Token Separator)\n      // - AT_LEAST_ONE by definition is guaranteed to consume something (or error out).\n      notStuck,\n    );\n  }\n\n  manySepFirstInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: ManySepMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      MANY_SEP_IDX,\n      prodOccurrence,\n    );\n    this.manySepFirstInternalLogic(prodOccurrence, options, laKey);\n  }\n\n  manySepFirstInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: ManySepMethodOpts,\n    key: number,\n  ): void {\n    const action = options.DEF;\n    const separator = options.SEP;\n    const firstIterationLaFunc = this.getLaFuncFromCache(key);\n\n    // 1st iteration\n    if (firstIterationLaFunc.call(this) === true) {\n      action.call(this);\n\n      const separatorLookAheadFunc = () => {\n        return this.tokenMatcher(this.LA(1), separator);\n      };\n      // 2nd..nth iterations\n      while (this.tokenMatcher(this.LA(1), separator) === true) {\n        // note that this CONSUME will never enter recovery because\n        // the separatorLookAheadFunc checks that the separator really does exist.\n        this.CONSUME(separator);\n        // No need for checking infinite loop here due to consuming the separator.\n        action.call(this);\n      }\n\n      // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n      this.attemptInRepetitionRecovery(\n        this.repetitionSepSecondInternal,\n        [\n          prodOccurrence,\n          separator,\n          separatorLookAheadFunc,\n          action,\n          NextTerminalAfterManySepWalker,\n        ],\n        separatorLookAheadFunc,\n        MANY_SEP_IDX,\n        prodOccurrence,\n        NextTerminalAfterManySepWalker,\n      );\n    }\n  }\n\n  repetitionSepSecondInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    separator: TokenType,\n    separatorLookAheadFunc: () => boolean,\n    action: GrammarAction,\n    nextTerminalAfterWalker: typeof AbstractNextTerminalAfterProductionWalker,\n  ): void {\n    while (separatorLookAheadFunc()) {\n      // note that this CONSUME will never enter recovery because\n      // the separatorLookAheadFunc checks that the separator really does exist.\n      this.CONSUME(separator);\n      action.call(this);\n    }\n\n    // we can only arrive to this function after an error\n    // has occurred (hence the name 'second') so the following\n    // IF will always be entered, its possible to remove it...\n    // however 
it is kept to avoid confusion and be consistent.\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    /* istanbul ignore else */\n    this.attemptInRepetitionRecovery(\n      this.repetitionSepSecondInternal,\n      [\n        prodOccurrence,\n        separator,\n        separatorLookAheadFunc,\n        action,\n        nextTerminalAfterWalker,\n      ],\n      separatorLookAheadFunc,\n      AT_LEAST_ONE_SEP_IDX,\n      prodOccurrence,\n      nextTerminalAfterWalker,\n    );\n  }\n\n  doSingleRepetition(this: MixedInParser, action: Function): any {\n    const beforeIteration = this.getLexerPosition();\n    action.call(this);\n    const afterIteration = this.getLexerPosition();\n\n    // This boolean will indicate if this repetition progressed\n    // or if we are \"stuck\" (potential infinite loop in the repetition).\n    return afterIteration > beforeIteration;\n  }\n\n  orInternal(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n    occurrence: number,\n  ): T {\n    const laKey = this.getKeyForAutomaticLookahead(OR_IDX, occurrence);\n    const alts = isArray(altsOrOpts) ? altsOrOpts : altsOrOpts.DEF;\n\n    const laFunc = this.getLaFuncFromCache(laKey);\n    const altIdxToTake = laFunc.call(this, alts);\n    if (altIdxToTake !== undefined) {\n      const chosenAlternative: any = alts[altIdxToTake];\n      return chosenAlternative.ALT.call(this);\n    }\n    this.raiseNoAltException(\n      occurrence,\n      (altsOrOpts as OrMethodOpts).ERR_MSG,\n    );\n  }\n\n  ruleFinallyStateUpdate(this: MixedInParser): void {\n    this.RULE_STACK.pop();\n    this.RULE_OCCURRENCE_STACK.pop();\n\n    // NOOP when cst is disabled\n    this.cstFinallyStateUpdate();\n\n    if (this.RULE_STACK.length === 0 && this.isAtEndOfInput() === false) {\n      const firstRedundantTok = this.LA(1);\n      const errMsg = this.errorMessageProvider.buildNotAllInputParsedMessage({\n        firstRedundant: firstRedundantTok,\n        ruleName: this.getCurrRuleFullName(),\n      });\n      this.SAVE_ERROR(\n        new NotAllInputParsedException(errMsg, firstRedundantTok),\n      );\n    }\n  }\n\n  subruleInternal(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    idx: number,\n    options?: SubruleMethodOpts,\n  ): R {\n    let ruleResult;\n    try {\n      const args = options !== undefined ? options.ARGS : undefined;\n      this.subruleIdx = idx;\n      ruleResult = ruleToCall.apply(this, args);\n      this.cstPostNonTerminal(\n        ruleResult,\n        options !== undefined && options.LABEL !== undefined\n          ? options.LABEL\n          : ruleToCall.ruleName,\n      );\n      return ruleResult;\n    } catch (e) {\n      throw this.subruleInternalError(e, options, ruleToCall.ruleName);\n    }\n  }\n\n  subruleInternalError(\n    this: MixedInParser,\n    e: any,\n    options: SubruleMethodOpts | undefined,\n    ruleName: string,\n  ): void {\n    if (isRecognitionException(e) && e.partialCstResult !== undefined) {\n      this.cstPostNonTerminal(\n        e.partialCstResult,\n        options !== undefined && options.LABEL !== undefined\n          ? 
options.LABEL\n          : ruleName,\n      );\n\n      delete e.partialCstResult;\n    }\n    throw e;\n  }\n\n  consumeInternal(\n    this: MixedInParser,\n    tokType: TokenType,\n    idx: number,\n    options: ConsumeMethodOpts | undefined,\n  ): IToken {\n    let consumedToken!: IToken;\n    try {\n      const nextToken = this.LA(1);\n      if (this.tokenMatcher(nextToken, tokType) === true) {\n        this.consumeToken();\n        consumedToken = nextToken;\n      } else {\n        this.consumeInternalError(tokType, nextToken, options);\n      }\n    } catch (eFromConsumption) {\n      consumedToken = this.consumeInternalRecovery(\n        tokType,\n        idx,\n        eFromConsumption,\n      );\n    }\n\n    this.cstPostTerminal(\n      options !== undefined && options.LABEL !== undefined\n        ? options.LABEL\n        : tokType.name,\n      consumedToken,\n    );\n    return consumedToken;\n  }\n\n  consumeInternalError(\n    this: MixedInParser,\n    tokType: TokenType,\n    nextToken: IToken,\n    options: ConsumeMethodOpts | undefined,\n  ): void {\n    let msg;\n    const previousToken = this.LA(0);\n    if (options !== undefined && options.ERR_MSG) {\n      msg = options.ERR_MSG;\n    } else {\n      msg = this.errorMessageProvider.buildMismatchTokenMessage({\n        expected: tokType,\n        actual: nextToken,\n        previous: previousToken,\n        ruleName: this.getCurrRuleFullName(),\n      });\n    }\n    throw this.SAVE_ERROR(\n      new MismatchedTokenException(msg, nextToken, previousToken),\n    );\n  }\n\n  consumeInternalRecovery(\n    this: MixedInParser,\n    tokType: TokenType,\n    idx: number,\n    eFromConsumption: Error,\n  ): IToken {\n    // no recovery allowed during backtracking, otherwise backtracking may recover invalid syntax and accept it\n    // but the original syntax could have been parsed successfully without any backtracking + recovery\n    if (\n      this.recoveryEnabled &&\n      // TODO: more robust checking of the exception type. 
Perhaps Typescript extending expressions?\n      eFromConsumption.name === \"MismatchedTokenException\" &&\n      !this.isBackTracking()\n    ) {\n      const follows = this.getFollowsForInRuleRecovery(tokType, idx);\n      try {\n        return this.tryInRuleRecovery(tokType, follows);\n      } catch (eFromInRuleRecovery) {\n        if (eFromInRuleRecovery.name === IN_RULE_RECOVERY_EXCEPTION) {\n          // failed in RuleRecovery.\n          // throw the original error in order to trigger reSync error recovery\n          throw eFromConsumption;\n        } else {\n          throw eFromInRuleRecovery;\n        }\n      }\n    } else {\n      throw eFromConsumption;\n    }\n  }\n\n  saveRecogState(this: MixedInParser): IParserState {\n    // errors is a getter which will clone the errors array\n    const savedErrors = this.errors;\n    const savedRuleStack = clone(this.RULE_STACK);\n    return {\n      errors: savedErrors,\n      lexerState: this.exportLexerState(),\n      RULE_STACK: savedRuleStack,\n      CST_STACK: this.CST_STACK,\n    };\n  }\n\n  reloadRecogState(this: MixedInParser, newState: IParserState) {\n    this.errors = newState.errors;\n    this.importLexerState(newState.lexerState);\n    this.RULE_STACK = newState.RULE_STACK;\n  }\n\n  ruleInvocationStateUpdate(\n    this: MixedInParser,\n    shortName: number,\n    fullName: string,\n    idxInCallingRule: number,\n  ): void {\n    this.RULE_OCCURRENCE_STACK.push(idxInCallingRule);\n    this.RULE_STACK.push(shortName);\n    // NOOP when cst is disabled\n    this.cstInvocationStateUpdate(fullName);\n  }\n\n  isBackTracking(this: MixedInParser): boolean {\n    return this.isBackTrackingStack.length !== 0;\n  }\n\n  getCurrRuleFullName(this: MixedInParser): string {\n    const shortName = this.getLastExplicitRuleShortName();\n    return this.shortRuleNameToFull[shortName];\n  }\n\n  shortRuleNameToFullName(this: MixedInParser, shortName: number) {\n    return this.shortRuleNameToFull[shortName];\n  }\n\n  public isAtEndOfInput(this: MixedInParser): boolean {\n    return this.tokenMatcher(this.LA(1), EOF);\n  }\n\n  public reset(this: MixedInParser): void {\n    this.resetLexerState();\n    this.subruleIdx = 0;\n    this.isBackTrackingStack = [];\n    this.errors = [];\n    this.RULE_STACK = [];\n    // TODO: extract a specific reset for TreeBuilder trait\n    this.CST_STACK = [];\n    this.RULE_OCCURRENCE_STACK = [];\n  }\n}\n", "import {\n  IParserConfig,\n  IParserErrorMessageProvider,\n  IRecognitionException,\n} from \"@chevrotain/types\";\nimport {\n  EarlyExitException,\n  isRecognitionException,\n  NoViableAltException,\n} from \"../../exceptions_public.js\";\nimport { clone, has } from \"lodash-es\";\nimport {\n  getLookaheadPathsForOptionalProd,\n  getLookaheadPathsForOr,\n  PROD_TYPE,\n} from \"../../grammar/lookahead.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * Trait responsible for runtime parsing errors.\n */\nexport class ErrorHandler {\n  _errors: IRecognitionException[];\n  errorMessageProvider: IParserErrorMessageProvider;\n\n  initErrorHandler(config: IParserConfig) {\n    this._errors = [];\n    this.errorMessageProvider = has(config, \"errorMessageProvider\")\n      ? 
(config.errorMessageProvider as IParserErrorMessageProvider) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.errorMessageProvider;\n  }\n\n  SAVE_ERROR(\n    this: MixedInParser,\n    error: IRecognitionException,\n  ): IRecognitionException {\n    if (isRecognitionException(error)) {\n      error.context = {\n        ruleStack: this.getHumanReadableRuleStack(),\n        ruleOccurrenceStack: clone(this.RULE_OCCURRENCE_STACK),\n      };\n      this._errors.push(error);\n      return error;\n    } else {\n      throw Error(\n        \"Trying to save an Error which is not a RecognitionException\",\n      );\n    }\n  }\n\n  get errors(): IRecognitionException[] {\n    return clone(this._errors);\n  }\n\n  set errors(newErrors: IRecognitionException[]) {\n    this._errors = newErrors;\n  }\n\n  // TODO: consider caching the error message computed information\n  raiseEarlyExitException(\n    this: MixedInParser,\n    occurrence: number,\n    prodType: PROD_TYPE,\n    userDefinedErrMsg: string | undefined,\n  ): never {\n    const ruleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[ruleName];\n    const lookAheadPathsPerAlternative = getLookaheadPathsForOptionalProd(\n      occurrence,\n      ruleGrammar,\n      prodType,\n      this.maxLookahead,\n    );\n    const insideProdPaths = lookAheadPathsPerAlternative[0];\n    const actualTokens = [];\n    for (let i = 1; i <= this.maxLookahead; i++) {\n      actualTokens.push(this.LA(i));\n    }\n    const msg = this.errorMessageProvider.buildEarlyExitMessage({\n      expectedIterationPaths: insideProdPaths,\n      actual: actualTokens,\n      previous: this.LA(0),\n      customUserDescription: userDefinedErrMsg,\n      ruleName: ruleName,\n    });\n\n    throw this.SAVE_ERROR(new EarlyExitException(msg, this.LA(1), this.LA(0)));\n  }\n\n  // TODO: consider caching the error message computed information\n  raiseNoAltException(\n    this: MixedInParser,\n    occurrence: number,\n    errMsgTypes: string | undefined,\n  ): never {\n    const ruleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[ruleName];\n    // TODO: getLookaheadPathsForOr can be slow for large enough maxLookahead and certain grammars, consider caching ?\n    const lookAheadPathsPerAlternative = getLookaheadPathsForOr(\n      occurrence,\n      ruleGrammar,\n      this.maxLookahead,\n    );\n\n    const actualTokens = [];\n    for (let i = 1; i <= this.maxLookahead; i++) {\n      actualTokens.push(this.LA(i));\n    }\n    const previousToken = this.LA(0);\n\n    const errMsg = this.errorMessageProvider.buildNoViableAltMessage({\n      expectedPathsPerAlt: lookAheadPathsPerAlternative,\n      actual: actualTokens,\n      previous: previousToken,\n      customUserDescription: errMsgTypes,\n      ruleName: this.getCurrRuleFullName(),\n    });\n\n    throw this.SAVE_ERROR(\n      new NoViableAltException(errMsg, this.LA(1), previousToken),\n    );\n  }\n}\n", "import {\n  ISyntacticContentAssistPath,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  NextAfterTokenWalker,\n  nextPossibleTokensAfter,\n} from \"../../grammar/interpreter.js\";\nimport { first, isUndefined } from \"lodash-es\";\nimport { MixedInParser } from \"./parser_traits.js\";\n\nexport class ContentAssist {\n  initContentAssist() {}\n\n  public computeContentAssist(\n    this: MixedInParser,\n    startRuleName: string,\n    precedingInput: IToken[],\n  ): 
ISyntacticContentAssistPath[] {\n    const startRuleGast = this.gastProductionsCache[startRuleName];\n\n    if (isUndefined(startRuleGast)) {\n      throw Error(`Rule ->${startRuleName}<- does not exist in this grammar.`);\n    }\n\n    return nextPossibleTokensAfter(\n      [startRuleGast],\n      precedingInput,\n      this.tokenMatcher,\n      this.maxLookahead,\n    );\n  }\n\n  // TODO: should this be a member method or a utility? it does not have any state or usage of 'this'...\n  // TODO: should this be more explicitly part of the public API?\n  public getNextPossibleTokenTypes(\n    this: MixedInParser,\n    grammarPath: ITokenGrammarPath,\n  ): TokenType[] {\n    const topRuleName = first(grammarPath.ruleStack)!;\n    const gastProductions = this.getGAstProductions();\n    const topProduction = gastProductions[topRuleName];\n    const nextPossibleTokenTypes = new NextAfterTokenWalker(\n      topProduction,\n      grammarPath,\n    ).startWalking();\n    return nextPossibleTokenTypes;\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  CstNode,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IParserConfig,\n  IProduction,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  SubruleMethodOpts,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  forEach,\n  has,\n  isArray,\n  isFunction,\n  last as peek,\n  some,\n} from \"lodash-es\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { Lexer } from \"../../../scan/lexer_public.js\";\nimport {\n  augmentTokenTypes,\n  hasShortKeyProperty,\n} from \"../../../scan/tokens.js\";\nimport {\n  createToken,\n  createTokenInstance,\n} from \"../../../scan/tokens_public.js\";\nimport { END_OF_FILE } from \"../parser.js\";\nimport { BITS_FOR_OCCURRENCE_IDX } from \"../../grammar/keys.js\";\nimport { ParserMethodInternal } from \"../types.js\";\n\ntype ProdWithDef = IProduction & { definition?: IProduction[] };\nconst RECORDING_NULL_OBJECT = {\n  description: \"This Object indicates the Parser is during Recording Phase\",\n};\nObject.freeze(RECORDING_NULL_OBJECT);\n\nconst HANDLE_SEPARATOR = true;\nconst MAX_METHOD_IDX = Math.pow(2, BITS_FOR_OCCURRENCE_IDX) - 1;\n\nconst RFT = createToken({ name: \"RECORDING_PHASE_TOKEN\", pattern: Lexer.NA });\naugmentTokenTypes([RFT]);\nconst RECORDING_PHASE_TOKEN = createTokenInstance(\n  RFT,\n  \"This IToken indicates the Parser is in Recording Phase\\n\\t\" +\n    \"\" +\n    \"See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details\",\n  // Using \"-1\" instead of NaN (as in EOF) because an actual number is less likely to\n  // cause errors if the output of LA or CONSUME would be (incorrectly) used during the recording phase.\n  -1,\n  -1,\n  -1,\n  -1,\n  -1,\n  -1,\n);\nObject.freeze(RECORDING_PHASE_TOKEN);\n\nconst RECORDING_PHASE_CSTNODE: CstNode = {\n  name:\n    \"This CSTNode indicates the Parser is in Recording Phase\\n\\t\" +\n    \"See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details\",\n  children: {},\n};\n\n/**\n * This trait handles the creation of the GAST structure for Chevrotain Grammars\n */\nexport class GastRecorder {\n  recordingProdStack: ProdWithDef[];\n  RECORDING_PHASE: boolean;\n\n  initGastRecorder(this: MixedInParser, config: 
IParserConfig): void {\n    this.recordingProdStack = [];\n    this.RECORDING_PHASE = false;\n  }\n\n  enableRecording(this: MixedInParser): void {\n    this.RECORDING_PHASE = true;\n\n    this.TRACE_INIT(\"Enable Recording\", () => {\n      /**\n       * Warning Dark Voodoo Magic upcoming!\n       * We are \"replacing\" the public parsing DSL methods API\n       * With **new** alternative implementations on the Parser **instance**\n       *\n       * So far this is the only way I've found to avoid performance regressions during parsing time.\n       * - Approx 30% performance regression was measured on Chrome 75 Canary when attempting to replace the \"internal\"\n       *   implementations directly instead.\n       */\n      for (let i = 0; i < 10; i++) {\n        const idx = i > 0 ? i : \"\";\n        this[`CONSUME${idx}` as \"CONSUME\"] = function (arg1, arg2) {\n          return this.consumeInternalRecord(arg1, i, arg2);\n        };\n        this[`SUBRULE${idx}` as \"SUBRULE\"] = function (arg1, arg2) {\n          return this.subruleInternalRecord(arg1, i, arg2) as any;\n        };\n        this[`OPTION${idx}` as \"OPTION\"] = function (arg1) {\n          return this.optionInternalRecord(arg1, i);\n        };\n        this[`OR${idx}` as \"OR\"] = function (arg1) {\n          return this.orInternalRecord(arg1, i);\n        };\n        this[`MANY${idx}` as \"MANY\"] = function (arg1) {\n          this.manyInternalRecord(i, arg1);\n        };\n        this[`MANY_SEP${idx}` as \"MANY_SEP\"] = function (arg1) {\n          this.manySepFirstInternalRecord(i, arg1);\n        };\n        this[`AT_LEAST_ONE${idx}` as \"AT_LEAST_ONE\"] = function (arg1) {\n          this.atLeastOneInternalRecord(i, arg1);\n        };\n        this[`AT_LEAST_ONE_SEP${idx}` as \"AT_LEAST_ONE_SEP\"] = function (arg1) {\n          this.atLeastOneSepFirstInternalRecord(i, arg1);\n        };\n      }\n\n      // DSL methods with the idx(suffix) as an argument\n      this[`consume`] = function (idx, arg1, arg2) {\n        return this.consumeInternalRecord(arg1, idx, arg2);\n      };\n      this[`subrule`] = function (idx, arg1, arg2) {\n        return this.subruleInternalRecord(arg1, idx, arg2) as any;\n      };\n      this[`option`] = function (idx, arg1) {\n        return this.optionInternalRecord(arg1, idx);\n      };\n      this[`or`] = function (idx, arg1) {\n        return this.orInternalRecord(arg1, idx);\n      };\n      this[`many`] = function (idx, arg1) {\n        this.manyInternalRecord(idx, arg1);\n      };\n      this[`atLeastOne`] = function (idx, arg1) {\n        this.atLeastOneInternalRecord(idx, arg1);\n      };\n\n      this.ACTION = this.ACTION_RECORD;\n      this.BACKTRACK = this.BACKTRACK_RECORD;\n      this.LA = this.LA_RECORD;\n    });\n  }\n\n  disableRecording(this: MixedInParser) {\n    this.RECORDING_PHASE = false;\n    // By deleting these **instance** properties, any future invocation\n    // will be deferred to the original methods on the **prototype** object\n    // This seems to get rid of any incorrect optimizations that V8 may\n    // do during the recording phase.\n    this.TRACE_INIT(\"Deleting Recording methods\", () => {\n      const that: any = this;\n\n      for (let i = 0; i < 10; i++) {\n        const idx = i > 0 ? 
i : \"\";\n        delete that[`CONSUME${idx}`];\n        delete that[`SUBRULE${idx}`];\n        delete that[`OPTION${idx}`];\n        delete that[`OR${idx}`];\n        delete that[`MANY${idx}`];\n        delete that[`MANY_SEP${idx}`];\n        delete that[`AT_LEAST_ONE${idx}`];\n        delete that[`AT_LEAST_ONE_SEP${idx}`];\n      }\n\n      delete that[`consume`];\n      delete that[`subrule`];\n      delete that[`option`];\n      delete that[`or`];\n      delete that[`many`];\n      delete that[`atLeastOne`];\n\n      delete that.ACTION;\n      delete that.BACKTRACK;\n      delete that.LA;\n    });\n  }\n\n  //   Parser methods are called inside an ACTION?\n  //   Maybe try/catch/finally on ACTIONS while disabling the recorders state changes?\n  // @ts-expect-error -- noop place holder\n  ACTION_RECORD(this: MixedInParser, impl: () => T): T {\n    // NO-OP during recording\n  }\n\n  // Executing backtracking logic will break our recording logic assumptions\n  BACKTRACK_RECORD(\n    grammarRule: (...args: any[]) => T,\n    args?: any[],\n  ): () => boolean {\n    return () => true;\n  }\n\n  // LA is part of the official API and may be used for custom lookahead logic\n  // by end users who may forget to wrap it in ACTION or inside a GATE\n  LA_RECORD(howMuch: number): IToken {\n    // We cannot use the RECORD_PHASE_TOKEN here because someone may depend\n    // On LA return EOF at the end of the input so an infinite loop may occur.\n    return END_OF_FILE;\n  }\n\n  topLevelRuleRecord(name: string, def: Function): Rule {\n    try {\n      const newTopLevelRule = new Rule({ definition: [], name: name });\n      newTopLevelRule.name = name;\n      this.recordingProdStack.push(newTopLevelRule);\n      def.call(this);\n      this.recordingProdStack.pop();\n      return newTopLevelRule;\n    } catch (originalError) {\n      if (originalError.KNOWN_RECORDER_ERROR !== true) {\n        try {\n          originalError.message =\n            originalError.message +\n            '\\n\\t This error was thrown during the \"grammar recording phase\" For more info see:\\n\\t' +\n            \"https://chevrotain.io/docs/guide/internals.html#grammar-recording\";\n        } catch (mutabilityError) {\n          // We may not be able to modify the original error object\n          throw originalError;\n        }\n      }\n      throw originalError;\n    }\n  }\n\n  // Implementation of parsing DSL\n  optionInternalRecord(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n  ): OUT {\n    return recordProd.call(this, Option, actionORMethodDef, occurrence);\n  }\n\n  atLeastOneInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    recordProd.call(this, RepetitionMandatory, actionORMethodDef, occurrence);\n  }\n\n  atLeastOneSepFirstInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    recordProd.call(\n      this,\n      RepetitionMandatoryWithSeparator,\n      options,\n      occurrence,\n      HANDLE_SEPARATOR,\n    );\n  }\n\n  manyInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    recordProd.call(this, Repetition, actionORMethodDef, occurrence);\n  }\n\n  manySepFirstInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    options: ManySepMethodOpts,\n  ): void {\n    recordProd.call(\n      
this,\n      RepetitionWithSeparator,\n      options,\n      occurrence,\n      HANDLE_SEPARATOR,\n    );\n  }\n\n  orInternalRecord(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n    occurrence: number,\n  ): T {\n    return recordOrProd.call(this, altsOrOpts, occurrence);\n  }\n\n  subruleInternalRecord(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    occurrence: number,\n    options?: SubruleMethodOpts,\n  ): R | CstNode {\n    assertMethodIdxIsValid(occurrence);\n    if (!ruleToCall || has(ruleToCall, \"ruleName\") === false) {\n      const error: any = new Error(\n        ` argument is invalid` +\n          ` expecting a Parser method reference but got: <${JSON.stringify(\n            ruleToCall,\n          )}>` +\n          `\\n inside top level rule: <${\n            (this.recordingProdStack[0]).name\n          }>`,\n      );\n      error.KNOWN_RECORDER_ERROR = true;\n      throw error;\n    }\n\n    const prevProd: any = peek(this.recordingProdStack);\n    const ruleName = ruleToCall.ruleName;\n    const newNoneTerminal = new NonTerminal({\n      idx: occurrence,\n      nonTerminalName: ruleName,\n      label: options?.LABEL,\n      // The resolving of the `referencedRule` property will be done once all the Rule's GASTs have been created\n      referencedRule: undefined,\n    });\n    prevProd.definition.push(newNoneTerminal);\n\n    return this.outputCst\n      ? RECORDING_PHASE_CSTNODE\n      : RECORDING_NULL_OBJECT;\n  }\n\n  consumeInternalRecord(\n    this: MixedInParser,\n    tokType: TokenType,\n    occurrence: number,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    assertMethodIdxIsValid(occurrence);\n    if (!hasShortKeyProperty(tokType)) {\n      const error: any = new Error(\n        ` argument is invalid` +\n          ` expecting a TokenType reference but got: <${JSON.stringify(\n            tokType,\n          )}>` +\n          `\\n inside top level rule: <${\n            (this.recordingProdStack[0]).name\n          }>`,\n      );\n      error.KNOWN_RECORDER_ERROR = true;\n      throw error;\n    }\n    const prevProd: any = peek(this.recordingProdStack);\n    const newNoneTerminal = new Terminal({\n      idx: occurrence,\n      terminalType: tokType,\n      label: options?.LABEL,\n    });\n    prevProd.definition.push(newNoneTerminal);\n\n    return RECORDING_PHASE_TOKEN;\n  }\n}\n\nfunction recordProd(\n  prodConstructor: any,\n  mainProdArg: any,\n  occurrence: number,\n  handleSep: boolean = false,\n): any {\n  assertMethodIdxIsValid(occurrence);\n  const prevProd: any = peek(this.recordingProdStack);\n  const grammarAction = isFunction(mainProdArg) ? mainProdArg : mainProdArg.DEF;\n\n  const newProd = new prodConstructor({ definition: [], idx: occurrence });\n  if (handleSep) {\n    newProd.separator = mainProdArg.SEP;\n  }\n  if (has(mainProdArg, \"MAX_LOOKAHEAD\")) {\n    newProd.maxLookahead = mainProdArg.MAX_LOOKAHEAD;\n  }\n\n  this.recordingProdStack.push(newProd);\n  grammarAction.call(this);\n  prevProd.definition.push(newProd);\n  this.recordingProdStack.pop();\n\n  return RECORDING_NULL_OBJECT;\n}\n\nfunction recordOrProd(mainProdArg: any, occurrence: number): any {\n  assertMethodIdxIsValid(occurrence);\n  const prevProd: any = peek(this.recordingProdStack);\n  // Only an array of alternatives\n  const hasOptions = isArray(mainProdArg) === false;\n  const alts: IOrAlt[] =\n    hasOptions === false ? 
mainProdArg : mainProdArg.DEF;\n\n  const newOrProd = new Alternation({\n    definition: [],\n    idx: occurrence,\n    ignoreAmbiguities: hasOptions && mainProdArg.IGNORE_AMBIGUITIES === true,\n  });\n  if (has(mainProdArg, \"MAX_LOOKAHEAD\")) {\n    newOrProd.maxLookahead = mainProdArg.MAX_LOOKAHEAD;\n  }\n\n  const hasPredicates = some(alts, (currAlt: any) => isFunction(currAlt.GATE));\n  newOrProd.hasPredicates = hasPredicates;\n\n  prevProd.definition.push(newOrProd);\n\n  forEach(alts, (currAlt) => {\n    const currAltFlat = new Alternative({ definition: [] });\n    newOrProd.definition.push(currAltFlat);\n    if (has(currAlt, \"IGNORE_AMBIGUITIES\")) {\n      currAltFlat.ignoreAmbiguities = currAlt.IGNORE_AMBIGUITIES as boolean; // assumes end user provides the correct config value/type\n    }\n    // **implicit** ignoreAmbiguities due to usage of gate\n    else if (has(currAlt, \"GATE\")) {\n      currAltFlat.ignoreAmbiguities = true;\n    }\n    this.recordingProdStack.push(currAltFlat);\n    currAlt.ALT.call(this);\n    this.recordingProdStack.pop();\n  });\n  return RECORDING_NULL_OBJECT;\n}\n\nfunction getIdxSuffix(idx: number): string {\n  return idx === 0 ? \"\" : `${idx}`;\n}\n\nfunction assertMethodIdxIsValid(idx: number): void {\n  if (idx < 0 || idx > MAX_METHOD_IDX) {\n    const error: any = new Error(\n      // The stack trace will contain all the needed details\n      `Invalid DSL Method idx value: <${idx}>\\n\\t` +\n        `Idx value must be a none negative value smaller than ${\n          MAX_METHOD_IDX + 1\n        }`,\n    );\n    error.KNOWN_RECORDER_ERROR = true;\n    throw error;\n  }\n}\n", "import { IParserConfig } from \"@chevrotain/types\";\nimport { has } from \"lodash-es\";\nimport { timer } from \"@chevrotain/utils\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * Trait responsible for runtime parsing errors.\n */\nexport class PerformanceTracer {\n  traceInitPerf: boolean | number;\n  traceInitMaxIdent: number;\n  traceInitIndent: number;\n\n  initPerformanceTracer(config: IParserConfig) {\n    if (has(config, \"traceInitPerf\")) {\n      const userTraceInitPerf = config.traceInitPerf;\n      const traceIsNumber = typeof userTraceInitPerf === \"number\";\n      this.traceInitMaxIdent = traceIsNumber\n        ? userTraceInitPerf\n        : Infinity;\n      this.traceInitPerf = traceIsNumber\n        ? userTraceInitPerf > 0\n        : (userTraceInitPerf as boolean); // assumes end user provides the correct config value/type\n    } else {\n      this.traceInitMaxIdent = 0;\n      this.traceInitPerf = DEFAULT_PARSER_CONFIG.traceInitPerf;\n    }\n\n    this.traceInitIndent = -1;\n  }\n\n  TRACE_INIT(this: MixedInParser, phaseDesc: string, phaseImpl: () => T): T {\n    // No need to optimize this using NOOP pattern because\n    // It is not called in a hot spot...\n    if (this.traceInitPerf === true) {\n      this.traceInitIndent++;\n      const indent = new Array(this.traceInitIndent + 1).join(\"\\t\");\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        console.log(`${indent}--> <${phaseDesc}>`);\n      }\n      const { time, value } = timer(phaseImpl);\n      /* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */\n      const traceMethod = time > 10 ? 
console.warn : console.log;\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);\n      }\n      this.traceInitIndent--;\n      return value;\n    } else {\n      return phaseImpl();\n    }\n  }\n}\n", "export function applyMixins(derivedCtor: any, baseCtors: any[]) {\n  baseCtors.forEach((baseCtor) => {\n    const baseProto = baseCtor.prototype;\n    Object.getOwnPropertyNames(baseProto).forEach((propName) => {\n      if (propName === \"constructor\") {\n        return;\n      }\n\n      const basePropDescriptor = Object.getOwnPropertyDescriptor(\n        baseProto,\n        propName,\n      );\n      // Handle Accessors\n      if (\n        basePropDescriptor &&\n        (basePropDescriptor.get || basePropDescriptor.set)\n      ) {\n        Object.defineProperty(\n          derivedCtor.prototype,\n          propName,\n          basePropDescriptor,\n        );\n      } else {\n        derivedCtor.prototype[propName] = baseCtor.prototype[propName];\n      }\n    });\n  });\n}\n", "import { clone, forEach, has, isEmpty, map, values } from \"lodash-es\";\nimport { toFastProperties } from \"@chevrotain/utils\";\nimport { computeAllProdsFollows } from \"../grammar/follow.js\";\nimport { createTokenInstance, EOF } from \"../../scan/tokens_public.js\";\nimport {\n  defaultGrammarValidatorErrorProvider,\n  defaultParserErrorProvider,\n} from \"../errors_public.js\";\nimport {\n  resolveGrammar,\n  validateGrammar,\n} from \"../grammar/gast/gast_resolver_public.js\";\nimport {\n  CstNode,\n  IParserConfig,\n  IRecognitionException,\n  IRuleConfig,\n  IToken,\n  TokenType,\n  TokenVocabulary,\n} from \"@chevrotain/types\";\nimport { Recoverable } from \"./traits/recoverable.js\";\nimport { LooksAhead } from \"./traits/looksahead.js\";\nimport { TreeBuilder } from \"./traits/tree_builder.js\";\nimport { LexerAdapter } from \"./traits/lexer_adapter.js\";\nimport { RecognizerApi } from \"./traits/recognizer_api.js\";\nimport { RecognizerEngine } from \"./traits/recognizer_engine.js\";\n\nimport { ErrorHandler } from \"./traits/error_handler.js\";\nimport { MixedInParser } from \"./traits/parser_traits.js\";\nimport { ContentAssist } from \"./traits/context_assist.js\";\nimport { GastRecorder } from \"./traits/gast_recorder.js\";\nimport { PerformanceTracer } from \"./traits/perf_tracer.js\";\nimport { applyMixins } from \"./utils/apply_mixins.js\";\nimport { IParserDefinitionError } from \"../grammar/types.js\";\nimport { Rule } from \"@chevrotain/gast\";\nimport { IParserConfigInternal, ParserMethodInternal } from \"./types.js\";\nimport { validateLookahead } from \"../grammar/checks.js\";\n\nexport const END_OF_FILE = createTokenInstance(\n  EOF,\n  \"\",\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n);\nObject.freeze(END_OF_FILE);\n\nexport type TokenMatcher = (token: IToken, tokType: TokenType) => boolean;\n\nexport const DEFAULT_PARSER_CONFIG: Required<\n  Omit\n> = Object.freeze({\n  recoveryEnabled: false,\n  maxLookahead: 3,\n  dynamicTokensEnabled: false,\n  outputCst: true,\n  errorMessageProvider: defaultParserErrorProvider,\n  nodeLocationTracking: \"none\",\n  traceInitPerf: false,\n  skipValidations: false,\n});\n\nexport const DEFAULT_RULE_CONFIG: Required> = Object.freeze({\n  recoveryValueFunc: () => undefined,\n  resyncEnabled: true,\n});\n\nexport enum ParserDefinitionErrorType {\n  INVALID_RULE_NAME = 0,\n  DUPLICATE_RULE_NAME = 1,\n  INVALID_RULE_OVERRIDE = 2,\n  DUPLICATE_PRODUCTIONS = 3,\n  
UNRESOLVED_SUBRULE_REF = 4,\n  LEFT_RECURSION = 5,\n  NONE_LAST_EMPTY_ALT = 6,\n  AMBIGUOUS_ALTS = 7,\n  CONFLICT_TOKENS_RULES_NAMESPACE = 8,\n  INVALID_TOKEN_NAME = 9,\n  NO_NON_EMPTY_LOOKAHEAD = 10,\n  AMBIGUOUS_PREFIX_ALTS = 11,\n  TOO_MANY_ALTS = 12,\n  CUSTOM_LOOKAHEAD_VALIDATION = 13,\n}\n\nexport interface IParserDuplicatesDefinitionError\n  extends IParserDefinitionError {\n  dslName: string;\n  occurrence: number;\n  parameter?: string;\n}\n\nexport interface IParserEmptyAlternativeDefinitionError\n  extends IParserDefinitionError {\n  occurrence: number;\n  alternative: number;\n}\n\nexport interface IParserAmbiguousAlternativesDefinitionError\n  extends IParserDefinitionError {\n  occurrence: number | string;\n  alternatives: number[];\n}\n\nexport interface IParserUnresolvedRefDefinitionError\n  extends IParserDefinitionError {\n  unresolvedRefName: string;\n}\n\nexport interface IParserState {\n  errors: IRecognitionException[];\n  lexerState: any;\n  RULE_STACK: number[];\n  CST_STACK: CstNode[];\n}\n\nexport type Predicate = () => boolean;\n\nexport function EMPTY_ALT(): () => undefined;\nexport function EMPTY_ALT(value: T): () => T;\nexport function EMPTY_ALT(value: any = undefined) {\n  return function () {\n    return value;\n  };\n}\n\nexport class Parser {\n  // Set this flag to true if you don't want the Parser to throw error when problems in it's definition are detected.\n  // (normally during the parser's constructor).\n  // This is a design time flag, it will not affect the runtime error handling of the parser, just design time errors,\n  // for example: duplicate rule names, referencing an unresolved subrule, ect...\n  // This flag should not be enabled during normal usage, it is used in special situations, for example when\n  // needing to display the parser definition errors in some GUI(online playground).\n  static DEFER_DEFINITION_ERRORS_HANDLING: boolean = false;\n\n  /**\n   *  @deprecated use the **instance** method with the same name instead\n   */\n  static performSelfAnalysis(parserInstance: Parser): void {\n    throw Error(\n      \"The **static** `performSelfAnalysis` method has been deprecated.\" +\n        \"\\t\\nUse the **instance** method with the same name instead.\",\n    );\n  }\n\n  public performSelfAnalysis(this: MixedInParser): void {\n    this.TRACE_INIT(\"performSelfAnalysis\", () => {\n      let defErrorsMsgs;\n\n      this.selfAnalysisDone = true;\n      const className = this.className;\n\n      this.TRACE_INIT(\"toFastProps\", () => {\n        // Without this voodoo magic the parser would be x3-x4 slower\n        // It seems it is better to invoke `toFastProperties` **before**\n        // Any manipulations of the `this` object done during the recording phase.\n        toFastProperties(this);\n      });\n\n      this.TRACE_INIT(\"Grammar Recording\", () => {\n        try {\n          this.enableRecording();\n          // Building the GAST\n          forEach(this.definedRulesNames, (currRuleName) => {\n            const wrappedRule = (this as any)[\n              currRuleName\n            ] as ParserMethodInternal;\n            const originalGrammarAction = wrappedRule[\"originalGrammarAction\"];\n            let recordedRuleGast!: Rule;\n            this.TRACE_INIT(`${currRuleName} Rule`, () => {\n              recordedRuleGast = this.topLevelRuleRecord(\n                currRuleName,\n                originalGrammarAction,\n              );\n            });\n            this.gastProductionsCache[currRuleName] = recordedRuleGast;\n      
    });\n        } finally {\n          this.disableRecording();\n        }\n      });\n\n      let resolverErrors: IParserDefinitionError[] = [];\n      this.TRACE_INIT(\"Grammar Resolving\", () => {\n        resolverErrors = resolveGrammar({\n          rules: values(this.gastProductionsCache),\n        });\n        this.definitionErrors = this.definitionErrors.concat(resolverErrors);\n      });\n\n      this.TRACE_INIT(\"Grammar Validations\", () => {\n        // only perform additional grammar validations IFF no resolving errors have occurred.\n        // as unresolved grammar may lead to unhandled runtime exceptions in the follow up validations.\n        if (isEmpty(resolverErrors) && this.skipValidations === false) {\n          const validationErrors = validateGrammar({\n            rules: values(this.gastProductionsCache),\n            tokenTypes: values(this.tokensMap),\n            errMsgProvider: defaultGrammarValidatorErrorProvider,\n            grammarName: className,\n          });\n          const lookaheadValidationErrors = validateLookahead({\n            lookaheadStrategy: this.lookaheadStrategy,\n            rules: values(this.gastProductionsCache),\n            tokenTypes: values(this.tokensMap),\n            grammarName: className,\n          });\n          this.definitionErrors = this.definitionErrors.concat(\n            validationErrors,\n            lookaheadValidationErrors,\n          );\n        }\n      });\n\n      // this analysis may fail if the grammar is not perfectly valid\n      if (isEmpty(this.definitionErrors)) {\n        // The results of these computations are not needed unless error recovery is enabled.\n        if (this.recoveryEnabled) {\n          this.TRACE_INIT(\"computeAllProdsFollows\", () => {\n            const allFollows = computeAllProdsFollows(\n              values(this.gastProductionsCache),\n            );\n            this.resyncFollows = allFollows;\n          });\n        }\n\n        this.TRACE_INIT(\"ComputeLookaheadFunctions\", () => {\n          this.lookaheadStrategy.initialize?.({\n            rules: values(this.gastProductionsCache),\n          });\n          this.preComputeLookaheadFunctions(values(this.gastProductionsCache));\n        });\n      }\n\n      if (\n        !Parser.DEFER_DEFINITION_ERRORS_HANDLING &&\n        !isEmpty(this.definitionErrors)\n      ) {\n        defErrorsMsgs = map(\n          this.definitionErrors,\n          (defError) => defError.message,\n        );\n        throw new Error(\n          `Parser Definition Errors detected:\\n ${defErrorsMsgs.join(\n            \"\\n-------------------------------\\n\",\n          )}`,\n        );\n      }\n    });\n  }\n\n  definitionErrors: IParserDefinitionError[] = [];\n  selfAnalysisDone = false;\n  protected skipValidations: boolean;\n\n  constructor(tokenVocabulary: TokenVocabulary, config: IParserConfig) {\n    const that: MixedInParser = this as any;\n    that.initErrorHandler(config);\n    that.initLexerAdapter();\n    that.initLooksAhead(config);\n    that.initRecognizerEngine(tokenVocabulary, config);\n    that.initRecoverable(config);\n    that.initTreeBuilder(config);\n    that.initContentAssist();\n    that.initGastRecorder(config);\n    that.initPerformanceTracer(config);\n\n    if (has(config, \"ignoredIssues\")) {\n      throw new Error(\n        \"The  IParserConfig property has been deprecated.\\n\\t\" +\n          \"Please use the  flag on the relevant DSL method instead.\\n\\t\" +\n          \"See: 
https://chevrotain.io/docs/guide/resolving_grammar_errors.html#IGNORING_AMBIGUITIES\\n\\t\" +\n          \"For further details.\",\n      );\n    }\n\n    this.skipValidations = has(config, \"skipValidations\")\n      ? (config.skipValidations as boolean) // casting assumes the end user passing the correct type\n      : DEFAULT_PARSER_CONFIG.skipValidations;\n  }\n}\n\napplyMixins(Parser, [\n  Recoverable,\n  LooksAhead,\n  TreeBuilder,\n  LexerAdapter,\n  RecognizerEngine,\n  RecognizerApi,\n  ErrorHandler,\n  ContentAssist,\n  GastRecorder,\n  PerformanceTracer,\n]);\n\nexport class CstParser extends Parser {\n  constructor(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfigInternal = DEFAULT_PARSER_CONFIG,\n  ) {\n    const configClone = clone(config);\n    configClone.outputCst = true;\n    super(tokenVocabulary, configClone);\n  }\n}\n\nexport class EmbeddedActionsParser extends Parser {\n  constructor(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfigInternal = DEFAULT_PARSER_CONFIG,\n  ) {\n    const configClone = clone(config);\n    configClone.outputCst = false;\n    super(tokenVocabulary, configClone);\n  }\n}\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport map from \"lodash-es/map.js\"\r\nimport filter from \"lodash-es/filter.js\"\r\nimport {\r\n    IProduction,\r\n    IProductionWithOccurrence,\r\n    TokenType,\r\n    Alternation,\r\n    NonTerminal,\r\n    Rule,\r\n    Option,\r\n    RepetitionMandatory,\r\n    Repetition,\r\n    Terminal,\r\n    Alternative,\r\n    RepetitionWithSeparator,\r\n    RepetitionMandatoryWithSeparator,\r\n    LookaheadProductionType\r\n} from \"chevrotain\"\r\n\r\nexport function buildATNKey(rule: Rule, type: LookaheadProductionType, occurrence: number): string {\r\n    return `${rule.name}_${type}_${occurrence}`;\r\n}\r\n\r\nexport interface ATN {\r\n    decisionMap: Record\r\n    states: ATNState[]\r\n    decisionStates: DecisionState[]\r\n    ruleToStartState: Map\r\n    ruleToStopState: Map\r\n}\r\n\r\nexport const ATN_INVALID_TYPE = 0\r\nexport const ATN_BASIC = 1\r\nexport const ATN_RULE_START = 2\r\nexport const ATN_PLUS_BLOCK_START = 4\r\nexport const ATN_STAR_BLOCK_START = 5\r\n// Currently unused as the ATN is not used for lexing\r\nexport const ATN_TOKEN_START = 6\r\nexport const ATN_RULE_STOP = 7\r\nexport const ATN_BLOCK_END = 8\r\nexport const ATN_STAR_LOOP_BACK = 9\r\nexport const ATN_STAR_LOOP_ENTRY = 10\r\nexport const ATN_PLUS_LOOP_BACK = 11\r\nexport const ATN_LOOP_END = 12\r\n\r\nexport type ATNState =\r\n    | BasicState\r\n    | BasicBlockStartState\r\n    | PlusBlockStartState\r\n    | PlusLoopbackState\r\n    | StarBlockStartState\r\n    | StarLoopbackState\r\n    | StarLoopEntryState\r\n    | BlockEndState\r\n    | RuleStartState\r\n    | RuleStopState\r\n    | LoopEndState\r\n\r\nexport interface ATNBaseState {\r\n    atn: ATN\r\n    production: IProductionWithOccurrence\r\n    stateNumber: number\r\n    rule: Rule\r\n    epsilonOnlyTransitions: boolean\r\n    transitions: Transition[]\r\n    nextTokenWithinRule: number[]\r\n}\r\n\r\nexport interface BasicState extends ATNBaseState {\r\n    type: typeof ATN_BASIC\r\n}\r\n\r\nexport interface BlockStartState extends 
DecisionState {\r\n    end: BlockEndState\r\n}\r\n\r\nexport interface BasicBlockStartState extends BlockStartState {\r\n    type: typeof ATN_BASIC\r\n}\r\n\r\nexport interface PlusBlockStartState extends BlockStartState {\r\n    loopback: PlusLoopbackState\r\n    type: typeof ATN_PLUS_BLOCK_START\r\n}\r\n\r\nexport interface PlusLoopbackState extends DecisionState {\r\n    type: typeof ATN_PLUS_LOOP_BACK\r\n}\r\n\r\nexport interface StarBlockStartState extends BlockStartState {\r\n    type: typeof ATN_STAR_BLOCK_START\r\n}\r\n\r\nexport interface StarLoopbackState extends ATNBaseState {\r\n    type: typeof ATN_STAR_LOOP_BACK\r\n}\r\n\r\nexport interface StarLoopEntryState extends DecisionState {\r\n    loopback: StarLoopbackState\r\n    type: typeof ATN_STAR_LOOP_ENTRY\r\n}\r\n\r\nexport interface BlockEndState extends ATNBaseState {\r\n    start: BlockStartState\r\n    type: typeof ATN_BLOCK_END\r\n}\r\n\r\nexport interface DecisionState extends ATNBaseState {\r\n    decision: number\r\n}\r\n\r\nexport interface LoopEndState extends ATNBaseState {\r\n    loopback: ATNState\r\n    type: typeof ATN_LOOP_END\r\n}\r\n\r\nexport interface RuleStartState extends ATNBaseState {\r\n    stop: RuleStopState\r\n    type: typeof ATN_RULE_START\r\n}\r\n\r\nexport interface RuleStopState extends ATNBaseState {\r\n    type: typeof ATN_RULE_STOP\r\n}\r\n\r\nexport interface Transition {\r\n    target: ATNState\r\n    isEpsilon(): boolean\r\n}\r\n\r\nexport abstract class AbstractTransition implements Transition {\r\n    target: ATNState\r\n\r\n    constructor(target: ATNState) {\r\n        this.target = target\r\n    }\r\n\r\n    isEpsilon() {\r\n        return false\r\n    }\r\n}\r\n\r\nexport class AtomTransition extends AbstractTransition {\r\n    tokenType: TokenType\r\n\r\n    constructor(target: ATNState, tokenType: TokenType) {\r\n        super(target)\r\n        this.tokenType = tokenType\r\n    }\r\n}\r\n\r\nexport class EpsilonTransition extends AbstractTransition {\r\n    constructor(target: ATNState) {\r\n        super(target)\r\n    }\r\n\r\n    isEpsilon() {\r\n        return true\r\n    }\r\n}\r\n\r\nexport class RuleTransition extends AbstractTransition {\r\n    rule: Rule\r\n    followState: ATNState\r\n\r\n    constructor(ruleStart: RuleStartState, rule: Rule, followState: ATNState) {\r\n        super(ruleStart)\r\n        this.rule = rule\r\n        this.followState = followState\r\n    }\r\n\r\n    isEpsilon() {\r\n        return true\r\n    }\r\n}\r\n\r\ninterface ATNHandle {\r\n    left: ATNState\r\n    right: ATNState\r\n}\r\n\r\nexport function createATN(rules: Rule[]): ATN {\r\n    const atn: ATN = {\r\n        decisionMap: {},\r\n        decisionStates: [],\r\n        ruleToStartState: new Map(),\r\n        ruleToStopState: new Map(),\r\n        states: []\r\n    }\r\n    createRuleStartAndStopATNStates(atn, rules)\r\n    const ruleLength = rules.length\r\n    for (let i = 0; i < ruleLength; i++) {\r\n        const rule = rules[i]\r\n        const ruleBlock = block(atn, rule, rule)\r\n        if (ruleBlock === undefined) {\r\n            continue\r\n        }\r\n        buildRuleHandle(atn, rule, ruleBlock)\r\n    }\r\n    return atn\r\n}\r\n\r\nfunction createRuleStartAndStopATNStates(atn: ATN, rules: Rule[]): void {\r\n    const ruleLength = rules.length\r\n    for (let i = 0; i < ruleLength; i++) {\r\n        const rule = rules[i]\r\n        const start = newState(atn, rule, undefined, {\r\n            type: ATN_RULE_START\r\n        })\r\n        const stop = newState(atn, 
rule, undefined, {\r\n            type: ATN_RULE_STOP\r\n        })\r\n        start.stop = stop\r\n        atn.ruleToStartState.set(rule, start)\r\n        atn.ruleToStopState.set(rule, stop)\r\n    }\r\n}\r\n\r\nfunction atom(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    production: IProduction\r\n): ATNHandle | undefined {\r\n    if (production instanceof Terminal) {\r\n        return tokenRef(atn, rule, production.terminalType, production)\r\n    } else if (production instanceof NonTerminal) {\r\n        return ruleRef(atn, rule, production)\r\n    } else if (production instanceof Alternation) {\r\n        return alternation(atn, rule, production)\r\n    } else if (production instanceof Option) {\r\n        return option(atn, rule, production)\r\n    } else if (production instanceof Repetition) {\r\n        return repetition(atn, rule, production)\r\n    } else if (production instanceof RepetitionWithSeparator) {\r\n        return repetitionSep(atn, rule, production)\r\n    } else if (production instanceof RepetitionMandatory) {\r\n        return repetitionMandatory(atn, rule, production)\r\n    } else if (production instanceof RepetitionMandatoryWithSeparator) {\r\n        return repetitionMandatorySep(atn, rule, production)\r\n    } else {\r\n        return block(atn, rule, production as Alternative)\r\n    }\r\n}\r\n\r\nfunction repetition(atn: ATN, rule: Rule, repetition: Repetition): ATNHandle {\r\n    const starState = newState(atn, rule, repetition, {\r\n        type: ATN_STAR_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, starState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        starState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    return star(atn, rule, repetition, handle)\r\n}\r\n\r\nfunction repetitionSep(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionWithSeparator\r\n): ATNHandle {\r\n    const starState = newState(atn, rule, repetition, {\r\n        type: ATN_STAR_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, starState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        starState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    const sep = tokenRef(atn, rule, repetition.separator, repetition)\r\n    return star(atn, rule, repetition, handle, sep)\r\n}\r\n\r\nfunction repetitionMandatory(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionMandatory\r\n): ATNHandle {\r\n    const plusState = newState(atn, rule, repetition, {\r\n        type: ATN_PLUS_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, plusState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        plusState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    return plus(atn, rule, repetition, handle)\r\n}\r\n\r\nfunction repetitionMandatorySep(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionMandatoryWithSeparator\r\n): ATNHandle {\r\n    const plusState = newState(atn, rule, repetition, {\r\n        type: ATN_PLUS_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, plusState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        plusState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    const sep = tokenRef(atn, rule, repetition.separator, repetition)\r\n    return plus(atn, rule, repetition, handle, sep)\r\n}\r\n\r\nfunction alternation(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    alternation: Alternation\r\n): ATNHandle 
{\r\n    const start = newState(atn, rule, alternation, {\r\n        type: ATN_BASIC\r\n    })\r\n    defineDecisionState(atn, start)\r\n    const alts = map(alternation.definition, (e) => atom(atn, rule, e))\r\n    const handle = makeAlts(atn, rule, start, alternation, ...alts)\r\n    return handle\r\n}\r\n\r\nfunction option(atn: ATN, rule: Rule, option: Option): ATNHandle {\r\n    const start = newState(atn, rule, option, {\r\n        type: ATN_BASIC\r\n    })\r\n    defineDecisionState(atn, start)\r\n    const handle = makeAlts(atn, rule, start, option, block(atn, rule, option))\r\n    return optional(atn, rule, option, handle)\r\n}\r\n\r\nfunction block(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    block: { definition: IProduction[] }\r\n): ATNHandle | undefined {\r\n    const handles = filter(\r\n        map(block.definition, (e) => atom(atn, rule, e)),\r\n        (e) => e !== undefined\r\n    ) as ATNHandle[]\r\n    if (handles.length === 1) {\r\n        return handles[0]\r\n    } else if (handles.length === 0) {\r\n        return undefined\r\n    } else {\r\n        return makeBlock(atn, handles)\r\n    }\r\n}\r\n\r\nfunction plus(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    plus: IProductionWithOccurrence,\r\n    handle: ATNHandle,\r\n    sep?: ATNHandle\r\n): ATNHandle {\r\n    const blkStart = handle.left as PlusBlockStartState\r\n    const blkEnd = handle.right\r\n\r\n    const loop = newState(atn, rule, plus, {\r\n        type: ATN_PLUS_LOOP_BACK\r\n    })\r\n    defineDecisionState(atn, loop)\r\n    const end = newState(atn, rule, plus, {\r\n        type: ATN_LOOP_END\r\n    })\r\n    blkStart.loopback = loop\r\n    end.loopback = loop\r\n    atn.decisionMap[buildATNKey(rule, sep ? 'RepetitionMandatoryWithSeparator' : 'RepetitionMandatory', plus.idx)] = loop;\r\n    epsilon(blkEnd, loop) // block can see loop back\r\n\r\n    // Depending on whether we have a separator we put the exit transition at index 1 or 0\r\n    // This influences the chosen option in the lookahead DFA\r\n    if (sep === undefined) {\r\n        epsilon(loop, blkStart) // loop back to start\r\n        epsilon(loop, end) // exit\r\n    } else {\r\n        epsilon(loop, end) // exit\r\n        // loop back to start with separator\r\n        epsilon(loop, sep.left)\r\n        epsilon(sep.right, blkStart)\r\n    }\r\n\r\n    return {\r\n        left: blkStart,\r\n        right: end\r\n    }\r\n}\r\n\r\nfunction star(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    star: IProductionWithOccurrence,\r\n    handle: ATNHandle,\r\n    sep?: ATNHandle\r\n): ATNHandle {\r\n    const start = handle.left\r\n    const end = handle.right\r\n\r\n    const entry = newState(atn, rule, star, {\r\n        type: ATN_STAR_LOOP_ENTRY\r\n    })\r\n    defineDecisionState(atn, entry)\r\n    const loopEnd = newState(atn, rule, star, {\r\n        type: ATN_LOOP_END\r\n    })\r\n    const loop = newState(atn, rule, star, {\r\n        type: ATN_STAR_LOOP_BACK\r\n    })\r\n    entry.loopback = loop\r\n    loopEnd.loopback = loop\r\n\r\n    epsilon(entry, start) // loop enter edge (alt 2)\r\n    epsilon(entry, loopEnd) // bypass loop edge (alt 1)\r\n    epsilon(end, loop) // block end hits loop back\r\n\r\n    if (sep !== undefined) {\r\n        epsilon(loop, loopEnd) // end loop\r\n        // loop back to start of handle using separator\r\n        epsilon(loop, sep.left)\r\n        epsilon(sep.right, start)\r\n    } else {\r\n        epsilon(loop, entry) // loop back to entry/exit decision\r\n    }\r\n\r\n    atn.decisionMap[buildATNKey(rule, 
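[Editor's illustration] The star helper embedded above wires a Kleene-star repetition out of plain states and epsilon edges: an entry decision that either enters the block or bypasses it, a loop-back state reached after each iteration (optionally via a separator), and a loop-end state that becomes the exit. The following toy sketch reproduces that wiring; State, Handle and epsilon here are made-up minimal types, not the chevrotain-allstar ones.

// Toy model of the star() wiring shown in the embedded source.
interface ToyState { name: string; edges: ToyState[] }
interface ToyHandle { left: ToyState; right: ToyState }

const mkState = (name: string): ToyState => ({ name, edges: [] });
const eps = (from: ToyState, to: ToyState): void => { from.edges.push(to); };

// handle = the sub-graph built for the repeated block
function starLoop(handle: ToyHandle, sep?: ToyHandle): ToyHandle {
    const entry = mkState("STAR_LOOP_ENTRY"); // decision: enter or bypass
    const loopEnd = mkState("LOOP_END");      // exit of the whole repetition
    const loop = mkState("STAR_LOOP_BACK");   // reached after each iteration

    eps(entry, handle.left);  // alt: take one more iteration
    eps(entry, loopEnd);      // alt: bypass the loop entirely
    eps(handle.right, loop);  // block end feeds the loop-back state

    if (sep) {
        eps(loop, loopEnd);            // exit after an iteration
        eps(loop, sep.left);           // consume the separator...
        eps(sep.right, handle.left);   // ...then loop back into the block
    } else {
        eps(loop, entry);              // re-take the enter/bypass decision
    }
    return { left: entry, right: loopEnd };
}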
sep ? 'RepetitionWithSeparator' : 'Repetition', star.idx)] = entry;\r\n    return {\r\n        left: entry,\r\n        right: loopEnd\r\n    }\r\n}\r\n\r\nfunction optional(atn: ATN, rule: Rule, optional: Option, handle: ATNHandle): ATNHandle {\r\n    const start = handle.left as DecisionState\r\n    const end = handle.right\r\n\r\n    epsilon(start, end)\r\n\r\n    atn.decisionMap[buildATNKey(rule, 'Option', optional.idx)] = start;\r\n    return handle\r\n}\r\n\r\nfunction defineDecisionState(atn: ATN, state: DecisionState): number {\r\n    atn.decisionStates.push(state)\r\n    state.decision = atn.decisionStates.length - 1\r\n    return state.decision\r\n}\r\n\r\nfunction makeAlts(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    start: BlockStartState,\r\n    production: IProductionWithOccurrence,\r\n    ...alts: (ATNHandle | undefined)[]\r\n): ATNHandle {\r\n    const end = newState(atn, rule, production, {\r\n        type: ATN_BLOCK_END,\r\n        start\r\n    })\r\n    start.end = end\r\n    for (const alt of alts) {\r\n        if (alt !== undefined) {\r\n            // hook alts up to decision block\r\n            epsilon(start, alt.left)\r\n            epsilon(alt.right, end)\r\n        } else {\r\n            epsilon(start, end)\r\n        }\r\n    }\r\n\r\n    const handle: ATNHandle = {\r\n        left: start as ATNState,\r\n        right: end\r\n    }\r\n    atn.decisionMap[buildATNKey(rule, getProdType(production), production.idx)] = start\r\n    return handle\r\n}\r\n\r\nfunction getProdType(production: IProduction): LookaheadProductionType {\r\n    if (production instanceof Alternation) {\r\n        return 'Alternation';\r\n    } else if (production instanceof Option) {\r\n        return 'Option';\r\n    } else if (production instanceof Repetition) {\r\n        return 'Repetition';\r\n    } else if (production instanceof RepetitionWithSeparator) {\r\n        return 'RepetitionWithSeparator';\r\n    } else if (production instanceof RepetitionMandatory) {\r\n        return 'RepetitionMandatory';\r\n    } else if (production instanceof RepetitionMandatoryWithSeparator) {\r\n        return 'RepetitionMandatoryWithSeparator';\r\n    } else {\r\n        throw new Error('Invalid production type encountered');\r\n    }\r\n}\r\n\r\nfunction makeBlock(atn: ATN, alts: ATNHandle[]): ATNHandle {\r\n    const altsLength = alts.length\r\n    for (let i = 0; i < altsLength - 1; i++) {\r\n        const handle = alts[i]\r\n        let transition: Transition | undefined\r\n        if (handle.left.transitions.length === 1) {\r\n            transition = handle.left.transitions[0]\r\n        }\r\n        const isRuleTransition = transition instanceof RuleTransition\r\n        const ruleTransition = transition as RuleTransition\r\n        const next = alts[i + 1].left\r\n        if (\r\n            handle.left.type === ATN_BASIC &&\r\n            handle.right.type === ATN_BASIC &&\r\n            transition !== undefined &&\r\n            ((isRuleTransition && ruleTransition.followState === handle.right) ||\r\n                transition.target === handle.right)\r\n        ) {\r\n            // we can avoid epsilon edge to next element\r\n            if (isRuleTransition) {\r\n                ruleTransition.followState = next\r\n            } else {\r\n                transition.target = next\r\n            }\r\n            removeState(atn, handle.right) // we skipped over this state\r\n        } else {\r\n            // need epsilon if previous block's right end node is complex\r\n            
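[Editor's illustration] makeAlts, optional, plus and star all register their decision state in atn.decisionMap under a key derived from the rule, the production type and its occurrence index, so the lookahead builder can later find the same decision by those coordinates. The sketch below only shows the registry pattern; the key format is a stand-in, not the actual output of buildATNKey.

// Illustrative registry keyed by (rule name, production type, occurrence index).
type ToyProdType = "Alternation" | "Option" | "Repetition" | "RepetitionWithSeparator"
    | "RepetitionMandatory" | "RepetitionMandatoryWithSeparator";

interface ToyDecision { decision: number }

class DecisionRegistry {
    private decisions: ToyDecision[] = [];
    private map: Record<string, ToyDecision> = {};

    // hypothetical key format, for illustration only
    private key(rule: string, type: ToyProdType, idx: number): string {
        return `${rule}_${type}_${idx}`;
    }
    register(rule: string, type: ToyProdType, idx: number): ToyDecision {
        const state: ToyDecision = { decision: this.decisions.length };
        this.decisions.push(state);                 // decision index = insertion order
        this.map[this.key(rule, type, idx)] = state;
        return state;
    }
    lookup(rule: string, type: ToyProdType, idx: number): ToyDecision | undefined {
        return this.map[this.key(rule, type, idx)];
    }
}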
epsilon(handle.right, next)\r\n        }\r\n    }\r\n\r\n    const first = alts[0]\r\n    const last = alts[altsLength - 1]\r\n    return {\r\n        left: first.left,\r\n        right: last.right\r\n    }\r\n}\r\n\r\nfunction tokenRef(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    tokenType: TokenType,\r\n    production: IProductionWithOccurrence\r\n): ATNHandle {\r\n    const left = newState(atn, rule, production, {\r\n        type: ATN_BASIC\r\n    })\r\n    const right = newState(atn, rule, production, {\r\n        type: ATN_BASIC\r\n    })\r\n    addTransition(left, new AtomTransition(right, tokenType))\r\n    return {\r\n        left,\r\n        right\r\n    }\r\n}\r\n\r\nfunction ruleRef(\r\n    atn: ATN,\r\n    currentRule: Rule,\r\n    nonTerminal: NonTerminal\r\n): ATNHandle {\r\n    const rule = nonTerminal.referencedRule\r\n    const start = atn.ruleToStartState.get(rule)!\r\n    const left = newState(atn, currentRule, nonTerminal, {\r\n        type: ATN_BASIC\r\n    })\r\n    const right = newState(atn, currentRule, nonTerminal, {\r\n        type: ATN_BASIC\r\n    })\r\n\r\n    const call = new RuleTransition(start, rule, right)\r\n    addTransition(left, call)\r\n\r\n    return {\r\n        left,\r\n        right\r\n    }\r\n}\r\n\r\nfunction buildRuleHandle(atn: ATN, rule: Rule, block: ATNHandle): ATNHandle {\r\n    const start = atn.ruleToStartState.get(rule)!\r\n    epsilon(start, block.left)\r\n    const stop = atn.ruleToStopState.get(rule)!\r\n    epsilon(block.right, stop)\r\n    const handle: ATNHandle = {\r\n        left: start,\r\n        right: stop\r\n    }\r\n    return handle\r\n}\r\n\r\nfunction epsilon(a: ATNBaseState, b: ATNBaseState): void {\r\n    const transition = new EpsilonTransition(b as ATNState)\r\n    addTransition(a, transition)\r\n}\r\n\r\nfunction newState(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    production: IProductionWithOccurrence | undefined,\r\n    partial: Partial\r\n): T {\r\n    const t: T = {\r\n        atn,\r\n        production,\r\n        epsilonOnlyTransitions: false,\r\n        rule,\r\n        transitions: [],\r\n        nextTokenWithinRule: [],\r\n        stateNumber: atn.states.length,\r\n        ...partial\r\n    } as unknown as T\r\n    atn.states.push(t)\r\n    return t\r\n}\r\n\r\nfunction addTransition(state: ATNBaseState, transition: Transition) {\r\n    // A single ATN state can only contain epsilon transitions or non-epsilon transitions\r\n    // Because they are never mixed, only setting the property for the first transition is fine\r\n    if (state.transitions.length === 0) {\r\n        state.epsilonOnlyTransitions = transition.isEpsilon()\r\n    }\r\n    state.transitions.push(transition)\r\n}\r\n\r\nfunction removeState(atn: ATN, state: ATNState): void {\r\n    atn.states.splice(atn.states.indexOf(state), 1)\r\n}\r\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport map from \"lodash-es/map.js\"\r\nimport { ATNState, DecisionState } from \"./atn.js\"\r\n\r\nexport interface DFA {\r\n  start?: DFAState\r\n  states: Record\r\n  decision: number\r\n  atnStartState: DecisionState\r\n}\r\n\r\nexport interface DFAState {\r\n  configs: ATNConfigSet\r\n  edges: Record\r\n  isAcceptState: boolean\r\n  prediction: 
number\r\n}\r\n\r\nexport const DFA_ERROR = {} as DFAState\r\n\r\nexport interface ATNConfig {\r\n  state: ATNState\r\n  alt: number\r\n  stack: ATNState[]\r\n}\r\n\r\nexport class ATNConfigSet {\r\n  private map: Record = {}\r\n  private configs: ATNConfig[] = []\r\n\r\n  uniqueAlt: number | undefined\r\n\r\n  get size(): number {\r\n    return this.configs.length\r\n  }\r\n\r\n  finalize(): void {\r\n    // Empties the map to free up memory\r\n    this.map = {}\r\n  }\r\n\r\n  add(config: ATNConfig): void {\r\n    const key = getATNConfigKey(config)\r\n    // Only add configs which don't exist in our map already\r\n    // While this does not influence the actual algorithm, adding them anyway would massively increase memory consumption\r\n    if (!(key in this.map)) {\r\n      this.map[key] = this.configs.length\r\n      this.configs.push(config)\r\n    }\r\n  }\r\n\r\n  get elements(): readonly ATNConfig[] {\r\n    return this.configs\r\n  }\r\n\r\n  get alts(): number[] {\r\n    return map(this.configs, (e) => e.alt)\r\n  }\r\n\r\n  get key(): string {\r\n    let value = \"\"\r\n    for (const k in this.map) {\r\n      value += k + \":\"\r\n    }\r\n    return value\r\n  }\r\n}\r\n\r\nexport function getATNConfigKey(config: ATNConfig, alt = true) {\r\n  return `${alt ? `a${config.alt}` : \"\"}s${\r\n    config.state.stateNumber\r\n  }:${config.stack.map((e) => e.stateNumber.toString()).join(\"_\")}`\r\n}\r\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport {\r\n    IToken,\r\n    TokenType,\r\n    tokenMatcher,\r\n    tokenLabel,\r\n    Rule,\r\n    IProductionWithOccurrence,\r\n    NonTerminal,\r\n    Alternation,\r\n    Option,\r\n    RepetitionMandatory,\r\n    RepetitionMandatoryWithSeparator,\r\n    RepetitionWithSeparator,\r\n    Repetition,\r\n    Terminal,\r\n    BaseParser,\r\n    LLkLookaheadStrategy,\r\n    ILookaheadValidationError,\r\n    IOrAlt,\r\n    getLookaheadPaths,\r\n    OptionalProductionType\r\n} from \"chevrotain\";\r\nimport {\r\n    ATN,\r\n    ATNState,\r\n    ATN_RULE_STOP,\r\n    AtomTransition,\r\n    buildATNKey,\r\n    createATN,\r\n    DecisionState,\r\n    EpsilonTransition,\r\n    RuleTransition,\r\n    Transition\r\n} from \"./atn.js\";\r\nimport {\r\n    ATNConfig,\r\n    ATNConfigSet,\r\n    DFA,\r\n    DFAState,\r\n    DFA_ERROR,\r\n    getATNConfigKey\r\n} from \"./dfa.js\";\r\nimport min from \"lodash-es/min.js\";\r\nimport flatMap from \"lodash-es/flatMap.js\";\r\nimport uniqBy from \"lodash-es/uniqBy.js\";\r\nimport map from \"lodash-es/map.js\";\r\nimport flatten from \"lodash-es/flatten.js\";\r\nimport forEach from \"lodash-es/forEach.js\";\r\nimport isEmpty from \"lodash-es/isEmpty.js\";\r\nimport reduce from \"lodash-es/reduce.js\";\r\n\r\ntype DFACache = (predicateSet: PredicateSet) => DFA\r\n\r\nexport type AmbiguityReport = (message: string) => void;\r\n\r\nfunction createDFACache(startState: DecisionState, decision: number): DFACache {\r\n    const map: Record = {}\r\n    return (predicateSet) => {\r\n        const key = predicateSet.toString()\r\n        let existing = map[key]\r\n        if (existing !== undefined) {\r\n            return existing\r\n        } else {\r\n            existing = {\r\n                
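[Editor's illustration] ATNConfigSet, defined just above, deduplicates configurations by a string key built from the alternative, the ATN state number and the simulated call stack; duplicates carry no extra information for prediction, they would only inflate memory. A stand-alone re-implementation of that idea, using a toy config type rather than the library's:

// Minimal sketch of the dedup-by-key idea in ATNConfigSet.
interface ToyConfig { state: number; alt: number; stack: number[] }

const toyConfigKey = (c: ToyConfig, withAlt = true): string =>
    `${withAlt ? `a${c.alt}` : ""}s${c.state}:${c.stack.join("_")}`;

class ToyConfigSet {
    private seen = new Set<string>();
    private configs: ToyConfig[] = [];

    add(config: ToyConfig): void {
        const key = toyConfigKey(config);
        if (!this.seen.has(key)) {     // identical configs are silently ignored
            this.seen.add(key);
            this.configs.push(config);
        }
    }
    get elements(): readonly ToyConfig[] { return this.configs; }
    get alts(): number[] { return this.configs.map(c => c.alt); }
}

// usage
const set = new ToyConfigSet();
set.add({ state: 3, alt: 0, stack: [1] });
set.add({ state: 3, alt: 0, stack: [1] }); // dropped as a duplicate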
atnStartState: startState,\r\n                decision,\r\n                states: {}\r\n            }\r\n            map[key] = existing\r\n            return existing\r\n        }\r\n    }\r\n}\r\n\r\nclass PredicateSet {\r\n    private predicates: boolean[] = []\r\n\r\n    is(index: number): boolean {\r\n        return index >= this.predicates.length || this.predicates[index]\r\n    }\r\n\r\n    set(index: number, value: boolean) {\r\n        this.predicates[index] = value\r\n    }\r\n\r\n    toString(): string {\r\n        let value = \"\"\r\n        const size = this.predicates.length\r\n        for (let i = 0; i < size; i++) {\r\n            value += this.predicates[i] === true ? \"1\" : \"0\"\r\n        }\r\n        return value\r\n    }\r\n}\r\n\r\ninterface AdaptivePredictError {\r\n    tokenPath: IToken[]\r\n    possibleTokenTypes: TokenType[]\r\n    actualToken: IToken\r\n}\r\n\r\nconst EMPTY_PREDICATES = new PredicateSet()\r\n\r\nexport interface LLStarLookaheadOptions {\r\n    logging?: AmbiguityReport\r\n}\r\n\r\nexport class LLStarLookaheadStrategy extends LLkLookaheadStrategy {\r\n\r\n    private atn: ATN;\r\n    private dfas: DFACache[];\r\n    private logging: AmbiguityReport;\r\n\r\n    constructor(options?: LLStarLookaheadOptions) {\r\n        super();\r\n        this.logging = options?.logging ?? ((message) => console.log(message));\r\n    }\r\n\r\n    override initialize(options: { rules: Rule[] }): void {\r\n        this.atn = createATN(options.rules);\r\n        this.dfas = initATNSimulator(this.atn);\r\n    }\r\n\r\n    override validateAmbiguousAlternationAlternatives(): ILookaheadValidationError[] {\r\n        return [];\r\n    }\r\n\r\n    override validateEmptyOrAlternatives(): ILookaheadValidationError[] {\r\n        return [];\r\n    }\r\n\r\n    override buildLookaheadForAlternation(options: {\r\n        prodOccurrence: number;\r\n        rule: Rule;\r\n        maxLookahead: number;\r\n        hasPredicates: boolean;\r\n        dynamicTokensEnabled: boolean\r\n    }): (this: BaseParser, orAlts?: IOrAlt[] | undefined) => number | undefined {\r\n        const { prodOccurrence, rule, hasPredicates, dynamicTokensEnabled } = options;\r\n        const dfas = this.dfas;\r\n        const logging = this.logging;\r\n        const key = buildATNKey(rule, 'Alternation', prodOccurrence);\r\n        const decisionState = this.atn.decisionMap[key];\r\n        const decisionIndex = decisionState.decision;\r\n        const partialAlts: (TokenType | undefined)[][] = map(\r\n            getLookaheadPaths({\r\n                maxLookahead: 1,\r\n                occurrence: prodOccurrence,\r\n                prodType: \"Alternation\",\r\n                rule: rule\r\n            }),\r\n            (currAlt) => map(currAlt, (path) => path[0])\r\n        )\r\n\r\n        if (isLL1Sequence(partialAlts, false) && !dynamicTokensEnabled) {\r\n            const choiceToAlt = reduce(\r\n                partialAlts,\r\n                (result, currAlt, idx) => {\r\n                    forEach(currAlt, (currTokType) => {\r\n                        if (currTokType) {\r\n                            result[currTokType.tokenTypeIdx!] 
= idx\r\n                            forEach(currTokType.categoryMatches!, (currExtendingType) => {\r\n                                result[currExtendingType] = idx\r\n                            })\r\n                        }\r\n                    })\r\n                    return result\r\n                },\r\n                {} as Record\r\n            )\r\n\r\n            if (hasPredicates) {\r\n                return function (this: BaseParser, orAlts) {\r\n                    const nextToken = this.LA(1)\r\n                    const prediction: number | undefined = choiceToAlt[nextToken.tokenTypeIdx]\r\n                    if (orAlts !== undefined && prediction !== undefined) {\r\n                        const gate = orAlts[prediction]?.GATE\r\n                        if (gate !== undefined && gate.call(this) === false) {\r\n                            return undefined;\r\n                        }\r\n                    }\r\n                    return prediction\r\n                }\r\n            } else {\r\n                return function (this: BaseParser): number | undefined {\r\n                    const nextToken = this.LA(1)\r\n                    return choiceToAlt[nextToken.tokenTypeIdx];\r\n                }\r\n            }\r\n        } else if (hasPredicates) {\r\n            return function (this: BaseParser, orAlts) {\r\n                const predicates = new PredicateSet()\r\n                const length = orAlts === undefined ? 0 : orAlts.length\r\n                for (let i = 0; i < length; i++) {\r\n                    const gate = orAlts?.[i].GATE\r\n                    predicates.set(i, gate === undefined || gate.call(this))\r\n                }\r\n                const result = adaptivePredict.call(this, dfas, decisionIndex, predicates, logging);\r\n                return typeof result === 'number' ? result : undefined;\r\n            }\r\n        } else {\r\n            return function (this: BaseParser) {\r\n                const result = adaptivePredict.call(this, dfas, decisionIndex, EMPTY_PREDICATES, logging);\r\n                return typeof result === 'number' ? 
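[Editor's illustration] When every alternative is distinguishable by its first token (isLL1Sequence) and dynamic tokens are disabled, the strategy above skips ATN simulation entirely and compiles the decision into a flat lookup from tokenTypeIdx (plus category matches) to the alternative index. A simplified sketch of building that table, with a toy token type instead of chevrotain's:

// Toy token type: a unique index plus the indices of the categories it matches.
interface ToyTokType { tokenTypeIdx: number; categoryMatches: number[] }

// Build a tokenTypeIdx -> alternative-index table from the first tokens of
// each alternative (undefined entries, i.e. epsilon, are simply skipped).
function buildChoiceToAlt(firstTokens: (ToyTokType | undefined)[][]): Record<number, number> {
    const table: Record<number, number> = {};
    firstTokens.forEach((alt, altIdx) => {
        for (const tok of alt) {
            if (!tok) continue;
            table[tok.tokenTypeIdx] = altIdx;
            for (const cat of tok.categoryMatches) table[cat] = altIdx;
        }
    });
    return table;
}

// usage: alternative 0 starts with token 7, alternative 1 with token 9
const choiceToAltTable = buildChoiceToAlt([
    [{ tokenTypeIdx: 7, categoryMatches: [] }],
    [{ tokenTypeIdx: 9, categoryMatches: [] }]
]);
// at parse time the prediction is just: choiceToAltTable[this.LA(1).tokenTypeIdx]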
result : undefined;\r\n            }\r\n        }\r\n    }\r\n\r\n    override buildLookaheadForOptional(options: {\r\n        prodOccurrence: number;\r\n        prodType: OptionalProductionType;\r\n        rule: Rule;\r\n        maxLookahead: number;\r\n        dynamicTokensEnabled: boolean\r\n    }): (this: BaseParser) => boolean {\r\n        const { prodOccurrence, rule, prodType, dynamicTokensEnabled } = options;\r\n        const dfas = this.dfas;\r\n        const logging = this.logging;\r\n        const key = buildATNKey(rule, prodType, prodOccurrence);\r\n        const decisionState = this.atn.decisionMap[key];\r\n        const decisionIndex = decisionState.decision;\r\n        const alts = map(\r\n            getLookaheadPaths({\r\n                maxLookahead: 1,\r\n                occurrence: prodOccurrence,\r\n                prodType,\r\n                rule\r\n            }),\r\n            (e) => {\r\n              return map(e, (g) => g[0])\r\n            }\r\n          )\r\n        \r\n          if (isLL1Sequence(alts) && alts[0][0] && !dynamicTokensEnabled) {\r\n            const alt = alts[0]\r\n            const singleTokensTypes = flatten(alt)\r\n        \r\n            if (\r\n              singleTokensTypes.length === 1 &&\r\n              isEmpty(singleTokensTypes[0].categoryMatches)\r\n            ) {\r\n              const expectedTokenType = singleTokensTypes[0]\r\n              const expectedTokenUniqueKey = expectedTokenType.tokenTypeIdx\r\n        \r\n              return function (this: BaseParser): boolean {\r\n                return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey\r\n              }\r\n            } else {\r\n              const choiceToAlt = reduce(\r\n                singleTokensTypes,\r\n                (result, currTokType) => {\r\n                  if (currTokType !== undefined) {\r\n                    result[currTokType.tokenTypeIdx!] = true\r\n                    forEach(currTokType.categoryMatches, (currExtendingType) => {\r\n                      result[currExtendingType] = true\r\n                    })\r\n                  }\r\n                  return result\r\n                },\r\n                {} as Record\r\n              )\r\n        \r\n              return function (this: BaseParser): boolean {\r\n                const nextToken = this.LA(1)\r\n                return choiceToAlt[nextToken.tokenTypeIdx] === true\r\n              }\r\n            }\r\n          }\r\n          return function (this: BaseParser) {\r\n            const result = adaptivePredict.call(this, dfas, decisionIndex, EMPTY_PREDICATES, logging)\r\n              return typeof result === \"object\" ? 
false : result === 0;\r\n          }\r\n    }\r\n\r\n}\r\n\r\nfunction isLL1Sequence(sequences: (TokenType | undefined)[][], allowEmpty = true): boolean {\r\n    const fullSet = new Set()\r\n\r\n    for (const alt of sequences) {\r\n        const altSet = new Set()\r\n        for (const tokType of alt) {\r\n            if (tokType === undefined) {\r\n                if (allowEmpty) {\r\n                    // Epsilon production encountered\r\n                    break\r\n                } else {\r\n                    return false;\r\n                }\r\n            }\r\n            const indices = [tokType.tokenTypeIdx!].concat(tokType.categoryMatches!)\r\n            for (const index of indices) {\r\n                if (fullSet.has(index)) {\r\n                    if (!altSet.has(index)) {\r\n                        return false\r\n                    }\r\n                } else {\r\n                    fullSet.add(index)\r\n                    altSet.add(index)\r\n                }\r\n            }\r\n        }\r\n    }\r\n    return true\r\n}\r\n\r\nfunction initATNSimulator(atn: ATN): DFACache[] {\r\n    const decisionLength = atn.decisionStates.length\r\n    const decisionToDFA: DFACache[] = Array(decisionLength)\r\n    for (let i = 0; i < decisionLength; i++) {\r\n        decisionToDFA[i] = createDFACache(atn.decisionStates[i], i)\r\n    }\r\n    return decisionToDFA;\r\n}\r\n\r\nfunction adaptivePredict(\r\n    this: BaseParser,\r\n    dfaCaches: DFACache[],\r\n    decision: number,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): number | AdaptivePredictError {\r\n    const dfa = dfaCaches[decision](predicateSet)\r\n    let start = dfa.start\r\n    if (start === undefined) {\r\n        const closure = computeStartState(dfa.atnStartState as ATNState)\r\n        start = addDFAState(dfa, newDFAState(closure))\r\n        dfa.start = start\r\n    }\r\n\r\n    const alt = performLookahead.apply(this, [dfa, start, predicateSet, logging])\r\n    return alt\r\n}\r\n\r\nfunction performLookahead(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    s0: DFAState,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): number | AdaptivePredictError {\r\n    let previousD = s0\r\n\r\n    let i = 1\r\n    const path: IToken[] = []\r\n    let t = this.LA(i++)\r\n\r\n    while (true) {\r\n        let d = getExistingTargetState(previousD, t)\r\n        if (d === undefined) {\r\n            d = computeLookaheadTarget.apply(this, [dfa, previousD, t, i, predicateSet, logging])\r\n        }\r\n\r\n        if (d === DFA_ERROR) {\r\n            return buildAdaptivePredictError(path, previousD, t)\r\n        }\r\n\r\n        if (d.isAcceptState === true) {\r\n            return d.prediction\r\n        }\r\n\r\n        previousD = d\r\n        path.push(t)\r\n        t = this.LA(i++)\r\n    }\r\n}\r\n\r\nfunction computeLookaheadTarget(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    previousD: DFAState,\r\n    token: IToken,\r\n    lookahead: number,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): DFAState {\r\n    const reach = computeReachSet(previousD.configs, token, predicateSet)\r\n    if (reach.size === 0) {\r\n        addDFAEdge(dfa, previousD, token, DFA_ERROR)\r\n        return DFA_ERROR\r\n    }\r\n\r\n    let newState = newDFAState(reach)\r\n    const predictedAlt = getUniqueAlt(reach, predicateSet)\r\n\r\n    if (predictedAlt !== undefined) {\r\n        newState.isAcceptState = true\r\n        newState.prediction = 
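[Editor's illustration] performLookahead, shown above, walks the lazily built DFA: for each lookahead token it follows a cached edge, computes a new target from the ATN on a cache miss, stops at an accept state with that state's prediction, and returns an error descriptor when no alternative remains viable. The toy walk below mirrors only the loop shape (the DFA here is precomputed, and the error case is a sentinel), not the library's API.

// Toy DFA walk illustrating the shape of performLookahead.
interface ToyDfaState {
    edges: Record<number, ToyDfaState>;
    isAccept: boolean;
    prediction: number;
}
const TOY_DFA_ERROR: ToyDfaState = { edges: {}, isAccept: false, prediction: -1 };

function toyPredict(start: ToyDfaState, lookahead: (i: number) => number): number | undefined {
    let current = start;
    let i = 1;
    while (true) {
        const tokenIdx = lookahead(i++);
        // The real implementation computes a missing target from the ATN
        // (computeLookaheadTarget); here every edge is already present or an error.
        const next = current.edges[tokenIdx] ?? TOY_DFA_ERROR;
        if (next === TOY_DFA_ERROR) return undefined;  // no viable alternative
        if (next.isAccept) return next.prediction;     // decision made
        current = next;
    }
}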
predictedAlt\r\n        newState.configs.uniqueAlt = predictedAlt\r\n    } else if (hasConflictTerminatingPrediction(reach)) {\r\n        const prediction = min(reach.alts)!\r\n        newState.isAcceptState = true\r\n        newState.prediction = prediction\r\n        newState.configs.uniqueAlt = prediction\r\n        reportLookaheadAmbiguity.apply(this, [dfa, lookahead, reach.alts, logging])\r\n    }\r\n\r\n    newState = addDFAEdge(dfa, previousD, token, newState)\r\n    return newState\r\n}\r\n\r\nfunction reportLookaheadAmbiguity(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    lookahead: number,\r\n    ambiguityIndices: number[],\r\n    logging: AmbiguityReport\r\n) {\r\n    const prefixPath: TokenType[] = []\r\n    for (let i = 1; i <= lookahead; i++) {\r\n        prefixPath.push(this.LA(i).tokenType)\r\n    }\r\n    const atnState = dfa.atnStartState\r\n    const topLevelRule = atnState.rule\r\n    const production = atnState.production\r\n    const message = buildAmbiguityError({\r\n        topLevelRule,\r\n        ambiguityIndices,\r\n        production,\r\n        prefixPath\r\n    })\r\n    logging(message)\r\n}\r\n\r\nfunction buildAmbiguityError(options: {\r\n    topLevelRule: Rule\r\n    prefixPath: TokenType[]\r\n    ambiguityIndices: number[]\r\n    production: IProductionWithOccurrence\r\n}): string {\r\n    const pathMsg = map(options.prefixPath, (currtok) =>\r\n        tokenLabel(currtok)\r\n    ).join(\", \")\r\n    const occurrence =\r\n        options.production.idx === 0 ? \"\" : options.production.idx\r\n    let currMessage =\r\n        `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(\r\n            \", \"\r\n        )}> in <${getProductionDslName(options.production)}${occurrence}>` +\r\n        ` inside <${options.topLevelRule.name}> Rule,\\n` +\r\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n`\r\n\r\n    currMessage =\r\n        currMessage +\r\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\\n` +\r\n        `For Further details.`\r\n    return currMessage\r\n}\r\n\r\nfunction getProductionDslName(prod: IProductionWithOccurrence): string {\r\n    if (prod instanceof NonTerminal) {\r\n        return \"SUBRULE\"\r\n    } else if (prod instanceof Option) {\r\n        return \"OPTION\"\r\n    } else if (prod instanceof Alternation) {\r\n        return \"OR\"\r\n    } else if (prod instanceof RepetitionMandatory) {\r\n        return \"AT_LEAST_ONE\"\r\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\r\n        return \"AT_LEAST_ONE_SEP\"\r\n    } else if (prod instanceof RepetitionWithSeparator) {\r\n        return \"MANY_SEP\"\r\n    } else if (prod instanceof Repetition) {\r\n        return \"MANY\"\r\n    } else if (prod instanceof Terminal) {\r\n        return \"CONSUME\"\r\n    } else {\r\n        throw Error(\"non exhaustive match\")\r\n    }\r\n}\r\n\r\nfunction buildAdaptivePredictError(\r\n    path: IToken[],\r\n    previous: DFAState,\r\n    current: IToken\r\n): AdaptivePredictError {\r\n    const nextTransitions = flatMap(\r\n        previous.configs.elements,\r\n        (e) => e.state.transitions\r\n    )\r\n    const nextTokenTypes = uniqBy(\r\n        nextTransitions\r\n            .filter((e): e is AtomTransition => e instanceof AtomTransition)\r\n            .map((e) => e.tokenType),\r\n        (e) => e.tokenTypeIdx\r\n    )\r\n    return {\r\n        actualToken: current,\r\n        possibleTokenTypes: nextTokenTypes,\r\n     
   tokenPath: path\r\n    }\r\n}\r\n\r\nfunction getExistingTargetState(\r\n    state: DFAState,\r\n    token: IToken\r\n): DFAState | undefined {\r\n    return state.edges[token.tokenTypeIdx]\r\n}\r\n\r\nfunction computeReachSet(\r\n    configs: ATNConfigSet,\r\n    token: IToken,\r\n    predicateSet: PredicateSet\r\n): ATNConfigSet {\r\n    const intermediate = new ATNConfigSet()\r\n    const skippedStopStates: ATNConfig[] = []\r\n\r\n    for (const c of configs.elements) {\r\n        if (predicateSet.is(c.alt) === false) {\r\n            continue\r\n        }\r\n        if (c.state.type === ATN_RULE_STOP) {\r\n            skippedStopStates.push(c)\r\n            continue\r\n        }\r\n        const transitionLength = c.state.transitions.length\r\n        for (let i = 0; i < transitionLength; i++) {\r\n            const transition = c.state.transitions[i]\r\n            const target = getReachableTarget(transition, token)\r\n            if (target !== undefined) {\r\n                intermediate.add({\r\n                    state: target,\r\n                    alt: c.alt,\r\n                    stack: c.stack\r\n                })\r\n            }\r\n        }\r\n    }\r\n\r\n    let reach: ATNConfigSet | undefined\r\n\r\n    if (skippedStopStates.length === 0 && intermediate.size === 1) {\r\n        reach = intermediate\r\n    }\r\n\r\n    if (reach === undefined) {\r\n        reach = new ATNConfigSet()\r\n        for (const c of intermediate.elements) {\r\n            closure(c, reach)\r\n        }\r\n    }\r\n\r\n    if (skippedStopStates.length > 0 && !hasConfigInRuleStopState(reach)) {\r\n        for (const c of skippedStopStates) {\r\n            reach.add(c)\r\n        }\r\n    }\r\n\r\n    return reach\r\n}\r\n\r\nfunction getReachableTarget(\r\n    transition: Transition,\r\n    token: IToken\r\n): ATNState | undefined {\r\n    if (\r\n        transition instanceof AtomTransition &&\r\n        tokenMatcher(token, transition.tokenType)\r\n    ) {\r\n        return transition.target\r\n    }\r\n    return undefined\r\n}\r\n\r\nfunction getUniqueAlt(\r\n    configs: ATNConfigSet,\r\n    predicateSet: PredicateSet\r\n): number | undefined {\r\n    let alt: number | undefined\r\n    for (const c of configs.elements) {\r\n        if (predicateSet.is(c.alt) === true) {\r\n            if (alt === undefined) {\r\n                alt = c.alt\r\n            } else if (alt !== c.alt) {\r\n                return undefined\r\n            }\r\n        }\r\n    }\r\n    return alt\r\n}\r\n\r\nfunction newDFAState(closure: ATNConfigSet): DFAState {\r\n    return {\r\n        configs: closure,\r\n        edges: {},\r\n        isAcceptState: false,\r\n        prediction: -1\r\n    }\r\n}\r\n\r\nfunction addDFAEdge(\r\n    dfa: DFA,\r\n    from: DFAState,\r\n    token: IToken,\r\n    to: DFAState\r\n): DFAState {\r\n    to = addDFAState(dfa, to)\r\n    from.edges[token.tokenTypeIdx] = to\r\n    return to\r\n}\r\n\r\nfunction addDFAState(dfa: DFA, state: DFAState): DFAState {\r\n    if (state === DFA_ERROR) {\r\n        return state\r\n    }\r\n    // Repetitions have the same config set\r\n    // Therefore, storing the key of the config in a map allows us to create a loop in our DFA\r\n    const mapKey = state.configs.key\r\n    const existing = dfa.states[mapKey]\r\n    if (existing !== undefined) {\r\n        return existing\r\n    }\r\n    state.configs.finalize()\r\n    dfa.states[mapKey] = state\r\n    return state\r\n}\r\n\r\nfunction computeStartState(atnState: ATNState): ATNConfigSet 
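[Editor's illustration] addDFAState, visible above, interns every DFA state under the key of its configuration set, so re-deriving the same set (which repetitions do naturally) returns the existing state and closes a loop in the DFA instead of growing it indefinitely. A compact sketch of that interning step with toy types:

// Illustrative interning of DFA states by the key of their config set.
interface InternedState { key: string; isAccept: boolean; prediction: number }
interface ToyDfa { states: Record<string, InternedState> }

function internState(dfa: ToyDfa, state: InternedState): InternedState {
    const existing = dfa.states[state.key];
    if (existing) return existing;   // same config set seen before: reuse it (forms a loop)
    dfa.states[state.key] = state;
    return state;
}

// usage
const toyDfa: ToyDfa = { states: {} };
const a = internState(toyDfa, { key: "a0s3:1", isAccept: false, prediction: -1 });
const b = internState(toyDfa, { key: "a0s3:1", isAccept: false, prediction: -1 });
// a === b, so an edge pointing back at b creates a cycle rather than a new state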
{\r\n    const configs = new ATNConfigSet()\r\n\r\n    const numberOfTransitions = atnState.transitions.length\r\n    for (let i = 0; i < numberOfTransitions; i++) {\r\n        const target = atnState.transitions[i].target\r\n        const config: ATNConfig = {\r\n            state: target,\r\n            alt: i,\r\n            stack: []\r\n        }\r\n        closure(config, configs)\r\n    }\r\n\r\n    return configs\r\n}\r\n\r\nfunction closure(config: ATNConfig, configs: ATNConfigSet): void {\r\n    const p = config.state\r\n\r\n    if (p.type === ATN_RULE_STOP) {\r\n        if (config.stack.length > 0) {\r\n            const atnStack = [...config.stack]\r\n            const followState = atnStack.pop()!\r\n            const followConfig: ATNConfig = {\r\n                state: followState,\r\n                alt: config.alt,\r\n                stack: atnStack\r\n            }\r\n            closure(followConfig, configs)\r\n        } else {\r\n            // Dipping into outer context, simply add the config\r\n            // This will stop computation once every config is at the rule stop state\r\n            configs.add(config)\r\n        }\r\n        return\r\n    }\r\n\r\n    if (!p.epsilonOnlyTransitions) {\r\n        configs.add(config)\r\n    }\r\n\r\n    const transitionLength = p.transitions.length\r\n    for (let i = 0; i < transitionLength; i++) {\r\n        const transition = p.transitions[i]\r\n        const c = getEpsilonTarget(config, transition)\r\n\r\n        if (c !== undefined) {\r\n            closure(c, configs)\r\n        }\r\n    }\r\n}\r\n\r\nfunction getEpsilonTarget(\r\n    config: ATNConfig,\r\n    transition: Transition\r\n): ATNConfig | undefined {\r\n    if (transition instanceof EpsilonTransition) {\r\n        return {\r\n            state: transition.target,\r\n            alt: config.alt,\r\n            stack: config.stack\r\n        }\r\n    } else if (transition instanceof RuleTransition) {\r\n        const stack = [...config.stack, transition.followState]\r\n        return {\r\n            state: transition.target,\r\n            alt: config.alt,\r\n            stack\r\n        }\r\n    }\r\n    return undefined\r\n}\r\n\r\nfunction hasConfigInRuleStopState(configs: ATNConfigSet): boolean {\r\n    for (const c of configs.elements) {\r\n        if (c.state.type === ATN_RULE_STOP) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n\r\nfunction allConfigsInRuleStopStates(configs: ATNConfigSet): boolean {\r\n    for (const c of configs.elements) {\r\n        if (c.state.type !== ATN_RULE_STOP) {\r\n            return false\r\n        }\r\n    }\r\n    return true\r\n}\r\n\r\nfunction hasConflictTerminatingPrediction(configs: ATNConfigSet): boolean {\r\n    if (allConfigsInRuleStopStates(configs)) {\r\n        return true\r\n    }\r\n    const altSets = getConflictingAltSets(configs.elements)\r\n    const heuristic =\r\n        hasConflictingAltSet(altSets) && !hasStateAssociatedWithOneAlt(altSets)\r\n    return heuristic\r\n}\r\n\r\nfunction getConflictingAltSets(\r\n    configs: readonly ATNConfig[]\r\n): Map> {\r\n    const configToAlts = new Map>()\r\n    for (const c of configs) {\r\n        const key = getATNConfigKey(c, false)\r\n        let alts = configToAlts.get(key)\r\n        if (alts === undefined) {\r\n            alts = {}\r\n            configToAlts.set(key, alts)\r\n        }\r\n        alts[c.alt] = true\r\n    }\r\n    return configToAlts\r\n}\r\n\r\nfunction hasConflictingAltSet(\r\n    altSets: Map>\r\n): 
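[Editor's illustration] The closure function above expands a configuration along all epsilon edges: rule-stop states pop the simulated call stack, rule transitions push a follow state onto it, and only states that have non-epsilon transitions are recorded in the result set. The toy closure below keeps just the recursion-with-visited-set pattern over a plain graph and omits the call-stack handling.

// Toy epsilon closure: collect all token-consuming states reachable via epsilon edges.
interface ToyNode { id: number; epsilon: ToyNode[]; token?: number }

function epsilonClosure(start: ToyNode): ToyNode[] {
    const visited = new Set<number>();
    const result: ToyNode[] = [];
    const walk = (node: ToyNode): void => {
        if (visited.has(node.id)) return;           // guard against epsilon cycles
        visited.add(node.id);
        if (node.token !== undefined) result.push(node); // consuming state: record it
        for (const next of node.epsilon) walk(next);
    };
    walk(start);
    return result;
}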
boolean {\r\n    for (const value of Array.from(altSets.values())) {\r\n        if (Object.keys(value).length > 1) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n\r\nfunction hasStateAssociatedWithOneAlt(\r\n    altSets: Map>\r\n): boolean {\r\n    for (const value of Array.from(altSets.values())) {\r\n        if (Object.keys(value).length === 1) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n", "/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\n'use strict';\nexport var DocumentUri;\n(function (DocumentUri) {\n    function is(value) {\n        return typeof value === 'string';\n    }\n    DocumentUri.is = is;\n})(DocumentUri || (DocumentUri = {}));\nexport var URI;\n(function (URI) {\n    function is(value) {\n        return typeof value === 'string';\n    }\n    URI.is = is;\n})(URI || (URI = {}));\nexport var integer;\n(function (integer) {\n    integer.MIN_VALUE = -2147483648;\n    integer.MAX_VALUE = 2147483647;\n    function is(value) {\n        return typeof value === 'number' && integer.MIN_VALUE <= value && value <= integer.MAX_VALUE;\n    }\n    integer.is = is;\n})(integer || (integer = {}));\nexport var uinteger;\n(function (uinteger) {\n    uinteger.MIN_VALUE = 0;\n    uinteger.MAX_VALUE = 2147483647;\n    function is(value) {\n        return typeof value === 'number' && uinteger.MIN_VALUE <= value && value <= uinteger.MAX_VALUE;\n    }\n    uinteger.is = is;\n})(uinteger || (uinteger = {}));\n/**\n * The Position namespace provides helper functions to work with\n * {@link Position} literals.\n */\nexport var Position;\n(function (Position) {\n    /**\n     * Creates a new Position literal from the given line and character.\n     * @param line The position's line.\n     * @param character The position's character.\n     */\n    function create(line, character) {\n        if (line === Number.MAX_VALUE) {\n            line = uinteger.MAX_VALUE;\n        }\n        if (character === Number.MAX_VALUE) {\n            character = uinteger.MAX_VALUE;\n        }\n        return { line, character };\n    }\n    Position.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Position} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Is.uinteger(candidate.line) && Is.uinteger(candidate.character);\n    }\n    Position.is = is;\n})(Position || (Position = {}));\n/**\n * The Range namespace provides helper functions to work with\n * {@link Range} literals.\n */\nexport var Range;\n(function (Range) {\n    function create(one, two, three, four) {\n        if (Is.uinteger(one) && Is.uinteger(two) && Is.uinteger(three) && Is.uinteger(four)) {\n            return { start: Position.create(one, two), end: Position.create(three, four) };\n        }\n        else if (Position.is(one) && Position.is(two)) {\n            return { start: one, end: two };\n        }\n        else {\n            throw new Error(`Range#create called with invalid arguments[${one}, ${two}, ${three}, ${four}]`);\n        }\n    }\n    Range.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Range} interface.\n     
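[Editor's illustration] The conflict check ends prediction early when the reach set is ambiguous beyond repair: configurations are grouped by a state key computed without the alternative, and a conflict is reported when some group spans several alternatives while no group is pinned to exactly one (the rule-stop shortcut is omitted here). A toy rendering of that heuristic:

// Toy version of the hasConflictTerminatingPrediction heuristic.
interface ConflictCfg { stateKey: string; alt: number }

function conflictingAltSets(configs: ConflictCfg[]): Map<string, Set<number>> {
    const byState = new Map<string, Set<number>>();
    for (const c of configs) {
        const alts = byState.get(c.stateKey) ?? new Set<number>();
        alts.add(c.alt);
        byState.set(c.stateKey, alts);
    }
    return byState;
}

function hasConflict(configs: ConflictCfg[]): boolean {
    const sets = Array.from(conflictingAltSets(configs).values());
    const someAmbiguous = sets.some((alts) => alts.size > 1);
    const someUnique = sets.some((alts) => alts.size === 1);
    return someAmbiguous && !someUnique;
}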
*/\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Position.is(candidate.start) && Position.is(candidate.end);\n    }\n    Range.is = is;\n})(Range || (Range = {}));\n/**\n * The Location namespace provides helper functions to work with\n * {@link Location} literals.\n */\nexport var Location;\n(function (Location) {\n    /**\n     * Creates a Location literal.\n     * @param uri The location's uri.\n     * @param range The location's range.\n     */\n    function create(uri, range) {\n        return { uri, range };\n    }\n    Location.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Location} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && (Is.string(candidate.uri) || Is.undefined(candidate.uri));\n    }\n    Location.is = is;\n})(Location || (Location = {}));\n/**\n * The LocationLink namespace provides helper functions to work with\n * {@link LocationLink} literals.\n */\nexport var LocationLink;\n(function (LocationLink) {\n    /**\n     * Creates a LocationLink literal.\n     * @param targetUri The definition's uri.\n     * @param targetRange The full range of the definition.\n     * @param targetSelectionRange The span of the symbol definition at the target.\n     * @param originSelectionRange The span of the symbol being defined in the originating source file.\n     */\n    function create(targetUri, targetRange, targetSelectionRange, originSelectionRange) {\n        return { targetUri, targetRange, targetSelectionRange, originSelectionRange };\n    }\n    LocationLink.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link LocationLink} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.targetRange) && Is.string(candidate.targetUri)\n            && Range.is(candidate.targetSelectionRange)\n            && (Range.is(candidate.originSelectionRange) || Is.undefined(candidate.originSelectionRange));\n    }\n    LocationLink.is = is;\n})(LocationLink || (LocationLink = {}));\n/**\n * The Color namespace provides helper functions to work with\n * {@link Color} literals.\n */\nexport var Color;\n(function (Color) {\n    /**\n     * Creates a new Color literal.\n     */\n    function create(red, green, blue, alpha) {\n        return {\n            red,\n            green,\n            blue,\n            alpha,\n        };\n    }\n    Color.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Color} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.numberRange(candidate.red, 0, 1)\n            && Is.numberRange(candidate.green, 0, 1)\n            && Is.numberRange(candidate.blue, 0, 1)\n            && Is.numberRange(candidate.alpha, 0, 1);\n    }\n    Color.is = is;\n})(Color || (Color = {}));\n/**\n * The ColorInformation namespace provides helper functions to work with\n * {@link ColorInformation} literals.\n */\nexport var ColorInformation;\n(function (ColorInformation) {\n    /**\n     * Creates a new ColorInformation literal.\n     */\n    function create(range, color) {\n        return {\n            range,\n            color,\n        };\n    }\n    ColorInformation.create = create;\n    /**\n     * Checks whether the given literal conforms to 
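[Editor's illustration] The Position, Range and Location namespaces bundled above each pair a create helper with an is type guard. A minimal usage sketch against vscode-languageserver-types (the URI value is an arbitrary example):

import { Position, Range, Location } from 'vscode-languageserver-types';

// Range.create accepts either two Positions or four zero-based integers.
const start = Position.create(2, 0);
const end = Position.create(2, 10);
const byPositions = Range.create(start, end);
const byIntegers = Range.create(2, 0, 2, 10);

// A Location ties a range to a document URI.
const loc = Location.create('file:///demo/example.txt', byPositions);

// The namespaces also expose type guards used throughout the protocol code.
console.log(Range.is(byIntegers), Location.is(loc)); // true true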
the {@link ColorInformation} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && Color.is(candidate.color);\n    }\n    ColorInformation.is = is;\n})(ColorInformation || (ColorInformation = {}));\n/**\n * The Color namespace provides helper functions to work with\n * {@link ColorPresentation} literals.\n */\nexport var ColorPresentation;\n(function (ColorPresentation) {\n    /**\n     * Creates a new ColorInformation literal.\n     */\n    function create(label, textEdit, additionalTextEdits) {\n        return {\n            label,\n            textEdit,\n            additionalTextEdits,\n        };\n    }\n    ColorPresentation.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link ColorInformation} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.label)\n            && (Is.undefined(candidate.textEdit) || TextEdit.is(candidate))\n            && (Is.undefined(candidate.additionalTextEdits) || Is.typedArray(candidate.additionalTextEdits, TextEdit.is));\n    }\n    ColorPresentation.is = is;\n})(ColorPresentation || (ColorPresentation = {}));\n/**\n * A set of predefined range kinds.\n */\nexport var FoldingRangeKind;\n(function (FoldingRangeKind) {\n    /**\n     * Folding range for a comment\n     */\n    FoldingRangeKind.Comment = 'comment';\n    /**\n     * Folding range for an import or include\n     */\n    FoldingRangeKind.Imports = 'imports';\n    /**\n     * Folding range for a region (e.g. `#region`)\n     */\n    FoldingRangeKind.Region = 'region';\n})(FoldingRangeKind || (FoldingRangeKind = {}));\n/**\n * The folding range namespace provides helper functions to work with\n * {@link FoldingRange} literals.\n */\nexport var FoldingRange;\n(function (FoldingRange) {\n    /**\n     * Creates a new FoldingRange literal.\n     */\n    function create(startLine, endLine, startCharacter, endCharacter, kind, collapsedText) {\n        const result = {\n            startLine,\n            endLine\n        };\n        if (Is.defined(startCharacter)) {\n            result.startCharacter = startCharacter;\n        }\n        if (Is.defined(endCharacter)) {\n            result.endCharacter = endCharacter;\n        }\n        if (Is.defined(kind)) {\n            result.kind = kind;\n        }\n        if (Is.defined(collapsedText)) {\n            result.collapsedText = collapsedText;\n        }\n        return result;\n    }\n    FoldingRange.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link FoldingRange} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.uinteger(candidate.startLine) && Is.uinteger(candidate.startLine)\n            && (Is.undefined(candidate.startCharacter) || Is.uinteger(candidate.startCharacter))\n            && (Is.undefined(candidate.endCharacter) || Is.uinteger(candidate.endCharacter))\n            && (Is.undefined(candidate.kind) || Is.string(candidate.kind));\n    }\n    FoldingRange.is = is;\n})(FoldingRange || (FoldingRange = {}));\n/**\n * The DiagnosticRelatedInformation namespace provides helper functions to work with\n * {@link DiagnosticRelatedInformation} literals.\n */\nexport var DiagnosticRelatedInformation;\n(function (DiagnosticRelatedInformation) {\n    /**\n     * Creates a new DiagnosticRelatedInformation 
literal.\n     */\n    function create(location, message) {\n        return {\n            location,\n            message\n        };\n    }\n    DiagnosticRelatedInformation.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DiagnosticRelatedInformation} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Location.is(candidate.location) && Is.string(candidate.message);\n    }\n    DiagnosticRelatedInformation.is = is;\n})(DiagnosticRelatedInformation || (DiagnosticRelatedInformation = {}));\n/**\n * The diagnostic's severity.\n */\nexport var DiagnosticSeverity;\n(function (DiagnosticSeverity) {\n    /**\n     * Reports an error.\n     */\n    DiagnosticSeverity.Error = 1;\n    /**\n     * Reports a warning.\n     */\n    DiagnosticSeverity.Warning = 2;\n    /**\n     * Reports an information.\n     */\n    DiagnosticSeverity.Information = 3;\n    /**\n     * Reports a hint.\n     */\n    DiagnosticSeverity.Hint = 4;\n})(DiagnosticSeverity || (DiagnosticSeverity = {}));\n/**\n * The diagnostic tags.\n *\n * @since 3.15.0\n */\nexport var DiagnosticTag;\n(function (DiagnosticTag) {\n    /**\n     * Unused or unnecessary code.\n     *\n     * Clients are allowed to render diagnostics with this tag faded out instead of having\n     * an error squiggle.\n     */\n    DiagnosticTag.Unnecessary = 1;\n    /**\n     * Deprecated or obsolete code.\n     *\n     * Clients are allowed to rendered diagnostics with this tag strike through.\n     */\n    DiagnosticTag.Deprecated = 2;\n})(DiagnosticTag || (DiagnosticTag = {}));\n/**\n * The CodeDescription namespace provides functions to deal with descriptions for diagnostic codes.\n *\n * @since 3.16.0\n */\nexport var CodeDescription;\n(function (CodeDescription) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.href);\n    }\n    CodeDescription.is = is;\n})(CodeDescription || (CodeDescription = {}));\n/**\n * The Diagnostic namespace provides helper functions to work with\n * {@link Diagnostic} literals.\n */\nexport var Diagnostic;\n(function (Diagnostic) {\n    /**\n     * Creates a new Diagnostic literal.\n     */\n    function create(range, message, severity, code, source, relatedInformation) {\n        let result = { range, message };\n        if (Is.defined(severity)) {\n            result.severity = severity;\n        }\n        if (Is.defined(code)) {\n            result.code = code;\n        }\n        if (Is.defined(source)) {\n            result.source = source;\n        }\n        if (Is.defined(relatedInformation)) {\n            result.relatedInformation = relatedInformation;\n        }\n        return result;\n    }\n    Diagnostic.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Diagnostic} interface.\n     */\n    function is(value) {\n        var _a;\n        let candidate = value;\n        return Is.defined(candidate)\n            && Range.is(candidate.range)\n            && Is.string(candidate.message)\n            && (Is.number(candidate.severity) || Is.undefined(candidate.severity))\n            && (Is.integer(candidate.code) || Is.string(candidate.code) || Is.undefined(candidate.code))\n            && (Is.undefined(candidate.codeDescription) || (Is.string((_a = candidate.codeDescription) === null || _a === void 0 ? 
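[Editor's illustration] Diagnostic.create, defined above, takes a range and message plus optional severity, code, source and related information. A short usage sketch with vscode-languageserver-types; the code and source strings are arbitrary example values:

import { Diagnostic, DiagnosticSeverity, DiagnosticTag, Range } from 'vscode-languageserver-types';

const diagnostic = Diagnostic.create(
    Range.create(5, 0, 5, 12),
    'Unknown rule reference',
    DiagnosticSeverity.Error,
    'example-code',          // code may be a string or an integer
    'example-source'         // source is a tool-chosen label
);
diagnostic.tags = [DiagnosticTag.Unnecessary]; // optional tags are set as plain properties

console.log(Diagnostic.is(diagnostic)); // true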
void 0 : _a.href)))\n            && (Is.string(candidate.source) || Is.undefined(candidate.source))\n            && (Is.undefined(candidate.relatedInformation) || Is.typedArray(candidate.relatedInformation, DiagnosticRelatedInformation.is));\n    }\n    Diagnostic.is = is;\n})(Diagnostic || (Diagnostic = {}));\n/**\n * The Command namespace provides helper functions to work with\n * {@link Command} literals.\n */\nexport var Command;\n(function (Command) {\n    /**\n     * Creates a new Command literal.\n     */\n    function create(title, command, ...args) {\n        let result = { title, command };\n        if (Is.defined(args) && args.length > 0) {\n            result.arguments = args;\n        }\n        return result;\n    }\n    Command.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Command} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.title) && Is.string(candidate.command);\n    }\n    Command.is = is;\n})(Command || (Command = {}));\n/**\n * The TextEdit namespace provides helper function to create replace,\n * insert and delete edits more easily.\n */\nexport var TextEdit;\n(function (TextEdit) {\n    /**\n     * Creates a replace text edit.\n     * @param range The range of text to be replaced.\n     * @param newText The new text.\n     */\n    function replace(range, newText) {\n        return { range, newText };\n    }\n    TextEdit.replace = replace;\n    /**\n     * Creates an insert text edit.\n     * @param position The position to insert the text at.\n     * @param newText The text to be inserted.\n     */\n    function insert(position, newText) {\n        return { range: { start: position, end: position }, newText };\n    }\n    TextEdit.insert = insert;\n    /**\n     * Creates a delete text edit.\n     * @param range The range of text to be deleted.\n     */\n    function del(range) {\n        return { range, newText: '' };\n    }\n    TextEdit.del = del;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate)\n            && Is.string(candidate.newText)\n            && Range.is(candidate.range);\n    }\n    TextEdit.is = is;\n})(TextEdit || (TextEdit = {}));\nexport var ChangeAnnotation;\n(function (ChangeAnnotation) {\n    function create(label, needsConfirmation, description) {\n        const result = { label };\n        if (needsConfirmation !== undefined) {\n            result.needsConfirmation = needsConfirmation;\n        }\n        if (description !== undefined) {\n            result.description = description;\n        }\n        return result;\n    }\n    ChangeAnnotation.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.label) &&\n            (Is.boolean(candidate.needsConfirmation) || candidate.needsConfirmation === undefined) &&\n            (Is.string(candidate.description) || candidate.description === undefined);\n    }\n    ChangeAnnotation.is = is;\n})(ChangeAnnotation || (ChangeAnnotation = {}));\nexport var ChangeAnnotationIdentifier;\n(function (ChangeAnnotationIdentifier) {\n    function is(value) {\n        const candidate = value;\n        return Is.string(candidate);\n    }\n    ChangeAnnotationIdentifier.is = is;\n})(ChangeAnnotationIdentifier || (ChangeAnnotationIdentifier = {}));\nexport var AnnotatedTextEdit;\n(function (AnnotatedTextEdit) {\n    /**\n     * Creates an 
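[Editor's illustration] The TextEdit namespace above covers the three usual cases: replace a range, insert at a position, and delete a range (a replace with empty newText). A brief usage sketch against vscode-languageserver-types:

import { Position, Range, TextEdit } from 'vscode-languageserver-types';

const replaceEdit = TextEdit.replace(Range.create(0, 0, 0, 5), 'hello');
const insertEdit = TextEdit.insert(Position.create(1, 0), 'inserted text\n');
const deleteEdit = TextEdit.del(Range.create(2, 0, 3, 0));

console.log([replaceEdit, insertEdit, deleteEdit].every(TextEdit.is)); // true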
annotated replace text edit.\n     *\n     * @param range The range of text to be replaced.\n     * @param newText The new text.\n     * @param annotation The annotation.\n     */\n    function replace(range, newText, annotation) {\n        return { range, newText, annotationId: annotation };\n    }\n    AnnotatedTextEdit.replace = replace;\n    /**\n     * Creates an annotated insert text edit.\n     *\n     * @param position The position to insert the text at.\n     * @param newText The text to be inserted.\n     * @param annotation The annotation.\n     */\n    function insert(position, newText, annotation) {\n        return { range: { start: position, end: position }, newText, annotationId: annotation };\n    }\n    AnnotatedTextEdit.insert = insert;\n    /**\n     * Creates an annotated delete text edit.\n     *\n     * @param range The range of text to be deleted.\n     * @param annotation The annotation.\n     */\n    function del(range, annotation) {\n        return { range, newText: '', annotationId: annotation };\n    }\n    AnnotatedTextEdit.del = del;\n    function is(value) {\n        const candidate = value;\n        return TextEdit.is(candidate) && (ChangeAnnotation.is(candidate.annotationId) || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    AnnotatedTextEdit.is = is;\n})(AnnotatedTextEdit || (AnnotatedTextEdit = {}));\n/**\n * The TextDocumentEdit namespace provides helper function to create\n * an edit that manipulates a text document.\n */\nexport var TextDocumentEdit;\n(function (TextDocumentEdit) {\n    /**\n     * Creates a new `TextDocumentEdit`\n     */\n    function create(textDocument, edits) {\n        return { textDocument, edits };\n    }\n    TextDocumentEdit.create = create;\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate)\n            && OptionalVersionedTextDocumentIdentifier.is(candidate.textDocument)\n            && Array.isArray(candidate.edits);\n    }\n    TextDocumentEdit.is = is;\n})(TextDocumentEdit || (TextDocumentEdit = {}));\nexport var CreateFile;\n(function (CreateFile) {\n    function create(uri, options, annotation) {\n        let result = {\n            kind: 'create',\n            uri\n        };\n        if (options !== undefined && (options.overwrite !== undefined || options.ignoreIfExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) {\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    CreateFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'create' && Is.string(candidate.uri) && (candidate.options === undefined ||\n            ((candidate.options.overwrite === undefined || Is.boolean(candidate.options.overwrite)) && (candidate.options.ignoreIfExists === undefined || Is.boolean(candidate.options.ignoreIfExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    CreateFile.is = is;\n})(CreateFile || (CreateFile = {}));\nexport var RenameFile;\n(function (RenameFile) {\n    function create(oldUri, newUri, options, annotation) {\n        let result = {\n            kind: 'rename',\n            oldUri,\n            newUri\n        };\n        if (options !== undefined && (options.overwrite !== undefined || options.ignoreIfExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) 
{\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    RenameFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'rename' && Is.string(candidate.oldUri) && Is.string(candidate.newUri) && (candidate.options === undefined ||\n            ((candidate.options.overwrite === undefined || Is.boolean(candidate.options.overwrite)) && (candidate.options.ignoreIfExists === undefined || Is.boolean(candidate.options.ignoreIfExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    RenameFile.is = is;\n})(RenameFile || (RenameFile = {}));\nexport var DeleteFile;\n(function (DeleteFile) {\n    function create(uri, options, annotation) {\n        let result = {\n            kind: 'delete',\n            uri\n        };\n        if (options !== undefined && (options.recursive !== undefined || options.ignoreIfNotExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) {\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    DeleteFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'delete' && Is.string(candidate.uri) && (candidate.options === undefined ||\n            ((candidate.options.recursive === undefined || Is.boolean(candidate.options.recursive)) && (candidate.options.ignoreIfNotExists === undefined || Is.boolean(candidate.options.ignoreIfNotExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    DeleteFile.is = is;\n})(DeleteFile || (DeleteFile = {}));\nexport var WorkspaceEdit;\n(function (WorkspaceEdit) {\n    function is(value) {\n        let candidate = value;\n        return candidate &&\n            (candidate.changes !== undefined || candidate.documentChanges !== undefined) &&\n            (candidate.documentChanges === undefined || candidate.documentChanges.every((change) => {\n                if (Is.string(change.kind)) {\n                    return CreateFile.is(change) || RenameFile.is(change) || DeleteFile.is(change);\n                }\n                else {\n                    return TextDocumentEdit.is(change);\n                }\n            }));\n    }\n    WorkspaceEdit.is = is;\n})(WorkspaceEdit || (WorkspaceEdit = {}));\nclass TextEditChangeImpl {\n    constructor(edits, changeAnnotations) {\n        this.edits = edits;\n        this.changeAnnotations = changeAnnotations;\n    }\n    insert(position, newText, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.insert(position, newText);\n        }\n        else if (ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.insert(position, newText, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.insert(position, newText, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    replace(range, newText, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.replace(range, newText);\n        }\n        else if 
(ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.replace(range, newText, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.replace(range, newText, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    delete(range, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.del(range);\n        }\n        else if (ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.del(range, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.del(range, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    add(edit) {\n        this.edits.push(edit);\n    }\n    all() {\n        return this.edits;\n    }\n    clear() {\n        this.edits.splice(0, this.edits.length);\n    }\n    assertChangeAnnotations(value) {\n        if (value === undefined) {\n            throw new Error(`Text edit change is not configured to manage change annotations.`);\n        }\n    }\n}\n/**\n * A helper class\n */\nclass ChangeAnnotations {\n    constructor(annotations) {\n        this._annotations = annotations === undefined ? Object.create(null) : annotations;\n        this._counter = 0;\n        this._size = 0;\n    }\n    all() {\n        return this._annotations;\n    }\n    get size() {\n        return this._size;\n    }\n    manage(idOrAnnotation, annotation) {\n        let id;\n        if (ChangeAnnotationIdentifier.is(idOrAnnotation)) {\n            id = idOrAnnotation;\n        }\n        else {\n            id = this.nextId();\n            annotation = idOrAnnotation;\n        }\n        if (this._annotations[id] !== undefined) {\n            throw new Error(`Id ${id} is already in use.`);\n        }\n        if (annotation === undefined) {\n            throw new Error(`No annotation provided for id ${id}`);\n        }\n        this._annotations[id] = annotation;\n        this._size++;\n        return id;\n    }\n    nextId() {\n        this._counter++;\n        return this._counter.toString();\n    }\n}\n/**\n * A workspace change helps constructing changes to a workspace.\n */\nexport class WorkspaceChange {\n    constructor(workspaceEdit) {\n        this._textEditChanges = Object.create(null);\n        if (workspaceEdit !== undefined) {\n            this._workspaceEdit = workspaceEdit;\n            if (workspaceEdit.documentChanges) {\n                this._changeAnnotations = new ChangeAnnotations(workspaceEdit.changeAnnotations);\n                workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n                workspaceEdit.documentChanges.forEach((change) => {\n                    if (TextDocumentEdit.is(change)) {\n                        const textEditChange = new TextEditChangeImpl(change.edits, this._changeAnnotations);\n                        this._textEditChanges[change.textDocument.uri] = textEditChange;\n                    }\n                });\n            }\n            else if (workspaceEdit.changes) {\n                
Object.keys(workspaceEdit.changes).forEach((key) => {\n                    const textEditChange = new TextEditChangeImpl(workspaceEdit.changes[key]);\n                    this._textEditChanges[key] = textEditChange;\n                });\n            }\n        }\n        else {\n            this._workspaceEdit = {};\n        }\n    }\n    /**\n     * Returns the underlying {@link WorkspaceEdit} literal\n     * use to be returned from a workspace edit operation like rename.\n     */\n    get edit() {\n        this.initDocumentChanges();\n        if (this._changeAnnotations !== undefined) {\n            if (this._changeAnnotations.size === 0) {\n                this._workspaceEdit.changeAnnotations = undefined;\n            }\n            else {\n                this._workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n            }\n        }\n        return this._workspaceEdit;\n    }\n    getTextEditChange(key) {\n        if (OptionalVersionedTextDocumentIdentifier.is(key)) {\n            this.initDocumentChanges();\n            if (this._workspaceEdit.documentChanges === undefined) {\n                throw new Error('Workspace edit is not configured for document changes.');\n            }\n            const textDocument = { uri: key.uri, version: key.version };\n            let result = this._textEditChanges[textDocument.uri];\n            if (!result) {\n                const edits = [];\n                const textDocumentEdit = {\n                    textDocument,\n                    edits\n                };\n                this._workspaceEdit.documentChanges.push(textDocumentEdit);\n                result = new TextEditChangeImpl(edits, this._changeAnnotations);\n                this._textEditChanges[textDocument.uri] = result;\n            }\n            return result;\n        }\n        else {\n            this.initChanges();\n            if (this._workspaceEdit.changes === undefined) {\n                throw new Error('Workspace edit is not configured for normal text edit changes.');\n            }\n            let result = this._textEditChanges[key];\n            if (!result) {\n                let edits = [];\n                this._workspaceEdit.changes[key] = edits;\n                result = new TextEditChangeImpl(edits);\n                this._textEditChanges[key] = result;\n            }\n            return result;\n        }\n    }\n    initDocumentChanges() {\n        if (this._workspaceEdit.documentChanges === undefined && this._workspaceEdit.changes === undefined) {\n            this._changeAnnotations = new ChangeAnnotations();\n            this._workspaceEdit.documentChanges = [];\n            this._workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n        }\n    }\n    initChanges() {\n        if (this._workspaceEdit.documentChanges === undefined && this._workspaceEdit.changes === undefined) {\n            this._workspaceEdit.changes = Object.create(null);\n        }\n    }\n    createFile(uri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if 
(annotation === undefined) {\n            operation = CreateFile.create(uri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? annotation : this._changeAnnotations.manage(annotation);\n            operation = CreateFile.create(uri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    renameFile(oldUri, newUri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if (annotation === undefined) {\n            operation = RenameFile.create(oldUri, newUri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? annotation : this._changeAnnotations.manage(annotation);\n            operation = RenameFile.create(oldUri, newUri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    deleteFile(uri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if (annotation === undefined) {\n            operation = DeleteFile.create(uri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? 
annotation : this._changeAnnotations.manage(annotation);\n            operation = DeleteFile.create(uri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n}\n/**\n * The TextDocumentIdentifier namespace provides helper functions to work with\n * {@link TextDocumentIdentifier} literals.\n */\nexport var TextDocumentIdentifier;\n(function (TextDocumentIdentifier) {\n    /**\n     * Creates a new TextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     */\n    function create(uri) {\n        return { uri };\n    }\n    TextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link TextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri);\n    }\n    TextDocumentIdentifier.is = is;\n})(TextDocumentIdentifier || (TextDocumentIdentifier = {}));\n/**\n * The VersionedTextDocumentIdentifier namespace provides helper functions to work with\n * {@link VersionedTextDocumentIdentifier} literals.\n */\nexport var VersionedTextDocumentIdentifier;\n(function (VersionedTextDocumentIdentifier) {\n    /**\n     * Creates a new VersionedTextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     * @param version The document's version.\n     */\n    function create(uri, version) {\n        return { uri, version };\n    }\n    VersionedTextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link VersionedTextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && Is.integer(candidate.version);\n    }\n    VersionedTextDocumentIdentifier.is = is;\n})(VersionedTextDocumentIdentifier || (VersionedTextDocumentIdentifier = {}));\n/**\n * The OptionalVersionedTextDocumentIdentifier namespace provides helper functions to work with\n * {@link OptionalVersionedTextDocumentIdentifier} literals.\n */\nexport var OptionalVersionedTextDocumentIdentifier;\n(function (OptionalVersionedTextDocumentIdentifier) {\n    /**\n     * Creates a new OptionalVersionedTextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     * @param version The document's version.\n     */\n    function create(uri, version) {\n        return { uri, version };\n    }\n    OptionalVersionedTextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link OptionalVersionedTextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && (candidate.version === null || Is.integer(candidate.version));\n    }\n    OptionalVersionedTextDocumentIdentifier.is = is;\n})(OptionalVersionedTextDocumentIdentifier || (OptionalVersionedTextDocumentIdentifier = {}));\n/**\n * The TextDocumentItem namespace provides helper functions to work with\n * {@link TextDocumentItem} literals.\n */\nexport var TextDocumentItem;\n(function (TextDocumentItem) {\n    /**\n     * Creates a new TextDocumentItem literal.\n     * @param uri The document's uri.\n     * @param languageId The document's language identifier.\n     * @param version The document's version number.\n     * @param text The document's text.\n     */\n    function create(uri, 
languageId, version, text) {\n        return { uri, languageId, version, text };\n    }\n    TextDocumentItem.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link TextDocumentItem} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && Is.string(candidate.languageId) && Is.integer(candidate.version) && Is.string(candidate.text);\n    }\n    TextDocumentItem.is = is;\n})(TextDocumentItem || (TextDocumentItem = {}));\n/**\n * Describes the content type that a client supports in various\n * result literals like `Hover`, `ParameterInfo` or `CompletionItem`.\n *\n * Please note that `MarkupKinds` must not start with a `$`. This kinds\n * are reserved for internal usage.\n */\nexport var MarkupKind;\n(function (MarkupKind) {\n    /**\n     * Plain text is supported as a content format\n     */\n    MarkupKind.PlainText = 'plaintext';\n    /**\n     * Markdown is supported as a content format\n     */\n    MarkupKind.Markdown = 'markdown';\n    /**\n     * Checks whether the given value is a value of the {@link MarkupKind} type.\n     */\n    function is(value) {\n        const candidate = value;\n        return candidate === MarkupKind.PlainText || candidate === MarkupKind.Markdown;\n    }\n    MarkupKind.is = is;\n})(MarkupKind || (MarkupKind = {}));\nexport var MarkupContent;\n(function (MarkupContent) {\n    /**\n     * Checks whether the given value conforms to the {@link MarkupContent} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(value) && MarkupKind.is(candidate.kind) && Is.string(candidate.value);\n    }\n    MarkupContent.is = is;\n})(MarkupContent || (MarkupContent = {}));\n/**\n * The kind of a completion entry.\n */\nexport var CompletionItemKind;\n(function (CompletionItemKind) {\n    CompletionItemKind.Text = 1;\n    CompletionItemKind.Method = 2;\n    CompletionItemKind.Function = 3;\n    CompletionItemKind.Constructor = 4;\n    CompletionItemKind.Field = 5;\n    CompletionItemKind.Variable = 6;\n    CompletionItemKind.Class = 7;\n    CompletionItemKind.Interface = 8;\n    CompletionItemKind.Module = 9;\n    CompletionItemKind.Property = 10;\n    CompletionItemKind.Unit = 11;\n    CompletionItemKind.Value = 12;\n    CompletionItemKind.Enum = 13;\n    CompletionItemKind.Keyword = 14;\n    CompletionItemKind.Snippet = 15;\n    CompletionItemKind.Color = 16;\n    CompletionItemKind.File = 17;\n    CompletionItemKind.Reference = 18;\n    CompletionItemKind.Folder = 19;\n    CompletionItemKind.EnumMember = 20;\n    CompletionItemKind.Constant = 21;\n    CompletionItemKind.Struct = 22;\n    CompletionItemKind.Event = 23;\n    CompletionItemKind.Operator = 24;\n    CompletionItemKind.TypeParameter = 25;\n})(CompletionItemKind || (CompletionItemKind = {}));\n/**\n * Defines whether the insert text in a completion item should be interpreted as\n * plain text or a snippet.\n */\nexport var InsertTextFormat;\n(function (InsertTextFormat) {\n    /**\n     * The primary text to be inserted is treated as a plain string.\n     */\n    InsertTextFormat.PlainText = 1;\n    /**\n     * The primary text to be inserted is treated as a snippet.\n     *\n     * A snippet can define tab stops and placeholders with `$1`, `$2`\n     * and `${3:foo}`. `$0` defines the final tab stop, it defaults to\n     * the end of the snippet. 
Placeholders with equal identifiers are linked,\n     * that is typing in one will update others too.\n     *\n     * See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax\n     */\n    InsertTextFormat.Snippet = 2;\n})(InsertTextFormat || (InsertTextFormat = {}));\n/**\n * Completion item tags are extra annotations that tweak the rendering of a completion\n * item.\n *\n * @since 3.15.0\n */\nexport var CompletionItemTag;\n(function (CompletionItemTag) {\n    /**\n     * Render a completion as obsolete, usually using a strike-out.\n     */\n    CompletionItemTag.Deprecated = 1;\n})(CompletionItemTag || (CompletionItemTag = {}));\n/**\n * The InsertReplaceEdit namespace provides functions to deal with insert / replace edits.\n *\n * @since 3.16.0\n */\nexport var InsertReplaceEdit;\n(function (InsertReplaceEdit) {\n    /**\n     * Creates a new insert / replace edit\n     */\n    function create(newText, insert, replace) {\n        return { newText, insert, replace };\n    }\n    InsertReplaceEdit.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link InsertReplaceEdit} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return candidate && Is.string(candidate.newText) && Range.is(candidate.insert) && Range.is(candidate.replace);\n    }\n    InsertReplaceEdit.is = is;\n})(InsertReplaceEdit || (InsertReplaceEdit = {}));\n/**\n * How whitespace and indentation is handled during completion\n * item insertion.\n *\n * @since 3.16.0\n */\nexport var InsertTextMode;\n(function (InsertTextMode) {\n    /**\n     * The insertion or replace strings is taken as it is. If the\n     * value is multi line the lines below the cursor will be\n     * inserted using the indentation defined in the string value.\n     * The client will not apply any kind of adjustments to the\n     * string.\n     */\n    InsertTextMode.asIs = 1;\n    /**\n     * The editor adjusts leading whitespace of new lines so that\n     * they match the indentation up to the cursor of the line for\n     * which the item is accepted.\n     *\n     * Consider a line like this: <2tabs><3tabs>foo. 
Accepting a\n     * multi line completion item is indented using 2 tabs and all\n     * following lines inserted will be indented using 2 tabs as well.\n     */\n    InsertTextMode.adjustIndentation = 2;\n})(InsertTextMode || (InsertTextMode = {}));\nexport var CompletionItemLabelDetails;\n(function (CompletionItemLabelDetails) {\n    function is(value) {\n        const candidate = value;\n        return candidate && (Is.string(candidate.detail) || candidate.detail === undefined) &&\n            (Is.string(candidate.description) || candidate.description === undefined);\n    }\n    CompletionItemLabelDetails.is = is;\n})(CompletionItemLabelDetails || (CompletionItemLabelDetails = {}));\n/**\n * The CompletionItem namespace provides functions to deal with\n * completion items.\n */\nexport var CompletionItem;\n(function (CompletionItem) {\n    /**\n     * Create a completion item and seed it with a label.\n     * @param label The completion item's label\n     */\n    function create(label) {\n        return { label };\n    }\n    CompletionItem.create = create;\n})(CompletionItem || (CompletionItem = {}));\n/**\n * The CompletionList namespace provides functions to deal with\n * completion lists.\n */\nexport var CompletionList;\n(function (CompletionList) {\n    /**\n     * Creates a new completion list.\n     *\n     * @param items The completion items.\n     * @param isIncomplete The list is not complete.\n     */\n    function create(items, isIncomplete) {\n        return { items: items ? items : [], isIncomplete: !!isIncomplete };\n    }\n    CompletionList.create = create;\n})(CompletionList || (CompletionList = {}));\nexport var MarkedString;\n(function (MarkedString) {\n    /**\n     * Creates a marked string from plain text.\n     *\n     * @param plainText The plain text.\n     */\n    function fromPlainText(plainText) {\n        return plainText.replace(/[\\\\`*_{}[\\]()#+\\-.!]/g, '\\\\$&'); // escape markdown syntax tokens: http://daringfireball.net/projects/markdown/syntax#backslash\n    }\n    MarkedString.fromPlainText = fromPlainText;\n    /**\n     * Checks whether the given value conforms to the {@link MarkedString} type.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.string(candidate) || (Is.objectLiteral(candidate) && Is.string(candidate.language) && Is.string(candidate.value));\n    }\n    MarkedString.is = is;\n})(MarkedString || (MarkedString = {}));\nexport var Hover;\n(function (Hover) {\n    /**\n     * Checks whether the given value conforms to the {@link Hover} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return !!candidate && Is.objectLiteral(candidate) && (MarkupContent.is(candidate.contents) ||\n            MarkedString.is(candidate.contents) ||\n            Is.typedArray(candidate.contents, MarkedString.is)) && (value.range === undefined || Range.is(value.range));\n    }\n    Hover.is = is;\n})(Hover || (Hover = {}));\n/**\n * The ParameterInformation namespace provides helper functions to work with\n * {@link ParameterInformation} literals.\n */\nexport var ParameterInformation;\n(function (ParameterInformation) {\n    /**\n     * Creates a new parameter information literal.\n     *\n     * @param label A label string.\n     * @param documentation A doc string.\n     */\n    function create(label, documentation) {\n        return documentation ? 
{ label, documentation } : { label };\n    }\n    ParameterInformation.create = create;\n})(ParameterInformation || (ParameterInformation = {}));\n/**\n * The SignatureInformation namespace provides helper functions to work with\n * {@link SignatureInformation} literals.\n */\nexport var SignatureInformation;\n(function (SignatureInformation) {\n    function create(label, documentation, ...parameters) {\n        let result = { label };\n        if (Is.defined(documentation)) {\n            result.documentation = documentation;\n        }\n        if (Is.defined(parameters)) {\n            result.parameters = parameters;\n        }\n        else {\n            result.parameters = [];\n        }\n        return result;\n    }\n    SignatureInformation.create = create;\n})(SignatureInformation || (SignatureInformation = {}));\n/**\n * A document highlight kind.\n */\nexport var DocumentHighlightKind;\n(function (DocumentHighlightKind) {\n    /**\n     * A textual occurrence.\n     */\n    DocumentHighlightKind.Text = 1;\n    /**\n     * Read-access of a symbol, like reading a variable.\n     */\n    DocumentHighlightKind.Read = 2;\n    /**\n     * Write-access of a symbol, like writing to a variable.\n     */\n    DocumentHighlightKind.Write = 3;\n})(DocumentHighlightKind || (DocumentHighlightKind = {}));\n/**\n * DocumentHighlight namespace to provide helper functions to work with\n * {@link DocumentHighlight} literals.\n */\nexport var DocumentHighlight;\n(function (DocumentHighlight) {\n    /**\n     * Create a DocumentHighlight object.\n     * @param range The range the highlight applies to.\n     * @param kind The highlight kind\n     */\n    function create(range, kind) {\n        let result = { range };\n        if (Is.number(kind)) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    DocumentHighlight.create = create;\n})(DocumentHighlight || (DocumentHighlight = {}));\n/**\n * A symbol kind.\n */\nexport var SymbolKind;\n(function (SymbolKind) {\n    SymbolKind.File = 1;\n    SymbolKind.Module = 2;\n    SymbolKind.Namespace = 3;\n    SymbolKind.Package = 4;\n    SymbolKind.Class = 5;\n    SymbolKind.Method = 6;\n    SymbolKind.Property = 7;\n    SymbolKind.Field = 8;\n    SymbolKind.Constructor = 9;\n    SymbolKind.Enum = 10;\n    SymbolKind.Interface = 11;\n    SymbolKind.Function = 12;\n    SymbolKind.Variable = 13;\n    SymbolKind.Constant = 14;\n    SymbolKind.String = 15;\n    SymbolKind.Number = 16;\n    SymbolKind.Boolean = 17;\n    SymbolKind.Array = 18;\n    SymbolKind.Object = 19;\n    SymbolKind.Key = 20;\n    SymbolKind.Null = 21;\n    SymbolKind.EnumMember = 22;\n    SymbolKind.Struct = 23;\n    SymbolKind.Event = 24;\n    SymbolKind.Operator = 25;\n    SymbolKind.TypeParameter = 26;\n})(SymbolKind || (SymbolKind = {}));\n/**\n * Symbol tags are extra annotations that tweak the rendering of a symbol.\n *\n * @since 3.16\n */\nexport var SymbolTag;\n(function (SymbolTag) {\n    /**\n     * Render a symbol as obsolete, usually using a strike-out.\n     */\n    SymbolTag.Deprecated = 1;\n})(SymbolTag || (SymbolTag = {}));\nexport var SymbolInformation;\n(function (SymbolInformation) {\n    /**\n     * Creates a new symbol information literal.\n     *\n     * @param name The name of the symbol.\n     * @param kind The kind of the symbol.\n     * @param range The range of the location of the symbol.\n     * @param uri The resource of the location of symbol.\n     * @param containerName The name of the symbol containing the symbol.\n     */\n    
function create(name, kind, range, uri, containerName) {\n        let result = {\n            name,\n            kind,\n            location: { uri, range }\n        };\n        if (containerName) {\n            result.containerName = containerName;\n        }\n        return result;\n    }\n    SymbolInformation.create = create;\n})(SymbolInformation || (SymbolInformation = {}));\nexport var WorkspaceSymbol;\n(function (WorkspaceSymbol) {\n    /**\n     * Create a new workspace symbol.\n     *\n     * @param name The name of the symbol.\n     * @param kind The kind of the symbol.\n     * @param uri The resource of the location of the symbol.\n     * @param range An options range of the location.\n     * @returns A WorkspaceSymbol.\n     */\n    function create(name, kind, uri, range) {\n        return range !== undefined\n            ? { name, kind, location: { uri, range } }\n            : { name, kind, location: { uri } };\n    }\n    WorkspaceSymbol.create = create;\n})(WorkspaceSymbol || (WorkspaceSymbol = {}));\nexport var DocumentSymbol;\n(function (DocumentSymbol) {\n    /**\n     * Creates a new symbol information literal.\n     *\n     * @param name The name of the symbol.\n     * @param detail The detail of the symbol.\n     * @param kind The kind of the symbol.\n     * @param range The range of the symbol.\n     * @param selectionRange The selectionRange of the symbol.\n     * @param children Children of the symbol.\n     */\n    function create(name, detail, kind, range, selectionRange, children) {\n        let result = {\n            name,\n            detail,\n            kind,\n            range,\n            selectionRange\n        };\n        if (children !== undefined) {\n            result.children = children;\n        }\n        return result;\n    }\n    DocumentSymbol.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DocumentSymbol} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return candidate &&\n            Is.string(candidate.name) && Is.number(candidate.kind) &&\n            Range.is(candidate.range) && Range.is(candidate.selectionRange) &&\n            (candidate.detail === undefined || Is.string(candidate.detail)) &&\n            (candidate.deprecated === undefined || Is.boolean(candidate.deprecated)) &&\n            (candidate.children === undefined || Array.isArray(candidate.children)) &&\n            (candidate.tags === undefined || Array.isArray(candidate.tags));\n    }\n    DocumentSymbol.is = is;\n})(DocumentSymbol || (DocumentSymbol = {}));\n/**\n * A set of predefined code action kinds\n */\nexport var CodeActionKind;\n(function (CodeActionKind) {\n    /**\n     * Empty kind.\n     */\n    CodeActionKind.Empty = '';\n    /**\n     * Base kind for quickfix actions: 'quickfix'\n     */\n    CodeActionKind.QuickFix = 'quickfix';\n    /**\n     * Base kind for refactoring actions: 'refactor'\n     */\n    CodeActionKind.Refactor = 'refactor';\n    /**\n     * Base kind for refactoring extraction actions: 'refactor.extract'\n     *\n     * Example extract actions:\n     *\n     * - Extract method\n     * - Extract function\n     * - Extract variable\n     * - Extract interface from class\n     * - ...\n     */\n    CodeActionKind.RefactorExtract = 'refactor.extract';\n    /**\n     * Base kind for refactoring inline actions: 'refactor.inline'\n     *\n     * Example inline actions:\n     *\n     * - Inline function\n     * - Inline variable\n     * - Inline constant\n     * - 
...\n     */\n    CodeActionKind.RefactorInline = 'refactor.inline';\n    /**\n     * Base kind for refactoring rewrite actions: 'refactor.rewrite'\n     *\n     * Example rewrite actions:\n     *\n     * - Convert JavaScript function to class\n     * - Add or remove parameter\n     * - Encapsulate field\n     * - Make method static\n     * - Move method to base class\n     * - ...\n     */\n    CodeActionKind.RefactorRewrite = 'refactor.rewrite';\n    /**\n     * Base kind for source actions: `source`\n     *\n     * Source code actions apply to the entire file.\n     */\n    CodeActionKind.Source = 'source';\n    /**\n     * Base kind for an organize imports source action: `source.organizeImports`\n     */\n    CodeActionKind.SourceOrganizeImports = 'source.organizeImports';\n    /**\n     * Base kind for auto-fix source actions: `source.fixAll`.\n     *\n     * Fix all actions automatically fix errors that have a clear fix that do not require user input.\n     * They should not suppress errors or perform unsafe fixes such as generating new types or classes.\n     *\n     * @since 3.15.0\n     */\n    CodeActionKind.SourceFixAll = 'source.fixAll';\n})(CodeActionKind || (CodeActionKind = {}));\n/**\n * The reason why code actions were requested.\n *\n * @since 3.17.0\n */\nexport var CodeActionTriggerKind;\n(function (CodeActionTriggerKind) {\n    /**\n     * Code actions were explicitly requested by the user or by an extension.\n     */\n    CodeActionTriggerKind.Invoked = 1;\n    /**\n     * Code actions were requested automatically.\n     *\n     * This typically happens when current selection in a file changes, but can\n     * also be triggered when file content changes.\n     */\n    CodeActionTriggerKind.Automatic = 2;\n})(CodeActionTriggerKind || (CodeActionTriggerKind = {}));\n/**\n * The CodeActionContext namespace provides helper functions to work with\n * {@link CodeActionContext} literals.\n */\nexport var CodeActionContext;\n(function (CodeActionContext) {\n    /**\n     * Creates a new CodeActionContext literal.\n     */\n    function create(diagnostics, only, triggerKind) {\n        let result = { diagnostics };\n        if (only !== undefined && only !== null) {\n            result.only = only;\n        }\n        if (triggerKind !== undefined && triggerKind !== null) {\n            result.triggerKind = triggerKind;\n        }\n        return result;\n    }\n    CodeActionContext.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link CodeActionContext} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.typedArray(candidate.diagnostics, Diagnostic.is)\n            && (candidate.only === undefined || Is.typedArray(candidate.only, Is.string))\n            && (candidate.triggerKind === undefined || candidate.triggerKind === CodeActionTriggerKind.Invoked || candidate.triggerKind === CodeActionTriggerKind.Automatic);\n    }\n    CodeActionContext.is = is;\n})(CodeActionContext || (CodeActionContext = {}));\nexport var CodeAction;\n(function (CodeAction) {\n    function create(title, kindOrCommandOrEdit, kind) {\n        let result = { title };\n        let checkKind = true;\n        if (typeof kindOrCommandOrEdit === 'string') {\n            checkKind = false;\n            result.kind = kindOrCommandOrEdit;\n        }\n        else if (Command.is(kindOrCommandOrEdit)) {\n            result.command = kindOrCommandOrEdit;\n        }\n        else {\n            result.edit = 
kindOrCommandOrEdit;\n        }\n        if (checkKind && kind !== undefined) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    CodeAction.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && Is.string(candidate.title) &&\n            (candidate.diagnostics === undefined || Is.typedArray(candidate.diagnostics, Diagnostic.is)) &&\n            (candidate.kind === undefined || Is.string(candidate.kind)) &&\n            (candidate.edit !== undefined || candidate.command !== undefined) &&\n            (candidate.command === undefined || Command.is(candidate.command)) &&\n            (candidate.isPreferred === undefined || Is.boolean(candidate.isPreferred)) &&\n            (candidate.edit === undefined || WorkspaceEdit.is(candidate.edit));\n    }\n    CodeAction.is = is;\n})(CodeAction || (CodeAction = {}));\n/**\n * The CodeLens namespace provides helper functions to work with\n * {@link CodeLens} literals.\n */\nexport var CodeLens;\n(function (CodeLens) {\n    /**\n     * Creates a new CodeLens literal.\n     */\n    function create(range, data) {\n        let result = { range };\n        if (Is.defined(data)) {\n            result.data = data;\n        }\n        return result;\n    }\n    CodeLens.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link CodeLens} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Range.is(candidate.range) && (Is.undefined(candidate.command) || Command.is(candidate.command));\n    }\n    CodeLens.is = is;\n})(CodeLens || (CodeLens = {}));\n/**\n * The FormattingOptions namespace provides helper functions to work with\n * {@link FormattingOptions} literals.\n */\nexport var FormattingOptions;\n(function (FormattingOptions) {\n    /**\n     * Creates a new FormattingOptions literal.\n     */\n    function create(tabSize, insertSpaces) {\n        return { tabSize, insertSpaces };\n    }\n    FormattingOptions.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link FormattingOptions} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.uinteger(candidate.tabSize) && Is.boolean(candidate.insertSpaces);\n    }\n    FormattingOptions.is = is;\n})(FormattingOptions || (FormattingOptions = {}));\n/**\n * The DocumentLink namespace provides helper functions to work with\n * {@link DocumentLink} literals.\n */\nexport var DocumentLink;\n(function (DocumentLink) {\n    /**\n     * Creates a new DocumentLink literal.\n     */\n    function create(range, target, data) {\n        return { range, target, data };\n    }\n    DocumentLink.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DocumentLink} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Range.is(candidate.range) && (Is.undefined(candidate.target) || Is.string(candidate.target));\n    }\n    DocumentLink.is = is;\n})(DocumentLink || (DocumentLink = {}));\n/**\n * The SelectionRange namespace provides helper function to work with\n * SelectionRange literals.\n */\nexport var SelectionRange;\n(function (SelectionRange) {\n    /**\n     * Creates a new SelectionRange\n     * @param range the range.\n     * @param parent an optional parent.\n     */\n    function create(range, parent) {\n        return { range, parent };\n    
}\n    SelectionRange.create = create;\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && (candidate.parent === undefined || SelectionRange.is(candidate.parent));\n    }\n    SelectionRange.is = is;\n})(SelectionRange || (SelectionRange = {}));\n/**\n * A set of predefined token types. This set is not fixed\n * an clients can specify additional token types via the\n * corresponding client capabilities.\n *\n * @since 3.16.0\n */\nexport var SemanticTokenTypes;\n(function (SemanticTokenTypes) {\n    SemanticTokenTypes[\"namespace\"] = \"namespace\";\n    /**\n     * Represents a generic type. Acts as a fallback for types which can't be mapped to\n     * a specific type like class or enum.\n     */\n    SemanticTokenTypes[\"type\"] = \"type\";\n    SemanticTokenTypes[\"class\"] = \"class\";\n    SemanticTokenTypes[\"enum\"] = \"enum\";\n    SemanticTokenTypes[\"interface\"] = \"interface\";\n    SemanticTokenTypes[\"struct\"] = \"struct\";\n    SemanticTokenTypes[\"typeParameter\"] = \"typeParameter\";\n    SemanticTokenTypes[\"parameter\"] = \"parameter\";\n    SemanticTokenTypes[\"variable\"] = \"variable\";\n    SemanticTokenTypes[\"property\"] = \"property\";\n    SemanticTokenTypes[\"enumMember\"] = \"enumMember\";\n    SemanticTokenTypes[\"event\"] = \"event\";\n    SemanticTokenTypes[\"function\"] = \"function\";\n    SemanticTokenTypes[\"method\"] = \"method\";\n    SemanticTokenTypes[\"macro\"] = \"macro\";\n    SemanticTokenTypes[\"keyword\"] = \"keyword\";\n    SemanticTokenTypes[\"modifier\"] = \"modifier\";\n    SemanticTokenTypes[\"comment\"] = \"comment\";\n    SemanticTokenTypes[\"string\"] = \"string\";\n    SemanticTokenTypes[\"number\"] = \"number\";\n    SemanticTokenTypes[\"regexp\"] = \"regexp\";\n    SemanticTokenTypes[\"operator\"] = \"operator\";\n    /**\n     * @since 3.17.0\n     */\n    SemanticTokenTypes[\"decorator\"] = \"decorator\";\n})(SemanticTokenTypes || (SemanticTokenTypes = {}));\n/**\n * A set of predefined token modifiers. 
This set is not fixed\n * an clients can specify additional token types via the\n * corresponding client capabilities.\n *\n * @since 3.16.0\n */\nexport var SemanticTokenModifiers;\n(function (SemanticTokenModifiers) {\n    SemanticTokenModifiers[\"declaration\"] = \"declaration\";\n    SemanticTokenModifiers[\"definition\"] = \"definition\";\n    SemanticTokenModifiers[\"readonly\"] = \"readonly\";\n    SemanticTokenModifiers[\"static\"] = \"static\";\n    SemanticTokenModifiers[\"deprecated\"] = \"deprecated\";\n    SemanticTokenModifiers[\"abstract\"] = \"abstract\";\n    SemanticTokenModifiers[\"async\"] = \"async\";\n    SemanticTokenModifiers[\"modification\"] = \"modification\";\n    SemanticTokenModifiers[\"documentation\"] = \"documentation\";\n    SemanticTokenModifiers[\"defaultLibrary\"] = \"defaultLibrary\";\n})(SemanticTokenModifiers || (SemanticTokenModifiers = {}));\n/**\n * @since 3.16.0\n */\nexport var SemanticTokens;\n(function (SemanticTokens) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && (candidate.resultId === undefined || typeof candidate.resultId === 'string') &&\n            Array.isArray(candidate.data) && (candidate.data.length === 0 || typeof candidate.data[0] === 'number');\n    }\n    SemanticTokens.is = is;\n})(SemanticTokens || (SemanticTokens = {}));\n/**\n * The InlineValueText namespace provides functions to deal with InlineValueTexts.\n *\n * @since 3.17.0\n */\nexport var InlineValueText;\n(function (InlineValueText) {\n    /**\n     * Creates a new InlineValueText literal.\n     */\n    function create(range, text) {\n        return { range, text };\n    }\n    InlineValueText.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range) && Is.string(candidate.text);\n    }\n    InlineValueText.is = is;\n})(InlineValueText || (InlineValueText = {}));\n/**\n * The InlineValueVariableLookup namespace provides functions to deal with InlineValueVariableLookups.\n *\n * @since 3.17.0\n */\nexport var InlineValueVariableLookup;\n(function (InlineValueVariableLookup) {\n    /**\n     * Creates a new InlineValueText literal.\n     */\n    function create(range, variableName, caseSensitiveLookup) {\n        return { range, variableName, caseSensitiveLookup };\n    }\n    InlineValueVariableLookup.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range) && Is.boolean(candidate.caseSensitiveLookup)\n            && (Is.string(candidate.variableName) || candidate.variableName === undefined);\n    }\n    InlineValueVariableLookup.is = is;\n})(InlineValueVariableLookup || (InlineValueVariableLookup = {}));\n/**\n * The InlineValueEvaluatableExpression namespace provides functions to deal with InlineValueEvaluatableExpression.\n *\n * @since 3.17.0\n */\nexport var InlineValueEvaluatableExpression;\n(function (InlineValueEvaluatableExpression) {\n    /**\n     * Creates a new InlineValueEvaluatableExpression literal.\n     */\n    function create(range, expression) {\n        return { range, expression };\n    }\n    InlineValueEvaluatableExpression.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range)\n            && (Is.string(candidate.expression) || candidate.expression === 
undefined);\n    }\n    InlineValueEvaluatableExpression.is = is;\n})(InlineValueEvaluatableExpression || (InlineValueEvaluatableExpression = {}));\n/**\n * The InlineValueContext namespace provides helper functions to work with\n * {@link InlineValueContext} literals.\n *\n * @since 3.17.0\n */\nexport var InlineValueContext;\n(function (InlineValueContext) {\n    /**\n     * Creates a new InlineValueContext literal.\n     */\n    function create(frameId, stoppedLocation) {\n        return { frameId, stoppedLocation };\n    }\n    InlineValueContext.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link InlineValueContext} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.defined(candidate) && Range.is(value.stoppedLocation);\n    }\n    InlineValueContext.is = is;\n})(InlineValueContext || (InlineValueContext = {}));\n/**\n * Inlay hint kinds.\n *\n * @since 3.17.0\n */\nexport var InlayHintKind;\n(function (InlayHintKind) {\n    /**\n     * An inlay hint that for a type annotation.\n     */\n    InlayHintKind.Type = 1;\n    /**\n     * An inlay hint that is for a parameter.\n     */\n    InlayHintKind.Parameter = 2;\n    function is(value) {\n        return value === 1 || value === 2;\n    }\n    InlayHintKind.is = is;\n})(InlayHintKind || (InlayHintKind = {}));\nexport var InlayHintLabelPart;\n(function (InlayHintLabelPart) {\n    function create(value) {\n        return { value };\n    }\n    InlayHintLabelPart.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate)\n            && (candidate.tooltip === undefined || Is.string(candidate.tooltip) || MarkupContent.is(candidate.tooltip))\n            && (candidate.location === undefined || Location.is(candidate.location))\n            && (candidate.command === undefined || Command.is(candidate.command));\n    }\n    InlayHintLabelPart.is = is;\n})(InlayHintLabelPart || (InlayHintLabelPart = {}));\nexport var InlayHint;\n(function (InlayHint) {\n    function create(position, label, kind) {\n        const result = { position, label };\n        if (kind !== undefined) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    InlayHint.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Position.is(candidate.position)\n            && (Is.string(candidate.label) || Is.typedArray(candidate.label, InlayHintLabelPart.is))\n            && (candidate.kind === undefined || InlayHintKind.is(candidate.kind))\n            && (candidate.textEdits === undefined) || Is.typedArray(candidate.textEdits, TextEdit.is)\n            && (candidate.tooltip === undefined || Is.string(candidate.tooltip) || MarkupContent.is(candidate.tooltip))\n            && (candidate.paddingLeft === undefined || Is.boolean(candidate.paddingLeft))\n            && (candidate.paddingRight === undefined || Is.boolean(candidate.paddingRight));\n    }\n    InlayHint.is = is;\n})(InlayHint || (InlayHint = {}));\nexport var StringValue;\n(function (StringValue) {\n    function createSnippet(value) {\n        return { kind: 'snippet', value };\n    }\n    StringValue.createSnippet = createSnippet;\n})(StringValue || (StringValue = {}));\nexport var InlineCompletionItem;\n(function (InlineCompletionItem) {\n    function create(insertText, filterText, range, command) {\n        return { insertText, filterText, range, command };\n    }\n    
InlineCompletionItem.create = create;\n})(InlineCompletionItem || (InlineCompletionItem = {}));\nexport var InlineCompletionList;\n(function (InlineCompletionList) {\n    function create(items) {\n        return { items };\n    }\n    InlineCompletionList.create = create;\n})(InlineCompletionList || (InlineCompletionList = {}));\n/**\n * Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.\n *\n * @since 3.18.0\n * @proposed\n */\nexport var InlineCompletionTriggerKind;\n(function (InlineCompletionTriggerKind) {\n    /**\n     * Completion was triggered explicitly by a user gesture.\n     */\n    InlineCompletionTriggerKind.Invoked = 0;\n    /**\n     * Completion was triggered automatically while editing.\n     */\n    InlineCompletionTriggerKind.Automatic = 1;\n})(InlineCompletionTriggerKind || (InlineCompletionTriggerKind = {}));\nexport var SelectedCompletionInfo;\n(function (SelectedCompletionInfo) {\n    function create(range, text) {\n        return { range, text };\n    }\n    SelectedCompletionInfo.create = create;\n})(SelectedCompletionInfo || (SelectedCompletionInfo = {}));\nexport var InlineCompletionContext;\n(function (InlineCompletionContext) {\n    function create(triggerKind, selectedCompletionInfo) {\n        return { triggerKind, selectedCompletionInfo };\n    }\n    InlineCompletionContext.create = create;\n})(InlineCompletionContext || (InlineCompletionContext = {}));\nexport var WorkspaceFolder;\n(function (WorkspaceFolder) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && URI.is(candidate.uri) && Is.string(candidate.name);\n    }\n    WorkspaceFolder.is = is;\n})(WorkspaceFolder || (WorkspaceFolder = {}));\nexport const EOL = ['\\n', '\\r\\n', '\\r'];\n/**\n * @deprecated Use the text document from the new vscode-languageserver-textdocument package.\n */\nexport var TextDocument;\n(function (TextDocument) {\n    /**\n     * Creates a new ITextDocument literal from the given uri and content.\n     * @param uri The document's uri.\n     * @param languageId The document's language Id.\n     * @param version The document's version.\n     * @param content The document's content.\n     */\n    function create(uri, languageId, version, content) {\n        return new FullTextDocument(uri, languageId, version, content);\n    }\n    TextDocument.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link ITextDocument} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && (Is.undefined(candidate.languageId) || Is.string(candidate.languageId)) && Is.uinteger(candidate.lineCount)\n            && Is.func(candidate.getText) && Is.func(candidate.positionAt) && Is.func(candidate.offsetAt) ? 
true : false;\n    }\n    TextDocument.is = is;\n    function applyEdits(document, edits) {\n        let text = document.getText();\n        let sortedEdits = mergeSort(edits, (a, b) => {\n            let diff = a.range.start.line - b.range.start.line;\n            if (diff === 0) {\n                return a.range.start.character - b.range.start.character;\n            }\n            return diff;\n        });\n        let lastModifiedOffset = text.length;\n        for (let i = sortedEdits.length - 1; i >= 0; i--) {\n            let e = sortedEdits[i];\n            let startOffset = document.offsetAt(e.range.start);\n            let endOffset = document.offsetAt(e.range.end);\n            if (endOffset <= lastModifiedOffset) {\n                text = text.substring(0, startOffset) + e.newText + text.substring(endOffset, text.length);\n            }\n            else {\n                throw new Error('Overlapping edit');\n            }\n            lastModifiedOffset = startOffset;\n        }\n        return text;\n    }\n    TextDocument.applyEdits = applyEdits;\n    function mergeSort(data, compare) {\n        if (data.length <= 1) {\n            // sorted\n            return data;\n        }\n        const p = (data.length / 2) | 0;\n        const left = data.slice(0, p);\n        const right = data.slice(p);\n        mergeSort(left, compare);\n        mergeSort(right, compare);\n        let leftIdx = 0;\n        let rightIdx = 0;\n        let i = 0;\n        while (leftIdx < left.length && rightIdx < right.length) {\n            let ret = compare(left[leftIdx], right[rightIdx]);\n            if (ret <= 0) {\n                // smaller_equal -> take left to preserve order\n                data[i++] = left[leftIdx++];\n            }\n            else {\n                // greater -> take right\n                data[i++] = right[rightIdx++];\n            }\n        }\n        while (leftIdx < left.length) {\n            data[i++] = left[leftIdx++];\n        }\n        while (rightIdx < right.length) {\n            data[i++] = right[rightIdx++];\n        }\n        return data;\n    }\n})(TextDocument || (TextDocument = {}));\n/**\n * @deprecated Use the text document from the new vscode-languageserver-textdocument package.\n */\nclass FullTextDocument {\n    constructor(uri, languageId, version, content) {\n        this._uri = uri;\n        this._languageId = languageId;\n        this._version = version;\n        this._content = content;\n        this._lineOffsets = undefined;\n    }\n    get uri() {\n        return this._uri;\n    }\n    get languageId() {\n        return this._languageId;\n    }\n    get version() {\n        return this._version;\n    }\n    getText(range) {\n        if (range) {\n            let start = this.offsetAt(range.start);\n            let end = this.offsetAt(range.end);\n            return this._content.substring(start, end);\n        }\n        return this._content;\n    }\n    update(event, version) {\n        this._content = event.text;\n        this._version = version;\n        this._lineOffsets = undefined;\n    }\n    getLineOffsets() {\n        if (this._lineOffsets === undefined) {\n            let lineOffsets = [];\n            let text = this._content;\n            let isLineStart = true;\n            for (let i = 0; i < text.length; i++) {\n                if (isLineStart) {\n                    lineOffsets.push(i);\n                    isLineStart = false;\n                }\n                let ch = text.charAt(i);\n                isLineStart = 
(ch === '\\r' || ch === '\\n');\n                if (ch === '\\r' && i + 1 < text.length && text.charAt(i + 1) === '\\n') {\n                    i++;\n                }\n            }\n            if (isLineStart && text.length > 0) {\n                lineOffsets.push(text.length);\n            }\n            this._lineOffsets = lineOffsets;\n        }\n        return this._lineOffsets;\n    }\n    positionAt(offset) {\n        offset = Math.max(Math.min(offset, this._content.length), 0);\n        let lineOffsets = this.getLineOffsets();\n        let low = 0, high = lineOffsets.length;\n        if (high === 0) {\n            return Position.create(0, offset);\n        }\n        while (low < high) {\n            let mid = Math.floor((low + high) / 2);\n            if (lineOffsets[mid] > offset) {\n                high = mid;\n            }\n            else {\n                low = mid + 1;\n            }\n        }\n        // low is the least x for which the line offset is larger than the current offset\n        // or array.length if no line offset is larger than the current offset\n        let line = low - 1;\n        return Position.create(line, offset - lineOffsets[line]);\n    }\n    offsetAt(position) {\n        let lineOffsets = this.getLineOffsets();\n        if (position.line >= lineOffsets.length) {\n            return this._content.length;\n        }\n        else if (position.line < 0) {\n            return 0;\n        }\n        let lineOffset = lineOffsets[position.line];\n        let nextLineOffset = (position.line + 1 < lineOffsets.length) ? lineOffsets[position.line + 1] : this._content.length;\n        return Math.max(Math.min(lineOffset + position.character, nextLineOffset), lineOffset);\n    }\n    get lineCount() {\n        return this.getLineOffsets().length;\n    }\n}\nvar Is;\n(function (Is) {\n    const toString = Object.prototype.toString;\n    function defined(value) {\n        return typeof value !== 'undefined';\n    }\n    Is.defined = defined;\n    function undefined(value) {\n        return typeof value === 'undefined';\n    }\n    Is.undefined = undefined;\n    function boolean(value) {\n        return value === true || value === false;\n    }\n    Is.boolean = boolean;\n    function string(value) {\n        return toString.call(value) === '[object String]';\n    }\n    Is.string = string;\n    function number(value) {\n        return toString.call(value) === '[object Number]';\n    }\n    Is.number = number;\n    function numberRange(value, min, max) {\n        return toString.call(value) === '[object Number]' && min <= value && value <= max;\n    }\n    Is.numberRange = numberRange;\n    function integer(value) {\n        return toString.call(value) === '[object Number]' && -2147483648 <= value && value <= 2147483647;\n    }\n    Is.integer = integer;\n    function uinteger(value) {\n        return toString.call(value) === '[object Number]' && 0 <= value && value <= 2147483647;\n    }\n    Is.uinteger = uinteger;\n    function func(value) {\n        return toString.call(value) === '[object Function]';\n    }\n    Is.func = func;\n    function objectLiteral(value) {\n        // Strictly speaking class instances pass this check as well. Since the LSP\n        // doesn't use classes we ignore this for now. 
If we do we need to add something\n        // like this: `Object.getPrototypeOf(Object.getPrototypeOf(x)) === null`\n        return value !== null && typeof value === 'object';\n    }\n    Is.objectLiteral = objectLiteral;\n    function typedArray(value, check) {\n        return Array.isArray(value) && value.every(check);\n    }\n    Is.typedArray = typedArray;\n})(Is || (Is = {}));\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IToken, TokenType } from 'chevrotain';\nimport type { Range } from 'vscode-languageserver-types';\nimport type { AbstractElement } from '../languages/generated/ast.js';\nimport type { AstNode, CompositeCstNode, CstNode, LeafCstNode, RootCstNode } from '../syntax-tree.js';\nimport { Position } from 'vscode-languageserver-types';\nimport { isCompositeCstNode } from '../syntax-tree.js';\nimport { tokenToRange } from '../utils/cst-utils.js';\n\nexport class CstNodeBuilder {\n\n    private rootNode!: RootCstNodeImpl;\n    private nodeStack: CompositeCstNodeImpl[] = [];\n\n    private get current(): CompositeCstNodeImpl {\n        return this.nodeStack[this.nodeStack.length - 1];\n    }\n\n    buildRootNode(input: string): RootCstNode {\n        this.rootNode = new RootCstNodeImpl(input);\n        this.rootNode.root = this.rootNode;\n        this.nodeStack = [this.rootNode];\n        return this.rootNode;\n    }\n\n    buildCompositeNode(feature: AbstractElement): CompositeCstNode {\n        const compositeNode = new CompositeCstNodeImpl();\n        compositeNode.grammarSource = feature;\n        compositeNode.root = this.rootNode;\n        this.current.content.push(compositeNode);\n        this.nodeStack.push(compositeNode);\n        return compositeNode;\n    }\n\n    buildLeafNode(token: IToken, feature: AbstractElement): LeafCstNode {\n        const leafNode = new LeafCstNodeImpl(token.startOffset, token.image.length, tokenToRange(token), token.tokenType, false);\n        leafNode.grammarSource = feature;\n        leafNode.root = this.rootNode;\n        this.current.content.push(leafNode);\n        return leafNode;\n    }\n\n    removeNode(node: CstNode): void {\n        const parent = node.container;\n        if (parent) {\n            const index = parent.content.indexOf(node);\n            if (index >= 0) {\n                parent.content.splice(index, 1);\n            }\n        }\n    }\n\n    construct(item: { $type: string | symbol | undefined, $cstNode: CstNode }): void {\n        const current: CstNode = this.current;\n        // The specified item could be a datatype ($type is symbol) or a fragment ($type is undefined)\n        // Only if the $type is a string, we actually assign the element\n        if (typeof item.$type === 'string') {\n            this.current.astNode = item;\n        }\n        item.$cstNode = current;\n        const node = this.nodeStack.pop();\n        // Empty composite nodes are not valid\n        // Simply remove the node from the tree\n        if (node?.content.length === 0) {\n            this.removeNode(node);\n        }\n    }\n\n    addHiddenTokens(hiddenTokens: IToken[]): void {\n        for (const token of hiddenTokens) {\n            const hiddenNode = new LeafCstNodeImpl(token.startOffset, token.image.length, 
tokenToRange(token), token.tokenType, true);\n            hiddenNode.root = this.rootNode;\n            this.addHiddenToken(this.rootNode, hiddenNode);\n        }\n    }\n\n    private addHiddenToken(node: CompositeCstNode, token: LeafCstNode): void {\n        const { offset: tokenStart, end: tokenEnd } = token;\n\n        for (let i = 0; i < node.content.length; i++) {\n            const child = node.content[i];\n            const { offset: childStart, end: childEnd } = child;\n            if (isCompositeCstNode(child) && tokenStart > childStart && tokenEnd < childEnd) {\n                this.addHiddenToken(child, token);\n                return;\n            } else if (tokenEnd <= childStart) {\n                node.content.splice(i, 0, token);\n                return;\n            }\n        }\n\n        // We know that we haven't found a suited position for the token\n        // So we simply add it to the end of the current node\n        node.content.push(token);\n    }\n}\n\nexport abstract class AbstractCstNode implements CstNode {\n    abstract get offset(): number;\n    abstract get length(): number;\n    abstract get end(): number;\n    abstract get range(): Range;\n\n    container?: CompositeCstNode;\n    grammarSource: AbstractElement;\n    root: RootCstNode;\n    private _astNode?: AstNode;\n\n    /** @deprecated use `container` instead. */\n    get parent(): CompositeCstNode | undefined {\n        return this.container;\n    }\n\n    /** @deprecated use `grammarSource` instead. */\n    get feature(): AbstractElement {\n        return this.grammarSource;\n    }\n\n    get hidden(): boolean {\n        return false;\n    }\n\n    get astNode(): AstNode {\n        const node = typeof this._astNode?.$type === 'string' ? this._astNode : this.container?.astNode;\n        if (!node) {\n            throw new Error('This node has no associated AST element');\n        }\n        return node;\n    }\n\n    set astNode(value: AstNode) {\n        this._astNode = value;\n    }\n\n    /** @deprecated use `astNode` instead. */\n    get element(): AstNode {\n        return this.astNode;\n    }\n\n    get text(): string {\n        return this.root.fullText.substring(this.offset, this.end);\n    }\n}\n\nexport class LeafCstNodeImpl extends AbstractCstNode implements LeafCstNode {\n    get offset(): number {\n        return this._offset;\n    }\n\n    get length(): number {\n        return this._length;\n    }\n\n    get end(): number {\n        return this._offset + this._length;\n    }\n\n    override get hidden(): boolean {\n        return this._hidden;\n    }\n\n    get tokenType(): TokenType {\n        return this._tokenType;\n    }\n\n    get range(): Range {\n        return this._range;\n    }\n\n    private _hidden: boolean;\n    private _offset: number;\n    private _length: number;\n    private _range: Range;\n    private _tokenType: TokenType;\n\n    constructor(offset: number, length: number, range: Range, tokenType: TokenType, hidden = false) {\n        super();\n        this._hidden = hidden;\n        this._offset = offset;\n        this._tokenType = tokenType;\n        this._length = length;\n        this._range = range;\n    }\n}\n\nexport class CompositeCstNodeImpl extends AbstractCstNode implements CompositeCstNode {\n    readonly content: CstNode[] = new CstNodeContainer(this);\n    private _rangeCache?: Range;\n\n    /** @deprecated use `content` instead. 
*/\n    get children(): CstNode[] {\n        return this.content;\n    }\n\n    get offset(): number {\n        return this.firstNonHiddenNode?.offset ?? 0;\n    }\n\n    get length(): number {\n        return this.end - this.offset;\n    }\n\n    get end(): number {\n        return this.lastNonHiddenNode?.end ?? 0;\n    }\n\n    get range(): Range {\n        const firstNode = this.firstNonHiddenNode;\n        const lastNode = this.lastNonHiddenNode;\n        if (firstNode && lastNode) {\n            if (this._rangeCache === undefined) {\n                const { range: firstRange } = firstNode;\n                const { range: lastRange } = lastNode;\n                this._rangeCache = { start: firstRange.start, end: lastRange.end.line < firstRange.start.line ? firstRange.start : lastRange.end };\n            }\n            return this._rangeCache;\n        } else {\n            return { start: Position.create(0, 0), end: Position.create(0, 0) };\n        }\n    }\n\n    private get firstNonHiddenNode(): CstNode | undefined {\n        for (const child of this.content) {\n            if (!child.hidden) {\n                return child;\n            }\n        }\n        return this.content[0];\n    }\n\n    private get lastNonHiddenNode(): CstNode | undefined {\n        for (let i = this.content.length - 1; i >= 0; i--) {\n            const child = this.content[i];\n            if (!child.hidden) {\n                return child;\n            }\n        }\n        return this.content[this.content.length - 1];\n    }\n}\n\nclass CstNodeContainer extends Array {\n    readonly parent: CompositeCstNode;\n\n    constructor(parent: CompositeCstNode) {\n        super();\n        this.parent = parent;\n        Object.setPrototypeOf(this, CstNodeContainer.prototype);\n    }\n\n    override push(...items: CstNode[]): number {\n        this.addParents(items);\n        return super.push(...items);\n    }\n\n    override unshift(...items: CstNode[]): number {\n        this.addParents(items);\n        return super.unshift(...items);\n    }\n\n    override splice(start: number, count: number, ...items: CstNode[]): CstNode[] {\n        this.addParents(items);\n        return super.splice(start, count, ...items);\n    }\n\n    private addParents(items: CstNode[]): void {\n        for (const item of items) {\n            (item).container = this.parent;\n        }\n    }\n}\n\nexport class RootCstNodeImpl extends CompositeCstNodeImpl implements RootCstNode {\n    private _text = '';\n\n    override get text(): string {\n        return this._text.substring(this.offset, this.end);\n    }\n\n    get fullText(): string {\n        return this._text;\n    }\n\n    constructor(input?: string) {\n        super();\n        this._text = input ?? 
'';\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\nimport type { DSLMethodOpts, ILexingError, IOrAlt, IParserErrorMessageProvider, IRecognitionException, IToken, TokenType, TokenVocabulary } from 'chevrotain';\nimport type { AbstractElement, Action, Assignment, ParserRule } from '../languages/generated/ast.js';\nimport type { Linker } from '../references/linker.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstReflection, CompositeCstNode, CstNode } from '../syntax-tree.js';\nimport type { Lexer } from './lexer.js';\nimport type { IParserConfig } from './parser-config.js';\nimport type { ValueConverter } from './value-converter.js';\nimport { defaultParserErrorProvider, EmbeddedActionsParser, LLkLookaheadStrategy } from 'chevrotain';\nimport { LLStarLookaheadStrategy } from 'chevrotain-allstar';\nimport { isAssignment, isCrossReference, isKeyword } from '../languages/generated/ast.js';\nimport { getTypeName, isDataTypeRule } from '../utils/grammar-utils.js';\nimport { assignMandatoryProperties, getContainerOfType, linkContentToContainer } from '../utils/ast-utils.js';\nimport { CstNodeBuilder } from './cst-node-builder.js';\n\nexport type ParseResult = {\n    value: T,\n    parserErrors: IRecognitionException[],\n    lexerErrors: ILexingError[]\n}\n\nexport const DatatypeSymbol = Symbol('Datatype');\n\ninterface DataTypeNode {\n    $cstNode: CompositeCstNode\n    /** Instead of a string, this node is uniquely identified by the `Datatype` symbol */\n    $type: symbol\n    /** Used as a storage for all parsed terminals, keywords and sub-datatype rules */\n    value: string\n}\n\nfunction isDataTypeNode(node: { $type: string | symbol | undefined }): node is DataTypeNode {\n    return node.$type === DatatypeSymbol;\n}\n\ntype RuleResult = (args: Args) => any;\n\ntype Args = Record;\n\ntype RuleImpl = (args: Args) => any;\n\ninterface AssignmentElement {\n    assignment?: Assignment\n    isCrossRef: boolean\n}\n\nexport interface BaseParser {\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult;\n    alternatives(idx: number, choices: Array>): void;\n    optional(idx: number, callback: DSLMethodOpts): void;\n    many(idx: number, callback: DSLMethodOpts): void;\n    atLeastOne(idx: number, callback: DSLMethodOpts): void;\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void;\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void;\n    action($type: string, action: Action): void;\n    construct(): unknown;\n    isRecording(): boolean;\n    get unorderedGroups(): Map;\n    getRuleStack(): number[];\n}\n\nconst ruleSuffix = '\\u200B';\nconst withRuleSuffix = (name: string): string => name.endsWith(ruleSuffix) ? 
name : name + ruleSuffix;\n\nexport abstract class AbstractLangiumParser implements BaseParser {\n\n    protected readonly lexer: Lexer;\n    protected readonly wrapper: ChevrotainWrapper;\n    protected _unorderedGroups: Map = new Map();\n\n    constructor(services: LangiumCoreServices) {\n        this.lexer = services.parser.Lexer;\n        const tokens = this.lexer.definition;\n        this.wrapper = new ChevrotainWrapper(tokens, {\n            ...services.parser.ParserConfig,\n            errorMessageProvider: services.parser.ParserErrorMessageProvider\n        });\n    }\n\n    alternatives(idx: number, choices: Array>): void {\n        this.wrapper.wrapOr(idx, choices);\n    }\n\n    optional(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapOption(idx, callback);\n    }\n\n    many(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapMany(idx, callback);\n    }\n\n    atLeastOne(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapAtLeastOne(idx, callback);\n    }\n\n    abstract rule(rule: ParserRule, impl: RuleImpl): RuleResult;\n    abstract consume(idx: number, tokenType: TokenType, feature: AbstractElement): void;\n    abstract subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void;\n    abstract action($type: string, action: Action): void;\n    abstract construct(): unknown;\n\n    isRecording(): boolean {\n        return this.wrapper.IS_RECORDING;\n    }\n\n    get unorderedGroups(): Map {\n        return this._unorderedGroups;\n    }\n\n    getRuleStack(): number[] {\n        return (this.wrapper as any).RULE_STACK;\n    }\n\n    finalize(): void {\n        this.wrapper.wrapSelfAnalysis();\n    }\n}\n\nexport class LangiumParser extends AbstractLangiumParser {\n    private readonly linker: Linker;\n    private readonly converter: ValueConverter;\n    private readonly astReflection: AstReflection;\n    private readonly nodeBuilder = new CstNodeBuilder();\n    private stack: any[] = [];\n    private mainRule!: RuleResult;\n    private assignmentMap = new Map();\n\n    private get current(): any {\n        return this.stack[this.stack.length - 1];\n    }\n\n    constructor(services: LangiumCoreServices) {\n        super(services);\n        this.linker = services.references.Linker;\n        this.converter = services.parser.ValueConverter;\n        this.astReflection = services.shared.AstReflection;\n    }\n\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult {\n        const type = rule.fragment ? undefined : isDataTypeRule(rule) ? 
DatatypeSymbol : getTypeName(rule);\n        const ruleMethod = this.wrapper.DEFINE_RULE(withRuleSuffix(rule.name), this.startImplementation(type, impl).bind(this));\n        if (rule.entry) {\n            this.mainRule = ruleMethod;\n        }\n        return ruleMethod;\n    }\n\n    parse(input: string): ParseResult {\n        this.nodeBuilder.buildRootNode(input);\n        const lexerResult = this.lexer.tokenize(input);\n        this.wrapper.input = lexerResult.tokens;\n        const result = this.mainRule.call(this.wrapper, {});\n        this.nodeBuilder.addHiddenTokens(lexerResult.hidden);\n        this.unorderedGroups.clear();\n        return {\n            value: result,\n            lexerErrors: lexerResult.errors,\n            parserErrors: this.wrapper.errors\n        };\n    }\n\n    private startImplementation($type: string | symbol | undefined, implementation: RuleImpl): RuleImpl {\n        return (args) => {\n            if (!this.isRecording()) {\n                const node: any = { $type };\n                this.stack.push(node);\n                if ($type === DatatypeSymbol) {\n                    node.value = '';\n                }\n            }\n            let result: unknown;\n            try {\n                result = implementation(args);\n            } catch (err) {\n                result = undefined;\n            }\n            if (!this.isRecording() && result === undefined) {\n                result = this.construct();\n            }\n            return result;\n        };\n    }\n\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void {\n        const token = this.wrapper.wrapConsume(idx, tokenType);\n        if (!this.isRecording() && this.isValidToken(token)) {\n            const leafNode = this.nodeBuilder.buildLeafNode(token, feature);\n            const { assignment, isCrossRef } = this.getAssignment(feature);\n            const current = this.current;\n            if (assignment) {\n                const convertedValue = isKeyword(feature) ? token.image : this.converter.convert(token.image, leafNode);\n                this.assign(assignment.operator, assignment.feature, convertedValue, leafNode, isCrossRef);\n            } else if (isDataTypeNode(current)) {\n                let text = token.image;\n                if (!isKeyword(feature)) {\n                    text = this.converter.convert(text, leafNode).toString();\n                }\n                current.value += text;\n            }\n        }\n    }\n\n    /**\n     * Most consumed parser tokens are valid. However there are two cases in which they are not valid:\n     *\n     * 1. They were inserted during error recovery by the parser. These tokens don't really exist and should not be further processed\n     * 2. They contain invalid token ranges. 
This might include the special EOF token, or other tokens produced by invalid token builders.\n     */\n    private isValidToken(token: IToken): boolean {\n        return !token.isInsertedInRecovery && !isNaN(token.startOffset) && typeof token.endOffset === 'number' && !isNaN(token.endOffset);\n    }\n\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void {\n        let cstNode: CompositeCstNode | undefined;\n        if (!this.isRecording()) {\n            cstNode = this.nodeBuilder.buildCompositeNode(feature);\n        }\n        const subruleResult = this.wrapper.wrapSubrule(idx, rule, args) as any;\n        if (!this.isRecording() && cstNode && cstNode.length > 0) {\n            this.performSubruleAssignment(subruleResult, feature, cstNode);\n        }\n    }\n\n    private performSubruleAssignment(result: any, feature: AbstractElement, cstNode: CompositeCstNode): void {\n        const { assignment, isCrossRef } = this.getAssignment(feature);\n        if (assignment) {\n            this.assign(assignment.operator, assignment.feature, result, cstNode, isCrossRef);\n        } else if (!assignment) {\n            // If we call a subrule without an assignment we either:\n            // 1. append the result of the subrule (data type rule)\n            // 2. override the current object with the newly parsed object\n            // If the current element is an AST node and the result of the subrule\n            // is a data type rule, we can safely discard the results.\n            const current = this.current;\n            if (isDataTypeNode(current)) {\n                current.value += result.toString();\n            } else if (typeof result === 'object' && result) {\n                const resultKind = result.$type;\n                const object = this.assignWithoutOverride(result, current);\n                if (resultKind) {\n                    object.$type = resultKind;\n                }\n                const newItem = object;\n                this.stack.pop();\n                this.stack.push(newItem);\n            }\n        }\n    }\n\n    action($type: string, action: Action): void {\n        if (!this.isRecording()) {\n            let last = this.current;\n            // This branch is used for left recursive grammar rules.\n            // Those don't call `construct` before another action.\n            // Therefore, we need to call it here.\n            if (!last.$cstNode && action.feature && action.operator) {\n                last = this.construct(false);\n                const feature = last.$cstNode.feature;\n                this.nodeBuilder.buildCompositeNode(feature);\n            }\n            const newItem = { $type };\n            this.stack.pop();\n            this.stack.push(newItem);\n            if (action.feature && action.operator) {\n                this.assign(action.operator, action.feature, last, last.$cstNode, false);\n            }\n        }\n    }\n\n    construct(pop = true): unknown {\n        if (this.isRecording()) {\n            return undefined;\n        }\n        const obj = this.current;\n        linkContentToContainer(obj);\n        this.nodeBuilder.construct(obj);\n        if (pop) {\n            this.stack.pop();\n        }\n        if (isDataTypeNode(obj)) {\n            return this.converter.convert(obj.value, obj.$cstNode);\n        } else {\n            assignMandatoryProperties(this.astReflection, obj);\n        }\n        return obj;\n    }\n\n    private getAssignment(feature: AbstractElement): 
AssignmentElement {\n        if (!this.assignmentMap.has(feature)) {\n            const assignment = getContainerOfType(feature, isAssignment);\n            this.assignmentMap.set(feature, {\n                assignment: assignment,\n                isCrossRef: assignment ? isCrossReference(assignment.terminal) : false\n            });\n        }\n        return this.assignmentMap.get(feature)!;\n    }\n\n    private assign(operator: string, feature: string, value: unknown, cstNode: CstNode, isCrossRef: boolean): void {\n        const obj = this.current;\n        let item: unknown;\n        if (isCrossRef && typeof value === 'string') {\n            item = this.linker.buildReference(obj, feature, cstNode, value);\n        } else {\n            item = value;\n        }\n        switch (operator) {\n            case '=': {\n                obj[feature] = item;\n                break;\n            }\n            case '?=': {\n                obj[feature] = true;\n                break;\n            }\n            case '+=': {\n                if (!Array.isArray(obj[feature])) {\n                    obj[feature] = [];\n                }\n                obj[feature].push(item);\n            }\n        }\n    }\n\n    private assignWithoutOverride(target: any, source: any): any {\n        for (const [name, existingValue] of Object.entries(source)) {\n            const newValue = target[name];\n            if (newValue === undefined) {\n                target[name] = existingValue;\n            } else if (Array.isArray(newValue) && Array.isArray(existingValue)) {\n                existingValue.push(...newValue);\n                target[name] = existingValue;\n            }\n        }\n        return target;\n    }\n\n    get definitionErrors(): IParserDefinitionError[] {\n        return this.wrapper.definitionErrors;\n    }\n}\n\nexport interface IParserDefinitionError {\n    message: string\n    type: number\n    ruleName?: string\n}\n\nexport abstract class AbstractParserErrorMessageProvider implements IParserErrorMessageProvider {\n\n    buildMismatchTokenMessage(options: {\n        expected: TokenType\n        actual: IToken\n        previous: IToken\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildMismatchTokenMessage(options);\n    }\n\n    buildNotAllInputParsedMessage(options: {\n        firstRedundant: IToken\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildNotAllInputParsedMessage(options);\n    }\n\n    buildNoViableAltMessage(options: {\n        expectedPathsPerAlt: TokenType[][][]\n        actual: IToken[]\n        previous: IToken\n        customUserDescription: string\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildNoViableAltMessage(options);\n    }\n\n    buildEarlyExitMessage(options: {\n        expectedIterationPaths: TokenType[][]\n        actual: IToken[]\n        previous: IToken\n        customUserDescription: string\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildEarlyExitMessage(options);\n    }\n\n}\n\nexport class LangiumParserErrorMessageProvider extends AbstractParserErrorMessageProvider {\n\n    override buildMismatchTokenMessage({ expected, actual }: {\n        expected: TokenType\n        actual: IToken\n        previous: IToken\n        ruleName: string\n    }): string {\n        const expectedMsg = expected.LABEL\n            ? 
'`' + expected.LABEL + '`'\n            : expected.name.endsWith(':KW')\n                ? `keyword '${expected.name.substring(0, expected.name.length - 3)}'`\n                : `token of type '${expected.name}'`;\n        return `Expecting ${expectedMsg} but found \\`${actual.image}\\`.`;\n    }\n\n    override buildNotAllInputParsedMessage({ firstRedundant }: {\n        firstRedundant: IToken\n        ruleName: string\n    }): string {\n        return `Expecting end of file but found \\`${firstRedundant.image}\\`.`;\n    }\n}\n\nexport interface CompletionParserResult {\n    tokens: IToken[]\n    elementStack: AbstractElement[]\n    tokenIndex: number\n}\n\nexport class LangiumCompletionParser extends AbstractLangiumParser {\n    private mainRule!: RuleResult;\n    private tokens: IToken[] = [];\n\n    private elementStack: AbstractElement[] = [];\n    private lastElementStack: AbstractElement[] = [];\n    private nextTokenIndex = 0;\n    private stackSize = 0;\n\n    action(): void {\n        // NOOP\n    }\n\n    construct(): unknown {\n        // NOOP\n        return undefined;\n    }\n\n    parse(input: string): CompletionParserResult {\n        this.resetState();\n        const tokens = this.lexer.tokenize(input);\n        this.tokens = tokens.tokens;\n        this.wrapper.input = [...this.tokens];\n        this.mainRule.call(this.wrapper, {});\n        this.unorderedGroups.clear();\n        return {\n            tokens: this.tokens,\n            elementStack: [...this.lastElementStack],\n            tokenIndex: this.nextTokenIndex\n        };\n    }\n\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult {\n        const ruleMethod = this.wrapper.DEFINE_RULE(withRuleSuffix(rule.name), this.startImplementation(impl).bind(this));\n        if (rule.entry) {\n            this.mainRule = ruleMethod;\n        }\n        return ruleMethod;\n    }\n\n    private resetState(): void {\n        this.elementStack = [];\n        this.lastElementStack = [];\n        this.nextTokenIndex = 0;\n        this.stackSize = 0;\n    }\n\n    private startImplementation(implementation: RuleImpl): RuleImpl {\n        return (args) => {\n            const size = this.keepStackSize();\n            try {\n                implementation(args);\n            } finally {\n                this.resetStackSize(size);\n            }\n        };\n    }\n\n    private removeUnexpectedElements(): void {\n        this.elementStack.splice(this.stackSize);\n    }\n\n    keepStackSize(): number {\n        const size = this.elementStack.length;\n        this.stackSize = size;\n        return size;\n    }\n\n    resetStackSize(size: number): void {\n        this.removeUnexpectedElements();\n        this.stackSize = size;\n    }\n\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void {\n        this.wrapper.wrapConsume(idx, tokenType);\n        if (!this.isRecording()) {\n            this.lastElementStack = [...this.elementStack, feature];\n            this.nextTokenIndex = this.currIdx + 1;\n        }\n    }\n\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void {\n        this.before(feature);\n        this.wrapper.wrapSubrule(idx, rule, args);\n        this.after(feature);\n    }\n\n    before(element: AbstractElement): void {\n        if (!this.isRecording()) {\n            this.elementStack.push(element);\n        }\n    }\n\n    after(element: AbstractElement): void {\n        if (!this.isRecording()) {\n            const index = 
this.elementStack.lastIndexOf(element);\n            if (index >= 0) {\n                this.elementStack.splice(index);\n            }\n        }\n    }\n\n    get currIdx(): number {\n        return (this.wrapper as any).currIdx;\n    }\n}\n\nconst defaultConfig: IParserConfig = {\n    recoveryEnabled: true,\n    nodeLocationTracking: 'full',\n    skipValidations: true,\n    errorMessageProvider: new LangiumParserErrorMessageProvider()\n};\n\n/**\n * This class wraps the embedded actions parser of chevrotain and exposes protected methods.\n * This way, we can build the `LangiumParser` as a composition.\n */\nclass ChevrotainWrapper extends EmbeddedActionsParser {\n\n    // This array is set in the base implementation of Chevrotain.\n    definitionErrors: IParserDefinitionError[];\n\n    constructor(tokens: TokenVocabulary, config?: IParserConfig) {\n        const useDefaultLookahead = config && 'maxLookahead' in config;\n        super(tokens, {\n            ...defaultConfig,\n            lookaheadStrategy: useDefaultLookahead\n                ? new LLkLookaheadStrategy({ maxLookahead: config.maxLookahead })\n                : new LLStarLookaheadStrategy(),\n            ...config,\n        });\n    }\n\n    get IS_RECORDING(): boolean {\n        return this.RECORDING_PHASE;\n    }\n\n    DEFINE_RULE(name: string, impl: RuleImpl): RuleResult {\n        return this.RULE(name, impl);\n    }\n\n    wrapSelfAnalysis(): void {\n        this.performSelfAnalysis();\n    }\n\n    wrapConsume(idx: number, tokenType: TokenType): IToken {\n        return this.consume(idx, tokenType);\n    }\n\n    wrapSubrule(idx: number, rule: RuleResult, args: Args): unknown {\n        return this.subrule(idx, rule, {\n            ARGS: [args]\n        });\n    }\n\n    wrapOr(idx: number, choices: Array>): void {\n        this.or(idx, choices);\n    }\n\n    wrapOption(idx: number, callback: DSLMethodOpts): void {\n        this.option(idx, callback);\n    }\n\n    wrapMany(idx: number, callback: DSLMethodOpts): void {\n        this.many(idx, callback);\n    }\n\n    wrapAtLeastOne(idx: number, callback: DSLMethodOpts): void {\n        this.atLeastOne(idx, callback);\n    }\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IOrAlt, TokenType, TokenTypeDictionary } from 'chevrotain';\nimport type { AbstractElement, Action, Alternatives, Condition, CrossReference, Grammar, Group, Keyword, NamedArgument, ParserRule, RuleCall, UnorderedGroup } from '../languages/generated/ast.js';\nimport type { BaseParser } from './langium-parser.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { Cardinality } from '../utils/grammar-utils.js';\nimport { EMPTY_ALT, EOF } from 'chevrotain';\nimport { isAction, isAlternatives, isEndOfFile, isAssignment, isConjunction, isCrossReference, isDisjunction, isGroup, isKeyword, isNegation, isParameterReference, isParserRule, isRuleCall, isTerminalRule, isUnorderedGroup, isBooleanLiteral } from '../languages/generated/ast.js';\nimport { assertUnreachable, ErrorWithLocation } from '../utils/errors.js';\nimport { stream } from '../utils/stream.js';\nimport { findNameAssignment, getAllReachableRules, getTypeName } from '../utils/grammar-utils.js';\n\ntype RuleContext = 
{\n    optional: number,\n    consume: number,\n    subrule: number,\n    many: number,\n    or: number\n} & ParserContext;\n\ntype ParserContext = {\n    parser: BaseParser\n    tokens: TokenTypeDictionary\n    rules: Map\n    ruleNames: Map\n}\n\ntype Rule = (args: Args) => unknown;\n\ntype Args = Record;\n\ntype Predicate = (args: Args) => boolean;\n\ntype Method = (args: Args) => void;\n\nexport function createParser(grammar: Grammar, parser: T, tokens: TokenTypeDictionary): T {\n    const rules = new Map();\n    const parserContext: ParserContext = {\n        parser,\n        tokens,\n        rules,\n        ruleNames: new Map()\n    };\n    buildRules(parserContext, grammar);\n    return parser;\n}\n\nfunction buildRules(parserContext: ParserContext, grammar: Grammar): void {\n    const reachable = getAllReachableRules(grammar, false);\n    const parserRules = stream(grammar.rules).filter(isParserRule).filter(rule => reachable.has(rule));\n    for (const rule of parserRules) {\n        const ctx: RuleContext = {\n            ...parserContext,\n            consume: 1,\n            optional: 1,\n            subrule: 1,\n            many: 1,\n            or: 1\n        };\n        ctx.rules.set(\n            rule.name,\n            parserContext.parser.rule(rule, buildElement(ctx, rule.definition))\n        );\n    }\n}\n\nfunction buildElement(ctx: RuleContext, element: AbstractElement, ignoreGuard = false): Method {\n    let method: Method;\n    if (isKeyword(element)) {\n        method = buildKeyword(ctx, element);\n    } else if (isAction(element)) {\n        method = buildAction(ctx, element);\n    } else if (isAssignment(element)) {\n        method = buildElement(ctx, element.terminal);\n    } else if (isCrossReference(element)) {\n        method = buildCrossReference(ctx, element);\n    } else if (isRuleCall(element)) {\n        method = buildRuleCall(ctx, element);\n    } else if (isAlternatives(element)) {\n        method = buildAlternatives(ctx, element);\n    } else if (isUnorderedGroup(element)) {\n        method = buildUnorderedGroup(ctx, element);\n    } else if (isGroup(element)) {\n        method = buildGroup(ctx, element);\n    } else if(isEndOfFile(element)) {\n        const idx = ctx.consume++;\n        method = () => ctx.parser.consume(idx, EOF, element);\n    } else {\n        throw new ErrorWithLocation(element.$cstNode, `Unexpected element type: ${element.$type}`);\n    }\n    return wrap(ctx, ignoreGuard ? undefined : getGuardCondition(element), method, element.cardinality);\n}\n\nfunction buildAction(ctx: RuleContext, action: Action): Method {\n    const actionType = getTypeName(action);\n    return () => ctx.parser.action(actionType, action);\n}\n\nfunction buildRuleCall(ctx: RuleContext, ruleCall: RuleCall): Method {\n    const rule = ruleCall.rule.ref;\n    if (isParserRule(rule)) {\n        const idx = ctx.subrule++;\n        const predicate = ruleCall.arguments.length > 0 ? 
buildRuleCallPredicate(rule, ruleCall.arguments) : () => ({});\n        return (args) => ctx.parser.subrule(idx, getRule(ctx, rule), ruleCall, predicate(args));\n    } else if (isTerminalRule(rule)) {\n        const idx = ctx.consume++;\n        const method = getToken(ctx, rule.name);\n        return () => ctx.parser.consume(idx, method, ruleCall);\n    } else if (!rule) {\n        throw new ErrorWithLocation(ruleCall.$cstNode, `Undefined rule type: ${ruleCall.$type}`);\n    } else {\n        assertUnreachable(rule);\n    }\n}\n\nfunction buildRuleCallPredicate(rule: ParserRule, namedArgs: NamedArgument[]): (args: Args) => Args {\n    const predicates = namedArgs.map(e => buildPredicate(e.value));\n    return (args) => {\n        const ruleArgs: Args = {};\n        for (let i = 0; i < predicates.length; i++) {\n            const ruleTarget = rule.parameters[i];\n            const predicate = predicates[i];\n            ruleArgs[ruleTarget.name] = predicate(args);\n        }\n        return ruleArgs;\n    };\n}\n\ninterface PredicatedMethod {\n    ALT: Method,\n    GATE?: Predicate\n}\n\nfunction buildPredicate(condition: Condition): Predicate {\n    if (isDisjunction(condition)) {\n        const left = buildPredicate(condition.left);\n        const right = buildPredicate(condition.right);\n        return (args) => (left(args) || right(args));\n    } else if (isConjunction(condition)) {\n        const left = buildPredicate(condition.left);\n        const right = buildPredicate(condition.right);\n        return (args) => (left(args) && right(args));\n    } else if (isNegation(condition)) {\n        const value = buildPredicate(condition.value);\n        return (args) => !value(args);\n    } else if (isParameterReference(condition)) {\n        const name = condition.parameter.ref!.name;\n        return (args) => args !== undefined && args[name] === true;\n    } else if (isBooleanLiteral(condition)) {\n        const value = Boolean(condition.true);\n        return () => value;\n    }\n    assertUnreachable(condition);\n}\n\nfunction buildAlternatives(ctx: RuleContext, alternatives: Alternatives): Method {\n    if (alternatives.elements.length === 1) {\n        return buildElement(ctx, alternatives.elements[0]);\n    } else {\n        const methods: PredicatedMethod[] = [];\n\n        for (const element of alternatives.elements) {\n            const predicatedMethod: PredicatedMethod = {\n                // Since we handle the guard condition in the alternative already\n                // We can ignore the group guard condition inside\n                ALT: buildElement(ctx, element, true)\n            };\n            const guard = getGuardCondition(element);\n            if (guard) {\n                predicatedMethod.GATE = buildPredicate(guard);\n            }\n            methods.push(predicatedMethod);\n        }\n\n        const idx = ctx.or++;\n        return (args) => ctx.parser.alternatives(idx, methods.map(method => {\n            const alt: IOrAlt = {\n                ALT: () => method.ALT(args)\n            };\n            const gate = method.GATE;\n            if (gate) {\n                alt.GATE = () => gate(args);\n            }\n            return alt;\n        }));\n    }\n}\n\nfunction buildUnorderedGroup(ctx: RuleContext, group: UnorderedGroup): Method {\n    if (group.elements.length === 1) {\n        return buildElement(ctx, group.elements[0]);\n    }\n    const methods: PredicatedMethod[] = [];\n\n    for (const element of group.elements) {\n        const predicatedMethod: 
PredicatedMethod = {\n            // Since we handle the guard condition in the alternative already\n            // We can ignore the group guard condition inside\n            ALT: buildElement(ctx, element, true)\n        };\n        const guard = getGuardCondition(element);\n        if (guard) {\n            predicatedMethod.GATE = buildPredicate(guard);\n        }\n        methods.push(predicatedMethod);\n    }\n\n    const orIdx = ctx.or++;\n\n    const idFunc = (groupIdx: number, lParser: BaseParser) => {\n        const stackId = lParser.getRuleStack().join('-');\n        return `uGroup_${groupIdx}_${stackId}`;\n    };\n    const alternatives: Method = (args) => ctx.parser.alternatives(orIdx, methods.map((method, idx) => {\n        const alt: IOrAlt = { ALT: () => true };\n        const parser = ctx.parser;\n        alt.ALT = () => {\n            method.ALT(args);\n            if (!parser.isRecording()) {\n                const key = idFunc(orIdx, parser);\n                if (!parser.unorderedGroups.get(key)) {\n                    // init after clear state\n                    parser.unorderedGroups.set(key, []);\n                }\n                const groupState = parser.unorderedGroups.get(key)!;\n                if (typeof groupState?.[idx] === 'undefined') {\n                    // Not accessed yet\n                    groupState[idx] = true;\n                }\n            }\n        };\n        const gate = method.GATE;\n        if (gate) {\n            alt.GATE = () => gate(args);\n        } else {\n            alt.GATE = () => {\n                const trackedAlternatives = parser.unorderedGroups.get(idFunc(orIdx, parser));\n                const allow = !trackedAlternatives?.[idx];\n                return allow;\n            };\n        }\n        return alt;\n    }));\n    const wrapped = wrap(ctx, getGuardCondition(group), alternatives, '*');\n    return (args) => {\n        wrapped(args);\n        if (!ctx.parser.isRecording()) {\n            ctx.parser.unorderedGroups.delete(idFunc(orIdx, ctx.parser));\n        }\n    };\n}\n\nfunction buildGroup(ctx: RuleContext, group: Group): Method {\n    const methods = group.elements.map(e => buildElement(ctx, e));\n    return (args) => methods.forEach(method => method(args));\n}\n\nfunction getGuardCondition(element: AbstractElement): Condition | undefined {\n    if (isGroup(element)) {\n        return element.guardCondition;\n    }\n    return undefined;\n}\n\nfunction buildCrossReference(ctx: RuleContext, crossRef: CrossReference, terminal = crossRef.terminal): Method {\n    if (!terminal) {\n        if (!crossRef.type.ref) {\n            throw new Error('Could not resolve reference to type: ' + crossRef.type.$refText);\n        }\n        const assignment = findNameAssignment(crossRef.type.ref);\n        const assignTerminal = assignment?.terminal;\n        if (!assignTerminal) {\n            throw new Error('Could not find name assignment for type: ' + getTypeName(crossRef.type.ref));\n        }\n        return buildCrossReference(ctx, crossRef, assignTerminal);\n    } else if (isRuleCall(terminal) && isParserRule(terminal.rule.ref)) {\n        const idx = ctx.subrule++;\n        return (args) => ctx.parser.subrule(idx, getRule(ctx, terminal.rule.ref as ParserRule), crossRef, args);\n    } else if (isRuleCall(terminal) && isTerminalRule(terminal.rule.ref)) {\n        const idx = ctx.consume++;\n        const terminalRule = getToken(ctx, terminal.rule.ref.name);\n        return () => ctx.parser.consume(idx, terminalRule, 
crossRef);\n    } else if (isKeyword(terminal)) {\n        const idx = ctx.consume++;\n        const keyword = getToken(ctx, terminal.value);\n        return () => ctx.parser.consume(idx, keyword, crossRef);\n    }\n    else {\n        throw new Error('Could not build cross reference parser');\n    }\n}\n\nfunction buildKeyword(ctx: RuleContext, keyword: Keyword): Method {\n    const idx = ctx.consume++;\n    const token = ctx.tokens[keyword.value];\n    if (!token) {\n        throw new Error('Could not find token for keyword: ' + keyword.value);\n    }\n    return () => ctx.parser.consume(idx, token, keyword);\n}\n\nfunction wrap(ctx: RuleContext, guard: Condition | undefined, method: Method, cardinality: Cardinality): Method {\n    const gate = guard && buildPredicate(guard);\n\n    if (!cardinality) {\n        if (gate) {\n            const idx = ctx.or++;\n            return (args) => ctx.parser.alternatives(idx, [\n                {\n                    ALT: () => method(args),\n                    GATE: () => gate(args)\n                },\n                {\n                    ALT: EMPTY_ALT(),\n                    GATE: () => !gate(args)\n                }\n            ]);\n        } else {\n            return method;\n        }\n    }\n\n    if (cardinality === '*') {\n        const idx = ctx.many++;\n        return (args) => ctx.parser.many(idx, {\n            DEF: () => method(args),\n            GATE: gate ? () => gate(args) : undefined\n        });\n    } else if (cardinality === '+') {\n        const idx = ctx.many++;\n        if (gate) {\n            const orIdx = ctx.or++;\n            // In the case of a guard condition for the `+` group\n            // We combine it with an empty alternative\n            // If the condition returns true, it needs to parse at least a single iteration\n            // If its false, it is not allowed to parse anything\n            return (args) => ctx.parser.alternatives(orIdx, [\n                {\n                    ALT: () => ctx.parser.atLeastOne(idx, {\n                        DEF: () => method(args)\n                    }),\n                    GATE: () => gate(args)\n                },\n                {\n                    ALT: EMPTY_ALT(),\n                    GATE: () => !gate(args)\n                }\n            ]);\n        } else {\n            return (args) => ctx.parser.atLeastOne(idx, {\n                DEF: () => method(args),\n            });\n        }\n    } else if (cardinality === '?') {\n        const idx = ctx.optional++;\n        return (args) => ctx.parser.optional(idx, {\n            DEF: () => method(args),\n            GATE: gate ? 
() => gate(args) : undefined\n        });\n    } else {\n        assertUnreachable(cardinality);\n    }\n}\n\nfunction getRule(ctx: ParserContext, element: ParserRule | AbstractElement): Rule {\n    const name = getRuleName(ctx, element);\n    const rule = ctx.rules.get(name);\n    if (!rule) throw new Error(`Rule \"${name}\" not found.\"`);\n    return rule;\n}\n\nfunction getRuleName(ctx: ParserContext, element: ParserRule | AbstractElement): string {\n    if (isParserRule(element)) {\n        return element.name;\n    } else if (ctx.ruleNames.has(element)) {\n        return ctx.ruleNames.get(element)!;\n    } else {\n        let item: AstNode = element;\n        let parent: AstNode = item.$container!;\n        let ruleName: string = element.$type;\n        while (!isParserRule(parent)) {\n            if (isGroup(parent) || isAlternatives(parent) || isUnorderedGroup(parent)) {\n                const index = parent.elements.indexOf(item as AbstractElement);\n                ruleName = index.toString() + ':' + ruleName;\n            }\n            item = parent;\n            parent = parent.$container!;\n        }\n        const rule = parent as ParserRule;\n        ruleName = rule.name + ':' + ruleName;\n        ctx.ruleNames.set(element, ruleName);\n        return ruleName;\n    }\n}\n\nfunction getToken(ctx: ParserContext, name: string): TokenType {\n    const token = ctx.tokens[name];\n    if (!token) throw new Error(`Token \"${name}\" not found.\"`);\n    return token;\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { LangiumCompletionParser } from './langium-parser.js';\nimport { createParser } from './parser-builder-base.js';\n\nexport function createCompletionParser(services: LangiumCoreServices): LangiumCompletionParser {\n    const grammar = services.Grammar;\n    const lexer = services.parser.Lexer;\n    const parser = new LangiumCompletionParser(services);\n    createParser(grammar, parser, lexer.definition);\n    parser.finalize();\n    return parser;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { LangiumParser } from './langium-parser.js';\nimport { createParser } from './parser-builder-base.js';\n\n/**\n * Create and finalize a Langium parser. The parser rules are derived from the grammar, which is\n * available at `services.Grammar`.\n */\nexport function createLangiumParser(services: LangiumCoreServices): LangiumParser {\n    const parser = prepareLangiumParser(services);\n    parser.finalize();\n    return parser;\n}\n\n/**\n * Create a Langium parser without finalizing it. 
This is used to extract more detailed error\n * information when the parser is initially validated.\n */\nexport function prepareLangiumParser(services: LangiumCoreServices): LangiumParser {\n    const grammar = services.Grammar;\n    const lexer = services.parser.Lexer;\n    const parser = new LangiumParser(services);\n    return createParser(grammar, parser, lexer.definition);\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CustomPatternMatcherFunc, TokenPattern, TokenType, TokenVocabulary } from 'chevrotain';\nimport type { AbstractRule, Grammar, Keyword, TerminalRule } from '../languages/generated/ast.js';\nimport type { Stream } from '../utils/stream.js';\nimport { Lexer } from 'chevrotain';\nimport { isKeyword, isParserRule, isTerminalRule } from '../languages/generated/ast.js';\nimport { streamAllContents } from '../utils/ast-utils.js';\nimport { getAllReachableRules, terminalRegex } from '../utils/grammar-utils.js';\nimport { getCaseInsensitivePattern, isWhitespace, partialMatches } from '../utils/regexp-utils.js';\nimport { stream } from '../utils/stream.js';\n\nexport interface TokenBuilderOptions {\n    caseInsensitive?: boolean\n}\n\nexport interface TokenBuilder {\n    buildTokens(grammar: Grammar, options?: TokenBuilderOptions): TokenVocabulary;\n}\n\nexport class DefaultTokenBuilder implements TokenBuilder {\n\n    buildTokens(grammar: Grammar, options?: TokenBuilderOptions): TokenVocabulary {\n        const reachableRules = stream(getAllReachableRules(grammar, false));\n        const terminalTokens: TokenType[] = this.buildTerminalTokens(reachableRules);\n        const tokens: TokenType[] = this.buildKeywordTokens(reachableRules, terminalTokens, options);\n\n        terminalTokens.forEach(terminalToken => {\n            const pattern = terminalToken.PATTERN;\n            if (typeof pattern === 'object' && pattern && 'test' in pattern && isWhitespace(pattern)) {\n                tokens.unshift(terminalToken);\n            } else {\n                tokens.push(terminalToken);\n            }\n        });\n        // We don't need to add the EOF token explicitly.\n        // It is automatically available at the end of the token stream.\n        return tokens;\n    }\n\n    protected buildTerminalTokens(rules: Stream): TokenType[] {\n        return rules.filter(isTerminalRule).filter(e => !e.fragment)\n            .map(terminal => this.buildTerminalToken(terminal)).toArray();\n    }\n\n    protected buildTerminalToken(terminal: TerminalRule): TokenType {\n        const regex = terminalRegex(terminal);\n        const pattern = this.requiresCustomPattern(regex) ? this.regexPatternFunction(regex) : regex;\n        const tokenType: TokenType = {\n            name: terminal.name,\n            PATTERN: pattern,\n            LINE_BREAKS: true\n        };\n        if (terminal.hidden) {\n            // Only skip tokens that are able to accept whitespace\n            tokenType.GROUP = isWhitespace(regex) ? 
Lexer.SKIPPED : 'hidden';\n        }\n        return tokenType;\n    }\n\n    protected requiresCustomPattern(regex: RegExp): boolean {\n        if (regex.flags.includes('u')) {\n            // Unicode regexes are not supported by Chevrotain.\n            return true;\n        } else if (regex.source.includes('?<=') || regex.source.includes('? {\n            stickyRegex.lastIndex = offset;\n            const execResult = stickyRegex.exec(text);\n            return execResult;\n        };\n    }\n\n    protected buildKeywordTokens(rules: Stream, terminalTokens: TokenType[], options?: TokenBuilderOptions): TokenType[] {\n        return rules\n            // We filter by parser rules, since keywords in terminal rules get transformed into regex and are not actual tokens\n            .filter(isParserRule)\n            .flatMap(rule => streamAllContents(rule).filter(isKeyword))\n            .distinct(e => e.value).toArray()\n            // Sort keywords by descending length\n            .sort((a, b) => b.value.length - a.value.length)\n            .map(keyword => this.buildKeywordToken(keyword, terminalTokens, Boolean(options?.caseInsensitive)));\n    }\n\n    protected buildKeywordToken(keyword: Keyword, terminalTokens: TokenType[], caseInsensitive: boolean): TokenType {\n        return {\n            name: keyword.value,\n            PATTERN: this.buildKeywordPattern(keyword, caseInsensitive),\n            LONGER_ALT: this.findLongerAlt(keyword, terminalTokens)\n        };\n    }\n\n    protected buildKeywordPattern(keyword: Keyword, caseInsensitive: boolean): TokenPattern {\n        return caseInsensitive ?\n            new RegExp(getCaseInsensitivePattern(keyword.value)) :\n            keyword.value;\n    }\n\n    protected findLongerAlt(keyword: Keyword, terminalTokens: TokenType[]): TokenType[] {\n        return terminalTokens.reduce((longerAlts: TokenType[], token) => {\n            const pattern = token?.PATTERN as RegExp;\n            if (pattern?.source && partialMatches('^' + pattern.source + '$', keyword.value)) {\n                longerAlts.push(token);\n            }\n            return longerAlts;\n        }, []);\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AbstractElement, AbstractRule } from '../languages/generated/ast.js';\nimport type { CstNode } from '../syntax-tree.js';\nimport { isCrossReference, isRuleCall } from '../languages/generated/ast.js';\nimport { getCrossReferenceTerminal, getRuleType } from '../utils/grammar-utils.js';\n\n/**\n * Language-specific service for converting string values from the source text format into a value to be held in the AST.\n */\nexport interface ValueConverter {\n    /**\n     * Converts a string value from the source text format into a value to be held in the AST.\n     */\n    convert(input: string, cstNode: CstNode): ValueType;\n}\n\nexport type ValueType = string | number | boolean | bigint | Date;\n\nexport class DefaultValueConverter implements ValueConverter {\n\n    convert(input: string, cstNode: CstNode): ValueType {\n        let feature: AbstractElement | undefined = cstNode.grammarSource;\n        if (isCrossReference(feature)) {\n            feature = getCrossReferenceTerminal(feature);\n        }\n      
  if (isRuleCall(feature)) {\n            const rule = feature.rule.ref;\n            if (!rule) {\n                throw new Error('This cst node was not parsed by a rule.');\n            }\n            return this.runConverter(rule, input, cstNode);\n        }\n        return input;\n    }\n\n    // eslint-disable-next-line @typescript-eslint/no-unused-vars\n    protected runConverter(rule: AbstractRule, input: string, cstNode: CstNode): ValueType {\n        switch (rule.name.toUpperCase()) {\n            case 'INT': return ValueConverter.convertInt(input);\n            case 'STRING': return ValueConverter.convertString(input);\n            case 'ID': return ValueConverter.convertID(input);\n        }\n        switch (getRuleType(rule)?.toLowerCase()) {\n            case 'number': return ValueConverter.convertNumber(input);\n            case 'boolean': return ValueConverter.convertBoolean(input);\n            case 'bigint': return ValueConverter.convertBigint(input);\n            case 'date': return ValueConverter.convertDate(input);\n            default: return input;\n        }\n    }\n}\n\nexport namespace ValueConverter {\n\n    export function convertString(input: string): string {\n        let result = '';\n        for (let i = 1; i < input.length - 1; i++) {\n            const c = input.charAt(i);\n            if (c === '\\\\') {\n                const c1 = input.charAt(++i);\n                result += convertEscapeCharacter(c1);\n            } else {\n                result += c;\n            }\n        }\n        return result;\n    }\n\n    function convertEscapeCharacter(char: string): string {\n        switch (char) {\n            case 'b': return '\\b';\n            case 'f': return '\\f';\n            case 'n': return '\\n';\n            case 'r': return '\\r';\n            case 't': return '\\t';\n            case 'v': return '\\v';\n            case '0': return '\\0';\n            default: return char;\n        }\n    }\n\n    export function convertID(input: string): string {\n        if (input.charAt(0) === '^') {\n            return input.substring(1);\n        } else {\n            return input;\n        }\n    }\n\n    export function convertInt(input: string): number {\n        return parseInt(input);\n    }\n\n    export function convertBigint(input: string): bigint {\n        return BigInt(input);\n    }\n\n    export function convertDate(input: string): Date {\n        return new Date(input);\n    }\n\n    export function convertNumber(input: string): number {\n        return Number(input);\n    }\n\n    export function convertBoolean(input: string): boolean {\n        return input.toLowerCase() === 'true';\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n// eslint-disable-next-line no-restricted-imports\nexport * from 'vscode-jsonrpc/lib/common/cancellation.js';\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken, CancellationTokenSource, 
type AbstractCancellationTokenSource } from '../utils/cancellation.js';\n\nexport type MaybePromise = T | Promise\n\n/**\n * Delays the execution of the current code to the next tick of the event loop.\n * Don't call this method directly in a tight loop to prevent too many promises from being created.\n */\nexport function delayNextTick(): Promise {\n    return new Promise(resolve => {\n        // In case we are running in a non-node environment, `setImmediate` isn't available.\n        // Using `setTimeout` of the browser API accomplishes the same result.\n        if (typeof setImmediate === 'undefined') {\n            setTimeout(resolve, 0);\n        } else {\n            setImmediate(resolve);\n        }\n    });\n}\n\nlet lastTick = 0;\nlet globalInterruptionPeriod = 10;\n\n/**\n * Reset the global interruption period and create a cancellation token source.\n */\nexport function startCancelableOperation(): AbstractCancellationTokenSource {\n    lastTick = Date.now();\n    return new CancellationTokenSource();\n}\n\n/**\n * Change the period duration for `interruptAndCheck` to the given number of milliseconds.\n * The default value is 10ms.\n */\nexport function setInterruptionPeriod(period: number): void {\n    globalInterruptionPeriod = period;\n}\n\n/**\n * This symbol may be thrown in an asynchronous context by any Langium service that receives\n * a `CancellationToken`. This means that the promise returned by such a service is rejected with\n * this symbol as rejection reason.\n */\nexport const OperationCancelled = Symbol('OperationCancelled');\n\n/**\n * Use this in a `catch` block to check whether the thrown object indicates that the operation\n * has been cancelled.\n */\nexport function isOperationCancelled(err: unknown): err is typeof OperationCancelled {\n    return err === OperationCancelled;\n}\n\n/**\n * This function does two things:\n *  1. Check the elapsed time since the last call to this function or to `startCancelableOperation`. If the predefined\n *     period (configured with `setInterruptionPeriod`) is exceeded, execution is delayed with `delayNextTick`.\n *  2. 
If the predefined period is not met yet or execution is resumed after an interruption, the given cancellation\n *     token is checked, and if cancellation is requested, `OperationCanceled` is thrown.\n *\n * All services in Langium that receive a `CancellationToken` may potentially call this function, so the\n * `CancellationToken` must be caught (with an `async` try-catch block or a `catch` callback attached to\n * the promise) to avoid that event being exposed as an error.\n */\nexport async function interruptAndCheck(token: CancellationToken): Promise {\n    if (token === CancellationToken.None) {\n        // Early exit in case cancellation was disabled by the caller\n        return;\n    }\n    const current = Date.now();\n    if (current - lastTick >= globalInterruptionPeriod) {\n        lastTick = current;\n        await delayNextTick();\n    }\n    if (token.isCancellationRequested) {\n        throw OperationCancelled;\n    }\n}\n\n/**\n * Simple implementation of the deferred pattern.\n * An object that exposes a promise and functions to resolve and reject it.\n */\nexport class Deferred {\n    resolve: (value: T) => this;\n    reject: (err?: unknown) => this;\n\n    promise = new Promise((resolve, reject) => {\n        this.resolve = (arg) => {\n            resolve(arg);\n            return this;\n        };\n        this.reject = (err) => {\n            reject(err);\n            return this;\n        };\n    });\n}\n", "/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\n'use strict';\nclass FullTextDocument {\n    constructor(uri, languageId, version, content) {\n        this._uri = uri;\n        this._languageId = languageId;\n        this._version = version;\n        this._content = content;\n        this._lineOffsets = undefined;\n    }\n    get uri() {\n        return this._uri;\n    }\n    get languageId() {\n        return this._languageId;\n    }\n    get version() {\n        return this._version;\n    }\n    getText(range) {\n        if (range) {\n            const start = this.offsetAt(range.start);\n            const end = this.offsetAt(range.end);\n            return this._content.substring(start, end);\n        }\n        return this._content;\n    }\n    update(changes, version) {\n        for (const change of changes) {\n            if (FullTextDocument.isIncremental(change)) {\n                // makes sure start is before end\n                const range = getWellformedRange(change.range);\n                // update content\n                const startOffset = this.offsetAt(range.start);\n                const endOffset = this.offsetAt(range.end);\n                this._content = this._content.substring(0, startOffset) + change.text + this._content.substring(endOffset, this._content.length);\n                // update the offsets\n                const startLine = Math.max(range.start.line, 0);\n                const endLine = Math.max(range.end.line, 0);\n                let lineOffsets = this._lineOffsets;\n                const addedLineOffsets = computeLineOffsets(change.text, false, startOffset);\n                if (endLine - startLine === addedLineOffsets.length) {\n                    for (let i = 0, len = addedLineOffsets.length; i < len; i++) {\n           
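The promise-util source above defines `delayNextTick`, `startCancelableOperation`, `interruptAndCheck`, the `OperationCancelled` symbol, and `Deferred`. A minimal usage sketch follows, assuming these helpers are re-exported from the `langium` package entry point (the index module listed among the sources) and using a hypothetical `processItem` function as a stand-in for real work; the item list and timeout are made up:

import { CancellationToken, interruptAndCheck, isOperationCancelled, startCancelableOperation } from 'langium';

// Hypothetical unit of work; stands in for parsing, validation, etc.
async function processItem(item: string): Promise<void> {
    // ...
}

async function processAll(items: string[], token: CancellationToken): Promise<void> {
    for (const item of items) {
        // interruptAndCheck yields to the event loop at most about every 10ms
        // (the default interruption period) and throws the OperationCancelled
        // symbol once cancellation has been requested on the token.
        await interruptAndCheck(token);
        await processItem(item);
    }
}

async function main(): Promise<void> {
    const source = startCancelableOperation();
    setTimeout(() => source.cancel(), 50); // request cancellation from the outside
    try {
        await processAll(['a', 'b', 'c'], source.token);
    } catch (err) {
        if (isOperationCancelled(err)) {
            // Cancellation is signalled with a symbol, not an Error instance.
            console.log('operation was cancelled');
        } else {
            throw err;
        }
    }
}

void main();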
             lineOffsets[i + startLine + 1] = addedLineOffsets[i];\n                    }\n                }\n                else {\n                    if (addedLineOffsets.length < 10000) {\n                        lineOffsets.splice(startLine + 1, endLine - startLine, ...addedLineOffsets);\n                    }\n                    else { // avoid too many arguments for splice\n                        this._lineOffsets = lineOffsets = lineOffsets.slice(0, startLine + 1).concat(addedLineOffsets, lineOffsets.slice(endLine + 1));\n                    }\n                }\n                const diff = change.text.length - (endOffset - startOffset);\n                if (diff !== 0) {\n                    for (let i = startLine + 1 + addedLineOffsets.length, len = lineOffsets.length; i < len; i++) {\n                        lineOffsets[i] = lineOffsets[i] + diff;\n                    }\n                }\n            }\n            else if (FullTextDocument.isFull(change)) {\n                this._content = change.text;\n                this._lineOffsets = undefined;\n            }\n            else {\n                throw new Error('Unknown change event received');\n            }\n        }\n        this._version = version;\n    }\n    getLineOffsets() {\n        if (this._lineOffsets === undefined) {\n            this._lineOffsets = computeLineOffsets(this._content, true);\n        }\n        return this._lineOffsets;\n    }\n    positionAt(offset) {\n        offset = Math.max(Math.min(offset, this._content.length), 0);\n        const lineOffsets = this.getLineOffsets();\n        let low = 0, high = lineOffsets.length;\n        if (high === 0) {\n            return { line: 0, character: offset };\n        }\n        while (low < high) {\n            const mid = Math.floor((low + high) / 2);\n            if (lineOffsets[mid] > offset) {\n                high = mid;\n            }\n            else {\n                low = mid + 1;\n            }\n        }\n        // low is the least x for which the line offset is larger than the current offset\n        // or array.length if no line offset is larger than the current offset\n        const line = low - 1;\n        offset = this.ensureBeforeEOL(offset, lineOffsets[line]);\n        return { line, character: offset - lineOffsets[line] };\n    }\n    offsetAt(position) {\n        const lineOffsets = this.getLineOffsets();\n        if (position.line >= lineOffsets.length) {\n            return this._content.length;\n        }\n        else if (position.line < 0) {\n            return 0;\n        }\n        const lineOffset = lineOffsets[position.line];\n        if (position.character <= 0) {\n            return lineOffset;\n        }\n        const nextLineOffset = (position.line + 1 < lineOffsets.length) ? 
lineOffsets[position.line + 1] : this._content.length;\n        const offset = Math.min(lineOffset + position.character, nextLineOffset);\n        return this.ensureBeforeEOL(offset, lineOffset);\n    }\n    ensureBeforeEOL(offset, lineOffset) {\n        while (offset > lineOffset && isEOL(this._content.charCodeAt(offset - 1))) {\n            offset--;\n        }\n        return offset;\n    }\n    get lineCount() {\n        return this.getLineOffsets().length;\n    }\n    static isIncremental(event) {\n        const candidate = event;\n        return candidate !== undefined && candidate !== null &&\n            typeof candidate.text === 'string' && candidate.range !== undefined &&\n            (candidate.rangeLength === undefined || typeof candidate.rangeLength === 'number');\n    }\n    static isFull(event) {\n        const candidate = event;\n        return candidate !== undefined && candidate !== null &&\n            typeof candidate.text === 'string' && candidate.range === undefined && candidate.rangeLength === undefined;\n    }\n}\nexport var TextDocument;\n(function (TextDocument) {\n    /**\n     * Creates a new text document.\n     *\n     * @param uri The document's uri.\n     * @param languageId  The document's language Id.\n     * @param version The document's initial version number.\n     * @param content The document's content.\n     */\n    function create(uri, languageId, version, content) {\n        return new FullTextDocument(uri, languageId, version, content);\n    }\n    TextDocument.create = create;\n    /**\n     * Updates a TextDocument by modifying its content.\n     *\n     * @param document the document to update. Only documents created by TextDocument.create are valid inputs.\n     * @param changes the changes to apply to the document.\n     * @param version the changes version for the document.\n     * @returns The updated TextDocument. 
Note: That's the same document instance passed in as first parameter.\n     *\n     */\n    function update(document, changes, version) {\n        if (document instanceof FullTextDocument) {\n            document.update(changes, version);\n            return document;\n        }\n        else {\n            throw new Error('TextDocument.update: document must be created by TextDocument.create');\n        }\n    }\n    TextDocument.update = update;\n    function applyEdits(document, edits) {\n        const text = document.getText();\n        const sortedEdits = mergeSort(edits.map(getWellformedEdit), (a, b) => {\n            const diff = a.range.start.line - b.range.start.line;\n            if (diff === 0) {\n                return a.range.start.character - b.range.start.character;\n            }\n            return diff;\n        });\n        let lastModifiedOffset = 0;\n        const spans = [];\n        for (const e of sortedEdits) {\n            const startOffset = document.offsetAt(e.range.start);\n            if (startOffset < lastModifiedOffset) {\n                throw new Error('Overlapping edit');\n            }\n            else if (startOffset > lastModifiedOffset) {\n                spans.push(text.substring(lastModifiedOffset, startOffset));\n            }\n            if (e.newText.length) {\n                spans.push(e.newText);\n            }\n            lastModifiedOffset = document.offsetAt(e.range.end);\n        }\n        spans.push(text.substr(lastModifiedOffset));\n        return spans.join('');\n    }\n    TextDocument.applyEdits = applyEdits;\n})(TextDocument || (TextDocument = {}));\nfunction mergeSort(data, compare) {\n    if (data.length <= 1) {\n        // sorted\n        return data;\n    }\n    const p = (data.length / 2) | 0;\n    const left = data.slice(0, p);\n    const right = data.slice(p);\n    mergeSort(left, compare);\n    mergeSort(right, compare);\n    let leftIdx = 0;\n    let rightIdx = 0;\n    let i = 0;\n    while (leftIdx < left.length && rightIdx < right.length) {\n        const ret = compare(left[leftIdx], right[rightIdx]);\n        if (ret <= 0) {\n            // smaller_equal -> take left to preserve order\n            data[i++] = left[leftIdx++];\n        }\n        else {\n            // greater -> take right\n            data[i++] = right[rightIdx++];\n        }\n    }\n    while (leftIdx < left.length) {\n        data[i++] = left[leftIdx++];\n    }\n    while (rightIdx < right.length) {\n        data[i++] = right[rightIdx++];\n    }\n    return data;\n}\nfunction computeLineOffsets(text, isAtLineStart, textOffset = 0) {\n    const result = isAtLineStart ? 
[textOffset] : [];\n    for (let i = 0; i < text.length; i++) {\n        const ch = text.charCodeAt(i);\n        if (isEOL(ch)) {\n            if (ch === 13 /* CharCode.CarriageReturn */ && i + 1 < text.length && text.charCodeAt(i + 1) === 10 /* CharCode.LineFeed */) {\n                i++;\n            }\n            result.push(textOffset + i + 1);\n        }\n    }\n    return result;\n}\nfunction isEOL(char) {\n    return char === 13 /* CharCode.CarriageReturn */ || char === 10 /* CharCode.LineFeed */;\n}\nfunction getWellformedRange(range) {\n    const start = range.start;\n    const end = range.end;\n    if (start.line > end.line || (start.line === end.line && start.character > end.character)) {\n        return { start: end, end: start };\n    }\n    return range;\n}\nfunction getWellformedEdit(textEdit) {\n    const range = getWellformedRange(textEdit.range);\n    if (range !== textEdit.range) {\n        return { newText: textEdit.newText, range };\n    }\n    return textEdit;\n}\n", "// 'path' module extracted from Node.js v8.11.1 (only the posix part)\n// transplited with Babel\n\n// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nfunction assertPath(path) {\n  if (typeof path !== 'string') {\n    throw new TypeError('Path must be a string. Received ' + JSON.stringify(path));\n  }\n}\n\n// Resolves . and .. 
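The `vscode-languageserver-textdocument` implementation above (the `FullTextDocument` class, the `TextDocument` namespace with `create`, `update` and `applyEdits`, and the line-offset helpers) can be exercised as in the following sketch; the document URI, language id, content and edit values are made up for illustration:

import { TextDocument, TextDocumentContentChangeEvent, TextEdit } from 'vscode-languageserver-textdocument';

// Create a document and convert between offsets and positions.
let doc = TextDocument.create('file:///example/model.langium', 'langium', 1, 'grammar Hello\n\nentry Model: name=ID;\n');
console.log(doc.lineCount);                           // 4 (the trailing '\n' creates an offset for a final empty line)
console.log(doc.positionAt(8));                       // { line: 0, character: 8 }
console.log(doc.offsetAt({ line: 2, character: 0 })); // offset of the third line

// Apply an incremental change: replace 'Hello' on the first line.
const change: TextDocumentContentChangeEvent = {
    range: { start: { line: 0, character: 8 }, end: { line: 0, character: 13 } },
    text: 'Greeting'
};
doc = TextDocument.update(doc, [change], 2);
console.log(doc.getText({ start: { line: 0, character: 0 }, end: { line: 0, character: 16 } })); // 'grammar Greeting'

// applyEdits returns the edited text without mutating the document.
const edit: TextEdit = {
    range: { start: { line: 2, character: 0 }, end: { line: 2, character: 0 } },
    text: '// entry rule\n'
};
console.log(TextDocument.applyEdits(doc, [edit]));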
elements in a path with directory names\nfunction normalizeStringPosix(path, allowAboveRoot) {\n  var res = '';\n  var lastSegmentLength = 0;\n  var lastSlash = -1;\n  var dots = 0;\n  var code;\n  for (var i = 0; i <= path.length; ++i) {\n    if (i < path.length)\n      code = path.charCodeAt(i);\n    else if (code === 47 /*/*/)\n      break;\n    else\n      code = 47 /*/*/;\n    if (code === 47 /*/*/) {\n      if (lastSlash === i - 1 || dots === 1) {\n        // NOOP\n      } else if (lastSlash !== i - 1 && dots === 2) {\n        if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 /*.*/ || res.charCodeAt(res.length - 2) !== 46 /*.*/) {\n          if (res.length > 2) {\n            var lastSlashIndex = res.lastIndexOf('/');\n            if (lastSlashIndex !== res.length - 1) {\n              if (lastSlashIndex === -1) {\n                res = '';\n                lastSegmentLength = 0;\n              } else {\n                res = res.slice(0, lastSlashIndex);\n                lastSegmentLength = res.length - 1 - res.lastIndexOf('/');\n              }\n              lastSlash = i;\n              dots = 0;\n              continue;\n            }\n          } else if (res.length === 2 || res.length === 1) {\n            res = '';\n            lastSegmentLength = 0;\n            lastSlash = i;\n            dots = 0;\n            continue;\n          }\n        }\n        if (allowAboveRoot) {\n          if (res.length > 0)\n            res += '/..';\n          else\n            res = '..';\n          lastSegmentLength = 2;\n        }\n      } else {\n        if (res.length > 0)\n          res += '/' + path.slice(lastSlash + 1, i);\n        else\n          res = path.slice(lastSlash + 1, i);\n        lastSegmentLength = i - lastSlash - 1;\n      }\n      lastSlash = i;\n      dots = 0;\n    } else if (code === 46 /*.*/ && dots !== -1) {\n      ++dots;\n    } else {\n      dots = -1;\n    }\n  }\n  return res;\n}\n\nfunction _format(sep, pathObject) {\n  var dir = pathObject.dir || pathObject.root;\n  var base = pathObject.base || (pathObject.name || '') + (pathObject.ext || '');\n  if (!dir) {\n    return base;\n  }\n  if (dir === pathObject.root) {\n    return dir + base;\n  }\n  return dir + sep + base;\n}\n\nvar posix = {\n  // path.resolve([from ...], to)\n  resolve: function resolve() {\n    var resolvedPath = '';\n    var resolvedAbsolute = false;\n    var cwd;\n\n    for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {\n      var path;\n      if (i >= 0)\n        path = arguments[i];\n      else {\n        if (cwd === undefined)\n          cwd = process.cwd();\n        path = cwd;\n      }\n\n      assertPath(path);\n\n      // Skip empty entries\n      if (path.length === 0) {\n        continue;\n      }\n\n      resolvedPath = path + '/' + resolvedPath;\n      resolvedAbsolute = path.charCodeAt(0) === 47 /*/*/;\n    }\n\n    // At this point the path should be resolved to a full absolute path, but\n    // handle relative paths to be safe (might happen when process.cwd() fails)\n\n    // Normalize the path\n    resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute);\n\n    if (resolvedAbsolute) {\n      if (resolvedPath.length > 0)\n        return '/' + resolvedPath;\n      else\n        return '/';\n    } else if (resolvedPath.length > 0) {\n      return resolvedPath;\n    } else {\n      return '.';\n    }\n  },\n\n  normalize: function normalize(path) {\n    assertPath(path);\n\n    if (path.length === 0) return 
'.';\n\n    var isAbsolute = path.charCodeAt(0) === 47 /*/*/;\n    var trailingSeparator = path.charCodeAt(path.length - 1) === 47 /*/*/;\n\n    // Normalize the path\n    path = normalizeStringPosix(path, !isAbsolute);\n\n    if (path.length === 0 && !isAbsolute) path = '.';\n    if (path.length > 0 && trailingSeparator) path += '/';\n\n    if (isAbsolute) return '/' + path;\n    return path;\n  },\n\n  isAbsolute: function isAbsolute(path) {\n    assertPath(path);\n    return path.length > 0 && path.charCodeAt(0) === 47 /*/*/;\n  },\n\n  join: function join() {\n    if (arguments.length === 0)\n      return '.';\n    var joined;\n    for (var i = 0; i < arguments.length; ++i) {\n      var arg = arguments[i];\n      assertPath(arg);\n      if (arg.length > 0) {\n        if (joined === undefined)\n          joined = arg;\n        else\n          joined += '/' + arg;\n      }\n    }\n    if (joined === undefined)\n      return '.';\n    return posix.normalize(joined);\n  },\n\n  relative: function relative(from, to) {\n    assertPath(from);\n    assertPath(to);\n\n    if (from === to) return '';\n\n    from = posix.resolve(from);\n    to = posix.resolve(to);\n\n    if (from === to) return '';\n\n    // Trim any leading backslashes\n    var fromStart = 1;\n    for (; fromStart < from.length; ++fromStart) {\n      if (from.charCodeAt(fromStart) !== 47 /*/*/)\n        break;\n    }\n    var fromEnd = from.length;\n    var fromLen = fromEnd - fromStart;\n\n    // Trim any leading backslashes\n    var toStart = 1;\n    for (; toStart < to.length; ++toStart) {\n      if (to.charCodeAt(toStart) !== 47 /*/*/)\n        break;\n    }\n    var toEnd = to.length;\n    var toLen = toEnd - toStart;\n\n    // Compare paths to find the longest common path from root\n    var length = fromLen < toLen ? 
fromLen : toLen;\n    var lastCommonSep = -1;\n    var i = 0;\n    for (; i <= length; ++i) {\n      if (i === length) {\n        if (toLen > length) {\n          if (to.charCodeAt(toStart + i) === 47 /*/*/) {\n            // We get here if `from` is the exact base path for `to`.\n            // For example: from='/foo/bar'; to='/foo/bar/baz'\n            return to.slice(toStart + i + 1);\n          } else if (i === 0) {\n            // We get here if `from` is the root\n            // For example: from='/'; to='/foo'\n            return to.slice(toStart + i);\n          }\n        } else if (fromLen > length) {\n          if (from.charCodeAt(fromStart + i) === 47 /*/*/) {\n            // We get here if `to` is the exact base path for `from`.\n            // For example: from='/foo/bar/baz'; to='/foo/bar'\n            lastCommonSep = i;\n          } else if (i === 0) {\n            // We get here if `to` is the root.\n            // For example: from='/foo'; to='/'\n            lastCommonSep = 0;\n          }\n        }\n        break;\n      }\n      var fromCode = from.charCodeAt(fromStart + i);\n      var toCode = to.charCodeAt(toStart + i);\n      if (fromCode !== toCode)\n        break;\n      else if (fromCode === 47 /*/*/)\n        lastCommonSep = i;\n    }\n\n    var out = '';\n    // Generate the relative path based on the path difference between `to`\n    // and `from`\n    for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {\n      if (i === fromEnd || from.charCodeAt(i) === 47 /*/*/) {\n        if (out.length === 0)\n          out += '..';\n        else\n          out += '/..';\n      }\n    }\n\n    // Lastly, append the rest of the destination (`to`) path that comes after\n    // the common path parts\n    if (out.length > 0)\n      return out + to.slice(toStart + lastCommonSep);\n    else {\n      toStart += lastCommonSep;\n      if (to.charCodeAt(toStart) === 47 /*/*/)\n        ++toStart;\n      return to.slice(toStart);\n    }\n  },\n\n  _makeLong: function _makeLong(path) {\n    return path;\n  },\n\n  dirname: function dirname(path) {\n    assertPath(path);\n    if (path.length === 0) return '.';\n    var code = path.charCodeAt(0);\n    var hasRoot = code === 47 /*/*/;\n    var end = -1;\n    var matchedSlash = true;\n    for (var i = path.length - 1; i >= 1; --i) {\n      code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          if (!matchedSlash) {\n            end = i;\n            break;\n          }\n        } else {\n        // We saw the first non-path separator\n        matchedSlash = false;\n      }\n    }\n\n    if (end === -1) return hasRoot ? 
'/' : '.';\n    if (hasRoot && end === 1) return '//';\n    return path.slice(0, end);\n  },\n\n  basename: function basename(path, ext) {\n    if (ext !== undefined && typeof ext !== 'string') throw new TypeError('\"ext\" argument must be a string');\n    assertPath(path);\n\n    var start = 0;\n    var end = -1;\n    var matchedSlash = true;\n    var i;\n\n    if (ext !== undefined && ext.length > 0 && ext.length <= path.length) {\n      if (ext.length === path.length && ext === path) return '';\n      var extIdx = ext.length - 1;\n      var firstNonSlashEnd = -1;\n      for (i = path.length - 1; i >= 0; --i) {\n        var code = path.charCodeAt(i);\n        if (code === 47 /*/*/) {\n            // If we reached a path separator that was not part of a set of path\n            // separators at the end of the string, stop now\n            if (!matchedSlash) {\n              start = i + 1;\n              break;\n            }\n          } else {\n          if (firstNonSlashEnd === -1) {\n            // We saw the first non-path separator, remember this index in case\n            // we need it if the extension ends up not matching\n            matchedSlash = false;\n            firstNonSlashEnd = i + 1;\n          }\n          if (extIdx >= 0) {\n            // Try to match the explicit extension\n            if (code === ext.charCodeAt(extIdx)) {\n              if (--extIdx === -1) {\n                // We matched the extension, so mark this as the end of our path\n                // component\n                end = i;\n              }\n            } else {\n              // Extension does not match, so our result is the entire path\n              // component\n              extIdx = -1;\n              end = firstNonSlashEnd;\n            }\n          }\n        }\n      }\n\n      if (start === end) end = firstNonSlashEnd;else if (end === -1) end = path.length;\n      return path.slice(start, end);\n    } else {\n      for (i = path.length - 1; i >= 0; --i) {\n        if (path.charCodeAt(i) === 47 /*/*/) {\n            // If we reached a path separator that was not part of a set of path\n            // separators at the end of the string, stop now\n            if (!matchedSlash) {\n              start = i + 1;\n              break;\n            }\n          } else if (end === -1) {\n          // We saw the first non-path separator, mark this as the end of our\n          // path component\n          matchedSlash = false;\n          end = i + 1;\n        }\n      }\n\n      if (end === -1) return '';\n      return path.slice(start, end);\n    }\n  },\n\n  extname: function extname(path) {\n    assertPath(path);\n    var startDot = -1;\n    var startPart = 0;\n    var end = -1;\n    var matchedSlash = true;\n    // Track the state of characters (if any) we see before our first dot and\n    // after any path separator we find\n    var preDotState = 0;\n    for (var i = path.length - 1; i >= 0; --i) {\n      var code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          // If we reached a path separator that was not part of a set of path\n          // separators at the end of the string, stop now\n          if (!matchedSlash) {\n            startPart = i + 1;\n            break;\n          }\n          continue;\n        }\n      if (end === -1) {\n        // We saw the first non-path separator, mark this as the end of our\n        // extension\n        matchedSlash = false;\n        end = i + 1;\n      }\n      if (code === 46 /*.*/) {\n          // If this is our first dot, mark 
it as the start of our extension\n          if (startDot === -1)\n            startDot = i;\n          else if (preDotState !== 1)\n            preDotState = 1;\n      } else if (startDot !== -1) {\n        // We saw a non-dot and non-path separator before our dot, so we should\n        // have a good chance at having a non-empty extension\n        preDotState = -1;\n      }\n    }\n\n    if (startDot === -1 || end === -1 ||\n        // We saw a non-dot character immediately before the dot\n        preDotState === 0 ||\n        // The (right-most) trimmed path component is exactly '..'\n        preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n      return '';\n    }\n    return path.slice(startDot, end);\n  },\n\n  format: function format(pathObject) {\n    if (pathObject === null || typeof pathObject !== 'object') {\n      throw new TypeError('The \"pathObject\" argument must be of type Object. Received type ' + typeof pathObject);\n    }\n    return _format('/', pathObject);\n  },\n\n  parse: function parse(path) {\n    assertPath(path);\n\n    var ret = { root: '', dir: '', base: '', ext: '', name: '' };\n    if (path.length === 0) return ret;\n    var code = path.charCodeAt(0);\n    var isAbsolute = code === 47 /*/*/;\n    var start;\n    if (isAbsolute) {\n      ret.root = '/';\n      start = 1;\n    } else {\n      start = 0;\n    }\n    var startDot = -1;\n    var startPart = 0;\n    var end = -1;\n    var matchedSlash = true;\n    var i = path.length - 1;\n\n    // Track the state of characters (if any) we see before our first dot and\n    // after any path separator we find\n    var preDotState = 0;\n\n    // Get non-dir info\n    for (; i >= start; --i) {\n      code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          // If we reached a path separator that was not part of a set of path\n          // separators at the end of the string, stop now\n          if (!matchedSlash) {\n            startPart = i + 1;\n            break;\n          }\n          continue;\n        }\n      if (end === -1) {\n        // We saw the first non-path separator, mark this as the end of our\n        // extension\n        matchedSlash = false;\n        end = i + 1;\n      }\n      if (code === 46 /*.*/) {\n          // If this is our first dot, mark it as the start of our extension\n          if (startDot === -1) startDot = i;else if (preDotState !== 1) preDotState = 1;\n        } else if (startDot !== -1) {\n        // We saw a non-dot and non-path separator before our dot, so we should\n        // have a good chance at having a non-empty extension\n        preDotState = -1;\n      }\n    }\n\n    if (startDot === -1 || end === -1 ||\n    // We saw a non-dot character immediately before the dot\n    preDotState === 0 ||\n    // The (right-most) trimmed path component is exactly '..'\n    preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n      if (end !== -1) {\n        if (startPart === 0 && isAbsolute) ret.base = ret.name = path.slice(1, end);else ret.base = ret.name = path.slice(startPart, end);\n      }\n    } else {\n      if (startPart === 0 && isAbsolute) {\n        ret.name = path.slice(1, startDot);\n        ret.base = path.slice(1, end);\n      } else {\n        ret.name = path.slice(startPart, startDot);\n        ret.base = path.slice(startPart, end);\n      }\n      ret.ext = path.slice(startDot, end);\n    }\n\n    if (startPart > 0) ret.dir = path.slice(0, startPart - 1);else if (isAbsolute) ret.dir = '/';\n\n    return 
ret;\n  },\n\n  sep: '/',\n  delimiter: ':',\n  win32: null,\n  posix: null\n};\n\nposix.posix = posix;\n\nmodule.exports = posix;\n", "// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n", "// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};", "__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))", "// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n'use strict';\n\n// !!!!!\n// SEE https://github.com/microsoft/vscode/blob/master/src/vs/base/common/platform.ts\n// !!!!!\n\ndeclare const process: { platform: 'win32' };\ndeclare const navigator: { userAgent: string };\n\nexport let isWindows: boolean;\n\nif (typeof process === 'object') {\n\tisWindows = process.platform === 'win32';\n} else if (typeof navigator === 'object') {\n\tlet userAgent = navigator.userAgent;\n\tisWindows = userAgent.indexOf('Windows') >= 0;\n}\n", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. 
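The module above is, as its header comment states, the posix part of Node's `path` module extracted from Node.js v8.11.1, so its behaviour matches the standard API. A short sketch of the main operations, assuming a Node.js environment and using `node:path` rather than the bundled copy:

import * as path from 'node:path';

// normalize: resolves '.' and '..' segments and collapses duplicate slashes,
// preserving a trailing separator (see normalizeStringPosix above).
console.log(path.posix.normalize('/foo//bar/../baz/'));        // '/foo/baz/'

// join: concatenates the segments, then normalizes the result.
console.log(path.posix.join('/foo', 'bar', './baz'));          // '/foo/bar/baz'

// relative: finds the longest common prefix and walks up with '..'.
console.log(path.posix.relative('/foo/bar', '/foo/baz/qux'));  // '../baz/qux'

// dirname / basename / extname on a posix path.
console.log(path.posix.dirname('/foo/bar/file.txt'));          // '/foo/bar'
console.log(path.posix.basename('/foo/bar/file.txt', '.txt')); // 'file'
console.log(path.posix.extname('/foo/bar/file.txt'));          // '.txt'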
See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n'use strict';\n\nimport { CharCode } from './charCode'\nimport { isWindows } from './platform';\n\nconst _schemePattern = /^\\w[\\w\\d+.-]*$/;\nconst _singleSlashStart = /^\\//;\nconst _doubleSlashStart = /^\\/\\//;\n\nfunction _validateUri(ret: URI, _strict?: boolean): void {\n\n\t// scheme, must be set\n\tif (!ret.scheme && _strict) {\n\t\tthrow new Error(`[UriError]: Scheme is missing: {scheme: \"\", authority: \"${ret.authority}\", path: \"${ret.path}\", query: \"${ret.query}\", fragment: \"${ret.fragment}\"}`);\n\t}\n\n\t// scheme, https://tools.ietf.org/html/rfc3986#section-3.1\n\t// ALPHA *( ALPHA / DIGIT / \"+\" / \"-\" / \".\" )\n\tif (ret.scheme && !_schemePattern.test(ret.scheme)) {\n\t\tthrow new Error('[UriError]: Scheme contains illegal characters.');\n\t}\n\n\t// path, http://tools.ietf.org/html/rfc3986#section-3.3\n\t// If a URI contains an authority component, then the path component\n\t// must either be empty or begin with a slash (\"/\") character.  If a URI\n\t// does not contain an authority component, then the path cannot begin\n\t// with two slash characters (\"//\").\n\tif (ret.path) {\n\t\tif (ret.authority) {\n\t\t\tif (!_singleSlashStart.test(ret.path)) {\n\t\t\t\tthrow new Error('[UriError]: If a URI contains an authority component, then the path component must either be empty or begin with a slash (\"/\") character');\n\t\t\t}\n\t\t} else {\n\t\t\tif (_doubleSlashStart.test(ret.path)) {\n\t\t\t\tthrow new Error('[UriError]: If a URI does not contain an authority component, then the path cannot begin with two slash characters (\"//\")');\n\t\t\t}\n\t\t}\n\t}\n}\n\n// for a while we allowed uris *without* schemes and this is the migration\n// for them, e.g. an uri without scheme and without strict-mode warns and falls\n// back to the file-scheme. that should cause the least carnage and still be a\n// clear warning\nfunction _schemeFix(scheme: string, _strict: boolean): string {\n\tif (!scheme && !_strict) {\n\t\treturn 'file';\n\t}\n\treturn scheme;\n}\n\n// implements a bit of https://tools.ietf.org/html/rfc3986#section-5\nfunction _referenceResolution(scheme: string, path: string): string {\n\n\t// the slash-character is our 'default base' as we don't\n\t// support constructing URIs relative to other URIs. 
This\n\t// also means that we alter and potentially break paths.\n\t// see https://tools.ietf.org/html/rfc3986#section-5.1.4\n\tswitch (scheme) {\n\t\tcase 'https':\n\t\tcase 'http':\n\t\tcase 'file':\n\t\t\tif (!path) {\n\t\t\t\tpath = _slash;\n\t\t\t} else if (path[0] !== _slash) {\n\t\t\t\tpath = _slash + path;\n\t\t\t}\n\t\t\tbreak;\n\t}\n\treturn path;\n}\n\nconst _empty = '';\nconst _slash = '/';\nconst _regexp = /^(([^:/?#]+?):)?(\\/\\/([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?/;\n\n/**\n * Uniform Resource Identifier (URI) http://tools.ietf.org/html/rfc3986.\n * This class is a simple parser which creates the basic component parts\n * (http://tools.ietf.org/html/rfc3986#section-3) with minimal validation\n * and encoding.\n *\n * ```txt\n *       foo://example.com:8042/over/there?name=ferret#nose\n *       \\_/   \\______________/\\_________/ \\_________/ \\__/\n *        |           |            |            |        |\n *     scheme     authority       path        query   fragment\n *        |   _____________________|__\n *       / \\ /                        \\\n *       urn:example:animal:ferret:nose\n * ```\n */\nexport class URI implements UriComponents {\n\n\tstatic isUri(thing: any): thing is URI {\n\t\tif (thing instanceof URI) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!thing) {\n\t\t\treturn false;\n\t\t}\n\t\treturn typeof (thing).authority === 'string'\n\t\t\t&& typeof (thing).fragment === 'string'\n\t\t\t&& typeof (thing).path === 'string'\n\t\t\t&& typeof (thing).query === 'string'\n\t\t\t&& typeof (thing).scheme === 'string'\n\t\t\t&& typeof (thing).fsPath === 'string'\n\t\t\t&& typeof (thing).with === 'function'\n\t\t\t&& typeof (thing).toString === 'function';\n\t}\n\n\t/**\n\t * scheme is the 'http' part of 'http://www.example.com/some/path?query#fragment'.\n\t * The part before the first colon.\n\t */\n\treadonly scheme: string;\n\n\t/**\n\t * authority is the 'www.example.com' part of 'http://www.example.com/some/path?query#fragment'.\n\t * The part between the first double slashes and the next slash.\n\t */\n\treadonly authority: string;\n\n\t/**\n\t * path is the '/some/path' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly path: string;\n\n\t/**\n\t * query is the 'query' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly query: string;\n\n\t/**\n\t * fragment is the 'fragment' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly fragment: string;\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(scheme: string, authority?: string, path?: string, query?: string, fragment?: string, _strict?: boolean);\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(components: UriComponents);\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(schemeOrData: string | UriComponents, authority?: string, path?: string, query?: string, fragment?: string, _strict: boolean = false) {\n\n\t\tif (typeof schemeOrData === 'object') {\n\t\t\tthis.scheme = schemeOrData.scheme || _empty;\n\t\t\tthis.authority = schemeOrData.authority || _empty;\n\t\t\tthis.path = schemeOrData.path || _empty;\n\t\t\tthis.query = schemeOrData.query || _empty;\n\t\t\tthis.fragment = schemeOrData.fragment || _empty;\n\t\t\t// no validation because it's this URI\n\t\t\t// that creates uri components.\n\t\t\t// _validateUri(this);\n\t\t} else {\n\t\t\tthis.scheme = _schemeFix(schemeOrData, _strict);\n\t\t\tthis.authority = authority || _empty;\n\t\t\tthis.path = _referenceResolution(this.scheme, path || 
_empty);\n\t\t\tthis.query = query || _empty;\n\t\t\tthis.fragment = fragment || _empty;\n\n\t\t\t_validateUri(this, _strict);\n\t\t}\n\t}\n\n\t// ---- filesystem path -----------------------\n\n\t/**\n\t * Returns a string representing the corresponding file system path of this URI.\n\t * Will handle UNC paths, normalizes windows drive letters to lower-case, and uses the\n\t * platform specific path separator.\n\t *\n\t * * Will *not* validate the path for invalid characters and semantics.\n\t * * Will *not* look at the scheme of this URI.\n\t * * The result shall *not* be used for display purposes but for accessing a file on disk.\n\t *\n\t *\n\t * The *difference* to `URI#path` is the use of the platform specific separator and the handling\n\t * of UNC paths. See the below sample of a file-uri with an authority (UNC path).\n\t *\n\t * ```ts\n\t\tconst u = URI.parse('file://server/c$/folder/file.txt')\n\t\tu.authority === 'server'\n\t\tu.path === '/shares/c$/file.txt'\n\t\tu.fsPath === '\\\\server\\c$\\folder\\file.txt'\n\t```\n\t *\n\t * Using `URI#path` to read a file (using fs-apis) would not be enough because parts of the path,\n\t * namely the server name, would be missing. Therefore `URI#fsPath` exists - it's sugar to ease working\n\t * with URIs that represent files on disk (`file` scheme).\n\t */\n\tget fsPath(): string {\n\t\t// if (this.scheme !== 'file') {\n\t\t// \tconsole.warn(`[UriError] calling fsPath with scheme ${this.scheme}`);\n\t\t// }\n\t\treturn uriToFsPath(this, false);\n\t}\n\n\t// ---- modify to new -------------------------\n\n\twith(change: { scheme?: string; authority?: string | null; path?: string | null; query?: string | null; fragment?: string | null }): URI {\n\n\t\tif (!change) {\n\t\t\treturn this;\n\t\t}\n\n\t\tlet { scheme, authority, path, query, fragment } = change;\n\t\tif (scheme === undefined) {\n\t\t\tscheme = this.scheme;\n\t\t} else if (scheme === null) {\n\t\t\tscheme = _empty;\n\t\t}\n\t\tif (authority === undefined) {\n\t\t\tauthority = this.authority;\n\t\t} else if (authority === null) {\n\t\t\tauthority = _empty;\n\t\t}\n\t\tif (path === undefined) {\n\t\t\tpath = this.path;\n\t\t} else if (path === null) {\n\t\t\tpath = _empty;\n\t\t}\n\t\tif (query === undefined) {\n\t\t\tquery = this.query;\n\t\t} else if (query === null) {\n\t\t\tquery = _empty;\n\t\t}\n\t\tif (fragment === undefined) {\n\t\t\tfragment = this.fragment;\n\t\t} else if (fragment === null) {\n\t\t\tfragment = _empty;\n\t\t}\n\n\t\tif (scheme === this.scheme\n\t\t\t&& authority === this.authority\n\t\t\t&& path === this.path\n\t\t\t&& query === this.query\n\t\t\t&& fragment === this.fragment) {\n\n\t\t\treturn this;\n\t\t}\n\n\t\treturn new Uri(scheme, authority, path, query, fragment);\n\t}\n\n\t// ---- parse & validate ------------------------\n\n\t/**\n\t * Creates a new URI from a string, e.g. `http://www.example.com/some/path`,\n\t * `file:///usr/home`, or `scheme:with/path`.\n\t *\n\t * @param value A string which represents an URI (see `URI#toString`).\n\t */\n\tstatic parse(value: string, _strict: boolean = false): URI {\n\t\tconst match = _regexp.exec(value);\n\t\tif (!match) {\n\t\t\treturn new Uri(_empty, _empty, _empty, _empty, _empty);\n\t\t}\n\t\treturn new Uri(\n\t\t\tmatch[2] || _empty,\n\t\t\tpercentDecode(match[4] || _empty),\n\t\t\tpercentDecode(match[5] || _empty),\n\t\t\tpercentDecode(match[7] || _empty),\n\t\t\tpercentDecode(match[9] || _empty),\n\t\t\t_strict\n\t\t);\n\t}\n\n\t/**\n\t * Creates a new URI from a file system path, e.g. 
`c:\\my\\files`,\n\t * `/usr/home`, or `\\\\server\\share\\some\\path`.\n\t *\n\t * The *difference* between `URI#parse` and `URI#file` is that the latter treats the argument\n\t * as path, not as stringified-uri. E.g. `URI.file(path)` is **not the same as**\n\t * `URI.parse('file://' + path)` because the path might contain characters that are\n\t * interpreted (# and ?). See the following sample:\n\t * ```ts\n\tconst good = URI.file('/coding/c#/project1');\n\tgood.scheme === 'file';\n\tgood.path === '/coding/c#/project1';\n\tgood.fragment === '';\n\tconst bad = URI.parse('file://' + '/coding/c#/project1');\n\tbad.scheme === 'file';\n\tbad.path === '/coding/c'; // path is now broken\n\tbad.fragment === '/project1';\n\t```\n\t *\n\t * @param path A file system path (see `URI#fsPath`)\n\t */\n\tstatic file(path: string): URI {\n\n\t\tlet authority = _empty;\n\n\t\t// normalize to fwd-slashes on windows,\n\t\t// on other systems bwd-slashes are valid\n\t\t// filename character, eg /f\\oo/ba\\r.txt\n\t\tif (isWindows) {\n\t\t\tpath = path.replace(/\\\\/g, _slash);\n\t\t}\n\n\t\t// check for authority as used in UNC shares\n\t\t// or use the path as given\n\t\tif (path[0] === _slash && path[1] === _slash) {\n\t\t\tconst idx = path.indexOf(_slash, 2);\n\t\t\tif (idx === -1) {\n\t\t\t\tauthority = path.substring(2);\n\t\t\t\tpath = _slash;\n\t\t\t} else {\n\t\t\t\tauthority = path.substring(2, idx);\n\t\t\t\tpath = path.substring(idx) || _slash;\n\t\t\t}\n\t\t}\n\n\t\treturn new Uri('file', authority, path, _empty, _empty);\n\t}\n\n\tstatic from(components: { scheme: string; authority?: string; path?: string; query?: string; fragment?: string }): URI {\n\t\tconst result = new Uri(\n\t\t\tcomponents.scheme,\n\t\t\tcomponents.authority,\n\t\t\tcomponents.path,\n\t\t\tcomponents.query,\n\t\t\tcomponents.fragment,\n\t\t);\n\t\t_validateUri(result, true);\n\t\treturn result;\n\t}\n\n\t// ---- printing/externalize ---------------------------\n\n\t/**\n\t * Creates a string representation for this URI. It's guaranteed that calling\n\t * `URI.parse` with the result of this function creates an URI which is equal\n\t * to this URI.\n\t *\n\t * * The result shall *not* be used for display purposes but for externalization or transport.\n\t * * The result will be encoded using the percentage encoding and encoding happens mostly\n\t * ignore the scheme-specific encoding rules.\n\t *\n\t * @param skipEncoding Do not encode the result, default is `false`\n\t */\n\ttoString(skipEncoding: boolean = false): string {\n\t\treturn _asFormatted(this, skipEncoding);\n\t}\n\n\ttoJSON(): UriComponents {\n\t\treturn this;\n\t}\n\n\tstatic revive(data: UriComponents | URI): URI;\n\tstatic revive(data: UriComponents | URI | undefined): URI | undefined;\n\tstatic revive(data: UriComponents | URI | null): URI | null;\n\tstatic revive(data: UriComponents | URI | undefined | null): URI | undefined | null;\n\tstatic revive(data: UriComponents | URI | undefined | null): URI | undefined | null {\n\t\tif (!data) {\n\t\t\treturn data;\n\t\t} else if (data instanceof URI) {\n\t\t\treturn data;\n\t\t} else {\n\t\t\tconst result = new Uri(data);\n\t\t\tresult._formatted = (data).external;\n\t\t\tresult._fsPath = (data)._sep === _pathSepMarker ? 
(data).fsPath : null;\n\t\t\treturn result;\n\t\t}\n\t}\n}\n\nexport interface UriComponents {\n\tscheme: string;\n\tauthority: string;\n\tpath: string;\n\tquery: string;\n\tfragment: string;\n}\n\ninterface UriState extends UriComponents {\n\t$mid: number;\n\texternal: string;\n\tfsPath: string;\n\t_sep: 1 | undefined;\n}\n\nconst _pathSepMarker = isWindows ? 1 : undefined;\n\n// This class exists so that URI is compatible with vscode.Uri (API).\nclass Uri extends URI {\n\n\t_formatted: string | null = null;\n\t_fsPath: string | null = null;\n\n\toverride get fsPath(): string {\n\t\tif (!this._fsPath) {\n\t\t\tthis._fsPath = uriToFsPath(this, false);\n\t\t}\n\t\treturn this._fsPath;\n\t}\n\n\toverride toString(skipEncoding: boolean = false): string {\n\t\tif (!skipEncoding) {\n\t\t\tif (!this._formatted) {\n\t\t\t\tthis._formatted = _asFormatted(this, false);\n\t\t\t}\n\t\t\treturn this._formatted;\n\t\t} else {\n\t\t\t// we don't cache that\n\t\t\treturn _asFormatted(this, true);\n\t\t}\n\t}\n\n\toverride toJSON(): UriComponents {\n\t\tconst res = {\n\t\t\t$mid: 1\n\t\t};\n\t\t// cached state\n\t\tif (this._fsPath) {\n\t\t\tres.fsPath = this._fsPath;\n\t\t\tres._sep = _pathSepMarker;\n\t\t}\n\t\tif (this._formatted) {\n\t\t\tres.external = this._formatted;\n\t\t}\n\t\t// uri components\n\t\tif (this.path) {\n\t\t\tres.path = this.path;\n\t\t}\n\t\tif (this.scheme) {\n\t\t\tres.scheme = this.scheme;\n\t\t}\n\t\tif (this.authority) {\n\t\t\tres.authority = this.authority;\n\t\t}\n\t\tif (this.query) {\n\t\t\tres.query = this.query;\n\t\t}\n\t\tif (this.fragment) {\n\t\t\tres.fragment = this.fragment;\n\t\t}\n\t\treturn res;\n\t}\n}\n\n// reserved characters: https://tools.ietf.org/html/rfc3986#section-2.2\nconst encodeTable: { [ch: number]: string } = {\n\t[CharCode.Colon]: '%3A', // gen-delims\n\t[CharCode.Slash]: '%2F',\n\t[CharCode.QuestionMark]: '%3F',\n\t[CharCode.Hash]: '%23',\n\t[CharCode.OpenSquareBracket]: '%5B',\n\t[CharCode.CloseSquareBracket]: '%5D',\n\t[CharCode.AtSign]: '%40',\n\n\t[CharCode.ExclamationMark]: '%21', // sub-delims\n\t[CharCode.DollarSign]: '%24',\n\t[CharCode.Ampersand]: '%26',\n\t[CharCode.SingleQuote]: '%27',\n\t[CharCode.OpenParen]: '%28',\n\t[CharCode.CloseParen]: '%29',\n\t[CharCode.Asterisk]: '%2A',\n\t[CharCode.Plus]: '%2B',\n\t[CharCode.Comma]: '%2C',\n\t[CharCode.Semicolon]: '%3B',\n\t[CharCode.Equals]: '%3D',\n\n\t[CharCode.Space]: '%20',\n};\n\nfunction encodeURIComponentFast(uriComponent: string, isPath: boolean, isAuthority: boolean): string {\n\tlet res: string | undefined = undefined;\n\tlet nativeEncodePos = -1;\n\n\tfor (let pos = 0; pos < uriComponent.length; pos++) {\n\t\tconst code = uriComponent.charCodeAt(pos);\n\n\t\t// unreserved characters: https://tools.ietf.org/html/rfc3986#section-2.3\n\t\tif (\n\t\t\t(code >= CharCode.a && code <= CharCode.z)\n\t\t\t|| (code >= CharCode.A && code <= CharCode.Z)\n\t\t\t|| (code >= CharCode.Digit0 && code <= CharCode.Digit9)\n\t\t\t|| code === CharCode.Dash\n\t\t\t|| code === CharCode.Period\n\t\t\t|| code === CharCode.Underline\n\t\t\t|| code === CharCode.Tilde\n\t\t\t|| (isPath && code === CharCode.Slash)\n\t\t\t|| (isAuthority && code === CharCode.OpenSquareBracket)\n\t\t\t|| (isAuthority && code === CharCode.CloseSquareBracket)\n\t\t\t|| (isAuthority && code === CharCode.Colon)\n\t\t) {\n\t\t\t// check if we are delaying native encode\n\t\t\tif (nativeEncodePos !== -1) {\n\t\t\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos, pos));\n\t\t\t\tnativeEncodePos = 
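The `URI` class above exposes `parse`, `file`, `with`, `toString` and `fsPath`. A small round-trip sketch, assuming the module is consumed via the `vscode-uri` package and using made-up example values:

import { URI } from 'vscode-uri';

const base = URI.parse('https://example.com/some/path?query#fragment');
console.log(base.scheme, base.authority, base.path); // 'https' 'example.com' '/some/path'

// `with` returns a new URI with the given components replaced;
// passing null clears a component, undefined keeps the current value.
const changed = base.with({ path: '/other/path', fragment: null });
console.log(changed.toString()); // 'https://example.com/other/path?query'

// toString/parse round-trip: parsing the rendered form yields an equal URI.
console.log(URI.parse(changed.toString()).toString() === changed.toString()); // true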
-1;\n\t\t\t}\n\t\t\t// check if we write into a new string (by default we try to return the param)\n\t\t\tif (res !== undefined) {\n\t\t\t\tres += uriComponent.charAt(pos);\n\t\t\t}\n\n\t\t} else {\n\t\t\t// encoding needed, we need to allocate a new string\n\t\t\tif (res === undefined) {\n\t\t\t\tres = uriComponent.substr(0, pos);\n\t\t\t}\n\n\t\t\t// check with default table first\n\t\t\tconst escaped = encodeTable[code];\n\t\t\tif (escaped !== undefined) {\n\n\t\t\t\t// check if we are delaying native encode\n\t\t\t\tif (nativeEncodePos !== -1) {\n\t\t\t\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos, pos));\n\t\t\t\t\tnativeEncodePos = -1;\n\t\t\t\t}\n\n\t\t\t\t// append escaped variant to result\n\t\t\t\tres += escaped;\n\n\t\t\t} else if (nativeEncodePos === -1) {\n\t\t\t\t// use native encode only when needed\n\t\t\t\tnativeEncodePos = pos;\n\t\t\t}\n\t\t}\n\t}\n\n\tif (nativeEncodePos !== -1) {\n\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos));\n\t}\n\n\treturn res !== undefined ? res : uriComponent;\n}\n\nfunction encodeURIComponentMinimal(path: string): string {\n\tlet res: string | undefined = undefined;\n\tfor (let pos = 0; pos < path.length; pos++) {\n\t\tconst code = path.charCodeAt(pos);\n\t\tif (code === CharCode.Hash || code === CharCode.QuestionMark) {\n\t\t\tif (res === undefined) {\n\t\t\t\tres = path.substr(0, pos);\n\t\t\t}\n\t\t\tres += encodeTable[code];\n\t\t} else {\n\t\t\tif (res !== undefined) {\n\t\t\t\tres += path[pos];\n\t\t\t}\n\t\t}\n\t}\n\treturn res !== undefined ? res : path;\n}\n\n/**\n * Compute `fsPath` for the given uri\n */\nexport function uriToFsPath(uri: URI, keepDriveLetterCasing: boolean): string {\n\n\tlet value: string;\n\tif (uri.authority && uri.path.length > 1 && uri.scheme === 'file') {\n\t\t// unc path: file://shares/c$/far/boo\n\t\tvalue = `//${uri.authority}${uri.path}`;\n\t} else if (\n\t\turi.path.charCodeAt(0) === CharCode.Slash\n\t\t&& (uri.path.charCodeAt(1) >= CharCode.A && uri.path.charCodeAt(1) <= CharCode.Z || uri.path.charCodeAt(1) >= CharCode.a && uri.path.charCodeAt(1) <= CharCode.z)\n\t\t&& uri.path.charCodeAt(2) === CharCode.Colon\n\t) {\n\t\tif (!keepDriveLetterCasing) {\n\t\t\t// windows drive letter: file:///c:/far/boo\n\t\t\tvalue = uri.path[1].toLowerCase() + uri.path.substr(2);\n\t\t} else {\n\t\t\tvalue = uri.path.substr(1);\n\t\t}\n\t} else {\n\t\t// other path\n\t\tvalue = uri.path;\n\t}\n\tif (isWindows) {\n\t\tvalue = value.replace(/\\//g, '\\\\');\n\t}\n\treturn value;\n}\n\n/**\n * Create the external version of a uri\n */\nfunction _asFormatted(uri: URI, skipEncoding: boolean): string {\n\n\tconst encoder = !skipEncoding\n\t\t? 
encodeURIComponentFast\n\t\t: encodeURIComponentMinimal;\n\n\tlet res = '';\n\tlet { scheme, authority, path, query, fragment } = uri;\n\tif (scheme) {\n\t\tres += scheme;\n\t\tres += ':';\n\t}\n\tif (authority || scheme === 'file') {\n\t\tres += _slash;\n\t\tres += _slash;\n\t}\n\tif (authority) {\n\t\tlet idx = authority.indexOf('@');\n\t\tif (idx !== -1) {\n\t\t\t// @\n\t\t\tconst userinfo = authority.substr(0, idx);\n\t\t\tauthority = authority.substr(idx + 1);\n\t\t\tidx = userinfo.lastIndexOf(':');\n\t\t\tif (idx === -1) {\n\t\t\t\tres += encoder(userinfo, false, false);\n\t\t\t} else {\n\t\t\t\t// :@\n\t\t\t\tres += encoder(userinfo.substr(0, idx), false, false);\n\t\t\t\tres += ':';\n\t\t\t\tres += encoder(userinfo.substr(idx + 1), false, true);\n\t\t\t}\n\t\t\tres += '@';\n\t\t}\n\t\tauthority = authority.toLowerCase();\n\t\tidx = authority.lastIndexOf(':');\n\t\tif (idx === -1) {\n\t\t\tres += encoder(authority, false, true);\n\t\t} else {\n\t\t\t// :\n\t\t\tres += encoder(authority.substr(0, idx), false, true);\n\t\t\tres += authority.substr(idx);\n\t\t}\n\t}\n\tif (path) {\n\t\t// lower-case windows drive letters in /C:/fff or C:/fff\n\t\tif (path.length >= 3 && path.charCodeAt(0) === CharCode.Slash && path.charCodeAt(2) === CharCode.Colon) {\n\t\t\tconst code = path.charCodeAt(1);\n\t\t\tif (code >= CharCode.A && code <= CharCode.Z) {\n\t\t\t\tpath = `/${String.fromCharCode(code + 32)}:${path.substr(3)}`; // \"/c:\".length === 3\n\t\t\t}\n\t\t} else if (path.length >= 2 && path.charCodeAt(1) === CharCode.Colon) {\n\t\t\tconst code = path.charCodeAt(0);\n\t\t\tif (code >= CharCode.A && code <= CharCode.Z) {\n\t\t\t\tpath = `${String.fromCharCode(code + 32)}:${path.substr(2)}`; // \"/c:\".length === 3\n\t\t\t}\n\t\t}\n\t\t// encode the rest of the path\n\t\tres += encoder(path, true, false);\n\t}\n\tif (query) {\n\t\tres += '?';\n\t\tres += encoder(query, false, false);\n\t}\n\tif (fragment) {\n\t\tres += '#';\n\t\tres += !skipEncoding ? encodeURIComponentFast(fragment, false, false) : fragment;\n\t}\n\treturn res;\n}\n\n// --- decode\n\nfunction decodeURIComponentGraceful(str: string): string {\n\ttry {\n\t\treturn decodeURIComponent(str);\n\t} catch {\n\t\tif (str.length > 3) {\n\t\t\treturn str.substr(0, 3) + decodeURIComponentGraceful(str.substr(3));\n\t\t} else {\n\t\t\treturn str;\n\t\t}\n\t}\n}\n\nconst _rEncodedAsHex = /(%[0-9A-Za-z][0-9A-Za-z])+/g;\n\nfunction percentDecode(str: string): string {\n\tif (!str.match(_rEncodedAsHex)) {\n\t\treturn str;\n\t}\n\treturn str.replace(_rEncodedAsHex, (match) => decodeURIComponentGraceful(match));\n}\n\n/**\n * Mapped-type that replaces all occurrences of URI with UriComponents\n */\nexport type UriDto = { [K in keyof T]: T[K] extends URI\n\t? UriComponents\n\t: UriDto };\n", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n\n'use strict';\n\nimport { CharCode } from './charCode';\nimport { URI } from './uri';\nimport * as nodePath from 'path';\n\nconst posixPath = nodePath.posix || nodePath;\nconst slash = '/';\n\nexport namespace Utils {\n\n    /**\n     * Joins one or more input paths to the path of URI. \n     * '/' is used as the directory separation character. \n     * \n     * The resolved path will be normalized. 
That means:\n     *  - all '..' and '.' segments are resolved.\n     *  - multiple, sequential occurences of '/' are replaced by a single instance of '/'.\n     *  - trailing separators are preserved.\n     * \n     * @param uri The input URI.\n     * @param paths The paths to be joined with the path of URI.\n     * @returns A URI with the joined path. All other properties of the URI (scheme, authority, query, fragments, ...) will be taken from the input URI.\n     */\n    export function joinPath(uri: URI, ...paths: string[]): URI {\n        return uri.with({ path: posixPath.join(uri.path, ...paths) });\n    }\n\n\n    /**\n     * Resolves one or more paths against the path of a URI. \n     * '/' is used as the directory separation character. \n     * \n     * The resolved path will be normalized. That means:\n     *  - all '..' and '.' segments are resolved. \n     *  - multiple, sequential occurences of '/' are replaced by a single instance of '/'.\n     *  - trailing separators are removed.\n     * \n     * @param uri The input URI.\n     * @param paths The paths to resolve against the path of URI.\n     * @returns A URI with the resolved path. All other properties of the URI (scheme, authority, query, fragments, ...) will be taken from the input URI.\n     */\n    export function resolvePath(uri: URI, ...paths: string[]): URI {\n        let path = uri.path; \n        let slashAdded = false;\n        if (path[0] !== slash) {\n            path = slash + path; // make the path abstract: for posixPath.resolve the first segments has to be absolute or cwd is used.\n            slashAdded = true;\n        }\n        let resolvedPath = posixPath.resolve(path, ...paths);\n        if (slashAdded && resolvedPath[0] === slash && !uri.authority) {\n            resolvedPath = resolvedPath.substring(1);\n        }\n        return uri.with({ path: resolvedPath });\n    }\n\n    /**\n     * Returns a URI where the path is the directory name of the input uri, similar to the Unix dirname command. \n     * In the path, '/' is recognized as the directory separation character. Trailing directory separators are ignored.\n     * The orignal URI is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The last segment of the URIs path.\n     */\n    export function dirname(uri: URI): URI {\n        if (uri.path.length === 0 || uri.path === slash) {\n            return uri;\n        }\n        let path = posixPath.dirname(uri.path);\n        if (path.length === 1 && path.charCodeAt(0) === CharCode.Period) {\n            path = '';\n        }\n        return uri.with({ path });\n    }\n\n    /**\n     * Returns the last segment of the path of a URI, similar to the Unix basename command. \n     * In the path, '/' is recognized as the directory separation character. Trailing directory separators are ignored.\n     * The empty string is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The base name of the URIs path.\n     */\n    export function basename(uri: URI): string {\n        return posixPath.basename(uri.path);\n    }\n\n    /**\n     * Returns the extension name of the path of a URI, similar to the Unix extname command. \n     * In the path, '/' is recognized as the directory separation character. 
Trailing directory separators are ignored.\n     * The empty string is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The extension name of the URIs path.\n     */\n    export function extname(uri: URI): string {\n        return posixPath.extname(uri.path);\n    }\n}", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { URI, Utils } from 'vscode-uri';\n\nexport { URI };\n\nexport namespace UriUtils {\n\n    export const basename = Utils.basename;\n    export const dirname = Utils.dirname;\n    export const extname = Utils.extname;\n    export const joinPath = Utils.joinPath;\n    export const resolvePath = Utils.resolvePath;\n\n    export function equals(a?: URI | string, b?: URI | string): boolean {\n        return a?.toString() === b?.toString();\n    }\n\n    export function relative(from: URI | string, to: URI | string): string {\n        const fromPath = typeof from === 'string' ? from : from.path;\n        const toPath = typeof to === 'string' ? to : to.path;\n        const fromParts = fromPath.split('/').filter(e => e.length > 0);\n        const toParts = toPath.split('/').filter(e => e.length > 0);\n        let i = 0;\n        for (; i < fromParts.length; i++) {\n            if (fromParts[i] !== toParts[i]) {\n                break;\n            }\n        }\n        const backPart = '../'.repeat(fromParts.length - i);\n        const toPart = toParts.slice(i).join('/');\n        return backPart + toPart;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/**\n * Re-export 'TextDocument' from 'vscode-languageserver-textdocument' for convenience,\n *  including both type _and_ symbol (namespace), as we here and there also refer to the symbol,\n *  the overhead is very small, just a few kilobytes.\n * Everything else of that package (at the time contributing) is also defined\n *  in 'vscode-languageserver-protocol' or 'vscode-languageserver-types'.\n */\nexport { TextDocument } from 'vscode-languageserver-textdocument';\n\nimport type { Diagnostic, Range } from 'vscode-languageserver-types';\nimport type { FileSystemProvider } from './file-system-provider.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, Mutable, Reference } from '../syntax-tree.js';\nimport type { MultiMap } from '../utils/collections.js';\nimport type { Stream } from '../utils/stream.js';\nimport { TextDocument } from './documents.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { stream } from '../utils/stream.js';\nimport { URI } from '../utils/uri-utils.js';\n\n/**\n * A Langium document holds the parse result (AST and CST) and any additional state that is derived\n * from the AST, e.g. 
the result of scope precomputation.\n */\nexport interface LangiumDocument {\n    /** The Uniform Resource Identifier (URI) of the document */\n    readonly uri: URI;\n    /** The text document used to convert between offsets and positions */\n    readonly textDocument: TextDocument;\n    /** The current state of the document */\n    state: DocumentState;\n    /** The parse result holds the Abstract Syntax Tree (AST) and potentially also parser / lexer errors */\n    parseResult: ParseResult;\n    /** Result of the scope precomputation phase */\n    precomputedScopes?: PrecomputedScopes;\n    /** An array of all cross-references found in the AST while linking */\n    references: Reference[];\n    /** Result of the validation phase */\n    diagnostics?: Diagnostic[]\n}\n\n/**\n * A document is subject to several phases that are run in predefined order. Any state value implies that\n * smaller state values are finished as well.\n */\nexport enum DocumentState {\n    /**\n     * The text content has changed and needs to be parsed again. The AST held by this outdated\n     * document instance is no longer valid.\n     */\n    Changed = 0,\n    /**\n     * An AST has been created from the text content. The document structure can be traversed,\n     * but cross-references cannot be resolved yet. If necessary, the structure can be manipulated\n     * at this stage as a preprocessing step.\n     */\n    Parsed = 1,\n    /**\n     * The `IndexManager` service has processed AST nodes of this document. This means the\n     * exported symbols are available in the global scope and can be resolved from other documents.\n     */\n    IndexedContent = 2,\n    /**\n     * The `ScopeComputation` service has processed this document. This means the local symbols\n     * are stored in a MultiMap so they can be looked up by the `ScopeProvider` service.\n     * Once a document has reached this state, you may follow every reference - it will lazily\n     * resolve its `ref` property and yield either the target AST node or `undefined` in case\n     * the target is not in scope.\n     */\n    ComputedScopes = 3,\n    /**\n     * The `Linker` service has processed this document. All outgoing references have been\n     * resolved or marked as erroneous.\n     */\n    Linked = 4,\n    /**\n     * The `IndexManager` service has processed AST node references of this document. This is\n     * necessary to determine which documents are affected by a change in one of the workspace\n     * documents.\n     */\n    IndexedReferences = 5,\n    /**\n     * The `DocumentValidator` service has processed this document. 
The language server listens\n     * to the results of this phase and sends diagnostics to the client.\n     */\n    Validated = 6\n}\n\n/**\n * Result of the scope precomputation phase (`ScopeComputation` service).\n * It maps every AST node to the set of symbols that are visible in the subtree of that node.\n */\nexport type PrecomputedScopes = MultiMap\n\nexport interface DocumentSegment {\n    readonly range: Range\n    readonly offset: number\n    readonly length: number\n    readonly end: number\n}\n\n/**\n * Surrogate definition of the `TextDocuments` interface from the `vscode-languageserver` package.\n * No implementation object is expected to be offered by `LangiumCoreServices`, but only by `LangiumLSPServices`.\n */\nexport type TextDocumentProvider = {\n    get(uri: string): TextDocument | undefined\n}\n\n/**\n * Shared service for creating `LangiumDocument` instances.\n *\n * Register a custom implementation if special (additional) behavior is required for your language(s).\n * Note: If you specialize {@link fromString} or {@link fromTextDocument} you probably might want to\n * specialize {@link update}, too!\n */\nexport interface LangiumDocumentFactory {\n    /**\n     * Create a Langium document from a `TextDocument` (usually associated with a file).\n     */\n    fromTextDocument(textDocument: TextDocument, uri?: URI): LangiumDocument;\n    /**\n     * Create a Langium document from a `TextDocument` asynchronously. This action can be cancelled if a cancellable parser implementation has been provided.\n     */\n    fromTextDocument(textDocument: TextDocument, uri: URI | undefined, cancellationToken: CancellationToken): Promise>;\n\n    /**\n     * Create an Langium document from an in-memory string.\n     */\n    fromString(text: string, uri: URI): LangiumDocument;\n    /**\n     * Create a Langium document from an in-memory string asynchronously. This action can be cancelled if a cancellable parser implementation has been provided.\n     */\n    fromString(text: string, uri: URI, cancellationToken: CancellationToken): Promise>;\n\n    /**\n     * Create an Langium document from a model that has been constructed in memory.\n     */\n    fromModel(model: T, uri: URI): LangiumDocument;\n\n    /**\n     * Create an Langium document from a specified `URI`. 
The factory will use the `FileSystemAccess` service to read the file.\n     */\n    fromUri(uri: URI, cancellationToken?: CancellationToken): Promise>;\n\n    /**\n     * Update the given document after changes in the corresponding textual representation.\n     * Method is called by the document builder after it has been requested to build an existing\n     * document and the document's state is {@link DocumentState.Changed}.\n     * The text parsing is expected to be done the same way as in {@link fromTextDocument}\n     * and {@link fromString}.\n     */\n    update(document: LangiumDocument, cancellationToken: CancellationToken): Promise>\n}\n\nexport class DefaultLangiumDocumentFactory implements LangiumDocumentFactory {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly textDocuments?: TextDocumentProvider;\n    protected readonly fileSystemProvider: FileSystemProvider;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n        this.textDocuments = services.workspace.TextDocuments;\n        this.fileSystemProvider = services.workspace.FileSystemProvider;\n    }\n\n    async fromUri(uri: URI, cancellationToken = CancellationToken.None): Promise> {\n        const content = await this.fileSystemProvider.readFile(uri);\n        return this.createAsync(uri, content, cancellationToken);\n    }\n\n    fromTextDocument(textDocument: TextDocument, uri?: URI): LangiumDocument;\n    fromTextDocument(textDocument: TextDocument, uri: URI | undefined, cancellationToken: CancellationToken): Promise>;\n    fromTextDocument(textDocument: TextDocument, uri?: URI, cancellationToken?: CancellationToken): LangiumDocument | Promise> {\n        uri = uri ?? URI.parse(textDocument.uri);\n        if (cancellationToken) {\n            return this.createAsync(uri, textDocument, cancellationToken);\n        } else {\n            return this.create(uri, textDocument);\n        }\n    }\n\n    fromString(text: string, uri: URI): LangiumDocument;\n    fromString(text: string, uri: URI, cancellationToken: CancellationToken): Promise>;\n    fromString(text: string, uri: URI, cancellationToken?: CancellationToken): LangiumDocument | Promise> {\n        if (cancellationToken) {\n            return this.createAsync(uri, text, cancellationToken);\n        } else {\n            return this.create(uri, text);\n        }\n    }\n\n    fromModel(model: T, uri: URI): LangiumDocument {\n        return this.create(uri, { $model: model });\n    }\n\n    protected create(uri: URI, content: string | TextDocument | { $model: T }): LangiumDocument {\n        if (typeof content === 'string') {\n            const parseResult = this.parse(uri, content);\n            return this.createLangiumDocument(parseResult, uri, undefined, content);\n\n        } else if ('$model' in content) {\n            const parseResult = { value: content.$model, parserErrors: [], lexerErrors: [] };\n            return this.createLangiumDocument(parseResult, uri);\n\n        } else {\n            const parseResult = this.parse(uri, content.getText());\n            return this.createLangiumDocument(parseResult, uri, content);\n        }\n    }\n\n    protected async createAsync(uri: URI, content: string | TextDocument, cancelToken: CancellationToken): Promise> {\n        if (typeof content === 'string') {\n            const parseResult = await this.parseAsync(uri, content, cancelToken);\n            return this.createLangiumDocument(parseResult, uri, undefined, 
content);\n        } else {\n            const parseResult = await this.parseAsync(uri, content.getText(), cancelToken);\n            return this.createLangiumDocument(parseResult, uri, content);\n        }\n    }\n\n    /**\n     * Create a LangiumDocument from a given parse result.\n     *\n     * A TextDocument is created on demand if it is not provided as argument here. Usually this\n     * should not be necessary because the main purpose of the TextDocument is to convert between\n     * text ranges and offsets, which is done solely in LSP request handling.\n     *\n     * With the introduction of {@link update} below this method is supposed to be mainly called\n     * during workspace initialization and on addition/recognition of new files, while changes in\n     * existing documents are processed via {@link update}.\n     */\n    protected createLangiumDocument(parseResult: ParseResult, uri: URI, textDocument?: TextDocument, text?: string): LangiumDocument {\n        let document: LangiumDocument;\n        if (textDocument) {\n            document = {\n                parseResult,\n                uri,\n                state: DocumentState.Parsed,\n                references: [],\n                textDocument\n            };\n        } else {\n            const textDocumentGetter = this.createTextDocumentGetter(uri, text);\n            document = {\n                parseResult,\n                uri,\n                state: DocumentState.Parsed,\n                references: [],\n                get textDocument() {\n                    return textDocumentGetter();\n                }\n            };\n        }\n        (parseResult.value as Mutable).$document = document;\n        return document;\n    }\n\n    async update(document: Mutable>, cancellationToken: CancellationToken): Promise> {\n        // The CST full text property contains the original text that was used to create the AST.\n        const oldText = document.parseResult.value.$cstNode?.root.fullText;\n        const textDocument = this.textDocuments?.get(document.uri.toString());\n        const text = textDocument ? 
textDocument.getText() : await this.fileSystemProvider.readFile(document.uri);\n\n        if (textDocument) {\n            Object.defineProperty(\n                document,\n                'textDocument',\n                {\n                    value: textDocument\n                }\n            );\n        } else {\n            const textDocumentGetter = this.createTextDocumentGetter(document.uri, text);\n            Object.defineProperty(\n                document,\n                'textDocument',\n                {\n                    get: textDocumentGetter\n                }\n            );\n        }\n\n        // Some of these documents can be pretty large, so parsing them again can be quite expensive.\n        // Therefore, we only parse if the text has actually changed.\n        if (oldText !== text) {\n            document.parseResult = await this.parseAsync(document.uri, text, cancellationToken);\n            (document.parseResult.value as Mutable).$document = document;\n        }\n        document.state = DocumentState.Parsed;\n        return document;\n    }\n\n    protected parse(uri: URI, text: string): ParseResult {\n        const services = this.serviceRegistry.getServices(uri);\n        return services.parser.LangiumParser.parse(text);\n    }\n\n    protected parseAsync(uri: URI, text: string, cancellationToken: CancellationToken): Promise> {\n        const services = this.serviceRegistry.getServices(uri);\n        return services.parser.AsyncParser.parse(text, cancellationToken);\n    }\n\n    protected createTextDocumentGetter(uri: URI, text?: string): () => TextDocument {\n        const serviceRegistry = this.serviceRegistry;\n        let textDoc: TextDocument | undefined = undefined;\n        return () => {\n            return textDoc ??= TextDocument.create(\n                uri.toString(), serviceRegistry.getServices(uri).LanguageMetaData.languageId, 0, text ?? ''\n            );\n        };\n    }\n}\n\n/**\n * Shared service for managing Langium documents.\n */\nexport interface LangiumDocuments {\n\n    /**\n     * A stream of all documents managed under this service.\n     */\n    readonly all: Stream\n\n    /**\n     * Manage a new document under this service.\n     * @throws an error if a document with the same URI is already present.\n     */\n    addDocument(document: LangiumDocument): void;\n\n    /**\n     * Retrieve the document with the given URI, if present. Otherwise returns `undefined`.\n     */\n    getDocument(uri: URI): LangiumDocument | undefined;\n\n    /**\n     * Retrieve the document with the given URI. 
If not present, a new one will be created using the file system access.\n     * The new document will be added to the list of documents managed under this service.\n     */\n    getOrCreateDocument(uri: URI, cancellationToken?: CancellationToken): Promise;\n\n    /**\n     * Creates a new document with the given URI and text content.\n     * The new document is automatically added to this service and can be retrieved using {@link getDocument}.\n     *\n     * @throws an error if a document with the same URI is already present.\n     */\n    createDocument(uri: URI, text: string): LangiumDocument;\n\n    /**\n     * Creates a new document with the given URI and text content asynchronously.\n     * The process can be interrupted with a cancellation token.\n     * The new document is automatically added to this service and can be retrieved using {@link getDocument}.\n     *\n     * @throws an error if a document with the same URI is already present.\n     */\n    createDocument(uri: URI, text: string, cancellationToken: CancellationToken): Promise;\n\n    /**\n     * Returns `true` if a document with the given URI is managed under this service.\n     */\n    hasDocument(uri: URI): boolean;\n\n    /**\n     * Flag the document with the given URI as `Changed`, if present, meaning that its content\n     * is no longer valid. The content (parseResult) stays untouched, while internal data may\n     * be dropped to reduce memory footprint.\n     *\n     * @returns the affected {@link LangiumDocument} if existing for convenience\n     */\n    invalidateDocument(uri: URI): LangiumDocument | undefined;\n\n    /**\n     * Remove the document with the given URI, if present, and mark it as `Changed`, meaning\n     * that its content is no longer valid. The next call to `getOrCreateDocument` with the same\n     * URI will create a new document instance.\n     *\n     * @returns the affected {@link LangiumDocument} if existing for convenience\n     */\n    deleteDocument(uri: URI): LangiumDocument | undefined;\n}\n\nexport class DefaultLangiumDocuments implements LangiumDocuments {\n\n    protected readonly langiumDocumentFactory: LangiumDocumentFactory;\n\n    protected readonly documentMap: Map = new Map();\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.langiumDocumentFactory = services.workspace.LangiumDocumentFactory;\n    }\n\n    get all(): Stream {\n        return stream(this.documentMap.values());\n    }\n\n    addDocument(document: LangiumDocument): void {\n        const uriString = document.uri.toString();\n        if (this.documentMap.has(uriString)) {\n            throw new Error(`A document with the URI '${uriString}' is already present.`);\n        }\n        this.documentMap.set(uriString, document);\n    }\n\n    getDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        return this.documentMap.get(uriString);\n    }\n\n    async getOrCreateDocument(uri: URI, cancellationToken?: CancellationToken): Promise {\n        let document = this.getDocument(uri);\n        if (document) {\n            return document;\n        }\n        document = await this.langiumDocumentFactory.fromUri(uri, cancellationToken);\n        this.addDocument(document);\n        return document;\n    }\n\n    createDocument(uri: URI, text: string): LangiumDocument;\n    createDocument(uri: URI, text: string, cancellationToken: CancellationToken): Promise;\n    createDocument(uri: URI, text: string, cancellationToken?: CancellationToken): 
LangiumDocument | Promise {\n        if (cancellationToken) {\n            return this.langiumDocumentFactory.fromString(text, uri, cancellationToken).then(document => {\n                this.addDocument(document);\n                return document;\n            });\n        } else {\n            const document = this.langiumDocumentFactory.fromString(text, uri);\n            this.addDocument(document);\n            return document;\n        }\n    }\n\n    hasDocument(uri: URI): boolean {\n        return this.documentMap.has(uri.toString());\n    }\n\n    invalidateDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        const langiumDoc = this.documentMap.get(uriString);\n        if (langiumDoc) {\n            langiumDoc.state = DocumentState.Changed;\n            langiumDoc.precomputedScopes = undefined;\n            langiumDoc.references = [];\n            langiumDoc.diagnostics = undefined;\n        }\n        return langiumDoc;\n    }\n\n    deleteDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        const langiumDoc = this.documentMap.get(uriString);\n        if (langiumDoc) {\n            langiumDoc.state = DocumentState.Changed;\n            this.documentMap.delete(uriString);\n        }\n        return langiumDoc;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection, CstNode, LinkingError, Reference, ReferenceInfo } from '../syntax-tree.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { LangiumDocument, LangiumDocuments } from '../workspace/documents.js';\nimport type { ScopeProvider } from './scope-provider.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { isAstNode, isAstNodeDescription, isLinkingError } from '../syntax-tree.js';\nimport { getDocument, streamAst, streamReferences } from '../utils/ast-utils.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\nimport { DocumentState } from '../workspace/documents.js';\n\n/**\n * Language-specific service for resolving cross-references in the AST.\n */\nexport interface Linker {\n\n    /**\n     * Links all cross-references within the specified document. The default implementation loads only target\n     * elements from documents that are present in the `LangiumDocuments` service. 
The linked references are\n     * stored in the document's `references` property.\n     *\n     * @param document A LangiumDocument that shall be linked.\n     * @param cancelToken A token for cancelling the operation.\n     */\n    link(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Unlinks all references within the specified document and removes them from the list of `references`.\n     *\n     * @param document A LangiumDocument that shall be unlinked.\n     */\n    unlink(document: LangiumDocument): void;\n\n    /**\n     * Determines a candidate AST node description for linking the given reference.\n     *\n     * @param node The AST node containing the reference.\n     * @param refId The reference identifier used to build a scope.\n     * @param reference The actual reference to resolve.\n     */\n    getCandidate(refInfo: ReferenceInfo): AstNodeDescription | LinkingError;\n\n    /**\n     * Creates a cross reference node being aware of its containing AstNode, the corresponding CstNode,\n     * the cross reference text denoting the target AstNode being already extracted of the document text,\n     * as well as the unique cross reference identifier.\n     *\n     * Default behavior:\n     *  - The returned Reference's 'ref' property pointing to the target AstNode is populated lazily on its\n     *    first visit.\n     *  - If the target AstNode cannot be resolved on the first visit, an error indicator will be installed\n     *    and further resolution attempts will *not* be performed.\n     *\n     * @param node The containing AST node\n     * @param refNode The corresponding CST node\n     * @param refId The cross reference identifier like ':'\n     * @param refText The cross reference text denoting the target AstNode\n     * @returns the desired Reference node, whose behavior wrt. 
resolving the cross reference is implementation specific.\n     */\n    buildReference(node: AstNode, property: string, refNode: CstNode | undefined, refText: string): Reference;\n\n}\n\ninterface DefaultReference extends Reference {\n    _ref?: AstNode | LinkingError;\n    _nodeDescription?: AstNodeDescription;\n}\n\nexport class DefaultLinker implements Linker {\n    protected readonly reflection: AstReflection;\n    protected readonly scopeProvider: ScopeProvider;\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly langiumDocuments: () => LangiumDocuments;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n        this.langiumDocuments = () => services.shared.workspace.LangiumDocuments;\n        this.scopeProvider = services.references.ScopeProvider;\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    async link(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        for (const node of streamAst(document.parseResult.value)) {\n            await interruptAndCheck(cancelToken);\n            streamReferences(node).forEach(ref => this.doLink(ref, document));\n        }\n    }\n\n    protected doLink(refInfo: ReferenceInfo, document: LangiumDocument): void {\n        const ref = refInfo.reference as DefaultReference;\n        // The reference may already have been resolved lazily by accessing its `ref` property.\n        if (ref._ref === undefined) {\n            try {\n                const description = this.getCandidate(refInfo);\n                if (isLinkingError(description)) {\n                    ref._ref = description;\n                } else {\n                    ref._nodeDescription = description;\n                    if (this.langiumDocuments().hasDocument(description.documentUri)) {\n                        // The target document is already loaded\n                        const linkedNode = this.loadAstNode(description);\n                        ref._ref = linkedNode ?? this.createLinkingError(refInfo, description);\n                    }\n                }\n            } catch (err) {\n                ref._ref = {\n                    ...refInfo,\n                    message: `An error occurred while resolving reference to '${ref.$refText}': ${err}`\n                };\n            }\n        }\n        // Add the reference to the document's array of references\n        document.references.push(ref);\n    }\n\n    unlink(document: LangiumDocument): void {\n        for (const ref of document.references) {\n            delete (ref as DefaultReference)._ref;\n            delete (ref as DefaultReference)._nodeDescription;\n        }\n        document.references = [];\n    }\n\n    getCandidate(refInfo: ReferenceInfo): AstNodeDescription | LinkingError {\n        const scope = this.scopeProvider.getScope(refInfo);\n        const description = scope.getElement(refInfo.reference.$refText);\n        return description ?? 
this.createLinkingError(refInfo);\n    }\n\n    buildReference(node: AstNode, property: string, refNode: CstNode | undefined, refText: string): Reference {\n        // See behavior description in doc of Linker, update that on changes in here.\n        // eslint-disable-next-line @typescript-eslint/no-this-alias\n        const linker = this;\n        const reference: DefaultReference = {\n            $refNode: refNode,\n            $refText: refText,\n\n            get ref() {\n                if (isAstNode(this._ref)) {\n                    // Most frequent case: the target is already resolved.\n                    return this._ref;\n                } else if (isAstNodeDescription(this._nodeDescription)) {\n                    // A candidate has been found before, but it is not loaded yet.\n                    const linkedNode = linker.loadAstNode(this._nodeDescription);\n                    this._ref = linkedNode ??\n                        linker.createLinkingError({ reference, container: node, property }, this._nodeDescription);\n                } else if (this._ref === undefined) {\n                    // The reference has not been linked yet, so do that now.\n                    const refData = linker.getLinkedNode({ reference, container: node, property });\n                    if (refData.error && getDocument(node).state < DocumentState.ComputedScopes) {\n                        // Document scope is not ready, don't set `this._ref` so linker can retry later.\n                        return undefined;\n                    }\n                    this._ref = refData.node ?? refData.error;\n                    this._nodeDescription = refData.descr;\n                }\n                return isAstNode(this._ref) ? this._ref : undefined;\n            },\n            get $nodeDescription() {\n                return this._nodeDescription;\n            },\n            get error() {\n                return isLinkingError(this._ref) ? 
this._ref : undefined;\n            }\n        };\n        return reference;\n    }\n\n    protected getLinkedNode(refInfo: ReferenceInfo): { node?: AstNode, descr?: AstNodeDescription, error?: LinkingError } {\n        try {\n            const description = this.getCandidate(refInfo);\n            if (isLinkingError(description)) {\n                return { error: description };\n            }\n            const linkedNode = this.loadAstNode(description);\n            if (linkedNode) {\n                return { node: linkedNode, descr: description };\n            }\n            else {\n                return {\n                    descr: description,\n                    error:\n                        this.createLinkingError(refInfo, description)\n                };\n            }\n        } catch (err) {\n            return {\n                error: {\n                    ...refInfo,\n                    message: `An error occurred while resolving reference to '${refInfo.reference.$refText}': ${err}`\n                }\n            };\n        }\n    }\n\n    protected loadAstNode(nodeDescription: AstNodeDescription): AstNode | undefined {\n        if (nodeDescription.node) {\n            return nodeDescription.node;\n        }\n        const doc = this.langiumDocuments().getDocument(nodeDescription.documentUri);\n        if (!doc) {\n            return undefined;\n        }\n        return this.astNodeLocator.getAstNode(doc.parseResult.value, nodeDescription.path);\n    }\n\n    protected createLinkingError(refInfo: ReferenceInfo, targetDescription?: AstNodeDescription): LinkingError {\n        // Check whether the document is sufficiently processed by the DocumentBuilder. If not, this is a hint for a bug\n        // in the language implementation.\n        const document = getDocument(refInfo.container);\n        if (document.state < DocumentState.ComputedScopes) {\n            console.warn(`Attempted reference resolution before document reached ComputedScopes state (${document.uri}).`);\n        }\n        const referenceType = this.reflection.getReferenceType(refInfo);\n        return {\n            ...refInfo,\n            message: `Could not resolve reference to ${referenceType} named '${refInfo.reference.$refText}'.`,\n            targetDescription\n        };\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport { findNodeForProperty } from '../utils/grammar-utils.js';\n\nexport interface NamedAstNode extends AstNode {\n    name: string;\n}\n\nexport function isNamed(node: AstNode): node is NamedAstNode {\n    return typeof (node as NamedAstNode).name === 'string';\n}\n\n/**\n * Utility service for retrieving the `name` of an `AstNode` or the `CstNode` containing a `name`.\n */\nexport interface NameProvider {\n    /**\n     * Returns the `name` of a given AstNode.\n     * @param node Specified `AstNode` whose name node shall be retrieved.\n     */\n    getName(node: AstNode): string | undefined;\n    /**\n     * Returns the `CstNode` which contains the parsed value of the `name` assignment.\n     * @param node Specified `AstNode` whose name node shall be retrieved.\n     */\n    getNameNode(node: AstNode): 
CstNode | undefined;\n}\n\nexport class DefaultNameProvider implements NameProvider {\n    getName(node: AstNode): string | undefined {\n        if (isNamed(node)) {\n            return node.name;\n        }\n        return undefined;\n    }\n\n    getNameNode(node: AstNode): CstNode | undefined {\n        return findNodeForProperty(node.$cstNode, 'name');\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode, GenericAstNode } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { ReferenceDescription } from '../workspace/ast-descriptions.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { NameProvider } from './name-provider.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { findAssignment } from '../utils/grammar-utils.js';\nimport { isReference } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { isChildNode, toDocumentSegment } from '../utils/cst-utils.js';\nimport { stream } from '../utils/stream.js';\nimport { UriUtils } from '../utils/uri-utils.js';\n\n/**\n * Language-specific service for finding references and declaration of a given `CstNode`.\n */\nexport interface References {\n\n    /**\n     * If the CstNode is a reference node the target CstNode will be returned.\n     * If the CstNode is a significant node of the CstNode this CstNode will be returned.\n     *\n     * @param sourceCstNode CstNode that points to a AstNode\n     */\n    findDeclaration(sourceCstNode: CstNode): AstNode | undefined;\n\n    /**\n     * If the CstNode is a reference node the target CstNode will be returned.\n     * If the CstNode is a significant node of the CstNode this CstNode will be returned.\n     *\n     * @param sourceCstNode CstNode that points to a AstNode\n     */\n    findDeclarationNode(sourceCstNode: CstNode): CstNode | undefined;\n\n    /**\n     * Finds all references to the target node as references (local references) or reference descriptions.\n     *\n     * @param targetNode Specified target node whose references should be returned\n     */\n    findReferences(targetNode: AstNode, options: FindReferencesOptions): Stream;\n}\n\nexport interface FindReferencesOptions {\n    /**\n     * @deprecated Since v1.2.0. 
Please use `documentUri` instead.\n     */\n    onlyLocal?: boolean;\n    /**\n     * When set, the `findReferences` method will only return references/declarations from the specified document.\n     */\n    documentUri?: URI;\n    /**\n     * Whether the returned list of references should include the declaration.\n     */\n    includeDeclaration?: boolean;\n}\n\nexport class DefaultReferences implements References {\n    protected readonly nameProvider: NameProvider;\n    protected readonly index: IndexManager;\n    protected readonly nodeLocator: AstNodeLocator;\n\n    constructor(services: LangiumCoreServices) {\n        this.nameProvider = services.references.NameProvider;\n        this.index = services.shared.workspace.IndexManager;\n        this.nodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    findDeclaration(sourceCstNode: CstNode): AstNode | undefined {\n        if (sourceCstNode) {\n            const assignment = findAssignment(sourceCstNode);\n            const nodeElem = sourceCstNode.astNode;\n            if (assignment && nodeElem) {\n                const reference = (nodeElem as GenericAstNode)[assignment.feature];\n\n                if (isReference(reference)) {\n                    return reference.ref;\n                } else if (Array.isArray(reference)) {\n                    for (const ref of reference) {\n                        if (isReference(ref) && ref.$refNode\n                            && ref.$refNode.offset <= sourceCstNode.offset\n                            && ref.$refNode.end >= sourceCstNode.end) {\n                            return ref.ref;\n                        }\n                    }\n                }\n            }\n            if (nodeElem) {\n                const nameNode = this.nameProvider.getNameNode(nodeElem);\n                // Only return the targeted node in case the targeted cst node is the name node or part of it\n                if (nameNode && (nameNode === sourceCstNode || isChildNode(sourceCstNode, nameNode))) {\n                    return nodeElem;\n                }\n            }\n        }\n        return undefined;\n    }\n\n    findDeclarationNode(sourceCstNode: CstNode): CstNode | undefined {\n        const astNode = this.findDeclaration(sourceCstNode);\n        if (astNode?.$cstNode) {\n            const targetNode = this.nameProvider.getNameNode(astNode);\n            return targetNode ?? 
astNode.$cstNode;\n        }\n        return undefined;\n    }\n\n    findReferences(targetNode: AstNode, options: FindReferencesOptions): Stream {\n        const refs: ReferenceDescription[] = [];\n        if (options.includeDeclaration) {\n            const ref = this.getReferenceToSelf(targetNode);\n            if (ref) {\n                refs.push(ref);\n            }\n        }\n        let indexReferences = this.index.findAllReferences(targetNode, this.nodeLocator.getAstNodePath(targetNode));\n        if (options.documentUri) {\n            indexReferences = indexReferences.filter(ref => UriUtils.equals(ref.sourceUri, options.documentUri));\n        }\n        refs.push(...indexReferences);\n        return stream(refs);\n    }\n\n    protected getReferenceToSelf(targetNode: AstNode): ReferenceDescription | undefined {\n        const nameNode = this.nameProvider.getNameNode(targetNode);\n        if (nameNode) {\n            const doc = getDocument(targetNode);\n            const path = this.nodeLocator.getAstNodePath(targetNode);\n            return {\n                sourceUri: doc.uri,\n                sourcePath: path,\n                targetUri: doc.uri,\n                targetPath: path,\n                segment: toDocumentSegment(nameNode),\n                local: true\n            };\n        }\n        return undefined;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Stream } from './stream.js';\nimport { Reduction, stream } from './stream.js';\n\n/**\n * A multimap is a variation of a Map that has potentially multiple values for every key.\n */\nexport class MultiMap {\n\n    private map = new Map();\n\n    constructor()\n    constructor(elements: Array<[K, V]>)\n    constructor(elements?: Array<[K, V]>) {\n        if (elements) {\n            for (const [key, value] of elements) {\n                this.add(key, value);\n            }\n        }\n    }\n\n    /**\n     * The total number of values in the multimap.\n     */\n    get size(): number {\n        return Reduction.sum(stream(this.map.values()).map(a => a.length));\n    }\n\n    /**\n     * Clear all entries in the multimap.\n     */\n    clear(): void {\n        this.map.clear();\n    }\n\n    /**\n     * Operates differently depending on whether a `value` is given:\n     *  * With a value, this method deletes the specific key / value pair from the multimap.\n     *  * Without a value, all values associated with the given key are deleted.\n     *\n     * @returns `true` if a value existed and has been removed, or `false` if the specified\n     *     key / value does not exist.\n     */\n    delete(key: K, value?: V): boolean {\n        if (value === undefined) {\n            return this.map.delete(key);\n        } else {\n            const values = this.map.get(key);\n            if (values) {\n                const index = values.indexOf(value);\n                if (index >= 0) {\n                    if (values.length === 1) {\n                        this.map.delete(key);\n                    } else {\n                        values.splice(index, 1);\n                    }\n                    return true;\n                }\n            }\n            return false;\n        }\n    }\n\n    
/**\n     * Returns an array of all values associated with the given key. If no value exists,\n     * an empty array is returned.\n     *\n     * _Note:_ The returned array is assumed not to be modified. Use the `set` method to add a\n     * value and `delete` to remove a value from the multimap.\n     */\n    get(key: K): readonly V[] {\n        return this.map.get(key) ?? [];\n    }\n\n    /**\n     * Operates differently depending on whether a `value` is given:\n     *  * With a value, this method returns `true` if the specific key / value pair is present in the multimap.\n     *  * Without a value, this method returns `true` if the given key is present in the multimap.\n     */\n    has(key: K, value?: V): boolean {\n        if (value === undefined) {\n            return this.map.has(key);\n        } else {\n            const values = this.map.get(key);\n            if (values) {\n                return values.indexOf(value) >= 0;\n            }\n            return false;\n        }\n    }\n\n    /**\n     * Add the given key / value pair to the multimap.\n     */\n    add(key: K, value: V): this {\n        if (this.map.has(key)) {\n            this.map.get(key)!.push(value);\n        } else {\n            this.map.set(key, [value]);\n        }\n        return this;\n    }\n\n    /**\n     * Add the given set of key / value pairs to the multimap.\n     */\n    addAll(key: K, values: Iterable): this {\n        if (this.map.has(key)) {\n            this.map.get(key)!.push(...values);\n        } else {\n            this.map.set(key, Array.from(values));\n        }\n        return this;\n    }\n\n    /**\n     * Invokes the given callback function for every key / value pair in the multimap.\n     */\n    forEach(callbackfn: (value: V, key: K, map: this) => void): void {\n        this.map.forEach((array, key) =>\n            array.forEach(value => callbackfn(value, key, this))\n        );\n    }\n\n    /**\n     * Returns an iterator of key, value pairs for every entry in the map.\n     */\n    [Symbol.iterator](): Iterator<[K, V]> {\n        return this.entries().iterator();\n    }\n\n    /**\n     * Returns a stream of key, value pairs for every entry in the map.\n     */\n    entries(): Stream<[K, V]> {\n        return stream(this.map.entries())\n            .flatMap(([key, array]) => array.map(value => [key, value] as [K, V]));\n    }\n\n    /**\n     * Returns a stream of keys in the map.\n     */\n    keys(): Stream {\n        return stream(this.map.keys());\n    }\n\n    /**\n     * Returns a stream of values in the map.\n     */\n    values(): Stream {\n        return stream(this.map.values()).flat();\n    }\n\n    /**\n     * Returns a stream of key, value set pairs for every key in the map.\n     */\n    entriesGroupedByKey(): Stream<[K, V[]]> {\n        return stream(this.map.entries());\n    }\n\n}\n\nexport class BiMap {\n\n    private map = new Map();\n    private inverse = new Map();\n\n    get size(): number {\n        return this.map.size;\n    }\n\n    constructor()\n    constructor(elements: Array<[K, V]>)\n    constructor(elements?: Array<[K, V]>) {\n        if (elements) {\n            for (const [key, value] of elements) {\n                this.set(key, value);\n            }\n        }\n    }\n\n    clear(): void {\n        this.map.clear();\n        this.inverse.clear();\n    }\n\n    set(key: K, value: V): this {\n        this.map.set(key, value);\n        this.inverse.set(value, key);\n        return this;\n    }\n\n    get(key: K): V | undefined {\n        return 
this.map.get(key);\n    }\n\n    getKey(value: V): K | undefined {\n        return this.inverse.get(value);\n    }\n\n    delete(key: K): boolean {\n        const value = this.map.get(key);\n        if (value !== undefined) {\n            this.map.delete(key);\n            this.inverse.delete(value);\n            return true;\n        }\n        return false;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription } from '../syntax-tree.js';\nimport type { AstNodeDescriptionProvider } from '../workspace/ast-descriptions.js';\nimport type { LangiumDocument, PrecomputedScopes } from '../workspace/documents.js';\nimport type { NameProvider } from './name-provider.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { streamAllContents, streamContents } from '../utils/ast-utils.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\n\n/**\n * Language-specific service for precomputing global and local scopes. The service methods are executed\n * as the first and second phase in the `DocumentBuilder`.\n */\nexport interface ScopeComputation {\n\n    /**\n     * Creates descriptions of all AST nodes that shall be exported into the _global_ scope from the given\n     * document. These descriptions are gathered by the `IndexManager` and stored in the global index so\n     * they can be referenced from other documents.\n     *\n     * _Note:_ You should not resolve any cross-references in this service method. Cross-reference resolution\n     * depends on the scope computation phase to be completed (`computeScope` method), which runs after the\n     * initial indexing where this method is used.\n     *\n     * @param document The document from which to gather exported AST nodes.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    computeExports(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Precomputes the _local_ scopes for a document, which are necessary for the default way of\n     * resolving references to symbols in the same document. The result is a multimap assigning a\n     * set of AST node descriptions to every level of the AST. These data are used by the `ScopeProvider`\n     * service to determine which target nodes are visible in the context of a specific cross-reference.\n     *\n     * _Note:_ You should not resolve any cross-references in this service method. Cross-reference\n     * resolution depends on the scope computation phase to be completed.\n     *\n     * @param document The document in which to compute scopes.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    computeLocalScopes(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n}\n\n/**\n * The default scope computation creates and collectes descriptions of the AST nodes to be exported into the\n * _global_ scope from the given document. 
By default those are the document's root AST node and its directly\n * contained child nodes.\n *\n * Besides, it gathers all AST nodes that have a name (according to the `NameProvider` service) and includes them\n * in the local scope of their particular container nodes. As a result, for every cross-reference in the AST,\n * target elements from the same level (siblings) and further up towards the root (parents and siblings of parents)\n * are visible. Elements being nested inside lower levels (children, children of siblings and parents' siblings)\n * are _invisible_ by default, but that can be changed by customizing this service.\n */\nexport class DefaultScopeComputation implements ScopeComputation {\n\n    protected readonly nameProvider: NameProvider;\n    protected readonly descriptions: AstNodeDescriptionProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.nameProvider = services.references.NameProvider;\n        this.descriptions = services.workspace.AstNodeDescriptionProvider;\n    }\n\n    async computeExports(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        return this.computeExportsForNode(document.parseResult.value, document, undefined, cancelToken);\n    }\n\n    /**\n     * Creates {@link AstNodeDescription AstNodeDescriptions} for the given {@link AstNode parentNode} and its children.\n     * The list of children to be considered is determined by the function parameter {@link children}.\n     * By default only the direct children of {@link parentNode} are visited, nested nodes are not exported.\n     *\n     * @param parentNode AST node to be exported, i.e., of which an {@link AstNodeDescription} shall be added to the returned list.\n     * @param document The document containing the AST node to be exported.\n     * @param children A function called with {@link parentNode} as single argument and returning an {@link Iterable} supplying the children to be visited, which must be directly or transitively contained in {@link parentNode}.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution.\n     * @returns A list of {@link AstNodeDescription AstNodeDescriptions} to be published to index.\n     */\n    async computeExportsForNode(parentNode: AstNode, document: LangiumDocument, children: (root: AstNode) => Iterable = streamContents, cancelToken: CancellationToken = CancellationToken.None): Promise {\n        const exports: AstNodeDescription[] = [];\n\n        this.exportNode(parentNode, exports, document);\n        for (const node of children(parentNode)) {\n            await interruptAndCheck(cancelToken);\n            this.exportNode(node, exports, document);\n        }\n        return exports;\n    }\n\n    /**\n     * Add a single node to the list of exports if it has a name. Override this method to change how\n     * symbols are exported, e.g. 
by modifying their exported name.\n     */\n    protected exportNode(node: AstNode, exports: AstNodeDescription[], document: LangiumDocument): void {\n        const name = this.nameProvider.getName(node);\n        if (name) {\n            exports.push(this.descriptions.createDescription(node, name, document));\n        }\n    }\n\n    async computeLocalScopes(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const rootNode = document.parseResult.value;\n        const scopes = new MultiMap();\n        // Here we navigate the full AST - local scopes shall be available in the whole document\n        for (const node of streamAllContents(rootNode)) {\n            await interruptAndCheck(cancelToken);\n            this.processNode(node, document, scopes);\n        }\n        return scopes;\n    }\n\n    /**\n     * Process a single node during scopes computation. The default implementation makes the node visible\n     * in the subtree of its container (if the node has a name). Override this method to change this,\n     * e.g. by increasing the visibility to a higher level in the AST.\n     */\n    protected processNode(node: AstNode, document: LangiumDocument, scopes: PrecomputedScopes): void {\n        const container = node.$container;\n        if (container) {\n            const name = this.nameProvider.getName(node);\n            if (name) {\n                scopes.add(container, this.descriptions.createDescription(node, name, document));\n            }\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNodeDescription } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport { EMPTY_STREAM, stream } from '../utils/stream.js';\n\n/**\n * A scope describes what target elements are visible from a specific cross-reference context.\n */\nexport interface Scope {\n\n    /**\n     * Find a target element matching the given name. If no element is found, `undefined` is returned.\n     * If multiple matching elements are present, the selection of the returned element should be done\n     * according to the semantics of your language. Usually it is the element that is most closely defined.\n     *\n     * @param name Name of the cross-reference target as it appears in the source text.\n     */\n    getElement(name: string): AstNodeDescription | undefined;\n\n    /**\n     * Create a stream of all elements in the scope. This is used to compute completion proposals to be\n     * shown in the editor.\n     */\n    getAllElements(): Stream;\n\n}\n\nexport interface ScopeOptions {\n    caseInsensitive?: boolean;\n}\n\n/**\n * The default scope implementation is based on a `Stream`. 
It has an optional _outer scope_ describing\n * the next level of elements, which are queried when a target element is not found in the stream provided\n * to this scope.\n */\nexport class StreamScope implements Scope {\n    readonly elements: Stream;\n    readonly outerScope?: Scope;\n    readonly caseInsensitive: boolean;\n\n    constructor(elements: Stream, outerScope?: Scope, options?: ScopeOptions) {\n        this.elements = elements;\n        this.outerScope = outerScope;\n        this.caseInsensitive = options?.caseInsensitive ?? false;\n    }\n\n    getAllElements(): Stream {\n        if (this.outerScope) {\n            return this.elements.concat(this.outerScope.getAllElements());\n        } else {\n            return this.elements;\n        }\n    }\n\n    getElement(name: string): AstNodeDescription | undefined {\n        const local = this.caseInsensitive\n            ? this.elements.find(e => e.name.toLowerCase() === name.toLowerCase())\n            : this.elements.find(e => e.name === name);\n        if (local) {\n            return local;\n        }\n        if (this.outerScope) {\n            return this.outerScope.getElement(name);\n        }\n        return undefined;\n    }\n}\n\nexport class MapScope implements Scope {\n    readonly elements: Map;\n    readonly outerScope?: Scope;\n    readonly caseInsensitive: boolean;\n\n    constructor(elements: Iterable, outerScope?: Scope, options?: ScopeOptions) {\n        this.elements = new Map();\n        this.caseInsensitive = options?.caseInsensitive ?? false;\n        for (const element of elements) {\n            const name = this.caseInsensitive\n                ? element.name.toLowerCase()\n                : element.name;\n            this.elements.set(name, element);\n        }\n        this.outerScope = outerScope;\n    }\n\n    getElement(name: string): AstNodeDescription | undefined {\n        const localName = this.caseInsensitive ? 
name.toLowerCase() : name;\n        const local = this.elements.get(localName);\n        if (local) {\n            return local;\n        }\n        if (this.outerScope) {\n            return this.outerScope.getElement(name);\n        }\n        return undefined;\n    }\n\n    getAllElements(): Stream {\n        let elementStream = stream(this.elements.values());\n        if (this.outerScope) {\n            elementStream = elementStream.concat(this.outerScope.getAllElements());\n        }\n        return elementStream;\n    }\n\n}\n\nexport const EMPTY_SCOPE: Scope = {\n    getElement(): undefined {\n        return undefined;\n    },\n    getAllElements(): Stream {\n        return EMPTY_STREAM;\n    }\n};\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Disposable } from './disposable.js';\nimport type { URI } from './uri-utils.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\n\nexport abstract class DisposableCache implements Disposable {\n\n    protected toDispose: Disposable[] = [];\n    protected isDisposed = false;\n\n    onDispose(disposable: Disposable): void {\n        this.toDispose.push(disposable);\n    }\n\n    dispose(): void {\n        this.throwIfDisposed();\n        this.clear();\n        this.isDisposed = true;\n        this.toDispose.forEach(disposable => disposable.dispose());\n    }\n\n    protected throwIfDisposed(): void {\n        if (this.isDisposed) {\n            throw new Error('This cache has already been disposed');\n        }\n    }\n\n    abstract clear(): void;\n}\n\nexport class SimpleCache extends DisposableCache {\n    protected readonly cache = new Map();\n\n    has(key: K): boolean {\n        this.throwIfDisposed();\n        return this.cache.has(key);\n    }\n\n    set(key: K, value: V): void {\n        this.throwIfDisposed();\n        this.cache.set(key, value);\n    }\n\n    get(key: K): V | undefined;\n    get(key: K, provider: () => V): V;\n    get(key: K, provider?: () => V): V | undefined {\n        this.throwIfDisposed();\n        if (this.cache.has(key)) {\n            return this.cache.get(key);\n        } else if (provider) {\n            const value = provider();\n            this.cache.set(key, value);\n            return value;\n        } else {\n            return undefined;\n        }\n    }\n\n    delete(key: K): boolean {\n        this.throwIfDisposed();\n        return this.cache.delete(key);\n    }\n\n    clear(): void {\n        this.throwIfDisposed();\n        this.cache.clear();\n    }\n}\n\nexport class ContextCache extends DisposableCache {\n\n    private readonly cache = new Map>();\n    private readonly converter: (input: Context) => ContextKey | Context;\n\n    constructor(converter?: (input: Context) => ContextKey) {\n        super();\n        this.converter = converter ?? 
(value => value);\n    }\n\n    has(contextKey: Context, key: Key): boolean {\n        this.throwIfDisposed();\n        return this.cacheForContext(contextKey).has(key);\n    }\n\n    set(contextKey: Context, key: Key, value: Value): void {\n        this.throwIfDisposed();\n        this.cacheForContext(contextKey).set(key, value);\n    }\n\n    get(contextKey: Context, key: Key): Value | undefined;\n    get(contextKey: Context, key: Key, provider: () => Value): Value;\n    get(contextKey: Context, key: Key, provider?: () => Value): Value | undefined {\n        this.throwIfDisposed();\n        const contextCache = this.cacheForContext(contextKey);\n        if (contextCache.has(key)) {\n            return contextCache.get(key);\n        } else if (provider) {\n            const value = provider();\n            contextCache.set(key, value);\n            return value;\n        } else {\n            return undefined;\n        }\n    }\n\n    delete(contextKey: Context, key: Key): boolean {\n        this.throwIfDisposed();\n        return this.cacheForContext(contextKey).delete(key);\n    }\n\n    clear(): void;\n    clear(contextKey: Context): void;\n    clear(contextKey?: Context): void {\n        this.throwIfDisposed();\n        if (contextKey) {\n            const mapKey = this.converter(contextKey);\n            this.cache.delete(mapKey);\n        } else {\n            this.cache.clear();\n        }\n    }\n\n    protected cacheForContext(contextKey: Context): Map {\n        const mapKey = this.converter(contextKey);\n        let documentCache = this.cache.get(mapKey);\n        if (!documentCache) {\n            documentCache = new Map();\n            this.cache.set(mapKey, documentCache);\n        }\n        return documentCache;\n    }\n}\n\n/**\n * Every key/value pair in this cache is scoped to a document.\n * If this document is changed or deleted, all associated key/value pairs are deleted.\n */\nexport class DocumentCache extends ContextCache {\n    constructor(sharedServices: LangiumSharedCoreServices) {\n        super(uri => uri.toString());\n        this.onDispose(sharedServices.workspace.DocumentBuilder.onUpdate((changed, deleted) => {\n            const allUris = changed.concat(deleted);\n            for (const uri of allUris) {\n                this.clear(uri);\n            }\n        }));\n    }\n}\n\n/**\n * Every key/value pair in this cache is scoped to the whole workspace.\n * If any document in the workspace changes, the whole cache is evicted.\n */\nexport class WorkspaceCache extends SimpleCache {\n    constructor(sharedServices: LangiumSharedCoreServices) {\n        super();\n        this.onDispose(sharedServices.workspace.DocumentBuilder.onUpdate(() => {\n            this.clear();\n        }));\n    }\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection, ReferenceInfo } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { AstNodeDescriptionProvider } from '../workspace/ast-descriptions.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { NameProvider } from 
'./name-provider.js';\nimport type { Scope, ScopeOptions} from './scope.js';\nimport { MapScope, StreamScope } from './scope.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { stream } from '../utils/stream.js';\nimport { WorkspaceCache } from '../utils/caching.js';\n\n/**\n * Language-specific service for determining the scope of target elements visible in a specific cross-reference context.\n */\nexport interface ScopeProvider {\n\n    /**\n     * Return a scope describing what elements are visible for the given AST node and cross-reference\n     * identifier.\n     *\n     * @param context Information about the reference for which a scope is requested.\n     */\n    getScope(context: ReferenceInfo): Scope;\n\n}\n\nexport class DefaultScopeProvider implements ScopeProvider {\n\n    protected readonly reflection: AstReflection;\n    protected readonly nameProvider: NameProvider;\n    protected readonly descriptions: AstNodeDescriptionProvider;\n    protected readonly indexManager: IndexManager;\n\n    protected readonly globalScopeCache: WorkspaceCache;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n        this.nameProvider = services.references.NameProvider;\n        this.descriptions = services.workspace.AstNodeDescriptionProvider;\n        this.indexManager = services.shared.workspace.IndexManager;\n        this.globalScopeCache = new WorkspaceCache(services.shared);\n    }\n\n    getScope(context: ReferenceInfo): Scope {\n        const scopes: Array> = [];\n        const referenceType = this.reflection.getReferenceType(context);\n\n        const precomputed = getDocument(context.container).precomputedScopes;\n        if (precomputed) {\n            let currentNode: AstNode | undefined = context.container;\n            do {\n                const allDescriptions = precomputed.get(currentNode);\n                if (allDescriptions.length > 0) {\n                    scopes.push(stream(allDescriptions).filter(\n                        desc => this.reflection.isSubtype(desc.type, referenceType)));\n                }\n                currentNode = currentNode.$container;\n            } while (currentNode);\n        }\n\n        let result: Scope = this.getGlobalScope(referenceType, context);\n        for (let i = scopes.length - 1; i >= 0; i--) {\n            result = this.createScope(scopes[i], result);\n        }\n        return result;\n    }\n\n    /**\n     * Create a scope for the given collection of AST node descriptions.\n     */\n    protected createScope(elements: Iterable, outerScope?: Scope, options?: ScopeOptions): Scope {\n        return new StreamScope(stream(elements), outerScope, options);\n    }\n\n    /**\n     * Create a scope for the given collection of AST nodes, which need to be transformed into respective\n     * descriptions first. 
This is done using the `NameProvider` and `AstNodeDescriptionProvider` services.\n     */\n    protected createScopeForNodes(elements: Iterable, outerScope?: Scope, options?: ScopeOptions): Scope {\n        const s = stream(elements).map(e => {\n            const name = this.nameProvider.getName(e);\n            if (name) {\n                return this.descriptions.createDescription(e, name);\n            }\n            return undefined;\n        }).nonNullable();\n        return new StreamScope(s, outerScope, options);\n    }\n\n    /**\n     * Create a global scope filtered for the given reference type.\n     */\n    protected getGlobalScope(referenceType: string, _context: ReferenceInfo): Scope {\n        return this.globalScopeCache.get(referenceType, () => new MapScope(this.indexManager.allElements(referenceType)));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { URI } from 'vscode-uri';\nimport type { CommentProvider } from '../documentation/comment-provider.js';\nimport type { NameProvider } from '../references/name-provider.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode, GenericAstNode, Mutable, Reference } from '../syntax-tree.js';\nimport { isAstNode, isReference } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { findNodesForProperty } from '../utils/grammar-utils.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { DocumentSegment, LangiumDocument, LangiumDocuments } from '../workspace/documents.js';\n\nexport interface JsonSerializeOptions {\n    /** The space parameter for `JSON.stringify`, controlling whether and how to pretty-print the output. */\n    space?: string | number;\n    /** Whether to include the `$refText` property for references (the name used to identify the target node). */\n    refText?: boolean;\n    /** Whether to include the `$sourceText` property, which holds the full source text from which an AST node was parsed. */\n    sourceText?: boolean;\n    /** Whether to include the `$textRegion` property, which holds information to trace AST node properties to their respective source text regions. */\n    textRegions?: boolean;\n    /** Whether to include the `$comment` property, which holds comments according to the CommentProvider service. */\n    comments?: boolean;\n    /** The replacer parameter for `JSON.stringify`; the default replacer given as parameter should be used to apply basic replacements. */\n    replacer?: (key: string, value: unknown, defaultReplacer: (key: string, value: unknown) => unknown) => unknown\n    /** Used to convert and serialize URIs when the target of a cross-reference is in a different document. */\n    uriConverter?: (uri: URI, reference: Reference) => string\n}\n\nexport interface JsonDeserializeOptions {\n    /** Used to parse and convert URIs when the target of a cross-reference is in a different document. 
*/\n    uriConverter?: (uri: string) => URI\n}\n\n/**\n * {@link AstNode}s that may carry information on their definition area within the DSL text.\n */\nexport interface AstNodeWithTextRegion extends AstNode {\n    $sourceText?: string;\n    $textRegion?: AstNodeRegionWithAssignments;\n}\n\n/**\n * {@link AstNode}s that may carry a semantically relevant comment.\n */\nexport interface AstNodeWithComment extends AstNode {\n    $comment?: string;\n}\n\nexport function isAstNodeWithComment(node: AstNode): node is AstNodeWithComment {\n    return typeof (node as AstNodeWithComment).$comment === 'string';\n}\n\n/**\n * A {@DocumentSegment} representing the definition area of an AstNode within the DSL text.\n * Usually contains text region information on all assigned property values of the AstNode,\n * and may contain the defining file's URI as string.\n */\nexport interface AstNodeRegionWithAssignments extends DocumentSegment {\n    /**\n     * A record containing an entry for each assigned property of the AstNode.\n     * The key is equal to the property name and the value is an array of the property values'\n     * text regions, regardless of whether the property is a single value or list property.\n     */\n    assignments?: Record;\n    /**\n     * The AstNode defining file's URI as string\n     */\n    documentURI?: string;\n}\n\n/**\n * Utility service for transforming an `AstNode` into a JSON string and vice versa.\n */\nexport interface JsonSerializer {\n    /**\n     * Serialize an `AstNode` into a JSON `string`.\n     * @param node The `AstNode` to be serialized.\n     * @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read.\n     */\n    serialize(node: AstNode, options?: JsonSerializeOptions): string;\n    /**\n     * Deserialize (parse) a JSON `string` into an `AstNode`.\n     */\n    deserialize(content: string, options?: JsonDeserializeOptions): T;\n}\n\n/**\n * A cross-reference in the serialized JSON representation of an AstNode.\n */\ninterface IntermediateReference {\n    /** URI pointing to the target element. This is either `#${path}` if the target is in the same document, or `${documentURI}#${path}` otherwise. */\n    $ref?: string\n    /** The actual text used to look up the reference target in the surrounding scope. */\n    $refText?: string\n    /** If any problem occurred while resolving the reference, it is described by this property. */\n    $error?: string\n}\n\nfunction isIntermediateReference(obj: unknown): obj is IntermediateReference {\n    return typeof obj === 'object' && !!obj && ('$ref' in obj || '$error' in obj);\n}\n\nexport class DefaultJsonSerializer implements JsonSerializer {\n\n    /** The set of AstNode properties to be ignored by the serializer. */\n    ignoreProperties = new Set(['$container', '$containerProperty', '$containerIndex', '$document', '$cstNode']);\n\n    /** The document that is currently processed by the serializer; this is used by the replacer function.  
*/\n    protected currentDocument: LangiumDocument | undefined;\n\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly nameProvider: NameProvider;\n    protected readonly commentProvider: CommentProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.langiumDocuments = services.shared.workspace.LangiumDocuments;\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n        this.nameProvider = services.references.NameProvider;\n        this.commentProvider = services.documentation.CommentProvider;\n    }\n\n    serialize(node: AstNode, options: JsonSerializeOptions = {}): string {\n        const specificReplacer = options?.replacer;\n        const defaultReplacer = (key: string, value: unknown) => this.replacer(key, value, options);\n        const replacer = specificReplacer ? (key: string, value: unknown) => specificReplacer(key, value, defaultReplacer) : defaultReplacer;\n\n        try {\n            this.currentDocument = getDocument(node);\n            return JSON.stringify(node, replacer, options?.space);\n        } finally {\n            this.currentDocument = undefined;\n        }\n    }\n\n    deserialize(content: string, options: JsonDeserializeOptions = {}): T {\n        const root = JSON.parse(content);\n        this.linkNode(root, root, options);\n        return root;\n    }\n\n    protected replacer(key: string, value: unknown, { refText, sourceText, textRegions, comments, uriConverter }: JsonSerializeOptions): unknown {\n        if (this.ignoreProperties.has(key)) {\n            return undefined;\n        } else if (isReference(value)) {\n            const refValue = value.ref;\n            const $refText = refText ? value.$refText : undefined;\n            if (refValue) {\n                const targetDocument = getDocument(refValue);\n                let targetUri = '';\n                if (this.currentDocument && this.currentDocument !== targetDocument) {\n                    if (uriConverter) {\n                        targetUri = uriConverter(targetDocument.uri, value);\n                    } else {\n                        targetUri = targetDocument.uri.toString();\n                    }\n                }\n                const targetPath = this.astNodeLocator.getAstNodePath(refValue);\n                return {\n                    $ref: `${targetUri}#${targetPath}`,\n                    $refText\n                } satisfies IntermediateReference;\n            } else {\n                return {\n                    $error: value.error?.message ?? 
'Could not resolve reference',\n                    $refText\n                } satisfies IntermediateReference;\n            }\n        } else if (isAstNode(value)) {\n            let astNode: AstNodeWithTextRegion | undefined = undefined;\n            if (textRegions) {\n                astNode = this.addAstNodeRegionWithAssignmentsTo({ ...value });\n                if ((!key || value.$document) && astNode?.$textRegion) {\n                    // The document URI is added to the root node of the resulting JSON tree\n                    astNode.$textRegion.documentURI = this.currentDocument?.uri.toString();\n                }\n            }\n            if (sourceText && !key) {\n                astNode ??= { ...value };\n                astNode.$sourceText = value.$cstNode?.text;\n            }\n            if (comments) {\n                astNode ??= { ...value };\n                const comment = this.commentProvider.getComment(value);\n                if (comment) {\n                    (astNode as AstNodeWithComment).$comment = comment.replace(/\\r/g, '');\n                }\n            }\n            return astNode ?? value;\n        } else {\n            return value;\n        }\n    }\n\n    protected addAstNodeRegionWithAssignmentsTo(node: AstNodeWithTextRegion) {\n        const createDocumentSegment: (cstNode: CstNode) => AstNodeRegionWithAssignments = cstNode => {\n            offset: cstNode.offset,\n            end: cstNode.end,\n            length: cstNode.length,\n            range: cstNode.range,\n        };\n\n        if (node.$cstNode) {\n            const textRegion = node.$textRegion = createDocumentSegment(node.$cstNode);\n            const assignments: Record = textRegion.assignments = {};\n\n            Object.keys(node).filter(key => !key.startsWith('$')).forEach(key => {\n                const propertyAssignments = findNodesForProperty(node.$cstNode, key).map(createDocumentSegment);\n                if (propertyAssignments.length !== 0) {\n                    assignments[key] = propertyAssignments;\n                }\n            });\n\n            return node;\n        }\n        return undefined;\n    }\n\n    protected linkNode(node: GenericAstNode, root: AstNode, options: JsonDeserializeOptions, container?: AstNode, containerProperty?: string, containerIndex?: number) {\n        for (const [propertyName, item] of Object.entries(node)) {\n            if (Array.isArray(item)) {\n                for (let index = 0; index < item.length; index++) {\n                    const element = item[index];\n                    if (isIntermediateReference(element)) {\n                        item[index] = this.reviveReference(node, propertyName, root, element, options);\n                    } else if (isAstNode(element)) {\n                        this.linkNode(element as GenericAstNode, root, options, node, propertyName, index);\n                    }\n                }\n            } else if (isIntermediateReference(item)) {\n                node[propertyName] = this.reviveReference(node, propertyName, root, item, options);\n            } else if (isAstNode(item)) {\n                this.linkNode(item as GenericAstNode, root, options, node, propertyName);\n            }\n        }\n        const mutable = node as Mutable;\n        mutable.$container = container;\n        mutable.$containerProperty = containerProperty;\n        mutable.$containerIndex = containerIndex;\n    }\n\n    protected reviveReference(container: AstNode, property: string, root: AstNode, reference: 
IntermediateReference, options: JsonDeserializeOptions): Reference | undefined {\n        let refText = reference.$refText;\n        let error = reference.$error;\n        if (reference.$ref) {\n            const ref = this.getRefNode(root, reference.$ref, options.uriConverter);\n            if (isAstNode(ref)) {\n                if (!refText) {\n                    refText = this.nameProvider.getName(ref);\n                }\n                return {\n                    $refText: refText ?? '',\n                    ref\n                };\n            } else {\n                error = ref;\n            }\n        }\n        if (error) {\n            const ref: Mutable = {\n                $refText: refText ?? ''\n            };\n            ref.error = {\n                container,\n                property,\n                message: error,\n                reference: ref\n            };\n            return ref;\n        } else {\n            return undefined;\n        }\n    }\n\n    protected getRefNode(root: AstNode, uri: string, uriConverter?: (uri: string) => URI): AstNode | string {\n        try {\n            const fragmentIndex = uri.indexOf('#');\n            if (fragmentIndex === 0) {\n                const node = this.astNodeLocator.getAstNode(root, uri.substring(1));\n                if (!node) {\n                    return 'Could not resolve path: ' + uri;\n                }\n                return node;\n            }\n            if (fragmentIndex < 0) {\n                const documentUri = uriConverter ? uriConverter(uri) : URI.parse(uri);\n                const document = this.langiumDocuments.getDocument(documentUri);\n                if (!document) {\n                    return 'Could not find document for URI: ' + uri;\n                }\n                return document.parseResult.value;\n            }\n            const documentUri = uriConverter ? uriConverter(uri.substring(0, fragmentIndex)) : URI.parse(uri.substring(0, fragmentIndex));\n            const document = this.langiumDocuments.getDocument(documentUri);\n            if (!document) {\n                return 'Could not find document for URI: ' + uri;\n            }\n            if (fragmentIndex === uri.length - 1) {\n                return document.parseResult.value;\n            }\n            const node = this.astNodeLocator.getAstNode(document.parseResult.value, uri.substring(fragmentIndex + 1));\n            if (!node) {\n                return 'Could not resolve URI: ' + uri;\n            }\n            return node;\n        } catch (err) {\n            return String(err);\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from './services.js';\nimport { UriUtils, type URI } from './utils/uri-utils.js';\n\n/**\n * The service registry provides access to the language-specific {@link LangiumCoreServices} optionally including LSP-related services.\n * These are resolved via the URI of a text document.\n */\nexport interface ServiceRegistry {\n\n    /**\n     * Register a language via its injected services.\n     */\n    register(language: LangiumCoreServices): void;\n\n    /**\n     * Retrieve the language-specific services for the given URI. 
In case only one language is\n     * registered, it may be used regardless of the URI format.\n     */\n    getServices(uri: URI): LangiumCoreServices;\n\n    /**\n     * The full set of registered language services.\n     */\n    readonly all: readonly LangiumCoreServices[];\n}\n\n/**\n * Generic registry for Langium services, but capable of being used with extending service sets as well (such as the lsp-complete LangiumCoreServices set)\n */\nexport class DefaultServiceRegistry implements ServiceRegistry {\n\n    protected singleton?: LangiumCoreServices;\n    protected map?: Record;\n\n    register(language: LangiumCoreServices): void {\n        if (!this.singleton && !this.map) {\n            // This is the first language to be registered; store it as singleton.\n            this.singleton = language;\n            return;\n        }\n        if (!this.map) {\n            this.map = {};\n            if (this.singleton) {\n                // Move the previous singleton instance to the new map.\n                for (const ext of this.singleton.LanguageMetaData.fileExtensions) {\n                    this.map[ext] = this.singleton;\n                }\n                this.singleton = undefined;\n            }\n        }\n        // Store the language services in the map.\n        for (const ext of language.LanguageMetaData.fileExtensions) {\n            if (this.map[ext] !== undefined && this.map[ext] !== language) {\n                console.warn(`The file extension ${ext} is used by multiple languages. It is now assigned to '${language.LanguageMetaData.languageId}'.`);\n            }\n            this.map[ext] = language;\n        }\n    }\n\n    getServices(uri: URI): LangiumCoreServices {\n        if (this.singleton !== undefined) {\n            return this.singleton;\n        }\n        if (this.map === undefined) {\n            throw new Error('The service registry is empty. 
Use `register` to register the services of a language.');\n        }\n        const ext = UriUtils.extname(uri);\n        const services = this.map[ext];\n        if (!services) {\n            throw new Error(`The service registry contains no services for the extension '${ext}'.`);\n        }\n        return services;\n    }\n\n    get all(): readonly LangiumCoreServices[] {\n        if (this.singleton !== undefined) {\n            return [this.singleton];\n        }\n        if (this.map !== undefined) {\n            return Object.values(this.map);\n        }\n        return [];\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CodeDescription, DiagnosticRelatedInformation, DiagnosticTag, integer, Range } from 'vscode-languageserver-types';\nimport type { CancellationToken } from '../utils/cancellation.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstReflection, Properties } from '../syntax-tree.js';\nimport type { MaybePromise } from '../utils/promise-utils.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { DocumentSegment } from '../workspace/documents.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { isOperationCancelled } from '../utils/promise-utils.js';\nimport { stream } from '../utils/stream.js';\n\nexport type DiagnosticInfo> = {\n    /** The AST node to which the diagnostic is attached. */\n    node: N;\n    /** If a property name is given, the diagnostic is restricted to the corresponding text region. */\n    property?: P;\n    /** If the value of a keyword is given, the diagnostic will appear at its corresponding text region */\n    keyword?: string;\n    /** In case of a multi-value property (array), an index can be given to select a specific element. */\n    index?: number;\n    /** If you want to create a diagnostic independent to any property, use the range property. */\n    range?: Range;\n    /** The diagnostic's code, which usually appear in the user interface. */\n    code?: integer | string;\n    /** An optional property to describe the error code. */\n    codeDescription?: CodeDescription;\n    /** Additional metadata about the diagnostic. */\n    tags?: DiagnosticTag[];\n    /** An array of related diagnostic information, e.g. when symbol-names within a scope collide all definitions can be marked via this property. */\n    relatedInformation?: DiagnosticRelatedInformation[];\n    /** A data entry field that is preserved between a `textDocument/publishDiagnostics` notification and `textDocument/codeAction` request. */\n    data?: unknown;\n}\n\n/**\n * Shape of information commonly used in the `data` field of diagnostics.\n */\nexport interface DiagnosticData {\n    /** Diagnostic code for identifying which code action to apply. This code is _not_ shown in the user interface. */\n    code: string\n    /** Specifies where to apply the code action in the form of a `DocumentSegment`. */\n    actionSegment?: DocumentSegment\n    /** Specifies where to apply the code action in the form of a `Range`. */\n    actionRange?: Range\n}\n\n/**\n * Create DiagnosticData for a given diagnostic code. 
The result can be put into the `data` field of a DiagnosticInfo.\n */\nexport function diagnosticData(code: string): DiagnosticData {\n    return { code };\n}\n\nexport type ValidationAcceptor = (severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo) => void\n\nexport type ValidationCheck = (node: T, accept: ValidationAcceptor, cancelToken: CancellationToken) => MaybePromise;\n\n/**\n * A utility type for associating non-primitive AST types to corresponding validation checks. For example:\n *\n * ```ts\n *   const checks: ValidationChecks = {\n *       State: validator.checkStateNameStartsWithCapital\n *    };\n * ```\n *\n * If an AST type does not extend AstNode, e.g. if it describes a union of string literals, that type's name must not occur as a key in objects of type `ValidationCheck<...>`.\n *\n * @param T a type definition mapping language specific type names (keys) to the corresponding types (values)\n */\nexport type ValidationChecks = {\n    [K in keyof T]?: T[K] extends AstNode ? ValidationCheck | Array> : never\n} & {\n    AstNode?: ValidationCheck | Array>;\n}\n\n/**\n * `fast` checks can be executed after every document change (i.e. as the user is typing). If a check\n * is too slow it can delay the response to document changes, yielding bad user experience. By marking\n * it as `slow`, it will be skipped for normal as-you-type validation. Then it's up to you when to\n * schedule these long-running checks: after the fast checks are done, or after saving a document,\n * or with an explicit command, etc.\n *\n * `built-in` checks are errors produced by the lexer, the parser, or the linker. They cannot be used\n * for custom validation checks.\n */\nexport type ValidationCategory = 'fast' | 'slow' | 'built-in'\n\nexport namespace ValidationCategory {\n    export const all: readonly ValidationCategory[] = ['fast', 'slow', 'built-in'];\n}\n\ntype ValidationCheckEntry = {\n    check: ValidationCheck\n    category: ValidationCategory\n}\n\n/**\n * Manages a set of `ValidationCheck`s to be applied when documents are validated.\n */\nexport class ValidationRegistry {\n    private readonly entries = new MultiMap();\n    private readonly reflection: AstReflection;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n    }\n\n    /**\n     * Register a set of validation checks. Each value in the record can be either a single validation check (i.e. 
a function)\n     * or an array of validation checks.\n     *\n     * @param checksRecord Set of validation checks to register.\n     * @param category Optional category for the validation checks (defaults to `'fast'`).\n     * @param thisObj Optional object to be used as `this` when calling the validation check functions.\n     */\n    register(checksRecord: ValidationChecks, thisObj: ThisParameterType = this, category: ValidationCategory = 'fast'): void {\n        if (category === 'built-in') {\n            throw new Error(\"The 'built-in' category is reserved for lexer, parser, and linker errors.\");\n        }\n        for (const [type, ch] of Object.entries(checksRecord)) {\n            const callbacks = ch as ValidationCheck | ValidationCheck[];\n            if (Array.isArray(callbacks)) {\n                for (const check of callbacks) {\n                    const entry: ValidationCheckEntry = {\n                        check: this.wrapValidationException(check, thisObj),\n                        category\n                    };\n                    this.addEntry(type, entry);\n                }\n            } else if (typeof callbacks === 'function') {\n                const entry: ValidationCheckEntry = {\n                    check: this.wrapValidationException(callbacks, thisObj),\n                    category\n                };\n                this.addEntry(type, entry);\n            }\n        }\n    }\n\n    protected wrapValidationException(check: ValidationCheck, thisObj: unknown): ValidationCheck {\n        return async (node, accept, cancelToken) => {\n            try {\n                await check.call(thisObj, node, accept, cancelToken);\n            } catch (err) {\n                if (isOperationCancelled(err)) {\n                    throw err;\n                }\n                console.error('An error occurred during validation:', err);\n                const message = err instanceof Error ? 
err.message : String(err);\n                if (err instanceof Error && err.stack) {\n                    console.error(err.stack);\n                }\n                accept('error', 'An error occurred during validation: ' + message, { node });\n            }\n        };\n    }\n\n    protected addEntry(type: string, entry: ValidationCheckEntry): void {\n        if (type === 'AstNode') {\n            this.entries.add('AstNode', entry);\n            return;\n        }\n        for (const subtype of this.reflection.getAllSubTypes(type)) {\n            this.entries.add(subtype, entry);\n        }\n    }\n\n    getChecks(type: string, categories?: ValidationCategory[]): Stream {\n        let checks = stream(this.entries.get(type))\n            .concat(this.entries.get('AstNode'));\n        if (categories) {\n            checks = checks.filter(entry => categories.includes(entry.category));\n        }\n        return checks.map(entry => entry.check);\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { MismatchedTokenException } from 'chevrotain';\nimport type { DiagnosticSeverity, Position, Range, Diagnostic } from 'vscode-languageserver-types';\nimport type { LanguageMetaData } from '../languages/language-meta-data.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport type { LangiumDocument } from '../workspace/documents.js';\nimport type { DiagnosticData, DiagnosticInfo, ValidationAcceptor, ValidationCategory, ValidationRegistry } from './validation-registry.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { findNodeForKeyword, findNodeForProperty } from '../utils/grammar-utils.js';\nimport { streamAst } from '../utils/ast-utils.js';\nimport { tokenToRange } from '../utils/cst-utils.js';\nimport { interruptAndCheck, isOperationCancelled } from '../utils/promise-utils.js';\nimport { diagnosticData } from './validation-registry.js';\n\nexport interface ValidationOptions {\n    /**\n     * If this is set, only the checks associated with these categories are executed; otherwise\n     * all checks are executed. The default category if not specified to the registry is `'fast'`.\n     */\n    categories?: ValidationCategory[];\n    /** If true, no further diagnostics are reported if there are lexing errors. */\n    stopAfterLexingErrors?: boolean\n    /** If true, no further diagnostics are reported if there are parsing errors. */\n    stopAfterParsingErrors?: boolean\n    /** If true, no further diagnostics are reported if there are linking errors. 
*/\n    stopAfterLinkingErrors?: boolean\n}\n\n/**\n * Language-specific service for validating `LangiumDocument`s.\n */\nexport interface DocumentValidator {\n    /**\n     * Validates the whole specified document.\n     *\n     * @param document specified document to validate\n     * @param options options to control the validation process\n     * @param cancelToken allows to cancel the current operation\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    validateDocument(document: LangiumDocument, options?: ValidationOptions, cancelToken?: CancellationToken): Promise;\n}\n\nexport class DefaultDocumentValidator implements DocumentValidator {\n\n    protected readonly validationRegistry: ValidationRegistry;\n    protected readonly metadata: LanguageMetaData;\n\n    constructor(services: LangiumCoreServices) {\n        this.validationRegistry = services.validation.ValidationRegistry;\n        this.metadata = services.LanguageMetaData;\n    }\n\n    async validateDocument(document: LangiumDocument, options: ValidationOptions = {}, cancelToken = CancellationToken.None): Promise {\n        const parseResult = document.parseResult;\n        const diagnostics: Diagnostic[] = [];\n\n        await interruptAndCheck(cancelToken);\n\n        if (!options.categories || options.categories.includes('built-in')) {\n            this.processLexingErrors(parseResult, diagnostics, options);\n            if (options.stopAfterLexingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.LexingError)) {\n                return diagnostics;\n            }\n\n            this.processParsingErrors(parseResult, diagnostics, options);\n            if (options.stopAfterParsingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.ParsingError)) {\n                return diagnostics;\n            }\n\n            this.processLinkingErrors(document, diagnostics, options);\n            if (options.stopAfterLinkingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.LinkingError)) {\n                return diagnostics;\n            }\n        }\n\n        // Process custom validations\n        try {\n            diagnostics.push(...await this.validateAst(parseResult.value, options, cancelToken));\n        } catch (err) {\n            if (isOperationCancelled(err)) {\n                throw err;\n            }\n            console.error('An error occurred during validation:', err);\n        }\n\n        await interruptAndCheck(cancelToken);\n\n        return diagnostics;\n    }\n\n    protected processLexingErrors(parseResult: ParseResult, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const lexerError of parseResult.lexerErrors) {\n            const diagnostic: Diagnostic = {\n                severity: toDiagnosticSeverity('error'),\n                range: {\n                    start: {\n                        line: lexerError.line! - 1,\n                        character: lexerError.column! - 1\n                    },\n                    end: {\n                        line: lexerError.line! - 1,\n                        character: lexerError.column! 
+ lexerError.length - 1\n                    }\n                },\n                message: lexerError.message,\n                data: diagnosticData(DocumentValidator.LexingError),\n                source: this.getSource()\n            };\n            diagnostics.push(diagnostic);\n        }\n    }\n\n    protected processParsingErrors(parseResult: ParseResult, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const parserError of parseResult.parserErrors) {\n            let range: Range | undefined = undefined;\n            // We can run into the chevrotain error recovery here\n            // The token contained in the parser error might be automatically inserted\n            // In this case every position value will be `NaN`\n            if (isNaN(parserError.token.startOffset)) {\n                // Some special parser error types contain a `previousToken`\n                // We can simply append our diagnostic to that token\n                if ('previousToken' in parserError) {\n                    const token = (parserError as MismatchedTokenException).previousToken;\n                    if (!isNaN(token.startOffset)) {\n                        const position: Position = { line: token.endLine! - 1, character: token.endColumn! };\n                        range = { start: position, end: position};\n                    } else {\n                        // No valid prev token. Might be empty document or containing only hidden tokens.\n                        // Point to document start\n                        const position: Position = { line: 0, character: 0 };\n                        range = { start: position, end: position};\n                    }\n                }\n            } else {\n                range = tokenToRange(parserError.token);\n            }\n            if (range) {\n                const diagnostic: Diagnostic = {\n                    severity: toDiagnosticSeverity('error'),\n                    range,\n                    message: parserError.message,\n                    data: diagnosticData(DocumentValidator.ParsingError),\n                    source: this.getSource()\n                };\n                diagnostics.push(diagnostic);\n            }\n        }\n    }\n\n    protected processLinkingErrors(document: LangiumDocument, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const reference of document.references) {\n            const linkingError = reference.error;\n            if (linkingError) {\n                const info: DiagnosticInfo = {\n                    node: linkingError.container,\n                    property: linkingError.property,\n                    index: linkingError.index,\n                    data: {\n                        code: DocumentValidator.LinkingError,\n                        containerType: linkingError.container.$type,\n                        property: linkingError.property,\n                        refText: linkingError.reference.$refText\n                    } satisfies LinkingErrorData\n                };\n                diagnostics.push(this.toDiagnostic('error', linkingError.message, info));\n            }\n        }\n    }\n\n    protected async validateAst(rootNode: AstNode, options: ValidationOptions, cancelToken = CancellationToken.None): Promise {\n        const validationItems: Diagnostic[] = [];\n        const acceptor: ValidationAcceptor = (severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo) => {\n            
validationItems.push(this.toDiagnostic(severity, message, info));\n        };\n\n        await Promise.all(streamAst(rootNode).map(async node => {\n            await interruptAndCheck(cancelToken);\n            const checks = this.validationRegistry.getChecks(node.$type, options.categories);\n            for (const check of checks) {\n                await check(node, acceptor, cancelToken);\n            }\n        }));\n        return validationItems;\n    }\n\n    protected toDiagnostic(severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo): Diagnostic {\n        return {\n            message,\n            range: getDiagnosticRange(info),\n            severity: toDiagnosticSeverity(severity),\n            code: info.code,\n            codeDescription: info.codeDescription,\n            tags: info.tags,\n            relatedInformation: info.relatedInformation,\n            data: info.data,\n            source: this.getSource()\n        };\n    }\n\n    protected getSource(): string | undefined {\n        return this.metadata.languageId;\n    }\n}\n\nexport function getDiagnosticRange(info: DiagnosticInfo): Range {\n    if (info.range) {\n        return info.range;\n    }\n    let cstNode: CstNode | undefined;\n    if (typeof info.property === 'string') {\n        cstNode = findNodeForProperty(info.node.$cstNode, info.property, info.index);\n    } else if (typeof info.keyword === 'string') {\n        cstNode = findNodeForKeyword(info.node.$cstNode, info.keyword, info.index);\n    }\n    cstNode ??= info.node.$cstNode;\n    if (!cstNode) {\n        return {\n            start: { line: 0, character: 0 },\n            end: { line: 0, character: 0 }\n        };\n    }\n    return cstNode.range;\n}\n\nexport function toDiagnosticSeverity(severity: 'error' | 'warning' | 'info' | 'hint'): DiagnosticSeverity {\n    switch (severity) {\n        case 'error':\n            return 1; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Error\n        case 'warning':\n            return 2; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Warning\n        case 'info':\n            return 3; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Information\n        case 'hint':\n            return 4; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Hint\n        default:\n            throw new Error('Invalid diagnostic severity: ' + severity);\n    }\n}\n\nexport namespace DocumentValidator {\n    export const LexingError = 'lexing-error';\n    export const ParsingError = 'parsing-error';\n    export const LinkingError = 'linking-error';\n}\n\nexport interface LinkingErrorData extends DiagnosticData {\n    containerType: string\n    property: string\n    refText: string\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { URI } from '../utils/uri-utils.js';\nimport type { NameProvider } from '../references/name-provider.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, ReferenceInfo } from '../syntax-tree.js';\nimport type { AstNodeLocator } from './ast-node-locator.js';\nimport type { 
DocumentSegment, LangiumDocument } from './documents.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { isLinkingError } from '../syntax-tree.js';\nimport { getDocument, streamAst, streamReferences } from '../utils/ast-utils.js';\nimport { toDocumentSegment } from '../utils/cst-utils.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\nimport { UriUtils } from '../utils/uri-utils.js';\n\n/**\n * Language-specific service for creating descriptions of AST nodes to be used for cross-reference resolutions.\n */\nexport interface AstNodeDescriptionProvider {\n\n    /**\n     * Create a description for the given AST node. This service method is typically used while indexing\n     * the contents of a document and during scope computation.\n     *\n     * @param node An AST node.\n     * @param name The name to be used to refer to the AST node. By default, this is determined by the\n     *     `NameProvider` service, but alternative names may be provided according to the semantics\n     *     of your language.\n     * @param document The document containing the AST node. If omitted, it is taken from the root AST node.\n     */\n    createDescription(node: AstNode, name: string | undefined, document?: LangiumDocument): AstNodeDescription;\n\n}\n\nexport class DefaultAstNodeDescriptionProvider implements AstNodeDescriptionProvider {\n\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly nameProvider: NameProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n        this.nameProvider = services.references.NameProvider;\n    }\n\n    createDescription(node: AstNode, name: string | undefined, document: LangiumDocument = getDocument(node)): AstNodeDescription {\n        name ??= this.nameProvider.getName(node);\n        const path = this.astNodeLocator.getAstNodePath(node);\n        if (!name) {\n            throw new Error(`Node at path ${path} has no name.`);\n        }\n        let nameNodeSegment: DocumentSegment | undefined;\n        const nameSegmentGetter = () => nameNodeSegment ??= toDocumentSegment(this.nameProvider.getNameNode(node) ?? node.$cstNode);\n        return {\n            node,\n            name,\n            get nameSegment() {\n                return nameSegmentGetter();\n            },\n            selectionSegment: toDocumentSegment(node.$cstNode),\n            type: node.$type,\n            documentUri: document.uri,\n            path\n        };\n    }\n\n}\n\n/**\n * Describes a cross-reference within a document or between two documents.\n */\nexport interface ReferenceDescription {\n    /** URI of the document that holds a reference */\n    sourceUri: URI\n    /** Path to AstNode that holds a reference */\n    sourcePath: string\n    /** Target document uri */\n    targetUri: URI\n    /** Path to the target AstNode inside the document */\n    targetPath: string\n    /** Segment of the reference text. */\n    segment: DocumentSegment\n    /** Marks a local reference i.e. a cross reference inside a document.   */\n    local?: boolean\n}\n\n/**\n * Language-specific service to create descriptions of all cross-references in a document. These are used by the `IndexManager`\n * to determine which documents are affected and should be rebuilt when a document is changed.\n */\nexport interface ReferenceDescriptionProvider {\n    /**\n     * Create descriptions of all cross-references found in the given document. 
These descriptions are\n     * gathered by the `IndexManager` and stored in the global index so they can be considered when\n     * a document change is reported by the client.\n     *\n     * @param document The document in which to gather cross-references.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    createDescriptions(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n}\n\nexport class DefaultReferenceDescriptionProvider implements ReferenceDescriptionProvider {\n\n    protected readonly nodeLocator: AstNodeLocator;\n\n    constructor(services: LangiumCoreServices) {\n        this.nodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    async createDescriptions(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const descr: ReferenceDescription[] = [];\n        const rootNode = document.parseResult.value;\n        for (const astNode of streamAst(rootNode)) {\n            await interruptAndCheck(cancelToken);\n            streamReferences(astNode).filter(refInfo => !isLinkingError(refInfo)).forEach(refInfo => {\n                // TODO: Consider logging a warning or throw an exception when DocumentState is < than Linked\n                const description = this.createDescription(refInfo);\n                if (description) {\n                    descr.push(description);\n                }\n            });\n        }\n        return descr;\n    }\n\n    protected createDescription(refInfo: ReferenceInfo): ReferenceDescription | undefined {\n        const targetNodeDescr = refInfo.reference.$nodeDescription;\n        const refCstNode = refInfo.reference.$refNode;\n        if (!targetNodeDescr || !refCstNode) {\n            return undefined;\n        }\n        const docUri = getDocument(refInfo.container).uri;\n        return {\n            sourceUri: docUri,\n            sourcePath: this.nodeLocator.getAstNodePath(refInfo.container),\n            targetUri: targetNodeDescr.documentUri,\n            targetPath: targetNodeDescr.path,\n            segment: toDocumentSegment(refCstNode),\n            local: UriUtils.equals(targetNodeDescr.documentUri, docUri)\n        };\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNode } from '../syntax-tree.js';\n\n/**\n * Language-specific service for locating an `AstNode` in a document.\n */\nexport interface AstNodeLocator {\n\n    /**\n     * Creates a path represented by a `string` that identifies an `AstNode` inside its document.\n     * It must be possible to retrieve exactly the same `AstNode` from the document using this path.\n     *\n     * @param node The `AstNode` for which to create the path.\n     * @returns a path represented by a `string` that identifies `node` inside its document.\n     * @see AstNodeLocator.getAstNode\n     */\n    getAstNodePath(node: AstNode): string;\n\n    /**\n     * Locates an `AstNode` inside another node by following the given path.\n     *\n     * @param node Parent element.\n     * @param path Describes how to locate the `AstNode` inside the given `node`.\n     * @returns The `AstNode` located under the given 
path, or `undefined` if the path cannot be resolved.\n     * @see AstNodeLocator.getAstNodePath\n     */\n    getAstNode(node: AstNode, path: string): T | undefined;\n\n}\n\nexport class DefaultAstNodeLocator implements AstNodeLocator {\n    protected segmentSeparator = '/';\n    protected indexSeparator = '@';\n\n    getAstNodePath(node: AstNode): string {\n        if (node.$container) {\n            const containerPath = this.getAstNodePath(node.$container);\n            const newSegment = this.getPathSegment(node);\n            const nodePath = containerPath + this.segmentSeparator + newSegment;\n            return nodePath;\n        }\n        return '';\n    }\n\n    protected getPathSegment({ $containerProperty, $containerIndex }: AstNode): string {\n        if (!$containerProperty) {\n            throw new Error(\"Missing '$containerProperty' in AST node.\");\n        }\n        if ($containerIndex !== undefined) {\n            return $containerProperty + this.indexSeparator + $containerIndex;\n        }\n        return $containerProperty;\n    }\n\n    getAstNode(node: AstNode, path: string): T | undefined {\n        const segments = path.split(this.segmentSeparator);\n        return segments.reduce((previousValue, currentValue) => {\n            if (!previousValue || currentValue.length === 0) {\n                return previousValue;\n            }\n            const propertyIndex = currentValue.indexOf(this.indexSeparator);\n            if (propertyIndex > 0) {\n                const property = currentValue.substring(0, propertyIndex);\n                const arrayIndex = parseInt(currentValue.substring(propertyIndex + 1));\n                const array = (previousValue as unknown as Record)[property];\n                return array?.[arrayIndex];\n            }\n            return (previousValue as unknown as Record)[currentValue];\n        }, node) as T;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ConfigurationItem, DidChangeConfigurationParams, DidChangeConfigurationRegistrationOptions, InitializeParams, InitializedParams } from 'vscode-languageserver-protocol';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport { Deferred } from '../utils/promise-utils.js';\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\nexport interface ConfigurationProvider {\n\n    /**\n     * A promise that resolves when the configuration provider is ready to be used.\n     */\n    readonly ready: Promise;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialize` request.\n     */\n    initialize(params: InitializeParams): void;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialized` notification.\n     */\n    initialized(params: ConfigurationInitializedParams): Promise;\n\n    /**\n     * Returns a configuration value stored for the given language.\n     *\n     * @param language The language id\n     * @param configuration Configuration name\n     */\n    getConfiguration(language: string, configuration: string): Promise;\n\n    /**\n  
   *  Updates the cached configurations using the `change` notification parameters.\n     *\n     * @param change The parameters of a change configuration notification.\n     * `settings` property of the change object could be expressed as `Record>`\n     */\n    updateConfiguration(change: DidChangeConfigurationParams): void;\n}\n\nexport interface ConfigurationInitializedParams extends InitializedParams {\n    register?: (params: DidChangeConfigurationRegistrationOptions) => void,\n    fetchConfiguration?: (configuration: ConfigurationItem[]) => Promise\n}\n\n/**\n * Base configuration provider for building up other configuration providers\n */\nexport class DefaultConfigurationProvider implements ConfigurationProvider {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly _ready = new Deferred();\n    protected settings: Record> = {};\n    protected workspaceConfig = false;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n    }\n\n    get ready(): Promise {\n        return this._ready.promise;\n    }\n\n    initialize(params: InitializeParams): void {\n        this.workspaceConfig = params.capabilities.workspace?.configuration ?? false;\n    }\n\n    async initialized(params: ConfigurationInitializedParams): Promise {\n        if (this.workspaceConfig) {\n            if (params.register) {\n                // params.register(...) is a function to be provided by the calling language server for the sake of\n                //  decoupling this implementation from the concrete LSP implementations, specifically the LSP Connection\n\n                const languages = this.serviceRegistry.all;\n                params.register({\n                    // Listen to configuration changes for all languages\n                    section: languages.map(lang => this.toSectionName(lang.LanguageMetaData.languageId))\n                });\n            }\n\n            if (params.fetchConfiguration) {\n                // params.fetchConfiguration(...) 
is a function to be provided by the calling language server for the sake of\n                //  decoupling this implementation from the concrete LSP implementations, specifically the LSP Connection\n                const configToUpdate = this.serviceRegistry.all.map(lang => {\n                    // Fetch the configuration changes for all languages\n                    section: this.toSectionName(lang.LanguageMetaData.languageId)\n                });\n\n                // get workspace configurations (default scope URI)\n                const configs = await params.fetchConfiguration(configToUpdate);\n                configToUpdate.forEach((conf, idx) => {\n                    this.updateSectionConfiguration(conf.section!, configs[idx]);\n                });\n            }\n        }\n        this._ready.resolve();\n    }\n\n    /**\n     *  Updates the cached configurations using the `change` notification parameters.\n     *\n     * @param change The parameters of a change configuration notification.\n     * `settings` property of the change object could be expressed as `Record>`\n     */\n    updateConfiguration(change: DidChangeConfigurationParams): void {\n        if (!change.settings) {\n            return;\n        }\n        Object.keys(change.settings).forEach(section => {\n            this.updateSectionConfiguration(section, change.settings[section]);\n        });\n    }\n\n    protected updateSectionConfiguration(section: string, configuration: any): void {\n        this.settings[section] = configuration;\n    }\n\n    /**\n    * Returns a configuration value stored for the given language.\n    *\n    * @param language The language id\n    * @param configuration Configuration name\n    */\n    async getConfiguration(language: string, configuration: string): Promise {\n        await this.ready;\n\n        const sectionName = this.toSectionName(language);\n        if (this.settings[sectionName]) {\n            return this.settings[sectionName][configuration];\n        }\n    }\n\n    protected toSectionName(languageId: string): string {\n        return `${languageId}`;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport interface Disposable {\n    /**\n     * Dispose this object.\n     */\n    dispose(): void;\n}\n\nexport interface AsyncDisposable {\n    /**\n     * Dispose this object.\n     */\n    dispose(): Promise;\n}\n\nexport namespace Disposable {\n    export function create(callback: () => Promise): AsyncDisposable;\n    export function create(callback: () => void): Disposable;\n    export function create(callback: () => void | Promise): Disposable | AsyncDisposable {\n        return {\n            dispose: async () => await callback()\n        };\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { Disposable } from '../utils/disposable.js';\nimport type { ServiceRegistry } from 
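// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): reading and updating
// settings through the ConfigurationProvider shown above. Assumes `provider`
// is the configuration provider of your shared services; the language id
// 'hello-world' and the 'maxProblems' key are made up for illustration.
import type { ConfigurationProvider } from 'langium';

declare const provider: ConfigurationProvider; // e.g. from your shared services (assumption)

async function configurationDemo(): Promise<void> {
    // Outside an LSP server, `initialized` can be invoked directly so that the
    // `ready` promise resolves and `getConfiguration` does not block forever.
    await provider.initialized({});
    // Simulate a workspace/didChangeConfiguration notification.
    provider.updateConfiguration({ settings: { 'hello-world': { maxProblems: 50 } } });
    // The section name is the language id; the second argument is the setting key.
    const maxProblems = await provider.getConfiguration('hello-world', 'maxProblems');
    console.log(maxProblems); // 50
}
// --------------------------------------------------------------------------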
'../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { MaybePromise } from '../utils/promise-utils.js';\nimport type { Deferred } from '../utils/promise-utils.js';\nimport type { ValidationOptions } from '../validation/document-validator.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { LangiumDocument, LangiumDocuments, LangiumDocumentFactory } from './documents.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { OperationCancelled, interruptAndCheck } from '../utils/promise-utils.js';\nimport { stream } from '../utils/stream.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { ValidationCategory } from '../validation/validation-registry.js';\nimport { DocumentState } from './documents.js';\n\nexport interface BuildOptions {\n    /**\n     * Control the validation phase with this option:\n     *  - `true` enables all validation checks and forces revalidating the documents\n     *  - `false` or `undefined` disables all validation checks\n     *  - An object runs only the necessary validation checks; the `categories` property restricts this to a specific subset\n     */\n    validation?: boolean | ValidationOptions\n}\n\nexport interface DocumentBuildState {\n    /** Whether a document has completed its last build process. */\n    completed: boolean\n    /** The options used for the last build process. */\n    options: BuildOptions\n    /** Additional information about the last build result. */\n    result?: {\n        validationChecks?: ValidationCategory[]\n    }\n}\n\n/**\n * Shared-service for building and updating `LangiumDocument`s.\n */\nexport interface DocumentBuilder {\n\n    /** The options used for rebuilding documents after an update. */\n    updateBuildOptions: BuildOptions;\n\n    /**\n     * Execute all necessary build steps for the given documents.\n     *\n     * @param documents Set of documents to be built.\n     * @param options Options for the document builder.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    build(documents: Array>, options?: BuildOptions, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * This method is called when a document change is detected. It updates the state of all\n     * affected documents, including those with references to the changed ones, so they are rebuilt.\n     *\n     * @param changed URIs of changed or created documents\n     * @param deleted URIs of deleted documents\n     * @param cancelToken allows to cancel the current operation\n     * @throws `OperationCancelled` if cancellation is detected during execution\n     */\n    update(changed: URI[], deleted: URI[], cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Notify the given callback when a document update was triggered, but before any document\n     * is rebuilt. Listeners to this event should not perform any long-running task.\n     */\n    onUpdate(callback: DocumentUpdateListener): Disposable;\n\n    /**\n     * Notify the given callback when a set of documents has been built reaching a desired target state.\n     */\n    onBuildPhase(targetState: DocumentState, callback: DocumentBuildListener): Disposable;\n\n    /**\n     * Wait until the workspace has reached the specified state for all documents.\n     *\n     * @param state The desired state. 
The promise won't resolve until all documents have reached this state\n     * @param cancelToken Optionally allows to cancel the wait operation, disposing any listeners in the process\n     * @throws `OperationCancelled` if cancellation has been requested before the state has been reached\n     */\n    waitUntil(state: DocumentState, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Wait until the document specified by the {@link uri} has reached the specified state.\n     *\n     * @param state The desired state. The promise won't resolve until the document has reached this state.\n     * @param uri The specified URI that points to the document. If the URI does not exist, the promise will resolve once the workspace has reached the specified state.\n     * @param cancelToken Optionally allows to cancel the wait operation, disposing any listeners in the process.\n     * @return The URI of the document that has reached the desired state, or `undefined` if the document does not exist.\n     * @throws `OperationCancelled` if cancellation has been requested before the state has been reached\n     */\n    waitUntil(state: DocumentState, uri?: URI, cancelToken?: CancellationToken): Promise;\n}\n\nexport type DocumentUpdateListener = (changed: URI[], deleted: URI[]) => void | Promise\nexport type DocumentBuildListener = (built: LangiumDocument[], cancelToken: CancellationToken) => void | Promise\nexport class DefaultDocumentBuilder implements DocumentBuilder {\n\n    updateBuildOptions: BuildOptions = {\n        // Default: run only the built-in validation checks and those in the _fast_ category (includes those without category)\n        validation: {\n            categories: ['built-in', 'fast']\n        }\n    };\n\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly langiumDocumentFactory: LangiumDocumentFactory;\n    protected readonly indexManager: IndexManager;\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly updateListeners: DocumentUpdateListener[] = [];\n    protected readonly buildPhaseListeners = new MultiMap();\n    protected readonly buildState = new Map();\n    protected readonly documentBuildWaiters = new Map>();\n    protected currentState = DocumentState.Changed;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.langiumDocuments = services.workspace.LangiumDocuments;\n        this.langiumDocumentFactory = services.workspace.LangiumDocumentFactory;\n        this.indexManager = services.workspace.IndexManager;\n        this.serviceRegistry = services.ServiceRegistry;\n    }\n\n    async build(documents: Array>, options: BuildOptions = {}, cancelToken = CancellationToken.None): Promise {\n        for (const document of documents) {\n            const key = document.uri.toString();\n            if (document.state === DocumentState.Validated) {\n                if (typeof options.validation === 'boolean' && options.validation) {\n                    // Force re-running all validation checks\n                    document.state = DocumentState.IndexedReferences;\n                    document.diagnostics = undefined;\n                    this.buildState.delete(key);\n                } else if (typeof options.validation === 'object') {\n                    const buildState = this.buildState.get(key);\n                    const previousCategories = buildState?.result?.validationChecks;\n                    if (previousCategories) {\n                        // Validation with explicit options 
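// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): observing the build
// lifecycle declared above. Assumes `sharedServices` is an initialized shared
// services instance; the access path below mirrors the constructor in this
// file but remains an assumption about your setup.
import { DocumentState } from 'langium';
import type { LangiumSharedCoreServices } from 'langium';

declare const sharedServices: LangiumSharedCoreServices;

const builder = sharedServices.workspace.DocumentBuilder;
// Get notified whenever a set of documents reaches the Validated state.
const subscription = builder.onBuildPhase(DocumentState.Validated, async (built, _token) => {
    console.log(`Validated ${built.length} document(s)`);
});
// Block until every document in the workspace has had its references indexed
// (rejects with OperationCancelled if the optional token is cancelled first).
await builder.waitUntil(DocumentState.IndexedReferences);
subscription.dispose();
// --------------------------------------------------------------------------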
was requested for a document that has already been partly validated.\n                        // In this case, we need to merge the previous validation categories with the new ones.\n                        const newCategories = options.validation.categories ?? ValidationCategory.all as ValidationCategory[];\n                        const categories = newCategories.filter(c => !previousCategories.includes(c));\n                        if (categories.length > 0) {\n                            this.buildState.set(key, {\n                                completed: false,\n                                options: {\n                                    validation: {\n                                        ...options.validation,\n                                        categories\n                                    }\n                                },\n                                result: buildState.result\n                            });\n                            document.state = DocumentState.IndexedReferences;\n                        }\n                    }\n                }\n            } else {\n                // Default: forget any previous build options\n                this.buildState.delete(key);\n            }\n        }\n        this.currentState = DocumentState.Changed;\n        await this.emitUpdate(documents.map(e => e.uri), []);\n        await this.buildDocuments(documents, options, cancelToken);\n    }\n\n    async update(changed: URI[], deleted: URI[], cancelToken = CancellationToken.None): Promise {\n        this.currentState = DocumentState.Changed;\n        // Remove all metadata of documents that are reported as deleted\n        for (const deletedUri of deleted) {\n            this.langiumDocuments.deleteDocument(deletedUri);\n            this.buildState.delete(deletedUri.toString());\n            this.indexManager.remove(deletedUri);\n        }\n        // Set the state of all changed documents to `Changed` so they are completely rebuilt\n        for (const changedUri of changed) {\n            const invalidated = this.langiumDocuments.invalidateDocument(changedUri);\n            if (!invalidated) {\n                // We create an unparsed, invalid document.\n                // This will be parsed as soon as we reach the first document builder phase.\n                // This allows to cancel the parsing process later in case we need it.\n                const newDocument = this.langiumDocumentFactory.fromModel({ $type: 'INVALID' }, changedUri);\n                newDocument.state = DocumentState.Changed;\n                this.langiumDocuments.addDocument(newDocument);\n            }\n            this.buildState.delete(changedUri.toString());\n        }\n        // Set the state of all documents that should be relinked to `ComputedScopes` (if not already lower)\n        const allChangedUris = stream(changed).concat(deleted).map(uri => uri.toString()).toSet();\n        this.langiumDocuments.all\n            .filter(doc => !allChangedUris.has(doc.uri.toString()) && this.shouldRelink(doc, allChangedUris))\n            .forEach(doc => {\n                const linker = this.serviceRegistry.getServices(doc.uri).references.Linker;\n                linker.unlink(doc);\n                doc.state = Math.min(doc.state, DocumentState.ComputedScopes);\n                doc.diagnostics = undefined;\n            });\n        // Notify listeners of the update\n        await this.emitUpdate(changed, deleted);\n        // Only allow interrupting the execution after all state 
changes are done\n        await interruptAndCheck(cancelToken);\n\n        // Collect all documents that we should rebuild\n        const rebuildDocuments = this.langiumDocuments.all\n            .filter(doc =>\n                // This includes those that were reported as changed and those that we selected for relinking\n                doc.state < DocumentState.Linked\n                // This includes those for which a previous build has been cancelled\n                || !this.buildState.get(doc.uri.toString())?.completed\n            )\n            .toArray();\n        await this.buildDocuments(rebuildDocuments, this.updateBuildOptions, cancelToken);\n    }\n\n    protected async emitUpdate(changed: URI[], deleted: URI[]): Promise {\n        await Promise.all(this.updateListeners.map(listener => listener(changed, deleted)));\n    }\n\n    /**\n     * Check whether the given document should be relinked after changes were found in the given URIs.\n     */\n    protected shouldRelink(document: LangiumDocument, changedUris: Set): boolean {\n        // Relink documents with linking errors -- maybe those references can be resolved now\n        if (document.references.some(ref => ref.error !== undefined)) {\n            return true;\n        }\n        // Check whether the document is affected by any of the changed URIs\n        return this.indexManager.isAffected(document, changedUris);\n    }\n\n    onUpdate(callback: DocumentUpdateListener): Disposable {\n        this.updateListeners.push(callback);\n        return Disposable.create(() => {\n            const index = this.updateListeners.indexOf(callback);\n            if (index >= 0) {\n                this.updateListeners.splice(index, 1);\n            }\n        });\n    }\n\n    /**\n     * Build the given documents by stepping through all build phases. If a document's state indicates\n     * that a certain build phase is already done, the phase is skipped for that document.\n     */\n    protected async buildDocuments(documents: LangiumDocument[], options: BuildOptions, cancelToken: CancellationToken): Promise {\n        this.prepareBuild(documents, options);\n        // 0. Parse content\n        await this.runCancelable(documents, DocumentState.Parsed, cancelToken, doc =>\n            this.langiumDocumentFactory.update(doc, cancelToken)\n        );\n        // 1. Index content\n        await this.runCancelable(documents, DocumentState.IndexedContent, cancelToken, doc =>\n            this.indexManager.updateContent(doc, cancelToken)\n        );\n        // 2. Compute scopes\n        await this.runCancelable(documents, DocumentState.ComputedScopes, cancelToken, async doc => {\n            const scopeComputation = this.serviceRegistry.getServices(doc.uri).references.ScopeComputation;\n            doc.precomputedScopes = await scopeComputation.computeLocalScopes(doc, cancelToken);\n        });\n        // 3. Linking\n        await this.runCancelable(documents, DocumentState.Linked, cancelToken, doc => {\n            const linker = this.serviceRegistry.getServices(doc.uri).references.Linker;\n            return linker.link(doc, cancelToken);\n        });\n        // 4. Index references\n        await this.runCancelable(documents, DocumentState.IndexedReferences, cancelToken, doc =>\n            this.indexManager.updateReferences(doc, cancelToken)\n        );\n        // 5. 
Validation\n        const toBeValidated = documents.filter(doc => this.shouldValidate(doc));\n        await this.runCancelable(toBeValidated, DocumentState.Validated, cancelToken, doc =>\n            this.validate(doc, cancelToken)\n        );\n\n        // If we've made it to this point without being cancelled, we can mark the build state as completed.\n        for (const doc of documents) {\n            const state = this.buildState.get(doc.uri.toString());\n            if (state) {\n                state.completed = true;\n            }\n        }\n    }\n\n    protected prepareBuild(documents: LangiumDocument[], options: BuildOptions): void {\n        for (const doc of documents) {\n            const key = doc.uri.toString();\n            const state = this.buildState.get(key);\n            // If the document has no previous build state, we set it. If it has one, but it's already marked\n            // as completed, we overwrite it. If the previous build was not completed, we keep its state\n            // and continue where it was cancelled.\n            if (!state || state.completed) {\n                this.buildState.set(key, {\n                    completed: false,\n                    options,\n                    result: state?.result\n                });\n            }\n        }\n    }\n\n    protected async runCancelable(documents: LangiumDocument[], targetState: DocumentState, cancelToken: CancellationToken,\n        callback: (document: LangiumDocument) => MaybePromise): Promise {\n        const filtered = documents.filter(e => e.state < targetState);\n        for (const document of filtered) {\n            await interruptAndCheck(cancelToken);\n            await callback(document);\n            document.state = targetState;\n        }\n        await this.notifyBuildPhase(filtered, targetState, cancelToken);\n        this.currentState = targetState;\n    }\n\n    onBuildPhase(targetState: DocumentState, callback: DocumentBuildListener): Disposable {\n        this.buildPhaseListeners.add(targetState, callback);\n        return Disposable.create(() => {\n            this.buildPhaseListeners.delete(targetState, callback);\n        });\n    }\n\n    waitUntil(state: DocumentState, cancelToken?: CancellationToken): Promise;\n    waitUntil(state: DocumentState, uri?: URI, cancelToken?: CancellationToken): Promise;\n    waitUntil(state: DocumentState, uriOrToken?: URI | CancellationToken, cancelToken?: CancellationToken): Promise {\n        let uri: URI | undefined = undefined;\n        if (uriOrToken && 'path' in uriOrToken) {\n            uri = uriOrToken;\n        } else {\n            cancelToken = uriOrToken;\n        }\n        cancelToken ??= CancellationToken.None;\n        if (uri) {\n            const document = this.langiumDocuments.getDocument(uri);\n            if (document && document.state > state) {\n                return Promise.resolve(uri);\n            }\n        }\n        if (this.currentState >= state) {\n            return Promise.resolve(undefined);\n        } else if (cancelToken.isCancellationRequested) {\n            return Promise.reject(OperationCancelled);\n        }\n        return new Promise((resolve, reject) => {\n            const buildDisposable = this.onBuildPhase(state, () => {\n                buildDisposable.dispose();\n                cancelDisposable.dispose();\n                if (uri) {\n                    const document = this.langiumDocuments.getDocument(uri);\n                    resolve(document?.uri);\n                } else {\n   
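// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): steering the
// validation phase via BuildOptions, as handled by the builder above.
// `builder` and `documents` are assumed to exist; the 'slow' category is an
// assumption, since only 'built-in' and 'fast' appear in the defaults above.
import type { DocumentBuilder, LangiumDocument } from 'langium';

declare const builder: DocumentBuilder;
declare const documents: LangiumDocument[];

// `validation: true` forces all checks to run again, even for documents that
// were already validated (their state is reset to IndexedReferences above).
await builder.build(documents, { validation: true });

// A ValidationOptions object restricts the run to specific categories; for
// already validated documents only categories not yet executed are re-run.
await builder.build(documents, { validation: { categories: ['slow'] } });
// --------------------------------------------------------------------------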
                 resolve(undefined);\n                }\n            });\n            const cancelDisposable = cancelToken!.onCancellationRequested(() => {\n                buildDisposable.dispose();\n                cancelDisposable.dispose();\n                reject(OperationCancelled);\n            });\n        });\n    }\n\n    protected async notifyBuildPhase(documents: LangiumDocument[], state: DocumentState, cancelToken: CancellationToken): Promise {\n        if (documents.length === 0) {\n            // Don't notify when no document has been processed\n            return;\n        }\n        const listeners = this.buildPhaseListeners.get(state);\n        for (const listener of listeners) {\n            await interruptAndCheck(cancelToken);\n            await listener(documents, cancelToken);\n        }\n    }\n\n    /**\n     * Determine whether the given document should be validated during a build. The default\n     * implementation checks the `validation` property of the build options. If it's set to `true`\n     * or a `ValidationOptions` object, the document is included in the validation phase.\n     */\n    protected shouldValidate(document: LangiumDocument): boolean {\n        return Boolean(this.getBuildOptions(document).validation);\n    }\n\n    /**\n     * Run validation checks on the given document and store the resulting diagnostics in the document.\n     * If the document already contains diagnostics, the new ones are added to the list.\n     */\n    protected async validate(document: LangiumDocument, cancelToken: CancellationToken): Promise {\n        const validator = this.serviceRegistry.getServices(document.uri).validation.DocumentValidator;\n        const validationSetting = this.getBuildOptions(document).validation;\n        const options = typeof validationSetting === 'object' ? validationSetting : undefined;\n        const diagnostics = await validator.validateDocument(document, options, cancelToken);\n        if (document.diagnostics) {\n            document.diagnostics.push(...diagnostics);\n        } else {\n            document.diagnostics = diagnostics;\n        }\n\n        // Store information about the executed validation in the build state\n        const state = this.buildState.get(document.uri.toString());\n        if (state) {\n            state.result ??= {};\n            const newCategories = options?.categories ?? ValidationCategory.all;\n            if (state.result.validationChecks) {\n                state.result.validationChecks.push(...newCategories);\n            } else {\n                state.result.validationChecks = [...newCategories];\n            }\n        }\n    }\n\n    protected getBuildOptions(document: LangiumDocument): BuildOptions {\n        return this.buildState.get(document.uri.toString())?.options ?? 
{};\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { ContextCache } from '../utils/caching.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport type { Stream } from '../utils/stream.js';\nimport { stream } from '../utils/stream.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { UriUtils } from '../utils/uri-utils.js';\nimport type { ReferenceDescription } from './ast-descriptions.js';\nimport type { LangiumDocument, LangiumDocuments } from './documents.js';\n\n/**\n * The index manager is responsible for keeping metadata about symbols and cross-references\n * in the workspace. It is used to look up symbols in the global scope, mostly during linking\n * and completion. This service is shared between all languages of a language server.\n */\nexport interface IndexManager {\n\n    /**\n     * Removes the specified document URI from the index.\n     * Necessary when documents are deleted and not referenceable anymore.\n     *\n     * @param uri The URI of the document for which index data shall be removed\n     */\n    remove(uri: URI): void;\n\n    /**\n     * Updates the information about the exportable content of a document inside the index.\n     *\n     * @param document Document to be updated\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    updateContent(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Updates the information about the cross-references of a document inside the index.\n     *\n     * @param document Document to be updated\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    updateReferences(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Determine whether the given document could be affected by changes of the documents\n     * identified by the given URIs (second parameter). 
The document is typically regarded as\n     * affected if it contains a reference to any of the changed files.\n     *\n     * @param document Document to check whether it's affected\n     * @param changedUris URIs of the changed documents\n     */\n    isAffected(document: LangiumDocument, changedUris: Set): boolean;\n\n    /**\n     * Compute a list of all exported elements, optionally filtered using a type identifier and document URIs.\n     *\n     * @param nodeType The type to filter with, or `undefined` to return descriptions of all types.\n     * @param uris If specified, only returns elements from the given URIs.\n     * @returns a `Stream` containing all globally visible nodes (of a given type).\n     */\n    allElements(nodeType?: string, uris?: Set): Stream;\n\n    /**\n     * Returns all known references that are pointing to the given `targetNode`.\n     *\n     * @param targetNode the `AstNode` to look up references for\n     * @param astNodePath the path that points to the `targetNode` inside the document. See also `AstNodeLocator`\n     *\n     * @returns a `Stream` of references that are targeting the `targetNode`\n     */\n    findAllReferences(targetNode: AstNode, astNodePath: string): Stream;\n\n}\n\nexport class DefaultIndexManager implements IndexManager {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly documents: LangiumDocuments;\n    protected readonly astReflection: AstReflection;\n\n    /**\n     * The symbol index stores all `AstNodeDescription` items exported by a document.\n     * The key used in this map is the string representation of the specific document URI.\n     */\n    protected readonly symbolIndex = new Map();\n    /**\n     * This is a cache for the `allElements()` method.\n     * It caches the descriptions from `symbolIndex` grouped by types.\n     */\n    protected readonly symbolByTypeIndex = new ContextCache();\n    /**\n     * This index keeps track of all `ReferenceDescription` items exported by a document.\n     * This is used to compute which elements are affected by a document change\n     * and for finding references to an AST node.\n     */\n    protected readonly referenceIndex = new Map();\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.documents = services.workspace.LangiumDocuments;\n        this.serviceRegistry = services.ServiceRegistry;\n        this.astReflection = services.AstReflection;\n    }\n\n    findAllReferences(targetNode: AstNode, astNodePath: string): Stream {\n        const targetDocUri = getDocument(targetNode).uri;\n        const result: ReferenceDescription[] = [];\n        this.referenceIndex.forEach(docRefs => {\n            docRefs.forEach(refDescr => {\n                if (UriUtils.equals(refDescr.targetUri, targetDocUri) && refDescr.targetPath === astNodePath) {\n                    result.push(refDescr);\n                }\n            });\n        });\n        return stream(result);\n    }\n\n    allElements(nodeType?: string, uris?: Set): Stream {\n        let documentUris = stream(this.symbolIndex.keys());\n        if (uris) {\n            documentUris = documentUris.filter(uri => !uris || uris.has(uri));\n        }\n        return documentUris\n            .map(uri => this.getFileDescriptions(uri, nodeType))\n            .flat();\n    }\n\n    protected getFileDescriptions(uri: string, nodeType?: string): AstNodeDescription[] {\n        if (!nodeType) {\n            return this.symbolIndex.get(uri) ?? 
[];\n        }\n        const descriptions = this.symbolByTypeIndex.get(uri, nodeType, () => {\n            const allFileDescriptions = this.symbolIndex.get(uri) ?? [];\n            return allFileDescriptions.filter(e => this.astReflection.isSubtype(e.type, nodeType));\n        });\n        return descriptions;\n    }\n\n    remove(uri: URI): void {\n        const uriString = uri.toString();\n        this.symbolIndex.delete(uriString);\n        this.symbolByTypeIndex.clear(uriString);\n        this.referenceIndex.delete(uriString);\n    }\n\n    async updateContent(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const services = this.serviceRegistry.getServices(document.uri);\n        const exports = await services.references.ScopeComputation.computeExports(document, cancelToken);\n        const uri = document.uri.toString();\n        this.symbolIndex.set(uri, exports);\n        this.symbolByTypeIndex.clear(uri);\n    }\n\n    async updateReferences(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const services = this.serviceRegistry.getServices(document.uri);\n        const indexData = await services.workspace.ReferenceDescriptionProvider.createDescriptions(document, cancelToken);\n        this.referenceIndex.set(document.uri.toString(), indexData);\n    }\n\n    isAffected(document: LangiumDocument, changedUris: Set): boolean {\n        const references = this.referenceIndex.get(document.uri.toString());\n        if (!references) {\n            return false;\n        }\n        return references.some(ref => !ref.local && changedUris.has(ref.targetUri.toString()));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { InitializeParams, InitializedParams } from 'vscode-languageserver-protocol';\nimport type { WorkspaceFolder } from 'vscode-languageserver-types';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { Deferred, interruptAndCheck } from '../utils/promise-utils.js';\nimport { URI, UriUtils } from '../utils/uri-utils.js';\nimport type { BuildOptions, DocumentBuilder } from './document-builder.js';\nimport type { LangiumDocument, LangiumDocuments } from './documents.js';\nimport type { FileSystemNode, FileSystemProvider } from './file-system-provider.js';\nimport type { WorkspaceLock } from './workspace-lock.js';\n\n// export type WorkspaceFolder from 'vscode-languageserver-types' for convenience,\n//  is supposed to avoid confusion as 'WorkspaceFolder' might accidentally be imported via 'vscode-languageclient'\nexport type { WorkspaceFolder };\n\n/**\n * The workspace manager is responsible for finding source files in the workspace.\n * This service is shared between all languages of a language server.\n */\nexport interface WorkspaceManager {\n\n    /** The options used for the initial workspace build. 
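// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): querying the index
// maintained by the IndexManager above. The 'Person' type name and the
// `personNode`/`locator` variables are illustrative; the service access path
// mirrors the constructor in this file.
import type { AstNode, AstNodeLocator, LangiumSharedCoreServices } from 'langium';

declare const sharedServices: LangiumSharedCoreServices;
declare const personNode: AstNode;
declare const locator: AstNodeLocator;

const indexManager = sharedServices.workspace.IndexManager;
// All exported descriptions of a given AST type across the workspace.
for (const description of indexManager.allElements('Person')) {
    console.log(description.name, description.documentUri.toString());
}
// All known references to one specific node, identified by its node path.
const refs = indexManager.findAllReferences(personNode, locator.getAstNodePath(personNode));
console.log(`found ${refs.count()} reference(s)`);
// --------------------------------------------------------------------------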
*/\n    initialBuildOptions: BuildOptions | undefined;\n\n    /**\n     * A promise that resolves when the workspace manager is ready to be used.\n     * Use this to ensure that the workspace manager has finished its initialization.\n     */\n    readonly ready: Promise;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialize` request.\n     */\n    initialize(params: InitializeParams): void;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialized` notification.\n     */\n    initialized(params: InitializedParams): Promise;\n\n    /**\n     * Does the initial indexing of workspace folders.\n     * Collects information about exported and referenced AstNodes in\n     * each language file and stores it locally.\n     *\n     * @param folders The set of workspace folders to be indexed.\n     */\n    initializeWorkspace(folders: WorkspaceFolder[], cancelToken?: CancellationToken): Promise;\n\n}\n\nexport class DefaultWorkspaceManager implements WorkspaceManager {\n\n    initialBuildOptions: BuildOptions = {};\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly documentBuilder: DocumentBuilder;\n    protected readonly fileSystemProvider: FileSystemProvider;\n    protected readonly mutex: WorkspaceLock;\n    protected readonly _ready = new Deferred();\n    protected folders?: WorkspaceFolder[];\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n        this.langiumDocuments = services.workspace.LangiumDocuments;\n        this.documentBuilder = services.workspace.DocumentBuilder;\n        this.fileSystemProvider = services.workspace.FileSystemProvider;\n        this.mutex = services.workspace.WorkspaceLock;\n    }\n\n    get ready(): Promise {\n        return this._ready.promise;\n    }\n\n    initialize(params: InitializeParams): void {\n        this.folders = params.workspaceFolders ?? undefined;\n    }\n\n    initialized(_params: InitializedParams): Promise {\n        // Initialize the workspace even if there are no workspace folders\n        // We still want to load additional documents (language library or similar) during initialization\n        return this.mutex.write(token => this.initializeWorkspace(this.folders ?? 
[], token));\n    }\n\n    async initializeWorkspace(folders: WorkspaceFolder[], cancelToken = CancellationToken.None): Promise {\n        const documents = await this.performStartup(folders);\n        // Only after creating all documents do we check whether we need to cancel the initialization\n        // The document builder will later pick up on all unprocessed documents\n        await interruptAndCheck(cancelToken);\n        await this.documentBuilder.build(documents, this.initialBuildOptions, cancelToken);\n    }\n\n    /**\n     * Performs the uninterruptable startup sequence of the workspace manager.\n     * This methods loads all documents in the workspace and other documents and returns them.\n     */\n    protected async performStartup(folders: WorkspaceFolder[]): Promise {\n        const fileExtensions = this.serviceRegistry.all.flatMap(e => e.LanguageMetaData.fileExtensions);\n        const documents: LangiumDocument[] = [];\n        const collector = (document: LangiumDocument) => {\n            documents.push(document);\n            if (!this.langiumDocuments.hasDocument(document.uri)) {\n                this.langiumDocuments.addDocument(document);\n            }\n        };\n        // Even though we don't await the initialization of the workspace manager,\n        // we can still assume that all library documents and file documents are loaded by the time we start building documents.\n        // The mutex prevents anything from performing a workspace build until we check the cancellation token\n        await this.loadAdditionalDocuments(folders, collector);\n        await Promise.all(\n            folders.map(wf => [wf, this.getRootFolder(wf)] as [WorkspaceFolder, URI])\n                .map(async entry => this.traverseFolder(...entry, fileExtensions, collector))\n        );\n        this._ready.resolve();\n        return documents;\n    }\n\n    /**\n     * Load all additional documents that shall be visible in the context of the given workspace\n     * folders and add them to the collector. This can be used to include built-in libraries of\n     * your language, which can be either loaded from provided files or constructed in memory.\n     */\n    protected loadAdditionalDocuments(_folders: WorkspaceFolder[], _collector: (document: LangiumDocument) => void): Promise {\n        return Promise.resolve();\n    }\n\n    /**\n     * Determine the root folder of the source documents in the given workspace folder.\n     * The default implementation returns the URI of the workspace folder, but you can override\n     * this to return a subfolder like `src` instead.\n     */\n    protected getRootFolder(workspaceFolder: WorkspaceFolder): URI {\n        return URI.parse(workspaceFolder.uri);\n    }\n\n    /**\n     * Traverse the file system folder identified by the given URI and its subfolders. 
All\n     * contained files that match the file extensions are added to the collector.\n     */\n    protected async traverseFolder(workspaceFolder: WorkspaceFolder, folderPath: URI, fileExtensions: string[], collector: (document: LangiumDocument) => void): Promise {\n        const content = await this.fileSystemProvider.readDirectory(folderPath);\n        await Promise.all(content.map(async entry => {\n            if (this.includeEntry(workspaceFolder, entry, fileExtensions)) {\n                if (entry.isDirectory) {\n                    await this.traverseFolder(workspaceFolder, entry.uri, fileExtensions, collector);\n                } else if (entry.isFile) {\n                    const document = await this.langiumDocuments.getOrCreateDocument(entry.uri);\n                    collector(document);\n                }\n            }\n        }));\n    }\n\n    /**\n     * Determine whether the given folder entry shall be included while indexing the workspace.\n     */\n    protected includeEntry(_workspaceFolder: WorkspaceFolder, entry: FileSystemNode, fileExtensions: string[]): boolean {\n        const name = UriUtils.basename(entry.uri);\n        if (name.startsWith('.')) {\n            return false;\n        }\n        if (entry.isDirectory) {\n            return name !== 'node_modules' && name !== 'out';\n        } else if (entry.isFile) {\n            const extname = UriUtils.extname(entry.uri);\n            return fileExtensions.includes(extname);\n        }\n        return false;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ILexingError, IMultiModeLexerDefinition, IToken, TokenType, TokenTypeDictionary, TokenVocabulary } from 'chevrotain';\nimport type { LangiumCoreServices } from '../services.js';\nimport { Lexer as ChevrotainLexer } from 'chevrotain';\n\nexport interface LexerResult {\n    /**\n     * A list of all tokens that were lexed from the input.\n     *\n     * Note that Langium requires the optional properties\n     * `startLine`, `startColumn`, `endOffset`, `endLine` and `endColumn` to be set on each token.\n     */\n    tokens: IToken[];\n    /**\n     * Contains hidden tokens, usually comments.\n     */\n    hidden: IToken[];\n    errors: ILexingError[];\n}\n\nexport interface Lexer {\n    readonly definition: TokenTypeDictionary;\n    tokenize(text: string): LexerResult;\n}\n\nexport class DefaultLexer implements Lexer {\n\n    protected chevrotainLexer: ChevrotainLexer;\n    protected tokenTypes: TokenTypeDictionary;\n\n    constructor(services: LangiumCoreServices) {\n        const tokens = services.parser.TokenBuilder.buildTokens(services.Grammar, {\n            caseInsensitive: services.LanguageMetaData.caseInsensitive\n        });\n        this.tokenTypes = this.toTokenTypeDictionary(tokens);\n        const lexerTokens = isTokenTypeDictionary(tokens) ? 
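// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): customizing workspace
// indexing by subclassing the DefaultWorkspaceManager defined above. The
// built-in library URI/content and the '/generated/' exclusion are assumptions
// made for illustration.
import { DefaultWorkspaceManager, URI } from 'langium';
import type {
    FileSystemNode, LangiumDocument, LangiumDocumentFactory,
    LangiumSharedCoreServices, WorkspaceFolder
} from 'langium';

export class MyWorkspaceManager extends DefaultWorkspaceManager {

    private readonly documentFactory: LangiumDocumentFactory;

    constructor(services: LangiumSharedCoreServices) {
        super(services);
        this.documentFactory = services.workspace.LangiumDocumentFactory;
    }

    protected override async loadAdditionalDocuments(
        folders: WorkspaceFolder[],
        collector: (document: LangiumDocument) => void
    ): Promise<void> {
        await super.loadAdditionalDocuments(folders, collector);
        // Make an in-memory standard library visible to all workspace documents.
        collector(this.documentFactory.fromString('// stdlib content', URI.parse('builtin:///stdlib.mylang')));
    }

    protected override includeEntry(folder: WorkspaceFolder, entry: FileSystemNode, fileExtensions: string[]): boolean {
        // Additionally skip generated output folders while indexing.
        return super.includeEntry(folder, entry, fileExtensions) && !entry.uri.path.includes('/generated/');
    }
}
// --------------------------------------------------------------------------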
Object.values(tokens) : tokens;\n        this.chevrotainLexer = new ChevrotainLexer(lexerTokens, {\n            positionTracking: 'full'\n        });\n    }\n\n    get definition(): TokenTypeDictionary {\n        return this.tokenTypes;\n    }\n\n    tokenize(text: string): LexerResult {\n        const chevrotainResult = this.chevrotainLexer.tokenize(text);\n        return {\n            tokens: chevrotainResult.tokens,\n            errors: chevrotainResult.errors,\n            hidden: chevrotainResult.groups.hidden ?? []\n        };\n    }\n\n    protected toTokenTypeDictionary(buildTokens: TokenVocabulary): TokenTypeDictionary {\n        if (isTokenTypeDictionary(buildTokens)) return buildTokens;\n        const tokens = isIMultiModeLexerDefinition(buildTokens) ? Object.values(buildTokens.modes).flat() : buildTokens;\n        const res: TokenTypeDictionary = {};\n        tokens.forEach(token => res[token.name] = token);\n        return res;\n    }\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is TokenType array\n */\nexport function isTokenTypeArray(tokenVocabulary: TokenVocabulary): tokenVocabulary is TokenType[] {\n    return Array.isArray(tokenVocabulary) && (tokenVocabulary.length === 0 || 'name' in tokenVocabulary[0]);\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is IMultiModeLexerDefinition\n */\nexport function isIMultiModeLexerDefinition(tokenVocabulary: TokenVocabulary): tokenVocabulary is IMultiModeLexerDefinition {\n    return tokenVocabulary && 'modes' in tokenVocabulary && 'defaultMode' in tokenVocabulary;\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is TokenTypeDictionary\n */\nexport function isTokenTypeDictionary(tokenVocabulary: TokenVocabulary): tokenVocabulary is TokenTypeDictionary {\n    return !isTokenTypeArray(tokenVocabulary) && !isIMultiModeLexerDefinition(tokenVocabulary);\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { Position, Range } from 'vscode-languageserver-types';\nimport type { CstNode } from '../syntax-tree.js';\nimport { NEWLINE_REGEXP, escapeRegExp } from '../utils/regexp-utils.js';\nimport { URI } from '../utils/uri-utils.js';\n\nexport interface JSDocComment extends JSDocValue {\n    readonly elements: JSDocElement[]\n    getTag(name: string): JSDocTag | undefined\n    getTags(name: string): JSDocTag[]\n}\n\nexport type JSDocElement = JSDocParagraph | JSDocTag;\n\nexport type JSDocInline = JSDocTag | JSDocLine;\n\nexport interface JSDocValue {\n    /**\n     * Represents the range that this JSDoc element occupies.\n     * If the JSDoc was parsed from a `CstNode`, the range will represent the location in the source document.\n     */\n    readonly range: Range\n    /**\n     * Renders this JSDoc element to a plain text representation.\n     */\n    toString(): string\n    /**\n     * Renders this JSDoc element to a markdown representation.\n     *\n     * @param options Rendering options to customize the markdown result.\n     */\n    toMarkdown(options?: JSDocRenderOptions): string\n}\n\nexport interface JSDocParagraph extends JSDocValue {\n    readonly inlines: JSDocInline[]\n}\n\nexport interface JSDocLine extends JSDocValue {\n    readonly text: string\n}\n\nexport interface JSDocTag 
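// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): tokenizing a snippet
// with the lexer defined above. Assumes `services` is a concrete language's
// core services instance; the input string is illustrative.
import type { LangiumCoreServices } from 'langium';

declare const services: LangiumCoreServices;

const lexer = services.parser.Lexer;
const result = lexer.tokenize('person Alice');
if (result.errors.length > 0) {
    console.error(result.errors.map(e => e.message).join('\n'));
}
for (const token of result.tokens) {
    // With full position tracking enabled (see above), these fields are set.
    console.log(token.tokenType.name, token.image, token.startLine, token.startColumn);
}
// Hidden tokens (typically comments) are reported separately.
console.log(`${result.hidden.length} hidden token(s)`);
// --------------------------------------------------------------------------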
extends JSDocValue {\n    readonly name: string\n    readonly content: JSDocParagraph\n    readonly inline: boolean\n}\n\nexport interface JSDocParseOptions {\n    /**\n     * The start symbol of your comment format. Defaults to `/**`.\n     */\n    readonly start?: RegExp | string\n    /**\n     * The symbol that start a line of your comment format. Defaults to `*`.\n     */\n    readonly line?: RegExp | string\n    /**\n     * The end symbol of your comment format. Defaults to `*\\/`.\n     */\n    readonly end?: RegExp | string\n}\n\nexport interface JSDocRenderOptions {\n    /**\n     * Determines the style for rendering tags. Defaults to `italic`.\n     */\n    tag?: 'plain' | 'italic' | 'bold' | 'bold-italic'\n    /**\n     * Determines the default for rendering `@link` tags. Defaults to `plain`.\n     */\n    link?: 'code' | 'plain'\n    /**\n     * Custom tag rendering function.\n     * Return a markdown formatted tag or `undefined` to fall back to the default rendering.\n     */\n    renderTag?(tag: JSDocTag): string | undefined\n    /**\n     * Custom link rendering function. Accepts a link target and a display value for the link.\n     * Return a markdown formatted link with the format `[$display]($link)` or `undefined` if the link is not a valid target.\n     */\n    renderLink?(link: string, display: string): string | undefined\n}\n\n/**\n * Parses a JSDoc from a `CstNode` containing a comment.\n *\n * @param node A `CstNode` from a parsed Langium document.\n * @param options Parsing options specialized to your language. See {@link JSDocParseOptions}.\n */\nexport function parseJSDoc(node: CstNode, options?: JSDocParseOptions): JSDocComment;\n/**\n * Parses a JSDoc from a string comment.\n *\n * @param content A string containing the source of the JSDoc comment.\n * @param start The start position the comment occupies in the source document.\n * @param options Parsing options specialized to your language. 
See {@link JSDocParseOptions}.\n */\nexport function parseJSDoc(content: string, start?: Position, options?: JSDocParseOptions): JSDocComment;\nexport function parseJSDoc(node: CstNode | string, start?: Position | JSDocParseOptions, options?: JSDocParseOptions): JSDocComment {\n    let opts: JSDocParseOptions | undefined;\n    let position: Position | undefined;\n    if (typeof node === 'string') {\n        position = start as Position | undefined;\n        opts = options as JSDocParseOptions | undefined;\n    } else {\n        position = node.range.start;\n        opts = start as JSDocParseOptions | undefined;\n    }\n    if (!position) {\n        position = Position.create(0, 0);\n    }\n\n    const lines = getLines(node);\n    const normalizedOptions = normalizeOptions(opts);\n\n    const tokens = tokenize({\n        lines,\n        position,\n        options: normalizedOptions\n    });\n\n    return parseJSDocComment({\n        index: 0,\n        tokens,\n        position\n    });\n}\n\nexport function isJSDoc(node: CstNode | string, options?: JSDocParseOptions): boolean {\n    const normalizedOptions = normalizeOptions(options);\n    const lines = getLines(node);\n    if (lines.length === 0) {\n        return false;\n    }\n\n    const first = lines[0];\n    const last = lines[lines.length - 1];\n    const firstRegex = normalizedOptions.start;\n    const lastRegex = normalizedOptions.end;\n\n    return Boolean(firstRegex?.exec(first)) && Boolean(lastRegex?.exec(last));\n}\n\nfunction getLines(node: CstNode | string): string[] {\n    let content = '';\n    if (typeof node === 'string') {\n        content = node;\n    } else {\n        content = node.text;\n    }\n    const lines = content.split(NEWLINE_REGEXP);\n    return lines;\n}\n\n// Tokenization\n\ninterface JSDocToken {\n    type: 'text' | 'tag' | 'inline-tag' | 'break'\n    content: string\n    range: Range\n}\n\nconst tagRegex = /\\s*(@([\\p{L}][\\p{L}\\p{N}]*)?)/uy;\nconst inlineTagRegex = /\\{(@[\\p{L}][\\p{L}\\p{N}]*)(\\s*)([^\\r\\n}]+)?\\}/gu;\n\nfunction tokenize(context: TokenizationContext): JSDocToken[] {\n    const tokens: JSDocToken[] = [];\n    let currentLine = context.position.line;\n    let currentCharacter = context.position.character;\n    for (let i = 0; i < context.lines.length; i++) {\n        const first = i === 0;\n        const last = i === context.lines.length - 1;\n        let line = context.lines[i];\n        let index = 0;\n\n        if (first && context.options.start) {\n            const match = context.options.start?.exec(line);\n            if (match) {\n                index = match.index + match[0].length;\n            }\n        } else {\n            const match = context.options.line?.exec(line);\n            if (match) {\n                index = match.index + match[0].length;\n            }\n        }\n        if (last) {\n            const match = context.options.end?.exec(line);\n            if (match) {\n                line = line.substring(0, match.index);\n            }\n        }\n\n        line = line.substring(0, lastCharacter(line));\n        const whitespaceEnd = skipWhitespace(line, index);\n\n        if (whitespaceEnd >= line.length) {\n            // Only create a break token when we already have previous tokens\n            if (tokens.length > 0) {\n                const position = Position.create(currentLine, currentCharacter);\n                tokens.push({\n                    type: 'break',\n                    content: '',\n                    range: Range.create(position, 
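// --------------------------------------------------------------------------
// Editor's usage sketch (not part of the mapped sources): parsing a JSDoc
// comment from a plain string with the functions declared above. Assumes
// `parseJSDoc` and `isJSDoc` are re-exported from the 'langium' package.
import { isJSDoc, parseJSDoc } from 'langium';

const comment = `/**
 * Says hello to a person.
 * @param name Who to greet, see {@link Person}.
 * @deprecated
 */`;

if (isJSDoc(comment)) {
    const doc = parseJSDoc(comment);
    // Plain-text rendering of the whole comment.
    console.log(doc.toString());
    // Tags can be looked up by name (without the leading '@').
    console.log(doc.getTag('param')?.content.toString());
    // Markdown rendering is customizable, e.g. bold tag names and code links.
    console.log(doc.toMarkdown({ tag: 'bold', link: 'code' }));
}
// --------------------------------------------------------------------------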
position)\n                });\n            }\n        } else {\n            tagRegex.lastIndex = index;\n            const tagMatch = tagRegex.exec(line);\n            if (tagMatch) {\n                const fullMatch = tagMatch[0];\n                const value = tagMatch[1];\n                const start = Position.create(currentLine, currentCharacter + index);\n                const end = Position.create(currentLine, currentCharacter + index + fullMatch.length);\n                tokens.push({\n                    type: 'tag',\n                    content: value,\n                    range: Range.create(start, end)\n                });\n                index += fullMatch.length;\n                index = skipWhitespace(line, index);\n            }\n\n            if (index < line.length) {\n                const rest = line.substring(index);\n                const inlineTagMatches = Array.from(rest.matchAll(inlineTagRegex));\n                tokens.push(...buildInlineTokens(inlineTagMatches, rest, currentLine, currentCharacter + index));\n            }\n        }\n\n        currentLine++;\n        currentCharacter = 0;\n    }\n\n    // Remove last break token if there is one\n    if (tokens.length > 0 && tokens[tokens.length - 1].type === 'break') {\n        return tokens.slice(0, -1);\n    }\n\n    return tokens;\n}\n\nfunction buildInlineTokens(tags: RegExpMatchArray[], line: string, lineIndex: number, characterIndex: number): JSDocToken[] {\n    const tokens: JSDocToken[] = [];\n\n    if (tags.length === 0) {\n        const start = Position.create(lineIndex, characterIndex);\n        const end = Position.create(lineIndex, characterIndex + line.length);\n        tokens.push({\n            type: 'text',\n            content: line,\n            range: Range.create(start, end)\n        });\n    } else {\n        let lastIndex = 0;\n        for (const match of tags) {\n            const matchIndex = match.index!;\n            const startContent = line.substring(lastIndex, matchIndex);\n            if (startContent.length > 0) {\n                tokens.push({\n                    type: 'text',\n                    content: line.substring(lastIndex, matchIndex),\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + characterIndex),\n                        Position.create(lineIndex, matchIndex + characterIndex)\n                    )\n                });\n            }\n            let offset = startContent.length + 1;\n            const tagName = match[1];\n            tokens.push({\n                type: 'inline-tag',\n                content: tagName,\n                range: Range.create(\n                    Position.create(lineIndex, lastIndex + offset + characterIndex),\n                    Position.create(lineIndex, lastIndex + offset + tagName.length + characterIndex)\n                )\n            });\n            offset += tagName.length;\n            if (match.length === 4) {\n                offset += match[2].length;\n                const value = match[3];\n                tokens.push({\n                    type: 'text',\n                    content: value,\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + offset + characterIndex),\n                        Position.create(lineIndex, lastIndex + offset + value.length + characterIndex)\n                    )\n                });\n            } else {\n                tokens.push({\n                    type: 'text',\n           
         content: '',\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + offset + characterIndex),\n                        Position.create(lineIndex, lastIndex + offset + characterIndex)\n                    )\n                });\n            }\n            lastIndex = matchIndex + match[0].length;\n        }\n        const endContent = line.substring(lastIndex);\n        if (endContent.length > 0) {\n            tokens.push({\n                type: 'text',\n                content: endContent,\n                range: Range.create(\n                    Position.create(lineIndex, lastIndex + characterIndex),\n                    Position.create(lineIndex, lastIndex + characterIndex + endContent.length)\n                )\n            });\n        }\n    }\n\n    return tokens;\n}\n\nconst nonWhitespaceRegex = /\\S/;\nconst whitespaceEndRegex = /\\s*$/;\n\nfunction skipWhitespace(line: string, index: number): number {\n    const match = line.substring(index).match(nonWhitespaceRegex);\n    if (match) {\n        return index + match.index!;\n    } else {\n        return line.length;\n    }\n}\n\nfunction lastCharacter(line: string): number | undefined {\n    const match = line.match(whitespaceEndRegex);\n    if (match && typeof match.index === 'number') {\n        return match.index;\n    }\n    return undefined;\n}\n\n// Parsing\n\nfunction parseJSDocComment(context: ParseContext): JSDocComment {\n    const startPosition: Position = Position.create(context.position.line, context.position.character);\n    if (context.tokens.length === 0) {\n        return new JSDocCommentImpl([], Range.create(startPosition, startPosition));\n    }\n    const elements: JSDocElement[] = [];\n    while (context.index < context.tokens.length) {\n        const element = parseJSDocElement(context, elements[elements.length - 1]);\n        if (element) {\n            elements.push(element);\n        }\n    }\n    const start = elements[0]?.range.start ?? startPosition;\n    const end = elements[elements.length - 1]?.range.end ?? 
startPosition;\n    return new JSDocCommentImpl(elements, Range.create(start, end));\n}\n\nfunction parseJSDocElement(context: ParseContext, last?: JSDocElement): JSDocElement | undefined {\n    const next = context.tokens[context.index];\n    if (next.type === 'tag') {\n        return parseJSDocTag(context, false);\n    } else if (next.type === 'text' || next.type === 'inline-tag') {\n        return parseJSDocText(context);\n    } else {\n        appendEmptyLine(next, last);\n        context.index++;\n        return undefined;\n    }\n}\n\nfunction appendEmptyLine(token: JSDocToken, element?: JSDocElement): void {\n    if (element) {\n        const line = new JSDocLineImpl('', token.range);\n        if ('inlines' in element) {\n            element.inlines.push(line);\n        } else {\n            element.content.inlines.push(line);\n        }\n    }\n}\n\nfunction parseJSDocText(context: ParseContext): JSDocParagraph {\n    let token = context.tokens[context.index];\n    const firstToken = token;\n    let lastToken = token;\n    const lines: JSDocInline[] = [];\n    while (token && token.type !== 'break' && token.type !== 'tag') {\n        lines.push(parseJSDocInline(context));\n        lastToken = token;\n        token = context.tokens[context.index];\n    }\n    return new JSDocTextImpl(lines, Range.create(firstToken.range.start, lastToken.range.end));\n}\n\nfunction parseJSDocInline(context: ParseContext): JSDocInline {\n    const token = context.tokens[context.index];\n    if (token.type === 'inline-tag') {\n        return parseJSDocTag(context, true);\n    } else {\n        return parseJSDocLine(context);\n    }\n}\n\nfunction parseJSDocTag(context: ParseContext, inline: boolean): JSDocTag {\n    const tagToken = context.tokens[context.index++];\n    const name = tagToken.content.substring(1);\n    const nextToken = context.tokens[context.index];\n    if (nextToken?.type === 'text') {\n        if (inline) {\n            const docLine = parseJSDocLine(context);\n            return new JSDocTagImpl(\n                name,\n                new JSDocTextImpl([docLine], docLine.range),\n                inline,\n                Range.create(tagToken.range.start, docLine.range.end)\n            );\n        } else {\n            const textDoc = parseJSDocText(context);\n            return new JSDocTagImpl(\n                name,\n                textDoc,\n                inline,\n                Range.create(tagToken.range.start, textDoc.range.end)\n            );\n        }\n    } else {\n        const range = tagToken.range;\n        return new JSDocTagImpl(name, new JSDocTextImpl([], range), inline, range);\n    }\n}\n\nfunction parseJSDocLine(context: ParseContext): JSDocLine {\n    const token = context.tokens[context.index++];\n    return new JSDocLineImpl(token.content, token.range);\n}\n\ninterface NormalizedOptions {\n    start?: RegExp\n    end?: RegExp\n    line?: RegExp\n}\n\ninterface TokenizationContext {\n    position: Position\n    lines: string[]\n    options: NormalizedOptions\n}\n\ninterface ParseContext {\n    position: Position\n    tokens: JSDocToken[]\n    index: number\n}\n\nfunction normalizeOptions(options?: JSDocParseOptions): NormalizedOptions {\n    if (!options) {\n        return normalizeOptions({\n            start: '/**',\n            end: '*/',\n            line: '*'\n        });\n    }\n    const { start, end, line } = options;\n    return {\n        start: normalizeOption(start, true),\n        end: normalizeOption(end, false),\n        line: 
normalizeOption(line, true)\n    };\n}\n\nfunction normalizeOption(option: RegExp | string | undefined, start: boolean): RegExp | undefined {\n    if (typeof option === 'string' || typeof option === 'object') {\n        const escaped = typeof option === 'string' ? escapeRegExp(option) : option.source;\n        if (start) {\n            return new RegExp(`^\\\\s*${escaped}`);\n        } else {\n            return new RegExp(`\\\\s*${escaped}\\\\s*$`);\n        }\n    } else {\n        return option;\n    }\n}\n\nclass JSDocCommentImpl implements JSDocComment {\n\n    readonly elements: JSDocElement[];\n    readonly range: Range;\n\n    constructor(elements: JSDocElement[], range: Range) {\n        this.elements = elements;\n        this.range = range;\n    }\n\n    getTag(name: string): JSDocTag | undefined {\n        return this.getAllTags().find(e => e.name === name);\n    }\n\n    getTags(name: string): JSDocTag[] {\n        return this.getAllTags().filter(e => e.name === name);\n    }\n\n    private getAllTags(): JSDocTag[] {\n        return this.elements.filter((e): e is JSDocTag => 'name' in e);\n    }\n\n    toString(): string {\n        let value = '';\n        for (const element of this.elements) {\n            if (value.length === 0) {\n                value = element.toString();\n            } else {\n                const text = element.toString();\n                value += fillNewlines(value) + text;\n            }\n        }\n        return value.trim();\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        let value = '';\n        for (const element of this.elements) {\n            if (value.length === 0) {\n                value = element.toMarkdown(options);\n            } else {\n                const text = element.toMarkdown(options);\n                value += fillNewlines(value) + text;\n            }\n        }\n        return value.trim();\n    }\n}\n\nclass JSDocTagImpl implements JSDocTag {\n    name: string;\n    content: JSDocParagraph;\n    range: Range;\n    inline: boolean;\n\n    constructor(name: string, content: JSDocParagraph, inline: boolean, range: Range) {\n        this.name = name;\n        this.content = content;\n        this.inline = inline;\n        this.range = range;\n    }\n\n    toString(): string {\n        let text = `@${this.name}`;\n        const content = this.content.toString();\n        if (this.content.inlines.length === 1) {\n            text = `${text} ${content}`;\n        } else if (this.content.inlines.length > 1) {\n            text = `${text}\\n${content}`;\n        }\n        if (this.inline) {\n            // Inline tags are surrounded by curly braces\n            return `{${text}}`;\n        } else {\n            return text;\n        }\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        return options?.renderTag?.(this) ?? this.toMarkdownDefault(options);\n    }\n\n    private toMarkdownDefault(options?: JSDocRenderOptions): string {\n        const content = this.content.toMarkdown(options);\n        if (this.inline) {\n            const rendered = renderInlineTag(this.name, content, options ?? 
{});\n            if (typeof rendered === 'string') {\n                return rendered;\n            }\n        }\n        let marker = '';\n        if (options?.tag === 'italic' || options?.tag === undefined) {\n            marker = '*';\n        } else if (options?.tag === 'bold') {\n            marker = '**';\n        } else if (options?.tag === 'bold-italic') {\n            marker = '***';\n        }\n        let text = `${marker}@${this.name}${marker}`;\n        if (this.content.inlines.length === 1) {\n            text = `${text} \u2014 ${content}`;\n        } else if (this.content.inlines.length > 1) {\n            text = `${text}\\n${content}`;\n        }\n        if (this.inline) {\n            // Inline tags are surrounded by curly braces\n            return `{${text}}`;\n        } else {\n            return text;\n        }\n    }\n}\n\nfunction renderInlineTag(tag: string, content: string, options: JSDocRenderOptions): string | undefined {\n    if (tag === 'linkplain' || tag === 'linkcode' || tag === 'link') {\n        const index = content.indexOf(' ');\n        let display = content;\n        if (index > 0) {\n            const displayStart = skipWhitespace(content, index);\n            display = content.substring(displayStart);\n            content = content.substring(0, index);\n        }\n        if (tag === 'linkcode' || (tag === 'link' && options.link === 'code')) {\n            // Surround the display value in a markdown inline code block\n            display = `\\`${display}\\``;\n        }\n        const renderedLink = options.renderLink?.(content, display) ?? renderLinkDefault(content, display);\n        return renderedLink;\n    }\n    return undefined;\n}\n\nfunction renderLinkDefault(content: string, display: string): string {\n    try {\n        URI.parse(content, true);\n        return `[${display}](${content})`;\n    } catch {\n        return content;\n    }\n}\n\nclass JSDocTextImpl implements JSDocParagraph {\n    inlines: JSDocInline[];\n    range: Range;\n\n    constructor(lines: JSDocInline[], range: Range) {\n        this.inlines = lines;\n        this.range = range;\n    }\n\n    toString(): string {\n        let text = '';\n        for (let i = 0; i < this.inlines.length; i++) {\n            const inline = this.inlines[i];\n            const next = this.inlines[i + 1];\n            text += inline.toString();\n            if (next && next.range.start.line > inline.range.start.line) {\n                text += '\\n';\n            }\n        }\n        return text;\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        let text = '';\n        for (let i = 0; i < this.inlines.length; i++) {\n            const inline = this.inlines[i];\n            const next = this.inlines[i + 1];\n            text += inline.toMarkdown(options);\n            if (next && next.range.start.line > inline.range.start.line) {\n                text += '\\n';\n            }\n        }\n        return text;\n    }\n}\n\nclass JSDocLineImpl implements JSDocLine {\n    text: string;\n    range: Range;\n\n    constructor(text: string, range: Range) {\n        this.text = text;\n        this.range = range;\n    }\n\n    toString(): string {\n        return this.text;\n    }\n    toMarkdown(): string {\n        return this.text;\n    }\n\n}\n\nfunction fillNewlines(text: string): string {\n    if (text.endsWith('\\n')) {\n        return '\\n';\n    } else {\n        return '\\n\\n';\n    }\n}\n", 
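A minimal usage sketch of the JSDoc parser defined in the source above. It assumes that `isJSDoc` and `parseJSDoc` are re-exported from the `langium` package entry point (the following source imports them from './jsdoc.js'); the comment text and render options are illustrative only, not taken from these sources.

import { isJSDoc, parseJSDoc } from 'langium';

const comment = '/**\n * Draws a node.\n * @param id the node identifier\n */';
if (isJSDoc(comment)) {
    const doc = parseJSDoc(comment);
    // Look up a single tag by name and read its textual content
    const param = doc.getTag('param');
    console.log(param?.content.toString()); // "id the node identifier"
    // Render the whole comment as markdown, emphasizing tag names in bold
    console.log(doc.toMarkdown({ tag: 'bold' }));
}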
"/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription } from '../syntax-tree.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { CommentProvider } from './comment-provider.js';\nimport type { JSDocTag } from './jsdoc.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { isJSDoc, parseJSDoc } from './jsdoc.js';\n\n/**\n * Provides documentation for AST nodes.\n */\nexport interface DocumentationProvider {\n    /**\n     * Returns a markdown documentation string for the specified AST node.\n     *\n     * The default implementation `JSDocDocumentationProvider` will inspect the comment associated with the specified node.\n     */\n    getDocumentation(node: AstNode): string | undefined;\n}\n\nexport class JSDocDocumentationProvider implements DocumentationProvider {\n\n    protected readonly indexManager: IndexManager;\n    protected readonly commentProvider: CommentProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.indexManager = services.shared.workspace.IndexManager;\n        this.commentProvider = services.documentation.CommentProvider;\n    }\n\n    getDocumentation(node: AstNode): string | undefined {\n        const comment = this.commentProvider.getComment(node);\n        if (comment && isJSDoc(comment)) {\n            const parsedJSDoc = parseJSDoc(comment);\n            return parsedJSDoc.toMarkdown({\n                renderLink: (link, display) => {\n                    return this.documentationLinkRenderer(node, link, display);\n                },\n                renderTag: (tag) => {\n                    return this.documentationTagRenderer(node, tag);\n                }\n            });\n        }\n        return undefined;\n    }\n\n    protected documentationLinkRenderer(node: AstNode, name: string, display: string): string | undefined {\n        const description = this.findNameInPrecomputedScopes(node, name) ?? 
this.findNameInGlobalScope(node, name);\n        if (description && description.nameSegment) {\n            const line = description.nameSegment.range.start.line + 1;\n            const character = description.nameSegment.range.start.character + 1;\n            const uri = description.documentUri.with({ fragment: `L${line},${character}` });\n            return `[${display}](${uri.toString()})`;\n        } else {\n            return undefined;\n        }\n    }\n\n    protected documentationTagRenderer(_node: AstNode, _tag: JSDocTag): string | undefined {\n        // Fall back to the default tag rendering\n        return undefined;\n    }\n\n    protected findNameInPrecomputedScopes(node: AstNode, name: string): AstNodeDescription | undefined {\n        const document = getDocument(node);\n        const precomputed = document.precomputedScopes;\n        if (!precomputed) {\n            return undefined;\n        }\n        let currentNode: AstNode | undefined = node;\n        do {\n            const allDescriptions = precomputed.get(currentNode);\n            const description = allDescriptions.find(e => e.name === name);\n            if (description) {\n                return description;\n            }\n            currentNode = currentNode.$container;\n        } while (currentNode);\n\n        return undefined;\n    }\n\n    protected findNameInGlobalScope(node: AstNode, name: string): AstNodeDescription | undefined {\n        const description = this.indexManager.allElements().find(e => e.name === name);\n        return description;\n    }\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { GrammarConfig } from '../languages/grammar-config.js';\nimport { isAstNodeWithComment } from '../serializer/json-serializer.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport { findCommentNode } from '../utils/cst-utils.js';\n\n/**\n * Provides comments for AST nodes.\n */\nexport interface CommentProvider {\n    /**\n     * Returns the comment associated with the specified AST node.\n     * @param node The AST node to get the comment for.\n     * @returns The comment associated with the specified AST node or `undefined` if there is no comment.\n     */\n    getComment(node: AstNode): string | undefined;\n}\n\nexport class DefaultCommentProvider implements CommentProvider {\n    protected readonly grammarConfig: () => GrammarConfig;\n    constructor(services: LangiumCoreServices) {\n        this.grammarConfig = () => services.parser.GrammarConfig;\n    }\n    getComment(node: AstNode): string | undefined {\n        if(isAstNodeWithComment(node)) {\n            return node.$comment;\n        }\n        return findCommentNode(node.$cstNode, this.grammarConfig().multilineCommentRules)?.text;\n    }\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n// eslint-disable-next-line no-restricted-imports\nexport * from 
'vscode-jsonrpc/lib/common/events.js';\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CancellationToken } from '../utils/cancellation.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { LangiumParser, ParseResult } from './langium-parser.js';\nimport type { Hydrator } from '../serializer/hydrator.js';\nimport type { Event } from '../utils/event.js';\nimport { Deferred, OperationCancelled } from '../utils/promise-utils.js';\nimport { Emitter } from '../utils/event.js';\n\n/**\n * Async parser that allows to cancel the current parsing process.\n * The sync parser implementation is blocking the event loop, which can become quite problematic for large files.\n *\n * Note that the default implementation is not actually async. It just wraps the sync parser in a promise.\n * A real implementation would create worker threads or web workers to offload the parsing work.\n */\nexport interface AsyncParser {\n    parse(text: string, cancelToken: CancellationToken): Promise>;\n}\n\n/**\n * Default implementation of the async parser. This implementation only wraps the sync parser in a promise.\n *\n * A real implementation would create worker threads or web workers to offload the parsing work.\n */\nexport class DefaultAsyncParser implements AsyncParser {\n\n    protected readonly syncParser: LangiumParser;\n\n    constructor(services: LangiumCoreServices) {\n        this.syncParser = services.parser.LangiumParser;\n    }\n\n    parse(text: string): Promise> {\n        return Promise.resolve(this.syncParser.parse(text));\n    }\n}\n\nexport abstract class AbstractThreadedAsyncParser implements AsyncParser {\n\n    /**\n     * The thread count determines how many threads are used to parse files in parallel.\n     * The default value is 8. 
Decreasing this value increases startup performance, but decreases parallel parsing performance.\n     */\n    protected threadCount = 8;\n    /**\n     * The termination delay determines how long the parser waits for a thread to finish after a cancellation request.\n     * The default value is 200(ms).\n     */\n    protected terminationDelay = 200;\n    protected workerPool: ParserWorker[] = [];\n    protected queue: Array> = [];\n\n    protected readonly hydrator: Hydrator;\n\n    constructor(services: LangiumCoreServices) {\n        this.hydrator = services.serializer.Hydrator;\n    }\n\n    protected initializeWorkers(): void {\n        while (this.workerPool.length < this.threadCount) {\n            const worker = this.createWorker();\n            worker.onReady(() => {\n                if (this.queue.length > 0) {\n                    const deferred = this.queue.shift();\n                    if (deferred) {\n                        worker.lock();\n                        deferred.resolve(worker);\n                    }\n                }\n            });\n            this.workerPool.push(worker);\n        }\n    }\n\n    async parse(text: string, cancelToken: CancellationToken): Promise> {\n        const worker = await this.acquireParserWorker(cancelToken);\n        const deferred = new Deferred>();\n        let timeout: NodeJS.Timeout | undefined;\n        // If the cancellation token is requested, we wait for a certain time before terminating the worker.\n        // Since the cancellation token lives longer than the parsing process, we need to dispose the event listener.\n        // Otherwise, we might accidentally terminate the worker after the parsing process has finished.\n        const cancellation = cancelToken.onCancellationRequested(() => {\n            timeout = setTimeout(() => {\n                this.terminateWorker(worker);\n            }, this.terminationDelay);\n        });\n        worker.parse(text).then(result => {\n            const hydrated = this.hydrator.hydrate(result);\n            deferred.resolve(hydrated);\n        }).catch(err => {\n            deferred.reject(err);\n        }).finally(() => {\n            cancellation.dispose();\n            clearTimeout(timeout);\n        });\n        return deferred.promise;\n    }\n\n    protected terminateWorker(worker: ParserWorker): void {\n        worker.terminate();\n        const index = this.workerPool.indexOf(worker);\n        if (index >= 0) {\n            this.workerPool.splice(index, 1);\n        }\n    }\n\n    protected async acquireParserWorker(cancelToken: CancellationToken): Promise {\n        this.initializeWorkers();\n        for (const worker of this.workerPool) {\n            if (worker.ready) {\n                worker.lock();\n                return worker;\n            }\n        }\n        const deferred = new Deferred();\n        cancelToken.onCancellationRequested(() => {\n            const index = this.queue.indexOf(deferred);\n            if (index >= 0) {\n                this.queue.splice(index, 1);\n            }\n            deferred.reject(OperationCancelled);\n        });\n        this.queue.push(deferred);\n        return deferred.promise;\n    }\n\n    protected abstract createWorker(): ParserWorker;\n}\n\nexport type WorkerMessagePost = (message: unknown) => void;\nexport type WorkerMessageCallback = (cb: (message: unknown) => void) => void;\n\nexport class ParserWorker {\n\n    protected readonly sendMessage: WorkerMessagePost;\n    protected readonly _terminate: () => void;\n    
protected readonly onReadyEmitter = new Emitter();\n\n    protected deferred = new Deferred();\n    protected _ready = true;\n    protected _parsing = false;\n\n    get ready(): boolean {\n        return this._ready;\n    }\n\n    get onReady(): Event {\n        return this.onReadyEmitter.event;\n    }\n\n    constructor(sendMessage: WorkerMessagePost, onMessage: WorkerMessageCallback, onError: WorkerMessageCallback, terminate: () => void) {\n        this.sendMessage = sendMessage;\n        this._terminate = terminate;\n        onMessage(result => {\n            const parseResult = result as ParseResult;\n            this.deferred.resolve(parseResult);\n            this.unlock();\n        });\n        onError(error => {\n            this.deferred.reject(error);\n            this.unlock();\n        });\n    }\n\n    terminate(): void {\n        this.deferred.reject(OperationCancelled);\n        this._terminate();\n    }\n\n    lock(): void {\n        this._ready = false;\n    }\n\n    unlock(): void {\n        this._parsing = false;\n        this._ready = true;\n        this.onReadyEmitter.fire();\n    }\n\n    parse(text: string): Promise {\n        if (this._parsing) {\n            throw new Error('Parser worker is busy');\n        }\n        this._parsing = true;\n        this.deferred = new Deferred();\n        this.sendMessage(text);\n        return this.deferred.promise;\n    }\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken, CancellationTokenSource } from '../utils/cancellation.js';\nimport { Deferred, isOperationCancelled, type MaybePromise } from '../utils/promise-utils.js';\n\n/**\n * Utility service to execute mutually exclusive actions.\n */\nexport interface WorkspaceLock {\n    /**\n     * Performs a single async action, like initializing the workspace or processing document changes.\n     * Only one action will be executed at a time.\n     *\n     * When another action is queued up, the token provided for the action will be cancelled.\n     * Assuming the action makes use of this token, the next action only has to wait for the current action to finish cancellation.\n     */\n    write(action: (token: CancellationToken) => MaybePromise): Promise;\n\n    /**\n     * Performs a single action, like computing completion results or providing workspace symbols.\n     * Read actions will only be executed after all write actions have finished. They will be executed in parallel if possible.\n     *\n     * If a write action is currently running, the read action will be queued up and executed afterwards.\n     * If a new write action is queued up while a read action is waiting, the write action will receive priority and will be handled before the read action.\n     *\n     * Note that read actions are not allowed to modify anything in the workspace. Please use {@link write} instead.\n     */\n    read(action: () => MaybePromise): Promise;\n\n    /**\n     * Cancels the last queued write action. 
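The ParserWorker above only wires four callbacks, so a concrete threaded parser can be sketched on top of it. The following sketch assumes a Node.js worker_threads environment and a caller-supplied worker script path; neither is part of the sources in this map.

import { Worker } from 'node:worker_threads';

class WorkerThreadAsyncParser extends AbstractThreadedAsyncParser {
    constructor(services: LangiumCoreServices, protected readonly workerPath: string) {
        super(services);
    }
    protected createWorker(): ParserWorker {
        const worker = new Worker(this.workerPath);
        return new ParserWorker(
            message => worker.postMessage(message), // forward the text to parse
            cb => worker.on('message', cb),         // dehydrated parse results come back here
            cb => worker.on('error', cb),
            () => worker.terminate()
        );
    }
}

In such a setup the worker script would run the synchronous LangiumParser and post back a dehydrated parse result, which the `parse` method of AbstractThreadedAsyncParser above turns back into a usable AST via `hydrator.hydrate`.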
All previous write actions already have been cancelled.\n     */\n    cancelWrite(): void;\n}\n\ntype LockAction = (token: CancellationToken) => MaybePromise;\n\ninterface LockEntry {\n    action: LockAction;\n    deferred: Deferred;\n    cancellationToken: CancellationToken;\n}\n\nexport class DefaultWorkspaceLock implements WorkspaceLock {\n\n    private previousTokenSource = new CancellationTokenSource();\n    private writeQueue: LockEntry[] = [];\n    private readQueue: LockEntry[] = [];\n    private done = true;\n\n    write(action: (token: CancellationToken) => MaybePromise): Promise {\n        this.cancelWrite();\n        const tokenSource = new CancellationTokenSource();\n        this.previousTokenSource = tokenSource;\n        return this.enqueue(this.writeQueue, action, tokenSource.token);\n    }\n\n    read(action: () => MaybePromise): Promise {\n        return this.enqueue(this.readQueue, action);\n    }\n\n    private enqueue(queue: LockEntry[], action: LockAction, cancellationToken?: CancellationToken): Promise {\n        const deferred = new Deferred();\n        const entry: LockEntry = {\n            action,\n            deferred,\n            cancellationToken: cancellationToken ?? CancellationToken.None\n        };\n        queue.push(entry);\n        this.performNextOperation();\n        return deferred.promise as Promise;\n    }\n\n    private async performNextOperation(): Promise {\n        if (!this.done) {\n            return;\n        }\n        const entries: LockEntry[] = [];\n        if (this.writeQueue.length > 0) {\n            // Just perform the next write action\n            entries.push(this.writeQueue.shift()!);\n        } else if (this.readQueue.length > 0) {\n            // Empty the read queue and perform all actions in parallel\n            entries.push(...this.readQueue.splice(0, this.readQueue.length));\n        } else {\n            return;\n        }\n        this.done = false;\n        await Promise.all(entries.map(async ({ action, deferred, cancellationToken }) => {\n            try {\n                // Move the execution of the action to the next event loop tick via `Promise.resolve()`\n                const result = await Promise.resolve().then(() => action(cancellationToken));\n                deferred.resolve(result);\n            } catch (err) {\n                if (isOperationCancelled(err)) {\n                    // If the operation was cancelled, we don't want to reject the promise\n                    deferred.resolve(undefined);\n                } else {\n                    deferred.reject(err);\n                }\n            }\n        }));\n        this.done = true;\n        this.performNextOperation();\n    }\n\n    cancelWrite(): void {\n        this.previousTokenSource.cancel();\n    }\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\nimport type { TokenType } from 'chevrotain';\nimport { CompositeCstNodeImpl, LeafCstNodeImpl, RootCstNodeImpl } from '../parser/cst-node-builder.js';\nimport { isAbstractElement, type AbstractElement, type Grammar } from '../languages/generated/ast.js';\nimport type { Linker } from '../references/linker.js';\nimport type { Lexer } from 
'../parser/lexer.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { Reference, AstNode, CstNode, LeafCstNode, GenericAstNode, Mutable, RootCstNode } from '../syntax-tree.js';\nimport { isRootCstNode, isCompositeCstNode, isLeafCstNode, isAstNode, isReference } from '../syntax-tree.js';\nimport { streamAst } from '../utils/ast-utils.js';\nimport { BiMap } from '../utils/collections.js';\nimport { streamCst } from '../utils/cst-utils.js';\n\n/**\n * The hydrator service is responsible for allowing AST parse results to be sent across worker threads.\n */\nexport interface Hydrator {\n    /**\n     * Converts a parse result to a plain object. The resulting object can be sent across worker threads.\n     */\n    dehydrate(result: ParseResult): ParseResult;\n    /**\n     * Converts a plain object to a parse result. The included AST node can then be used in the main thread.\n     * Calling this method on objects that have not been dehydrated first will result in undefined behavior.\n     */\n    hydrate(result: ParseResult): ParseResult;\n}\n\nexport interface DehydrateContext {\n    astNodes: Map;\n    cstNodes: Map;\n}\n\nexport interface HydrateContext {\n    astNodes: Map;\n    cstNodes: Map;\n}\n\nexport class DefaultHydrator implements Hydrator {\n\n    protected readonly grammar: Grammar;\n    protected readonly lexer: Lexer;\n    protected readonly linker: Linker;\n\n    protected readonly grammarElementIdMap = new BiMap();\n    protected readonly tokenTypeIdMap = new BiMap();\n\n    constructor(services: LangiumCoreServices) {\n        this.grammar = services.Grammar;\n        this.lexer = services.parser.Lexer;\n        this.linker = services.references.Linker;\n    }\n\n    dehydrate(result: ParseResult): ParseResult {\n        return {\n            // We need to create shallow copies of the errors\n            // The original errors inherit from the `Error` class, which is not transferable across worker threads\n            lexerErrors: result.lexerErrors.map(e => ({ ...e })),\n            parserErrors: result.parserErrors.map(e => ({ ...e })),\n            value: this.dehydrateAstNode(result.value, this.createDehyrationContext(result.value))\n        };\n    }\n\n    protected createDehyrationContext(node: AstNode): DehydrateContext {\n        const astNodes = new Map();\n        const cstNodes = new Map();\n        for (const astNode of streamAst(node)) {\n            astNodes.set(astNode, {});\n        }\n        if (node.$cstNode) {\n            for (const cstNode of streamCst(node.$cstNode)) {\n                cstNodes.set(cstNode, {});\n            }\n        }\n        return {\n            astNodes,\n            cstNodes\n        };\n    }\n\n    protected dehydrateAstNode(node: AstNode, context: DehydrateContext): object {\n        const obj = context.astNodes.get(node) as Record;\n        obj.$type = node.$type;\n        obj.$containerIndex = node.$containerIndex;\n        obj.$containerProperty = node.$containerProperty;\n        if (node.$cstNode !== undefined) {\n            obj.$cstNode = this.dehydrateCstNode(node.$cstNode, context);\n        }\n        for (const [name, value] of Object.entries(node)) {\n            if (name.startsWith('$')) {\n                continue;\n            }\n            if (Array.isArray(value)) {\n                const arr: any[] = [];\n                obj[name] = arr;\n                for (const item of value) {\n                    if 
(isAstNode(item)) {\n                        arr.push(this.dehydrateAstNode(item, context));\n                    } else if (isReference(item)) {\n                        arr.push(this.dehydrateReference(item, context));\n                    } else {\n                        arr.push(item);\n                    }\n                }\n            } else if (isAstNode(value)) {\n                obj[name] = this.dehydrateAstNode(value, context);\n            } else if (isReference(value)) {\n                obj[name] = this.dehydrateReference(value, context);\n            } else if (value !== undefined) {\n                obj[name] = value;\n            }\n        }\n        return obj;\n    }\n\n    protected dehydrateReference(reference: Reference, context: DehydrateContext): any {\n        const obj: Record = {};\n        obj.$refText = reference.$refText;\n        if (reference.$refNode) {\n            obj.$refNode = context.cstNodes.get(reference.$refNode);\n        }\n        return obj;\n    }\n\n    protected dehydrateCstNode(node: CstNode, context: DehydrateContext): any {\n        const cstNode = context.cstNodes.get(node) as Record;\n        if (isRootCstNode(node)) {\n            cstNode.fullText = node.fullText;\n        } else {\n            // Note: This returns undefined for hidden nodes (i.e. comments)\n            cstNode.grammarSource = this.getGrammarElementId(node.grammarSource);\n        }\n        cstNode.hidden = node.hidden;\n        cstNode.astNode = context.astNodes.get(node.astNode);\n        if (isCompositeCstNode(node)) {\n            cstNode.content = node.content.map(child => this.dehydrateCstNode(child, context));\n        } else if (isLeafCstNode(node)) {\n            cstNode.tokenType = node.tokenType.name;\n            cstNode.offset = node.offset;\n            cstNode.length = node.length;\n            cstNode.startLine = node.range.start.line;\n            cstNode.startColumn = node.range.start.character;\n            cstNode.endLine = node.range.end.line;\n            cstNode.endColumn = node.range.end.character;\n        }\n        return cstNode;\n    }\n\n    hydrate(result: ParseResult): ParseResult {\n        const node = result.value;\n        const context = this.createHydrationContext(node);\n        if ('$cstNode' in node) {\n            this.hydrateCstNode(node.$cstNode, context);\n        }\n        return {\n            lexerErrors: result.lexerErrors,\n            parserErrors: result.parserErrors,\n            value: this.hydrateAstNode(node, context) as T\n        };\n    }\n\n    protected createHydrationContext(node: any): HydrateContext {\n        const astNodes = new Map();\n        const cstNodes = new Map();\n        for (const astNode of streamAst(node)) {\n            astNodes.set(astNode, {} as AstNode);\n        }\n        let root: RootCstNode;\n        if (node.$cstNode) {\n            for (const cstNode of streamCst(node.$cstNode)) {\n                let cst: Mutable | undefined;\n                if ('fullText' in cstNode) {\n                    cst = new RootCstNodeImpl(cstNode.fullText as string);\n                    root = cst as RootCstNode;\n                } else if ('content' in cstNode) {\n                    cst = new CompositeCstNodeImpl();\n                } else if ('tokenType' in cstNode) {\n                    cst = this.hydrateCstLeafNode(cstNode);\n                }\n                if (cst) {\n                    cstNodes.set(cstNode, cst);\n                    cst.root = root!;\n                }\n            
}\n        }\n        return {\n            astNodes,\n            cstNodes\n        };\n    }\n\n    protected hydrateAstNode(node: any, context: HydrateContext): AstNode {\n        const astNode = context.astNodes.get(node) as Mutable;\n        astNode.$type = node.$type;\n        astNode.$containerIndex = node.$containerIndex;\n        astNode.$containerProperty = node.$containerProperty;\n        if (node.$cstNode) {\n            astNode.$cstNode = context.cstNodes.get(node.$cstNode);\n        }\n        for (const [name, value] of Object.entries(node)) {\n            if (name.startsWith('$')) {\n                continue;\n            }\n            if (Array.isArray(value)) {\n                const arr: unknown[] = [];\n                astNode[name] = arr;\n                for (const item of value) {\n                    if (isAstNode(item)) {\n                        arr.push(this.setParent(this.hydrateAstNode(item, context), astNode));\n                    } else if (isReference(item)) {\n                        arr.push(this.hydrateReference(item, astNode, name, context));\n                    } else {\n                        arr.push(item);\n                    }\n                }\n            } else if (isAstNode(value)) {\n                astNode[name] = this.setParent(this.hydrateAstNode(value, context), astNode);\n            } else if (isReference(value)) {\n                astNode[name] = this.hydrateReference(value, astNode, name, context);\n            } else if (value !== undefined) {\n                astNode[name] = value;\n            }\n        }\n        return astNode;\n    }\n\n    protected setParent(node: any, parent: any): any {\n        node.$container = parent as AstNode;\n        return node;\n    }\n\n    protected hydrateReference(reference: any, node: AstNode, name: string, context: HydrateContext): Reference {\n        return this.linker.buildReference(node, name, context.cstNodes.get(reference.$refNode)!, reference.$refText);\n    }\n\n    protected hydrateCstNode(cstNode: any, context: HydrateContext, num = 0): CstNode {\n        const cstNodeObj = context.cstNodes.get(cstNode) as Mutable;\n        if (typeof cstNode.grammarSource === 'number') {\n            cstNodeObj.grammarSource = this.getGrammarElement(cstNode.grammarSource);\n        }\n        cstNodeObj.astNode = context.astNodes.get(cstNode.astNode)!;\n        if (isCompositeCstNode(cstNodeObj)) {\n            for (const child of cstNode.content) {\n                const hydrated = this.hydrateCstNode(child, context, num++);\n                cstNodeObj.content.push(hydrated);\n            }\n        }\n        return cstNodeObj;\n    }\n\n    protected hydrateCstLeafNode(cstNode: any): LeafCstNode {\n        const tokenType = this.getTokenType(cstNode.tokenType);\n        const offset = cstNode.offset;\n        const length = cstNode.length;\n        const startLine = cstNode.startLine;\n        const startColumn = cstNode.startColumn;\n        const endLine = cstNode.endLine;\n        const endColumn = cstNode.endColumn;\n        const hidden = cstNode.hidden;\n        const node = new LeafCstNodeImpl(\n            offset,\n            length,\n            {\n                start: {\n                    line: startLine,\n                    character: startColumn\n                },\n                end: {\n                    line: endLine,\n                    character: endColumn\n                }\n            },\n            tokenType,\n            hidden\n        );\n        return 
node;\n    }\n\n    protected getTokenType(name: string): TokenType {\n        return this.lexer.definition[name];\n    }\n\n    protected getGrammarElementId(node: AbstractElement): number | undefined {\n        if (this.grammarElementIdMap.size === 0) {\n            this.createGrammarElementIdMap();\n        }\n        return this.grammarElementIdMap.get(node);\n    }\n\n    protected getGrammarElement(id: number): AbstractElement {\n        if (this.grammarElementIdMap.size === 0) {\n            this.createGrammarElementIdMap();\n        }\n        const element = this.grammarElementIdMap.getKey(id);\n        if (element) {\n            return element;\n        } else {\n            throw new Error('Invalid grammar element id: ' + id);\n        }\n    }\n\n    protected createGrammarElementIdMap(): void {\n        let id = 0;\n        for (const element of streamAst(this.grammar)) {\n            if (isAbstractElement(element)) {\n                this.grammarElementIdMap.set(element, id++);\n            }\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n******************************************************************************/\n\nimport type { Module } from './dependency-injection.js';\nimport type { LangiumDefaultCoreServices, LangiumDefaultSharedCoreServices, LangiumCoreServices, LangiumSharedCoreServices } from './services.js';\nimport type { FileSystemProvider } from './workspace/file-system-provider.js';\nimport { createGrammarConfig } from './languages/grammar-config.js';\nimport { createCompletionParser } from './parser/completion-parser-builder.js';\nimport { createLangiumParser } from './parser/langium-parser-builder.js';\nimport { DefaultTokenBuilder } from './parser/token-builder.js';\nimport { DefaultValueConverter } from './parser/value-converter.js';\nimport { DefaultLinker } from './references/linker.js';\nimport { DefaultNameProvider } from './references/name-provider.js';\nimport { DefaultReferences } from './references/references.js';\nimport { DefaultScopeComputation } from './references/scope-computation.js';\nimport { DefaultScopeProvider } from './references/scope-provider.js';\nimport { DefaultJsonSerializer } from './serializer/json-serializer.js';\nimport { DefaultServiceRegistry } from './service-registry.js';\nimport { DefaultDocumentValidator } from './validation/document-validator.js';\nimport { ValidationRegistry } from './validation/validation-registry.js';\nimport { DefaultAstNodeDescriptionProvider, DefaultReferenceDescriptionProvider } from './workspace/ast-descriptions.js';\nimport { DefaultAstNodeLocator } from './workspace/ast-node-locator.js';\nimport { DefaultConfigurationProvider } from './workspace/configuration.js';\nimport { DefaultDocumentBuilder } from './workspace/document-builder.js';\nimport { DefaultLangiumDocumentFactory, DefaultLangiumDocuments } from './workspace/documents.js';\nimport { DefaultIndexManager } from './workspace/index-manager.js';\nimport { DefaultWorkspaceManager } from './workspace/workspace-manager.js';\nimport { DefaultLexer } from './parser/lexer.js';\nimport { JSDocDocumentationProvider } from './documentation/documentation-provider.js';\nimport { DefaultCommentProvider } from './documentation/comment-provider.js';\nimport { LangiumParserErrorMessageProvider } from 
'./parser/langium-parser.js';\nimport { DefaultAsyncParser } from './parser/async-parser.js';\nimport { DefaultWorkspaceLock } from './workspace/workspace-lock.js';\nimport { DefaultHydrator } from './serializer/hydrator.js';\n\n/**\n * Context required for creating the default language-specific dependency injection module.\n */\nexport interface DefaultCoreModuleContext {\n    shared: LangiumSharedCoreServices;\n}\n\n/**\n * Creates a dependency injection module configuring the default core services.\n * This is a set of services that are dedicated to a specific language.\n */\nexport function createDefaultCoreModule(context: DefaultCoreModuleContext): Module {\n    return {\n        documentation: {\n            CommentProvider: (services) => new DefaultCommentProvider(services),\n            DocumentationProvider: (services) => new JSDocDocumentationProvider(services)\n        },\n        parser: {\n            AsyncParser: (services) => new DefaultAsyncParser(services),\n            GrammarConfig: (services) => createGrammarConfig(services),\n            LangiumParser: (services) => createLangiumParser(services),\n            CompletionParser: (services) => createCompletionParser(services),\n            ValueConverter: () => new DefaultValueConverter(),\n            TokenBuilder: () => new DefaultTokenBuilder(),\n            Lexer: (services) => new DefaultLexer(services),\n            ParserErrorMessageProvider: () => new LangiumParserErrorMessageProvider()\n        },\n        workspace: {\n            AstNodeLocator: () => new DefaultAstNodeLocator(),\n            AstNodeDescriptionProvider: (services) => new DefaultAstNodeDescriptionProvider(services),\n            ReferenceDescriptionProvider: (services) => new DefaultReferenceDescriptionProvider(services)\n        },\n        references: {\n            Linker: (services) => new DefaultLinker(services),\n            NameProvider: () => new DefaultNameProvider(),\n            ScopeProvider: (services) => new DefaultScopeProvider(services),\n            ScopeComputation: (services) => new DefaultScopeComputation(services),\n            References: (services) => new DefaultReferences(services)\n        },\n        serializer: {\n            Hydrator: (services) => new DefaultHydrator(services),\n            JsonSerializer: (services) => new DefaultJsonSerializer(services)\n        },\n        validation: {\n            DocumentValidator: (services) => new DefaultDocumentValidator(services),\n            ValidationRegistry: (services) => new ValidationRegistry(services)\n        },\n        shared: () => context.shared\n    };\n}\n\n/**\n * Context required for creating the default shared dependency injection module.\n */\nexport interface DefaultSharedCoreModuleContext {\n    /**\n     * Factory function to create a {@link FileSystemProvider}.\n     *\n     * Langium exposes an `EmptyFileSystem` and `NodeFileSystem`, exported through `langium/node`.\n     * When running Langium as part of a vscode language server or a Node.js app, using the `NodeFileSystem` is recommended,\n     * the `EmptyFileSystem` in every other use case.\n     */\n    fileSystemProvider: (services: LangiumSharedCoreServices) => FileSystemProvider;\n}\n\n/**\n * Creates a dependency injection module configuring the default shared core services.\n * This is the set of services that are shared between multiple languages.\n */\nexport function createDefaultSharedCoreModule(context: DefaultSharedCoreModuleContext): Module {\n    return {\n        ServiceRegistry: () 
=> new DefaultServiceRegistry(),\n        workspace: {\n            LangiumDocuments: (services) => new DefaultLangiumDocuments(services),\n            LangiumDocumentFactory: (services) => new DefaultLangiumDocumentFactory(services),\n            DocumentBuilder: (services) => new DefaultDocumentBuilder(services),\n            IndexManager: (services) => new DefaultIndexManager(services),\n            WorkspaceManager: (services) => new DefaultWorkspaceManager(services),\n            FileSystemProvider: (services) => context.fileSystemProvider(services),\n            WorkspaceLock: () => new DefaultWorkspaceLock(),\n            ConfigurationProvider: (services) => new DefaultConfigurationProvider(services)\n        }\n    };\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\n/**\n * A `Module` is a description of possibly grouped service factories.\n *\n * Given a type I = { group: { service: A } },\n * Module := { group: { service: (injector: I) => A } }\n *\n * Making `I` available during the creation of `I` allows us to create cyclic\n * dependencies.\n */\nexport type Module = {\n    [K in keyof T]: Module | ((injector: I) => T[K])\n}\n\nexport namespace Module {\n    export const merge = (m1: Module, m2: Module) => (_merge(_merge({}, m1), m2) as Module);\n}\n\n/**\n * Given a set of modules, the inject function returns a lazily evaluated injector\n * that injects dependencies into the requested service when it is requested the\n * first time. Subsequent requests will return the same service.\n *\n * In the case of cyclic dependencies, an Error will be thrown. This can be fixed\n * by injecting a provider `() => T` instead of a `T`.\n *\n * Please note that the arguments may be objects or arrays. However, the result will\n * be an object. Using it with for..of will have no effect.\n *\n * @param module1 first Module\n * @param module2 (optional) second Module\n * @param module3 (optional) third Module\n * @param module4 (optional) fourth Module\n * @param module5 (optional) fifth Module\n * @param module6 (optional) sixth Module\n * @param module7 (optional) seventh Module\n * @param module8 (optional) eighth Module\n * @param module9 (optional) ninth Module\n * @returns a new object of type I\n */\nexport function inject(\n    module1: Module, module2?: Module, module3?: Module, module4?: Module, module5?: Module, module6?: Module, module7?: Module, module8?: Module, module9?: Module\n): I {\n    const module = [module1, module2, module3, module4, module5, module6, module7, module8, module9].reduce(_merge, {}) as Module;\n    return _inject(module);\n}\n\nconst isProxy = Symbol('isProxy');\n\n/**\n * Eagerly load all services in the given dependency injection container. This is sometimes\n * necessary because services can register event listeners in their constructors.\n */\nexport function eagerLoad(item: T): T {\n    if (item && (item as any)[isProxy]) {\n        for (const value of Object.values(item)) {\n            eagerLoad(value);\n        }\n    }\n    return item;\n}\n\n/**\n * Helper function that returns an injector by creating a proxy.\n * Invariant: injector is of type I. 
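As the `inject` documentation above notes, a dependency cycle can be broken by injecting a provider `() => T` instead of a `T`. A small illustrative sketch follows; the service names are invented, and it assumes the generic form `Module<I>` of the type shown above (the bundled listing elides the type parameters).

interface Services {
    A: ServiceA;
    B: ServiceB;
}
class ServiceA {
    constructor(readonly getB: () => ServiceB) {}
}
class ServiceB {
    constructor(readonly a: ServiceA) {}
}
const module: Module<Services> = {
    // A only receives a lazy provider for B, so resolving A does not resolve B eagerly
    A: (injector) => new ServiceA(() => injector.B),
    B: (injector) => new ServiceB(injector.A)
};
const services = inject(module);
// Factories are cached, so both accesses resolve to the same instance
console.log(services.B.a.getB() === services.B); // true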
If injector is undefined, then T = I.\n */\nfunction _inject(module: Module, injector?: any): T {\n    const proxy: any = new Proxy({} as any, {\n        deleteProperty: () => false,\n        get: (obj, prop) => _resolve(obj, prop, module, injector || proxy),\n        getOwnPropertyDescriptor: (obj, prop) => (_resolve(obj, prop, module, injector || proxy), Object.getOwnPropertyDescriptor(obj, prop)), // used by for..in\n        has: (_, prop) => prop in module, // used by ..in..\n        ownKeys: () => [...Reflect.ownKeys(module), isProxy] // used by for..in\n    });\n    proxy[isProxy] = true;\n    return proxy;\n}\n\n/**\n * Internally used to tag a requested dependency, directly before calling the factory.\n * This allows us to find cycles during instance creation.\n */\nconst __requested__ = Symbol();\n\n/**\n * Returns the value `obj[prop]`. If the value does not exist, yet, it is resolved from\n * the module description. The result of service factories is cached. Groups are\n * recursively proxied.\n *\n * @param obj an object holding all group proxies and services\n * @param prop the key of a value within obj\n * @param module an object containing groups and service factories\n * @param injector the first level proxy that provides access to all values\n * @returns the requested value `obj[prop]`\n * @throws Error if a dependency cycle is detected\n */\nfunction _resolve(obj: any, prop: string | symbol | number, module: Module, injector: I): T[keyof T] | undefined {\n    if (prop in obj) {\n        if (obj[prop] instanceof Error) {\n            throw new Error('Construction failure. Please make sure that your dependencies are constructable.', {cause: obj[prop]});\n        }\n        if (obj[prop] === __requested__) {\n            throw new Error('Cycle detected. Please make \"' + String(prop) + '\" lazy. See https://langium.org/docs/configuration-services/#resolving-cyclic-dependencies');\n        }\n        return obj[prop];\n    } else if (prop in module) {\n        const value: Module | ((injector: I) => T[keyof T]) = module[prop as keyof T];\n        obj[prop] = __requested__;\n        try {\n            obj[prop] = (typeof value === 'function') ? value(injector) : _inject(value, injector);\n        } catch (error) {\n            obj[prop] = error instanceof Error ? 
error : undefined;\n            throw error;\n        }\n        return obj[prop];\n    } else {\n        return undefined;\n    }\n}\n\n/**\n * Performs a deep-merge of two modules by writing source entries into the target module.\n *\n * @param target the module which is written\n * @param source the module which is read\n * @returns the target module\n */\nfunction _merge(target: Module, source?: Module): Module {\n    if (source) {\n        for (const [key, value2] of Object.entries(source)) {\n            if (value2 !== undefined) {\n                const value1 = target[key];\n                if (value1 !== null && value2 !== null && typeof value1 === 'object' && typeof value2 === 'object') {\n                    target[key] = _merge(value1, value2);\n                } else {\n                    target[key] = value2;\n                }\n            }\n        }\n    }\n    return target;\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport * from './caching.js';\nexport * from './event.js';\nexport * from './collections.js';\nexport * from './disposable.js';\nexport * from './errors.js';\nexport * from './grammar-loader.js';\nexport * from './promise-utils.js';\nexport * from './stream.js';\nexport * from './uri-utils.js';\n\nimport * as AstUtils from './ast-utils.js';\nimport * as Cancellation from './cancellation.js';\nimport * as CstUtils from './cst-utils.js';\nimport * as GrammarUtils from './grammar-utils.js';\nimport * as RegExpUtils from './regexp-utils.js';\nexport { AstUtils, Cancellation, CstUtils, GrammarUtils, RegExpUtils };\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { URI } from '../utils/uri-utils.js';\n\nexport interface FileSystemNode {\n    readonly isFile: boolean;\n    readonly isDirectory: boolean;\n    readonly uri: URI;\n}\n\nexport type FileSystemFilter = (node: FileSystemNode) => boolean;\n\n/**\n * Provides methods to interact with an abstract file system. 
The default implementation is based on the node.js `fs` API.\n */\nexport interface FileSystemProvider {\n    /**\n     * Reads a document asynchronously from a given URI.\n     * @returns The string content of the file with the specified URI.\n     */\n    readFile(uri: URI): Promise;\n    /**\n     * Reads the directory information for the given URI.\n     * @returns The list of file system entries that are contained within the specified directory.\n     */\n    readDirectory(uri: URI): Promise;\n}\n\nexport class EmptyFileSystemProvider implements FileSystemProvider {\n\n    readFile(): Promise {\n        throw new Error('No file system is available.');\n    }\n\n    async readDirectory(): Promise {\n        return [];\n    }\n\n}\n\nexport const EmptyFileSystem = {\n    fileSystemProvider: () => new EmptyFileSystemProvider()\n};\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { createDefaultCoreModule, createDefaultSharedCoreModule } from '../default-module.js';\nimport type { Module } from '../dependency-injection.js';\nimport { inject } from '../dependency-injection.js';\nimport * as ast from '../languages/generated/ast.js';\nimport type { LangiumCoreServices, LangiumSharedCoreServices, PartialLangiumCoreServices, PartialLangiumSharedCoreServices } from '../services.js';\nimport type { Mutable } from '../syntax-tree.js';\nimport { EmptyFileSystem } from '../workspace/file-system-provider.js';\nimport { URI } from './uri-utils.js';\n\nconst minimalGrammarModule: Module = {\n    Grammar: () => undefined as unknown as ast.Grammar,\n    LanguageMetaData: () => ({\n        caseInsensitive: false,\n        fileExtensions: ['.langium'],\n        languageId: 'langium'\n    })\n};\n\nconst minimalSharedGrammarModule: Module = {\n    AstReflection: () => new ast.LangiumGrammarAstReflection()\n};\n\nfunction createMinimalGrammarServices(): LangiumCoreServices {\n    const shared = inject(\n        createDefaultSharedCoreModule(EmptyFileSystem),\n        minimalSharedGrammarModule\n    );\n    const grammar = inject(\n        createDefaultCoreModule({ shared }),\n        minimalGrammarModule\n    );\n    shared.ServiceRegistry.register(grammar);\n    return grammar;\n}\n\n/**\n * Load a Langium grammar for your language from a JSON string. This is used by several services,\n * most notably the parser builder which interprets the grammar to create a parser.\n */\nexport function loadGrammarFromJson(json: string): ast.Grammar {\n    const services = createMinimalGrammarServices();\n    const astNode = services.serializer.JsonSerializer.deserialize(json) as Mutable;\n    services.shared.workspace.LangiumDocumentFactory.fromModel(astNode, URI.parse(`memory://${astNode.name ?? 
'grammar'}.langium`));\n    return astNode;\n}\n", "var __defProp = Object.defineProperty;\nvar __name = (target, value) => __defProp(target, \"name\", { value, configurable: true });\n\n// src/language/generated/ast.ts\nimport { AbstractAstReflection } from \"langium\";\nvar Statement = \"Statement\";\nvar Architecture = \"Architecture\";\nfunction isArchitecture(item) {\n  return reflection.isInstance(item, Architecture);\n}\n__name(isArchitecture, \"isArchitecture\");\nvar Branch = \"Branch\";\nfunction isBranch(item) {\n  return reflection.isInstance(item, Branch);\n}\n__name(isBranch, \"isBranch\");\nvar Checkout = \"Checkout\";\nvar CherryPicking = \"CherryPicking\";\nvar Commit = \"Commit\";\nfunction isCommit(item) {\n  return reflection.isInstance(item, Commit);\n}\n__name(isCommit, \"isCommit\");\nvar Common = \"Common\";\nfunction isCommon(item) {\n  return reflection.isInstance(item, Common);\n}\n__name(isCommon, \"isCommon\");\nvar GitGraph = \"GitGraph\";\nfunction isGitGraph(item) {\n  return reflection.isInstance(item, GitGraph);\n}\n__name(isGitGraph, \"isGitGraph\");\nvar Info = \"Info\";\nfunction isInfo(item) {\n  return reflection.isInstance(item, Info);\n}\n__name(isInfo, \"isInfo\");\nvar Merge = \"Merge\";\nfunction isMerge(item) {\n  return reflection.isInstance(item, Merge);\n}\n__name(isMerge, \"isMerge\");\nvar Packet = \"Packet\";\nfunction isPacket(item) {\n  return reflection.isInstance(item, Packet);\n}\n__name(isPacket, \"isPacket\");\nvar PacketBlock = \"PacketBlock\";\nfunction isPacketBlock(item) {\n  return reflection.isInstance(item, PacketBlock);\n}\n__name(isPacketBlock, \"isPacketBlock\");\nvar Pie = \"Pie\";\nfunction isPie(item) {\n  return reflection.isInstance(item, Pie);\n}\n__name(isPie, \"isPie\");\nvar PieSection = \"PieSection\";\nfunction isPieSection(item) {\n  return reflection.isInstance(item, PieSection);\n}\n__name(isPieSection, \"isPieSection\");\nvar Direction = \"Direction\";\nvar MermaidAstReflection = class extends AbstractAstReflection {\n  static {\n    __name(this, \"MermaidAstReflection\");\n  }\n  getAllTypes() {\n    return [\"Architecture\", \"Branch\", \"Checkout\", \"CherryPicking\", \"Commit\", \"Common\", \"Direction\", \"Edge\", \"GitGraph\", \"Group\", \"Info\", \"Junction\", \"Merge\", \"Packet\", \"PacketBlock\", \"Pie\", \"PieSection\", \"Service\", \"Statement\"];\n  }\n  computeIsSubtype(subtype, supertype) {\n    switch (subtype) {\n      case Branch:\n      case Checkout:\n      case CherryPicking:\n      case Commit:\n      case Merge: {\n        return this.isSubtype(Statement, supertype);\n      }\n      case Direction: {\n        return this.isSubtype(GitGraph, supertype);\n      }\n      default: {\n        return false;\n      }\n    }\n  }\n  getReferenceType(refInfo) {\n    const referenceId = `${refInfo.container.$type}:${refInfo.property}`;\n    switch (referenceId) {\n      default: {\n        throw new Error(`${referenceId} is not a valid reference id.`);\n      }\n    }\n  }\n  getTypeMetaData(type) {\n    switch (type) {\n      case \"Architecture\": {\n        return {\n          name: \"Architecture\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"edges\", defaultValue: [] },\n            { name: \"groups\", defaultValue: [] },\n            { name: \"junctions\", defaultValue: [] },\n            { name: \"services\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case 
\"Branch\": {\n        return {\n          name: \"Branch\",\n          properties: [\n            { name: \"name\" },\n            { name: \"order\" }\n          ]\n        };\n      }\n      case \"Checkout\": {\n        return {\n          name: \"Checkout\",\n          properties: [\n            { name: \"branch\" }\n          ]\n        };\n      }\n      case \"CherryPicking\": {\n        return {\n          name: \"CherryPicking\",\n          properties: [\n            { name: \"id\" },\n            { name: \"parent\" },\n            { name: \"tags\", defaultValue: [] }\n          ]\n        };\n      }\n      case \"Commit\": {\n        return {\n          name: \"Commit\",\n          properties: [\n            { name: \"id\" },\n            { name: \"message\" },\n            { name: \"tags\", defaultValue: [] },\n            { name: \"type\" }\n          ]\n        };\n      }\n      case \"Common\": {\n        return {\n          name: \"Common\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Edge\": {\n        return {\n          name: \"Edge\",\n          properties: [\n            { name: \"lhsDir\" },\n            { name: \"lhsGroup\", defaultValue: false },\n            { name: \"lhsId\" },\n            { name: \"lhsInto\", defaultValue: false },\n            { name: \"rhsDir\" },\n            { name: \"rhsGroup\", defaultValue: false },\n            { name: \"rhsId\" },\n            { name: \"rhsInto\", defaultValue: false },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"GitGraph\": {\n        return {\n          name: \"GitGraph\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"statements\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Group\": {\n        return {\n          name: \"Group\",\n          properties: [\n            { name: \"icon\" },\n            { name: \"id\" },\n            { name: \"in\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Info\": {\n        return {\n          name: \"Info\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Junction\": {\n        return {\n          name: \"Junction\",\n          properties: [\n            { name: \"id\" },\n            { name: \"in\" }\n          ]\n        };\n      }\n      case \"Merge\": {\n        return {\n          name: \"Merge\",\n          properties: [\n            { name: \"branch\" },\n            { name: \"id\" },\n            { name: \"tags\", defaultValue: [] },\n            { name: \"type\" }\n          ]\n        };\n      }\n      case \"Packet\": {\n        return {\n          name: \"Packet\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"blocks\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"PacketBlock\": {\n        return {\n          name: \"PacketBlock\",\n          properties: [\n            { name: \"end\" },\n            { name: \"label\" },\n            { name: \"start\" }\n          ]\n        };\n      }\n      case \"Pie\": {\n        return {\n          name: \"Pie\",\n          
properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"sections\", defaultValue: [] },\n            { name: \"showData\", defaultValue: false },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"PieSection\": {\n        return {\n          name: \"PieSection\",\n          properties: [\n            { name: \"label\" },\n            { name: \"value\" }\n          ]\n        };\n      }\n      case \"Service\": {\n        return {\n          name: \"Service\",\n          properties: [\n            { name: \"icon\" },\n            { name: \"iconText\" },\n            { name: \"id\" },\n            { name: \"in\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Direction\": {\n        return {\n          name: \"Direction\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"dir\" },\n            { name: \"statements\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      default: {\n        return {\n          name: type,\n          properties: []\n        };\n      }\n    }\n  }\n};\nvar reflection = new MermaidAstReflection();\n\n// src/language/generated/grammar.ts\nimport { loadGrammarFromJson } from \"langium\";\nvar loadedInfoGrammar;\nvar InfoGrammar = /* @__PURE__ */ __name(() => loadedInfoGrammar ?? (loadedInfoGrammar = loadGrammarFromJson('{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Info\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Info\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"info\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"showInfo\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[],\"cardinality\":\"?\"}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],
\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}')), \"InfoGrammar\");\nvar loadedPacketGrammar;\nvar PacketGrammar = /* @__PURE__ */ __name(() => loadedPacketGrammar ?? 
(loadedPacketGrammar = loadGrammarFromJson(`{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Packet\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Packet\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"packet-beta\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"blocks\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"*\"}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"Assignment\",\"feature\":\"blocks\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"+\"}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"PacketBlock\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"start\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"-\"},{\"$type\":\"Assignment\",\"feature\":\"end\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"Keyword\",\"value\":\":\"},{\"$type\":\"Assignment\",\"feature\":\"label\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"INT\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/0|[1-9][0-9]*/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"STRING\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]*\\\\\"|'[^']*'/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@7\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@8\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\
":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}`)), \"PacketGrammar\");\nvar loadedPieGrammar;\nvar PieGrammar = /* @__PURE__ */ __name(() => loadedPieGrammar ?? 
(loadedPieGrammar = loadGrammarFromJson('{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Pie\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Pie\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"pie\"},{\"$type\":\"Assignment\",\"feature\":\"showData\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"Keyword\",\"value\":\"showData\"},\"cardinality\":\"?\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"sections\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"*\"}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"Assignment\",\"feature\":\"sections\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"+\"}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"PieSection\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"label\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}},{\"$type\":\"Keyword\",\"value\":\":\"},{\"$type\":\"Assignment\",\"feature\":\"value\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"PIE_SECTION_LABEL\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]+\\\\\"/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"PIE_SECTION_VALUE\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/(0|[1-9][0-9]*)(\\\\\\\\.[0-9]+)?/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@7\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@8\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"e
lements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}')), \"PieGrammar\");\nvar loadedArchitectureGrammar;\nvar ArchitectureGrammar = /* @__PURE__ */ __name(() => loadedArchitectureGrammar ?? 
(loadedArchitectureGrammar = loadGrammarFromJson('{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Architecture\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Architecture\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"architecture-beta\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@16\"},\"arguments\":[]}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[],\"cardinality\":\"*\"}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Statement\",\"fragment\":true,\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"groups\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"services\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"junctions\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@7\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"edges\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@8\"},\"arguments\":[]}}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"LeftPort\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\":\"},{\"$type\":\"Assignment\",\"feature\":\"lhsDir\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"RightPort\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"rhsDir\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}},{\"$type\":\"Keyword\",\"value\":\":\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Arrow\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"lhsInto\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@15\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"--\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"-\"},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@13\"},\"arguments\":[]}},{\"$type\":\"Keyword\",\"value\":\"-\"}]
}]},{\"$type\":\"Assignment\",\"feature\":\"rhsInto\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@15\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Group\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"group\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"icon\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@12\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@13\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"in\"},{\"$type\":\"Assignment\",\"feature\":\"in\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Service\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"service\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"iconText\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@11\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"icon\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@12\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@13\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"in\"},{\"$type\":\"Assignment\",\"feature\":\"in\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Junction\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"junction\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"in\"},{\"$type\":\"Assignment\",\"feature\":\"in\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},
{\"$type\":\"ParserRule\",\"name\":\"Edge\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"lhsId\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"lhsGroup\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@14\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"rhsId\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@10\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"rhsGroup\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@14\"},\"arguments\":[]},\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARROW_DIRECTION\",\"definition\":{\"$type\":\"TerminalAlternatives\",\"elements\":[{\"$type\":\"TerminalAlternatives\",\"elements\":[{\"$type\":\"TerminalAlternatives\",\"elements\":[{\"$type\":\"CharacterRange\",\"left\":{\"$type\":\"Keyword\",\"value\":\"L\"}},{\"$type\":\"CharacterRange\",\"left\":{\"$type\":\"Keyword\",\"value\":\"R\"}}]},{\"$type\":\"CharacterRange\",\"left\":{\"$type\":\"Keyword\",\"value\":\"T\"}}]},{\"$type\":\"CharacterRange\",\"left\":{\"$type\":\"Keyword\",\"value\":\"B\"}}]},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARCH_ID\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\w]+/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARCH_TEXT_ICON\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\(\\\\\"[^\\\\\"]+\\\\\"\\\\\\\\)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARCH_ICON\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\([\\\\\\\\w-:]+\\\\\\\\)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARCH_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\[[\\\\\\\\w 
]+\\\\\\\\]/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARROW_GROUP\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\{group\\\\\\\\}/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ARROW_INTO\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/<|>/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@21\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t 
]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}')), \"ArchitectureGrammar\");\nvar loadedGitGraphGrammar;\nvar GitGraphGrammar = /* @__PURE__ */ __name(() => loadedGitGraphGrammar ?? (loadedGitGraphGrammar = loadGrammarFromJson(`{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"GitGraph\",\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t 
][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false},{\"$type\":\"ParserRule\",\"name\":\"GitGraph\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"Keyword\",\"value\":\":\"}]},{\"$type\":\"Keyword\",\"value\":\"gitGraph:\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@12\"},\"arguments\":[]},{\"$type\":\"Keyword\",\"value\":\":\"}]}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@0\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"statements\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@11\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Statement\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@13\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@14\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@15\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@16\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Direction\",\"definition\":{\"$type\":\"Assignment\",\"feature\":\"dir\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"LR\"},{\"$type\":\"Keyword\",\"value\":\"TB\"},{\"$type\":\"Keyword\",\"value\":\"BT\"}]}},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Commit\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"commit\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group
\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"msg:\",\"cardinality\":\"?\"},{\"$type\":\"Assignment\",\"feature\":\"message\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"type:\"},{\"$type\":\"Assignment\",\"feature\":\"type\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"NORMAL\"},{\"$type\":\"Keyword\",\"value\":\"REVERSE\"},{\"$type\":\"Keyword\",\"value\":\"HIGHLIGHT\"}]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Branch\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"branch\"},{\"$type\":\"Assignment\",\"feature\":\"name\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"order:\"},{\"$type\":\"Assignment\",\"feature\":\"order\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Merge\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"merge\"},{\"$type\":\"Assignment\",\"feature\":\"branch\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"type:\"},{\"$type\":\"Assignment\",\"feature\":\"type\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"NORMAL\"},{\"$type\":\"Keyword\",\"value\":\"REVERSE\"},{\"$type\":\"Keyword\",\"value\":\"HIGHLIGHT\"}]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"defines
HiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Checkout\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"checkout\"},{\"$type\":\"Keyword\",\"value\":\"switch\"}]},{\"$type\":\"Assignment\",\"feature\":\"branch\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"CherryPicking\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"cherry-pick\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"parent:\"},{\"$type\":\"Assignment\",\"feature\":\"parent\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"INT\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[0-9]+(?=\\\\\\\\s)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ID\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"string\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\w([-\\\\\\\\./\\\\\\\\w]*[-\\\\\\\\w])?/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"STRING\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]*\\\\\"|'[^']*'/\"},\"fragment\":false,\"hidden\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"imports\":[],\"types\":[],\"usedGrammars\":[]}`)), \"GitGraphGrammar\");\n\n// src/language/generated/module.ts\nvar InfoLanguageMetaData = {\n  languageId: \"info\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar PacketLanguageMetaData = {\n  languageId: \"packet\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar PieLanguageMetaData = {\n  languageId: \"pie\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar ArchitectureLanguageMetaData = {\n  languageId: \"architecture\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar GitGraphLanguageMetaData = {\n  languageId: \"gitGraph\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar MermaidGeneratedSharedModule = {\n  AstReflection: /* @__PURE__ */ __name(() => new MermaidAstReflection(), 
\"AstReflection\")\n};\nvar InfoGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => InfoGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => InfoLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar PacketGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => PacketGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => PacketLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar PieGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => PieGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => PieLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar ArchitectureGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => ArchitectureGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => ArchitectureLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar GitGraphGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => GitGraphGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => GitGraphLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\n\n// src/language/common/valueConverter.ts\nimport { DefaultValueConverter } from \"langium\";\n\n// src/language/common/matcher.ts\nvar accessibilityDescrRegex = /accDescr(?:[\\t ]*:([^\\n\\r]*)|\\s*{([^}]*)})/;\nvar accessibilityTitleRegex = /accTitle[\\t ]*:([^\\n\\r]*)/;\nvar titleRegex = /title([\\t ][^\\n\\r]*|)/;\n\n// src/language/common/valueConverter.ts\nvar rulesRegexes = {\n  ACC_DESCR: accessibilityDescrRegex,\n  ACC_TITLE: accessibilityTitleRegex,\n  TITLE: titleRegex\n};\nvar AbstractMermaidValueConverter = class extends DefaultValueConverter {\n  static {\n    __name(this, \"AbstractMermaidValueConverter\");\n  }\n  runConverter(rule, input, cstNode) {\n    let value = this.runCommonConverter(rule, input, cstNode);\n    if (value === void 0) {\n      value = this.runCustomConverter(rule, input, cstNode);\n    }\n    if (value === void 0) {\n      return super.runConverter(rule, input, cstNode);\n    }\n    return value;\n  }\n  runCommonConverter(rule, input, _cstNode) {\n    const regex = rulesRegexes[rule.name];\n    if (regex === void 0) {\n      return void 0;\n    }\n    const match = regex.exec(input);\n    if (match === null) {\n      return void 0;\n    }\n    if (match[1] !== void 0) {\n      return match[1].trim().replace(/[\\t ]{2,}/gm, \" \");\n    }\n    if (match[2] !== void 0) {\n      return match[2].replace(/^\\s*/gm, \"\").replace(/\\s+$/gm, \"\").replace(/[\\t ]{2,}/gm, \" \").replace(/[\\n\\r]{2,}/gm, \"\\n\");\n    }\n    return void 0;\n  }\n};\nvar CommonValueConverter = class extends AbstractMermaidValueConverter {\n  static {\n    __name(this, \"CommonValueConverter\");\n  }\n  runCustomConverter(_rule, _input, _cstNode) {\n    return void 0;\n  }\n};\n\n// src/language/common/tokenBuilder.ts\nimport { DefaultTokenBuilder } from \"langium\";\nvar AbstractMermaidTokenBuilder = class extends DefaultTokenBuilder {\n  static {\n    __name(this, \"AbstractMermaidTokenBuilder\");\n  }\n  constructor(keywords) {\n    super();\n    this.keywords = new Set(keywords);\n  }\n  buildKeywordTokens(rules, terminalTokens, options) {\n    const tokenTypes = super.buildKeywordTokens(rules, terminalTokens, options);\n    tokenTypes.forEach((tokenType) => {\n      if (this.keywords.has(tokenType.name) && tokenType.PATTERN !== void 0) {\n        tokenType.PATTERN = new RegExp(tokenType.PATTERN.toString() + \"(?:(?=%%)|(?!\\\\S))\");\n      }\n    });\n    return 
tokenTypes;\n  }\n};\nvar CommonTokenBuilder = class extends AbstractMermaidTokenBuilder {\n  static {\n    __name(this, \"CommonTokenBuilder\");\n  }\n};\n\nexport {\n  __name,\n  Statement,\n  Architecture,\n  isArchitecture,\n  Branch,\n  isBranch,\n  Commit,\n  isCommit,\n  isCommon,\n  GitGraph,\n  isGitGraph,\n  Info,\n  isInfo,\n  Merge,\n  isMerge,\n  Packet,\n  isPacket,\n  PacketBlock,\n  isPacketBlock,\n  Pie,\n  isPie,\n  PieSection,\n  isPieSection,\n  MermaidGeneratedSharedModule,\n  InfoGeneratedModule,\n  PacketGeneratedModule,\n  PieGeneratedModule,\n  ArchitectureGeneratedModule,\n  GitGraphGeneratedModule,\n  AbstractMermaidValueConverter,\n  CommonValueConverter,\n  AbstractMermaidTokenBuilder,\n  CommonTokenBuilder\n};\n"],
  "mappings": "qdAAA,IAAAA,GAAAC,GAAAC,IAAA,cAKA,OAAO,eAAeA,GAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAC5D,IAAIC,GACJ,SAASC,IAAM,CACX,GAAID,KAAS,OACT,MAAM,IAAI,MAAM,wCAAwC,EAE5D,OAAOA,EACX,CALSE,EAAAD,GAAA,QAMR,SAAUA,EAAK,CACZ,SAASE,EAAQC,EAAK,CAClB,GAAIA,IAAQ,OACR,MAAM,IAAI,MAAM,uCAAuC,EAE3DJ,GAAOI,CACX,CALSF,EAAAC,EAAA,WAMTF,EAAI,QAAUE,CAClB,GAAGF,KAAQA,GAAM,CAAC,EAAE,EACpBF,GAAQ,QAAUE,KCtBlB,IAAAI,GAAAC,GAAAC,IAAA,cAKA,OAAO,eAAeA,GAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAC5DA,GAAQ,YAAcA,GAAQ,MAAQA,GAAQ,KAAOA,GAAQ,MAAQA,GAAQ,OAASA,GAAQ,OAASA,GAAQ,QAAU,OACzH,SAASC,GAAQC,EAAO,CACpB,OAAOA,IAAU,IAAQA,IAAU,EACvC,CAFSC,EAAAF,GAAA,WAGTD,GAAQ,QAAUC,GAClB,SAASG,GAAOF,EAAO,CACnB,OAAO,OAAOA,GAAU,UAAYA,aAAiB,MACzD,CAFSC,EAAAC,GAAA,UAGTJ,GAAQ,OAASI,GACjB,SAASC,GAAOH,EAAO,CACnB,OAAO,OAAOA,GAAU,UAAYA,aAAiB,MACzD,CAFSC,EAAAE,GAAA,UAGTL,GAAQ,OAASK,GACjB,SAASC,GAAMJ,EAAO,CAClB,OAAOA,aAAiB,KAC5B,CAFSC,EAAAG,GAAA,SAGTN,GAAQ,MAAQM,GAChB,SAASC,GAAKL,EAAO,CACjB,OAAO,OAAOA,GAAU,UAC5B,CAFSC,EAAAI,GAAA,QAGTP,GAAQ,KAAOO,GACf,SAASC,GAAMN,EAAO,CAClB,OAAO,MAAM,QAAQA,CAAK,CAC9B,CAFSC,EAAAK,GAAA,SAGTR,GAAQ,MAAQQ,GAChB,SAASC,GAAYP,EAAO,CACxB,OAAOM,GAAMN,CAAK,GAAKA,EAAM,MAAMQ,GAAQN,GAAOM,CAAI,CAAC,CAC3D,CAFSP,EAAAM,GAAA,eAGTT,GAAQ,YAAcS,KClCtB,IAAAE,GAAAC,GAAAC,IAAA,cAKA,OAAO,eAAeA,GAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAC5DA,GAAQ,QAAUA,GAAQ,MAAQ,OAClC,IAAMC,GAAQ,KACVC,IACH,SAAUA,EAAO,CACd,IAAMC,EAAc,CAAE,SAAU,CAAE,CAAE,EACpCD,EAAM,KAAO,UAAY,CAAE,OAAOC,CAAa,CACnD,GAAGD,KAAUF,GAAQ,MAAQE,GAAQ,CAAC,EAAE,EACxC,IAAME,GAAN,KAAmB,CAbnB,MAamB,CAAAC,EAAA,qBACf,IAAIC,EAAUC,EAAU,KAAMC,EAAQ,CAC7B,KAAK,aACN,KAAK,WAAa,CAAC,EACnB,KAAK,UAAY,CAAC,GAEtB,KAAK,WAAW,KAAKF,CAAQ,EAC7B,KAAK,UAAU,KAAKC,CAAO,EACvB,MAAM,QAAQC,CAAM,GACpBA,EAAO,KAAK,CAAE,QAASH,EAAA,IAAM,KAAK,OAAOC,EAAUC,CAAO,EAAnC,UAAqC,CAAC,CAErE,CACA,OAAOD,EAAUC,EAAU,KAAM,CAC7B,GAAI,CAAC,KAAK,WACN,OAEJ,IAAIE,EAAoC,GACxC,QAAS,EAAI,EAAGC,EAAM,KAAK,WAAW,OAAQ,EAAIA,EAAK,IACnD,GAAI,KAAK,WAAW,CAAC,IAAMJ,EACvB,GAAI,KAAK,UAAU,CAAC,IAAMC,EAAS,CAE/B,KAAK,WAAW,OAAO,EAAG,CAAC,EAC3B,KAAK,UAAU,OAAO,EAAG,CAAC,EAC1B,MACJ,MAEIE,EAAoC,GAIhD,GAAIA,EACA,MAAM,IAAI,MAAM,mFAAmF,CAE3G,CACA,UAAUE,EAAM,CACZ,GAAI,CAAC,KAAK,WACN,MAAO,CAAC,EAEZ,IAAMC,EAAM,CAAC,EAAGC,EAAY,KAAK,WAAW,MAAM,CAAC,EAAGC,EAAW,KAAK,UAAU,MAAM,CAAC,EACvF,QAASC,EAAI,EAAGL,EAAMG,EAAU,OAAQE,EAAIL,EAAKK,IAC7C,GAAI,CACAH,EAAI,KAAKC,EAAUE,CAAC,EAAE,MAAMD,EAASC,CAAC,EAAGJ,CAAI,CAAC,CAClD,OACOK,EAAG,IAEFf,GAAM,SAAS,EAAE,QAAQ,MAAMe,CAAC,CACxC,CAEJ,OAAOJ,CACX,CACA,SAAU,CACN,MAAO,CAAC,KAAK,YAAc,KAAK,WAAW,SAAW,CAC1D,CACA,SAAU,CACN,KAAK,WAAa,OAClB,KAAK,UAAY,MACrB,CACJ,EACMK,GAAN,MAAMC,CAAQ,CAvEd,MAuEc,CAAAb,EAAA,gBACV,YAAYc,EAAU,CAClB,KAAK,SAAWA,CACpB,CAKA,IAAI,OAAQ,CACR,OAAK,KAAK,SACN,KAAK,OAAS,CAACC,EAAUC,EAAUC,IAAgB,CAC1C,KAAK,aACN,KAAK,WAAa,IAAIlB,IAEtB,KAAK,UAAY,KAAK,SAAS,oBAAsB,KAAK,WAAW,QAAQ,GAC7E,KAAK,SAAS,mBAAmB,IAAI,EAEzC,KAAK,WAAW,IAAIgB,EAAUC,CAAQ,EACtC,IAAME,EAAS,CACX,QAASlB,EAAA,IAAM,CACN,KAAK,aAIV,KAAK,WAAW,OAAOe,EAAUC,CAAQ,EACzCE,EAAO,QAAUL,EAAQ,MACrB,KAAK,UAAY,KAAK,SAAS,sBAAwB,KAAK,WAAW,QAAQ,GAC/E,KAAK,SAAS,qBAAqB,IAAI,EAE/C,EAVS,UAWb,EACA,OAAI,MAAM,QAAQI,CAAW,GACzBA,EAAY,KAAKC,CAAM,EAEpBA,CACX,GAEG,KAAK,MAChB,CAKA,KAAKC,EAAO,CACJ,KAAK,YACL,KAAK,WAAW,OAAO,KAAK,KAAK,WAAYA,CAAK,CAE1D,CACA,SAAU,CACF,KAAK,aACL,KAAK,WAAW,QAAQ,EACxB,KAAK,WAAa,OAE1B,CACJ,EACAxB,GAAQ,QAAUiB,GAClBA,GAAQ,MAAQ,UAAY,CAAE,IC/H9B,IAAAQ,GAAAC,GAAAC,IAAA,cAKA,OAAO,eAAeA,GAAS,aAAc,CAAE,MAAO,EAAK,CAAC,EAC5DA,GAAQ,wBAA0BA,GAAQ,kBAAoB,OAC9D,IAAMC,GAAQ,KACRC,GAAK,KACLC,GAAW,KACbC,IACH,SAAUA,EAAmB,CAC1BA,EAAkB,KAAO,OAAO,OAAO,CACnC,wBAAyB,GACzB,wBAAyBD,GAAS,MAAM,IAC5C,CAAC,EACDC,EAAkB,UAAY,OAAO,OAAO,C
ACxC,wBAAyB,GACzB,wBAAyBD,GAAS,MAAM,IAC5C,CAAC,EACD,SAASE,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOC,IAAcA,IAAcH,EAAkB,MAC9CG,IAAcH,EAAkB,WAC/BF,GAAG,QAAQK,EAAU,uBAAuB,GAAK,CAAC,CAACA,EAAU,wBACzE,CALSC,EAAAH,EAAA,MAMTD,EAAkB,GAAKC,CAC3B,GAAGD,KAAsBJ,GAAQ,kBAAoBI,GAAoB,CAAC,EAAE,EAC5E,IAAMK,GAAgB,OAAO,OAAO,SAAUC,EAAUC,EAAS,CAC7D,IAAMC,KAAaX,GAAM,SAAS,EAAE,MAAM,WAAWS,EAAS,KAAKC,CAAO,EAAG,CAAC,EAC9E,MAAO,CAAE,SAAU,CAAEC,EAAO,QAAQ,CAAG,CAAE,CAC7C,CAAC,EACKC,GAAN,KAAmB,CAhCnB,MAgCmB,CAAAL,EAAA,qBACf,aAAc,CACV,KAAK,aAAe,EACxB,CACA,QAAS,CACA,KAAK,eACN,KAAK,aAAe,GAChB,KAAK,WACL,KAAK,SAAS,KAAK,MAAS,EAC5B,KAAK,QAAQ,GAGzB,CACA,IAAI,yBAA0B,CAC1B,OAAO,KAAK,YAChB,CACA,IAAI,yBAA0B,CAC1B,OAAI,KAAK,aACEC,IAEN,KAAK,WACN,KAAK,SAAW,IAAIN,GAAS,SAE1B,KAAK,SAAS,MACzB,CACA,SAAU,CACF,KAAK,WACL,KAAK,SAAS,QAAQ,EACtB,KAAK,SAAW,OAExB,CACJ,EACMW,GAAN,KAA8B,CAhE9B,MAgE8B,CAAAN,EAAA,gCAC1B,IAAI,OAAQ,CACR,OAAK,KAAK,SAGN,KAAK,OAAS,IAAIK,IAEf,KAAK,MAChB,CACA,QAAS,CACA,KAAK,OAON,KAAK,OAAO,OAAO,EAHnB,KAAK,OAAST,GAAkB,SAKxC,CACA,SAAU,CACD,KAAK,OAID,KAAK,kBAAkBS,IAE5B,KAAK,OAAO,QAAQ,EAJpB,KAAK,OAAST,GAAkB,IAMxC,CACJ,EACAJ,GAAQ,wBAA0Bc,KC/FlC,IAAAC,GAAA,GAAAC,GAAAD,GAAA,2BAAAE,GAAA,oBAAAC,GAAA,0BAAAC,GAAA,uCAAAC,GAAA,gCAAAC,GAAA,aAAAC,GAAA,UAAAC,GAAA,iBAAAC,EAAA,yBAAAC,GAAA,iBAAAC,GAAA,mBAAAC,GAAA,aAAAC,GAAA,gBAAAC,GAAA,mBAAAC,GAAA,sCAAAC,GAAA,0BAAAC,GAAA,uBAAAC,GAAA,2BAAAC,GAAA,iCAAAC,GAAA,2BAAAC,GAAA,6BAAAC,GAAA,oBAAAC,GAAA,wBAAAC,GAAA,0BAAAC,GAAA,kCAAAC,GAAA,4BAAAC,GAAA,iBAAAC,GAAA,kBAAAC,GAAA,wBAAAC,GAAA,wCAAAC,GAAA,sBAAAC,GAAA,4BAAAC,GAAA,yBAAAC,GAAA,2BAAAC,GAAA,wBAAAC,GAAA,0BAAAC,GAAA,yBAAAC,GAAA,4BAAAC,GAAA,aAAAC,GAAA,eAAAC,GAAA,oBAAAC,GAAA,kBAAAC,GAAA,kBAAAC,EAAA,sBAAAC,GAAA,gBAAAC,GAAA,iBAAAC,GAAA,oBAAAC,GAAA,4BAAAC,GAAA,sBAAAC,GAAA,eAAAC,GAAA,iBAAAC,GAAA,+BAAAC,GAAA,4BAAAC,GAAA,kBAAAC,GAAA,sCAAAC,GAAA,oBAAAC,GAAA,aAAAC,GAAA,WAAAC,GAAA,aAAAC,GAAA,uBAAAC,GAAA,iBAAAC,GAAA,cAAAC,GAAA,gBAAAC,GAAA,oBAAAC,GAAA,gBAAAC,GAAA,eAAAC,GAAA,gBAAAC,GAAA,iBAAAC,GAAA,mBAAAC,GAAA,QAAAC,GAAA,aAAAC,GAAA,uBAAAC,GAAA,uBAAAC,GAAA,mBAAAC,GAAA,mBAAAC,GAAA,sBAAAC,GAAA,2BAAAC,GAAA,4BAAAC,GAAA,kCAAAC,GAAA,wBAAAC,GAAA,wBAAAC,GAAA,kBAAAC,GAAA,mBAAAC,GAAA,cAAAC,GAAA,uBAAAC,GAAA,WAAAC,GAAA,sBAAAC,GAAA,cAAAC,GAAA,yBAAAC,GAAA,yBAAAC,GAAA,uBAAAC,GAAA,gCAAAC,GAAA,YAAAC,GAAA,kBAAAC,GAAA,mBAAAC,GAAA,YAAAC,GAAA,yBAAAC,GAAA,gBAAAC,GAAA,kBAAAC,GAAA,qBAAAC,GAAA,0BAAAC,GAAA,wBAAAC,GAAA,eAAAC,GAAA,yBAAAC,GAAA,0BAAAC,GAAA,6BAAAC,GAAA,WAAAC,EAAA,yBAAAC,KCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,uBAAAE,GAAA,oBAAAC,GAAA,iBAAAC,GAAA,oBAAAC,GAAA,gCAAAC,GAAA,yBAAAC,GAAA,6BAAAC,GAAA,eAAAC,GAAA,qBAAAC,GAAA,gBAAAC,GAAA,oBAAAC,GAAA,qBAAAC,GAAA,YAAAC,GAAA,gBAAAC,GAAA,kBAAAC,GAAA,cAAAC,GAAA,sBAAAC,GAAA,iBAAAC,KC6BM,SAAUC,GAAUC,EAAY,CAClC,OAAO,OAAOA,GAAQ,UAAYA,IAAQ,MAAQ,OAAQA,EAAgB,OAAU,QACxF,CAFgBC,EAAAF,GAAA,aAoCV,SAAUG,GAAYF,EAAY,CACpC,OAAO,OAAOA,GAAQ,UAAYA,IAAQ,MAAQ,OAAQA,EAAkB,UAAa,QAC7F,CAFgBC,EAAAC,GAAA,eAgCV,SAAUC,GAAqBH,EAAY,CAC7C,OAAO,OAAOA,GAAQ,UAAYA,IAAQ,MACnC,OAAQA,EAA2B,MAAS,UAC5C,OAAQA,EAA2B,MAAS,UAC5C,OAAQA,EAA2B,MAAS,QACvD,CALgBC,EAAAE,GAAA,wBA0BV,SAAUC,GAAeJ,EAAY,CACvC,OAAO,OAAOA,GAAQ,UAAYA,IAAQ,MACnCD,GAAWC,EAAqB,SAAS,GACzCE,GAAaF,EAAqB,SAAS,GAC3C,OAAQA,EAAqB,SAAY,QACpD,CALgBC,EAAAG,GAAA,kBAwBV,IAAgBC,GAAhB,KAAqC,CAnJ3C,MAmJ2C,CAAAJ,EAAA,8BAA3C,aAAA,CAEc,KAAA,SAAgE,CAAA,EAChE,KAAA,YAAoD,CAAA,CA6ClE,CAtCI,WAAWK,EAAeC,EAAY,CAClC,OAAOR,GAAUO,CAAI,GAAK,KAAK,UAAUA,EAAK,MAAOC,CAAI,CAC7D,CAEA,UAAUC,EAAiBC,EAAiB,CACxC,GAAID,IAAYC,EACZ,MAAO,GAEX,IAAIC,EAAS,KAAK,SAASF,CAAO,EAC7BE,IACDA,EAAS,KAAK,SAASF,CAAO,EAAI,CAAA,GAEtC,IAAMG,EAAWD,EAAOD,CAAS,EACjC,GAAIE,IAAa,OACb,OAAOA,EACJ,CACH
,IAAMC,EAAS,KAAK,iBAAiBJ,EAASC,CAAS,EACvD,OAAAC,EAAOD,CAAS,EAAIG,EACbA,EAEf,CAEA,eAAeL,EAAY,CACvB,IAAMI,EAAW,KAAK,YAAYJ,CAAI,EACtC,GAAII,EACA,OAAOA,EACJ,CACH,IAAME,EAAW,KAAK,YAAW,EAC3BC,EAAkB,CAAA,EACxB,QAAWC,KAAmBF,EACtB,KAAK,UAAUE,EAAiBR,CAAI,GACpCO,EAAM,KAAKC,CAAe,EAGlC,YAAK,YAAYR,CAAI,EAAIO,EAClBA,EAEf,GA8DE,SAAUE,GAAmBV,EAAa,CAC5C,OAAO,OAAOA,GAAS,UAAYA,IAAS,MAAQ,MAAM,QAASA,EAA0B,OAAO,CACxG,CAFgBL,EAAAe,GAAA,sBAWV,SAAUC,GAAcX,EAAa,CACvC,OAAO,OAAOA,GAAS,UAAYA,IAAS,MAAQ,OAAQA,EAAqB,WAAc,QACnG,CAFgBL,EAAAgB,GAAA,iBAQV,SAAUC,GAAcZ,EAAa,CACvC,OAAOU,GAAmBV,CAAI,GAAK,OAAQA,EAAqB,UAAa,QACjF,CAFgBL,EAAAiB,GAAA,iBCfV,IAAOC,GAAP,MAAOC,CAAU,CApQvB,MAoQuB,CAAAC,EAAA,mBAInB,YAAYC,EAAkBC,EAAkD,CAC5E,KAAK,QAAUD,EACf,KAAK,OAASC,CAClB,CAEA,UAAQ,CACJ,IAAMC,EAAW,CACb,MAAO,KAAK,QAAO,EACnB,KAAMH,EAAA,IAAM,KAAK,OAAOG,EAAS,KAAK,EAAhC,QACN,CAAC,OAAO,QAAQ,EAAG,IAAMA,GAE7B,OAAOA,CACX,CAEA,CAAC,OAAO,QAAQ,GAAC,CACb,OAAO,KAAK,SAAQ,CACxB,CAEA,SAAO,CAEH,MAAO,EADU,KAAK,SAAQ,EACN,KAAI,EAAG,IACnC,CAEA,OAAK,CACD,IAAMA,EAAW,KAAK,SAAQ,EAC1BC,EAAQ,EACRC,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MACTD,IACAC,EAAOF,EAAS,KAAI,EAExB,OAAOC,CACX,CAEA,SAAO,CACH,IAAME,EAAc,CAAA,EACdH,EAAW,KAAK,SAAQ,EAC1BE,EACJ,GACIA,EAAOF,EAAS,KAAI,EAChBE,EAAK,QAAU,QACfC,EAAO,KAAKD,EAAK,KAAK,QAErB,CAACA,EAAK,MACf,OAAOC,CACX,CAEA,OAAK,CACD,OAAO,IAAI,IAAI,IAAI,CACvB,CAEA,MAAoBC,EAAqBC,EAAqB,CAC1D,IAAMC,EAAc,KAAK,IAAIC,GAAmB,CAC5CH,EAAQA,EAAMG,CAAO,EAAIA,EACzBF,EAAUA,EAAQE,CAAO,EAAIA,EAChC,EACD,OAAO,IAAI,IAAID,CAAW,CAC9B,CAEA,UAAQ,CACJ,OAAO,KAAK,KAAI,CACpB,CAEA,OAAWE,EAAmB,CAC1B,IAAMR,EAAWQ,EAAM,OAAO,QAAQ,EAAC,EACvC,OAAO,IAAIZ,EACP,KAAO,CAAE,MAAO,KAAK,QAAO,EAAI,UAAW,EAAK,GAChDa,GAAQ,CACJ,IAAIN,EACJ,GAAI,CAACM,EAAM,UAAW,CAClB,EAEI,IADAN,EAAS,KAAK,OAAOM,EAAM,KAAK,EAC5B,CAACN,EAAO,KACR,OAAOA,QAEN,CAACA,EAAO,MACjBM,EAAM,UAAY,GAEtB,EAEI,IADAN,EAASH,EAAS,KAAI,EAClB,CAACG,EAAO,KACR,OAAOA,QAEN,CAACA,EAAO,MACjB,OAAOO,EACX,CAAC,CAET,CAEA,KAAKC,EAAY,IAAG,CAChB,IAAMX,EAAW,KAAK,SAAQ,EAC1BY,EAAQ,GACRT,EACAU,EAAe,GACnB,GACIV,EAASH,EAAS,KAAI,EACjBG,EAAO,OACJU,IACAD,GAASD,GAEbC,GAASE,GAASX,EAAO,KAAK,GAElCU,EAAe,SACV,CAACV,EAAO,MACjB,OAAOS,CACX,CAEA,QAAQG,EAAkBC,EAAY,EAAC,CACnC,IAAMhB,EAAW,KAAK,SAAQ,EAC1BiB,EAAQ,EACRf,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAIe,GAASD,GAAad,EAAK,QAAUa,EACrC,OAAOE,EAEXf,EAAOF,EAAS,KAAI,EACpBiB,IAEJ,MAAO,EACX,CAeA,MAAMC,EAAgC,CAClC,IAAMlB,EAAW,KAAK,SAAQ,EAC1BE,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAI,CAACgB,EAAUhB,EAAK,KAAK,EACrB,MAAO,GAEXA,EAAOF,EAAS,KAAI,EAExB,MAAO,EACX,CAEA,KAAKkB,EAAgC,CACjC,IAAMlB,EAAW,KAAK,SAAQ,EAC1BE,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAIgB,EAAUhB,EAAK,KAAK,EACpB,MAAO,GAEXA,EAAOF,EAAS,KAAI,EAExB,MAAO,EACX,CAEA,QAAQmB,EAA6C,CACjD,IAAMnB,EAAW,KAAK,SAAQ,EAC1BiB,EAAQ,EACRf,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MACTiB,EAAWjB,EAAK,MAAOe,CAAK,EAC5Bf,EAAOF,EAAS,KAAI,EACpBiB,GAER,CAEA,IAAOE,EAA2B,CAC9B,OAAO,IAAIvB,EACP,KAAK,QACJa,GAAS,CACN,GAAM,CAAE,KAAAW,EAAM,MAAAR,CAAK,EAAK,KAAK,OAAOH,CAAK,EACzC,OAAIW,EACOV,GAEA,CAAE,KAAM,GAAO,MAAOS,EAAWP,CAAK,CAAC,CAEtD,CAAC,CAET,CAKA,OAAOM,EAAgC,CACnC,OAAO,IAAItB,EACP,KAAK,QACLa,GAAQ,CACJ,IAAIN,EACJ,EAEI,IADAA,EAAS,KAAK,OAAOM,CAAK,EACtB,CAACN,EAAO,MAAQe,EAAUf,EAAO,KAAK,EACtC,OAAOA,QAEN,CAACA,EAAO,MACjB,OAAOO,EACX,CAAC,CAET,CAEA,aAAW,CACP,OAAO,KAAK,OAAO,GAAwB,GAAM,IAAI,CACzD,CAIA,OAAUS,EAA0DE,EAAgB,CAChF,IAAMrB,EAAW,KAAK,SAAQ,EAC1BsB,EAAmCD,EACnCnB,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MACLoB,IAAkB,OAClBA,EAAgBpB,EAAK,MAErBoB,EAAgBH,EAAWG,EAAepB,EAAK,KAAK,EAExDA,EAAOF,EAAS,KAAI,EAExB,OAAOsB,CACX,CAIA,YAAeH,EAA0DE,EAAgB,CACrF,OAAO,KAAK,gBAAgB,KAAK,SAAQ,EAAIF,
EAAYE,CAAY,CACzE,CAEU,gBAAmBrB,EAAuBmB,EAA0DE,EAAgB,CAC1H,IAAMnB,EAAOF,EAAS,KAAI,EAC1B,GAAIE,EAAK,KACL,OAAOmB,EAEX,IAAMC,EAAgB,KAAK,gBAAgBtB,EAAUmB,EAAYE,CAAY,EAC7E,OAAIC,IAAkB,OACXpB,EAAK,MAETiB,EAAWG,EAAepB,EAAK,KAAK,CAC/C,CAIA,KAAKgB,EAAgC,CACjC,IAAMlB,EAAW,KAAK,SAAQ,EAC1BE,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAIgB,EAAUhB,EAAK,KAAK,EACpB,OAAOA,EAAK,MAEhBA,EAAOF,EAAS,KAAI,EAG5B,CAEA,UAAUkB,EAAgC,CACtC,IAAMlB,EAAW,KAAK,SAAQ,EAC1BiB,EAAQ,EACRf,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAIgB,EAAUhB,EAAK,KAAK,EACpB,OAAOe,EAEXf,EAAOF,EAAS,KAAI,EACpBiB,IAEJ,MAAO,EACX,CAEA,SAASF,EAAgB,CACrB,IAAMf,EAAW,KAAK,SAAQ,EAC1BE,EAAOF,EAAS,KAAI,EACxB,KAAO,CAACE,EAAK,MAAM,CACf,GAAIA,EAAK,QAAUa,EACf,MAAO,GAEXb,EAAOF,EAAS,KAAI,EAExB,MAAO,EACX,CAEA,QAAWmB,EAAyC,CAEhD,OAAO,IAAIvB,EACP,KAAO,CAAE,KAAM,KAAK,QAAO,CAAE,GAC5Ba,GAAS,CACN,EAAG,CACC,GAAIA,EAAM,SAAU,CAChB,IAAMP,EAAOO,EAAM,SAAS,KAAI,EAChC,GAAIP,EAAK,KACLO,EAAM,SAAW,WAEjB,QAAOP,EAGf,GAAM,CAAE,KAAAkB,EAAM,MAAAR,CAAK,EAAK,KAAK,OAAOH,EAAM,IAAI,EAC9C,GAAI,CAACW,EAAM,CACP,IAAMG,EAASJ,EAAWP,CAAK,EAC/B,GAAIY,GAAWD,CAAM,EACjBd,EAAM,SAAWc,EAAO,OAAO,QAAQ,EAAC,MAExC,OAAO,CAAE,KAAM,GAAO,MAAOA,CAAM,SAGtCd,EAAM,UACf,OAAOC,EACX,CAAC,CAET,CAEA,KAA2Be,EAAS,CAIhC,GAHIA,IAAU,SACVA,EAAQ,GAERA,GAAS,EACT,OAAO,KAEX,IAAMC,EAASD,EAAQ,EAAI,KAAK,KAAKA,EAAQ,CAAC,EAAmC,KAEjF,OAAO,IAAI7B,EACP,KAAO,CAAE,KAAM8B,EAAO,QAAO,CAAE,GAC9BjB,GAAS,CACN,EAAG,CACC,GAAIA,EAAM,SAAU,CAChB,IAAMP,EAAOO,EAAM,SAAS,KAAI,EAChC,GAAIP,EAAK,KACLO,EAAM,SAAW,WAEjB,QAAOP,EAGf,GAAM,CAAE,KAAAkB,EAAM,MAAAR,CAAK,EAAKc,EAAO,OAAOjB,EAAM,IAAI,EAChD,GAAI,CAACW,EACD,GAAII,GAAWZ,CAAK,EAChBH,EAAM,SAAWG,EAAM,OAAO,QAAQ,EAAC,MAEvC,OAAO,CAAE,KAAM,GAAO,MAAOA,CAAK,QAGrCH,EAAM,UACf,OAAOC,EACX,CAAC,CAET,CAEA,MAAI,CAEA,IAAMP,EADW,KAAK,SAAQ,EACN,KAAI,EAC5B,GAAI,CAAAA,EAAO,KAGX,OAAOA,EAAO,KAClB,CAEA,KAAKwB,EAAY,EAAC,CACd,OAAO,IAAI/B,EACP,IAAK,CACD,IAAMa,EAAQ,KAAK,QAAO,EAC1B,QAASmB,EAAI,EAAGA,EAAID,EAAWC,IAE3B,GADa,KAAK,OAAOnB,CAAK,EACrB,KACL,OAAOA,EAGf,OAAOA,CACX,EACA,KAAK,MAAM,CAEnB,CAEA,MAAMoB,EAAe,CACjB,OAAO,IAAIjC,EACP,KAAO,CAAE,KAAM,EAAG,MAAO,KAAK,QAAO,CAAE,GACvCa,IACIA,EAAM,OACFA,EAAM,KAAOoB,EACNnB,GAEJ,KAAK,OAAOD,EAAM,KAAK,EACjC,CAET,CAEA,SAAkBqB,EAAwB,CACtC,IAAMC,EAAM,IAAI,IAChB,OAAO,KAAK,OAAOC,GAAI,CACnB,IAAMpB,EAAQkB,EAAKA,EAAGE,CAAC,EAAIA,EAC3B,OAAID,EAAI,IAAInB,CAAK,EACN,IAEPmB,EAAI,IAAInB,CAAK,EACN,GAEf,CAAC,CACL,CAEA,QAAiBJ,EAAoByB,EAAyB,CAC1D,IAAMC,EAAc,IAAI,IACxB,QAAWC,KAAQ3B,EAAO,CACtB,IAAMI,EAAQqB,EAAMA,EAAIE,CAAI,EAAIA,EAChCD,EAAY,IAAItB,CAAK,EAEzB,OAAO,KAAK,OAAOoB,GAAI,CACnB,IAAMI,EAASH,EAAMA,EAAID,CAAC,EAAIA,EAC9B,MAAO,CAACE,EAAY,IAAIE,CAAM,CAClC,CAAC,CACL,GAGJ,SAAStB,GAASqB,EAAa,CAC3B,OAAI,OAAOA,GAAS,SACTA,EAEP,OAAOA,EAAS,IACT,YAGP,OAAQA,EAAa,UAAa,WAE1BA,EAAa,SAAQ,EAE1B,OAAO,UAAU,SAAS,KAAKA,CAAI,CAC9C,CAbStC,EAAAiB,GAAA,YAeT,SAASU,GAAca,EAAY,CAC/B,MAAO,CAAC,CAACA,GAAO,OAAQA,EAAoB,OAAO,QAAQ,GAAM,UACrE,CAFSxC,EAAA2B,GAAA,cAQF,IAAMc,GAA4B,IAAI3C,GAA2B,IAAG,GAAc,IAAMe,EAAW,EAK7FA,GAA+C,OAAO,OAAO,CAAE,KAAM,GAAM,MAAO,MAAS,CAAE,EAKpG,SAAUgB,KAAaa,EAA8C,CACvE,GAAIA,EAAY,SAAW,EAAG,CAC1B,IAAMC,EAAaD,EAAY,CAAC,EAChC,GAAIC,aAAsB7C,GACtB,OAAO6C,EAEX,GAAIhB,GAAWgB,CAAU,EACrB,OAAO,IAAI7C,GACP,IAAM6C,EAAW,OAAO,QAAQ,EAAC,EAChCxC,GAAaA,EAAS,KAAI,CAAE,EAGrC,GAAI,OAAOwC,EAAW,QAAW,SAC7B,OAAO,IAAI7C,GACP,KAAO,CAAE,MAAO,CAAC,GAChBc,GACOA,EAAM,MAAQ+B,EAAW,OAClB,CAAE,KAAM,GAAO,MAAOA,EAAW/B,EAAM,OAAO,CAAC,EAE/CC,EAEd,EAIb,OAAI6B,EAAY,OAAS,EAEd,IAAI5C,GACP,KAAO,CAAE,UAAW,EAAG,SAAU,CAAC,GACjCc,GAAS,CACN,EAAG,CACC,GAAIA,EAAM,SAAU,CAChB,IAAMP,EAAOO,EAAM,SAAS,KAAI,EAChC,GAAI,CAACP,EAAK,KACN,OAAOA,EAEXO,EAAM,SAAW,OAErB,GAAIA
,EAAM,MAAO,CACb,GAAIA,EAAM,SAAWA,EAAM,MAAM,OAC7B,MAAO,CAAE,KAAM,GAAO,MAAOA,EAAM,MAAMA,EAAM,UAAU,CAAC,EAE9DA,EAAM,MAAQ,OACdA,EAAM,SAAW,EAErB,GAAIA,EAAM,UAAY8B,EAAY,OAAQ,CACtC,IAAMC,EAAaD,EAAY9B,EAAM,WAAW,EAC5Ce,GAAWgB,CAAU,EACrB/B,EAAM,SAAW+B,EAAW,OAAO,QAAQ,EAAC,EACrCA,GAAc,OAAOA,EAAW,QAAW,WAClD/B,EAAM,MAAQ+B,UAGjB/B,EAAM,UAAYA,EAAM,OAASA,EAAM,UAAY8B,EAAY,QACxE,OAAO7B,EACX,CAAC,EAGF4B,EACX,CA3DgBzC,EAAA6B,EAAA,UAoFV,IAAOe,GAAP,cACM9C,EAAiE,CAvxB7E,MAuxB6E,CAAAE,EAAA,uBAGzE,YAAY6C,EAASC,EAAoCC,EAAmC,CACxF,MACI,KAAO,CACH,UAAWA,GAAS,YAAc,CAAC,CAACF,CAAI,EAAE,OAAO,QAAQ,EAAC,CAAE,EAAI,CAACC,EAASD,CAAI,EAAE,OAAO,QAAQ,EAAC,CAAE,EAClG,OAAQ,KAEZjC,GAAQ,CAKJ,IAJIA,EAAM,SACNA,EAAM,UAAU,IAAG,EACnBA,EAAM,OAAS,IAEZA,EAAM,UAAU,OAAS,GAAG,CAE/B,IAAMP,EADWO,EAAM,UAAUA,EAAM,UAAU,OAAS,CAAC,EACrC,KAAI,EAC1B,GAAIP,EAAK,KACLO,EAAM,UAAU,IAAG,MAEnB,QAAAA,EAAM,UAAU,KAAKkC,EAASzC,EAAK,KAAK,EAAE,OAAO,QAAQ,EAAC,CAAE,EACrDA,EAGf,OAAOQ,EACX,CAAC,CAET,CAES,UAAQ,CACb,IAAMV,EAAW,CACb,MAAO,KAAK,QAAO,EACnB,KAAMH,EAAA,IAAM,KAAK,OAAOG,EAAS,KAAK,EAAhC,QACN,MAAOH,EAAA,IAAK,CACRG,EAAS,MAAM,OAAS,EAC5B,EAFO,SAGP,CAAC,OAAO,QAAQ,EAAG,IAAMA,GAE7B,OAAOA,CACX,GAMa6C,IAAjB,SAAiBA,EAAS,CAKtB,SAAgBC,EAAIpB,EAAsB,CACtC,OAAOA,EAAO,OAAO,CAACqB,EAAGC,IAAMD,EAAIC,EAAG,CAAC,CAC3C,CAFgBnD,EAAAiD,EAAA,OAAAD,EAAA,IAAGC,EAOnB,SAAgBG,EAAQvB,EAAsB,CAC1C,OAAOA,EAAO,OAAO,CAACqB,EAAGC,IAAMD,EAAIC,EAAG,CAAC,CAC3C,CAFgBnD,EAAAoD,EAAA,WAAAJ,EAAA,QAAOI,EAOvB,SAAgBC,EAAIxB,EAAsB,CACtC,OAAOA,EAAO,OAAO,CAACqB,EAAGC,IAAM,KAAK,IAAID,EAAGC,CAAC,CAAC,CACjD,CAFgBnD,EAAAqD,EAAA,OAAAL,EAAA,IAAGK,EAOnB,SAAgBC,EAAIzB,EAAsB,CACtC,OAAOA,EAAO,OAAO,CAACqB,EAAGC,IAAM,KAAK,IAAID,EAAGC,CAAC,CAAC,CACjD,CAFgBnD,EAAAsD,EAAA,OAAAN,EAAA,IAAGM,CAIvB,GA9BiBN,KAAAA,GAAS,CAAA,EAAA,EFlzBpB,SAAUO,GAAUC,EAAa,CACnC,OAAO,IAAIC,GAAeD,EAAME,GACxBC,GAAmBD,CAAO,EACnBA,EAAQ,QAER,CAAA,EAEZ,CAAE,YAAa,EAAI,CAAE,CAC5B,CARgBE,EAAAL,GAAA,aAaV,SAAUM,GAAWL,EAAa,CACpC,OAAOD,GAAUC,CAAI,EAAE,OAAOM,EAAa,CAC/C,CAFgBF,EAAAC,GAAA,cAOV,SAAUE,GAAYC,EAAgBC,EAAe,CACvD,KAAOD,EAAM,WAET,GADAA,EAAQA,EAAM,UACVA,IAAUC,EACV,MAAO,GAGf,MAAO,EACX,CARgBL,EAAAG,GAAA,eAUV,SAAUG,GAAaC,EAAa,CAGtC,MAAO,CACH,MAAO,CACH,UAAWA,EAAM,YAAe,EAChC,KAAMA,EAAM,UAAa,GAE7B,IAAK,CACD,UAAWA,EAAM,UACjB,KAAMA,EAAM,QAAW,GAGnC,CAbgBP,EAAAM,GAAA,gBAiBV,SAAUE,GAAkBZ,EAAc,CAC5C,GAAI,CAACA,EACD,OAEJ,GAAM,CAAE,OAAAa,EAAQ,IAAAC,EAAK,MAAAC,CAAK,EAAKf,EAC/B,MAAO,CACH,MAAAe,EACA,OAAAF,EACA,IAAAC,EACA,OAAQA,EAAMD,EAEtB,CAXgBT,EAAAQ,GAAA,qBAahB,IAAYI,IAAZ,SAAYA,EAAe,CACvBA,EAAAA,EAAA,OAAA,CAAA,EAAA,SACAA,EAAAA,EAAA,MAAA,CAAA,EAAA,QACAA,EAAAA,EAAA,aAAA,CAAA,EAAA,eACAA,EAAAA,EAAA,YAAA,CAAA,EAAA,cACAA,EAAAA,EAAA,OAAA,CAAA,EAAA,QACJ,GANYA,KAAAA,GAAe,CAAA,EAAA,EAQrB,SAAUC,GAAaF,EAAcG,EAAS,CAChD,GAAIH,EAAM,IAAI,KAAOG,EAAG,MAAM,MAASH,EAAM,IAAI,OAASG,EAAG,MAAM,MAAQH,EAAM,IAAI,UAAYA,EAAM,MAAM,UACzG,OAAOC,GAAgB,OACpB,GAAID,EAAM,MAAM,KAAOG,EAAG,IAAI,MAASH,EAAM,MAAM,OAASG,EAAG,IAAI,MAAQH,EAAM,MAAM,UAAYG,EAAG,IAAI,UAC7G,OAAOF,GAAgB,MAE3B,IAAMG,EAAcJ,EAAM,MAAM,KAAOG,EAAG,MAAM,MAASH,EAAM,MAAM,OAASG,EAAG,MAAM,MAAQH,EAAM,MAAM,WAAaG,EAAG,MAAM,UAC3HE,EAAYL,EAAM,IAAI,KAAOG,EAAG,IAAI,MAASH,EAAM,IAAI,OAASG,EAAG,IAAI,MAAQH,EAAM,IAAI,WAAaG,EAAG,IAAI,UACnH,OAAIC,GAAeC,EACRJ,GAAgB,OAChBG,EACAH,GAAgB,YAEhBA,GAAgB,YAE/B,CAfgBZ,EAAAa,GAAA,gBAiBV,SAAUI,GAAQN,EAAcG,EAAS,CAE3C,OADmBD,GAAaF,EAAOG,CAAE,EACrBF,GAAgB,KACxC,CAHgBZ,EAAAiB,GAAA,WAOT,IAAMC,GAAoB,eAQ3B,SAAUC,GAA4BC,EAA8BX,EAAgBY,EAAaH,GAAiB,CACpH,GAAIE,EAAS,CACT,GAAIX,EAAS,EAAG,CACZ,IAAMa,EAAcb,EAASW,EAAQ,OAC/BG,EAAeH,EAAQ,KAAK,OAAOE,CAAW,EAC/CD,EAAW,KAAKE,CAAY,GAC7Bd,IAGR,OAAOe,GAAqBJ,EAASX,CAAM,EAGnD,CAZgBT,EAAAmB,GAAA,+BAcV,SAAUM,G
AAgBL,EAA8BM,EAAsB,CAChF,GAAIN,EAAS,CACT,IAAMO,EAAWC,GAAgBR,EAAS,EAAI,EAC9C,GAAIO,GAAYE,GAAcF,EAAUD,CAAY,EAChD,OAAOC,EAEX,GAAIG,GAAcV,CAAO,EAAG,CAGxB,IAAMW,EAAWX,EAAQ,QAAQ,UAAUY,GAAK,CAACA,EAAE,MAAM,EACzD,QAAS,EAAID,EAAW,EAAG,GAAK,EAAG,IAAK,CACpC,IAAM3B,EAAQgB,EAAQ,QAAQ,CAAC,EAC/B,GAAIS,GAAczB,EAAOsB,CAAY,EACjC,OAAOtB,IAM3B,CAnBgBJ,EAAAyB,GAAA,mBAqBV,SAAUI,GAAcT,EAAkBM,EAAsB,CAClE,OAAOxB,GAAckB,CAAO,GAAKM,EAAa,SAASN,EAAQ,UAAU,IAAI,CACjF,CAFgBpB,EAAA6B,GAAA,iBAcV,SAAUL,GAAqB5B,EAAea,EAAc,CAC9D,GAAIP,GAAcN,CAAI,EAClB,OAAOA,EACJ,GAAIG,GAAmBH,CAAI,EAAG,CACjC,IAAMqC,EAAeC,GAAatC,EAAMa,EAAQ,EAAK,EACrD,GAAIwB,EACA,OAAOT,GAAqBS,EAAcxB,CAAM,EAI5D,CAVgBT,EAAAwB,GAAA,wBAsBV,SAAUW,GAAyBvC,EAAea,EAAc,CAClE,GAAIP,GAAcN,CAAI,EAClB,OAAOA,EACJ,GAAIG,GAAmBH,CAAI,EAAG,CACjC,IAAMqC,EAAeC,GAAatC,EAAMa,EAAQ,EAAI,EACpD,GAAIwB,EACA,OAAOE,GAAyBF,EAAcxB,CAAM,EAIhE,CAVgBT,EAAAmC,GAAA,4BAYhB,SAASD,GAAatC,EAAwBa,EAAgB2B,EAAgB,CAC1E,IAAIC,EAAO,EACPC,EAAQ1C,EAAK,QAAQ,OAAS,EAC9B2C,EAEJ,KAAOF,GAAQC,GAAO,CAClB,IAAME,EAAS,KAAK,OAAOH,EAAOC,GAAS,CAAC,EACtCG,EAAa7C,EAAK,QAAQ4C,CAAM,EAEtC,GAAIC,EAAW,QAAUhC,GAAUgC,EAAW,IAAMhC,EAEhD,OAAOgC,EAGPA,EAAW,KAAOhC,GAElB8B,EAAcH,EAAUK,EAAa,OACrCJ,EAAOG,EAAS,GAGhBF,EAAQE,EAAS,EAIzB,OAAOD,CACX,CAzBSvC,EAAAkC,GAAA,gBA2BH,SAAUN,GAAgBhC,EAAe8C,EAAS,GAAI,CACxD,KAAO9C,EAAK,WAAW,CACnB,IAAMS,EAAST,EAAK,UAChB+C,EAAQtC,EAAO,QAAQ,QAAQT,CAAI,EACvC,KAAO+C,EAAQ,GAAG,CACdA,IACA,IAAMhB,EAAWtB,EAAO,QAAQsC,CAAK,EACrC,GAAID,GAAU,CAACf,EAAS,OACpB,OAAOA,EAGf/B,EAAOS,EAGf,CAdgBL,EAAA4B,GAAA,mBAgBV,SAAUgB,GAAYhD,EAAe8C,EAAS,GAAI,CACpD,KAAO9C,EAAK,WAAW,CACnB,IAAMS,EAAST,EAAK,UAChB+C,EAAQtC,EAAO,QAAQ,QAAQT,CAAI,EACjCiD,EAAOxC,EAAO,QAAQ,OAAS,EACrC,KAAOsC,EAAQE,GAAM,CACjBF,IACA,IAAMG,EAAOzC,EAAO,QAAQsC,CAAK,EACjC,GAAID,GAAU,CAACI,EAAK,OAChB,OAAOA,EAGflD,EAAOS,EAGf,CAfgBL,EAAA4C,GAAA,eAiBV,SAAUG,GAAiBnD,EAAa,CAC1C,GAAIA,EAAK,MAAM,MAAM,YAAc,EAC/B,OAAOA,EAEX,IAAMoD,EAAOpD,EAAK,MAAM,MAAM,KAC1BiD,EAAOjD,EACP+C,EACJ,KAAO/C,EAAK,WAAW,CACnB,IAAMS,EAAST,EAAK,UACdqD,EAAYN,GAAStC,EAAO,QAAQ,QAAQT,CAAI,EAQtD,GAPIqD,IAAc,GACdrD,EAAOS,EACPsC,EAAQ,SAERA,EAAQM,EAAY,EACpBrD,EAAOS,EAAO,QAAQsC,CAAK,GAE3B/C,EAAK,MAAM,MAAM,OAASoD,EAC1B,MAEJH,EAAOjD,EAEX,OAAOiD,CACX,CAvBgB7C,EAAA+C,GAAA,oBAyBV,SAAUG,GAAiBC,EAAgBzC,EAAY,CACzD,IAAM0C,EAAeC,GAAgBF,EAAOzC,CAAG,EAC/C,OAAK0C,EAGEA,EAAa,OAAO,QAAQ,MAAMA,EAAa,EAAI,EAAGA,EAAa,CAAC,EAFhE,CAAA,CAGf,CANgBpD,EAAAkD,GAAA,oBAQhB,SAASG,GAAgBC,EAAYC,EAAU,CAC3C,IAAMC,EAAWC,GAAeH,CAAC,EAC3BI,EAAWD,GAAeF,CAAC,EAC7BI,EACJ,QAASC,EAAI,EAAGA,EAAIJ,EAAS,QAAUI,EAAIF,EAAS,OAAQE,IAAK,CAC7D,IAAMC,EAAUL,EAASI,CAAC,EACpBE,EAAUJ,EAASE,CAAC,EAC1B,GAAIC,EAAQ,SAAWC,EAAQ,OAC3BH,EAAU,CACN,OAAQE,EAAQ,OAChB,EAAGA,EAAQ,MACX,EAAGC,EAAQ,WAGf,OAGR,OAAOH,CACX,CAlBS3D,EAAAqD,GAAA,mBA0BT,SAASI,GAAe7D,EAAa,CACjC,IAAMmE,EAAsB,CAAA,EAC5B,KAAOnE,EAAK,WAAW,CACnB,IAAMS,EAAST,EAAK,UACd+C,EAAQtC,EAAO,QAAQ,QAAQT,CAAI,EACzCmE,EAAM,KAAK,CACP,OAAA1D,EACA,MAAAsC,EACH,EACD/C,EAAOS,EAEX,OAAO0D,EAAM,QAAO,CACxB,CAZS/D,EAAAyD,GAAA,kBGhUT,IAAAO,GAAA,GAAAC,GAAAD,GAAA,oBAAAE,GAAA,uBAAAC,GAAA,uBAAAC,GAAA,wBAAAC,GAAA,wBAAAC,GAAA,gCAAAC,GAAA,yBAAAC,GAAA,uBAAAC,GAAA,kBAAAC,GAAA,yBAAAC,GAAA,8BAAAC,GAAA,iBAAAC,GAAA,wBAAAC,GAAA,mBAAAC,GAAA,gBAAAC,GAAA,gBAAAC,GAAA,uBAAAC,GAAA,oBAAAC,GAAA,sBAAAC,GAAA,eAAAC,GAAA,mBAAAC,GAAA,0BAAAC,GAAA,kBAAAC,KCQM,IAAOC,GAAP,cAAiC,KAAK,CAR5C,MAQ4C,CAAAC,EAAA,0BACxC,YAAYC,EAA2BC,EAAe,CAClD,MAAMD,EAAO,GAAGC,CAAO,OAAOD,EAAK,MAAM,MAAM,IAAI,IAAIA,EAAK,MAAM,MAAM,SAAS,GAAKC,CAAO,CACjG,GAGE,SAAUC,GAAkBC,EAAQ,CACtC,MAAM,IAAI,MAAM,yCAAyC,CAC7D,CAFgBJ,EAAAG,GAAA,qBCdhB,IAAAE,GAAA,GAAAC,GAAAD,GAAA,qBAAAE,GAAA,iBAAAC,GAAA,iBAAAC,GAAA,WAAAC,GAAA
,iBAAAC,GAAA,iBAAAC,GAAA,cAAAC,GAAA,eAAAC,GAAA,mBAAAC,GAAA,mBAAAC,GAAA,cAAAC,GAAA,gBAAAC,GAAA,mBAAAC,GAAA,gBAAAC,GAAA,cAAAC,GAAA,YAAAC,GAAA,kBAAAC,GAAA,UAAAC,GAAA,iBAAAC,GAAA,cAAAC,GAAA,YAAAC,GAAA,gCAAAC,GAAA,4BAAAC,GAAA,kBAAAC,GAAA,iBAAAC,GAAA,aAAAC,GAAA,kBAAAC,GAAA,cAAAC,GAAA,uBAAAC,GAAA,eAAAC,GAAA,kBAAAC,GAAA,eAAAC,GAAA,eAAAC,GAAA,aAAAC,GAAA,eAAAC,GAAA,kBAAAC,GAAA,yBAAAC,GAAA,kBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,SAAAC,GAAA,kBAAAC,GAAA,mBAAAC,GAAA,cAAAC,GAAA,mBAAAC,GAAA,eAAAC,GAAA,iBAAAC,GAAA,aAAAC,GAAA,sBAAAC,GAAA,mBAAAC,GAAA,mBAAAC,GAAA,aAAAC,GAAA,mBAAAC,GAAA,mBAAAC,GAAA,gBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,qBAAAC,GAAA,gBAAAC,GAAA,kBAAAC,GAAA,qBAAAC,GAAA,kBAAAC,GAAA,gBAAAC,GAAA,kBAAAC,GAAA,cAAAC,GAAA,oBAAAC,GAAA,YAAAC,GAAA,mBAAAC,GAAA,gBAAAC,GAAA,cAAAC,GAAA,oBAAAC,GAAA,mBAAAC,GAAA,eAAAC,GAAA,oBAAAC,GAAA,gBAAAC,GAAA,yBAAAC,GAAA,iBAAAC,GAAA,oBAAAC,GAAA,oBAAAC,GAAA,iBAAAC,GAAA,iBAAAC,GAAA,eAAAC,GAAA,iBAAAC,GAAA,oBAAAC,GAAA,2BAAAC,GAAA,oBAAAC,GAAA,mBAAAC,GAAA,uBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,qBAAAC,GAAA,gBAAAC,GAAA,qBAAAC,GAAA,iBAAAC,GAAA,mBAAAC,GAAA,eAAAC,GAAA,eAAAC,IASO,IAAMC,GAA0B,CACnC,GAAI,qBACJ,OAAQ,kCACR,OAAQ,iDACR,aAAc,oEACd,GAAI,MACJ,WAAY,mBACZ,WAAY,gBAKHC,GAAe,eAEtB,SAAUC,GAAeC,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAMF,EAAY,CACnD,CAFgBI,EAAAH,GAAA,kBAMT,IAAMI,GAAe,eAEtB,SAAUC,GAAeJ,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAMG,EAAY,CACnD,CAFgBD,EAAAE,GAAA,kBAMT,IAAMC,GAAY,YAEnB,SAAUC,GAAYN,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAMK,EAAS,CAChD,CAFgBH,EAAAI,GAAA,eAMV,SAAUC,GAAcP,EAAa,CACvC,OAAOQ,GAAgBR,CAAI,GAAKA,IAAS,WAAaA,IAAS,SAAWA,IAAS,WAAaA,IAAS,SAAWA,IAAS,YAAcA,IAAS,WAAaA,IAAS,UAAYA,IAAS,UAAYA,IAAS,aAAeA,IAAS,WAAaA,IAAS,YAAcA,IAAS,QAAUA,IAAS,QAAUA,IAAS,SAAWA,IAAS,UAAYA,IAAS,QAAW,OAAOA,GAAS,UAAa,qBAAqB,KAAKA,CAAI,CAClb,CAFgBE,EAAAK,GAAA,iBAMV,SAAUC,GAAgBR,EAAa,CACzC,OAAOA,IAAS,UAAYA,IAAS,UAAYA,IAAS,WAAaA,IAAS,QAAUA,IAAS,QACvG,CAFgBE,EAAAM,GAAA,mBAMT,IAAMC,GAAiB,iBAExB,SAAUC,GAAiBV,EAAa,CAC1C,OAAOC,EAAW,WAAWD,EAAMS,EAAc,CACrD,CAFgBP,EAAAQ,GAAA,oBAMT,IAAMC,GAAe,eAEtB,SAAUC,GAAeZ,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAMW,EAAY,CACnD,CAFgBT,EAAAU,GAAA,kBAUT,IAAMC,GAAkB,kBAEzB,SAAUC,GAAkBd,EAAa,CAC3C,OAAOC,EAAW,WAAWD,EAAMa,EAAe,CACtD,CAFgBX,EAAAY,GAAA,qBAUT,IAAMC,GAAe,eAEtB,SAAUC,GAAehB,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAMe,EAAY,CACnD,CAFgBb,EAAAc,GAAA,kBAUT,IAAMC,GAAY,YAEnB,SAAUC,GAAYlB,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAMiB,EAAS,CAChD,CAFgBf,EAAAgB,GAAA,eAUT,IAAMC,GAAiB,iBAExB,SAAUC,GAAiBpB,EAAa,CAC1C,OAAOC,EAAW,WAAWD,EAAMmB,EAAc,CACrD,CAFgBjB,EAAAkB,GAAA,oBAWT,IAAMC,GAAc,cAErB,SAAUC,GAActB,EAAa,CACvC,OAAOC,EAAW,WAAWD,EAAMqB,EAAW,CAClD,CAFgBnB,EAAAoB,GAAA,iBAWT,IAAMC,GAAc,cAErB,SAAUC,GAAcxB,EAAa,CACvC,OAAOC,EAAW,WAAWD,EAAMuB,EAAW,CAClD,CAFgBrB,EAAAsB,GAAA,iBAiBT,IAAMC,GAAU,UAEjB,SAAUC,GAAU1B,EAAa,CACnC,OAAOC,EAAW,WAAWD,EAAMyB,EAAO,CAC9C,CAFgBvB,EAAAwB,GAAA,aAUT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgB5B,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAM2B,EAAa,CACpD,CAFgBzB,EAAA0B,GAAA,mBAUT,IAAMC,GAAe,eAEtB,SAAUC,GAAe9B,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAM6B,EAAY,CACnD,CAFgB3B,EAAA4B,GAAA,kBAYT,IAAMC,GAAY,YAEnB,SAAUC,GAAYhC,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAM+B,EAAS,CAChD,CAFgB7B,EAAA8B,GAAA,eAYT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgBlC,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAMiC,EAAa,CACpD,CAFgB/B,EAAAgC,GAAA,mBAUT,IAAMC,GAAW,WAElB,SAAUC,GAAWpC,EAAa,CACpC,OAAOC,EAAW,WAAWD,EAAMmC,EAAQ,CAC/C,CAFgBjC,EAAAkC,GAAA,cAUT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgBtC,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAMqC,EAAa,CACpD,CAFgBnC,EAAAoC,GAAA,mBAUT,IAAMC,GAAY,YAEnB,SAAUC,GAAYxC,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAMuC,EAAS,CAChD,CAFgBrC,EAAAsC,GAAA,eAUT,IAAMC,GAAqB,qBAE5B,SAAUC,GAAqB1C,EAAa,CAC9C,OAAOC,EAAW,WAAW
D,EAAMyC,EAAkB,CACzD,CAFgBvC,EAAAwC,GAAA,wBAoBT,IAAMC,GAAa,aAEpB,SAAUC,GAAa5C,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAM2C,EAAU,CACjD,CAFgBzC,EAAA0C,GAAA,gBAUT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgB9C,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAM6C,EAAa,CACpD,CAFgB3C,EAAA4C,GAAA,mBAUT,IAAMC,GAAa,aAEpB,SAAUC,GAAahD,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAM+C,EAAU,CACjD,CAFgB7C,EAAA8C,GAAA,gBAYT,IAAMC,GAAa,aAEpB,SAAUC,GAAalD,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAMiD,EAAU,CACjD,CAFgB/C,EAAAgD,GAAA,gBAUT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgBpD,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAMmD,EAAa,CACpD,CAFgBjD,EAAAkD,GAAA,mBAcT,IAAMC,GAAe,eAEtB,SAAUC,GAAetD,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAMqD,EAAY,CACnD,CAFgBnD,EAAAoD,GAAA,kBAWT,IAAMC,GAAO,OAEd,SAAUC,GAAOxD,EAAa,CAChC,OAAOC,EAAW,WAAWD,EAAMuD,EAAI,CAC3C,CAFgBrD,EAAAsD,GAAA,UAaT,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgB1D,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAMyD,EAAa,CACpD,CAFgBvD,EAAAwD,GAAA,mBAUT,IAAMC,GAAY,YAEnB,SAAUC,GAAY5D,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAM2D,EAAS,CAChD,CAFgBzD,EAAA0D,GAAA,eAYT,IAAMC,GAAS,SAEhB,SAAUC,GAAS9D,EAAa,CAClC,OAAOC,EAAW,WAAWD,EAAM6D,EAAM,CAC7C,CAFgB3D,EAAA4D,GAAA,YAST,IAAMC,GAAe,eAEtB,SAAUC,GAAehE,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAM+D,EAAY,CACnD,CAFgB7D,EAAA8D,GAAA,kBAWT,IAAMC,GAAa,aAEpB,SAAUC,GAAalE,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAMiE,EAAU,CACjD,CAFgB/D,EAAAgE,GAAA,gBAUT,IAAMC,GAAiB,iBAExB,SAAUC,GAAiBpE,EAAa,CAC1C,OAAOC,EAAW,WAAWD,EAAMmE,EAAc,CACrD,CAFgBjE,EAAAkE,GAAA,oBAWT,IAAMC,GAAiB,iBAExB,SAAUC,GAAiBtE,EAAa,CAC1C,OAAOC,EAAW,WAAWD,EAAMqE,EAAc,CACrD,CAFgBnE,EAAAoE,GAAA,oBAQT,IAAMC,GAAY,YAEnB,SAAUC,GAAYxE,EAAa,CACrC,OAAOC,EAAW,WAAWD,EAAMuE,EAAS,CAChD,CAFgBrE,EAAAsE,GAAA,eAUT,IAAMC,GAAQ,QAEf,SAAUC,GAAQ1E,EAAa,CACjC,OAAOC,EAAW,WAAWD,EAAMyE,EAAK,CAC5C,CAFgBvE,EAAAwE,GAAA,WAUT,IAAMC,GAAU,UAEjB,SAAUC,GAAU5E,EAAa,CACnC,OAAOC,EAAW,WAAWD,EAAM2E,EAAO,CAC9C,CAFgBzE,EAAA0E,GAAA,aAST,IAAMC,GAAe,eAEtB,SAAUC,GAAe9E,EAAa,CACxC,OAAOC,EAAW,WAAWD,EAAM6E,EAAY,CACnD,CAFgB3E,EAAA4E,GAAA,kBAST,IAAMC,GAAa,aAEpB,SAAUC,GAAahF,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAM+E,EAAU,CACjD,CAFgB7E,EAAA8E,GAAA,gBAUT,IAAMC,GAAW,WAElB,SAAUC,GAAWlF,EAAa,CACpC,OAAOC,EAAW,WAAWD,EAAMiF,EAAQ,CAC/C,CAFgB/E,EAAAgF,GAAA,cAST,IAAMC,GAAuB,uBAE9B,SAAUC,GAAuBpF,EAAa,CAChD,OAAOC,EAAW,WAAWD,EAAMmF,EAAoB,CAC3D,CAFgBjF,EAAAkF,GAAA,0BAST,IAAMC,GAAgB,gBAEvB,SAAUC,GAAgBtF,EAAa,CACzC,OAAOC,EAAW,WAAWD,EAAMqF,EAAa,CACpD,CAFgBnF,EAAAoF,GAAA,mBAST,IAAMC,GAAmB,mBAE1B,SAAUC,GAAmBxF,EAAa,CAC5C,OAAOC,EAAW,WAAWD,EAAMuF,EAAgB,CACvD,CAFgBrF,EAAAsF,GAAA,sBAST,IAAMC,GAAiB,iBAExB,SAAUC,GAAiB1F,EAAa,CAC1C,OAAOC,EAAW,WAAWD,EAAMyF,EAAc,CACrD,CAFgBvF,EAAAwF,GAAA,oBAST,IAAMC,GAAa,aAEpB,SAAUC,GAAa5F,EAAa,CACtC,OAAOC,EAAW,WAAWD,EAAM2F,EAAU,CACjD,CAFgBzF,EAAA0F,GAAA,gBAQT,IAAMC,GAAW,WAElB,SAAUC,GAAW9F,EAAa,CACpC,OAAOC,EAAW,WAAWD,EAAM6F,EAAQ,CAC/C,CAFgB3F,EAAA4F,GAAA,cAqDV,IAAOC,GAAP,cAA2CC,EAAqB,CA7nBtE,MA6nBsE,CAAA9F,EAAA,oCAElE,aAAW,CACP,MAAO,CAAC,kBAAmB,eAAgB,eAAgB,SAAU,eAAgB,eAAgB,YAAa,aAAc,iBAAkB,iBAAkB,YAAa,cAAe,iBAAkB,cAAe,YAAa,UAAW,gBAAiB,QAAS,eAAgB,YAAa,UAAW,gBAAiB,eAAgB,WAAY,gBAAiB,YAAa,qBAAsB,aAAc,gBAAiB,aAAc,aAAc,WAAY,aAAc,gBAAiB,uBAAwB,gBAAiB,eAAgB,mBAAoB,OAAQ,gBAAiB,iBAAkB,YAAa,iBAAkB,aAAc,eAAgB,UAAU,CACrsB,CAEmB,iBAAiB+F,EAAiBC,EAAiB,CAClE,OAAQD,EAAS,CACb,KAAKpC,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACL,KAAKE,GACD,OAAO,KAAK,UAAUhF,GAAiBqF,CAAS,EAEpD,KAAKnF,GACL,KAAKsB,GACL,KAAKc,GACD,OAAO,KAAK,UAAUxC,GAAcuF,CAAS,EAEjD,KAAKjF,GACL,KAAK4B,GACL,KAAKI,GACL,KAAKU,GACD,OAAO,KAAK,UAAUlD,GAAgByF,CAAS,EAEnD,KAAK/E,GACD,OAAO,KAAK,UAAUd,GAAW6F,CAAS,GAAK,KAAK,UAA
UvF,GAAcuF,CAAS,EAEzF,KAAK7E,GACL,KAAKE,GACL,KAAKY,GACL,KAAKM,GACD,OAAO,KAAK,UAAUpC,GAAW6F,CAAS,EAE9C,KAAKrE,GACL,KAAKE,GACL,KAAKwB,GACD,OAAO,KAAK,UAAUpD,GAAc+F,CAAS,EAEjD,KAAKvD,GACD,OAAO,KAAK,UAAU7C,GAAcoG,CAAS,GAAK,KAAK,UAAU/F,GAAc+F,CAAS,EAE5F,KAAK7C,GACD,OAAO,KAAK,UAAUvD,GAAcoG,CAAS,EAEjD,QACI,MAAO,GAGnB,CAEA,iBAAiBC,EAAsB,CACnC,IAAMC,EAAc,GAAGD,EAAQ,UAAU,KAAK,IAAIA,EAAQ,QAAQ,GAClE,OAAQC,EAAa,CACjB,IAAK,cACL,IAAK,sBACL,IAAK,uBACL,IAAK,wBACL,IAAK,qBACD,OAAOjG,GAEX,IAAK,uBACL,IAAK,0BACL,IAAK,gBACD,OAAOL,GAEX,IAAK,uBACD,OAAO2B,GAEX,IAAK,0BACL,IAAK,+BACD,OAAOc,GAEX,IAAK,wBACD,OAAOc,GAEX,QACI,MAAM,IAAI,MAAM,GAAG+C,CAAW,+BAA+B,EAGzE,CAEA,gBAAgBC,EAAY,CACxB,OAAQA,EAAM,CACV,IAAK,kBACD,MAAO,CACH,KAAM,kBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,IAI/B,IAAK,eACD,MAAO,CACH,KAAM,eACN,WAAY,CACR,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,IAIhD,IAAK,YACD,MAAO,CACH,KAAM,YACN,WAAY,CACR,CAAE,KAAM,aAAa,IAIjC,IAAK,iBACD,MAAO,CACH,KAAM,iBACN,WAAY,CACR,CAAE,KAAM,OAAQ,aAAc,EAAK,IAI/C,IAAK,cACD,MAAO,CACH,KAAM,cACN,WAAY,CACR,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,OAAO,IAI3B,IAAK,cACD,MAAO,CACH,KAAM,cACN,WAAY,CACR,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,OAAO,IAI3B,IAAK,UACD,MAAO,CACH,KAAM,UACN,WAAY,CACR,CAAE,KAAM,sBAAuB,aAAc,EAAK,EAClD,CAAE,KAAM,eAAgB,aAAc,CAAA,CAAE,EACxC,CAAE,KAAM,UAAW,aAAc,CAAA,CAAE,EACnC,CAAE,KAAM,aAAc,aAAc,CAAA,CAAE,EACtC,CAAE,KAAM,aAAc,aAAc,EAAK,EACzC,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,QAAS,aAAc,CAAA,CAAE,EACjC,CAAE,KAAM,QAAS,aAAc,CAAA,CAAE,EACjC,CAAE,KAAM,eAAgB,aAAc,CAAA,CAAE,IAIpD,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,MAAM,IAI1B,IAAK,eACD,MAAO,CACH,KAAM,eACN,WAAY,CACR,CAAE,KAAM,MAAM,IAI1B,IAAK,YACD,MAAO,CACH,KAAM,YACN,WAAY,CACR,CAAE,KAAM,aAAc,aAAc,CAAA,CAAE,EACtC,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,aAAc,aAAc,CAAA,CAAE,IAIlD,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,eAAgB,aAAc,EAAK,EAC3C,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,OAAO,IAI3B,IAAK,WACD,MAAO,CACH,KAAM,WACN,WAAY,CACR,CAAE,KAAM,OAAO,IAI3B,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,OAAO,IAI3B,IAAK,YACD,MAAO,CACH,KAAM,YACN,WAAY,CACR,CAAE,KAAM,MAAM,IAI1B,IAAK,qBACD,MAAO,CACH,KAAM,qBACN,WAAY,CACR,CAAE,KAAM,WAAW,IAI/B,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,UAAU,EAClB,CAAE,KAAM,sBAAuB,aAAc,EAAK,EAClD,CAAE,KAAM,YAAY,EACpB,CAAE,KAAM,QAAS,aAAc,EAAK,EACpC,CAAE,KAAM,WAAY,aAAc,EAAK,EACvC,CAAE,KAAM,eAAgB,aAAc,CAAA,CAAE,EACxC,CAAE,KAAM,cAAc,EACtB,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,aAAc,aAAc,CAAA,CAAE,EACtC,CAAE,KAAM,YAAY,EACpB,CAAE,KAAM,WAAY,aAAc,EAAK,IAInD,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,eAAe,IAInC,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,MAAM,IAI1B,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,eAAe,EACvB,CAAE,KAAM,YAAY,EACpB,CAAE,KAAM,SAAS,IAI7B,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,OAAO,IAI3B,IAAK,eACD,MAAO,CACH,KAAM,eACN,WAAY,CACR,CAAE,KAAM,YAAY,EACpB,CAAE,KAAM,WAAY,aAAc,EAAK,EACvC,CAAE,KAAM,SAAU,aAAc,EAAK,EACrC,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,MAAM,IAI1B,IAAK,OACD,MAAO,CACH,KAAM,OACN,WAAY,CACR,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,MAAM,IAI1B,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,cAAc,EACtB,CAAE,KAAM,aAAc,aAAc,EAAK,EACzC,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,MAAM,IAI1B,IAAK,YACD,MAAO,CACH,KAAM,YACN,WAAY,CACR,CAAE,KAAM,QAAS,aAAc,CAAA,CAAE,IAI7C,IAAK,SACD,MAAO,CACH,KAAM,SACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,SAAS,EACjB,CAAE,KAAM,cAAc,EACtB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,UAAU,EAClB,CAAE,KAAM,MAAM,IAI1B,IAAK,eACD,MAAO,CACH,KAAM,eACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,EACpC,CAAE,KAAM,WAAW,IAI/B,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,SAAS,EACjB,CA
AE,KAAM,WAAW,EACnB,CAAE,KAAM,UAAU,EAClB,CAAE,KAAM,UAAU,IAI9B,IAAK,iBACD,MAAO,CACH,KAAM,iBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,MAAM,EACd,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,OAAO,IAI3B,IAAK,iBACD,MAAO,CACH,KAAM,iBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,mBAAoB,aAAc,EAAK,EAC/C,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,UAAU,EAClB,CAAE,KAAM,MAAM,IAI1B,IAAK,YACD,MAAO,CACH,KAAM,YACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,IAI/B,IAAK,QACD,MAAO,CACH,KAAM,QACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,EACpC,CAAE,KAAM,gBAAgB,EACxB,CAAE,KAAM,WAAW,IAI/B,IAAK,UACD,MAAO,CACH,KAAM,UACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,OAAO,IAI3B,IAAK,eACD,MAAO,CACH,KAAM,eACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,UAAU,IAI9B,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,OAAO,IAI3B,IAAK,WACD,MAAO,CACH,KAAM,WACN,WAAY,CACR,CAAE,KAAM,YAAa,aAAc,CAAA,CAAE,EACrC,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,MAAM,IAI1B,IAAK,uBACD,MAAO,CACH,KAAM,uBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,EACpC,CAAE,KAAM,WAAW,IAI/B,IAAK,gBACD,MAAO,CACH,KAAM,gBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,EACpC,CAAE,KAAM,WAAW,IAI/B,IAAK,mBACD,MAAO,CACH,KAAM,mBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,MAAM,IAI1B,IAAK,iBACD,MAAO,CACH,KAAM,iBACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAY,aAAc,CAAA,CAAE,EACpC,CAAE,KAAM,WAAW,IAI/B,IAAK,aACD,MAAO,CACH,KAAM,aACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,EACnB,CAAE,KAAM,UAAU,IAI9B,IAAK,WACD,MAAO,CACH,KAAM,WACN,WAAY,CACR,CAAE,KAAM,aAAa,EACrB,CAAE,KAAM,WAAW,IAI/B,QACI,MAAO,CACH,KAAMA,EACN,WAAY,CAAA,GAI5B,GAGSpG,EAAa,IAAI8F,GC7nC9B,IAAAO,GAAA,GAAAC,GAAAD,GAAA,+BAAAE,GAAA,gBAAAC,GAAA,wBAAAC,GAAA,iBAAAC,GAAA,uBAAAC,GAAA,gBAAAC,GAAA,uBAAAC,GAAA,2BAAAC,GAAA,sBAAAC,GAAA,cAAAC,GAAA,mBAAAC,GAAA,qBAAAC,KAkBM,SAAUC,GAAuBC,EAAa,CAChD,OAAW,CAACC,EAAMC,CAAK,IAAK,OAAO,QAAQF,CAAI,EACtCC,EAAK,WAAW,GAAG,IAChB,MAAM,QAAQC,CAAK,EACnBA,EAAM,QAAQ,CAACC,EAAMC,IAAS,CACtBC,GAAUF,CAAI,IACbA,EAA0B,WAAaH,EACvCG,EAA0B,mBAAqBF,EAC/CE,EAA0B,gBAAkBC,EAErD,CAAC,EACMC,GAAUH,CAAK,IACrBA,EAA2B,WAAaF,EACxCE,EAA2B,mBAAqBD,GAIjE,CAjBgBK,EAAAP,GAAA,0BAwBV,SAAUQ,GAAsCP,EAA2BQ,EAAqC,CAClH,IAAIL,EAAOH,EACX,KAAOG,GAAM,CACT,GAAIK,EAAcL,CAAI,EAClB,OAAOA,EAEXA,EAAOA,EAAK,WAGpB,CATgBG,EAAAC,GAAA,sBAeV,SAAUE,GAAmBT,EAA2BU,EAAkC,CAC5F,IAAIP,EAAOH,EACX,KAAOG,GAAM,CACT,GAAIO,EAAUP,CAAI,EACd,MAAO,GAEXA,EAAOA,EAAK,WAEhB,MAAO,EACX,CATgBG,EAAAG,GAAA,sBAiBV,SAAUE,GAAyCX,EAAa,CAElE,IAAMY,EADWC,GAAab,CAAI,EACV,UACxB,GAAI,CAACY,EACD,MAAM,IAAI,MAAM,2BAA2B,EAE/C,OAAOA,CACX,CAPgBN,EAAAK,GAAA,eAYV,SAAUE,GAAab,EAAa,CACtC,KAAOA,EAAK,YACRA,EAAOA,EAAK,WAEhB,OAAOA,CACX,CALgBM,EAAAO,GAAA,gBAkBV,SAAUC,GAAed,EAAee,EAA0B,CACpE,GAAI,CAACf,EACD,MAAM,IAAI,MAAM,0BAA0B,EAE9C,IAAMgB,EAAQD,GAAS,MAEvB,OAAO,IAAIE,GAA2B,KAAO,CACzC,KAAM,OAAO,KAAKjB,CAAI,EACtB,SAAU,EACV,WAAY,IACZkB,GAAQ,CACR,KAAOA,EAAM,SAAWA,EAAM,KAAK,QAAQ,CACvC,IAAMC,EAAWD,EAAM,KAAKA,EAAM,QAAQ,EAC1C,GAAI,CAACC,EAAS,WAAW,GAAG,EAAG,CAC3B,IAAMjB,EAASF,EAAwBmB,CAAQ,EAC/C,GAAId,GAAUH,CAAK,GAEf,GADAgB,EAAM,WACFE,GAAiBlB,EAAOc,CAAK,EAC7B,MAAO,CAAE,KAAM,GAAO,MAAAd,CAAK,UAExB,MAAM,QAAQA,CAAK,EAAG,CAC7B,KAAOgB,EAAM,WAAahB,EAAM,QAAQ,CACpC,IAAME,EAAQc,EAAM,aACdG,EAAUnB,EAAME,CAAK,EAC3B,GAAIC,GAAUgB,CAAO,GAAKD,GAAiBC,EAASL,CAAK,EACrD,MAAO,CAAE,KAAM,GAAO,MAAOK,CAAO,EAG5CH,EAAM,WAAa,GAG3BA,EAAM,WAEV,OAAOI,EACX,CAAC,CACL,CAnCgBhB,EAAAQ,GAAA,kBAyCV,SAAUS,GAAkBC,EAAeT,EAA0B,CACvE,GAAI,CAACS,EACD,MAAM,IAAI,MAAM,+BAA+B,EAEnD,OAAO,IAAIC,GAAeD,EAAMxB,GA
AQc,GAAed,EAAMe,CAAO,CAAC,CACzE,CALgBT,EAAAiB,GAAA,qBAWV,SAAUG,GAAUF,EAAeT,EAA0B,CAC/D,GAAKS,GAEE,GAAIT,GAAS,OAAS,CAACK,GAAiBI,EAAMT,EAAQ,KAAK,EAE9D,OAAO,IAAIU,GAAeD,EAAM,IAAM,CAAA,CAAE,MAHxC,OAAM,IAAI,MAAM,+BAA+B,EAKnD,OAAO,IAAIC,GAAeD,EAAMxB,GAAQc,GAAed,EAAMe,CAAO,EAAG,CAAE,YAAa,EAAI,CAAE,CAChG,CARgBT,EAAAoB,GAAA,aAUhB,SAASN,GAAiBO,EAAkBX,EAAa,OACrD,GAAI,CAACA,EACD,MAAO,GAEX,IAAMY,GAAYC,EAAAF,EAAQ,YAAQ,MAAAE,IAAA,OAAA,OAAAA,EAAE,MACpC,OAAKD,EAGEE,GAAQF,EAAWZ,CAAK,EAFpB,EAGf,CATSV,EAAAc,GAAA,oBAeH,SAAUW,GAAiB/B,EAAa,CAE1C,OAAO,IAAIiB,GAAiC,KAAO,CAC/C,KAAM,OAAO,KAAKjB,CAAI,EACtB,SAAU,EACV,WAAY,IACZkB,GAAQ,CACR,KAAOA,EAAM,SAAWA,EAAM,KAAK,QAAQ,CACvC,IAAMC,EAAWD,EAAM,KAAKA,EAAM,QAAQ,EAC1C,GAAI,CAACC,EAAS,WAAW,GAAG,EAAG,CAC3B,IAAMjB,EAASF,EAAwBmB,CAAQ,EAC/C,GAAIa,GAAY9B,CAAK,EACjB,OAAAgB,EAAM,WACC,CAAE,KAAM,GAAO,MAAO,CAAE,UAAWhB,EAAO,UAAWF,EAAM,SAAAmB,CAAQ,CAAE,EACzE,GAAI,MAAM,QAAQjB,CAAK,EAAG,CAC7B,KAAOgB,EAAM,WAAahB,EAAM,QAAQ,CACpC,IAAME,EAAQc,EAAM,aACdG,EAAUnB,EAAME,CAAK,EAC3B,GAAI4B,GAAYX,CAAO,EACnB,MAAO,CAAE,KAAM,GAAO,MAAO,CAAE,UAAWA,EAAS,UAAWrB,EAAM,SAAAmB,EAAU,MAAAf,CAAK,CAAE,EAG7Fc,EAAM,WAAa,GAG3BA,EAAM,WAEV,OAAOI,EACX,CAAC,CACL,CA7BgBhB,EAAAyB,GAAA,oBAqCV,SAAUE,GAAoBC,EAAqBC,EAASxB,GAAYuB,CAAU,EAAE,YAAY,MAAK,CACvG,IAAME,EAAoB,CAAA,EAC1B,OAAAV,GAAUS,CAAM,EAAE,QAAQnC,GAAO,CAC7B+B,GAAiB/B,CAAI,EAAE,QAAQqC,GAAU,CACjCA,EAAQ,UAAU,MAAQH,GAC1BE,EAAK,KAAKC,EAAQ,SAAS,CAEnC,CAAC,CACL,CAAC,EACMC,EAAOF,CAAI,CACtB,CAVgB9B,EAAA2B,GAAA,uBAkBV,SAAUM,GAA0BC,EAA2BxC,EAAa,CAC9E,IAAMyC,EAAeD,EAAW,gBAAgBxC,EAAK,KAAK,EACpD0C,EAAc1C,EACpB,QAAWmB,KAAYsB,EAAa,WAE5BtB,EAAS,eAAiB,QAAauB,EAAYvB,EAAS,IAAI,IAAM,SACtEuB,EAAYvB,EAAS,IAAI,EAAIwB,GAAiBxB,EAAS,YAAY,EAG/E,CATgBb,EAAAiC,GAAA,6BAWhB,SAASI,GAAiBC,EAA0B,CAChD,OAAI,MAAM,QAAQA,CAAY,EACnB,CAAC,GAAGA,EAAa,IAAID,EAAgB,CAAC,EAEtCC,CAEf,CANStC,EAAAqC,GAAA,oBAcH,SAAUE,GAAyC7C,EAAS8C,EAAsH,CACpL,IAAMC,EAAuB,CAAE,MAAO/C,EAAK,KAAK,EAEhD,OAAW,CAACC,EAAMC,CAAK,IAAK,OAAO,QAAQF,CAAI,EAC3C,GAAI,CAACC,EAAK,WAAW,GAAG,EACpB,GAAII,GAAUH,CAAK,EACf6C,EAAK9C,CAAI,EAAI4C,GAAY3C,EAAO4C,CAAc,UACvCd,GAAY9B,CAAK,EACxB6C,EAAK9C,CAAI,EAAI6C,EACTC,EACA9C,EACAC,EAAM,SACNA,EAAM,QAAQ,UAEX,MAAM,QAAQA,CAAK,EAAG,CAC7B,IAAM8C,EAAyB,CAAA,EAC/B,QAAW3B,KAAWnB,EACdG,GAAUgB,CAAO,EACjB2B,EAAY,KAAKH,GAAYxB,EAASyB,CAAc,CAAC,EAC9Cd,GAAYX,CAAO,EAC1B2B,EAAY,KACRF,EACIC,EACA9C,EACAoB,EAAQ,SACRA,EAAQ,QAAQ,CACnB,EAGL2B,EAAY,KAAK3B,CAAO,EAGhC0B,EAAK9C,CAAI,EAAI+C,OAEbD,EAAK9C,CAAI,EAAIC,EAKzB,OAAAH,GAAuBgD,CAAI,EACpBA,CACX,CAzCgBzC,EAAAuC,GAAA,eCrQhB,IAAAI,GAAA,GAAAC,GAAAD,GAAA,oBAAAE,GAAA,iBAAAC,GAAA,8BAAAC,GAAA,qBAAAC,GAAA,uBAAAC,GAAA,iBAAAC,GAAA,mBAAAC,GAAA,kBAAAC,KCEM,SAAUC,EAAGC,EAAY,CAC7B,OAAOA,EAAK,WAAW,CAAC,CAC1B,CAFgBC,EAAAF,EAAA,MAIV,SAAUG,GAAeC,EAAeC,EAAQ,CAChD,MAAM,QAAQD,CAAI,EACpBA,EAAK,QAAQ,SAAUE,EAAO,CAC5BD,EAAI,KAAKC,CAAO,CAClB,CAAC,EAEDD,EAAI,KAAKD,CAAI,CAEjB,CARgBF,EAAAC,GAAA,eAUV,SAAUI,GACdC,EACAC,EAAkD,CAElD,GAAID,EAAQC,CAAO,IAAM,GACvB,KAAM,kBAAoBA,EAG5B,IAAMC,EAAaF,EAAQC,CAAO,EAClCD,EAAQC,CAAO,EAAI,EACrB,CAVgBP,EAAAK,GAAA,WAYV,SAAUI,GAA0BC,EAAQ,CAEhD,GAAIA,IAAQ,OACV,MAAM,MAAM,yCAAyC,EAEvD,MAAO,EACT,CANgBV,EAAAS,GAAA,iBASV,SAAUE,IAAuB,CACrC,MAAM,MAAM,yCAAyC,CACvD,CAFgBX,EAAAW,GAAA,2BAIV,SAAUC,GAAYF,EAAqB,CAC/C,OAAOA,EAAI,OAAY,WACzB,CAFgBV,EAAAY,GAAA,eCvCT,IAAMC,GAA4B,CAAA,EACzC,QAASC,EAAIC,EAAG,GAAG,EAAGD,GAAKC,EAAG,GAAG,EAAGD,IAClCD,GAAgB,KAAKC,CAAC,EAGjB,IAAME,GAA0B,CAACD,EAAG,GAAG,CAAC,EAAE,OAAOF,EAAe,EACvE,QAASC,EAAIC,EAAG,GAAG,EAAGD,GAAKC,EAAG,GAAG,EAAGD,IAClCE,GAAc,KAAKF,CAAC,EAGtB,QAASA,EAAIC,EAAG,GAAG,EAAGD,GAAKC,EAAG,GAAG,EAAGD,IAClCE,GAAc,KAAKF,CAAC,EAIf,IAAMG,GAA4B,CACvCF,EAAG,GAA
G,EACNA,EAAG,IAAI,EACPA,EAAG;CAAI,EACPA,EAAG,IAAI,EACPA,EAAG,GAAI,EACPA,EAAG,IAAI,EACPA,EAAG,GAAI,EACPA,EAAG,MAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,EACXA,EAAG,QAAQ,GCZb,IAAMG,GAAkB,cAClBC,GAAiB,QACjBC,GAAuB,QAIhBC,GAAP,KAAmB,CArBzB,MAqByB,CAAAC,EAAA,qBAAzB,aAAA,CACY,KAAA,IAAc,EACd,KAAA,MAAgB,GAChB,KAAA,SAAmB,CA+xB/B,CA7xBY,WAAS,CACjB,MAAO,CACL,IAAK,KAAK,IACV,MAAO,KAAK,MACZ,SAAU,KAAK,SAEnB,CAEU,aAAaC,EAItB,CACC,KAAK,IAAMA,EAAS,IACpB,KAAK,MAAQA,EAAS,MACtB,KAAK,SAAWA,EAAS,QAC3B,CAEO,QAAQC,EAAa,CAE1B,KAAK,IAAM,EACX,KAAK,MAAQA,EACb,KAAK,SAAW,EAEhB,KAAK,YAAY,GAAG,EACpB,IAAMC,EAAQ,KAAK,YAAW,EAC9B,KAAK,YAAY,GAAG,EAEpB,IAAMC,EAAqB,CACzB,KAAM,QACN,IAAK,CAAE,MAAO,KAAK,IAAK,IAAKF,EAAM,MAAM,EACzC,OAAQ,GACR,WAAY,GACZ,UAAW,GACX,QAAS,GACT,OAAQ,IAGV,KAAO,KAAK,aAAY,GACtB,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHG,GAAQD,EAAO,QAAQ,EACvB,MACF,IAAK,IACHC,GAAQD,EAAO,YAAY,EAC3B,MACF,IAAK,IACHC,GAAQD,EAAO,WAAW,EAC1B,MACF,IAAK,IACHC,GAAQD,EAAO,SAAS,EACxB,MACF,IAAK,IACHC,GAAQD,EAAO,QAAQ,EACvB,MAIN,GAAI,KAAK,MAAQ,KAAK,MAAM,OAC1B,MAAM,MAAM,oBAAsB,KAAK,MAAM,UAAU,KAAK,GAAG,CAAC,EAElE,MAAO,CACL,KAAM,UACN,MAAOA,EACP,MAAOD,EACP,IAAK,KAAK,IAAI,CAAC,EAEnB,CAEU,aAAW,CACnB,IAAMG,EAAO,CAAA,EACPC,EAAQ,KAAK,IAInB,IAFAD,EAAK,KAAK,KAAK,YAAW,CAAE,EAErB,KAAK,SAAQ,IAAO,KACzB,KAAK,YAAY,GAAG,EACpBA,EAAK,KAAK,KAAK,YAAW,CAAE,EAG9B,MAAO,CAAE,KAAM,cAAe,MAAOA,EAAM,IAAK,KAAK,IAAIC,CAAK,CAAC,CACjE,CAEU,aAAW,CACnB,IAAMC,EAAQ,CAAA,EACRD,EAAQ,KAAK,IAEnB,KAAO,KAAK,OAAM,GAChBC,EAAM,KAAK,KAAK,KAAI,CAAE,EAGxB,MAAO,CAAE,KAAM,cAAe,MAAOA,EAAO,IAAK,KAAK,IAAID,CAAK,CAAC,CAClE,CAEU,MAAI,CACZ,OAAI,KAAK,YAAW,EACX,KAAK,UAAS,EAEd,KAAK,KAAI,CAEpB,CAEU,WAAS,CACjB,IAAMA,EAAQ,KAAK,IACnB,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACH,MAAO,CACL,KAAM,cACN,IAAK,KAAK,IAAIA,CAAK,GAEvB,IAAK,IACH,MAAO,CAAE,KAAM,YAAa,IAAK,KAAK,IAAIA,CAAK,CAAC,EAElD,IAAK,KACH,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACH,MAAO,CACL,KAAM,eACN,IAAK,KAAK,IAAIA,CAAK,GAEvB,IAAK,IACH,MAAO,CACL,KAAM,kBACN,IAAK,KAAK,IAAIA,CAAK,GAIzB,MAAM,MAAM,0BAA0B,EAExC,IAAK,IACH,KAAK,YAAY,GAAG,EAEpB,IAAIE,EACJ,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHA,EAAO,YACP,MACF,IAAK,IACHA,EAAO,oBACP,MAEJC,GAAcD,CAAI,EAElB,IAAME,EAAc,KAAK,YAAW,EAEpC,YAAK,YAAY,GAAG,EAEb,CACL,KAAMF,EACN,MAAOE,EACP,IAAK,KAAK,IAAIJ,CAAK,GAIzB,OAAOK,GAAuB,CAChC,CAEU,WACRC,EAA0B,GAAK,CAE/B,IAAIC,EACEP,EAAQ,KAAK,IACnB,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHO,EAAQ,CACN,QAAS,EACT,OAAQ,KAEV,MACF,IAAK,IACHA,EAAQ,CACN,QAAS,EACT,OAAQ,KAEV,MACF,IAAK,IACHA,EAAQ,CACN,QAAS,EACT,OAAQ,GAEV,MACF,IAAK,IACH,IAAMC,EAAU,KAAK,qBAAoB,EACzC,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHD,EAAQ,CACN,QAASC,EACT,OAAQA,GAEV,MACF,IAAK,IACH,IAAIC,EACA,KAAK,QAAO,GACdA,EAAS,KAAK,qBAAoB,EAClCF,EAAQ,CACN,QAASC,EACT,OAAQC,IAGVF,EAAQ,CACN,QAASC,EACT,OAAQ,KAGZ,KAAK,YAAY,GAAG,EACpB,MAIJ,GAAIF,IAAmB,IAAQC,IAAU,OACvC,OAEFJ,GAAcI,CAAK,EACnB,MAKJ,GAAI,EAAAD,IAAmB,IAAQC,IAAU,SAKrCJ,GAAcI,CAAK,EACrB,OAAI,KAAK,SAAS,CAAC,IAAM,KACvB,KAAK,YAAY,GAAG,EACpBA,EAAM,OAAS,IAEfA,EAAM,OAAS,GAGjBA,EAAM,KAAO,aACbA,EAAM,IAAM,KAAK,IAAIP,CAAK,EACnBO,CAEX,CAEU,MAAI,CACZ,IAAIG,EACEV,EAAQ,KAAK,IACnB,OAAQ,KAAK,SAAQ,EAAI,CACvB,IAAK,IACHU,EAAO,KAAK,OAAM,EAClB,MACF,IAAK,KACHA,EAAO,KAAK,WAAU,EACtB,MACF,IAAK,IACHA,EAAO,KAAK,eAAc,EAC1B,MACF,IAAK,IACHA,EAAO,KAAK,MAAK,EACjB,MAQJ,OALIA,IAAS,QAAa,KAAK,mBAAkB,IAC/CA,EAAO,KAAK,iBAAgB,GAI1BP,GAAoBO,CAAI,GAC1BA,EAAK,IAAM,KAAK,IAAIV,CAAK,EAErB,KAAK,aAAY,IACnBU,EAAK,WAAa,KAAK,WAAU,GAG5BA,GA
IFL,GAAuB,CAChC,CAEU,QAAM,CACd,YAAK,YAAY,GAAG,EACb,CACL,KAAM,MACN,WAAY,GACZ,MAAO,CAACM,EAAG;CAAI,EAAGA,EAAG,IAAI,EAAGA,EAAG,QAAQ,EAAGA,EAAG,QAAQ,CAAC,EAE1D,CAEU,YAAU,CAGlB,OAFA,KAAK,YAAY,IAAI,EAEb,KAAK,SAAQ,EAAI,CACvB,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,OAAO,KAAK,kBAAiB,EAC/B,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,OAAO,KAAK,qBAAoB,EAClC,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,OAAO,KAAK,kBAAiB,EAC/B,IAAK,IACH,OAAO,KAAK,wBAAuB,EACrC,IAAK,IACH,OAAO,KAAK,iBAAgB,EAC9B,IAAK,IACH,OAAO,KAAK,sBAAqB,EACnC,IAAK,IACH,OAAO,KAAK,gCAA+B,EAC7C,QACE,OAAO,KAAK,mBAAkB,EAEpC,CAEU,mBAAiB,CAGzB,MAAO,CAAE,KAAM,qBAAsB,MAFvB,KAAK,gBAAe,CAEe,CACnD,CAEU,sBAAoB,CAC5B,IAAIC,EACAC,EAAa,GACjB,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHD,EAAME,GACN,MACF,IAAK,IACHF,EAAME,GACND,EAAa,GACb,MACF,IAAK,IACHD,EAAMG,GACN,MACF,IAAK,IACHH,EAAMG,GACNF,EAAa,GACb,MACF,IAAK,IACHD,EAAMI,GACN,MACF,IAAK,IACHJ,EAAMI,GACNH,EAAa,GACb,MAIJ,OAAIV,GAAcS,CAAG,EACZ,CAAE,KAAM,MAAO,MAAOA,EAAK,WAAYC,CAAU,EAGnDR,GAAuB,CAChC,CAEU,mBAAiB,CACzB,IAAIY,EACJ,OAAQ,KAAK,QAAO,EAAI,CACtB,IAAK,IACHA,EAAaN,EAAG,IAAI,EACpB,MACF,IAAK,IACHM,EAAaN,EAAG;CAAI,EACpB,MACF,IAAK,IACHM,EAAaN,EAAG,IAAI,EACpB,MACF,IAAK,IACHM,EAAaN,EAAG,GAAI,EACpB,MACF,IAAK,IACHM,EAAaN,EAAG,IAAI,EACpB,MAIJ,OAAIR,GAAcc,CAAU,EACnB,CAAE,KAAM,YAAa,MAAOA,CAAU,EAGxCZ,GAAuB,CAChC,CAEU,yBAAuB,CAC/B,KAAK,YAAY,GAAG,EACpB,IAAMa,EAAS,KAAK,QAAO,EAC3B,GAAI,WAAW,KAAKA,CAAM,IAAM,GAC9B,MAAM,MAAM,UAAU,EAIxB,MAAO,CAAE,KAAM,YAAa,MADTA,EAAO,YAAW,EAAG,WAAW,CAAC,EAAI,EACX,CAC/C,CAEU,kBAAgB,CAGxB,YAAK,YAAY,GAAG,EACb,CAAE,KAAM,YAAa,MAAOP,EAAG,IAAI,CAAC,CAC7C,CAEU,uBAAqB,CAC7B,YAAK,YAAY,GAAG,EACb,KAAK,eAAe,CAAC,CAC9B,CAEU,iCAA+B,CACvC,YAAK,YAAY,GAAG,EACb,KAAK,eAAe,CAAC,CAC9B,CAEU,oBAAkB,CAG1B,IAAMQ,EAAc,KAAK,QAAO,EAChC,MAAO,CAAE,KAAM,YAAa,MAAOR,EAAGQ,CAAW,CAAC,CACpD,CAEU,2BAAyB,CACjC,OAAQ,KAAK,SAAQ,EAAI,CAEvB,IAAK;EAEL,IAAK,KAEL,IAAK,SAEL,IAAK,SAEL,IAAK,KAEL,IAAK,IACH,MAAM,MAAM,KAAK,EACnB,QACE,IAAMC,EAAW,KAAK,QAAO,EAC7B,MAAO,CAAE,KAAM,YAAa,MAAOT,EAAGS,CAAQ,CAAC,EAErD,CAEU,gBAAc,CACtB,IAAMR,EAA0B,CAAA,EAC5BC,EAAa,GAOjB,IANA,KAAK,YAAY,GAAG,EAChB,KAAK,SAAS,CAAC,IAAM,MACvB,KAAK,YAAY,GAAG,EACpBA,EAAa,IAGR,KAAK,YAAW,GAAI,CACzB,IAAMQ,EAAO,KAAK,UAAS,EACrBC,EAAmBD,EAAK,OAAS,YACvC,GAAIE,GAAYF,CAAI,GAAK,KAAK,YAAW,EAAI,CAC3C,KAAK,YAAY,GAAG,EACpB,IAAMG,EAAK,KAAK,UAAS,EACnBC,EAAiBD,EAAG,OAAS,YAGnC,GAAID,GAAYC,CAAE,EAAG,CACnB,GAAIA,EAAG,MAAQH,EAAK,MAClB,MAAM,MAAM,uCAAuC,EAErDT,EAAI,KAAK,CAAE,KAAMS,EAAK,MAAO,GAAIG,EAAG,KAAK,CAAE,OAG3CE,GAAYL,EAAK,MAAOT,CAAG,EAC3BA,EAAI,KAAKD,EAAG,GAAG,CAAC,EAChBe,GAAYF,EAAG,MAAOZ,CAAG,OAG3Bc,GAAYL,EAAK,MAAOT,CAAG,EAI/B,YAAK,YAAY,GAAG,EAEb,CAAE,KAAM,MAAO,WAAYC,EAAY,MAAOD,CAAG,CAC1D,CAEU,WAAS,CACjB,OAAQ,KAAK,SAAQ,EAAI,CAEvB,IAAK,IAEL,IAAK;EAEL,IAAK,KAEL,IAAK,SAEL,IAAK,SACH,MAAM,MAAM,KAAK,EACnB,IAAK,KACH,OAAO,KAAK,YAAW,EACzB,QACE,OAAO,KAAK,0BAAyB,EAE3C,CAEU,aAAW,CAEnB,OADA,KAAK,YAAY,IAAI,EACb,KAAK,SAAQ,EAAI,CAGvB,IAAK,IACH,YAAK,YAAY,GAAG,EACb,CAAE,KAAM,YAAa,MAAOD,EAAG,IAAQ,CAAC,EACjD,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,OAAO,KAAK,qBAAoB,EAClC,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,OAAO,KAAK,kBAAiB,EAC/B,IAAK,IACH,OAAO,KAAK,wBAAuB,EACrC,IAAK,IACH,OAAO,KAAK,iBAAgB,EAC9B,IAAK,IACH,OAAO,KAAK,sBAAqB,EACnC,IAAK,IACH,OAAO,KAAK,gCAA+B,EAC7C,QACE,OAAO,KAAK,mBAAkB,EAEpC,CAEU,OAAK,CACb,IAAIgB,EAAY,GAEhB,OADA,KAAK,YAAY,GAAG,EACZ,KAAK,SAAS,CAAC,EAAG,CACxB,IAAK,IACH,KAAK,YAAY,GAAG,EACpB,KAAK,YAAY,GAAG,EACpBA,EAAY,GACZ,MACF,QACE,KAAK,WACL,MAEJ,IAAM/B,EAAQ,KAAK,YAAW,EAC9B,KAAK,YAAY,GAAG,EAEpB,IAAMgC,EAA+B,CACnC
,KAAM,QACN,UAAWD,EACX,MAAO/B,GAGT,OAAI+B,IACFC,EAAS,IAAS,KAAK,UAGlBA,CACT,CAEU,iBAAe,CACvB,IAAIC,EAAS,KAAK,QAAO,EAIzB,GAAItC,GAAqB,KAAKsC,CAAM,IAAM,GACxC,MAAM,MAAM,8BAA8B,EAG5C,KAAOvC,GAAe,KAAK,KAAK,SAAS,CAAC,CAAC,GACzCuC,GAAU,KAAK,QAAO,EAGxB,OAAO,SAASA,EAAQ,EAAE,CAC5B,CAEU,sBAAoB,CAC5B,IAAIA,EAAS,KAAK,QAAO,EACzB,GAAIvC,GAAe,KAAKuC,CAAM,IAAM,GAClC,MAAM,MAAM,sBAAsB,EAGpC,KAAOvC,GAAe,KAAK,KAAK,SAAS,CAAC,CAAC,GACzCuC,GAAU,KAAK,QAAO,EAGxB,OAAO,SAASA,EAAQ,EAAE,CAC5B,CAEU,kBAAgB,CACxB,IAAMT,EAAW,KAAK,QAAO,EAC7B,OAAQA,EAAU,CAEhB,IAAK;EAEL,IAAK,KAEL,IAAK,SAEL,IAAK,SAEL,IAAK,IAEL,IAAK,IAEL,IAAK,KAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEL,IAAK,IAEH,MAAM,MAAM,KAAK,EACnB,QACE,MAAO,CAAE,KAAM,YAAa,MAAOT,EAAGS,CAAQ,CAAC,EAErD,CACU,cAAY,CACpB,OAAQ,KAAK,SAAS,CAAC,EAAG,CACxB,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACH,MAAO,GACT,QACE,MAAO,GAEb,CAEU,aAAW,CACnB,OAAO,KAAK,SAAQ,IAAO,KAAO,KAAK,YAAY,CAAC,CACtD,CAEU,SAAO,CACf,OAAO9B,GAAe,KAAK,KAAK,SAAS,CAAC,CAAC,CAC7C,CAEU,YAAYwC,EAAU,EAAC,CAC/B,OAAQ,KAAK,SAASA,CAAO,EAAG,CAC9B,IAAK,IACL,IAAK;EACL,IAAK,KACL,IAAK,SACL,IAAK,SACH,MAAO,GACT,QACE,MAAO,GAEb,CAEU,QAAM,CACd,OAAO,KAAK,OAAM,GAAM,KAAK,YAAW,CAC1C,CAEU,QAAM,CACd,GAAI,KAAK,mBAAkB,EACzB,MAAO,GAGT,OAAQ,KAAK,SAAS,CAAC,EAAG,CACxB,IAAK,IACL,IAAK,KACL,IAAK,IAEL,IAAK,IACH,MAAO,GACT,QACE,MAAO,GAEb,CAEU,aAAW,CACnB,OAAQ,KAAK,SAAS,CAAC,EAAG,CACxB,IAAK,IACL,IAAK,IACH,MAAO,GAET,IAAK,KACH,OAAQ,KAAK,SAAS,CAAC,EAAG,CACxB,IAAK,IACL,IAAK,IACH,MAAO,GACT,QACE,MAAO,GAGb,IAAK,IACH,OACE,KAAK,SAAS,CAAC,IAAM,MACpB,KAAK,SAAS,CAAC,IAAM,KAAO,KAAK,SAAS,CAAC,IAAM,KAEtD,QACE,MAAO,GAEb,CAEU,cAAY,CACpB,IAAMC,EAAY,KAAK,UAAS,EAChC,GAAI,CACF,OAAO,KAAK,WAAW,EAAI,IAAM,YACvB,CACV,MAAO,WAEP,KAAK,aAAaA,CAAS,EAE/B,CAEU,oBAAkB,CAC1B,OAAQ,KAAK,SAAQ,EAAI,CACvB,IAAK,IACL,IAAK,IACL,IAAK,KACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK;EACL,IAAK,KACL,IAAK,SACL,IAAK,SACH,MAAO,GACT,QACE,MAAO,GAEb,CAEU,eAAeC,EAAe,CACtC,IAAIC,EAAY,GAChB,QAAS,EAAI,EAAG,EAAID,EAAS,IAAK,CAChC,IAAME,EAAU,KAAK,QAAO,EAC5B,GAAI7C,GAAgB,KAAK6C,CAAO,IAAM,GACpC,MAAM,MAAM,+BAA+B,EAE7CD,GAAaC,EAGf,MAAO,CAAE,KAAM,YAAa,MADX,SAASD,EAAW,EAAE,CACI,CAC7C,CAEU,SAASH,EAAU,EAAC,CAC5B,OAAO,KAAK,MAAM,KAAK,IAAMA,CAAO,CACtC,CAEU,SAAO,CACf,IAAMV,EAAW,KAAK,SAAS,CAAC,EAChC,YAAK,YAAY,MAAS,EACnBA,CACT,CAEU,YAAYe,EAAwB,CAC5C,GAAIA,IAAS,QAAa,KAAK,MAAM,KAAK,GAAG,IAAMA,EACjD,MAAM,MACJ,cACEA,EACA,iBACA,KAAK,MAAM,KAAK,GAAG,EACnB,gBACA,KAAK,GAAG,EAId,GAAI,KAAK,KAAO,KAAK,MAAM,OACzB,MAAM,MAAM,yBAAyB,EAEvC,KAAK,KACP,CAEU,IAAInC,EAAa,CACzB,MAAO,CAAE,MAAOA,EAAO,IAAK,KAAK,GAAG,CACtC,GCvzBI,IAAOoC,GAAP,KAAwB,CAA9B,MAA8B,CAAAC,EAAA,0BACrB,cAAcC,EAAgB,CACnC,QAAWC,KAAOD,EAAM,CACtB,IAAME,EAASF,EAAaC,CAAG,EAE3BD,EAAK,eAAeC,CAAG,IACrBC,EAAM,OAAS,OACjB,KAAK,MAAMA,CAAK,EACP,MAAM,QAAQA,CAAK,GAC5BA,EAAM,QAASC,GAAY,CACzB,KAAK,MAAMA,CAAQ,CACrB,EAAG,IAAI,GAIf,CAEO,MAAMH,EAAmB,CAC9B,OAAQA,EAAK,KAAM,CACjB,IAAK,UACH,KAAK,aAAaA,CAAI,EACtB,MACF,IAAK,QACH,KAAK,WAAWA,CAAI,EACpB,MACF,IAAK,cACH,KAAK,iBAAiBA,CAAI,EAC1B,MACF,IAAK,cACH,KAAK,iBAAiBA,CAAI,EAC1B,MACF,IAAK,cACH,KAAK,iBAAiBA,CAAI,EAC1B,MACF,IAAK,YACH,KAAK,eAAeA,CAAI,EACxB,MACF,IAAK,eACH,KAAK,kBAAkBA,CAAI,EAC3B,MACF,IAAK,kBACH,KAAK,qBAAqBA,CAAI,EAC9B,MACF,IAAK,YACH,KAAK,eAAeA,CAAI,EACxB,MACF,IAAK,oBACH,KAAK,uBAAuBA,CAAI,EAChC,MACF,IAAK,YACH,KAAK,eAAeA,CAAI,EACxB,MACF,IAAK,MACH,KAAK,SAASA,CAAI,EAClB,MACF,IAAK,QACH,KAAK,WAAWA,CAAI,EACpB,MACF,IAAK,qBACH,KAAK,wBAAwBA,CAAI,EACjC,MACF,IAAK,aACH,KAAK,gBAAgBA,CAAI,EACzB,MAGJ,KAAK,cAAcA,CAAI,CACzB,CAEO,aAAaA,EAAmB,CAAS,CAEzC,WAAWA,EAAiB,CAAS,CAErC,iBAAiBA,EAAiB
,CAAS,CAE3C,iBAAiBA,EAAiB,CAAS,CAG3C,iBAAiBA,EAAe,CAAS,CAEzC,eAAeA,EAAe,CAAS,CAEvC,kBAAkBA,EAAe,CAAS,CAE1C,qBAAqBA,EAAe,CAAS,CAE7C,eAAeA,EAAe,CAAS,CAEvC,uBAAuBA,EAAe,CAAS,CAG/C,eAAeA,EAAe,CAAS,CAEvC,SAASA,EAAS,CAAS,CAE3B,WAAWA,EAAW,CAAS,CAE/B,wBAAwBA,EAAwB,CAAS,CAEzD,gBAAgBA,EAAgB,CAAS,GJzG3C,IAAMI,GAAiB,UAExBC,GAAe,IAAIC,GAenBC,GAAN,cAAoCC,EAAiB,CA1BrD,MA0BqD,CAAAC,EAAA,8BAArD,aAAA,qBAEY,KAAA,WAAa,GAEb,KAAA,eAA2B,CAAA,EACnC,KAAA,UAAY,EAoEhB,CAjEI,IAAI,UAAQ,CACR,OAAO,KAAK,eAAe,KAAK,EAAE,CACtC,CAEA,MAAMC,EAAa,CACf,KAAK,UAAY,GACjB,KAAK,MAAQA,EACb,KAAK,YAAc,GACnB,KAAK,WAAa,GAClB,KAAK,eAAiB,CAAA,CAC1B,CAES,WAAWC,EAAW,CACvBA,EAAK,aACL,KAAK,WAAa,GAClB,KAAK,eAAiB,CAAA,EAE9B,CAES,eAAeA,EAAe,CACnC,IAAMC,EAAO,OAAO,aAAaD,EAAK,KAAK,EAI3C,GAHI,CAAC,KAAK,WAAaC,IAAS;IAC5B,KAAK,UAAY,IAEjBD,EAAK,WACL,KAAK,WAAa,GAClB,KAAK,eAAiB,CAAA,MACnB,CACH,IAAME,EAAcC,GAAaF,CAAI,EACrC,KAAK,eAAe,KAAKC,CAAW,EAChC,KAAK,aACL,KAAK,aAAeA,GAGhC,CAES,SAASF,EAAS,CACvB,GAAI,CAAC,KAAK,UAAW,CACjB,IAAMI,EAAM,KAAK,MAAM,UAAUJ,EAAK,IAAI,MAAOA,EAAK,IAAI,GAAG,EACvDD,EAAQ,IAAI,OAAOK,CAAG,EAC5B,KAAK,UAAY,EAAQ;EAAK,MAAML,CAAK,EAE7C,GAAIC,EAAK,WACL,KAAK,WAAa,GAClB,KAAK,eAAiB,CAAA,MACnB,CACH,IAAMI,EAAM,KAAK,MAAM,UAAUJ,EAAK,IAAI,MAAOA,EAAK,IAAI,GAAG,EAC7D,KAAK,eAAe,KAAKI,CAAG,EACxB,KAAK,aACL,KAAK,aAAeA,GAGhC,CAES,cAAcJ,EAAgB,CAC/BA,EAAK,OAAS,SAGAA,EACJ,YAId,MAAM,cAAcA,CAAI,CAC5B,GAGEK,GAAU,IAAIT,GAEd,SAAUU,GAAiBC,EAAuB,CACpD,GAAI,CACI,OAAOA,GAAW,WAClBA,EAASA,EAAO,QAEpBA,EAAS,IAAIA,CAAM,IACnB,IAAMC,EAAUd,GAAa,QAAQa,CAAM,EACrCE,EAA+C,CAAA,EACrD,QAAWC,KAAeF,EAAQ,MAAM,MACpCH,GAAQ,MAAME,CAAM,EACpBF,GAAQ,MAAMK,CAAW,EACzBD,EAAM,KAAK,CACP,MAAOJ,GAAQ,YACf,IAAKA,GAAQ,SAChB,EAEL,OAAOI,OACH,CACJ,MAAO,CAAA,EAEf,CApBgBX,EAAAQ,GAAA,oBAsBV,SAAUK,GAAmBJ,EAAuB,CACtD,GAAI,CACA,OAAI,OAAOA,GAAW,WAClBA,EAAS,IAAI,OAAOA,CAAM,GAE9BA,EAASA,EAAO,SAAQ,EACxBF,GAAQ,MAAME,CAAM,EAEpBF,GAAQ,MAAMX,GAAa,QAAQa,CAAM,CAAC,EACnCF,GAAQ,eACX,CACJ,MAAO,GAEf,CAbgBP,EAAAa,GAAA,sBAeV,SAAUC,GAAaC,EAAsB,CAE/C,OADe,OAAOA,GAAU,SAAW,IAAI,OAAOA,CAAK,EAAIA,GACjD,KAAK,GAAG,CAC1B,CAHgBf,EAAAc,GAAA,gBAKV,SAAUT,GAAaU,EAAa,CACtC,OAAOA,EAAM,QAAQ,sBAAuB,MAAM,CACtD,CAFgBf,EAAAK,GAAA,gBAIV,SAAUW,GAA0BC,EAAe,CACrD,OAAO,MAAM,UAAU,IAAI,KAAKA,EAASC,GACrC,KAAK,KAAKA,CAAM,EAAI,IAAIA,EAAO,YAAW,CAAE,GAAGA,EAAO,YAAW,CAAE,IAAMb,GAAaa,CAAM,CAAC,EAC/F,KAAK,EAAE,CACb,CAJgBlB,EAAAgB,GAAA,6BAYV,SAAUG,GAAelB,EAAwBmB,EAAa,CAChE,IAAMC,EAAUC,GAAcrB,CAAK,EAC7BsB,EAAQH,EAAM,MAAMC,CAAO,EACjC,MAAO,CAAC,CAACE,GAASA,EAAM,CAAC,EAAE,OAAS,CACxC,CAJgBvB,EAAAmB,GAAA,kBAYV,SAAUG,GAAcrB,EAAsB,CAC5C,OAAOA,GAAU,WACjBA,EAAQ,IAAI,OAAOA,CAAK,GAE5B,IAAMuB,EAAKvB,EAAOwB,EAASxB,EAAM,OAC7ByB,EAAI,EAER,SAASC,GAAO,CACZ,IAAIC,EAAS,GACTC,EAEJ,SAASC,EAAUC,EAAe,CAC9BH,GAAUH,EAAO,OAAOC,EAAGK,CAAO,EAClCL,GAAKK,CACT,CAHS/B,EAAA8B,EAAA,aAKT,SAASE,EAAeD,EAAe,CACnCH,GAAU,MAAQH,EAAO,OAAOC,EAAGK,CAAO,EAAI,MAC9CL,GAAKK,CACT,CAEA,IALS/B,EAAAgC,EAAA,kBAKFN,EAAID,EAAO,QACd,OAAQA,EAAOC,CAAC,EAAG,CACf,IAAK,KACD,OAAQD,EAAOC,EAAI,CAAC,EAAG,CACnB,IAAK,IACDM,EAAe,CAAC,EAChB,MACJ,IAAK,IACDA,EAAe,CAAC,EAChB,MACJ,IAAK,IACGR,EAAG,QACCC,EAAOC,EAAI,CAAC,IAAM,IAClBM,EAAeP,EAAO,QAAQ,IAAKC,CAAC,EAAIA,EAAI,CAAC,EAE7CM,EAAe,CAAC,EAGpBA,EAAe,CAAC,EAEpB,MACJ,IAAK,IACL,IAAK,IACGR,EAAG,QACHQ,EAAeP,EAAO,QAAQ,IAAKC,CAAC,EAAIA,EAAI,CAAC,EAE7CM,EAAe,CAAC,EAEpB,MACJ,IAAK,IACDA,EAAeP,EAAO,QAAQ,IAAKC,CAAC,EAAIA,EAAI,CAAC,EAC7C,MACJ,QACIM,EAAe,CAAC,EAChB,MAER,MAEJ,IAAK,IACDH,EAAM,mBACNA,EAAI,UAAYH,EAChBG,EAAMA,EAAI,KAAKJ,CAAM,GAAK,CAAA,EAC1BO,EAAeH,EAAI,CAAC,EAAE,MAAM,EAC5B,MAEJ,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACDC,EAAU,CAAC,EACX,MACJ,IAAK,IACDD,EAAM,gBACNA,EAAI,UAAY
H,EAChBG,EAAMA,EAAI,KAAKJ,CAAM,EACjBI,EACAC,EAAUD,EAAI,CAAC,EAAE,MAAM,EAEvBG,EAAe,CAAC,EAEpB,MACJ,IAAK,IACD,GAAIP,EAAOC,EAAI,CAAC,IAAM,IAClB,OAAQD,EAAOC,EAAI,CAAC,EAAG,CACnB,IAAK,IACDE,GAAU,MACVF,GAAK,EACLE,GAAUD,EAAO,EAAK,MACtB,MACJ,IAAK,IACDC,GAAU,MACVF,GAAK,EACLE,GAAUD,EAAO,EAAK,IACtB,MACJ,IAAK,IACDE,EAAMH,EACNA,GAAK,EACLC,EAAO,EACPC,GAAUH,EAAO,OAAOI,EAAKH,EAAIG,CAAG,EACpC,MACJ,IAAK,IACD,OAAQJ,EAAOC,EAAI,CAAC,EAAG,CACnB,IAAK,IACL,IAAK,IACDG,EAAMH,EACNA,GAAK,EACLC,EAAO,EACPC,GAAUH,EAAO,OAAOI,EAAKH,EAAIG,CAAG,EACpC,MACJ,QACIC,EAAUL,EAAO,QAAQ,IAAKC,CAAC,EAAIA,EAAI,CAAC,EACxCE,GAAUD,EAAO,EAAK,MACtB,MAER,WAGRG,EAAU,CAAC,EACXF,GAAUD,EAAO,EAAK,MAE1B,MACJ,IAAK,IACD,QAAED,EACKE,EACX,QACII,EAAe,CAAC,EAChB,MAIZ,OAAOJ,CACX,CA/HS,OAAA5B,EAAA2B,EAAA,WAiIF,IAAI,OAAOA,EAAO,EAAI1B,EAAM,KAAK,CAC5C,CAzIgBD,EAAAsB,GAAA,iBJ3JV,SAAUW,GAAaC,EAAoB,CAC7C,OAAOA,EAAQ,MAAM,KAAK,GAASC,GAAa,CAAC,GAAK,EAAE,KAAK,CACjE,CAFgBC,EAAAH,GAAA,gBAOV,SAAUI,GAAeH,EAAoB,CAC/C,OAAOA,EAAQ,MAAM,OAAQ,GAAiCI,GAAe,CAAC,GAAK,EAAE,MAAM,CAC/F,CAFgBF,EAAAC,GAAA,kBAYV,SAAUE,GAAqBL,EAAsBM,EAAqB,CAC5E,IAAMC,EAAY,IAAI,IAChBC,EAAYT,GAAaC,CAAO,EACtC,GAAI,CAACQ,EACD,OAAO,IAAI,IAAIR,EAAQ,KAAK,EAGhC,IAAMS,EAAe,CAACD,CAA6B,EAAE,OAAOL,GAAeH,CAAO,CAAC,EACnF,QAAWU,KAAQD,EACfE,GAAQD,EAAMH,EAAWD,CAAY,EAGzC,IAAMM,EAAQ,IAAI,IAClB,QAAWF,KAAQV,EAAQ,OACnBO,EAAU,IAAIG,EAAK,IAAI,GAAUN,GAAeM,CAAI,GAAKA,EAAK,SAC9DE,EAAM,IAAIF,CAAI,EAGtB,OAAOE,CACX,CAnBgBV,EAAAG,GAAA,wBAqBhB,SAASM,GAAQD,EAAwBG,EAAyBP,EAAqB,CACnFO,EAAW,IAAIH,EAAK,IAAI,EACxBI,GAAkBJ,CAAI,EAAE,QAAQK,GAAO,CACnC,GAAQC,GAAWD,CAAI,GAAMT,GAAoBW,GAAmBF,CAAI,EAAI,CACxE,IAAMG,EAAUH,EAAK,KAAK,IACtBG,GAAW,CAACL,EAAW,IAAIK,EAAQ,IAAI,GACvCP,GAAQO,EAASL,EAAYP,CAAY,EAGrD,CAAC,CACL,CAVSJ,EAAAS,GAAA,WAoBH,SAAUQ,GAA0BC,EAA4B,CAClE,GAAIA,EAAS,SACT,OAAOA,EAAS,SACb,GAAIA,EAAS,KAAK,IAAK,CAC1B,IAAMC,EAAgBC,GAAmBF,EAAS,KAAK,GAAG,EAC1D,OAAOC,GAAe,SAG9B,CARgBnB,EAAAiB,GAAA,6BAeV,SAAUI,GAAkBC,EAA8B,CAC5D,OAAOA,EAAa,QAAU,CAACC,GAAcD,CAAY,EAAE,KAAK,GAAG,CACvE,CAFgBtB,EAAAqB,GAAA,qBAUV,SAAUG,GAAqBX,EAA2BY,EAA4B,CACxF,MAAI,CAACZ,GAAQ,CAACY,EACH,CAAA,EAEJC,GAA6Bb,EAAMY,EAAUZ,EAAK,QAAS,EAAI,CAC1E,CALgBb,EAAAwB,GAAA,wBAgBV,SAAUG,GAAoBd,EAA2BY,EAA8BG,EAAc,CACvG,GAAI,CAACf,GAAQ,CAACY,EACV,OAEJ,IAAMI,EAAQH,GAA6Bb,EAAMY,EAAUZ,EAAK,QAAS,EAAI,EAC7E,GAAIgB,EAAM,SAAW,EAGrB,OAAID,IAAU,OACVA,EAAQ,KAAK,IAAI,EAAG,KAAK,IAAIA,EAAOC,EAAM,OAAS,CAAC,CAAC,EAErDD,EAAQ,EAELC,EAAMD,CAAK,CACtB,CAdgB5B,EAAA2B,GAAA,uBAgBhB,SAASD,GAA6Bb,EAAeY,EAAkBK,EAA8BC,EAAc,CAC/G,GAAI,CAACA,EAAO,CACR,IAAMC,EAAcC,GAAmBpB,EAAK,cAAmBqB,EAAY,EAC3E,GAAIF,GAAeA,EAAY,UAAYP,EACvC,MAAO,CAACZ,CAAI,EAGpB,OAAIsB,GAAmBtB,CAAI,GAAKA,EAAK,UAAYiB,EACtCjB,EAAK,QAAQ,QAAQuB,GAAKV,GAA6BU,EAAGX,EAAUK,EAAS,EAAK,CAAC,EAEvF,CAAA,CACX,CAXS9B,EAAA0B,GAAA,gCAmBH,SAAUW,GAAoBxB,EAA2ByB,EAAe,CAC1E,OAAKzB,EAGE0B,GAA4B1B,EAAMyB,EAASzB,GAAM,OAAO,EAFpD,CAAA,CAGf,CALgBb,EAAAqC,GAAA,uBAgBV,SAAUG,GAAmB3B,EAA2ByB,EAAiBV,EAAc,CACzF,GAAI,CAACf,EACD,OAEJ,IAAMgB,EAAQU,GAA4B1B,EAAMyB,EAASzB,GAAM,OAAO,EACtE,GAAIgB,EAAM,SAAW,EAGrB,OAAID,IAAU,OACVA,EAAQ,KAAK,IAAI,EAAG,KAAK,IAAIA,EAAOC,EAAM,OAAS,CAAC,CAAC,EAErDD,EAAQ,EAELC,EAAMD,CAAK,CACtB,CAdgB5B,EAAAwC,GAAA,sBAgBV,SAAUD,GAA4B1B,EAAeyB,EAAiBR,EAA4B,CACpG,GAAIjB,EAAK,UAAYiB,EACjB,MAAO,CAAA,EAEX,GAAQW,GAAU5B,EAAK,aAAa,GAAKA,EAAK,cAAc,QAAUyB,EAClE,MAAO,CAACzB,CAAI,EAEhB,IAAM6B,EAAeC,GAAU9B,CAAI,EAAE,SAAQ,EACzC+B,EACEC,EAA0B,CAAA,EAChC,EAEI,IADAD,EAASF,EAAa,KAAI,EACtB,CAACE,EAAO,KAAM,CACd,IAAME,EAAYF,EAAO,MACrBE,EAAU,UAAYhB,EACdW,GAAUK,EAAU,aAAa,GAAKA,EAAU,cAAc,QAAUR,GAC5EO,EAAa,KAAKC,CAAS,EAG/BJ,EAAa,MAAK,QAGrB,CAACE,EAAO,MACjB,OAAOC,CACX,CAxBgB7C,EAAAuC,GAAA,+BAgCV,S
AAUQ,GAAeC,EAAgB,OAC3C,IAAMC,EAAUD,EAAQ,QAGxB,KAAOC,MAAYC,EAAAF,EAAQ,aAAS,MAAAE,IAAA,OAAA,OAAAA,EAAE,UAAS,CAC3C,IAAMC,EAAalB,GAAmBe,EAAQ,cAAmBd,EAAY,EAC7E,GAAIiB,EACA,OAAOA,EAEXH,EAAUA,EAAQ,UAG1B,CAZgBhD,EAAA+C,GAAA,kBAmBV,SAAU3B,GAAmBgC,EAAsB,CACrD,IAAIC,EAAqBD,EACzB,OAAQE,GAAeD,CAAS,IAEpBE,GAASF,EAAU,UAAU,EAEjCA,EAAYA,EAAU,WAAW,WACtBtD,GAAasD,EAAU,UAAU,EAE5CA,EAAYA,EAAU,WAEtBG,GAAkBH,EAAU,UAAU,GAGvCI,GAA2BL,EAAMC,EAAW,IAAI,GAAK,CAChE,CAfgBrD,EAAAoB,GAAA,sBAiBhB,SAASqC,GAA2BL,EAAwBC,EAAoBK,EAAwD,OAEpI,SAASC,EAAG9C,EAAe+C,EAAyB,CAChD,IAAIC,EAGJ,OAFyB5B,GAAmBpB,EAAUqB,EAAY,IAG9D2B,EAAkBJ,GAA2BG,EAASA,EAASF,CAAK,GAExEA,EAAM,IAAIN,EAAMS,CAAe,EACxBA,CACX,CAEA,GAXS7D,EAAA2D,EAAA,MAWLD,EAAM,IAAIN,CAAI,EACd,OAAOM,EAAM,IAAIN,CAAI,EAEzBM,EAAM,IAAIN,EAAM,MAAS,EACzB,QAAWvC,KAAQD,GAAkByC,CAAS,EAAG,CAC7C,GAAQnB,GAAarB,CAAI,GAAKA,EAAK,QAAQ,YAAW,IAAO,OACzD,OAAA6C,EAAM,IAAIN,EAAMvC,CAAI,EACbA,EACJ,GAAQC,GAAWD,CAAI,GAASd,GAAac,EAAK,KAAK,GAAG,EAC7D,OAAO8C,EAAG9C,EAAMA,EAAK,KAAK,GAAG,EAC1B,GAAQiD,GAAajD,CAAI,IAAK,GAAAqC,EAAArC,EAAK,WAAO,MAAAqC,IAAA,SAAAA,EAAE,KAC/C,OAAOS,EAAG9C,EAAMA,EAAK,QAAQ,GAAG,EAI5C,CA5BSb,EAAAyD,GAAA,8BA8BH,SAAUM,GAAmBjC,EAA4B,CAC3D,IAAMkC,EAASlC,EAAQ,WACvB,GAAQmC,GAAQD,CAAM,EAAG,CACrB,IAAME,EAAWF,EAAO,SAClBpC,EAAQsC,EAAS,QAAQpC,CAAO,EACtC,QAAS,EAAIF,EAAQ,EAAG,GAAK,EAAG,IAAK,CACjC,IAAMuC,EAAOD,EAAS,CAAC,EACvB,GAAQX,GAASY,CAAI,EACjB,OAAOA,EACJ,CACH,IAAMC,EAASxD,GAAkBsD,EAAS,CAAC,CAAC,EAAE,KAASX,EAAQ,EAC/D,GAAIa,EACA,OAAOA,IAKvB,GAAQC,GAAkBL,CAAM,EAC5B,OAAOD,GAAmBC,CAAM,CAIxC,CAtBgBhE,EAAA+D,GAAA,sBA2BV,SAAUO,GAAsBC,EAA2BzC,EAA6B,CAC1F,OAAOyC,IAAgB,KAAOA,IAAgB,KAAYN,GAAQnC,CAAO,GAAK,EAAQA,EAAQ,cAClG,CAFgB9B,EAAAsE,GAAA,yBAIV,SAAUE,GAAmBD,EAAyB,CACxD,OAAOA,IAAgB,KAAOA,IAAgB,GAClD,CAFgBvE,EAAAwE,GAAA,sBAIV,SAAUC,GAAgBC,EAAmB,CAC/C,OAAOA,IAAa,IACxB,CAFgB1E,EAAAyE,GAAA,mBAQV,SAAUE,GAAenE,EAAoB,CAC/C,OAAOoE,GAAuBpE,EAAM,IAAI,GAAK,CACjD,CAFgBR,EAAA2E,GAAA,kBAIhB,SAASC,GAAuBpE,EAAsBqE,EAA4B,CAC9E,GAAIA,EAAQ,IAAIrE,CAAI,EAChB,MAAO,GAEPqE,EAAQ,IAAIrE,CAAI,EAEpB,QAAWK,KAAQD,GAAkBJ,CAAI,EACrC,GAAQM,GAAWD,CAAI,GAKnB,GAJI,CAACA,EAAK,KAAK,KAIPd,GAAac,EAAK,KAAK,GAAG,GAAK,CAAC+D,GAAuB/D,EAAK,KAAK,IAAKgE,CAAO,EACjF,MAAO,OAER,IAAQ3C,GAAarB,CAAI,EAC5B,MAAO,GACJ,GAAQ0C,GAAS1C,CAAI,EACxB,MAAO,GAGf,MAAO,EAAQL,EAAK,UACxB,CAtBSR,EAAA4E,GAAA,0BAwBH,SAAUE,GAAW1B,EAAc,CACrC,OAAO2B,GAAmB3B,EAAK,KAAM,IAAI,GAAK,CAClD,CAFgBpD,EAAA8E,GAAA,cAIhB,SAASC,GAAmB3B,EAA0ByB,EAAgC,CAClF,GAAIA,EAAQ,IAAIzB,CAAI,EAChB,MAAO,GAIX,GAFIyB,EAAQ,IAAIzB,CAAI,EAEZ4B,GAAY5B,CAAI,EACpB,MAAO,GACJ,GAAQ6B,GAAgB7B,CAAI,EAC/B,MAAO,GACJ,GAAQ8B,GAAY9B,CAAI,EAC3B,OAAOA,EAAK,MAAM,MAAMhB,GAAK2C,GAAmB3C,EAAGyC,CAAO,CAAC,EACxD,GAAQf,GAAaV,CAAI,EAAG,CAC/B,GAAIA,EAAK,gBAAkB,OACvB,MAAO,GACJ,GAAIA,EAAK,aAAe,OAC3B,MAAO,GACJ,GAAIA,EAAK,UAAY,OAAW,CACnC,IAAM+B,EAAM/B,EAAK,QAAQ,IACzB,OAAQgC,GAAOD,CAAG,EACPJ,GAAmBI,EAAI,KAAMN,CAAO,EAEpC,OAGX,OAAO,OAGX,OAAO,EAEf,CA9BS7E,EAAA+E,GAAA,sBAgCH,SAAUM,GAAoB7E,EAAoB,CACpD,GAAIA,EAAK,aACL,OAAOA,EAAK,aAAa,KACtB,GAAIA,EAAK,SACZ,OAAOA,EAAK,SACT,GAAIA,EAAK,WAAY,CACxB,IAAMoD,EAAUpD,EAAK,WAAW,IAChC,GAAGoD,EAAS,CAER,GAAQ7D,GAAa6D,CAAO,EACxB,OAAOA,EAAQ,KACX,GAAO0B,GAAY1B,CAAO,GAASwB,GAAOxB,CAAO,EACrD,OAAOA,EAAQ,MAK/B,CAjBgB5D,EAAAqF,GAAA,uBAmBV,SAAUE,GAAYnC,EAAmC,OAC3D,GAAQrD,GAAaqD,CAAI,EACrB,OAAOuB,GAAevB,CAAI,EAAIA,EAAK,MAAOF,EAAAmC,GAAoBjC,CAAI,KAAC,MAAAF,IAAA,OAAAA,EAAIE,EAAK,KACzE,GAAQkC,GAAYlC,CAAI,GAASgC,GAAOhC,CAAI,GAASoC,GAAapC,CAAI,EACzE,OAAOA,EAAK,KACT,GAAQG,GAASH,CAAI,EAAG,CAC3B,IAAMqC,EAAaC,GAActC,CAAI,EACrC,GAAIqC,EACA,OAAOA,UAEAnC,GAAeF,CAAI,EAC9B,OAAOA,EAAK,KAEhB,MAAM,IAAI,MAAM,iCAAiC,CACrD,CAdgBpD,EAAAuF,GAAA,eAg
BV,SAAUG,GAActB,EAAkB,OAC5C,GAAIA,EAAO,aACP,OAAOA,EAAO,aAAa,KACxB,GAAI,GAAAlB,EAAAkB,EAAO,QAAI,MAAAlB,IAAA,SAAAA,EAAE,IACpB,OAAOqC,GAAYnB,EAAO,KAAK,GAAG,CAG1C,CAPgBpE,EAAA0F,GAAA,iBASV,SAAUC,GAAYnF,EAAsB,WAC9C,OAAQN,GAAeM,CAAI,GAChBoF,GAAA1C,EAAA1C,EAAK,QAAI,MAAA0C,IAAA,OAAA,OAAAA,EAAE,QAAI,MAAA0C,IAAA,OAAAA,EAAI,SAEnBjB,GAAenE,CAAI,EAAIA,EAAK,MAAOqF,EAAAR,GAAoB7E,CAAI,KAAC,MAAAqF,IAAA,OAAAA,EAAIrF,EAAK,IAEpF,CANgBR,EAAA2F,GAAA,eAQV,SAAUpE,GAAcD,EAA8B,CACxD,IAAMwE,EAAe,CACjB,EAAG,GACH,EAAG,GACH,EAAG,IAEDC,EAASC,GAAuB1E,EAAa,WAAYwE,CAAK,EAC9DG,EAAW,OAAO,QAAQH,CAAK,EAAE,OAAO,CAAC,CAAC,CAAEI,CAAK,IAAMA,CAAK,EAAE,IAAI,CAAC,CAACC,CAAI,IAAMA,CAAI,EAAE,KAAK,EAAE,EACjG,OAAO,IAAI,OAAOJ,EAAQE,CAAQ,CACtC,CATgBjG,EAAAuB,GAAA,iBAYhB,IAAM6E,GAAW,SAAS,OAQ1B,SAASJ,GAAuBlE,EAA8BgE,EAAa,CACvE,GAAQO,GAAuBvE,CAAO,EAClC,OAAOwE,GAA4BxE,CAAO,EACvC,GAAQyE,GAAgBzE,CAAO,EAClC,OAAO0E,GAAqB1E,CAAO,EAChC,GAAQ2E,GAAiB3E,CAAO,EACnC,OAAO4E,GAAsB5E,CAAO,EACjC,GAAQf,GAAmBe,CAAO,EAAG,CACxC,IAAMtB,EAAOsB,EAAQ,KAAK,IAC1B,GAAI,CAACtB,EACD,MAAM,IAAI,MAAM,yBAAyB,EAE7C,OAAOmG,GAAgBX,GAAuBxF,EAAK,UAAU,EAAG,CAC5D,YAAasB,EAAQ,YACrB,UAAWA,EAAQ,UACtB,MACE,IAAQ8E,GAAe9E,CAAO,EACjC,OAAO+E,GAAmB/E,CAAO,EAC9B,GAAQgF,GAAahF,CAAO,EAC/B,OAAOiF,GAAkBjF,CAAO,EAC7B,GAAQkF,GAAalF,CAAO,EAAG,CAClC,IAAMmF,EAAYnF,EAAQ,MAAM,YAAY,GAAG,EACzCiE,EAASjE,EAAQ,MAAM,UAAU,EAAGmF,CAAS,EAC7CC,EAAapF,EAAQ,MAAM,UAAUmF,EAAY,CAAC,EACxD,OAAInB,IACAA,EAAM,EAAIoB,EAAW,SAAS,GAAG,EACjCpB,EAAM,EAAIoB,EAAW,SAAS,GAAG,EACjCpB,EAAM,EAAIoB,EAAW,SAAS,GAAG,GAE9BP,GAAgBZ,EAAQ,CAC3B,YAAajE,EAAQ,YACrB,UAAWA,EAAQ,UACnB,KAAM,GACT,MACE,IAAQqF,GAAWrF,CAAO,EAC7B,OAAO6E,GAAgBP,GAAU,CAC7B,YAAatE,EAAQ,YACrB,UAAWA,EAAQ,UACtB,EAED,MAAM,IAAI,MAAM,6BAA6BA,GAAS,KAAK,EAAE,GAErE,CA1CS9B,EAAAgG,GAAA,0BA4CT,SAASM,GAA4Bc,EAAsC,CACvE,OAAOT,GAAgBS,EAAa,SAAS,IAAI,GAAKpB,GAAuB,CAAC,CAAC,EAAE,KAAK,GAAG,EAAG,CACxF,YAAaoB,EAAa,YAC1B,UAAWA,EAAa,UAC3B,CACL,CALSpH,EAAAsG,GAAA,+BAOT,SAASE,GAAqBa,EAAwB,CAClD,OAAOV,GAAgBU,EAAM,SAAS,IAAI,GAAKrB,GAAuB,CAAC,CAAC,EAAE,KAAK,EAAE,EAAG,CAChF,YAAaqB,EAAM,YACnB,UAAWA,EAAM,UACpB,CACL,CALSrH,EAAAwG,GAAA,wBAOT,SAASO,GAAkBO,EAAqB,CAC5C,OAAOX,GAAgB,GAAGP,EAAQ,KAAKJ,GAAuBsB,EAAM,QAAQ,CAAC,GAAI,CAC7E,YAAaA,EAAM,YACnB,UAAWA,EAAM,UACpB,CACL,CALStH,EAAA+G,GAAA,qBAOT,SAASF,GAAmBU,EAAwB,CAChD,OAAOZ,GAAgB,MAAMX,GAAuBuB,EAAO,QAAQ,CAAC,IAAInB,EAAQ,KAAM,CAClF,YAAamB,EAAO,YACpB,UAAWA,EAAO,UACrB,CACL,CALSvH,EAAA6G,GAAA,sBAOT,SAASH,GAAsBc,EAAyB,CACpD,OAAIA,EAAM,MACCb,GAAgB,IAAIc,GAAeD,EAAM,IAAI,CAAC,IAAIC,GAAeD,EAAM,KAAK,CAAC,IAAK,CACrF,YAAaA,EAAM,YACnB,UAAWA,EAAM,UACjB,KAAM,GACT,EAEEb,GAAgBc,GAAeD,EAAM,IAAI,EAAG,CAC/C,YAAaA,EAAM,YACnB,UAAWA,EAAM,UACjB,KAAM,GACT,CACL,CAbSxH,EAAA0G,GAAA,yBAeT,SAASe,GAAenF,EAAoB,CACxC,OAAOoF,GAAapF,EAAQ,KAAK,CACrC,CAFStC,EAAAyH,GAAA,kBAIT,SAASd,GAAgBgB,EAAeC,EAIvC,OAIG,OAHIA,EAAQ,OAAS,IAASA,EAAQ,aAClCD,EAAQ,KAAIzE,EAAA0E,EAAQ,aAAS,MAAA1E,IAAA,OAAAA,EAAI,EAAE,GAAGyE,CAAK,KAE3CC,EAAQ,YACD,GAAGD,CAAK,GAAGC,EAAQ,WAAW,GAElCD,CACX,CAZS3H,EAAA2G,GAAA,mBS/gBH,SAAUkB,GAAoBC,EAA6B,CAC7D,IAAMC,EAAkB,CAAA,EAClBC,EAAUF,EAAS,QACzB,QAAWG,KAAQD,EAAQ,MACnBE,GAAeD,CAAI,GAAKE,GAAkBF,CAAI,GAAKG,GAAmBC,GAAcJ,CAAI,CAAC,GACzFF,EAAM,KAAKE,EAAK,IAAI,EAG5B,MAAO,CACH,sBAAuBF,EACvB,WAAYO,GAEpB,CAZgBC,EAAAV,GAAA,uBC3BV,SAAUW,GAAYC,EAAW,CAEjC,SAAW,QAAQ,OACrB,QAAQ,MAAM,UAAUA,CAAG,EAAE,CAEjC,CALgBC,EAAAF,GAAA,eAOV,SAAUG,GAAcF,EAAW,CAEnC,SAAW,QAAQ,MAErB,QAAQ,KAAK,YAAYA,CAAG,EAAE,CAElC,CANgBC,EAAAC,GAAA,iBCPV,SAAUC,GAASC,EAAa,CACpC,IAAMC,EAAQ,IAAI,KAAI,EAAG,QAAO,EAC1BC,EAAMF,EAAI,EAGhB,MAAO,CAAE,KAFG,IAAI,KAAI,EAAG,QAAO,EACVC,EACE,MAAOC,CAAG,CAClC,CANgBC,EAAAJ,GAAA,SCCV,SAAUK,GAAiBC,EAAiB,CA
ChD,SAASC,GAAe,CAAI,CAAnBC,EAAAD,EAAA,mBAGTA,EAAgB,UAAYD,EAC5B,IAAMG,EAAe,IAAKF,EAE1B,SAASG,GAAU,CACjB,OAAO,OAAOD,EAAa,GAC7B,CAFS,OAAAD,EAAAE,EAAA,cAMTA,EAAU,EACVA,EAAU,EAIIJ,KAMV,MAAMA,CAAY,CACxB,CAzBgBE,EAAAH,GAAA,oBCShB,SAASM,GAAWC,EAAkB,CACpC,OAAIC,GAAcD,CAAO,EAChBA,EAAQ,MAERA,EAAQ,IAEnB,CANSE,EAAAH,GAAA,cAST,SAASE,GACPE,EAAc,CAEd,OAAOC,GAASD,EAAI,KAAK,GAAKA,EAAI,QAAU,EAC9C,CAJSD,EAAAD,GAAA,iBAMH,IAAgBI,GAAhB,KAAkC,CAzBxC,MAyBwC,CAAAH,EAAA,2BAGtC,IAAW,YAAU,CACnB,OAAO,KAAK,WACd,CACA,IAAW,WAAWI,EAAU,CAC9B,KAAK,YAAcA,CACrB,CAEA,YAAsBC,EAAgB,CAAhB,KAAA,YAAAA,CAAmB,CAEzC,OAAOC,EAAqB,CAC1BA,EAAQ,MAAM,IAAI,EAClBC,EAAQ,KAAK,WAAaC,GAAQ,CAChCA,EAAK,OAAOF,CAAO,CACrB,CAAC,CACH,GAGWG,EAAP,cACIN,EAAkB,CA9C5B,MA8C4B,CAAAH,EAAA,oBAQ1B,YAAYU,EAKX,CACC,MAAM,CAAA,CAAE,EARH,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,CAEA,IAAI,WAAWC,EAAyB,CAExC,CAEA,IAAI,YAAU,CACZ,OAAI,KAAK,iBAAmB,OACnB,KAAK,eAAe,WAEtB,CAAA,CACT,CAEA,OAAOR,EAAqB,CAC1BA,EAAQ,MAAM,IAAI,CAEpB,GAGWS,GAAP,cAAoBZ,EAAkB,CApF5C,MAoF4C,CAAAH,EAAA,aAI1C,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EAPnB,KAAA,QAAkB,GAQvBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWG,EAAP,cAA2Bb,EAAkB,CArGnD,MAqGmD,CAAAH,EAAA,oBAGjD,YAAYU,EAGX,CACC,MAAMA,EAAQ,UAAU,EANnB,KAAA,kBAA6B,GAOlCC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWI,EAAP,cACId,EAAkB,CArH5B,MAqH4B,CAAAH,EAAA,eAM1B,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EARnB,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWK,EAAP,cACIf,EAAkB,CAzI5B,MAyI4B,CAAAH,EAAA,4BAM1B,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EARnB,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWM,EAAP,cACIhB,EAAkB,CA7J5B,MA6J4B,CAAAH,EAAA,yCAO1B,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EARnB,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWO,EAAP,cACIjB,EAAkB,CAlL5B,MAkL4B,CAAAH,EAAA,mBAO1B,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EARnB,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWQ,EAAP,cACIlB,EAAkB,CAvM5B,MAuM4B,CAAAH,EAAA,gCAO1B,YAAYU,EAIX,CACC,MAAMA,EAAQ,UAAU,EARnB,KAAA,IAAc,EASnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWS,EAAP,cACInB,EAA+B,CA5NzC,MA4NyC,CAAAH,EAAA,oBAQvC,IAAW,YAAU,CACnB,OAAO,KAAK,WACd,CACA,IAAW,WAAWI,EAAoB,CACxC,KAAK,YAAcA,CACrB,CAEA,YAAYM,EAMX,CACC,MAAMA,EAAQ,UAAU,EAnBnB,KAAA,IAAc,EACd,KAAA,kBAA6B,GAC7B,KAAA,cAAyB,GAkB9BC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,GAGWU,EAAP,KAAe,CA1PrB,MA0PqB,CAAAvB,EAAA,iBAKnB,YAAYU,EAIX,CANM,KAAA,IAAc,EAOnBC,GACE,KACAC,GAAOF,EAAUG,GAAMA,IAAM,MAAS,CAAC,CAE3C,CAEA,OAAOP,EAAqB,CAC1BA,EAAQ,MAAM,IAAI,CACpB,GAgDI,SAAUkB,GAAiBC,EAAgB,CAC/C,OAAOC,EAAID,EAAUE,EAAmB,CAC1C,CAFgB3B,EAAAwB,GAAA,oBAIV,SAAUG,GAAoBC,EAAiB,CACnD,SAASC,EAAkBf,EAAyB,CAClD,OAAOY,EAAIZ,EAAYa,EAAmB,CAC5C,CAEA,GAJS3B,EAAA6B,EAAA,qBAILD,aAAgBnB,EAAa,CAC/B,IAAMqB,EAAgD,CACpD,KAAM,cACN,KAAMF,EAAK,gBACX,IAAKA,EAAK,KAGZ,OAAI1B,GAAS0B,EAAK,KAAK,IACrBE,EAAsB,MAAQF,EAAK,OAG9BE,MACF,IAAIF,aAAgBZ,EACzB,MAAyB,CACvB,KAAM,cACN,WAAYa,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBX,EACzB,MAAyB,CACvB,KAAM,SACN,IAAKW,EAAK,IACV,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBV,EACzB,MAAyB,CACvB,KAAM,sBACN,IAAKU,EAAK,IACV,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBT,EACzB,MAAyC,CACvC,KAAM,mCACN,IAAKS,EAAK,IACV,UACED,GAAoB,IAAIJ,EAAS,CAAE,aAAcK,EAAK,SAAS,CAAE,CAAC,EAEpE,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBP,EACzB,MAAyC,CACvC,KAAM,0BACN,IAAKO,EAAK,IACV,UACED,GAAoB,IAAIJ,EAAS,CAAE,aAAcK,EAAK,SAAS,CAAE,CAAC,EAEpE,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBR,EACzB,MAAyB,CACvB,KAAM,aACN,IAAKQ,EAAK,IACV,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBN,EACzB,MAAyB,CACvB,KA
AM,cACN,IAAKM,EAAK,IACV,WAAYC,EAAkBD,EAAK,UAAU,GAE1C,GAAIA,aAAgBL,EAAU,CACnC,IAAMQ,EAA0C,CAC9C,KAAM,WACN,KAAMH,EAAK,aAAa,KACxB,MAAO/B,GAAW+B,EAAK,YAAY,EACnC,IAAKA,EAAK,KAGR1B,GAAS0B,EAAK,KAAK,IACrBG,EAAmB,cAAgBH,EAAK,OAG1C,IAAMI,EAAUJ,EAAK,aAAa,QAClC,OAAIA,EAAK,aAAa,UACpBG,EAAmB,QAAUE,GAASD,CAAO,EACnCA,EAAS,OACfA,GAGCD,MACF,IAAIH,aAAgBb,GACzB,MAA4B,CAC1B,KAAM,OACN,KAAMa,EAAK,KACX,QAASA,EAAK,QACd,WAAYC,EAAkBD,EAAK,UAAU,GAI/C,MAAM,MAAM,sBAAsB,GAEtC,CA/FgB5B,EAAA2B,GAAA,uBClTV,IAAgBO,GAAhB,KAA2B,CAdjC,MAciC,CAAAC,EAAA,oBACxB,MAAMC,EAAiB,CAC5B,IAAMC,EAAeD,EACrB,OAAQC,EAAQ,YAAa,CAC3B,KAAKC,EACH,OAAO,KAAK,iBAAiBD,CAAO,EACtC,KAAKE,EACH,OAAO,KAAK,iBAAiBF,CAAO,EACtC,KAAKG,EACH,OAAO,KAAK,YAAYH,CAAO,EACjC,KAAKI,EACH,OAAO,KAAK,yBAAyBJ,CAAO,EAC9C,KAAKK,EACH,OAAO,KAAK,sCAAsCL,CAAO,EAC3D,KAAKM,EACH,OAAO,KAAK,6BAA6BN,CAAO,EAClD,KAAKO,EACH,OAAO,KAAK,gBAAgBP,CAAO,EACrC,KAAKQ,EACH,OAAO,KAAK,iBAAiBR,CAAO,EACtC,KAAKS,EACH,OAAO,KAAK,cAAcT,CAAO,EACnC,KAAKU,GACH,OAAO,KAAK,UAAUV,CAAO,EAE/B,QACE,MAAM,MAAM,sBAAsB,EAExC,CAGO,iBAAiBD,EAAiB,CAAQ,CAG1C,iBAAiBA,EAAiB,CAAQ,CAG1C,YAAYA,EAAY,CAAQ,CAGhC,gBAAgBA,EAAgB,CAAQ,CAGxC,yBAAyBA,EAAyB,CAAQ,CAG1D,sCACLA,EAAsC,CAChC,CAGD,6BAA6BA,EAA6B,CAAQ,CAGlE,iBAAiBA,EAAiB,CAAQ,CAG1C,cAAcA,EAAc,CAAQ,CAGpC,UAAUA,EAAU,CAAQ,GC1D/B,SAAUY,GACdC,EAAiB,CAEjB,OACEA,aAAgBC,GAChBD,aAAgBE,GAChBF,aAAgBG,GAChBH,aAAgBI,GAChBJ,aAAgBK,GAChBL,aAAgBM,GAChBN,aAAgBO,GAChBP,aAAgBQ,EAEpB,CAbgBC,EAAAV,GAAA,kBAeV,SAAUW,GACdV,EACAW,EAAgC,CAAA,EAAE,CAMlC,OAHEX,aAAgBE,GAChBF,aAAgBG,GAChBH,aAAgBM,EAET,GAMLN,aAAgBY,EAEXC,GAAmBb,EAAM,WAAac,GACpCJ,GAAeI,EAASH,CAAc,CAC9C,EACQX,aAAgBe,GAAeC,GAASL,EAAgBX,CAAI,EAE9D,GACEA,aAAgBiB,IACrBjB,aAAgBe,GAClBJ,EAAe,KAAKX,CAAI,EAEnBkB,GACgBlB,EAAM,WAC1Bc,GACQJ,GAAeI,EAASH,CAAc,CAC9C,GAGI,EAEX,CApCgBF,EAAAC,GAAA,kBAsCV,SAAUS,GACdnB,EAAiB,CAEjB,OAAOA,aAAgBY,CACzB,CAJgBH,EAAAU,GAAA,mBAMV,SAAUC,GAAqBpB,EAA+B,CAElE,GAAIA,aAAgBe,EAClB,MAAO,UACF,GAAIf,aAAgBE,EACzB,MAAO,SACF,GAAIF,aAAgBY,EACzB,MAAO,KACF,GAAIZ,aAAgBI,EACzB,MAAO,eACF,GAAIJ,aAAgBK,EACzB,MAAO,mBACF,GAAIL,aAAgBM,EACzB,MAAO,WACF,GAAIN,aAAgBG,EACzB,MAAO,OACF,GAAIH,aAAgBO,EACzB,MAAO,UAGP,MAAM,MAAM,sBAAsB,CAEtC,CAtBgBE,EAAAW,GAAA,wBC1DV,IAAgBC,GAAhB,KAA0B,CAjBhC,MAiBgC,CAAAC,EAAA,mBAC9B,KAAKC,EAAqCC,EAAkB,CAAA,EAAE,CAC5DC,EAAQF,EAAK,WAAY,CAACG,EAAsBC,IAAS,CACvD,IAAMC,EAAWC,GAAKN,EAAK,WAAYI,EAAQ,CAAC,EAEhD,GAAID,aAAmBI,EACrB,KAAK,YAAYJ,EAASE,EAAUJ,CAAQ,UACnCE,aAAmBK,EAC5B,KAAK,aAAaL,EAASE,EAAUJ,CAAQ,UACpCE,aAAmBM,EAC5B,KAAK,SAASN,EAASE,EAAUJ,CAAQ,UAChCE,aAAmBO,EAC5B,KAAK,WAAWP,EAASE,EAAUJ,CAAQ,UAClCE,aAAmBQ,EAC5B,KAAK,eAAeR,EAASE,EAAUJ,CAAQ,UACtCE,aAAmBS,EAC5B,KAAK,kBAAkBT,EAASE,EAAUJ,CAAQ,UACzCE,aAAmBU,EAC5B,KAAK,YAAYV,EAASE,EAAUJ,CAAQ,UACnCE,aAAmBW,EAC5B,KAAK,SAASX,EAASE,EAAUJ,CAAQ,UAChCE,aAAmBY,EAC5B,KAAK,OAAOZ,EAASE,EAAUJ,CAAQ,MAEvC,OAAM,MAAM,sBAAsB,CAEtC,CAAC,CACH,CAEA,aACEe,EACAX,EACAJ,EAAuB,CAChB,CAET,YACEgB,EACAZ,EACAJ,EAAuB,CAChB,CAET,SACEiB,EACAb,EACAJ,EAAuB,CAGvB,IAAMkB,EAAad,EAAS,OAAOJ,CAAQ,EAC3C,KAAK,KAAKiB,EAAeC,CAAU,CACrC,CAEA,WACEC,EACAf,EACAJ,EAAuB,CAGvB,IAAMkB,EAAad,EAAS,OAAOJ,CAAQ,EAC3C,KAAK,KAAKmB,EAAiBD,CAAU,CACvC,CAEA,eACEE,EACAhB,EACAJ,EAAuB,CAGvB,IAAMqB,EAAoC,CACxC,IAAIZ,EAAO,CAAE,WAAYW,EAAe,UAAU,CAAE,GACpD,OAAYhB,EAAeJ,CAAQ,EACrC,KAAK,KAAKoB,EAAgBC,CAAkB,CAC9C,CAEA,kBACEC,EACAlB,EACAJ,EAAuB,CAGvB,IAAMuB,EAAwBC,GAC5BF,EACAlB,EACAJ,CAAQ,EAEV,KAAK,KAAKsB,EAAmBC,CAAqB,CACpD,CAEA,SACEE,EACArB,EACAJ,EAAuB,CAGvB,IAAM0B,EAA8B,CAClC,IAAIjB,EAAO,CAAE,WAAYgB,EAAS,UAAU,CAAE,GAC9C,OAAYrB,EAAeJ,CAAQ,EACrC,KAAK,KAAKyB,EAAUC,CAAY,CAClC,CAEA,YACEC,EACAvB,EACAJ,EAAuB,CAGvB,IAAM4B,EAAkBJ,GACtBG,EACAvB,EACAJ,CAAQ,EAEV,KAAK,K
AAK2B,EAAaC,CAAe,CACxC,CAEA,OACEC,EACAzB,EACAJ,EAAuB,CAGvB,IAAMkB,EAAad,EAAS,OAAOJ,CAAQ,EAE3CC,EAAQ4B,EAAO,WAAaC,GAAO,CAIjC,IAAMC,EAAc,IAAIvB,EAAY,CAAE,WAAY,CAACsB,CAAG,CAAC,CAAE,EACzD,KAAK,KAAKC,EAAkBb,CAAU,CACxC,CAAC,CACH,GAGF,SAASM,GACPQ,EACA5B,EACAJ,EAAuB,CAUvB,MARmB,CACjB,IAAIS,EAAO,CACT,WAAY,CACV,IAAIF,EAAS,CAAE,aAAcyB,EAAW,SAAS,CAAE,GACnD,OAAOA,EAAW,UAAU,EAC/B,GAE8C,OAAO5B,EAAUJ,CAAQ,CAE5E,CAdSF,EAAA0B,GAAA,kCC1IH,SAAUS,GAAMC,EAAiB,CAErC,GAAIA,aAAgBC,EASlB,OAAOF,GAAoBC,EAAM,cAAc,EAC1C,GAAIA,aAAgBE,EACzB,OAAOC,GAA2BH,CAAI,EACjC,GAAII,GAAeJ,CAAI,EAC5B,OAAOK,GAAiBL,CAAI,EACvB,GAAIM,GAAgBN,CAAI,EAC7B,OAAOO,GAAkBP,CAAI,EAE7B,MAAM,MAAM,sBAAsB,CAEtC,CArBgBQ,EAAAT,GAAA,SAuBV,SAAUM,GAAiBL,EAEhC,CACC,IAAIS,EAAwB,CAAA,EACtBC,EAAMV,EAAK,WACbW,EAAiB,EACjBC,EAAyBF,EAAI,OAASC,EACtCE,EAEAC,EAA0B,GAE9B,KAAOF,GAA0BE,GAC/BD,EAAcH,EAAIC,CAAc,EAChCG,EAA0BC,GAAeF,CAAW,EACpDJ,EAAWA,EAAS,OAAOV,GAAMc,CAAW,CAAC,EAC7CF,EAAiBA,EAAiB,EAClCC,EAAyBF,EAAI,OAASC,EAGxC,OAAOK,GAAKP,CAAQ,CACtB,CApBgBD,EAAAH,GAAA,oBAsBV,SAAUE,GAAkBP,EAEjC,CACC,IAAMiB,EAAuCC,EAC3ClB,EAAK,WACJmB,GACQpB,GAAMoB,CAAS,CACvB,EAEH,OAAOH,GAAKI,GAAmBH,CAAqB,CAAC,CACvD,CAVgBT,EAAAD,GAAA,qBAYV,SAAUJ,GAAiBkB,EAAkB,CACjD,MAAO,CAACA,EAAS,YAAY,CAC/B,CAFgBb,EAAAL,GAAA,oBClET,IAAMmB,GAAK,SCQZ,IAAOC,GAAP,cAAmCC,EAAU,CATnD,MASmD,CAAAC,EAAA,4BAGjD,YAAoBC,EAAa,CAC/B,MAAK,EADa,KAAA,QAAAA,EAFb,KAAA,QAAuC,CAAA,CAI9C,CAEA,cAAY,CACV,YAAK,KAAK,KAAK,OAAO,EACf,KAAK,OACd,CAEA,aACEC,EACAC,EACAC,EAAuB,CAGzB,CAEA,YACEC,EACAF,EACAC,EAAuB,CAEvB,IAAME,EACJC,GAA8BF,EAAQ,eAAgBA,EAAQ,GAAG,EACjE,KAAK,QAAQ,KACTG,EAA0BL,EAAS,OAAOC,CAAQ,EAClDK,EAAW,IAAIC,EAAY,CAAE,WAAYF,CAAQ,CAAE,EACnDG,EAAuBC,GAAMH,CAAQ,EAC3C,KAAK,QAAQH,CAAU,EAAIK,CAC7B,GAGI,SAAUE,GACdC,EAAsB,CAEtB,IAAMC,EAAgB,CAAA,EAEtB,OAAAC,EAAQF,EAAiBb,GAAW,CAClC,IAAMgB,EAAiB,IAAInB,GAAoBG,CAAO,EAAE,aAAY,EACpEiB,GAAOH,EAAeE,CAAc,CACtC,CAAC,EACMF,CACT,CAVgBf,EAAAa,GAAA,0BAYV,SAAUN,GACdY,EACAC,EAAyB,CAEzB,OAAOD,EAAM,KAAOC,EAAoBC,EAC1C,CALgBrB,EAAAO,GAAA,iCC/ChB,IAAIe,GAAqD,CAAA,EACnDC,GAAe,IAAIC,GAUnB,SAAUC,GAAaC,EAAc,CACzC,IAAMC,EAAYD,EAAO,SAAQ,EACjC,GAAIJ,GAAe,eAAeK,CAAS,EACzC,OAAOL,GAAeK,CAAS,EAC1B,CACL,IAAMC,EAAYL,GAAa,QAAQI,CAAS,EAChD,OAAAL,GAAeK,CAAS,EAAIC,EACrBA,EAEX,CATgBC,EAAAJ,GAAA,gBAWV,SAAUK,IAAsB,CACpCR,GAAiB,CAAA,CACnB,CAFgBO,EAAAC,GAAA,0BCjBhB,IAAMC,GACJ,gEACWC,GACX;EAEI,SAAUC,GACdC,EACAC,EAAsB,GAAK,CAE3B,GAAI,CACF,IAAMC,EAAMC,GAAaH,CAAM,EAM/B,OALmBI,GACjBF,EAAI,MACJ,CAAA,EACAA,EAAI,MAAM,UAAU,QAGfG,EAAG,CAIV,GAAIA,EAAE,UAAYR,GACZI,GACFK,GACE,GAAGR,EAA2B,0BACDE,EAAO,SAAQ,CAAE;;;2FAGiD,MAG9F,CACL,IAAIO,EAAY,GACZN,IACFM,EACE;;iGAGJC,GACE,GAAGV,EAA2B;qBACLE,EAAO,SAAQ,CAAE;;2EAGxCO,CAAS,GAKjB,MAAO,CAAA,CACT,CA5CgBE,EAAAV,GAAA,iCA8CV,SAAUK,GACdF,EACAQ,EACAC,EAAmB,CAEnB,OAAQT,EAAI,KAAM,CAChB,IAAK,cACH,QAAS,EAAI,EAAG,EAAIA,EAAI,MAAM,OAAQ,IACpCE,GAA0BF,EAAI,MAAM,CAAC,EAAGQ,EAAQC,CAAU,EAE5D,MACF,IAAK,cACH,IAAMC,EAAQV,EAAI,MAClB,QAAS,EAAI,EAAG,EAAIU,EAAM,OAAQ,IAAK,CACrC,IAAMC,EAAOD,EAAM,CAAC,EAGpB,OAAQC,EAAK,KAAM,CACjB,IAAK,YAIL,IAAK,qBAEL,IAAK,YACL,IAAK,oBACL,IAAK,cACL,IAAK,eACL,IAAK,kBACH,SAGJ,IAAMC,EAAOD,EACb,OAAQC,EAAK,KAAM,CACjB,IAAK,YACHC,GAAwBD,EAAK,MAAOJ,EAAQC,CAAU,EACtD,MACF,IAAK,MACH,GAAIG,EAAK,aAAe,GACtB,MAAM,MAAMjB,EAAsB,EAEpCmB,EAAQF,EAAK,MAAQG,GAAQ,CAC3B,GAAI,OAAOA,GAAS,SAClBF,GAAwBE,EAAMP,EAAQC,CAAU,MAC3C,CAEL,IAAMO,EAAQD,EAEd,GAAIN,IAAe,GACjB,QACMQ,EAAYD,EAAM,KACtBC,GAAaD,EAAM,GACnBC,IAEAJ,GAAwBI,EAAWT,EAAQC,CAAU,MAIpD,CAEH,QACMQ,EAAYD,EAAM,KACtBC,GAAaD,EAAM,IAAMC,EAAYC,GACrCD,IAEAJ,GAAwBI,EAAWT,EAAQC,CAAU,EAIvD,GAAIO,EAAM,IAAME,GAAoB,CAClC,IAAMC,EACJH,EAAM,MAAQE,GACVF,EAAM,KACNE,GACAE,EAAcJ,EAAM,GACpBK,EAA
YC,GAAyBH,CAAW,EAChDI,EAAYD,GAAyBF,CAAW,EAEtD,QACMI,EAAaH,EACjBG,GAAcD,EACdC,IAEAhB,EAAOgB,CAAU,EAAIA,IAK/B,CAAC,EACD,MACF,IAAK,QACHtB,GAA0BU,EAAK,MAAOJ,EAAQC,CAAU,EACxD,MAEF,QACE,MAAM,MAAM,sBAAsB,EAItC,IAAMgB,EACJb,EAAK,aAAe,QAAaA,EAAK,WAAW,UAAY,EAC/D,GAGGA,EAAK,OAAS,SAAWc,GAAgBd,CAAI,IAAM,IAEnDA,EAAK,OAAS,SAAWa,IAAyB,GAEnD,MAGJ,MAEF,QACE,MAAM,MAAM,uBAAuB,EAIvC,OAAOE,GAAOnB,CAAM,CACtB,CAvHgBD,EAAAL,GAAA,6BAyHhB,SAASW,GACPE,EACAP,EACAC,EAAmB,CAEnB,IAAMmB,EAAmBN,GAAyBP,CAAI,EACtDP,EAAOoB,CAAgB,EAAIA,EAEvBnB,IAAe,IACjBoB,GAAiBd,EAAMP,CAAM,CAEjC,CAXSD,EAAAM,GAAA,2BAaT,SAASgB,GACPd,EACAP,EAAsC,CAEtC,IAAMsB,EAAO,OAAO,aAAaf,CAAI,EAC/BgB,EAAYD,EAAK,YAAW,EAElC,GAAIC,IAAcD,EAAM,CACtB,IAAMF,EAAmBN,GAAyBS,EAAU,WAAW,CAAC,CAAC,EACzEvB,EAAOoB,CAAgB,EAAIA,MACtB,CACL,IAAMI,EAAYF,EAAK,YAAW,EAClC,GAAIE,IAAcF,EAAM,CACtB,IAAMF,EAAmBN,GACvBU,EAAU,WAAW,CAAC,CAAC,EAEzBxB,EAAOoB,CAAgB,EAAIA,GAGjC,CAnBSrB,EAAAsB,GAAA,oBAqBT,SAASI,GAASC,EAAcC,EAAyB,CACvD,OAAOC,GAAKF,EAAQ,MAAQG,GAAe,CACzC,GAAI,OAAOA,GAAgB,SACzB,OAAOC,GAASH,EAAiBE,CAAW,EACvC,CAEL,IAAMrB,EAAaqB,EACnB,OACED,GACED,EACCI,GAAevB,EAAM,MAAQuB,GAAcA,GAAcvB,EAAM,EAAE,IAC9D,OAGZ,CAAC,CACH,CAfST,EAAA0B,GAAA,YAiBT,SAASP,GAAgB1B,EAAQ,CAC/B,IAAMwC,EAAcxC,EAAa,WACjC,OAAIwC,GAAcA,EAAW,UAAY,EAChC,GAGJxC,EAAI,MAIFyC,GAAQzC,EAAI,KAAK,EACpB0C,GAAM1C,EAAI,MAAO0B,EAAe,EAChCA,GAAgB1B,EAAI,KAAK,EALpB,EAMX,CAbSO,EAAAmB,GAAA,mBAeT,IAAMiB,GAAN,cAA6BC,EAAiB,CA5P9C,MA4P8C,CAAArC,EAAA,uBAG5C,YAAoB4B,EAAyB,CAC3C,MAAK,EADa,KAAA,gBAAAA,EAFpB,KAAA,MAAiB,EAIjB,CAEA,cAAcU,EAAa,CAEzB,GAAI,KAAK,QAAU,GAMnB,QAAQA,EAAK,KAAM,CACjB,IAAK,YACH,KAAK,eAAeA,CAAI,EACxB,OACF,IAAK,oBACH,KAAK,uBAAuBA,CAAI,EAChC,OAGJ,MAAM,cAAcA,CAAI,EAC1B,CAEA,eAAeA,EAAe,CACxBP,GAAS,KAAK,gBAAiBO,EAAK,KAAK,IAC3C,KAAK,MAAQ,GAEjB,CAEA,SAASA,EAAS,CACZA,EAAK,WACHZ,GAASY,EAAM,KAAK,eAAe,IAAM,SAC3C,KAAK,MAAQ,IAGXZ,GAASY,EAAM,KAAK,eAAe,IAAM,SAC3C,KAAK,MAAQ,GAGnB,GAGI,SAAUC,GACdC,EACAC,EAAwB,CAExB,GAAIA,aAAmB,OAAQ,CAC7B,IAAMhD,EAAMC,GAAa+C,CAAO,EAC1BC,EAAiB,IAAIN,GAAeI,CAAS,EACnD,OAAAE,EAAe,MAAMjD,CAAG,EACjBiD,EAAe,UAEtB,QACEb,GAAUY,EAAUlB,GACXQ,GAASS,EAAoBjB,EAAM,WAAW,CAAC,CAAC,CACxD,IAAM,MAGb,CAhBgBvB,EAAAuC,GAAA,oBC7PhB,IAAMI,GAAU,UACHC,GAAe,cACfC,GAAQ,QAuBVC,GACT,OAAa,IAAI,OAAO,MAAM,EAAG,QAAW,UAUxC,SAAUC,GACdC,EACAC,EAQC,CAEDA,EAAUC,GAASD,EAAS,CAC1B,UAAWE,GACX,MAAO,GACP,SAAU,GACV,iBAAkB,OAClB,yBAA0B,CAAC,KAAM;CAAI,EACrC,OAAQC,EAAA,CAACC,EAAaC,IAAqBA,EAAM,EAAzC,UACT,EAED,IAAMC,EAASN,EAAQ,OAEvBM,EAAO,kCAAmC,IAAK,CAC7CC,GAA+B,CACjC,CAAC,EAED,IAAIC,EACJF,EAAO,kBAAmB,IAAK,CAC7BE,EAAoBC,GAAOV,EAAaW,GAC/BA,EAASC,EAAO,IAAMC,GAAM,EACpC,CACH,CAAC,EAED,IAAIC,EAAY,GACZC,EACJR,EAAO,qBAAsB,IAAK,CAChCO,EAAY,GACZC,EAAyBC,EACvBP,EACCE,GAAkC,CACjC,IAAMM,EAAcN,EAASC,EAAO,EAGpC,GAAIM,GAASD,CAAW,EAAG,CACzB,IAAME,EAAeF,EAAY,OACjC,OACEE,EAAa,SAAW,GAExBA,IAAiB,KACjBA,IAAiB,KACjBA,IAAiB,KACjB,CAACF,EAAY,WAENE,EAEPA,EAAa,SAAW,GACxBA,EAAa,CAAC,IAAM,MAEpB,CAACC,GACC,CACE,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,KAEFD,EAAa,CAAC,CAAC,EAMVA,EAAa,CAAC,EAEdlB,EAAQ,UACXoB,GAAcJ,CAAW,EACzBK,GAAgBL,CAAW,MAE5B,IAAIM,GAAWN,CAAW,EAC/B,OAAAH,EAAY,GAEL,CAAE,KAAMG,CAAW,EACrB,GAAI,OAAOA,GAAgB,SAChC,OAAAH,EAAY,GAELG,EACF,GAAI,OAAOA,GAAgB,SAAU,CAC1C,GAAIA,EAAY,SAAW,EACzB,OAAOA,EACF,CACL,IAAMO,EAAsBP,EAAY,QACtC,sBACA,MAAM,EAEFQ,EAAgB,IAAI,OAAOD,CAAmB,EACpD,OAAOvB,EAAQ,UACXoB,GAAcI,CAAa,EAC3BH,GAAgBG,CAAa,OAGnC,OAAM,MAAM,sBAAsB,EAEtC,CAAC,CAEL,CAAC,EAED,IAAIC,EACAC,EACAC,EACAC,EACAC,EACJvB,EAAO,eAAgB,IAAK,CAC1BmB,EAAmBV,EACjBP,EACCE,GAAaA,EAAS,YAAa,EAGtCgB,EAAoBX,EAAIP,EAAoBsB,GAAc,CACxD,IAAMC,EAAYD,EAAM,MAExB,GAAIC,IAAcnB,GAAM,QAEjB,IAAI
oB,GAASD,CAAS,EAC3B,OAAOA,EACF,GAAIE,GAAYF,CAAS,EAC9B,MAAO,GAEP,MAAM,MAAM,sBAAsB,EAEtC,CAAC,EAEDJ,EAA8BZ,EAAIP,EAAoBsB,GAAc,CAClE,IAAMI,EAAgBJ,EAAM,WAE5B,GAAII,EAIF,OAHwBC,GAAQD,CAAa,EACzCnB,EAAImB,EAAgBE,GAAcC,GAAQ7B,EAAmB4B,CAAI,CAAC,EAClE,CAACC,GAAQ7B,EAAmB0B,CAAa,CAAC,CAGlD,CAAC,EAEDN,EAAuBb,EACrBP,EACCsB,GAAeA,EAAM,SAAS,EAGjCD,EAAsBd,EAAIP,EAAoBsB,GAC5CQ,EAAIR,EAAO,UAAU,CAAC,CAE1B,CAAC,EAED,IAAIS,EACJjC,EAAO,2BAA4B,IAAK,CACtC,IAAMkC,EAA0BC,GAC9BzC,EAAQ,wBAAyB,EAEnCuC,EAAgCxB,EAAIP,EAAoBkC,GAAY,EAAK,EACrE1C,EAAQ,mBAAqB,eAC/BuC,EAAgCxB,EAAIP,EAAoBkC,GAClDJ,EAAII,EAAS,aAAa,EACrB,CAAC,CAACA,EAAQ,YAGfC,GAAsBD,EAASF,CAAuB,IAAM,IAC5DI,GACEJ,EACAE,EAAQ,OAA0B,CAIzC,EAEL,CAAC,EAED,IAAIG,EACAC,EACAC,EACAC,EACJ1C,EAAO,kBAAmB,IAAK,CAC7BuC,EAAuB9B,EAAIP,EAAmByC,EAAe,EAC7DH,EAAoB/B,EAAID,EAAwBoC,EAAc,EAE9DH,EAAcI,GACZ3C,EACA,CAAC4C,EAAKtB,IAAc,CAClB,IAAMC,EAAYD,EAAM,MACxB,OAAIE,GAASD,CAAS,GAAOA,IAAcnB,GAAM,UAC/CwC,EAAIrB,CAAS,EAAI,CAAA,GAEZqB,CACT,EACA,CAAA,CAAuC,EAGzCJ,EAAqBjC,EACnBD,EACA,CAACuC,EAAGC,KACK,CACL,QAASxC,EAAuBwC,CAAG,EACnC,UAAW3B,EAA4B2B,CAAG,EAC1C,kBAAmBf,EAA8Be,CAAG,EACpD,SAAUT,EAAqBS,CAAG,EAClC,MAAOR,EAAkBQ,CAAG,EAC5B,MAAO5B,EAAkB4B,CAAG,EAC5B,KAAM1B,EAAqB0B,CAAG,EAC9B,IAAKzB,EAAoByB,CAAG,EAC5B,aAAc7B,EAAiB6B,CAAG,EAClC,UAAW9C,EAAkB8C,CAAG,GAEnC,CAEL,CAAC,EAED,IAAIC,EAAiB,GACjBC,EACF,CAAA,EAEF,OAAKxD,EAAQ,UACXM,EAAO,0BAA2B,IAAK,CACrCkD,EAA+BL,GAC7B3C,EACA,CAACiD,EAAQC,EAAaJ,IAAO,CAC3B,GAAI,OAAOI,EAAY,SAAY,SAAU,CAC3C,IAAMC,EAAWD,EAAY,QAAQ,WAAW,CAAC,EAC3CE,GAAeC,GAAyBF,CAAQ,EACtDG,GAAiBL,EAAQG,GAAcZ,EAAmBM,CAAG,CAAC,UACrDnB,GAAQuB,EAAY,gBAAgB,EAAG,CAChD,IAAIK,EACJC,EAAQN,EAAY,iBAAmBO,IAAa,CAClD,IAAMN,GACJ,OAAOM,IAAc,SACjBA,GAAU,WAAW,CAAC,EACtBA,GACAC,GAAmBL,GAAyBF,EAAQ,EAKtDI,IAAqBG,KACvBH,EAAmBG,GACnBJ,GACEL,EACAS,GACAlB,EAAmBM,CAAG,CAAC,EAG7B,CAAC,UACQrC,GAASyC,EAAY,OAAO,EACrC,GAAIA,EAAY,QAAQ,QACtBH,EAAiB,GACbvD,EAAQ,qBACVmE,GACE,GAAGC,EAA2B,wBACHV,EAAY,QAAQ,SAAQ,CAAE;;;gGAG2C,MAGnG,CACL,IAAMW,EAAiBC,GACrBZ,EAAY,QACZ1D,EAAQ,mBAAmB,EAKzBuE,EAAQF,CAAc,IAIxBd,EAAiB,IAEnBS,EAAQK,EAAiBG,IAAQ,CAC/BV,GAAiBL,EAAQe,GAAMxB,EAAmBM,CAAG,CAAC,CACxD,CAAC,OAGCtD,EAAQ,qBACVmE,GACE,GAAGC,EAA2B,gBACXV,EAAY,IAAI;;+FAEgE,EAGvGH,EAAiB,GAGnB,OAAOE,CACT,EACA,CAAA,CAA8C,CAElD,CAAC,EAGI,CACL,YAAaV,EACb,mBAAoBC,EACpB,6BAA8BQ,EAC9B,UAAW3C,EACX,eAAgB0C,EAEpB,CA5TgBpD,EAAAL,GAAA,qBA8TV,SAAU2E,GACd1E,EACA2E,EAAyB,CAEzB,IAAIC,EAAkC,CAAA,EAEhCC,EAAgBC,GAAoB9E,CAAU,EACpD4E,EAASA,EAAO,OAAOC,EAAc,MAAM,EAE3C,IAAME,EAAgBC,GAAoBH,EAAc,KAAK,EACvDI,EAAkBF,EAAc,MACtC,OAAAH,EAASA,EAAO,OAAOG,EAAc,MAAM,EAE3CH,EAASA,EAAO,OAAOM,GAAsBD,CAAe,CAAC,EAE7DL,EAASA,EAAO,OAAOO,GAAqBF,CAAe,CAAC,EAE5DL,EAASA,EAAO,OACdQ,GAAwBH,EAAiBN,CAAe,CAAC,EAG3DC,EAASA,EAAO,OAAOS,GAAwBJ,CAAe,CAAC,EAExDL,CACT,CAxBgBxE,EAAAsE,GAAA,oBA0BhB,SAASQ,GACPlF,EAAuB,CAEvB,IAAI4E,EAAkC,CAAA,EAChCU,EAAqBC,GAAOvF,EAAa2D,GAC7CzC,GAASyC,EAAY/C,EAAO,CAAC,CAAC,EAGhC,OAAAgE,EAASA,EAAO,OAAOY,GAAqBF,CAAkB,CAAC,EAE/DV,EAASA,EAAO,OAAOa,GAAuBH,CAAkB,CAAC,EAEjEV,EAASA,EAAO,OAAOc,GAAqBJ,CAAkB,CAAC,EAE/DV,EAASA,EAAO,OAAOe,GAAsBL,CAAkB,CAAC,EAEhEV,EAASA,EAAO,OAAOgB,GAAsBN,CAAkB,CAAC,EAEzDV,CACT,CAnBSxE,EAAA8E,GAAA,yBA0BH,SAAUJ,GACd9E,EAAuB,CAEvB,IAAM6F,EAA+BN,GAAOvF,EAAaW,GAChD,CAAC4B,EAAI5B,EAAUC,EAAO,CAC9B,EAEKgE,EAAS5D,EAAI6E,EAA+BlF,IACzC,CACL,QACE,iBACAA,EAAS,KACT,uCACF,KAAMmF,GAAyB,gBAC/B,WAAY,CAACnF,CAAQ,GAExB,EAEKoF,EAAQC,GAAWhG,EAAY6F,CAA4B,EACjE,MAAO,CAAE,OAAAjB,EAAQ,MAAAmB,CAAK,CACxB,CApBgB3F,EAAA0E,GAAA,uBAsBV,SAAUE,GACdhF,EAAuB,CAEvB,IAAMiG,EAA+BV,GAAOvF,EAAaW,GAAY,CACnE,IAAMuF,EAAUvF,EAASC,EAAO,EAChC,MACE,CAACM,GAASgF,CAAO,GACjB,CAAC3E,GAAW2E,CAAO,GACnB,CAAC3D,EAAI2
D,EAAS,MAAM,GACpB,CAACjE,GAASiE,CAAO,CAErB,CAAC,EAEKtB,EAAS5D,EAAIiF,EAA+BtF,IACzC,CACL,QACE,iBACAA,EAAS,KACT,0JAEF,KAAMmF,GAAyB,gBAC/B,WAAY,CAACnF,CAAQ,GAExB,EAEKoF,EAAQC,GAAWhG,EAAYiG,CAA4B,EACjE,MAAO,CAAE,OAAArB,EAAQ,MAAAmB,CAAK,CACxB,CA3BgB3F,EAAA4E,GAAA,uBA6BhB,IAAMmB,GAAe,WAEf,SAAUX,GACdxF,EAAuB,CAEvB,MAAMoG,UAAwBC,EAAiB,CA3fjD,MA2fiD,CAAAjG,EAAA,wBAA/C,aAAA,qBACE,KAAA,MAAQ,EAKV,CAHE,eAAekG,EAAa,CAC1B,KAAK,MAAQ,EACf,EAGF,IAAMC,EAAehB,GAAOvF,EAAaW,GAAY,CACnD,IAAMuF,EAAUvF,EAAS,QAEzB,GAAI,CACF,IAAM6F,EAAYC,GAAaP,CAAiB,EAC1CQ,EAAmB,IAAIN,EAC7B,OAAAM,EAAiB,MAAMF,CAAS,EAEzBE,EAAiB,WACd,CAGV,OAAOP,GAAa,KAAMD,EAAmB,MAAM,EAEvD,CAAC,EAgBD,OAdelF,EAAIuF,EAAe5F,IACzB,CACL,QACE;iBAEAA,EAAS,KACT;gFAGF,KAAMmF,GAAyB,iBAC/B,WAAY,CAACnF,CAAQ,GAExB,CAGH,CA1CgBP,EAAAoF,GAAA,wBA4CV,SAAUI,GACd5F,EAAuB,CAEvB,IAAM2G,EAAqBpB,GAAOvF,EAAaW,GAC7BA,EAAS,QACV,KAAK,EAAE,CACvB,EAaD,OAXeK,EAAI2F,EAAqBhG,IAC/B,CACL,QACE,iBACAA,EAAS,KACT,qDACF,KAAMmF,GAAyB,oBAC/B,WAAY,CAACnF,CAAQ,GAExB,CAGH,CApBgBP,EAAAwF,GAAA,yBAsBhB,IAAMgB,GAAiB,iBAEjB,SAAUnB,GACdzF,EAAuB,CAEvB,MAAM6G,UAA0BR,EAAiB,CA/jBnD,MA+jBmD,CAAAjG,EAAA,0BAAjD,aAAA,qBACE,KAAA,MAAQ,EAKV,CAHE,iBAAiBkG,EAAa,CAC5B,KAAK,MAAQ,EACf,EAGF,IAAMC,EAAehB,GAAOvF,EAAaW,GAAY,CACnD,IAAMuF,EAAUvF,EAAS,QACzB,GAAI,CACF,IAAM6F,EAAYC,GAAaP,CAAO,EAChCY,EAAqB,IAAID,EAC/B,OAAAC,EAAmB,MAAMN,CAAS,EAE3BM,EAAmB,WAChB,CAGV,OAAOF,GAAe,KAAKV,EAAQ,MAAM,EAE7C,CAAC,EAgBD,OAdelF,EAAIuF,EAAe5F,IACzB,CACL,QACE;iBAEAA,EAAS,KACT;wFAGF,KAAMmF,GAAyB,iBAC/B,WAAY,CAACnF,CAAQ,GAExB,CAGH,CAzCgBP,EAAAqF,GAAA,0BA2CV,SAAUC,GACd1F,EAAuB,CAEvB,IAAM+G,EAAexB,GAAOvF,EAAaW,GAAY,CACnD,IAAMuF,EAAUvF,EAASC,EAAO,EAChC,OAAOsF,aAAmB,SAAWA,EAAQ,WAAaA,EAAQ,OACpE,CAAC,EAaD,OAXelF,EAAI+F,EAAepG,IACzB,CACL,QACE,iBACAA,EAAS,KACT,oEACF,KAAMmF,GAAyB,wBAC/B,WAAY,CAACnF,CAAQ,GAExB,CAGH,CApBgBP,EAAAsF,GAAA,wBAuBV,SAAUC,GACd3F,EAAuB,CAEvB,IAAMgH,EAAqB,CAAA,EACvBC,EAAoBjG,EAAIhB,EAAakH,GAChC9D,GACLpD,EACA,CAAC0D,EAAQyD,KAELD,EAAU,QAAQ,SAAYC,EAAU,QAAmB,QAC3D,CAAC/F,GAAS4F,EAAOG,CAAS,GAC1BA,EAAU,UAAYtG,GAAM,KAI5BmG,EAAM,KAAKG,CAAS,EACpBzD,EAAO,KAAKyD,CAAS,GACdzD,GAIX,CAAA,CAAiB,CAEpB,EAEDuD,EAAoBG,GAAQH,CAAiB,EAE7C,IAAMI,EAAoB9B,GAAO0B,EAAoBK,GAC5CA,EAAiB,OAAS,CAClC,EAmBD,OAjBetG,EAAIqG,EAAoBE,GAAuB,CAC5D,IAAMC,EAAiBxG,EAAIuG,EAAiB5G,GACnCA,EAAS,IACjB,EAGD,MAAO,CACL,QACE,6BAHwB8G,GAAMF,CAAc,EAAG,OAGL,wDACYC,EAAe,KACnE,IAAI,CACL,MACH,KAAM1B,GAAyB,yBAC/B,WAAYyB,EAEhB,CAAC,CAGH,CAjDgBnH,EAAAuF,GAAA,yBAmDV,SAAUR,GACdnF,EAAuB,CAEvB,IAAM0H,EAAenC,GAAOvF,EAAa+B,GAAc,CACrD,GAAI,CAACQ,EAAIR,EAAO,OAAO,EACrB,MAAO,GAET,IAAM4F,EAAQ5F,EAAM,MAEpB,OAAO4F,IAAU9G,GAAM,SAAW8G,IAAU9G,GAAM,IAAM,CAACoB,GAAS0F,CAAK,CACzE,CAAC,EAaD,OAXe3G,EAAI0G,EAAe/G,IACzB,CACL,QACE,iBACAA,EAAS,KACT,gEACF,KAAMmF,GAAyB,yBAC/B,WAAY,CAACnF,CAAQ,GAExB,CAGH,CAxBgBP,EAAA+E,GAAA,wBA0BV,SAAUC,GACdpF,EACA4H,EAAoB,CAEpB,IAAMC,EAAetC,GAAOvF,EAAa+B,GAErCA,EAAM,YAAc,QAAa,CAACX,GAASwG,EAAY7F,EAAM,SAAS,CAEzE,EAaD,OAXef,EAAI6G,EAAelF,IAIzB,CACL,QAHA,iBAAiBA,EAAQ,IAAI,8DAA8DA,EAAQ,SAAS,yBAI5G,KAAMmD,GAAyB,yBAC/B,WAAY,CAACnD,CAAO,GAEvB,CAGH,CAtBgBvC,EAAAgF,GAAA,2BAwBV,SAAUC,GACdrF,EAAuB,CAEvB,IAAM4E,EAAkC,CAAA,EAElCkD,EAAc1E,GAClBpD,EACA,CAAC0D,EAAQf,EAASY,IAAO,CACvB,IAAM2C,EAAUvD,EAAQ,QAExB,OAAIuD,IAAYrF,GAAM,KAMlBoB,GAASiE,CAAO,EAClBxC,EAAO,KAAK,CAAE,IAAKwC,EAAS,IAAA3C,EAAK,UAAWZ,CAAO,CAAE,EAC5CzB,GAASgF,CAAO,GAAK6B,GAAW7B,CAAO,GAChDxC,EAAO,KAAK,CAAE,IAAKwC,EAAQ,OAAQ,IAAA3C,EAAK,UAAWZ,CAAO,CAAE,GAEvDe,CACT,EACA,CAAA,CAA0D,EAG5D,OAAAO,EAAQjE,EAAY,CAAC2C,EAASqF,IAAW,CACvC/D,EAAQ6D,EAAa,CAAC,CAAE,IAAAG,EAAK,IAAA1E,EAAK,UAAA2E,CAAS,IAAM,CAC/C,GAAIF,EAAUzE,GAAO4E,GAAcF,EAAKtF,EAAQ,OAAO,EAAG,C
ACxD,IAAMtC,EACJ,YAAY6H,EAAU,IAAI;4CACmBvF,EAAQ,IAAI;8EAG3DiC,EAAO,KAAK,CACV,QAASvE,EACT,KAAMyF,GAAyB,oBAC/B,WAAY,CAACnD,EAASuF,CAAS,EAChC,EAEL,CAAC,CACH,CAAC,EAEMtD,CACT,CA5CgBxE,EAAAiF,GAAA,2BA8ChB,SAAS8C,GAAcF,EAAa/B,EAAY,CAE9C,GAAIhF,GAASgF,CAAO,EAAG,CACrB,IAAMkC,EAAclC,EAAQ,KAAK+B,CAAG,EACpC,OAAOG,IAAgB,MAAQA,EAAY,QAAU,MAChD,IAAI7G,GAAW2E,CAAO,EAE3B,OAAOA,EAAQ+B,EAAK,EAAG,CAAA,EAAI,CAAA,CAAE,EACxB,GAAI1F,EAAI2D,EAAS,MAAM,EAE5B,OAAOA,EAAQ,KAAK+B,EAAK,EAAG,CAAA,EAAI,CAAA,CAAE,EAC7B,GAAI,OAAO/B,GAAY,SAC5B,OAAOA,IAAY+B,EAEnB,MAAM,MAAM,sBAAsB,EAEtC,CAhBS7H,EAAA+H,GAAA,iBAkBT,SAASJ,GAAWM,EAAc,CAiBhC,OACEC,GAhBgB,CAChB,IACA,KACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,KAGiBC,GAASF,EAAO,OAAO,QAAQE,CAAI,IAAM,EAAE,IAAM,MAEtE,CApBSnI,EAAA2H,GAAA,cAsBH,SAAUzG,GAAgB4E,EAAe,CAC7C,IAAMsC,EAAQtC,EAAQ,WAAa,IAAM,GAGzC,OAAO,IAAI,OAAO,OAAOA,EAAQ,MAAM,IAAKsC,CAAK,CACnD,CALgBpI,EAAAkB,GAAA,mBAOV,SAAUD,GAAc6E,EAAe,CAC3C,IAAMsC,EAAQtC,EAAQ,WAAa,KAAO,IAG1C,OAAO,IAAI,OAAO,GAAGA,EAAQ,MAAM,GAAIsC,CAAK,CAC9C,CALgBpI,EAAAiB,GAAA,iBAOV,SAAUoH,GACdC,EACAC,EACAC,EAA6C,CAE7C,IAAMhE,EAAkC,CAAA,EAGxC,OAAKrC,EAAImG,EAAiBG,EAAY,GACpCjE,EAAO,KAAK,CACV,QACE,sDACAiE,GACA;EACF,KAAM/C,GAAyB,sCAChC,EAEEvD,EAAImG,EAAiBI,EAAK,GAC7BlE,EAAO,KAAK,CACV,QACE,sDACAkE,GACA;EACF,KAAMhD,GAAyB,wCAChC,EAIDvD,EAAImG,EAAiBI,EAAK,GAC1BvG,EAAImG,EAAiBG,EAAY,GACjC,CAACtG,EAAImG,EAAgB,MAAOA,EAAgB,WAAW,GAEvD9D,EAAO,KAAK,CACV,QACE,kDAAkDiE,EAAY,MAAMH,EAAgB,WAAW;EAEjG,KAAM5C,GAAyB,mDAChC,EAGCvD,EAAImG,EAAiBI,EAAK,GAC5B7E,EAAQyE,EAAgB,MAAO,CAACK,EAAeC,IAAgB,CAC7D/E,EAAQ8E,EAAe,CAACpF,EAAasF,IAAW,CAC9C,GAAI/G,GAAYyB,CAAW,EACzBiB,EAAO,KAAK,CACV,QACE,sEACIoE,CAAY,gBAAgBC,CAAO;EACzC,KAAMnD,GAAyB,0CAChC,UACQvD,EAAIoB,EAAa,YAAY,EAAG,CACzC,IAAMuF,EAAY9G,GAAQuB,EAAY,UAAU,EAC5CA,EAAY,WACZ,CAACA,EAAY,UAAU,EAC3BM,EAAQiF,EAAYC,GAAiB,CAEjC,CAACjH,GAAYiH,CAAa,GAC1B,CAAC/H,GAAS2H,EAAeI,CAAa,GAEtCvE,EAAO,KAAK,CACV,QAAS,8DAA8DuE,EAAc,IAAI,eAAexF,EAAY,IAAI,sBAAsBqF,CAAY;EAC1J,KAAMlD,GAAyB,gDAChC,CAEL,CAAC,EAEL,CAAC,CACH,CAAC,EAGIlB,CACT,CAvEgBxE,EAAAqI,GAAA,wBAyEV,SAAUW,GACdV,EACAC,EACAC,EAA6C,CAE7C,IAAMS,EAAW,CAAA,EACbC,EAAkB,GAChBC,EAAgBnC,GAAQoC,GAAQC,GAAOf,EAAgB,KAAK,CAAC,CAAC,EAE9DgB,EAAqBhJ,GACzB6I,EACC5I,GAAaA,EAASC,EAAO,IAAMC,GAAM,EAAE,EAExC8I,EAAsBjH,GAAakG,CAAwB,EACjE,OAAID,GACF1E,EAAQyF,EAAqB/G,GAAW,CACtC,IAAMiH,EAAYhH,GAAsBD,EAASgH,CAAmB,EACpE,GAAIC,IAAc,GAAO,CAEvB,IAAMC,EAAoB,CACxB,QAFcC,GAA2BnH,EAASiH,CAAS,EAG3D,KAAMA,EAAU,MAChB,UAAWjH,GAEb0G,EAAS,KAAKQ,CAAiB,OAG3BtH,EAAII,EAAS,aAAa,EACxBA,EAAQ,cAAgB,KAC1B2G,EAAkB,IAIlBzG,GAAiB8G,EAAqBhH,EAAQ,OAAiB,IAE/D2G,EAAkB,GAI1B,CAAC,EAGCX,GAAc,CAACW,GACjBD,EAAS,KAAK,CACZ,QACE;;;;eAKF,KAAMvD,GAAyB,qBAChC,EAEIuD,CACT,CAtDgBjJ,EAAAgJ,GAAA,+BAwDV,SAAUW,GAAiB/G,EAEhC,CACC,IAAMgH,EAAoB,CAAA,EACpBC,EAAYC,GAAKlH,CAAW,EAElC,OAAAiB,EAAQgG,EAAYE,GAAW,CAC7B,IAAMC,EAAiBpH,EAAYmH,CAAO,EAG1C,GAAI/H,GAAQgI,CAAc,EACxBJ,EAAaG,CAAO,EAAI,CAAA,MAExB,OAAM,MAAM,sBAAsB,CAEtC,CAAC,EAEMH,CACT,CAlBgB5J,EAAA2J,GAAA,oBAqBV,SAAU7G,GAAgBgF,EAAoB,CAClD,IAAMhC,EAAUgC,EAAU,QAE1B,GAAIhH,GAASgF,CAAO,EAClB,MAAO,GACF,GAAI3E,GAAW2E,CAAO,EAE3B,MAAO,GACF,GAAI3D,EAAI2D,EAAS,MAAM,EAE5B,MAAO,GACF,GAAIjE,GAASiE,CAAO,EACzB,MAAO,GAEP,MAAM,MAAM,sBAAsB,CAEtC,CAhBgB9F,EAAA8C,GAAA,mBAkBV,SAAUC,GAAe+C,EAAY,CACzC,OAAIjE,GAASiE,CAAO,GAAKA,EAAQ,SAAW,EACnCA,EAAQ,WAAW,CAAC,EAEpB,EAEX,CANgB9F,EAAA+C,GAAA,kBAWT,IAAMkH,GAAwD,CAEnE,KAAMjK,EAAA,SAAUkK,EAAI,CAClB,IAAMC,EAAMD,EAAK,OACjB,QAASE,EAAI,KAAK,UAAWA,EAAID,EAAKC,IAAK,CACzC,IAAMC,EAAIH,EAAK,WAAWE,CAAC,EAC3B,GAAIC,IAAM,GACR,YAAK,UAAYD,EAAI,EACd,GACF,GAAIC,IAAM,GACf,OAAIH,EAAK,WAAWE,EAAI,CAAC,IAAM,GAC7B,KAAK,UAAY
A,EAAI,EAErB,KAAK,UAAYA,EAAI,EAEhB,GAGX,MAAO,EACT,EAjBM,QAmBN,UAAW,GAGb,SAAS5H,GACPD,EACAF,EAAiC,CASjC,GAAIF,EAAII,EAAS,aAAa,EAG5B,MAAO,GAGP,GAAIzB,GAASyB,EAAQ,OAAO,EAAG,CAC7B,GAAI,CAEFE,GAAiBJ,EAAyBE,EAAQ,OAAiB,QAC5D+H,EAAG,CAEV,MAAO,CACL,MAAO5E,GAAyB,oBAChC,OAAS4E,EAAY,SAGzB,MAAO,OACF,IAAIzI,GAASU,EAAQ,OAAO,EAEjC,MAAO,GACF,GAAIO,GAAgBP,CAAO,EAEhC,MAAO,CAAE,MAAOmD,GAAyB,iBAAiB,EAE1D,MAAM,MAAM,sBAAsB,EAGxC,CAvCS1F,EAAAwC,GAAA,yBAyCH,SAAUkH,GACdnH,EACAgI,EAKC,CAGD,GAAIA,EAAQ,QAAU7E,GAAyB,oBAC7C,MACE;0BAC4BnD,EAAQ,IAAI;gBACtBgI,EAAQ,MAAM;oGAG7B,GAAIA,EAAQ,QAAU7E,GAAyB,kBACpD,MACE;0BAC4BnD,EAAQ,IAAI;kGAI1C,MAAM,MAAM,sBAAsB,CAEtC,CA1BgBvC,EAAA0J,GAAA,8BA4BhB,SAASpH,GAAakI,EAAiC,CASrD,OARkB5J,EAAI4J,EAAeC,GAC/B5I,GAAS4I,CAAW,EACfA,EAAY,WAAW,CAAC,EAExBA,CAEV,CAGH,CAVSzK,EAAAsC,GAAA,gBAYT,SAASqB,GACP+G,EACAC,EACAC,EAAQ,CAEJF,EAAIC,CAAG,IAAM,OACfD,EAAIC,CAAG,EAAI,CAACC,CAAK,EAEjBF,EAAIC,CAAG,EAAE,KAAKC,CAAK,CAEvB,CAVS5K,EAAA2D,GAAA,oBAYF,IAAMkH,GAAqB,IAiB9BC,GAAsC,CAAA,EACpC,SAAUpH,GAAyBF,EAAgB,CACvD,OAAOA,EAAWqH,GACdrH,EACAsH,GAA0BtH,CAAQ,CACxC,CAJgBxD,EAAA0D,GAAA,4BAchB,SAAStD,IAA+B,CACtC,GAAIgE,EAAQ0G,EAAyB,EAAG,CACtCA,GAA4B,IAAI,MAAM,KAAK,EAC3C,QAASV,EAAI,EAAGA,EAAI,MAAOA,IACzBU,GAA0BV,CAAC,EAAIA,EAAI,IAAM,IAAM,CAAC,EAAEA,EAAI,KAAOA,EAGnE,CAPSpK,EAAAI,GAAA,mCCjoCH,SAAU2K,GACdC,EACAC,EAAyB,CAEzB,IAAMC,EAAeF,EAAY,aACjC,OAAIE,IAAiBD,EAAe,aAC3B,GAGLA,EAAe,WAAa,IAC5BA,EAAe,mBAAoBC,CAAY,IAAM,EAG3D,CAbgBC,EAAAJ,GAAA,0BAiBV,SAAUK,GACdC,EACAC,EAAkB,CAElB,OAAOD,EAAM,eAAiBC,EAAQ,YACxC,CALgBH,EAAAC,GAAA,sCAOT,IAAIG,GAAoB,EAClBC,GAAqD,CAAA,EAE5D,SAAUC,GAAkBC,EAAuB,CAEvD,IAAMC,EAAuBC,GAAiBF,CAAU,EAGxDG,GAAwBF,CAAoB,EAG5CG,GAAwBH,CAAoB,EAC5CI,GAA2BJ,CAAoB,EAE/CK,EAAQL,EAAuBL,GAAW,CACxCA,EAAQ,SAAWA,EAAQ,gBAAiB,OAAS,CACvD,CAAC,CACH,CAdgBH,EAAAM,GAAA,qBAgBV,SAAUG,GAAiBF,EAAuB,CACtD,IAAIO,EAASC,GAAMR,CAAU,EAEzBS,EAAaT,EACbU,EAAY,GAChB,KAAOA,GAAW,CAChBD,EAAaE,GACXC,GAAQC,EAAIJ,EAAaK,GAAgBA,EAAY,UAAU,CAAC,CAAC,EAGnE,IAAMC,EAAgBC,GAAWP,EAAYF,CAAM,EAEnDA,EAASA,EAAO,OAAOQ,CAAa,EAEhCE,EAAQF,CAAa,EACvBL,EAAY,GAEZD,EAAaM,EAGjB,OAAOR,CACT,CArBgBd,EAAAS,GAAA,oBAuBV,SAAUC,GAAwBH,EAAuB,CAC7DM,EAAQN,EAAac,GAAe,CAC7BI,GAAoBJ,CAAW,IAClChB,GAAgBD,EAAiB,EAAIiB,EAC/BA,EAAa,aAAejB,MAKlCsB,GAAsBL,CAAW,GACjC,CAACM,GAAQN,EAAY,UAAU,IAI/BA,EAAY,WAAa,CAACA,EAAY,UAAkC,GAGrEK,GAAsBL,CAAW,IACpCA,EAAY,WAAa,CAAA,GAGtBO,GAAgCP,CAAW,IAC9CA,EAAY,gBAAkB,CAAA,GAG3BQ,GAAmCR,CAAW,IACjDA,EAAY,mBAAqB,CAAA,EAErC,CAAC,CACH,CA7BgBrB,EAAAU,GAAA,2BA+BV,SAAUE,GAA2BL,EAAuB,CAChEM,EAAQN,EAAac,GAAe,CAElCA,EAAY,gBAAkB,CAAA,EAC9BR,EAAQQ,EAAY,mBAAqB,CAACS,EAAKC,IAAO,CACpDV,EAAY,gBAAiB,KAC3BhB,GAAgB0B,CAAwB,EAAE,YAAa,CAE3D,CAAC,CACH,CAAC,CACH,CAVgB/B,EAAAY,GAAA,8BAYV,SAAUD,GAAwBJ,EAAuB,CAC7DM,EAAQN,EAAac,GAAe,CAClCW,GAA8B,CAAA,EAAIX,CAAW,CAC/C,CAAC,CACH,CAJgBrB,EAAAW,GAAA,2BAMV,SAAUqB,GACdC,EACAC,EAAmB,CAEnBrB,EAAQoB,EAAOE,GAAY,CACzBD,EAAS,mBAAoBC,EAAS,YAAa,EAAI,EACzD,CAAC,EAEDtB,EAAQqB,EAAS,WAAaE,GAAgB,CAC5C,IAAMC,EAAUJ,EAAK,OAAOC,CAAQ,EAE/BI,GAASD,EAASD,CAAY,GACjCJ,GAA8BK,EAASD,CAAY,CAEvD,CAAC,CACH,CAfgBpC,EAAAgC,GAAA,iCAiBV,SAAUP,GAAoBtB,EAAkB,CACpD,OAAOoC,EAAIpC,EAAS,cAAc,CACpC,CAFgBH,EAAAyB,GAAA,uBAIV,SAAUC,GAAsBvB,EAAkB,CACtD,OAAOoC,EAAIpC,EAAS,YAAY,CAClC,CAFgBH,EAAA0B,GAAA,yBAIV,SAAUE,GAAgCzB,EAAkB,CAChE,OAAOoC,EAAIpC,EAAS,iBAAiB,CACvC,CAFgBH,EAAA4B,GAAA,mCAIV,SAAUC,GACd1B,EAAkB,CAElB,OAAOoC,EAAIpC,EAAS,oBAAoB,CAC1C,CAJgBH,EAAA6B,GAAA,sCAMV,SAAUW,GAAYrC,EAAkB,CAC5C,OAAOoC,EAAIpC,EAAS,cAAc,CACpC,CAFgBH,EAAAwC,GAAA,eClKT,IAAMC,GAAwD,CACnE,iCAAiCC,EAAa,CAC5C,MAAO,uDAAuDA,EAAM,KAAK,4BAC3E,EAEA,iCACEC,EACAC,EACAC,EACAC,EACAC,EAAe,CAEf,MACE,2BAA2BJ,EAAS,OAClCC,CA
AW,CACZ,iBAAiBA,CAAW,aAAkBC,CAAM,cAEzD,GC8BF,IAAYG,IAAZ,SAAYA,EAAwB,CAClCA,EAAAA,EAAA,gBAAA,CAAA,EAAA,kBACAA,EAAAA,EAAA,gBAAA,CAAA,EAAA,kBACAA,EAAAA,EAAA,iBAAA,CAAA,EAAA,mBACAA,EAAAA,EAAA,wBAAA,CAAA,EAAA,0BACAA,EAAAA,EAAA,yBAAA,CAAA,EAAA,2BACAA,EAAAA,EAAA,yBAAA,CAAA,EAAA,2BACAA,EAAAA,EAAA,yBAAA,CAAA,EAAA,2BACAA,EAAAA,EAAA,sCAAA,CAAA,EAAA,wCACAA,EAAAA,EAAA,wCAAA,CAAA,EAAA,0CACAA,EAAAA,EAAA,mDAAA,CAAA,EAAA,qDACAA,EAAAA,EAAA,0CAAA,EAAA,EAAA,4CACAA,EAAAA,EAAA,iBAAA,EAAA,EAAA,mBACAA,EAAAA,EAAA,oBAAA,EAAA,EAAA,sBACAA,EAAAA,EAAA,qBAAA,EAAA,EAAA,uBACAA,EAAAA,EAAA,oBAAA,EAAA,EAAA,sBACAA,EAAAA,EAAA,oBAAA,EAAA,EAAA,sBACAA,EAAAA,EAAA,kBAAA,EAAA,EAAA,oBACAA,EAAAA,EAAA,gDAAA,EAAA,EAAA,iDACF,GAnBYA,KAAAA,GAAwB,CAAA,EAAA,EAyBpC,IAAMC,GAA+C,CACnD,8BAA+B,GAC/B,iBAAkB,OAClB,uBAAwB,YACxB,yBAA0B,CAAC;EAAM,IAAI,EACrC,oBAAqB,GACrB,SAAU,GACV,qBAAsBC,GACtB,cAAe,GACf,gBAAiB,GACjB,gBAAiB,IAGnB,OAAO,OAAOD,EAAoB,EAE5B,IAAOE,GAAP,KAAY,CAzFlB,MAyFkB,CAAAC,EAAA,cA4BhB,YACYC,EACVC,EAAuBL,GAAoB,CAE3C,GAHU,KAAA,gBAAAI,EAvBL,KAAA,sBAAiD,CAAA,EACjD,KAAA,uBAAkD,CAAA,EAE/C,KAAA,mBAAuD,CAAA,EACvD,KAAA,6BAEN,CAAA,EAEM,KAAA,MAAkB,CAAA,EAElB,KAAA,YAA+C,CAAA,EAGjD,KAAA,gBAA2B,GAC3B,KAAA,cAAyB,GACzB,KAAA,UAAqB,GACrB,KAAA,mBAA8C,CAAA,EAu0BtD,KAAA,WAAa,CAAIE,EAAmBC,IAAyB,CAG3D,GAAI,KAAK,gBAAkB,GAAM,CAC/B,KAAK,kBACL,IAAMC,EAAS,IAAI,MAAM,KAAK,gBAAkB,CAAC,EAAE,KAAK,GAAI,EACxD,KAAK,gBAAkB,KAAK,mBAC9B,QAAQ,IAAI,GAAGA,CAAM,QAAQF,CAAS,GAAG,EAE3C,GAAM,CAAE,KAAAG,EAAM,MAAAC,CAAK,EAAKC,GAAMJ,CAAS,EAEjCK,EAAcH,EAAO,GAAK,QAAQ,KAAO,QAAQ,IACvD,OAAI,KAAK,gBAAkB,KAAK,mBAC9BG,EAAY,GAAGJ,CAAM,QAAQF,CAAS,WAAWG,CAAI,IAAI,EAE3D,KAAK,kBACEC,MAEP,QAAOH,EAAS,CAEpB,EAj1BM,OAAOF,GAAW,UACpB,MAAM,MACJ;8CACiD,EAKrD,KAAK,OAASQ,GAAO,CAAA,EAAIb,GAAsBK,CAAM,EAErD,IAAMS,EAAe,KAAK,OAAO,cAC7BA,IAAiB,IACnB,KAAK,kBAAoB,IACzB,KAAK,cAAgB,IACZ,OAAOA,GAAiB,WACjC,KAAK,kBAAoBA,EACzB,KAAK,cAAgB,IAEvB,KAAK,gBAAkB,GAEvB,KAAK,WAAW,oBAAqB,IAAK,CACxC,IAAIC,EACAC,EAAoB,GACxB,KAAK,WAAW,wBAAyB,IAAK,CAC5C,GACE,KAAK,OAAO,yBACZhB,GAAqB,uBAGrB,KAAK,OAAO,uBAAyBiB,WAGnC,KAAK,OAAO,2BACZjB,GAAqB,yBAErB,MAAM,MACJ;uGAC2G,EAKjH,GAAIK,EAAO,UAAYA,EAAO,oBAC5B,MAAM,MACJ,oEAAoE,EAIxE,KAAK,gBAAkB,kBAAkB,KACvC,KAAK,OAAO,gBAAgB,EAE9B,KAAK,cAAgB,QAAQ,KAAK,KAAK,OAAO,gBAAgB,EAG1Da,GAAQd,CAAe,EACzBW,EAAmB,CACjB,MAAO,CAAE,YAAaI,GAAMf,CAAe,CAAC,EAC5C,YAAagB,KAIfJ,EAAoB,GACpBD,EAAmBI,GAAiCf,CAAe,EAEvE,CAAC,EAEG,KAAK,OAAO,kBAAoB,KAClC,KAAK,WAAW,uBAAwB,IAAK,CAC3C,KAAK,sBAAwB,KAAK,sBAAsB,OACtDiB,GACEN,EACA,KAAK,gBACL,KAAK,OAAO,wBAAwB,CACrC,CAEL,CAAC,EAED,KAAK,WAAW,8BAA+B,IAAK,CAClD,KAAK,uBAAyB,KAAK,uBAAuB,OACxDO,GACEP,EACA,KAAK,gBACL,KAAK,OAAO,wBAAwB,CACrC,CAEL,CAAC,GAIHA,EAAiB,MAAQA,EAAiB,MACtCA,EAAiB,MACjB,CAAA,EAIJQ,EAAQR,EAAiB,MAAO,CAACS,EAAeC,IAAgB,CAC9DV,EAAiB,MAAMU,CAAY,EAAIC,GACrCF,EACCG,GAAgBC,GAAYD,CAAW,CAAC,CAE7C,CAAC,EAED,IAAME,EAAeC,GAAKf,EAAiB,KAAK,EAyDhD,GAvDAQ,EACER,EAAiB,MACjB,CAACgB,EAAyBC,IAAe,CACvC,KAAK,WAAW,UAAUA,CAAW,eAAgB,IAAK,CAcxD,GAbA,KAAK,MAAM,KAAKA,CAAW,EAEvB,KAAK,OAAO,kBAAoB,IAClC,KAAK,WAAW,mBAAoB,IAAK,CACvC,KAAK,sBAAwB,KAAK,sBAAsB,OACtDC,GAAiBF,EAAYF,CAAY,CAAC,CAE9C,CAAC,EAMCK,EAAQ,KAAK,qBAAqB,EAAG,CACvCC,GAAkBJ,CAAU,EAE5B,IAAIK,EACJ,KAAK,WAAW,oBAAqB,IAAK,CACxCA,EAAoBC,GAAkBN,EAAY,CAChD,yBACE,KAAK,OAAO,yBACd,iBAAkB1B,EAAO,iBACzB,oBAAqBA,EAAO,oBAC5B,SAAUA,EAAO,SACjB,OAAQ,KAAK,WACd,CACH,CAAC,EAED,KAAK,mBAAmB2B,CAAW,EACjCI,EAAkB,mBAEpB,KAAK,6BAA6BJ,CAAW,EAC3CI,EAAkB,6BAEpB,KAAK,YAAcvB,GACjB,CAAA,EACA,KAAK,YACLuB,EAAkB,WAAW,EAG/B,KAAK,UAAYA,EAAkB,WAAa,KAAK,UAErD,KAAK,mBAAmBJ,CAAW,EACjCI,EAAkB,eAExB,CAAC,CACH,CAAC,EAGH,KAAK,YAAcrB,EAAiB,YAGlC,CAACmB,EAAQ,KAAK,qBAAqB,GACnC,CAAC,KAAK,OAAO,8BACb
,CAIA,IAAMI,EAHiBC,EAAI,KAAK,sBAAwBC,GAC/CA,EAAM,OACd,EAC2C,KAC1C;CAA2B,EAE7B,MAAM,IAAI,MACR;EAA8CF,CAAoB,EAKtEf,EAAQ,KAAK,uBAAyBkB,GAAqB,CACzDC,GAAcD,EAAkB,OAAO,CACzC,CAAC,EAED,KAAK,WAAW,uCAAwC,IAAK,CAwB3D,GApBIE,IACF,KAAK,UAAiBC,GACtB,KAAK,MAAQ,KAAK,gBAElB,KAAK,gBAAkBC,GACvB,KAAK,MAAQ,KAAK,eAGhB7B,IACF,KAAK,YAAc6B,IAGjB,KAAK,kBAAoB,KAC3B,KAAK,iBAAmBD,IAGtB,KAAK,gBAAkB,KACzB,KAAK,iCAAmCC,IAGtC,QAAQ,KAAK,KAAK,OAAO,gBAAgB,EAC3C,KAAK,oBAAsB,KAAK,wBACvB,aAAa,KAAK,KAAK,OAAO,gBAAgB,EACvD,KAAK,oBAAsB,KAAK,6BACvB,cAAc,KAAK,KAAK,OAAO,gBAAgB,EACxD,KAAK,oBAAsB,KAAK,0BAEhC,OAAM,MACJ,8CAA8C,KAAK,OAAO,gBAAgB,GAAG,EAI7E,KAAK,WACP,KAAK,SAAW,KAAK,kBACrB,KAAK,cAAgB,KAAK,0BAE1B,KAAK,SAAW,KAAK,0BACrB,KAAK,cAAgB,KAAK,sBAE9B,CAAC,EAED,KAAK,WAAW,+BAAgC,IAAK,CACnD,IAAMC,EAAmBC,GACvB,KAAK,mBACL,CAACC,EAAmBC,EAAgBC,KAC9BD,IAAmB,IACrBD,EAAkB,KAAKE,CAAQ,EAE1BF,GAET,CAAA,CAAc,EAGhB,GAAI3C,EAAO,qBAAuB,CAAC6B,EAAQY,CAAgB,EACzD,MAAM,MACJ,kBAAkBA,EAAiB,KACjC,IAAI,CACL;;yEAE4E,CAGnF,CAAC,EAED,KAAK,WAAW,yBAA0B,IAAK,CAC7CK,GAAsB,CACxB,CAAC,EAED,KAAK,WAAW,mBAAoB,IAAK,CACvCC,GAAiB,IAAI,CACvB,CAAC,CACH,CAAC,CACH,CAEO,SACLC,EACAC,EAAsB,KAAK,YAAW,CAEtC,GAAI,CAACpB,EAAQ,KAAK,qBAAqB,EAAG,CAIxC,IAAMI,EAHiBC,EAAI,KAAK,sBAAwBC,GAC/CA,EAAM,OACd,EAC2C,KAC1C;CAA2B,EAE7B,MAAM,IAAI,MACR;EACEF,CAAoB,EAI1B,OAAO,KAAK,iBAAiBe,EAAMC,CAAW,CAChD,CAMQ,iBAAiBD,EAAcC,EAAmB,CACxD,IAAIC,EACFC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACIC,EAAUlB,EACVmB,EAAYD,EAAQ,OACtBE,EAAS,EACTC,GAAqB,EAKnBC,GAAwB,KAAK,UAC/B,EACA,KAAK,MAAMtB,EAAK,OAAS,EAAE,EACzBuB,GAAgB,IAAI,MAAMD,EAAqB,EAC/CE,GAAyB,CAAA,EAC3BC,GAAO,KAAK,gBAAkB,EAAI,OAClCC,EAAS,KAAK,gBAAkB,EAAI,OAClCC,EAAcC,GAAiB,KAAK,WAAW,EAC/CC,EAAa,KAAK,gBAClBC,EAAwB,KAAK,OAAO,uBAEtCC,EAAyB,EACzBC,EAAuC,CAAA,EACvCC,EAEA,CAAA,EAEEC,GAAsB,CAAA,EAEtBC,GAA+B,CAAA,EACrC,OAAO,OAAOA,EAAU,EACxB,IAAIC,GAEJ,SAASC,IAAuB,CAC9B,OAAOL,CACT,CAFSlF,EAAAuF,GAAA,2BAIT,SAASC,GAA6BC,GAAgB,CACpD,IAAMC,GAAmBC,GAAyBF,EAAQ,EACpDG,GACJT,EAAiCO,EAAgB,EACnD,OAAIE,KAAqB,OAChBP,GAEAO,EAEX,CATS5F,EAAAwF,GAAA,gCAWT,IAAMK,GAAW7F,EAAC8F,IAAoB,CAEpC,GACEV,GAAU,SAAW,GAGrBU,GAAS,UAAU,YAAc,OACjC,CAGA,IAAM5B,GACJ,KAAK,OAAO,qBAAqB,iCAC/B4B,EAAQ,EAGZpB,GAAO,KAAK,CACV,OAAQoB,GAAS,YACjB,KAAMA,GAAS,UACf,OAAQA,GAAS,YACjB,OAAQA,GAAS,MAAM,OACvB,QAAS5B,GACV,MACI,CACLkB,GAAU,IAAG,EACb,IAAMW,GAAUC,GAAKZ,EAAS,EAC9BF,EAAqB,KAAK,mBAAmBa,EAAO,EACpDZ,EACE,KAAK,6BAA6BY,EAAO,EAC3Cd,EAAyBC,EAAmB,OAC5C,IAAMe,GACJ,KAAK,mBAAmBF,EAAO,GAAK,KAAK,OAAO,WAAa,GAE3DZ,GAAoCc,GACtCX,GAAsBE,GAEtBF,GAAsBC,GAG5B,EAtCiB,YAwCjB,SAASW,GAAuBH,GAAe,CAC7CX,GAAU,KAAKW,EAAO,EACtBZ,EACE,KAAK,6BAA6BY,EAAO,EAE3Cb,EAAqB,KAAK,mBAAmBa,EAAO,EACpDd,EAAyBC,EAAmB,OAE5CD,EAAyBC,EAAmB,OAC5C,IAAMe,GACJ,KAAK,mBAAmBF,EAAO,GAAK,KAAK,OAAO,WAAa,GAE3DZ,GAAoCc,GACtCX,GAAsBE,GAEtBF,GAAsBC,EAE1B,CAjBSvF,EAAAkG,GAAA,aAqBTA,GAAU,KAAK,KAAM/C,CAAW,EAEhC,IAAIgD,GAEEC,GAAkB,KAAK,OAAO,gBAEpC,KAAO9B,EAASD,GAAW,CACzBZ,EAAe,KAEf,IAAM4C,GAAejC,EAAQ,WAAWE,CAAM,EACxCgC,GAA2BhB,GAAoBe,EAAY,EAC3DE,GAAuBD,GAAyB,OAEtD,IAAKlD,EAAI,EAAGA,EAAImD,GAAsBnD,IAAK,CACzC+C,GAAaG,GAAyBlD,CAAC,EACvC,IAAMoD,GAAcL,GAAW,QAC/BzC,EAAU,KAGV,IAAM+C,GAAiBN,GAAW,MA0BlC,GAzBIM,KAAmB,GACjBJ,KAAiBI,KAEnBhD,EAAe+C,IAERL,GAAW,WAAa,IACjChC,EAASqC,GAA4B,KACnCpC,EACAE,EACAG,GACAI,CAAM,EAEJV,IAAU,MACZV,EAAeU,EAAM,CAAC,EACjBA,EAAqC,UAAY,SACpDT,EAAWS,EAAqC,UAGlDV,EAAe,OAGjB,KAAK,gBAAgB+C,GAAuBlC,CAAM,EAClDb,EAAe,KAAK,MAAM+C,GAAuBtD,EAAMoB,CAAM,GAG3Db,IAAiB,KAAM,CAIzB,GADAD,EAAY2C,GAAW,UACnB3C,IAAc,OAAW,CAG3B,IAAMkD,GAAkBlD,EAAU,OAClC,IAAKF,EAAI,EAAGA,EAAIoD,GAAiBpD,IAAK,CACpC,IAAMqD,GAAkBzB,EAAmB1B,EAAUF,CAAC,CAAC,EACjD
sD,GAAmBD,GAAgB,QA+BzC,GA9BAhD,EAAa,KAITgD,GAAgB,WAAa,IAC/BxC,EAASyC,GAAiC,KACxCxC,EACAE,EACAG,GACAI,CAAM,EAEJV,IAAU,MACZZ,EAAgBY,EAAM,CAAC,EAEpBA,EAAqC,UAAY,SAElDR,EAAcQ,EAAqC,UAGrDZ,EAAgB,OAGlB,KAAK,gBAAgBqD,GAA4BtC,CAAM,EACvDf,EAAgB,KAAK,MACnBqD,GACA1D,EACAoB,CAAM,GAINf,GAAiBA,EAAc,OAASE,EAAa,OAAQ,CAC/DA,EAAeF,EACfG,EAAUC,EACVwC,GAAaQ,GAGb,QAIN,OAKJ,GAAIlD,IAAiB,KAAM,CAoCzB,GAnCAG,EAAcH,EAAa,OAC3BI,EAAQsC,GAAW,MACftC,IAAU,SACZC,EAAUqC,GAAW,aAGrBpC,EAAW,KAAK,oBACdN,EACAa,EACAR,EACAqC,GAAW,UACXxB,GACAC,EACAhB,CAAW,EAGb,KAAK,cAAcG,EAAUL,CAAO,EAGhCG,IAAU,GACZU,GAAqB,KAAK,SACxBE,GACAF,GACAR,CAAQ,EAGVc,EAAOhB,CAAK,EAAE,KAAKE,CAAQ,GAG/Bb,EAAO,KAAK,UAAUA,EAAMU,CAAW,EACvCU,EAASA,EAASV,EAGlBgB,EAAS,KAAK,iBAAiBA,EAAShB,CAAW,EAE/CmB,IAAe,IAAQoB,GAAW,oBAAsB,GAAM,CAChE,IAAIU,GAAkB,EAClBC,GACAC,GACJ/B,EAAsB,UAAY,EAClC,GACE8B,GAAkB9B,EAAsB,KAAKvB,CAAY,EACrDqD,KAAoB,KACtBC,GAAkB/B,EAAsB,UAAY,EACpD6B,YAEKC,KAAoB,IAEzBD,KAAoB,IACtBlC,GAAOA,GAAQkC,GACfjC,EAAShB,EAAcmD,GACvB,KAAK,iCACHhD,EACAF,EACAkD,GACAF,GACAlC,GACAC,EACAhB,CAAW,GAKjB,KAAK,YAAYuC,GAAYN,GAAUK,GAAWnC,CAAS,MACtD,CAEL,IAAMiD,GAAmB1C,EACnB2C,GAAYtC,GACZuC,GAActC,EAChBuC,GAAmBf,KAAoB,GAE3C,KAAOe,KAAqB,IAAS7C,EAASD,GAI5C,IAFAnB,EAAO,KAAK,UAAUA,EAAM,CAAC,EAC7BoB,IACKjB,EAAI,EAAGA,EAAI4B,EAAwB5B,IAAK,CAC3C,IAAM8C,GAAajB,EAAmB7B,CAAC,EACjCmD,GAAcL,GAAW,QAGzBM,GAAiBN,GAAW,MAmBlC,GAlBIM,KAAmB,GACjBrC,EAAQ,WAAWE,CAAM,IAAMmC,KAEjCU,GAAmB,IAEZhB,GAAW,WAAa,GACjCgB,GACGX,GAA4B,KAC3BpC,EACAE,EACAG,GACAI,CAAM,IACF,MAER,KAAK,gBAAgB2B,GAAuBlC,CAAM,EAClD6C,GAAoBX,GAAuB,KAAKtD,CAAI,IAAM,MAGxDiE,KAAqB,GACvB,MAuBN,GAlBAnD,EAAYM,EAAS0C,GACrBpC,EAAS,KAAK,iBAAiBA,EAASZ,CAAS,EAEjDE,EAAM,KAAK,OAAO,qBAAqB,iCACrCE,EACA4C,GACAhD,EACAiD,GACAC,EAAW,EAEbxC,GAAO,KAAK,CACV,OAAQsC,GACR,KAAMC,GACN,OAAQC,GACR,OAAQlD,EACR,QAASE,EACV,EAEGkC,KAAoB,GACtB,OAON,OAAK,KAAK,YAER3B,GAAc,OAASF,IAGlB,CACL,OAAQE,GACR,OAAQI,EACR,OAAQH,GAEZ,CAEQ,YACNxE,EACA2F,EACAK,EACAnC,EAAgB,CAEhB,GAAI7D,EAAO,MAAQ,GAAM,CAGvB,IAAMkH,EAAWlH,EAAO,KACxB2F,EAAS9B,CAAQ,EACbqD,IAAa,QACflB,EAAU,KAAK,KAAMkB,CAAQ,OAEtBlH,EAAO,OAAS,QACzBgG,EAAU,KAAK,KAAMhG,EAAO,IAAI,CAEpC,CAEQ,UAAUgD,EAAcmE,EAAc,CAC5C,OAAOnE,EAAK,UAAUmE,CAAM,CAC9B,CAEQ,gBAAgBC,EAAgBC,EAAoB,CAC1DD,EAAO,UAAYC,CACrB,CAGQ,iCACNxD,EACAF,EACA2D,EACAX,EACAlC,EACAC,EACAhB,EAAmB,CAEnB,IAAI6D,EAAcC,EACd7D,IAAU,SAEZ4D,EAAeD,IAAc5D,EAAc,EAC3C8D,EAAmBD,EAAe,GAAK,EACjCZ,IAAoB,GAAKY,IAAiB,KAE9C1D,EAAS,QAAUY,EAAO+C,EAG1B3D,EAAS,UAAYa,EAAS,EAAI,CAAC8C,GAIzC,CAEQ,iBAAiBC,EAAmB/D,EAAmB,CAC7D,OAAO+D,EAAY/D,CACrB,CAMQ,sBACNgE,EACAC,EACAC,EACAC,EAAoB,CAEpB,MAAO,CACL,MAAAH,EACA,YAAAC,EACA,aAAAC,EACA,UAAAC,EAEJ,CAEQ,qBACNH,EACAC,EACAC,EACAC,EACAC,EACAC,EAAmB,CAEnB,MAAO,CACL,MAAAL,EACA,YAAAC,EACA,UAAAG,EACA,YAAAC,EACA,aAAAH,EACA,UAAAC,EAEJ,CAEQ,gBACNH,EACAC,EACAC,EACAC,EACAC,EACAC,EACArE,EAAmB,CAEnB,MAAO,CACL,MAAAgE,EACA,YAAAC,EACA,UAAWA,EAAcjE,EAAc,EACvC,UAAAoE,EACA,QAASA,EACT,YAAAC,EACA,UAAWA,EAAcrE,EAAc,EACvC,aAAAkE,EACA,UAAAC,EAEJ,CAUQ,kBACNG,EACAC,EACAC,EAAkB,CAElB,OAAAF,EAAY,KAAKE,CAAU,EACpBD,CACT,CAEQ,0BACND,EACAC,EACAC,EAAkB,CAElB,OAAAF,EAAYC,CAAK,EAAIC,EACrBD,IACOA,CACT,CAKQ,sBAAsBE,EAAe3E,EAAY,CAAS,CAE1D,wBAAwB2E,EAAe3E,EAAY,CACrDA,IAAY,OACd2E,EAAM,QAAU3E,EAEpB,CASQ,cACN4E,EACApF,EACAoB,EAAc,CAGd,OADcgE,EAAQ,KAAKpF,CAAI,IACjB,GACLA,EAAK,UAAUoB,EAAQgE,EAAQ,SAAS,EAE1C,IACT,CAEQ,cAAcA,EAAiBpF,EAAY,CACjD,IAAMqF,EAAcD,EAAQ,KAAKpF,CAAI,EACrC,OAAOqF,IAAgB,KAAOA,EAAY,CAAC,EAAI,IACjD,GAx1BcxI,GAAA,QACZ,6LAGYA,GAAA,GAAK,iBCzFf,SAAUyI,GAAWC,EAAkB,CAC3C,OAAIC,GAAcD,CAAO,EAChBA,EAAQ,MAERA,EAAQ,IAEnB,CANgBE,EAAAH,GAAA,cAYV,SAAUI,GACdC,EAAc,CAEd,OAAOC,GAASD,EAAI,KAAK,GAAKA,EAAI,QAA
U,EAC9C,CAJgBE,EAAAH,GAAA,iBAMhB,IAAMI,GAAS,SACTC,GAAa,aACbC,GAAQ,QACRC,GAAQ,QACRC,GAAY,YACZC,GAAW,WACXC,GAAa,aACbC,GAAc,cACdC,GAAmB,mBAEnB,SAAUC,GAAYC,EAAoB,CAC9C,OAAOC,GAAoBD,CAAM,CACnC,CAFgBX,EAAAU,GAAA,eAIhB,SAASE,GAAoBD,EAAoB,CAC/C,IAAME,EAAUF,EAAO,QAEjBG,EAA4B,CAAA,EAOlC,GANAA,EAAU,KAAOH,EAAO,KAEnBI,GAAYF,CAAO,IACtBC,EAAU,QAAUD,GAGlBG,EAAIL,EAAQV,EAAM,EACpB,KACE;8FAKJ,OAAIe,EAAIL,EAAQT,EAAU,IAExBY,EAAU,WAAkBH,EAAOT,EAAU,GAG/Ce,GAAkB,CAACH,CAAS,CAAC,EAEzBE,EAAIL,EAAQR,EAAK,IACnBW,EAAU,MAAQH,EAAOR,EAAK,GAG5Ba,EAAIL,EAAQP,EAAK,IACnBU,EAAU,MAAQH,EAAOP,EAAK,GAG5BY,EAAIL,EAAQL,EAAQ,IACtBQ,EAAU,SAAWH,EAAOL,EAAQ,GAGlCU,EAAIL,EAAQN,EAAS,IACvBS,EAAU,UAAYH,EAAON,EAAS,GAGpCW,EAAIL,EAAQJ,EAAU,IACxBO,EAAU,WAAaH,EAAOJ,EAAU,GAGtCS,EAAIL,EAAQH,EAAW,IACzBM,EAAU,YAAcH,EAAOH,EAAW,GAGxCQ,EAAIL,EAAQF,EAAgB,IAC9BK,EAAU,iBAAmBH,EAAOF,EAAgB,GAG/CK,CACT,CArDSd,EAAAY,GAAA,uBAuDF,IAAMM,GAAMR,GAAY,CAAE,KAAM,MAAO,QAASS,GAAM,EAAE,CAAE,EACjEF,GAAkB,CAACC,EAAG,CAAC,EAEjB,SAAUE,GACdC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EACAC,EAAiB,CAEjB,MAAO,CACL,MAAAN,EACA,YAAAC,EACA,UAAAC,EACA,UAAAC,EACA,QAAAC,EACA,YAAAC,EACA,UAAAC,EACA,aAAoBP,EAAS,aAC7B,UAAWA,EAEf,CArBgBrB,EAAAoB,GAAA,uBAuBV,SAAUS,GAAaC,EAAeT,EAAkB,CAC5D,OAAOU,GAAuBD,EAAOT,CAAO,CAC9C,CAFgBrB,EAAA6B,GAAA,gBCnGT,IAAMG,GAA0D,CACrE,0BAA0B,CAAE,SAAAC,EAAU,OAAAC,EAAQ,SAAAC,EAAU,SAAAC,CAAQ,EAAE,CAQhE,MAFY,aALKC,GAAcJ,CAAQ,EAEnC,OAAOK,GAAWL,CAAQ,CAAC,OAC3B,qBAAqBA,EAAS,IAAI,MAEF,mBAAmBC,EAAO,KAAK,OAGrE,EAEA,8BAA8B,CAAE,eAAAK,EAAgB,SAAAH,CAAQ,EAAE,CACxD,MAAO,6CAA+CG,EAAe,KACvE,EAEA,wBAAwB,CACtB,oBAAAC,EACA,OAAAN,EACA,SAAAC,EACA,sBAAAM,EACA,SAAAL,CAAQ,EACT,CACC,IAAMM,EAAY,cAGZC,EAAY;cADCC,GAAMV,CAAM,EAAG,MACgB,IAElD,GAAIO,EACF,OAAOC,EAAYD,EAAwBE,EACtC,CACL,IAAME,EAAoBC,GACxBN,EACA,CAACO,EAAQC,IAAiBD,EAAO,OAAOC,CAAY,EACpD,CAAA,CAAmB,EAEfC,EAA0BC,EAC9BL,EACCM,GACC,IAAID,EAAIC,EAAWC,GAAkBd,GAAWc,CAAa,CAAC,EAAE,KAC9D,IAAI,CACL,GAAG,EAMFC,EAAwB;EAJCH,EAC7BD,EACA,CAACK,EAASC,IAAQ,KAAKA,EAAM,CAAC,KAAKD,CAAO,EAAE,EAEkD,KAC9F;CAAI,CACL,GAED,OAAOZ,EAAYW,EAAwBV,EAE/C,EAEA,sBAAsB,CACpB,uBAAAa,EACA,OAAAtB,EACA,sBAAAO,EACA,SAAAL,CAAQ,EACT,CACC,IAAMM,EAAY,cAGZC,EAAY;cADCC,GAAMV,CAAM,EAAG,MACgB,IAElD,GAAIO,EACF,OAAOC,EAAYD,EAAwBE,EACtC,CAQL,IAAMU,EACJ;KAR8BH,EAC9BM,EACCL,GACC,IAAID,EAAIC,EAAWC,GAAkBd,GAAWc,CAAa,CAAC,EAAE,KAC9D,GAAG,CACJ,GAAG,EAIsB,KAAK,IAAI,CAAC,IAExC,OAAOV,EAAYW,EAAwBV,EAE/C,GAGF,OAAO,OAAOX,EAA0B,EAEjC,IAAMyB,GACX,CACE,uBACEC,EACAC,EAA0B,CAS1B,MANE,gEACAA,EAAc,gBACd;2BAEAD,EAAa,KACb,IAEJ,GAGSE,GACX,CACE,yBACEF,EACAG,EAA2C,CAE3C,SAASC,EACPC,EAA+B,CAE/B,OAAIA,aAAgBC,EACXD,EAAK,aAAa,KAChBA,aAAgBE,EAClBF,EAAK,gBAEL,EAEX,CAVSG,EAAAJ,EAAA,8BAYT,IAAMK,EAAeT,EAAa,KAC5BU,EAAgBxB,GAAMiB,CAAc,EACpCQ,EAAQD,EAAc,IACtBE,EAAUC,GAAqBH,CAAa,EAC5CI,EAAgBV,EAA2BM,CAAa,EAExDK,EAAmBJ,EAAQ,EAC7BK,EAAM,KAAKJ,CAAO,GAAGG,EAAmBJ,EAAQ,EAAE,MACpDG,EAAgB,oBAAoBA,CAAa,KAAO,EAC1D;4CAEcX,EAAe,MACjB,oCAAoCM,CAAY;;oBAK5D,OAAAO,EAAMA,EAAI,QAAQ,UAAW,GAAG,EAChCA,EAAMA,EAAI,QAAQ,SAAU;CAAI,EAEzBA,CACT,EAEA,4BAA4BC,EAAU,CAQpC,MANE;0EAC2EA,EAAK,IAAI;;;uDAMxF,EAEA,qCAAqCC,EAKpC,CACC,IAAMC,EAAU3B,EAAI0B,EAAQ,WAAaE,GACvCxC,GAAWwC,CAAO,CAAC,EACnB,KAAK,IAAI,EACLC,EACJH,EAAQ,YAAY,MAAQ,EAAI,GAAKA,EAAQ,YAAY,IAU3D,MARE,4BAA4BA,EAAQ,iBAAiB,KACnD,IAAI,CACL;QACQG,CAAU,aAAaH,EAAQ,aAAa,IAAI;GACrDC,CAAO;;qBAKf,EAEA,+BAA+BD,EAK9B,CACC,IAAMC,EAAU3B,EAAI0B,EAAQ,WAAaI,GACvC1C,GAAW0C,CAAO,CAAC,EACnB,KAAK,IAAI,EACLD,EACJH,EAAQ,YAAY,MAAQ,EAAI,GAAKA,EAAQ,YAAY,IACvDK,EACF,qCAAqCL,EAAQ,iBAAiB,KAC5D,IAAI,CACL,WAAWG,CAAU,aACVH,EAAQ,aAAa,IAAI;GACjCC,CAAO;EAEb,OAAAI,EACEA,EACA;sBAEKA,CACT,EAEA,0BAA0BL,EAGzB,CACC,IAAIN,EAAUC,GAAqBK,EAAQ,UAAU,
EACrD,OAAIA,EAAQ,WAAW,MAAQ,IAC7BN,GAAWM,EAAQ,WAAW,KAI9B,mBAAmBN,CAAO,kBAAkBM,EAAQ,aAAa,IAAI;qCAIzE,EAIA,oBAAoBA,EAGnB,CAEC,MAAO,YACT,EAEA,2BAA2BA,EAI1B,CAMC,MAJE,iCAAiCA,EAAQ,eAAiB,CAAC,WACjDA,EAAQ,YAAY,GAAG,aAAaA,EAAQ,aAAa,IAAI;uDAI3E,EAEA,8BAA8BA,EAG7B,CASC,MAPE;KACMA,EAAQ,YAAY,GAAG,aAC3BA,EAAQ,aAAa,IACvB;OACEA,EAAQ,YAAY,WAAW,OAAS,CAC1C,gBAGJ,EAEA,wBAAwBA,EAGvB,CACC,IAAMxC,EAAWwC,EAAQ,aAAa,KAChCM,EAAYhC,EAChB0B,EAAQ,kBACPO,GAAaA,EAAS,IAAI,EAEvBC,EAAoB,GAAGhD,CAAQ,QAAQ8C,EAC1C,OAAO,CAAC9C,CAAQ,CAAC,EACjB,KAAK,OAAO,CAAC,GAQhB,MANE;SACUA,CAAQ;;GACwDgD,CAAiB;;6DAK/F,EAIA,0BAA0BR,EAGzB,CAEC,MAAO,YACT,EAEA,4BAA4BA,EAG3B,CACC,IAAIxC,EACJ,OAAIwC,EAAQ,wBAAwBS,GAClCjD,EAAWwC,EAAQ,aAAa,KAEhCxC,EAAWwC,EAAQ,aAGN,iCAAiCxC,CAAQ,2CAA2CwC,EAAQ,WAAW,IAGxH,GCxTE,SAAUU,GACdC,EACAC,EAAoD,CAEpD,IAAMC,EAAc,IAAIC,GAAuBH,EAAWC,CAAc,EACxE,OAAAC,EAAY,YAAW,EAChBA,EAAY,MACrB,CAPgBE,EAAAL,GAAA,kBASV,IAAOI,GAAP,cAAsCE,EAAW,CApBvD,MAoBuD,CAAAD,EAAA,+BAIrD,YACUE,EACAL,EAAoD,CAE5D,MAAK,EAHG,KAAA,cAAAK,EACA,KAAA,eAAAL,EALH,KAAA,OAAgD,CAAA,CAQvD,CAEO,aAAW,CAChBM,EAAQC,GAAO,KAAK,aAAa,EAAIC,GAAQ,CAC3C,KAAK,aAAeA,EACpBA,EAAK,OAAO,IAAI,CAClB,CAAC,CACH,CAEO,iBAAiBC,EAAiB,CACvC,IAAMC,EAAM,KAAK,cAAcD,EAAK,eAAe,EAEnD,GAAKC,EAYHD,EAAK,eAAiBC,MAZd,CACR,IAAMC,EAAM,KAAK,eAAe,uBAC9B,KAAK,aACLF,CAAI,EAEN,KAAK,OAAO,KAAK,CACf,QAASE,EACT,KAAMC,GAA0B,uBAChC,SAAU,KAAK,aAAa,KAC5B,kBAAmBH,EAAK,gBACzB,EAIL,GCtBI,IAAgBI,GAAhB,cAAyDC,EAAU,CAjCzE,MAiCyE,CAAAC,EAAA,yCAUvE,YACYC,EACAC,EAAkB,CAE5B,MAAK,EAHK,KAAA,QAAAD,EACA,KAAA,KAAAC,EAXF,KAAA,iBAAgC,CAAA,EAIhC,KAAA,mBAAqB,GACrB,KAAA,yBAA2B,EAC3B,KAAA,MAAQ,GACR,KAAA,cAAgB,EAO1B,CAEA,cAAY,CAGV,GAFA,KAAK,MAAQ,GAET,KAAK,KAAK,UAAU,CAAC,IAAM,KAAK,QAAQ,KAC1C,MAAM,MAAM,qDAAqD,EAInE,YAAK,UAAYC,GAAM,KAAK,KAAK,SAAS,EAAE,QAAO,EACnD,KAAK,gBAAkBA,GAAM,KAAK,KAAK,eAAe,EAAE,QAAO,EAG/D,KAAK,UAAU,IAAG,EAClB,KAAK,gBAAgB,IAAG,EAExB,KAAK,mBAAkB,EACvB,KAAK,KAAK,KAAK,OAAO,EAEf,KAAK,gBACd,CAEA,KACEC,EACAC,EAA0B,CAAA,EAAE,CAGvB,KAAK,OACR,MAAM,KAAKD,EAAMC,CAAQ,CAE7B,CAEA,YACEC,EACAC,EACAF,EAAuB,CAGvB,GACEC,EAAQ,eAAe,OAAS,KAAK,oBACrCA,EAAQ,MAAQ,KAAK,yBACrB,CACA,IAAME,EAAWD,EAAS,OAAOF,CAAQ,EACzC,KAAK,mBAAkB,EACvB,KAAK,KAAKC,EAAQ,eAAqBE,CAAQ,EAEnD,CAEA,oBAAkB,CAEZC,EAAQ,KAAK,SAAS,GAGxB,KAAK,mBAAqB,GAC1B,KAAK,yBAA2B,EAChC,KAAK,cAAgB,KAErB,KAAK,mBAAqB,KAAK,UAAU,IAAG,EAC5C,KAAK,yBAA2B,KAAK,gBAAgB,IAAG,EAE5D,GAGWC,GAAP,cAAoCZ,EAAgC,CAhH1E,MAgH0E,CAAAE,EAAA,6BAIxE,YACEC,EACUC,EAAuB,CAEjC,MAAMD,EAASC,CAAI,EAFT,KAAA,KAAAA,EALJ,KAAA,iBAAmB,GACnB,KAAA,uBAAyB,EAO/B,KAAK,iBAAmB,KAAK,KAAK,QAAQ,KAC1C,KAAK,uBAAyB,KAAK,KAAK,iBAC1C,CAEA,aACES,EACAJ,EACAF,EAAuB,CAEvB,GACE,KAAK,eACLM,EAAS,aAAa,OAAS,KAAK,kBACpCA,EAAS,MAAQ,KAAK,wBACtB,CAAC,KAAK,MACN,CACA,IAAMH,EAAWD,EAAS,OAAOF,CAAQ,EACnCO,EAAW,IAAIC,EAAY,CAAE,WAAYL,CAAQ,CAAE,EACzD,KAAK,iBAAmBM,GAAMF,CAAQ,EACtC,KAAK,MAAQ,GAEjB,GAeWG,GAAP,cAAyDhB,EAAU,CA5JzE,MA4JyE,CAAAC,EAAA,kDAOvE,YACYgB,EACAC,EAAkB,CAE5B,MAAK,EAHK,KAAA,QAAAD,EACA,KAAA,WAAAC,EARF,KAAA,OAAgC,CACxC,MAAO,OACP,WAAY,OACZ,YAAa,OAQf,CAEA,cAAY,CACV,YAAK,KAAK,KAAK,OAAO,EACf,KAAK,MACd,GAGWC,GAAP,cAA2CH,EAAyC,CAhL1F,MAgL0F,CAAAf,EAAA,oCACxF,SACEmB,EACAZ,EACAF,EAAuB,CAEvB,GAAIc,EAAS,MAAQ,KAAK,WAAY,CACpC,IAAMC,EAAiBC,GAAOd,EAAS,OAAOF,CAAQ,CAAC,EACvD,KAAK,OAAO,YAAce,IAAmB,OACzCA,aAA0BE,IAC5B,KAAK,OAAO,MAAQF,EAAe,aACnC,KAAK,OAAO,WAAaA,EAAe,UAG1C,MAAM,SAASD,EAAUZ,EAAUF,CAAQ,CAE/C,GAGWkB,GAAP,cAA8CR,EAAyC,CAnM7F,MAmM6F,CAAAf,EAAA,uCAC3F,YACEwB,EACAjB,EACAF,EAAuB,CAEvB,GAAImB,EAAY,MAAQ,KAAK,WAAY,CACvC,IAAMC,EAAoBJ,GAAOd,EAAS,OAAOF,CAAQ,CAAC,EAC1D,KAAK,OAAO,YAAcoB,IAAsB,OAC5CA,aAA6BH,IAC/B,KAAK,OAAO,MAAQG,EAAkB,aACtC,KAAK,OAAO,WAAaA,EAAkB,UAG7C,MAAM,Y
AAYD,EAAajB,EAAUF,CAAQ,CAErD,GAGWqB,GAAP,cAAiDX,EAAyC,CAtNhG,MAsNgG,CAAAf,EAAA,0CAC9F,eACE2B,EACApB,EACAF,EAAuB,CAEvB,GAAIsB,EAAe,MAAQ,KAAK,WAAY,CAC1C,IAAMC,EAAuBP,GAAOd,EAAS,OAAOF,CAAQ,CAAC,EAC7D,KAAK,OAAO,YAAcuB,IAAyB,OAC/CA,aAAgCN,IAClC,KAAK,OAAO,MAAQM,EAAqB,aACzC,KAAK,OAAO,WAAaA,EAAqB,UAGhD,MAAM,eAAeD,EAAgBpB,EAAUF,CAAQ,CAE3D,GAIWwB,GAAP,cAAoDd,EAAyC,CA1OnG,MA0OmG,CAAAf,EAAA,6CACjG,kBACE8B,EACAvB,EACAF,EAAuB,CAEvB,GAAIyB,EAAkB,MAAQ,KAAK,WAAY,CAC7C,IAAMC,EAAoCV,GACxCd,EAAS,OAAOF,CAAQ,CAAC,EAE3B,KAAK,OAAO,YAAc0B,IAAsC,OAC5DA,aAA6CT,IAC/C,KAAK,OAAO,MAAQS,EAAkC,aACtD,KAAK,OAAO,WAAaA,EAAkC,UAG7D,MAAM,kBAAkBD,EAAmBvB,EAAUF,CAAQ,CAEjE,GAQI,SAAU2B,GACdC,EACAC,EACAC,EAAwB,CAAA,EAAE,CAG1BA,EAAWhC,GAAMgC,CAAQ,EACzB,IAAIC,EAAmC,CAAA,EACnC,EAAI,EAGR,SAASC,EAAkBC,EAAsB,CAC/C,OAAOA,EAAQ,OAAOC,GAAKN,EAAW,EAAI,CAAC,CAAC,CAC9C,CAFSjC,EAAAqC,EAAA,qBAKT,SAASG,EAAuBC,EAAyB,CACvD,IAAMC,EAAeV,GACnBK,EAAkBI,CAAU,EAC5BP,EACAC,CAAQ,EAEV,OAAOC,EAAO,OAAOM,CAAY,CACnC,CASA,IAhBS1C,EAAAwC,EAAA,0BAgBFL,EAAS,OAASD,GAAa,EAAID,EAAU,QAAQ,CAC1D,IAAM7B,EAAO6B,EAAU,CAAC,EAGxB,GAAI7B,aAAgBS,EAClB,OAAO2B,EAAuBpC,EAAK,UAAU,EACxC,GAAIA,aAAgBuC,EACzB,OAAOH,EAAuBpC,EAAK,UAAU,EACxC,GAAIA,aAAgBwC,EACzBR,EAASI,EAAuBpC,EAAK,UAAU,UACtCA,aAAgByC,EAAqB,CAC9C,IAAMC,EAAS1C,EAAK,WAAW,OAAO,CACpC,IAAI2C,EAAW,CACb,WAAY3C,EAAK,WAClB,EACF,EACD,OAAOoC,EAAuBM,CAAM,UAC3B1C,aAAgB4C,EAAkC,CAC3D,IAAMF,EAAS,CACb,IAAIjC,EAAY,CAAE,WAAYT,EAAK,UAAU,CAAE,EAC/C,IAAI2C,EAAW,CACb,WAAY,CAAC,IAAIzB,EAAS,CAAE,aAAclB,EAAK,SAAS,CAAE,CAAC,EAAE,OACtDA,EAAK,UAAU,EAEvB,GAEH,OAAOoC,EAAuBM,CAAM,UAC3B1C,aAAgB6C,EAAyB,CAClD,IAAMH,EAAS1C,EAAK,WAAW,OAAO,CACpC,IAAI2C,EAAW,CACb,WAAY,CAAC,IAAIzB,EAAS,CAAE,aAAclB,EAAK,SAAS,CAAE,CAAC,EAAE,OACtDA,EAAK,UAAU,EAEvB,EACF,EACDgC,EAASI,EAAuBM,CAAM,UAC7B1C,aAAgB2C,EAAY,CACrC,IAAMD,EAAS1C,EAAK,WAAW,OAAO,CACpC,IAAI2C,EAAW,CACb,WAAY3C,EAAK,WAClB,EACF,EACDgC,EAASI,EAAuBM,CAAM,MACjC,IAAI1C,aAAgB8C,EACzB,OAAAC,EAAQ/C,EAAK,WAAagD,GAAW,CAI/B3C,EAAQ2C,EAAQ,UAAU,IAAM,KAClChB,EAASI,EAAuBY,EAAQ,UAAU,EAEtD,CAAC,EACMhB,EACF,GAAIhC,aAAgBkB,EACzBa,EAAS,KAAK/B,EAAK,YAAY,MAE/B,OAAM,MAAM,sBAAsB,EAGpC,IAEF,OAAAgC,EAAO,KAAK,CACV,YAAaD,EACb,UAAWI,GAAKN,EAAW,CAAC,EAC7B,EAEMG,CACT,CAnGgBpC,EAAAgC,GAAA,qBA4GV,SAAUqB,GACdC,EACAC,EACAC,EACAC,EAAoB,CAEpB,IAAMC,EAAyB,qBAEzBC,EAAwB,CAACD,CAAiB,EAC1CE,EAAwB,mBAC1BC,EAAoB,GAElBC,EAAoBP,EAAY,OAChCQ,EAA2BD,EAAoBL,EAAe,EAE9DrB,EAAwC,CAAA,EAExC4B,EAAkC,CAAA,EAQxC,IAPAA,EAAc,KAAK,CACjB,IAAK,GACL,IAAKV,EACL,UAAW,CAAA,EACX,gBAAiB,CAAA,EAClB,EAEM,CAAC7C,EAAQuD,CAAa,GAAG,CAC9B,IAAM7B,EAAW6B,EAAc,IAAG,EAGlC,GAAI7B,IAAayB,EAAkB,CAE/BC,GACAI,GAAKD,CAAa,EAAG,KAAOD,GAG5BC,EAAc,IAAG,EAEnB,SAGF,IAAME,EAAU/B,EAAS,IACnBgC,EAAUhC,EAAS,IACnBiC,EAAgBjC,EAAS,UACzBkC,EAAsBlC,EAAS,gBAGrC,GAAI1B,EAAQyD,CAAO,EACjB,SAGF,IAAM9D,EAAO8D,EAAQ,CAAC,EAEtB,GAAI9D,IAASsD,EAAmB,CAC9B,IAAMY,EAAW,CACf,IAAKH,EACL,IAAK5B,GAAK2B,CAAO,EACjB,UAAWK,GAAUH,CAAa,EAClC,gBAAiBG,GAAUF,CAAmB,GAEhDL,EAAc,KAAKM,CAAQ,UAClBlE,aAAgBkB,EAEzB,GAAI6C,EAAUL,EAAoB,EAAG,CACnC,IAAMU,EAAUL,EAAU,EACpBM,EAAclB,EAAYiB,CAAO,EACvC,GAAIhB,EAAYiB,EAAarE,EAAK,YAAY,EAAG,CAC/C,IAAMkE,EAAW,CACf,IAAKE,EACL,IAAKjC,GAAK2B,CAAO,EACjB,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKM,CAAQ,WAGpBH,IAAYL,EAAoB,EAEzC1B,EAAO,KAAK,CACV,cAAehC,EAAK,aACpB,oBAAqBA,EAAK,IAC1B,UAAWgE,EACX,gBAAiBC,EAClB,EACDR,EAAoB,OAEpB,OAAM,MAAM,sBAAsB,UAE3BzD,aAAgBuC,EAAa,CACtC,IAAM+B,EAAevE,GAAMiE,CAAa,EACxCM,EAAa,KAAKtE,EAAK,eAAe,EAEtC,IAAMuE,EAAqBxE,GAAMkE,CAAmB,EACpDM,EAAmB,KAAKvE,EAAK,GAAG,EAEhC,IAAMkE,EAAW,CACf,IAAKH,EACL,IAAK/D,EAAK,WAAW,OAAOuD,EAAuBpB,GAAK2B,CAAO,CAAC,EAChE,UAAWQ,EACX,gBAAiBC,GAEnBX,EAAc,KAAKM,CAAQ,UAClBlE,aAAg
BwC,EAAQ,CAEjC,IAAMgC,EAAkB,CACtB,IAAKT,EACL,IAAK5B,GAAK2B,CAAO,EACjB,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKY,CAAe,EAElCZ,EAAc,KAAKJ,CAAgB,EAEnC,IAAMiB,EAAe,CACnB,IAAKV,EACL,IAAK/D,EAAK,WAAW,OAAOmC,GAAK2B,CAAO,CAAC,EACzC,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKa,CAAY,UACtBzE,aAAgByC,EAAqB,CAE9C,IAAMiC,EAAkB,IAAI/B,EAAW,CACrC,WAAY3C,EAAK,WACjB,IAAKA,EAAK,IACX,EACKkC,EAAUlC,EAAK,WAAW,OAAO,CAAC0E,CAAe,EAAGvC,GAAK2B,CAAO,CAAC,EACjEI,EAAW,CACf,IAAKH,EACL,IAAK7B,EACL,UAAW8B,EACX,gBAAiBC,GAEnBL,EAAc,KAAKM,CAAQ,UAClBlE,aAAgB4C,EAAkC,CAE3D,IAAM+B,EAAgB,IAAIzD,EAAS,CACjC,aAAclB,EAAK,UACpB,EACK0E,EAAkB,IAAI/B,EAAW,CACrC,WAAY,CAAMgC,CAAa,EAAE,OAAO3E,EAAK,UAAU,EACvD,IAAKA,EAAK,IACX,EACKkC,EAAUlC,EAAK,WAAW,OAAO,CAAC0E,CAAe,EAAGvC,GAAK2B,CAAO,CAAC,EACjEI,EAAW,CACf,IAAKH,EACL,IAAK7B,EACL,UAAW8B,EACX,gBAAiBC,GAEnBL,EAAc,KAAKM,CAAQ,UAClBlE,aAAgB6C,EAAyB,CAElD,IAAM2B,EAAkB,CACtB,IAAKT,EACL,IAAK5B,GAAK2B,CAAO,EACjB,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKY,CAAe,EAElCZ,EAAc,KAAKJ,CAAgB,EAEnC,IAAMmB,EAAgB,IAAIzD,EAAS,CACjC,aAAclB,EAAK,UACpB,EACK4E,EAAgB,IAAIjC,EAAW,CACnC,WAAY,CAAMgC,CAAa,EAAE,OAAO3E,EAAK,UAAU,EACvD,IAAKA,EAAK,IACX,EACKkC,EAAUlC,EAAK,WAAW,OAAO,CAAC4E,CAAa,EAAGzC,GAAK2B,CAAO,CAAC,EAC/DW,GAAe,CACnB,IAAKV,EACL,IAAK7B,EACL,UAAW8B,EACX,gBAAiBC,GAEnBL,EAAc,KAAKa,EAAY,UACtBzE,aAAgB2C,EAAY,CAErC,IAAM6B,EAAkB,CACtB,IAAKT,EACL,IAAK5B,GAAK2B,CAAO,EACjB,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKY,CAAe,EAElCZ,EAAc,KAAKJ,CAAgB,EAGnC,IAAMoB,EAAgB,IAAIjC,EAAW,CACnC,WAAY3C,EAAK,WACjB,IAAKA,EAAK,IACX,EACKkC,EAAUlC,EAAK,WAAW,OAAO,CAAC4E,CAAa,EAAGzC,GAAK2B,CAAO,CAAC,EAC/DW,EAAe,CACnB,IAAKV,EACL,IAAK7B,EACL,UAAW8B,EACX,gBAAiBC,GAEnBL,EAAc,KAAKa,CAAY,UACtBzE,aAAgB8C,EAEzB,QAAS+B,EAAI7E,EAAK,WAAW,OAAS,EAAG6E,GAAK,EAAGA,IAAK,CACpD,IAAM7B,EAAehD,EAAK,WAAW6E,CAAC,EAChCC,EAAc,CAClB,IAAKf,EACL,IAAKf,EAAQ,WAAW,OAAOb,GAAK2B,CAAO,CAAC,EAC5C,UAAWE,EACX,gBAAiBC,GAEnBL,EAAc,KAAKkB,CAAW,EAC9BlB,EAAc,KAAKJ,CAAgB,UAE5BxD,aAAgBS,EACzBmD,EAAc,KAAK,CACjB,IAAKG,EACL,IAAK/D,EAAK,WAAW,OAAOmC,GAAK2B,CAAO,CAAC,EACzC,UAAWE,EACX,gBAAiBC,EAClB,UACQjE,aAAgB+E,GAEzBnB,EAAc,KACZoB,GAAmBhF,EAAM+D,EAASC,EAAeC,CAAmB,CAAC,MAGvE,OAAM,MAAM,sBAAsB,EAGtC,OAAOjC,CACT,CAzOgBpC,EAAAqD,GAAA,2BA2OhB,SAAS+B,GACPpE,EACAmD,EACAC,EACAC,EAA6B,CAE7B,IAAMK,EAAevE,GAAMiE,CAAa,EACxCM,EAAa,KAAK1D,EAAQ,IAAI,EAE9B,IAAMqE,EAAyBlF,GAAMkE,CAAmB,EAExD,OAAAgB,EAAuB,KAAK,CAAC,EAEtB,CACL,IAAKlB,EACL,IAAKnD,EAAQ,WACb,UAAW0D,EACX,gBAAiBW,EAErB,CAnBSrF,EAAAoF,GAAA,sBC9jBT,IAAYE,IAAZ,SAAYA,EAAS,CACnBA,EAAAA,EAAA,OAAA,CAAA,EAAA,SACAA,EAAAA,EAAA,WAAA,CAAA,EAAA,aACAA,EAAAA,EAAA,qBAAA,CAAA,EAAA,uBACAA,EAAAA,EAAA,oCAAA,CAAA,EAAA,sCACAA,EAAAA,EAAA,0BAAA,CAAA,EAAA,4BACAA,EAAAA,EAAA,YAAA,CAAA,EAAA,aACF,GAPYA,KAAAA,GAAS,CAAA,EAAA,EASf,SAAUC,GACdC,EAA2C,CAG3C,GAAIA,aAAgBC,GAAUD,IAAS,SACrC,OAAOF,GAAU,OACZ,GAAIE,aAAgBE,GAAcF,IAAS,aAChD,OAAOF,GAAU,WACZ,GACLE,aAAgBG,GAChBH,IAAS,sBAET,OAAOF,GAAU,qBACZ,GACLE,aAAgBI,GAChBJ,IAAS,mCAET,OAAOF,GAAU,oCACZ,GACLE,aAAgBK,GAChBL,IAAS,0BAET,OAAOF,GAAU,0BACZ,GAAIE,aAAgBM,GAAeN,IAAS,cACjD,OAAOF,GAAU,YAEjB,MAAM,MAAM,sBAAsB,CAEtC,CA5BgBS,EAAAR,GAAA,eA8BV,SAAUS,GAAkBC,EAKjC,CACC,GAAM,CAAE,WAAAC,EAAY,KAAAC,EAAM,SAAAC,EAAU,aAAAC,CAAY,EAAKJ,EAC/CK,EAAOf,GAAYa,CAAQ,EACjC,OAAIE,IAAShB,GAAU,YACdiB,GAAuBL,EAAYC,EAAME,CAAY,EAErDG,GACLN,EACAC,EACAG,EACAD,CAAY,CAGlB,CAlBgBN,EAAAC,GAAA,qBAoBV,SAAUS,GACdP,EACAQ,EACAL,EACAM,EACAC,EACAC,EAAuB,CAEvB,IAAMC,EAAiBP,GACrBL,EACAQ,EACAL,CAAY,EAGRU,EAAeC,GAA0BF,CAAc,EACzDG,GACAC,GAEJ,OAAOL,EACLC,EACAH,EACAI,EACAH,CAAoB,CAExB,CAxBgBb,EAAAU,GAAA,2BAsCV,SAAUU,GACdjB,EACAQ,EACAU,EACAR,EACAR,EACAiB,EAIkB,CAElB,IAAMP,EAAiBN,GACrBN,EACAQ,EACAN,EACAgB,CAAC,EAGGL
,EAAeC,GAA0BF,CAAc,EACzDG,GACAC,GAEJ,OAAOG,EACLP,EAAe,CAAC,EAChBC,EACAH,CAAoB,CAExB,CA5BgBb,EAAAoB,GAAA,qCAgCV,SAAUG,GACdC,EACAZ,EACAI,EACAH,EAA6B,CAE7B,IAAMY,EAAYD,EAAK,OACjBE,EAA0BC,GAAMH,EAAOI,GACpCD,GAAMC,EAAUC,GACdA,EAAS,SAAW,CAC5B,CACF,EAGD,GAAIjB,EAIF,OAAO,SAELkB,EAAqB,CAKrB,IAAMC,EAAwCC,EAC5CF,EACCF,GAAYA,EAAQ,IAAI,EAG3B,QAASK,EAAI,EAAGA,EAAIR,EAAWQ,IAAK,CAClC,IAAML,EAAUJ,EAAKS,CAAC,EAChBC,EAAiBN,EAAQ,OAEzBO,EAAgBJ,EAAWE,CAAC,EAClC,GAAI,EAAAE,IAAkB,QAAaA,EAAc,KAAK,IAAI,IAAM,IAIhEC,EAAU,QAASC,EAAI,EAAGA,EAAIH,EAAgBG,IAAK,CACjD,IAAMR,EAAWD,EAAQS,CAAC,EACpBC,EAAiBT,EAAS,OAChC,QAASU,EAAI,EAAGA,EAAID,EAAgBC,IAAK,CACvC,IAAMC,EAAY,KAAK,GAAGD,EAAI,CAAC,EAC/B,GAAIvB,EAAawB,EAAWX,EAASU,CAAC,CAAC,IAAM,GAG3C,SAASH,EAKb,OAAOH,GAOb,EACK,GAAIP,GAA2B,CAACb,EAAsB,CAG3D,IAAM4B,EAAkBT,EAAIR,EAAOI,GAC1Bc,GAAQd,CAAO,CACvB,EAEKe,EAAcC,GAClBH,EACA,CAACI,EAAQjB,EAASkB,KAChBC,EAAQnB,EAAUoB,GAAe,CAC1BC,EAAIJ,EAAQG,EAAY,YAAa,IACxCH,EAAOG,EAAY,YAAa,EAAIF,GAEtCC,EAAQC,EAAY,gBAAmBE,GAAqB,CACrDD,EAAIJ,EAAQK,CAAiB,IAChCL,EAAOK,CAAiB,EAAIJ,EAEhC,CAAC,CACH,CAAC,EACMD,GAET,CAAA,CAA4B,EAM9B,OAAO,UAAA,CACL,IAAML,EAAY,KAAK,GAAG,CAAC,EAC3B,OAAOG,EAAYH,EAAU,YAAY,CAC3C,MAOA,QAAO,UAAA,CACL,QAASP,EAAI,EAAGA,EAAIR,EAAWQ,IAAK,CAClC,IAAML,EAAUJ,EAAKS,CAAC,EAChBC,EAAiBN,EAAQ,OAC/BQ,EAAU,QAASC,EAAI,EAAGA,EAAIH,EAAgBG,IAAK,CACjD,IAAMR,EAAWD,EAAQS,CAAC,EACpBC,EAAiBT,EAAS,OAChC,QAASU,EAAI,EAAGA,EAAID,EAAgBC,IAAK,CACvC,IAAMC,EAAY,KAAK,GAAGD,EAAI,CAAC,EAC/B,GAAIvB,EAAawB,EAAWX,EAASU,CAAC,CAAC,IAAM,GAG3C,SAASH,EAKb,OAAOH,GAOb,CAEJ,CA5HgBjC,EAAAuB,GAAA,kCA8HV,SAAU4B,GACdC,EACApC,EACAH,EAA6B,CAE7B,IAAMa,EAA0BC,GAAMyB,EAAMvB,GACnCA,EAAS,SAAW,CAC5B,EAEKwB,EAAaD,EAAI,OAIvB,GAAI1B,GAA2B,CAACb,EAAsB,CACpD,IAAMyC,EAAoBZ,GAAQU,CAAG,EAErC,GACEE,EAAkB,SAAW,GAC7BC,EAAcD,EAAkB,CAAC,EAAG,eAAe,EACnD,CAEA,IAAME,EADoBF,EAAkB,CAAC,EACW,aAExD,OAAO,UAAA,CACL,OAAO,KAAK,GAAG,CAAC,EAAE,eAAiBE,CACrC,MACK,CACL,IAAMb,EAAcC,GAClBU,EACA,CAACT,EAAQG,EAAaF,KACpBD,EAAOG,EAAY,YAAa,EAAI,GACpCD,EAAQC,EAAY,gBAAmBE,GAAqB,CAC1DL,EAAOK,CAAiB,EAAI,EAC9B,CAAC,EACML,GAET,CAAA,CAAe,EAGjB,OAAO,UAAA,CACL,IAAML,EAAY,KAAK,GAAG,CAAC,EAC3B,OAAOG,EAAYH,EAAU,YAAY,IAAM,EACjD,OAGF,QAAO,UAAA,CACLJ,EAAU,QAASC,EAAI,EAAGA,EAAIgB,EAAYhB,IAAK,CAC7C,IAAMR,EAAWuB,EAAIf,CAAC,EAChBC,EAAiBT,EAAS,OAChC,QAASU,EAAI,EAAGA,EAAID,EAAgBC,IAAK,CACvC,IAAMC,EAAY,KAAK,GAAGD,EAAI,CAAC,EAC/B,GAAIvB,EAAawB,EAAWX,EAASU,CAAC,CAAC,IAAM,GAG3C,SAASH,EAIb,MAAO,GAIT,MAAO,EACT,CAEJ,CAjEgBpC,EAAAmD,GAAA,2CAmEhB,IAAMM,GAAN,cAAyCC,EAAU,CA/VnD,MA+VmD,CAAA1D,EAAA,mCAGjD,YACU2D,EACAC,EACAC,EAAyB,CAEjC,MAAK,EAJG,KAAA,QAAAF,EACA,KAAA,iBAAAC,EACA,KAAA,eAAAC,CAGV,CAEA,cAAY,CACV,YAAK,KAAK,KAAK,OAAO,EACf,KAAK,OACd,CAEQ,cACNC,EACAC,EACAC,EACAC,EAAuB,CAEvB,OACEH,EAAK,MAAQ,KAAK,kBAClB,KAAK,iBAAmBC,GAExB,KAAK,QAAUC,EAAS,OAAOC,CAAQ,EAChC,IAGF,EACT,CAEA,WACEC,EACAF,EACAC,EAAuB,CAElB,KAAK,cAAcC,EAAY3E,GAAU,OAAQyE,EAAUC,CAAQ,GACtE,MAAM,WAAWC,EAAYF,EAAUC,CAAQ,CAEnD,CAEA,eACEE,EACAH,EACAC,EAAuB,CAGpB,KAAK,cACJE,EACA5E,GAAU,qBACVyE,EACAC,CAAQ,GAGV,MAAM,WAAWE,EAAgBH,EAAUC,CAAQ,CAEvD,CAEA,kBACEG,EACAJ,EACAC,EAAuB,CAGpB,KAAK,cACJG,EACA7E,GAAU,oCACVyE,EACAC,CAAQ,GAGV,MAAM,WAAWG,EAAmBJ,EAAUC,CAAQ,CAE1D,CAEA,SACEI,EACAL,EACAC,EAAuB,CAGpB,KAAK,cAAcI,EAAU9E,GAAU,WAAYyE,EAAUC,CAAQ,GAEtE,MAAM,WAAWI,EAAUL,EAAUC,CAAQ,CAEjD,CAEA,YACEK,EACAN,EACAC,EAAuB,CAGpB,KAAK,cACJK,EACA/E,GAAU,0BACVyE,EACAC,CAAQ,GAGV,MAAM,WAAWK,EAAaN,EAAUC,CAAQ,CAEpD,GAMIM,GAAN,cAA4CC,EAAW,CA7cvD,MA6cuD,CAAAxE,EAAA,sCAGrD,YACU4D,EACAC,EACAY,EAAe,CAEvB,MAAK,EAJG,KAAA,iBAAAb,EACA,KAAA,eAAAC,EACA,KAAA,UAAAY,EALH,KAAA,OAAwB,CAAA,CAQ/B,CAEQ,cACNX,EACAY,EAA2B,CAGzBZ,EAAK,MAAQ,KAAK,kBAClB,K
AAK,iBAAmBY,IACvB,KAAK,YAAc,QAAaZ,IAAS,KAAK,aAE/C,KAAK,OAASA,EAAK,WAEvB,CAEO,YAAYA,EAAY,CAC7B,KAAK,cAAcA,EAAMvE,GAAU,MAAM,CAC3C,CAEO,gBAAgBuE,EAAgB,CACrC,KAAK,cAAcA,EAAMvE,GAAU,UAAU,CAC/C,CAEO,yBAAyBuE,EAAyB,CACvD,KAAK,cAAcA,EAAMvE,GAAU,oBAAoB,CACzD,CAEO,sCACLuE,EAAsC,CAEtC,KAAK,cAAcA,EAAMvE,GAAU,mCAAmC,CACxE,CAEO,6BAA6BuE,EAA6B,CAC/D,KAAK,cAAcA,EAAMvE,GAAU,yBAAyB,CAC9D,CAEO,iBAAiBuE,EAAiB,CACvC,KAAK,cAAcA,EAAMvE,GAAU,WAAW,CAChD,GAGF,SAASoF,GAAwBC,EAAY,CAC3C,IAAM/B,EAAS,IAAI,MAAM+B,CAAI,EAC7B,QAASrC,EAAI,EAAGA,EAAIqC,EAAMrC,IACxBM,EAAON,CAAC,EAAI,CAAA,EAEd,OAAOM,CACT,CANS7C,EAAA2E,GAAA,2BAaT,SAASE,GAAeC,EAAiB,CACvC,IAAIC,EAAO,CAAC,EAAE,EACd,QAASxC,EAAI,EAAGA,EAAIuC,EAAK,OAAQvC,IAAK,CACpC,IAAMyC,EAAUF,EAAKvC,CAAC,EAChB0C,EAAa,CAAA,EACnB,QAAS5C,EAAI,EAAGA,EAAI0C,EAAK,OAAQ1C,IAAK,CACpC,IAAM6C,EAAiBH,EAAK1C,CAAC,EAC7B4C,EAAW,KAAKC,EAAiB,IAAMF,EAAQ,YAAY,EAC3D,QAAS/C,EAAI,EAAGA,EAAI+C,EAAQ,gBAAiB,OAAQ/C,IAAK,CACxD,IAAMkD,EAAsB,IAAMH,EAAQ,gBAAiB/C,CAAC,EAC5DgD,EAAW,KAAKC,EAAiBC,CAAmB,GAGxDJ,EAAOE,EAET,OAAOF,CACT,CAhBS/E,EAAA6E,GAAA,kBAqBT,SAASO,GACPC,EACAC,EACAxC,EAAW,CAEX,QACMyC,EAAa,EACjBA,EAAaF,EAAkB,OAC/BE,IACA,CAEA,GAAIA,IAAezC,EACjB,SAEF,IAAM0C,EAAyBH,EAAkBE,CAAU,EAC3D,QAASE,EAAY,EAAGA,EAAYH,EAAe,OAAQG,IAAa,CACtE,IAAMC,EAAYJ,EAAeG,CAAS,EAC1C,GAAID,EAAuBE,CAAS,IAAM,GACxC,MAAO,IAKb,MAAO,EACT,CAxBS1F,EAAAoF,GAAA,sBA0BH,SAAUO,GACdC,EACAvE,EAAS,CAET,IAAMwE,EAAc7D,EAAI4D,EAAWhE,GACjCkE,GAAkB,CAAClE,CAAO,EAAG,CAAC,CAAC,EAE3BmE,EAAcpB,GAAwBkB,EAAY,MAAM,EACxDG,EAAahE,EAAI6D,EAAcI,GAAgB,CACnD,IAAMC,EAAmC,CAAA,EACzC,OAAAnD,EAAQkD,EAAeE,GAAQ,CAC7B,IAAMpB,EAAOF,GAAesB,EAAK,WAAW,EAC5CpD,EAAQgC,EAAOqB,GAAW,CACxBF,EAAKE,CAAO,EAAI,EAClB,CAAC,CACH,CAAC,EACMF,CACT,CAAC,EACGG,EAAUR,EAGd,QAASS,EAAa,EAAGA,GAAcjF,EAAGiF,IAAc,CACtD,IAAMC,EAAcF,EACpBA,EAAU1B,GAAwB4B,EAAY,MAAM,EAGpD,QAASC,EAAS,EAAGA,EAASD,EAAY,OAAQC,IAAU,CAC1D,IAAMC,EAA0BF,EAAYC,CAAM,EAElD,QACME,EAAc,EAClBA,EAAcD,EAAwB,OACtCC,IACA,CACA,IAAMC,EAAiBF,EAAwBC,CAAW,EAAE,YACtDE,EAAYH,EAAwBC,CAAW,EAAE,UACjDG,EAAahC,GAAe8B,CAAc,EAGhD,GAFiBvB,GAAmBY,EAAYa,EAAYL,CAAM,GAElDjD,EAAQqD,CAAS,GAAKD,EAAe,SAAWtF,EAAG,CACjE,IAAMyF,EAAgBf,EAAYS,CAAM,EAExC,GAAIO,GAAaD,EAAeH,CAAc,IAAM,GAAO,CACzDG,EAAc,KAAKH,CAAc,EAEjC,QAAStE,EAAI,EAAGA,EAAIwE,EAAW,OAAQxE,IAAK,CAC1C,IAAM+D,EAAUS,EAAWxE,CAAC,EAC5B2D,EAAWQ,CAAM,EAAEJ,CAAO,EAAI,SAK/B,CACH,IAAMY,EAA6BlB,GACjCc,EACAN,EAAa,EACbK,CAAc,EAEhBN,EAAQG,CAAM,EAAIH,EAAQG,CAAM,EAAE,OAAOQ,CAA0B,EAGnEjE,EAAQiE,EAA6Bb,GAAQ,CAC3C,IAAMU,EAAahC,GAAesB,EAAK,WAAW,EAClDpD,EAAQ8D,EAAaI,GAAO,CAC1BjB,EAAWQ,CAAM,EAAES,CAAG,EAAI,EAC5B,CAAC,CACH,CAAC,KAMT,OAAOlB,CACT,CAzEgB/F,EAAA2F,GAAA,qCA2EV,SAAUnF,GACdL,EACAQ,EACAU,EACA6F,EAAoB,CAEpB,IAAMC,EAAU,IAAI5C,GAClBpE,EACAZ,GAAU,YACV2H,CAAM,EAER,OAAAvG,EAAY,OAAOwG,CAAO,EACnBxB,GAAkCwB,EAAQ,OAAQ9F,CAAC,CAC5D,CAbgBrB,EAAAQ,GAAA,0BAeV,SAAUC,GACdN,EACAQ,EACAN,EACAgB,EAAS,CAET,IAAM+F,EAAmB,IAAI7C,GAC3BpE,EACAE,CAAQ,EAEVM,EAAY,OAAOyG,CAAgB,EACnC,IAAMC,EAAYD,EAAiB,OAO7BE,EALiB,IAAI7D,GACzB9C,EACAR,EACAE,CAAQ,EAEsB,aAAY,EAEtCkH,EAAa,IAAIC,EAAgB,CAAE,WAAYH,CAAS,CAAE,EAC1DI,EAAY,IAAID,EAAgB,CAAE,WAAYF,CAAQ,CAAE,EAE9D,OAAO3B,GAAkC,CAAC4B,EAAYE,CAAS,EAAGpG,CAAC,CACrE,CAxBgBrB,EAAAS,GAAA,oCA0BV,SAAUsG,GACdW,EACAC,EAAuB,CAEvBC,EAAkB,QAASrF,EAAI,EAAGA,EAAImF,EAAY,OAAQnF,IAAK,CAC7D,IAAMsF,EAAYH,EAAYnF,CAAC,EAC/B,GAAIsF,EAAU,SAAWF,EAAW,OAGpC,SAAStF,EAAI,EAAGA,EAAIwF,EAAU,OAAQxF,IAAK,CACzC,IAAMyF,EAAYH,EAAWtF,CAAC,EACxB0F,EAAWF,EAAUxF,CAAC,EAK5B,IAFEyF,IAAcC,GACdA,EAAS,mBAAoBD,EAAU,YAAa,IAAM,UACrC,GACrB,SAASF,EAGb,MAAO,IAGT,MAAO,EACT,CAxBgB5H,EAAA+G,GAAA,gBA0BV,SAAUiB,GACdC,EACAC,EAAkB,CAElB,OACED,EAAO,OAASC,EAAM,QACtBvG,GAA
MsG,EAAQ,CAACjD,EAASlC,IAAO,CAC7B,IAAMqF,EAAeD,EAAMpF,CAAG,EAC9B,OACEkC,IAAYmD,GACZA,EAAa,mBAAoBnD,EAAQ,YAAa,CAE1D,CAAC,CAEL,CAdgBhF,EAAAgI,GAAA,wBAgBV,SAAU/G,GACdF,EAAmC,CAEnC,OAAOY,GAAMZ,EAAiBqH,GAC5BzG,GAAMyG,EAAiBC,GACrB1G,GAAM0G,EAAaC,GAAU/E,EAAQ+E,EAAM,eAAgB,CAAC,CAAC,CAC9D,CAEL,CARgBtI,EAAAiB,GAAA,6BC5pBV,SAAUsH,GAAkBC,EAKjC,CACC,IAAMC,EAAmCD,EAAQ,kBAAkB,SAAS,CAC1E,MAAOA,EAAQ,MACf,WAAYA,EAAQ,WACpB,YAAaA,EAAQ,YACtB,EACD,OAAOE,EAAID,EAAmCE,GAAiB,OAAA,OAAA,CAC7D,KAAMC,GAA0B,2BAA2B,EACxDD,CAAY,CACf,CACJ,CAfgBE,EAAAN,GAAA,qBAiBV,SAAUO,GACdC,EACAC,EACAC,EACAC,EAAmB,CAEnB,IAAMC,EAA4CC,GAChDL,EACCM,GACCC,GAA6BD,EAAcJ,CAAc,CAAC,EAGxDM,EAA+BC,GACnCT,EACAC,EACAC,CAAc,EAGVQ,EAAoBL,GAAQL,EAAYW,GAC5CC,GAAoBD,EAAST,CAAc,CAAC,EAGxCW,EAAsBR,GAAQL,EAAYW,GAC9CG,GACEH,EACAX,EACAG,EACAD,CAAc,CACf,EAGH,OAAOE,EAAgB,OACrBI,EACAE,EACAG,CAAmB,CAEvB,CApCgBf,EAAAC,GAAA,mBAsChB,SAASQ,GACPQ,EACAb,EAAqD,CAErD,IAAMc,EAAmB,IAAIC,GAC7BF,EAAa,OAAOC,CAAgB,EACpC,IAAME,EAAqBF,EAAiB,eAEtCG,EAAmBC,GACvBF,EACAG,EAA+B,EAG3BC,EAAkBC,GAAOJ,EAAmBK,GACzCA,EAAU,OAAS,CAC3B,EAwBD,OAtBe7B,EAAI8B,GAAOH,CAAU,EAAII,GAAuB,CAC7D,IAAMC,EAAiBC,GAAMF,CAAc,EACrCG,EAAM3B,EAAe,yBACzBa,EACAW,CAAc,EAEVI,EAAUC,GAAqBJ,CAAS,EACxCK,EAA6C,CACjD,QAASH,EACT,KAAMhC,GAA0B,sBAChC,SAAUkB,EAAa,KACvB,QAASe,EACT,WAAYH,EAAU,KAGlBM,EAAQC,GAA2BP,CAAS,EAClD,OAAIM,IACFD,EAAS,UAAYC,GAGhBD,CACT,CAAC,CAEH,CAxCSlC,EAAAS,GAAA,gCA0CH,SAAUc,GACdc,EAA+B,CAE/B,MAAO,GAAGJ,GAAqBI,CAAI,CAAC,MAClCA,EAAK,GACP,MAAMD,GAA2BC,CAAI,CAAC,EACxC,CANgBrC,EAAAuB,GAAA,mCAQhB,SAASa,GAA2BC,EAA+B,CACjE,OAAIA,aAAgBC,EACXD,EAAK,aAAa,KAChBA,aAAgBE,EAClBF,EAAK,gBAEL,EAEX,CARSrC,EAAAoC,GAAA,8BAUH,IAAOjB,GAAP,cAA6CqB,EAAW,CAjL9D,MAiL8D,CAAAxC,EAAA,sCAA9D,aAAA,qBACS,KAAA,eAA8C,CAAA,CAmCvD,CAjCS,iBAAiByC,EAAoB,CAC1C,KAAK,eAAe,KAAKA,CAAO,CAClC,CAEO,YAAYC,EAAc,CAC/B,KAAK,eAAe,KAAKA,CAAM,CACjC,CAEO,6BAA6BC,EAAgC,CAClE,KAAK,eAAe,KAAKA,CAAO,CAClC,CAEO,yBAAyBC,EAA+B,CAC7D,KAAK,eAAe,KAAKA,CAAU,CACrC,CAEO,sCACLC,EAA+C,CAE/C,KAAK,eAAe,KAAKA,CAAa,CACxC,CAEO,gBAAgBC,EAAgB,CACrC,KAAK,eAAe,KAAKA,CAAI,CAC/B,CAEO,iBAAiBC,EAAe,CACrC,KAAK,eAAe,KAAKA,CAAE,CAC7B,CAEO,cAAcC,EAAkB,CACrC,KAAK,eAAe,KAAKA,CAAQ,CACnC,GAGI,SAAUhC,GACdiC,EACAC,EACAC,EACA/C,EAAqD,CAErD,IAAMgD,EAAS,CAAA,EAWf,GAVoBC,GAClBH,EACA,CAACI,EAAQzC,IACHA,EAAQ,OAASoC,EAAK,KACjBK,EAAS,EAEXA,EAET,CAAC,EAEe,EAAG,CACnB,IAAMC,EAASnD,EAAe,4BAA4B,CACxD,aAAc6C,EACd,YAAaE,EACd,EACDC,EAAO,KAAK,CACV,QAASG,EACT,KAAMxD,GAA0B,oBAChC,SAAUkD,EAAK,KAChB,EAGH,OAAOG,CACT,CA9BgBpD,EAAAgB,GAAA,mCAmCV,SAAUwC,GACdC,EACAC,EACAP,EAAiB,CAEjB,IAAMC,EAAS,CAAA,EACXG,EAEJ,OAAKI,GAASD,EAAmBD,CAAQ,IACvCF,EACE,kCAAkCE,CAAQ,6CAA6CN,CAAS,uDAElGC,EAAO,KAAK,CACV,QAASG,EACT,KAAMxD,GAA0B,sBAChC,SAAU0D,EACX,GAGIL,CACT,CApBgBpD,EAAAwD,GAAA,4BAsBV,SAAUI,GACdC,EACAC,EACA1D,EACA2D,EAAe,CAAA,EAAE,CAEjB,IAAMX,EAAmC,CAAA,EACnCY,EAAmBC,GAAqBH,EAAS,UAAU,EACjE,GAAII,EAAQF,CAAgB,EAC1B,MAAO,CAAA,EACF,CACL,IAAMP,EAAWI,EAAQ,KACEF,GAASK,EAAkBH,CAAO,GAE3DT,EAAO,KAAK,CACV,QAAShD,EAAe,wBAAwB,CAC9C,aAAcyD,EACd,kBAAmBE,EACpB,EACD,KAAMhE,GAA0B,eAChC,SAAU0D,EACX,EAKH,IAAMU,EAAiBC,GAAWJ,EAAkBD,EAAK,OAAO,CAACF,CAAO,CAAC,CAAC,EACpEQ,EAAsB9D,GAAQ4D,EAAiBG,GAAe,CAClE,IAAMC,EAAUC,GAAMT,CAAI,EAC1B,OAAAQ,EAAQ,KAAKD,CAAW,EACjBV,GACLC,EACAS,EACAlE,EACAmE,CAAO,CAEX,CAAC,EAED,OAAOnB,EAAO,OAAOiB,CAAmB,EAE5C,CAxCgBrE,EAAA4D,GAAA,2BA0CV,SAAUK,GAAqBQ,EAAyB,CAC5D,IAAInB,EAAiB,CAAA,EACrB,GAAIY,EAAQO,CAAU,EACpB,OAAOnB,EAET,IAAMzB,EAAYC,GAAM2C,CAAU,EAGlC,GAAI5C,aAAqBU,EACvBe,EAAO,KAAKzB,EAAU,cAAc,UAEpCA,aAAqB6C,GACrB7C,aAAqB8C,GACrB9C,aAAqB+C,GACrB/C,aAAqBgD,GACrBhD,aAAqBiD,GACrBjD,aAAqBkD,EAErBzB,EAASA,EAAO,OACdW,GAAoCpC,EAAU,UAAU,CA
AC,UAElDA,aAAqBmD,EAE9B1B,EAAS2B,GACPpF,EAAIgC,EAAU,WAAaqD,GACzBjB,GAAuCiB,EAAY,UAAU,CAAC,CAC/D,UAEM,EAAArD,aAAqBS,GAG9B,MAAM,MAAM,sBAAsB,EAGpC,IAAM6C,EAAkBC,GAAevD,CAAS,EAC1CwD,EAAUZ,EAAW,OAAS,EACpC,GAAIU,GAAmBE,EAAS,CAC9B,IAAMC,EAAOC,GAAKd,CAAU,EAC5B,OAAOnB,EAAO,OAAOW,GAAqBqB,CAAI,CAAC,MAE/C,QAAOhC,CAEX,CA1CgBtD,EAAAiE,GAAA,wBA4ChB,IAAMuB,GAAN,cAA0BhD,EAAW,CAtWrC,MAsWqC,CAAAxC,EAAA,oBAArC,aAAA,qBACS,KAAA,aAA8B,CAAA,CAKvC,CAHS,iBAAiByF,EAAiB,CACvC,KAAK,aAAa,KAAKA,CAAI,CAC7B,GAGI,SAAUC,GACdzE,EACAb,EAAqD,CAErD,IAAMuF,EAAc,IAAIH,GACxBvE,EAAa,OAAO0E,CAAW,EAC/B,IAAMC,EAAMD,EAAY,aAkCxB,OAhCepF,GACbqF,EACCC,GAAU,CACT,IAAMC,EAAaC,GAAUF,EAAO,UAAU,EAC9C,OAAOtF,GAAQuF,EAAY,CAACE,EAAiBC,IAAc,CACzD,IAAMC,EAAqBC,GACzB,CAACH,CAAe,EAChB,CAAA,EACAI,GACA,CAAC,EAEH,OAAIlC,EAAQgC,CAAkB,EACrB,CACL,CACE,QAAS9F,EAAe,2BAA2B,CACjD,aAAca,EACd,YAAa4E,EACb,eAAgBI,EACjB,EACD,KAAMlG,GAA0B,oBAChC,SAAUkB,EAAa,KACvB,WAAY4E,EAAO,IACnB,YAAaI,EAAa,IAIvB,CAAA,CAEX,CAAC,CACH,CAAC,CAIL,CAzCgBjG,EAAA0F,GAAA,8BA2CV,SAAUW,GACdpF,EACAqF,EACAlG,EAAqD,CAErD,IAAMuF,EAAc,IAAIH,GACxBvE,EAAa,OAAO0E,CAAW,EAC/B,IAAIC,EAAMD,EAAY,aAItB,OAAAC,EAAMW,GAAOX,EAAMC,GAAWA,EAAO,oBAAsB,EAAI,EAEhDtF,GAAQqF,EAAMC,GAAuB,CAClD,IAAMW,EAAiBX,EAAO,IACxBY,EAAqBZ,EAAO,cAAgBS,EAC5CI,EAAeC,GACnBH,EACAvF,EACAwF,EACAZ,CAAM,EAEFe,EAAsBC,GAC1BH,EACAb,EACA5E,EACAb,CAAc,EAEV0G,EAA4BC,GAChCL,EACAb,EACA5E,EACAb,CAAc,EAGhB,OAAOwG,EAAoB,OAAOE,CAAyB,CAC7D,CAAC,CAGH,CAvCgB9G,EAAAqG,GAAA,4CAyCV,IAAOW,GAAP,cAAmCxE,EAAW,CAlcpD,MAkcoD,CAAAxC,EAAA,4BAApD,aAAA,qBACS,KAAA,eAEA,CAAA,CAmBT,CAjBS,6BAA6B2C,EAAgC,CAClE,KAAK,eAAe,KAAKA,CAAO,CAClC,CAEO,yBAAyBC,EAA+B,CAC7D,KAAK,eAAe,KAAKA,CAAU,CACrC,CAEO,sCACLC,EAA+C,CAE/C,KAAK,eAAe,KAAKA,CAAa,CACxC,CAEO,gBAAgBC,EAAgB,CACrC,KAAK,eAAe,KAAKA,CAAI,CAC/B,GAGI,SAAUhC,GACdG,EACAb,EAAqD,CAErD,IAAMuF,EAAc,IAAIH,GACxBvE,EAAa,OAAO0E,CAAW,EAC/B,IAAMC,EAAMD,EAAY,aAoBxB,OAlBepF,GAAQqF,EAAMC,GACvBA,EAAO,WAAW,OAAS,IACtB,CACL,CACE,QAASzF,EAAe,8BAA8B,CACpD,aAAca,EACd,YAAa4E,EACd,EACD,KAAM9F,GAA0B,cAChC,SAAUkB,EAAa,KACvB,WAAY4E,EAAO,MAIhB,CAAA,CAEV,CAGH,CA3BgB7F,EAAAc,GAAA,uBA6BV,SAAUmG,GACdC,EACAC,EACA/G,EAAqD,CAErD,IAAMgD,EAAmC,CAAA,EACzC,OAAAgE,EAAQF,EAAgBG,GAAe,CACrC,IAAMnG,EAAmB,IAAI8F,GAC7BK,EAAY,OAAOnG,CAAgB,EACnC,IAAME,EAAqBF,EAAiB,eAC5CkG,EAAQhG,EAAqBkG,GAAY,CACvC,IAAMC,EAAWC,GAAYF,CAAQ,EAC/Bb,EAAqBa,EAAS,cAAgBH,EAC9CX,EAAiBc,EAAS,IAO1BG,EANQC,GACZlB,EACAa,EACAE,EACAd,CAAkB,EAEgB,CAAC,EACrC,GAAIvC,EAAQe,GAAQwC,CAAqB,CAAC,EAAG,CAC3C,IAAMlE,EAASnD,EAAe,0BAA0B,CACtD,aAAciH,EACd,WAAYC,EACb,EACDlE,EAAO,KAAK,CACV,QAASG,EACT,KAAMxD,GAA0B,uBAChC,SAAUsH,EAAY,KACvB,EAEL,CAAC,CACH,CAAC,EAEMjE,CACT,CApCgBpD,EAAAiH,GAAA,qCA2ChB,SAASJ,GACPH,EACAiB,EACA1E,EACA7C,EAAqD,CAErD,IAAMwH,EAAmC,CAAA,EACnCC,EAAuBxE,GAC3BqD,EACA,CAACpD,EAAQwE,EAAS7B,KAEZ0B,EAAY,WAAW1B,CAAU,EAAE,oBAAsB,IAI7DmB,EAAQU,EAAUC,GAAY,CAC5B,IAAMC,EAAwB,CAAC/B,CAAU,EACzCmB,EAAQV,EAAc,CAACuB,EAAcC,IAAmB,CAEpDjC,IAAeiC,GACfC,GAAaF,EAAcF,CAAQ,GAEnCJ,EAAY,WAAWO,CAAe,EAAE,oBAAsB,IAE9DF,EAAsB,KAAKE,CAAe,CAE9C,CAAC,EAGCF,EAAsB,OAAS,GAC/B,CAACG,GAAaP,EAAqBG,CAAQ,IAE3CH,EAAoB,KAAKG,CAAQ,EACjCzE,EAAO,KAAK,CACV,KAAM0E,EACN,KAAMD,EACP,EAEL,CAAC,EACMzE,GAET,CAAA,CAA6C,EAyB/C,OAtBmBzD,EAAIgI,EAAuBO,GAAqB,CACjE,IAAMC,EAAcxI,EAClBuI,EAAkB,KACjBnC,GAAeA,EAAa,CAAC,EAUhC,MAAO,CACL,QARkB7F,EAAe,+BAA+B,CAChE,aAAc6C,EACd,YAAa0E,EACb,iBAAkBU,EAClB,WAAYD,EAAkB,KAC/B,EAIC,KAAMrI,GAA0B,eAChC,SAAUkD,EAAK,KACf,WAAY0E,EAAY,IACxB,aAAcS,EAAkB,KAEpC,CAAC,CAGH,CAnESpI,EAAA6G,GAAA,gCAqEH,SAAUE,GACdL,EACAiB,EACA1E,EACA7C,EAAqD,CAGrD,IAAMkI,EAAkBjF,GACtBqD,EACA,CAACpD,EAAQwE,EAASS,IAAO,CACvB,IAAMC,EAAkB3I,EAAIiI,EAAUC,IAC7B,CAAE,IA
AKQ,EAAK,KAAMR,CAAQ,EAClC,EACD,OAAOzE,EAAO,OAAOkF,CAAe,CACtC,EACA,CAAA,CAA0C,EAuD5C,OApDeC,GACblI,GAAQ+H,EAAkBI,GAAkB,CAG1C,GAFwBf,EAAY,WAAWe,EAAe,GAAG,EAE7C,oBAAsB,GACxC,MAAO,CAAA,EAET,IAAMC,EAAYD,EAAe,IAC3BE,EAAaF,EAAe,KAE5BG,EAAmCC,GACvCR,EACCS,GAIGpB,EAAY,WAAWoB,EAAiB,GAAG,EAAE,oBAC3C,IACFA,EAAiB,IAAMJ,GAGvBK,GAAqBD,EAAiB,KAAMH,CAAU,CAEzD,EAyBH,OAtB6B/I,EAC3BgJ,EACCI,GAAkE,CACjE,IAAMZ,EAAc,CAACY,EAAkB,IAAM,EAAGN,EAAY,CAAC,EACvDO,EAAavB,EAAY,MAAQ,EAAI,GAAKA,EAAY,IAQ5D,MAAO,CACL,QAPcvH,EAAe,qCAAqC,CAClE,aAAc6C,EACd,YAAa0E,EACb,iBAAkBU,EAClB,WAAYY,EAAkB,KAC/B,EAGC,KAAMlJ,GAA0B,sBAChC,SAAUkD,EAAK,KACf,WAAYiG,EACZ,aAAcb,EAElB,CAAC,CAIL,CAAC,CAAC,CAIN,CAvEgBrI,EAAA+G,GAAA,sCAyEhB,SAASpG,GACPT,EACAC,EACAC,EAAqD,CAErD,IAAMgD,EAAmC,CAAA,EAEnC+F,EAAatJ,EAAIM,EAAaiJ,GAAcA,EAAU,IAAI,EAEhE,OAAAhC,EAAQlH,EAAY4D,GAAY,CAC9B,IAAMuF,EAAevF,EAAS,KAC9B,GAAIH,GAASwF,EAAYE,CAAY,EAAG,CACtC,IAAM9F,EAASnD,EAAe,4BAA4B0D,CAAQ,EAElEV,EAAO,KAAK,CACV,QAASG,EACT,KAAMxD,GAA0B,gCAChC,SAAUsJ,EACX,EAEL,CAAC,EAEMjG,CACT,CAvBSpD,EAAAW,GAAA,0CC7pBH,SAAU2I,GACdC,EAA2B,CAE3B,IAAMC,EAA8CC,GAASF,EAAS,CACpE,eAAgBG,GACjB,EAEKC,EAA8C,CAAA,EACpD,OAAAC,EAAQL,EAAQ,MAAQM,GAAQ,CAC9BF,EAAcE,EAAK,IAAI,EAAIA,CAC7B,CAAC,EACMP,GAAkBK,EAAeH,EAAc,cAAc,CACtE,CAZgBM,EAAAR,GAAA,kBAcV,SAAUS,GAAgBR,EAK/B,CACC,OAAAA,EAAUE,GAASF,EAAS,CAC1B,eAAgBS,GACjB,EAEMD,GACLR,EAAQ,MACRA,EAAQ,WACRA,EAAQ,eACRA,EAAQ,WAAW,CAEvB,CAhBgBO,EAAAC,GAAA,mBC1BhB,IAAME,GAA6B,2BAC7BC,GAA0B,uBAC1BC,GAAuB,qBACvBC,GAAiC,6BAEjCC,GAA8B,CAClCJ,GACAC,GACAC,GACAC,IAGF,OAAO,OAAOC,EAA2B,EAGnC,SAAUC,GAAuBC,EAAY,CAEjD,OAAOC,GAASH,GAA6BE,EAAM,IAAI,CACzD,CAHgBE,EAAAH,GAAA,0BAKhB,IAAeI,GAAf,cACU,KAAK,CA5Bf,MA4Be,CAAAD,EAAA,6BAMb,YACEE,EACOC,EAAa,CAEpB,MAAMD,CAAO,EAFN,KAAA,MAAAC,EAJT,KAAA,eAA2B,CAAA,EASzB,OAAO,eAAe,KAAM,WAAW,SAAS,EAG5C,MAAM,mBACR,MAAM,kBAAkB,KAAM,KAAK,WAAW,CAElD,GAGWC,GAAP,cAAwCH,EAAoB,CAlDlE,MAkDkE,CAAAD,EAAA,iCAChE,YACEE,EACAC,EACOE,EAAqB,CAE5B,MAAMH,EAASC,CAAK,EAFb,KAAA,cAAAE,EAGP,KAAK,KAAOb,EACd,GAGWc,GAAP,cAAoCL,EAAoB,CA7D9D,MA6D8D,CAAAD,EAAA,6BAC5D,YACEE,EACAC,EACOE,EAAqB,CAE5B,MAAMH,EAASC,CAAK,EAFb,KAAA,cAAAE,EAGP,KAAK,KAAOZ,EACd,GAGWc,GAAP,cAA0CN,EAAoB,CAxEpE,MAwEoE,CAAAD,EAAA,mCAClE,YAAYE,EAAiBC,EAAa,CACxC,MAAMD,EAASC,CAAK,EACpB,KAAK,KAAOR,EACd,GAGWa,GAAP,cAAkCP,EAAoB,CA/E5D,MA+E4D,CAAAD,EAAA,2BAC1D,YACEE,EACAC,EACOE,EAAqB,CAE5B,MAAMH,EAASC,CAAK,EAFb,KAAA,cAAAE,EAGP,KAAK,KAAOX,EACd,GCzDK,IAAMe,GAAsB,CAAA,EAQtBC,GAA6B,0BAE7BC,GAAP,cAAuC,KAAK,CAxClD,MAwCkD,CAAAC,EAAA,gCAChD,YAAYC,EAAe,CACzB,MAAMA,CAAO,EACb,KAAK,KAAOH,EACd,GAMWI,GAAP,KAAkB,CAlDxB,MAkDwB,CAAAF,EAAA,oBAKtB,gBAAgBG,EAAqB,CACnC,KAAK,iBAAmB,CAAA,EACxB,KAAK,cAAgB,CAAA,EAErB,KAAK,gBAAkBC,EAAID,EAAQ,iBAAiB,EAC/CA,EAAO,gBACRE,GAAsB,gBAKtB,KAAK,kBACP,KAAK,4BAA8BC,GAEvC,CAEO,iBAAiBC,EAAkB,CACxC,IAAMC,EAAcC,GAClBF,EACA,GACA,IACA,IACA,IACA,IACA,IACA,GAAG,EAEL,OAAAC,EAAY,qBAAuB,GAC5BA,CACT,CAEO,iCAAiCD,EAAkB,CACxD,MAAO,EACT,CAEO,gCAAgCA,EAAkB,CACvD,MAAO,EACT,CAEA,wBAEEG,EACAC,EACAC,EACAC,EAA0B,CAG1B,IAAMC,EAAgB,KAAK,oBAAmB,EACxCC,EAAkB,KAAK,iBAAgB,EACvCC,EAA2B,CAAA,EAC7BC,EAAoB,GAElBC,EAAyB,KAAK,GAAG,CAAC,EACpCC,EAAY,KAAK,GAAG,CAAC,EAEnBC,EAAuBpB,EAAA,IAAK,CAChC,IAAMqB,EAAgB,KAAK,GAAG,CAAC,EAGzBC,EAAM,KAAK,qBAAqB,0BAA0B,CAC9D,SAAUT,EACV,OAAQK,EACR,SAAUG,EACV,SAAU,KAAK,oBAAmB,EACnC,EACKE,EAAQ,IAAIC,GAChBF,EACAJ,EACA,KAAK,GAAG,CAAC,CAAC,EAGZK,EAAM,eAAiBE,GAAUT,CAAc,EAC/C,KAAK,WAAWO,CAAK,CACvB,EAlB6B,wBAoB7B,KAAO,CAACN,GAEN,GAAI,KAAK,aAAaE,EAAWN,CAAe,EAAG,CACjDO,EAAoB,EACpB,eACSR,EAAc,KAAK,IAAI,EAAG,CAEnCQ,EAAoB,EAEpBV,EAAY,MAAM,KAAMC,CAAe,EACvC,YACS,KAAK,aAAaQ,EAAWL,CAAa,EACnDG,EAAoB,IAEpBE,EAAY,KAAK,WAAU,EAC3B,KAAK,
kBAAkBA,EAAWH,CAAc,GAOpD,KAAK,iBAAiBD,CAAe,CACvC,CAEA,kCAEEW,EACAC,EACAC,EAA6B,CAsB7B,MAlBI,EAAAA,IAAa,IAKb,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGF,CAAuB,GAMrD,KAAK,eAAc,GAQrB,KAAK,yBACHA,EACA,KAAK,4BAA4BA,EAAyBC,CAAU,CAAC,EAO3E,CAGA,4BAEEpB,EACAsB,EAAoB,CAEpB,IAAMC,EAAc,KAAK,sBAAsBvB,EAASsB,CAAY,EAEpE,OADgB,KAAK,0BAA0BC,CAAW,CAE5D,CAEA,kBAEEjB,EACAkB,EAAoB,CAEpB,GAAI,KAAK,mCAAmClB,EAAiBkB,CAAO,EAElE,OADoB,KAAK,iBAAiBlB,CAAe,EAI3D,GAAI,KAAK,kCAAkCA,CAAe,EAAG,CAC3D,IAAMmB,EAAU,KAAK,WAAU,EAC/B,YAAK,aAAY,EACVA,EAGT,MAAM,IAAIjC,GAAwB,eAAe,CACnD,CAEA,yBAEEkC,EACAF,EAAoB,CAEpB,OACE,KAAK,mCAAmCE,EAAeF,CAAO,GAC9D,KAAK,kCAAkCE,CAAa,CAExD,CAEA,mCAEEpB,EACAkB,EAAoB,CAOpB,GALI,CAAC,KAAK,iCAAiClB,CAAe,GAKtDqB,EAAQH,CAAO,EACjB,MAAO,GAGT,IAAMI,EAAgB,KAAK,GAAG,CAAC,EAM/B,OAJEC,GAAKL,EAAUM,GACN,KAAK,aAAaF,EAAeE,CAAsB,CAC/D,IAAM,MAGX,CAEA,kCAEExB,EAA0B,CAE1B,OAAK,KAAK,gCAAgCA,CAAe,EAIvB,KAAK,aACrC,KAAK,GAAG,CAAC,EACTA,CAAe,EALR,EAQX,CAEA,yBAEEyB,EAAuB,CAEvB,IAAMC,EAAY,KAAK,iBAAgB,EACjCC,EAAuB,KAAK,0BAA0BD,CAAS,EACrE,OAAOE,GAASD,EAAsBF,CAAY,CACpD,CAEA,qBAAmB,CACjB,IAAMI,EAA4B,KAAK,iBAAgB,EAEnDC,EAAY,KAAK,GAAG,CAAC,EACrBC,EAAI,EACR,OAAa,CACX,IAAMC,EAAaT,GAAKM,EAA4BI,GACjCC,GAAaJ,EAAWG,CAAa,CAEvD,EACD,GAAID,IAAe,OACjB,OAAOA,EAETF,EAAY,KAAK,GAAGC,CAAC,EACrBA,IAEJ,CAEA,kBAAgB,CAEd,GAAI,KAAK,WAAW,SAAW,EAC7B,OAAO/C,GAET,IAAMmD,EAAoB,KAAK,6BAA4B,EACrDC,EAAc,KAAK,mCAAkC,EACrDC,EAAoB,KAAK,iCAAgC,EAE/D,MAAO,CACL,SAAU,KAAK,wBAAwBF,CAAiB,EACxD,iBAAkBC,EAClB,OAAQ,KAAK,wBAAwBC,CAAiB,EAE1D,CAEA,yBAAuB,CACrB,IAAMC,EAAoB,KAAK,WACzBC,EAA0B,KAAK,sBAErC,OAAOC,EAAIF,EAAmB,CAACG,EAAUC,IACnCA,IAAQ,EACH1D,GAEF,CACL,SAAU,KAAK,wBAAwByD,CAAQ,EAC/C,iBAAkBF,EAAwBG,CAAG,EAC7C,OAAQ,KAAK,wBAAwBJ,EAAkBI,EAAM,CAAC,CAAC,EAElE,CACH,CAEA,kBAAgB,CACd,IAAMC,EAAcH,EAAI,KAAK,wBAAuB,EAAKI,GAChD,KAAK,0BAA0BA,CAAO,CAC9C,EACD,OAAYC,GAAQF,CAAW,CACjC,CAEA,0BAEEjB,EAAqB,CAErB,GAAIA,IAAc1C,GAChB,MAAO,CAAC8D,EAAG,EAGb,IAAMC,EACJrB,EAAU,SAAWA,EAAU,iBAAmBsB,GAAKtB,EAAU,OAEnE,OAAO,KAAK,cAAcqB,CAAU,CACtC,CAIA,kBAEEE,EACAC,EAAsB,CAEtB,OAAK,KAAK,aAAaD,EAAOH,EAAG,GAC/BI,EAAa,KAAKD,CAAK,EAElBC,CACT,CAEA,SAA8BxD,EAAkB,CAC9C,IAAMS,EAA2B,CAAA,EAC7BgB,EAAU,KAAK,GAAG,CAAC,EACvB,KAAO,KAAK,aAAaA,EAASzB,CAAO,IAAM,IAC7CyB,EAAU,KAAK,WAAU,EACzB,KAAK,kBAAkBA,EAAShB,CAAc,EAGhD,OAAOS,GAAUT,CAAc,CACjC,CAEA,4BAEEgD,EACAC,EACAC,EACAC,EACAC,EACAC,EACAzC,EAAkB,CAIpB,CAEA,sBAEErB,EACAsB,EAAoB,CAEpB,IAAMyC,EAA0B,KAAK,0BAAyB,EACxDC,EAAgCC,GAAM,KAAK,qBAAqB,EAQtE,MAPyB,CACvB,UAAWF,EACX,gBAAiBC,EACjB,QAAShE,EACT,kBAAmBsB,EAIvB,CACA,2BAAyB,CACvB,OAAOwB,EAAI,KAAK,WAAaoB,GAC3B,KAAK,wBAAwBA,CAAa,CAAC,CAE/C,GAGI,SAAUnE,GAEd0D,EACAC,EACAC,EACAC,EACAC,EACAC,EACAzC,EAAkB,CAElB,IAAM8C,EAAM,KAAK,4BAA4BP,EAAcC,CAAc,EACrEO,EAAoB,KAAK,iBAAiBD,CAAG,EACjD,GAAIC,IAAsB,OAAW,CACnC,IAAMC,EAAe,KAAK,oBAAmB,EACvCC,EAAc,KAAK,mBAAkB,EAAGD,CAAY,EAG1DD,EADE,IAAIN,EAAeQ,EAAaT,CAAc,EACrB,aAAY,EACvC,KAAK,iBAAiBM,CAAG,EAAIC,EAG/B,IAAIjD,EAA0BiD,EAAkB,MAC5ChD,EAAagD,EAAkB,WAC7BG,EAAcH,EAAkB,YAKpC,KAAK,WAAW,SAAW,GAC3BG,GACApD,IAA4B,SAE5BA,EAA0BiC,GAC1BhC,EAAa,GAKX,EAAAD,IAA4B,QAAaC,IAAe,SAK1D,KAAK,kCACHD,EACAC,EACAC,CAAQ,GAMV,KAAK,wBACHoC,EACAC,EACAC,EACAxC,CAAuB,CAG7B,CA3DgB1B,EAAAM,GAAA,+BCrYV,SAAUyE,GACdC,EACAC,EACAC,EAAkB,CAElB,OAAOA,EAAaD,EAAeD,CACrC,CANgBG,EAAAJ,GAAA,+BCEV,IAAOK,GAAP,KAA2B,CAlBjC,MAkBiC,CAAAC,EAAA,6BAG/B,YAAYC,EAAmC,OAC7C,KAAK,cACHC,EAAAD,GAAS,gBAAY,MAAAC,IAAA,OAAAA,EAAIC,GAAsB,YACnD,CAEA,SAASF,EAIR,CACC,IAAMG,EAAsB,KAAK,wBAAwBH,EAAQ,KAAK,EAEtE,GAAII,EAAQD,CAAmB,EAAG,CAChC,IAAME,EAAiB,KAAK,4BAA4BL,EAAQ,KAAK,EAC/DM,EAAsB,KAAK,yCAC/BN,EAAQ,MACR,KAAK,YAAY,EAEbO,EAAwB,KAAK,kCACjCP,EAAQ,MACR,KAAK,YAAY,EAQnB,MANkB
,CAChB,GAAGG,EACH,GAAGE,EACH,GAAGC,EACH,GAAGC,GAIP,OAAOJ,CACT,CAEA,wBAAwBK,EAAa,CACnC,OAAOC,GAAQD,EAAQE,GACrBC,GACED,EACAA,EACAE,EAAoC,CACrC,CAEL,CAEA,4BAA4BJ,EAAa,CACvC,OAAOC,GAAQD,EAAQE,GACrBG,GACEH,EACAE,EAAoC,CACrC,CAEL,CAEA,yCACEJ,EACAM,EAAoB,CAEpB,OAAOL,GAAQD,EAAQE,GACrBK,GACEL,EACAI,EACAF,EAAoC,CACrC,CAEL,CAEA,kCACEJ,EACAM,EAAoB,CAEpB,OAAOE,GACLR,EACAM,EACAF,EAAoC,CAExC,CAEA,6BAA6BZ,EAM5B,CACC,OAAOiB,GACLjB,EAAQ,eACRA,EAAQ,KACRA,EAAQ,aACRA,EAAQ,cACRA,EAAQ,qBACRkB,EAA8B,CAElC,CAEA,0BAA0BlB,EAMzB,CACC,OAAOmB,GACLnB,EAAQ,eACRA,EAAQ,KACRA,EAAQ,aACRA,EAAQ,qBACRoB,GAAYpB,EAAQ,QAAQ,EAC5BqB,EAAuC,CAE3C,GCxGI,IAAOC,GAAP,KAAiB,CAjCvB,MAiCuB,CAAAC,EAAA,mBAMrB,eAAeC,EAAqB,CAClC,KAAK,qBAAuBC,EAAID,EAAQ,sBAAsB,EACzDA,EAAO,qBACRE,GAAsB,qBAE1B,KAAK,aAAeD,EAAID,EAAQ,cAAc,EACzCA,EAAO,aACRE,GAAsB,aAE1B,KAAK,kBAAoBD,EAAID,EAAQ,mBAAmB,EACnDA,EAAO,kBACR,IAAIG,GAAqB,CAAE,aAAc,KAAK,YAAY,CAAE,EAEhE,KAAK,oBAAsB,IAAI,GACjC,CAEA,6BAAkDC,EAAa,CAC7DC,EAAQD,EAAQE,GAAY,CAC1B,KAAK,WAAW,GAAGA,EAAS,IAAI,kBAAmB,IAAK,CACtD,GAAM,CACJ,YAAAC,EACA,WAAAC,EACA,OAAAC,EACA,oBAAAC,EACA,iCAAAC,EACA,wBAAAC,CAAuB,EACrBC,GAAeP,CAAQ,EAE3BD,EAAQE,EAAcO,GAAY,CAChC,IAAMC,EAAUD,EAAS,MAAQ,EAAI,GAAKA,EAAS,IACnD,KAAK,WAAW,GAAGE,GAAqBF,CAAQ,CAAC,GAAGC,CAAO,GAAI,IAAK,CAClE,IAAME,EAAS,KAAK,kBAAkB,6BAA6B,CACjE,eAAgBH,EAAS,IACzB,KAAMR,EACN,aAAcQ,EAAS,cAAgB,KAAK,aAC5C,cAAeA,EAAS,cACxB,qBAAsB,KAAK,qBAC5B,EAEKI,EAAMC,GACV,KAAK,oBAAoBb,EAAS,IAAI,EACtC,IACAQ,EAAS,GAAG,EAEd,KAAK,eAAeI,EAAKD,CAAM,CACjC,CAAC,CACH,CAAC,EAEDZ,EAAQG,EAAaM,GAAY,CAC/B,KAAK,qBACHR,EACAQ,EAAS,IACT,IACA,aACAA,EAAS,aACTE,GAAqBF,CAAQ,CAAC,CAElC,CAAC,EAEDT,EAAQI,EAASK,GAAY,CAC3B,KAAK,qBACHR,EACAQ,EAAS,IACT,IACA,SACAA,EAAS,aACTE,GAAqBF,CAAQ,CAAC,CAElC,CAAC,EAEDT,EAAQK,EAAsBI,GAAY,CACxC,KAAK,qBACHR,EACAQ,EAAS,IACT,KACA,sBACAA,EAAS,aACTE,GAAqBF,CAAQ,CAAC,CAElC,CAAC,EAEDT,EAAQM,EAAmCG,GAAY,CACrD,KAAK,qBACHR,EACAQ,EAAS,IACT,KACA,mCACAA,EAAS,aACTE,GAAqBF,CAAQ,CAAC,CAElC,CAAC,EAEDT,EAAQO,EAA0BE,GAAY,CAC5C,KAAK,qBACHR,EACAQ,EAAS,IACT,KACA,0BACAA,EAAS,aACTE,GAAqBF,CAAQ,CAAC,CAElC,CAAC,CACH,CAAC,CACH,CAAC,CACH,CAEA,qBAEEM,EACAC,EACAC,EACAC,EACAC,EACAC,EAAqB,CAErB,KAAK,WACH,GAAGA,CAAa,GAAGJ,IAAmB,EAAI,GAAKA,CAAc,GAC7D,IAAK,CACH,IAAMJ,EAAS,KAAK,kBAAkB,0BAA0B,CAC9D,eAAAI,EACA,KAAAD,EACA,aAAcI,GAAoB,KAAK,aACvC,qBAAsB,KAAK,qBAC3B,SAAAD,EACD,EACKL,EAAMC,GACV,KAAK,oBAAoBC,EAAK,IAAI,EAClCE,EACAD,CAAc,EAEhB,KAAK,eAAeH,EAAKD,CAAM,CACjC,CAAC,CAEL,CAGA,4BAEES,EACAC,EAAkB,CAElB,IAAMC,EAAyB,KAAK,6BAA4B,EAChE,OAAOT,GACLS,EACAF,EACAC,CAAU,CAEd,CAEA,mBAAwCT,EAAW,CACjD,OAAO,KAAK,oBAAoB,IAAIA,CAAG,CACzC,CAGA,eAAoCA,EAAaW,EAAe,CAC9D,KAAK,oBAAoB,IAAIX,EAAKW,CAAK,CACzC,GAGIC,GAAN,cAAyCC,EAAW,CAtMpD,MAsMoD,CAAAhC,EAAA,mCAApD,aAAA,qBACS,KAAA,WAOH,CACF,OAAQ,CAAA,EACR,YAAa,CAAA,EACb,WAAY,CAAA,EACZ,wBAAyB,CAAA,EACzB,oBAAqB,CAAA,EACrB,iCAAkC,CAAA,EAuCtC,CApCE,OAAK,CACH,KAAK,WAAa,CAChB,OAAQ,CAAA,EACR,YAAa,CAAA,EACb,WAAY,CAAA,EACZ,wBAAyB,CAAA,EACzB,oBAAqB,CAAA,EACrB,iCAAkC,CAAA,EAEtC,CAEO,YAAYU,EAAc,CAC/B,KAAK,WAAW,OAAO,KAAKA,CAAM,CACpC,CAEO,6BAA6BuB,EAAgC,CAClE,KAAK,WAAW,wBAAwB,KAAKA,CAAO,CACtD,CAEO,yBAAyBC,EAA+B,CAC7D,KAAK,WAAW,oBAAoB,KAAKA,CAAU,CACrD,CAEO,sCACLC,EAA+C,CAE/C,KAAK,WAAW,iCAAiC,KAAKA,CAAa,CACrE,CAEO,gBAAgBC,EAAgB,CACrC,KAAK,WAAW,WAAW,KAAKA,CAAI,CACtC,CAEO,iBAAiBC,EAAe,CACrC,KAAK,WAAW,YAAY,KAAKA,CAAE,CACrC,GAGIC,GAAmB,IAAIP,GACvB,SAAUjB,GAAeO,EAAU,CAQvCiB,GAAiB,MAAK,EACtBjB,EAAK,OAAOiB,EAAgB,EAC5B,IAAMC,EAAaD,GAAiB,WAEpC,OAAAA,GAAiB,MAAK,EACVC,CACd,CAdgBvC,EAAAc,GAAA,kBCrPV,SAAU0B,GACdC,EACAC,EAAoE,CAGhE,MAAMD,EAAiB,WAAW,IAAM,IAI1CA,EAAiB,YAAcC,EAAgB,YAC/CD,EAAiB,UAAYC,EAAgB,WAMtCD,EAAiB,UAAaC,EAAgB,YACrD
D,EAAiB,UAAYC,EAAgB,UAEjD,CAnBgBC,EAAAH,GAAA,6BA4BV,SAAUI,GACdH,EACAC,EAAgC,CAG5B,MAAMD,EAAiB,WAAW,IAAM,IAI1CA,EAAiB,YAAcC,EAAgB,YAC/CD,EAAiB,YAAcC,EAAgB,YAC/CD,EAAiB,UAAYC,EAAgB,UAC7CD,EAAiB,UAAYC,EAAgB,UAC7CD,EAAiB,UAAYC,EAAgB,UAC7CD,EAAiB,QAAUC,EAAgB,SAMpCD,EAAiB,UAAaC,EAAgB,YACrDD,EAAiB,UAAYC,EAAgB,UAC7CD,EAAiB,UAAYC,EAAgB,UAC7CD,EAAiB,QAAUC,EAAgB,QAE/C,CAzBgBC,EAAAC,GAAA,uBA2BV,SAAUC,GACdC,EACAC,EACAC,EAAqB,CAEjBF,EAAK,SAASE,CAAa,IAAM,OACnCF,EAAK,SAASE,CAAa,EAAI,CAACD,CAAK,EAErCD,EAAK,SAASE,CAAa,EAAE,KAAKD,CAAK,CAE3C,CAVgBJ,EAAAE,GAAA,oBAYV,SAAUI,GACdH,EACAI,EACAC,EAAe,CAEXL,EAAK,SAASI,CAAQ,IAAM,OAC9BJ,EAAK,SAASI,CAAQ,EAAI,CAACC,CAAU,EAErCL,EAAK,SAASI,CAAQ,EAAE,KAAKC,CAAU,CAE3C,CAVgBR,EAAAM,GAAA,wBC5EhB,IAAMG,GAAO,OAEP,SAAUC,GAAeC,EAASC,EAAiB,CACvD,OAAO,eAAeD,EAAKF,GAAM,CAC/B,WAAY,GACZ,aAAc,GACd,SAAU,GACV,MAAOG,EACR,CACH,CAPgBC,EAAAH,GAAA,kBCYV,SAAUI,GAAiBC,EAAUC,EAAS,CAClD,IAAMC,EAAgBC,GAAKH,CAAG,EACxBI,EAAsBF,EAAc,OAC1C,QAAS,EAAI,EAAG,EAAIE,EAAqB,IAAK,CAC5C,IAAMC,EAAgBH,EAAc,CAAC,EAC/BI,EAAiBN,EAAIK,CAAa,EAClCE,EAAuBD,EAAe,OAC5C,QAASE,EAAI,EAAGA,EAAID,EAAsBC,IAAK,CAC7C,IAAMC,EAAiBH,EAAeE,CAAC,EAEnCC,EAAU,eAAiB,QAC7B,KAAKA,EAAU,IAAI,EAAEA,EAAU,SAAUR,CAAK,GAKtD,CAhBgBS,EAAAX,GAAA,gBAkBV,SAAUY,GACdC,EACAC,EAAmB,CAInB,IAAMC,EAA0BJ,EAAA,UAAA,CAAa,EAAb,sBAKhCK,GAAeD,EAAoBF,EAAc,eAAe,EAEhE,IAAMI,EAAgB,CACpB,MAAON,EAAA,SAAUO,EAA8BhB,EAAU,CASvD,GAPIiB,GAAQD,CAAO,IAGjBA,EAAUA,EAAQ,CAAC,GAIjB,CAAAE,GAAYF,CAAO,EAIvB,OAAO,KAAKA,EAAQ,IAAI,EAAEA,EAAQ,SAAUhB,CAAK,CACnD,EAdO,SAgBP,gBAAiBS,EAAA,UAAA,CACf,IAAMU,EAA2BC,GAAgB,KAAMR,CAAS,EAChE,GAAI,CAACS,EAAQF,CAAwB,EAAG,CACtC,IAAMG,EAAgBC,EACpBJ,EACCK,GAAiBA,EAAa,GAAG,EAEpC,MAAM,MACJ,mCAAmC,KAAK,YAAY,IAAI;GACnDF,EAAc,KAAK;;CAAM,EAAE,QAAQ,MAAO;EAAM,CAAC,EAAE,EAG9D,EAZiB,oBAenB,OAAAT,EAAmB,UAAYE,EAC/BF,EAAmB,UAAU,YAAcA,EAE3CA,EAAmB,YAAcD,EAE1BC,CACT,CAnDgBJ,EAAAC,GAAA,wCAqDV,SAAUe,GACdd,EACAC,EACAc,EAAyB,CAIzB,IAAMb,EAA0BJ,EAAA,UAAA,CAAa,EAAb,sBAKhCK,GAAeD,EAAoBF,EAAc,2BAA2B,EAE5E,IAAMgB,EAAoB,OAAO,OAAOD,EAAgB,SAAS,EACjE,OAAAE,EAAQhB,EAAYiB,GAAY,CAC9BF,EAAkBE,CAAQ,EAAI/B,EAChC,CAAC,EAEDe,EAAmB,UAAYc,EAC/Bd,EAAmB,UAAU,YAAcA,EAEpCA,CACT,CAvBgBJ,EAAAgB,GAAA,4CAyBhB,IAAYK,IAAZ,SAAYA,EAAyB,CACnCA,EAAAA,EAAA,iBAAA,CAAA,EAAA,mBACAA,EAAAA,EAAA,eAAA,CAAA,EAAA,gBACF,GAHYA,KAAAA,GAAyB,CAAA,EAAA,EAW/B,SAAUV,GACdW,EACAnB,EAAmB,CAInB,OAFsBoB,GAA0BD,EAAiBnB,CAAS,CAG5E,CAPgBH,EAAAW,GAAA,mBASV,SAAUY,GACdD,EACAnB,EAAmB,CAEnB,IAAMqB,EAAmBC,GAAOtB,EAAYuB,GACnCC,GAAYL,EAAwBI,CAAY,CAAC,IAAM,EAC/D,EAEKE,EAAoCd,EACxCU,EACCE,IACQ,CACL,IAAK,4BAA4BA,CAAY,QAC3CJ,EAAgB,YAAY,IAC7B,gBACD,KAAMD,GAA0B,eAChC,WAAYK,GAEf,EAGH,OAAOG,GAAiCD,CAAM,CAChD,CAtBgB5B,EAAAuB,GAAA,6BCzGV,IAAOO,GAAP,KAAkB,CAzBxB,MAyBwB,CAAAC,EAAA,oBAoBtB,gBAAqCC,EAAqB,CAUxD,GATA,KAAK,UAAY,CAAA,EAGjB,KAAK,UAAaA,EAAe,UAEjC,KAAK,qBAAuBC,EAAID,EAAQ,sBAAsB,EACzDA,EAAO,qBACRE,GAAsB,qBAEtB,CAAC,KAAK,UACR,KAAK,yBAA2BC,GAChC,KAAK,sBAAwBA,GAC7B,KAAK,gBAAkBA,GACvB,KAAK,mBAAqBA,GAC1B,KAAK,YAAcA,WAEf,QAAQ,KAAK,KAAK,oBAAoB,EACpC,KAAK,iBACP,KAAK,yBAA2BC,GAChC,KAAK,wBAA0BA,GAC/B,KAAK,YAAcD,GACnB,KAAK,uBAAyB,KAAK,qCAEnC,KAAK,yBAA2BA,GAChC,KAAK,wBAA0BA,GAC/B,KAAK,YAAc,KAAK,gBACxB,KAAK,uBAAyB,KAAK,2CAE5B,cAAc,KAAK,KAAK,oBAAoB,EACjD,KAAK,iBACP,KAAK,yBAAgCE,GACrC,KAAK,wBAA+BA,GACpC,KAAK,YAAcF,GACnB,KAAK,uBACH,KAAK,2CAEP,KAAK,yBAA2BA,GAChC,KAAK,wBAA0BA,GAC/B,KAAK,YAAc,KAAK,sBACxB,KAAK,uBACH,KAAK,iDAEA,QAAQ,KAAK,KAAK,oBAAoB,EAC/C,KAAK,yBAA2BA,GAChC,KAAK,wBAA0BA,GAC/B,KAAK,YAAcA,GACnB,KAAK,uBAAyBA,OAE9B,OAAM,MACJ,kDAAkDH,EAAO,oBAAoB,GAAG,CAIxF,CAEA,yCAEEM,EAAY,CAEZA,EAAQ,SAAW,CACjB,YAAa,IACb,UAAW,IAEf,CAEA,wCAEEA,EAAY,CAEZA,EAAQ,SAAW,CAKjB,YAA
a,KAAK,GAAG,CAAC,EAAE,YACxB,UAAW,IAEf,CAEA,mCAAwDA,EAAY,CAClEA,EAAQ,SAAW,CACjB,YAAa,IACb,UAAW,IACX,YAAa,IACb,UAAW,IACX,QAAS,IACT,UAAW,IAEf,CAOA,kCAAuDA,EAAY,CACjE,IAAMC,EAAY,KAAK,GAAG,CAAC,EAC3BD,EAAQ,SAAW,CACjB,YAAaC,EAAU,YACvB,UAAWA,EAAU,UACrB,YAAaA,EAAU,YACvB,UAAW,IACX,QAAS,IACT,UAAW,IAEf,CAEA,yBAA8CC,EAAoB,CAChE,IAAMF,EAAmB,CACvB,KAAME,EACN,SAAU,OAAO,OAAO,IAAI,GAG9B,KAAK,uBAAuBF,CAAO,EACnC,KAAK,UAAU,KAAKA,CAAO,CAC7B,CAEA,uBAAqB,CACnB,KAAK,UAAU,IAAG,CACpB,CAEA,gBAAqCG,EAAoB,CAEvD,IAAMC,EAAY,KAAK,GAAG,CAAC,EACrBC,EAAMF,EAAY,SAIpBE,EAAI,aAAeD,EAAU,aAC/BC,EAAI,UAAYD,EAAU,UAC1BC,EAAI,QAAUD,EAAU,QACxBC,EAAI,UAAYD,EAAU,YAI1BC,EAAI,YAAc,IAClBA,EAAI,UAAY,IAChBA,EAAI,YAAc,IAEtB,CAEA,sBAA2CF,EAAoB,CAC7D,IAAMC,EAAY,KAAK,GAAG,CAAC,EAErBC,EAAMF,EAAY,SAIpBE,EAAI,aAAeD,EAAU,YAC/BC,EAAI,UAAYD,EAAU,UAI1BC,EAAI,YAAc,GAEtB,CAEA,gBAEEC,EACAC,EAAqB,CAErB,IAAMC,EAAU,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,EACxDC,GAAiBD,EAASD,EAAeD,CAAG,EAE5C,KAAK,yBAAyBE,EAAQ,SAAgBD,CAAa,CACrE,CAEA,mBAEEG,EACAC,EAAgB,CAEhB,IAAMC,EAAa,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,EAC3DC,GAAqBD,EAAYD,EAAUD,CAAa,EAExD,KAAK,wBAAwBE,EAAW,SAAWF,EAAc,QAAS,CAC5E,CAEA,8BAA4B,CAK1B,GAAII,GAAY,KAAK,yBAAyB,EAAG,CAC/C,IAAMC,EAA+BC,GACnC,KAAK,UACLC,GAAK,KAAK,oBAAoB,CAAC,EAEjC,YAAK,0BAA4BF,EAC1BA,EAGT,OAAY,KAAK,yBACnB,CAEA,0CAAwC,CAKtC,GAAID,GAAY,KAAK,qCAAqC,EAAG,CAC3D,IAAMI,EAAiBC,GACrB,KAAK,UACLF,GAAK,KAAK,oBAAoB,EAC9B,KAAK,6BAA4B,CAAE,EAErC,YAAK,sCAAwCC,EACtCA,EAGT,OAAY,KAAK,qCACnB,CAEA,8BAA4B,CAC1B,IAAME,EAAY,KAAK,WACvB,OAAOA,EAAUA,EAAU,OAAS,CAAC,CACvC,CAEA,kCAAgC,CAC9B,IAAMA,EAAY,KAAK,WACvB,OAAOA,EAAUA,EAAU,OAAS,CAAC,CACvC,CAEA,oCAAkC,CAChC,IAAMC,EAAkB,KAAK,sBAC7B,OAAOA,EAAgBA,EAAgB,OAAS,CAAC,CACnD,GCtQI,IAAOC,GAAP,KAAmB,CAXzB,MAWyB,CAAAC,EAAA,qBAKvB,kBAAgB,CACd,KAAK,UAAY,CAAA,EACjB,KAAK,gBAAkB,EACvB,KAAK,QAAU,EACjB,CAEA,IAAI,MAAMC,EAAkB,CAG1B,GAAI,KAAK,mBAAqB,GAC5B,MAAM,MACJ,kFAAkF,EAKtF,KAAK,MAAK,EACV,KAAK,UAAYA,EACjB,KAAK,gBAAkBA,EAAS,MAClC,CAEA,IAAI,OAAK,CACP,OAAO,KAAK,SACd,CAGA,YAAU,CACR,OAAI,KAAK,SAAW,KAAK,UAAU,OAAS,GAC1C,KAAK,aAAY,EACV,KAAK,GAAG,CAAC,GAETC,EAEX,CAIA,GAAwBC,EAAe,CACrC,IAAMC,EAAY,KAAK,QAAUD,EACjC,OAAIC,EAAY,GAAK,KAAK,iBAAmBA,EACpCF,GAEA,KAAK,UAAUE,CAAS,CAEnC,CAEA,cAAY,CACV,KAAK,SACP,CAEA,kBAAgB,CACd,OAAO,KAAK,OACd,CAEA,iBAAsCC,EAAgB,CACpD,KAAK,QAAUA,CACjB,CAEA,iBAAe,CACb,KAAK,QAAU,EACjB,CAEA,uBAAqB,CACnB,KAAK,QAAU,KAAK,UAAU,OAAS,CACzC,CAEA,kBAAgB,CACd,OAAO,KAAK,iBAAgB,CAC9B,GCnDI,IAAOC,GAAP,KAAoB,CAlB1B,MAkB0B,CAAAC,EAAA,sBACxB,OAA+BC,EAAa,CAC1C,OAAOA,EAAK,KAAK,IAAI,CACvB,CAEA,QAEEC,EACAC,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAASD,EAAKE,CAAO,CACnD,CAEA,QAEEF,EACAG,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAYH,EAAKE,CAAO,CACtD,CAEA,OAEEF,EACAI,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmBJ,CAAG,CACnD,CAEA,GAEEA,EACAK,EAA6C,CAE7C,OAAO,KAAK,WAAWA,EAAYL,CAAG,CACxC,CAEA,KAEEA,EACAI,EAA0D,CAE1D,OAAO,KAAK,aAAaJ,EAAKI,CAAiB,CACjD,CAEA,WAEEJ,EACAI,EAAiE,CAEjE,OAAO,KAAK,mBAAmBJ,EAAKI,CAAiB,CACvD,CAEA,QAEEH,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO,CACjD,CAEA,SAEED,EACAC,EAA2B,CAE3B,OAAO,KAAK,gBAAgBD,EAAS,EAAGC,CAAO
,CACjD,CAEA,QAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,SAEEC,EACAD,EAAiC,CAEjC,OAAO,KAAK,gBAAgBC,EAAY,EAAGD,CAAO,CACpD,CAEA,OAEEE,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,QAEEA,EAA0D,CAE1D,OAAO,KAAK,eAAeA,EAAmB,CAAC,CACjD,CAEA,GAEEC,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,IAEEA,EAAiD,CAEjD,OAAO,KAAK,WAAWA,EAAY,CAAC,CACtC,CAEA,KAEED,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,MAEEA,EAA0D,CAE1D,KAAK,aAAa,EAAGA,CAAiB,CACxC,CAEA,SAAmCF,EAA+B,CAChE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,UAAoCA,EAA+B,CACjE,KAAK,qBAAqB,EAAGA,CAAO,CACtC,CAEA,aAEEE,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,OAAO,KAAK,mBAAmB,EAAGA,CAAiB,CACrD,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,cAEEA,EAAiE,CAEjE,KAAK,mBAAmB,EAAGA,CAAiB,CAC9C,CAEA,iBAEEF,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EA
AqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,kBAEEA,EAAqC,CAErC,KAAK,2BAA2B,EAAGA,CAAO,CAC5C,CAEA,KAEEI,EACAC,EACAC,EAAyBC,GAAmB,CAE5C,GAAIC,GAAS,KAAK,kBAAmBJ,CAAI,EAAG,CAO1C,IAAMK,EAAQ,CACZ,QANAC,GAAqC,4BAA4B,CAC/D,aAAcN,EACd,YAAa,KAAK,UACnB,EAID,KAAMO,GAA0B,oBAChC,SAAUP,GAEZ,KAAK,iBAAiB,KAAKK,CAAK,EAGlC,KAAK,kBAAkB,KAAKL,CAAI,EAEhC,IAAMQ,EAAqB,KAAK,WAAWR,EAAMC,EAAgBC,CAAM,EACtE,YAAaF,CAAI,EAAIQ,EACfA,CACT,CAEA,cAEER,EACAP,EACAS,EAAyBC,GAAmB,CAE5C,IAAMM,EAAuCC,GAC3CV,EACA,KAAK,kBACL,KAAK,SAAS,EAEhB,KAAK,iBAAmB,KAAK,iBAAiB,OAAOS,CAAU,EAE/D,IAAMD,EAAqB,KAAK,WAAWR,EAAMP,EAAMS,CAAM,EAC5D,YAAaF,CAAI,EAAIQ,EACfA,CACT,CAEA,UAEEG,EACAC,EAAY,CAEZ,OAAO,UAAA,CAEL,KAAK,oBAAoB,KAAK,CAAC,EAC/B,IAAMC,EAAW,KAAK,eAAc,EACpC,GAAI,CACF,OAAAF,EAAY,MAAM,KAAMC,CAAI,EAErB,SACAE,EAAG,CACV,GAAIC,GAAuBD,CAAC,EAC1B,MAAO,GAEP,MAAMA,UAGR,KAAK,iBAAiBD,CAAQ,EAC9B,KAAK,oBAAoB,IAAG,EAEhC,CACF,CAGO,oBAAkB,CACvB,OAAO,KAAK,oBACd,CAEO,8BAA4B,CACjC,OAAOG,GAAiBC,GAAO,KAAK,oBAAoB,CAAC,CAC3D,GCvoBI,IAAOC,GAAP,KAAuB,CApD7B,MAoD6B,CAAAC,EAAA,yBAe3B,qBACEC,EACAC,EAAqB,CAiBrB,GAfA,KAAK,UAAY,KAAK,YAAY,KAElC,KAAK,oBAAsB,CAAA,EAC3B,KAAK,oBAAsB,CAAA,EAC3B,KAAK,iBAAmB,IACxB,KAAK,aAAeC,GACpB,KAAK,WAAa,EAElB,KAAK,kBAAoB,CAAA,EACzB,KAAK,UAAY,CAAA,EACjB,KAAK,oBAAsB,CAAA,EAC3B,KAAK,WAAa,CAAA,EAClB,KAAK,sBAAwB,CAAA,EAC7B,KAAK,qBAAuB,CAAA,EAExBC,EAAIF,EAAQ,mBAAmB,EACjC,MAAM,MACJ;;sBAE0B,EAI9B,GAAIG,GAAQJ,CAAe,EAAG,CAI5B,GAAIK,EAAQL,CAAwB,EAClC,MAAM,MACJ;;2CAE+C,EAInD,GAAI,OAAQA,EAA0B,CAAC,EAAE,aAAgB,SACvD,MAAM,MACJ;;sBAE0B,EAKhC,GAAII,GAAQJ,CAAe,EACzB,KAAK,UAAYM,GACfN,EACA,CAACO,EAAKC,KACJD,EAAIC,EAAQ,IAAI,EAAIA,EACbD,GAET,CAAA,CAAwC,UAG1CJ,EAAIH,EAAiB,OAAO,GAC5BS,GAAMC,GAAQC,GAAaX,EAAiB,KAAK,CAAC,EAAGY,EAAW,EAChE,CACA,IAAMC,EAAgBH,GAAQC,GAAaX,EAAiB,KAAK,CAAC,EAC5Dc,EAAeC,GAAKF,CAAa,EACvC,KAAK,UAAiBP,GACpBQ,EACA,CAACP,EAAKC,KACJD,EAAIC,EAAQ,IAAI,EAAIA,EACbD,GAET,CAAA,CAAwC,UAEjCS,GAAShB,CAAe,EACjC,KAAK,UAAYiB,GAAMjB,CAAsC,MAE7D,OAAM,IAAI,MACR,wIACuE,EAM3E,KAAK,UAAU,IAASkB,GAExB,IAAML,EAAgBV,EAAIH,EAAiB,OAAO,EAC9CU,GAAQC,GAAaX,EAAiB,KAAK,CAAC,EAC5CW,GAAOX,CAAe,EACpBmB,EAAwBV,GAAMI,EAAgBO,GAClDf,EAAQe,EAAiB,eAAe,CAAC,EAG3C,KAAK,aAAeD,EAChBjB,GACAmB,GAKJC,GAAkBX,GAAO,KAAK,SAAS,CAAC,CAC1C,CAEA,WAEEY,EACAC,EACAvB,EAAsB,CAEtB,GAAI,KAAK,iBACP,MAAM,MACJ,iBAAiBsB,CAAQ;6FACuE,EAGpG,IAAME,EAAyBtB,EAAIF,EAAQ,eAAe,EACrDA,EAAO,cACRyB,GAAoB,cAClBC,EAAoBxB,EAAIF,EAAQ,mBAAmB,EACpDA,EAAO,kBACRyB,GAAoB,kBAIlBE,EACJ,KAAK,kBAAqB,GAE5B,KAAK,mBACL,KAAK,oBAAoBA,CAAS,EAAIL,EACtC,KAAK,oBAAoBA,CAAQ,EAAIK,EAErC,IAAIC,EAIJ,OAAI,KAAK,YAAc,GACrBA,EAAoB9B,EAAA,YAEf+B,EAAU,CAEb,GAAI,CACF,KAAK,0BAA0BF,EAAWL,EAAU,KAAK,UAAU,EACnEC,EAAK,MAAM,KAAMM,CAAI,EACrB,IAAMC,EAAM,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,EACpD,YAAK,YAAYA,CAAG,EACbA,QACAC,EAAG,CACV,OAAO,KAAK,gBAAgBA,EAAGP,EAAeE,CAAiB,UAE/D,KAAK,uBAAsB,EAE/B,EAfoB,qBAiBpBE,EAAoB9B,EAAA,YAEf+B,EAAU,CAEb,GAAI,CACF,YAAK,0BAA0BF,EAAWL,EAAU,KAAK,UAAU,EAC5DC,EAAK,MAAM,KAAMM,CAAI,QACrBE,EAAG,CACV,OAAO,KAAK,gBAAgBA,EAAGP,EAAeE,CAAiB,UAE/D,KAAK,uBAAsB,EAE/B,EAZoB,wBAeoC,OAAO,OAC/DE,EACA,CAAE,SAAAN,EAAU,sBAAuBC,CAAI,CAAE,CAI7C,CAEA,gBAEE,EACAS,EACAN,EAA2B,CAE3B,IAAMO,EAAqB,KAAK,WAAW,SAAW,EAKhDC,EACJF,GAAuB,CAAC,KAAK,eAAc,GAAM,KAAK,gBAExD,GAAIG,GAAuB,CAAC,EAAG,CAC7B,IAAMC,EAAkB,EACxB,GAAIF,EAAe,CACjB,IAAMG,EAAgB,KAAK,oBAAmB,EAC9C,GAAI,KAAK,yBAAyBA,CAAa,EAE7C,GADAD,EAAW,eAAiB,KAAK,SAASC,CAAa,E
ACnD,KAAK,UAAW,CAClB,IAAMC,EACJ,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,EAC1C,OAAAA,EAAiB,cAAgB,GAC1BA,MAEP,QAAOZ,EAAkB,CAAC,MAEvB,CACL,GAAI,KAAK,UAAW,CAClB,IAAMY,EACJ,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,EAC1CA,EAAiB,cAAgB,GACjCF,EAAW,iBAAmBE,EAGhC,MAAMF,OAEH,IAAIH,EAET,YAAK,sBAAqB,EAGnBP,EAAkB,CAAC,EAG1B,MAAMU,OAIR,OAAM,CAEV,CAGA,eAEEG,EACAC,EAAkB,CAElB,IAAMC,EAAM,KAAK,4BAA4B,IAAYD,CAAU,EACnE,OAAO,KAAK,oBAAoBD,EAAmBC,EAAYC,CAAG,CACpE,CAEA,oBAEEF,EACAC,EACAC,EAAW,CAEX,IAAIC,EAAgB,KAAK,mBAAmBD,CAAG,EAC3CE,EACJ,GAAI,OAAOJ,GAAsB,WAAY,CAC3CI,EAASJ,EAAkB,IAC3B,IAAMK,EAAYL,EAAkB,KAEpC,GAAIK,IAAc,OAAW,CAC3B,IAAMC,EAAuBH,EAC7BA,EAAgB5C,EAAA,IACP8C,EAAU,KAAK,IAAI,GAAKC,EAAqB,KAAK,IAAI,EAD/C,uBAKlBF,EAASJ,EAGX,GAAIG,EAAc,KAAK,IAAI,IAAM,GAC/B,OAAOC,EAAO,KAAK,IAAI,CAG3B,CAEA,mBAEEG,EACAP,EAAiE,CAEjE,IAAMQ,EAAQ,KAAK,4BACjB,KACAD,CAAc,EAEhB,OAAO,KAAK,wBACVA,EACAP,EACAQ,CAAK,CAET,CAEA,wBAEED,EACAP,EACAE,EAAW,CAEX,IAAIC,EAAgB,KAAK,mBAAmBD,CAAG,EAC3CE,EACJ,GAAI,OAAOJ,GAAsB,WAAY,CAC3CI,EAASJ,EAAkB,IAC3B,IAAMK,EAAYL,EAAkB,KAEpC,GAAIK,IAAc,OAAW,CAC3B,IAAMC,EAAuBH,EAC7BA,EAAgB5C,EAAA,IACP8C,EAAU,KAAK,IAAI,GAAKC,EAAqB,KAAK,IAAI,EAD/C,uBAKlBF,EAASJ,EAGX,GAAeG,EAAe,KAAK,IAAI,IAAM,GAAM,CACjD,IAAIM,EAAW,KAAK,mBAAmBL,CAAM,EAC7C,KACaD,EAAe,KAAK,IAAI,IAAM,IACzCM,IAAa,IAEbA,EAAW,KAAK,mBAAmBL,CAAM,MAG3C,OAAM,KAAK,wBACTG,EACAG,GAAU,qBACkBV,EAAmB,OAAO,EAS1D,KAAK,4BACH,KAAK,mBACL,CAACO,EAAgBP,CAAiB,EAC7BG,EACL,KACAI,EACAI,EAAiC,CAErC,CAEA,2BAEEJ,EACAK,EAAqC,CAErC,IAAMJ,EAAQ,KAAK,4BACjB,KACAD,CAAc,EAEhB,KAAK,gCAAgCA,EAAgBK,EAASJ,CAAK,CACrE,CAEA,gCAEED,EACAK,EACAV,EAAW,CAEX,IAAME,EAASQ,EAAQ,IACjBC,EAAYD,EAAQ,IAK1B,GAHoC,KAAK,mBAAmBV,CAAG,EAG/B,KAAK,IAAI,IAAM,GAAM,CAC9BE,EAAQ,KAAK,IAAI,EAItC,IAAMU,EAAyBvD,EAAA,IACtB,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGsD,CAAS,EADjB,0BAK/B,KAAO,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGA,CAAS,IAAM,IAGlD,KAAK,QAAQA,CAAS,EAEDT,EAAQ,KAAK,IAAI,EAIxC,KAAK,4BACH,KAAK,4BACL,CACEG,EACAM,EACAC,EACAV,EACAW,IAEFD,EACA,KACAP,EACAQ,EAAoC,MAGtC,OAAM,KAAK,wBACTR,EACAG,GAAU,oCACVE,EAAQ,OAAO,CAGrB,CAEA,aAEEL,EACAP,EAA0D,CAE1D,IAAMQ,EAAQ,KAAK,4BAA4B,IAAUD,CAAc,EACvE,OAAO,KAAK,kBAAkBA,EAAgBP,EAAmBQ,CAAK,CACxE,CAEA,kBAEED,EACAP,EACAE,EAAW,CAEX,IAAIc,EAAoB,KAAK,mBAAmBd,CAAG,EAC/CE,EACJ,GAAI,OAAOJ,GAAsB,WAAY,CAC3CI,EAASJ,EAAkB,IAC3B,IAAMK,EAAYL,EAAkB,KAEpC,GAAIK,IAAc,OAAW,CAC3B,IAAMC,EAAuBU,EAC7BA,EAAoBzD,EAAA,IACX8C,EAAU,KAAK,IAAI,GAAKC,EAAqB,KAAK,IAAI,EAD3C,2BAKtBF,EAASJ,EAGX,IAAIS,EAAW,GACf,KAAOO,EAAkB,KAAK,IAAI,IAAM,IAAQP,IAAa,IAC3DA,EAAW,KAAK,mBAAmBL,CAAM,EAI3C,KAAK,4BACH,KAAK,aACL,CAACG,EAAgBP,CAAiB,EAC7BgB,EACL,IACAT,EACAU,GAMAR,CAAQ,CAEZ,CAEA,qBAEEF,EACAK,EAA+B,CAE/B,IAAMJ,EAAQ,KAAK,4BACjB,KACAD,CAAc,EAEhB,KAAK,0BAA0BA,EAAgBK,EAASJ,CAAK,CAC/D,CAEA,0BAEED,EACAK,EACAV,EAAW,CAEX,IAAME,EAASQ,EAAQ,IACjBC,EAAYD,EAAQ,IAI1B,GAH6B,KAAK,mBAAmBV,CAAG,EAG/B,KAAK,IAAI,IAAM,GAAM,CAC5CE,EAAO,KAAK,IAAI,EAEhB,IAAMU,EAAyBvD,EAAA,IACtB,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGsD,CAAS,EADjB,0BAI/B,KAAO,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGA,CAAS,IAAM,IAGlD,KAAK,QAAQA,CAAS,EAEtBT,EAAO,KAAK,IAAI,EAIlB,KAAK,4BACH,KAAK,4BACL,CACEG,EACAM,EACAC,EACAV,EACAc,IAEFJ,EACA,KACAP,EACAW,EAA8B,EAGpC,CAEA,4BAEEX,EACAM,EACAC,EACAV,EACAe,EAAyE,CAEzE,KAAOL,EAAsB,GAG3B,KAAK,QAAQD,CAAS,EACtBT,EAAO,KAAK,IAAI,EASlB,KAAK,4BACH,KAAK,4BACL,CACEG,EACAM,EACAC,EACAV,EACAe,GAEFL,EACA,KACAP,EACAY,CAAuB,CAE3B,CAEA,mBAAwCf,EAAgB,CACtD,IAAMgB,EAAkB,KAAK,iBAAgB,EAC7C,OAAAhB,EAAO,KAAK,IAAI,EACO,KAAK,iBAAgB,EAIpBgB,CAC1B,CAEA,WAEEC,EACApB,EAAkB,CAElB,IAAMO,EAAQ,KAAK,4BAA4B,IAAQP,CAAU,EAC3DqB,EAAO1D,GAAQyD,CAAU,EAAIA,EAAaA,EAAW,IAGrDE,EADS,KAAK,mBAAmBf,CAAK,EAChB,KAAK,KAAMc,CAAI,EAC3C,GAAIC,IAAiB,OAEnB,OAD+BD,EAAKC,CAAY
,EACvB,IAAI,KAAK,IAAI,EAExC,KAAK,oBACHtB,EACCoB,EAAqC,OAAO,CAEjD,CAEA,wBAAsB,CAOpB,GANA,KAAK,WAAW,IAAG,EACnB,KAAK,sBAAsB,IAAG,EAG9B,KAAK,sBAAqB,EAEtB,KAAK,WAAW,SAAW,GAAK,KAAK,eAAc,IAAO,GAAO,CACnE,IAAMG,EAAoB,KAAK,GAAG,CAAC,EAC7BC,EAAS,KAAK,qBAAqB,8BAA8B,CACrE,eAAgBD,EAChB,SAAU,KAAK,oBAAmB,EACnC,EACD,KAAK,WACH,IAAIE,GAA2BD,EAAQD,CAAiB,CAAC,EAG/D,CAEA,gBAEEG,EACAC,EACAhB,EAAiC,CAEjC,IAAIiB,EACJ,GAAI,CACF,IAAMvC,EAAOsB,IAAY,OAAYA,EAAQ,KAAO,OACpD,YAAK,WAAagB,EAClBC,EAAaF,EAAW,MAAM,KAAMrC,CAAI,EACxC,KAAK,mBACHuC,EACAjB,IAAY,QAAaA,EAAQ,QAAU,OACvCA,EAAQ,MACRe,EAAW,QAAQ,EAElBE,QACArC,EAAG,CACV,MAAM,KAAK,qBAAqBA,EAAGoB,EAASe,EAAW,QAAQ,EAEnE,CAEA,qBAEE,EACAf,EACA7B,EAAgB,CAEhB,MAAIa,GAAuB,CAAC,GAAK,EAAE,mBAAqB,SACtD,KAAK,mBACH,EAAE,iBACFgB,IAAY,QAAaA,EAAQ,QAAU,OACvCA,EAAQ,MACR7B,CAAQ,EAGd,OAAO,EAAE,kBAEL,CACR,CAEA,gBAEEf,EACA4D,EACAhB,EAAsC,CAEtC,IAAIkB,EACJ,GAAI,CACF,IAAMC,EAAY,KAAK,GAAG,CAAC,EACvB,KAAK,aAAaA,EAAW/D,CAAO,IAAM,IAC5C,KAAK,aAAY,EACjB8D,EAAgBC,GAEhB,KAAK,qBAAqB/D,EAAS+D,EAAWnB,CAAO,QAEhDoB,EAAkB,CACzBF,EAAgB,KAAK,wBACnB9D,EACA4D,EACAI,CAAgB,EAIpB,YAAK,gBACHpB,IAAY,QAAaA,EAAQ,QAAU,OACvCA,EAAQ,MACR5C,EAAQ,KACZ8D,CAAa,EAERA,CACT,CAEA,qBAEE9D,EACA+D,EACAnB,EAAsC,CAEtC,IAAIqB,EACEC,EAAgB,KAAK,GAAG,CAAC,EAC/B,MAAItB,IAAY,QAAaA,EAAQ,QACnCqB,EAAMrB,EAAQ,QAEdqB,EAAM,KAAK,qBAAqB,0BAA0B,CACxD,SAAUjE,EACV,OAAQ+D,EACR,SAAUG,EACV,SAAU,KAAK,oBAAmB,EACnC,EAEG,KAAK,WACT,IAAIC,GAAyBF,EAAKF,EAAWG,CAAa,CAAC,CAE/D,CAEA,wBAEElE,EACA4D,EACAI,EAAuB,CAIvB,GACE,KAAK,iBAELA,EAAiB,OAAS,4BAC1B,CAAC,KAAK,eAAc,EACpB,CACA,IAAMI,EAAU,KAAK,4BAAiCpE,EAAS4D,CAAG,EAClE,GAAI,CACF,OAAO,KAAK,kBAAuB5D,EAASoE,CAAO,QAC5CC,EAAqB,CAC5B,MAAIA,EAAoB,OAASC,GAGzBN,EAEAK,OAIV,OAAML,CAEV,CAEA,gBAAc,CAEZ,IAAMO,EAAc,KAAK,OACnBC,EAAiB/D,GAAM,KAAK,UAAU,EAC5C,MAAO,CACL,OAAQ8D,EACR,WAAY,KAAK,iBAAgB,EACjC,WAAYC,EACZ,UAAW,KAAK,UAEpB,CAEA,iBAAsCC,EAAsB,CAC1D,KAAK,OAASA,EAAS,OACvB,KAAK,iBAAiBA,EAAS,UAAU,EACzC,KAAK,WAAaA,EAAS,UAC7B,CAEA,0BAEErD,EACAsD,EACAC,EAAwB,CAExB,KAAK,sBAAsB,KAAKA,CAAgB,EAChD,KAAK,WAAW,KAAKvD,CAAS,EAE9B,KAAK,yBAAyBsD,CAAQ,CACxC,CAEA,gBAAc,CACZ,OAAO,KAAK,oBAAoB,SAAW,CAC7C,CAEA,qBAAmB,CACjB,IAAMtD,EAAY,KAAK,6BAA4B,EACnD,OAAO,KAAK,oBAAoBA,CAAS,CAC3C,CAEA,wBAA6CA,EAAiB,CAC5D,OAAO,KAAK,oBAAoBA,CAAS,CAC3C,CAEO,gBAAc,CACnB,OAAO,KAAK,aAAa,KAAK,GAAG,CAAC,EAAGV,EAAG,CAC1C,CAEO,OAAK,CACV,KAAK,gBAAe,EACpB,KAAK,WAAa,EAClB,KAAK,oBAAsB,CAAA,EAC3B,KAAK,OAAS,CAAA,EACd,KAAK,WAAa,CAAA,EAElB,KAAK,UAAY,CAAA,EACjB,KAAK,sBAAwB,CAAA,CAC/B,GC30BI,IAAOkE,GAAP,KAAmB,CAjBzB,MAiByB,CAAAC,EAAA,qBAIvB,iBAAiBC,EAAqB,CACpC,KAAK,QAAU,CAAA,EACf,KAAK,qBAAuBC,EAAID,EAAQ,sBAAsB,EACzDA,EAAO,qBACRE,GAAsB,oBAC5B,CAEA,WAEEC,EAA4B,CAE5B,GAAIC,GAAuBD,CAAK,EAC9B,OAAAA,EAAM,QAAU,CACd,UAAW,KAAK,0BAAyB,EACzC,oBAAqBE,GAAM,KAAK,qBAAqB,GAEvD,KAAK,QAAQ,KAAKF,CAAK,EAChBA,EAEP,MAAM,MACJ,6DAA6D,CAGnE,CAEA,IAAI,QAAM,CACR,OAAOE,GAAM,KAAK,OAAO,CAC3B,CAEA,IAAI,OAAOC,EAAkC,CAC3C,KAAK,QAAUA,CACjB,CAGA,wBAEEC,EACAC,EACAC,EAAqC,CAErC,IAAMC,EAAW,KAAK,oBAAmB,EACnCC,EAAc,KAAK,mBAAkB,EAAGD,CAAQ,EAOhDE,EAN+BC,GACnCN,EACAI,EACAH,EACA,KAAK,YAAY,EAEkC,CAAC,EAChDM,EAAe,CAAA,EACrB,QAASC,EAAI,EAAGA,GAAK,KAAK,aAAcA,IACtCD,EAAa,KAAK,KAAK,GAAGC,CAAC,CAAC,EAE9B,IAAMC,EAAM,KAAK,qBAAqB,sBAAsB,CAC1D,uBAAwBJ,EACxB,OAAQE,EACR,SAAU,KAAK,GAAG,CAAC,EACnB,sBAAuBL,EACvB,SAAUC,EACX,EAED,MAAM,KAAK,WAAW,IAAIO,GAAmBD,EAAK,KAAK,GAAG,CAAC,EAAG,KAAK,GAAG,CAAC,CAAC,CAAC,CAC3E,CAGA,oBAEET,EACAW,EAA+B,CAE/B,IAAMR,EAAW,KAAK,oBAAmB,EACnCC,EAAc,KAAK,mBAAkB,EAAGD,CAAQ,EAEhDS,EAA+BC,GACnCb,EACAI,EACA,KAAK,YAAY,EAGbG,EAAe,CAAA,EACrB,QAASC,EAAI,EAAGA,GAAK,KAAK,aAAcA,IACtCD,EAAa,KAAK,KAAK,GAAGC,CAAC,CAAC,EAE9B,IAAMM,EAAgB,KAAK,G
AAG,CAAC,EAEzBC,EAAS,KAAK,qBAAqB,wBAAwB,CAC/D,oBAAqBH,EACrB,OAAQL,EACR,SAAUO,EACV,sBAAuBH,EACvB,SAAU,KAAK,oBAAmB,EACnC,EAED,MAAM,KAAK,WACT,IAAIK,GAAqBD,EAAQ,KAAK,GAAG,CAAC,EAAGD,CAAa,CAAC,CAE/D,GC7GI,IAAOG,GAAP,KAAoB,CAP1B,MAO0B,CAAAC,EAAA,sBACxB,mBAAiB,CAAI,CAEd,qBAELC,EACAC,EAAwB,CAExB,IAAMC,EAAgB,KAAK,qBAAqBF,CAAa,EAE7D,GAAIG,GAAYD,CAAa,EAC3B,MAAM,MAAM,UAAUF,CAAa,oCAAoC,EAGzE,OAAOI,GACL,CAACF,CAAa,EACdD,EACA,KAAK,aACL,KAAK,YAAY,CAErB,CAIO,0BAELI,EAA8B,CAE9B,IAAMC,EAAcC,GAAMF,EAAY,SAAS,EAEzCG,EADkB,KAAK,mBAAkB,EACTF,CAAW,EAKjD,OAJ+B,IAAIG,GACjCD,EACAH,CAAW,EACX,aAAY,CAEhB,GCEF,IAAMK,GAAwB,CAC5B,YAAa,8DAEf,OAAO,OAAOA,EAAqB,EAEnC,IAAMC,GAAmB,GACnBC,GAAiB,KAAK,IAAI,EAAG,CAAuB,EAAI,EAExDC,GAAMC,GAAY,CAAE,KAAM,wBAAyB,QAASC,GAAM,EAAE,CAAE,EAC5EC,GAAkB,CAACH,EAAG,CAAC,EACvB,IAAMI,GAAwBC,GAC5BL,GACA;qFAKA,GACA,GACA,GACA,GACA,GACA,EAAE,EAEJ,OAAO,OAAOI,EAAqB,EAEnC,IAAME,GAAmC,CACvC,KACE;qFAEF,SAAU,CAAA,GAMCC,GAAP,KAAmB,CAvEzB,MAuEyB,CAAAC,EAAA,qBAIvB,iBAAsCC,EAAqB,CACzD,KAAK,mBAAqB,CAAA,EAC1B,KAAK,gBAAkB,EACzB,CAEA,iBAAe,CACb,KAAK,gBAAkB,GAEvB,KAAK,WAAW,mBAAoB,IAAK,CAUvC,QAASC,EAAI,EAAGA,EAAI,GAAIA,IAAK,CAC3B,IAAMC,EAAMD,EAAI,EAAIA,EAAI,GACxB,KAAK,UAAUC,CAAG,EAAe,EAAI,SAAUC,EAAMC,EAAI,CACvD,OAAO,KAAK,sBAAsBD,EAAMF,EAAGG,CAAI,CACjD,EACA,KAAK,UAAUF,CAAG,EAAe,EAAI,SAAUC,EAAMC,EAAI,CACvD,OAAO,KAAK,sBAAsBD,EAAMF,EAAGG,CAAI,CACjD,EACA,KAAK,SAASF,CAAG,EAAc,EAAI,SAAUC,EAAI,CAC/C,OAAO,KAAK,qBAAqBA,EAAMF,CAAC,CAC1C,EACA,KAAK,KAAKC,CAAG,EAAU,EAAI,SAAUC,EAAI,CACvC,OAAO,KAAK,iBAAiBA,EAAMF,CAAC,CACtC,EACA,KAAK,OAAOC,CAAG,EAAY,EAAI,SAAUC,EAAI,CAC3C,KAAK,mBAAmBF,EAAGE,CAAI,CACjC,EACA,KAAK,WAAWD,CAAG,EAAgB,EAAI,SAAUC,EAAI,CACnD,KAAK,2BAA2BF,EAAGE,CAAI,CACzC,EACA,KAAK,eAAeD,CAAG,EAAoB,EAAI,SAAUC,EAAI,CAC3D,KAAK,yBAAyBF,EAAGE,CAAI,CACvC,EACA,KAAK,mBAAmBD,CAAG,EAAwB,EAAI,SAAUC,EAAI,CACnE,KAAK,iCAAiCF,EAAGE,CAAI,CAC/C,EAIF,KAAK,QAAa,SAAUD,EAAKC,EAAMC,EAAI,CACzC,OAAO,KAAK,sBAAsBD,EAAMD,EAAKE,CAAI,CACnD,EACA,KAAK,QAAa,SAAUF,EAAKC,EAAMC,EAAI,CACzC,OAAO,KAAK,sBAAsBD,EAAMD,EAAKE,CAAI,CACnD,EACA,KAAK,OAAY,SAAUF,EAAKC,EAAI,CAClC,OAAO,KAAK,qBAAqBA,EAAMD,CAAG,CAC5C,EACA,KAAK,GAAQ,SAAUA,EAAKC,EAAI,CAC9B,OAAO,KAAK,iBAAiBA,EAAMD,CAAG,CACxC,EACA,KAAK,KAAU,SAAUA,EAAKC,EAAI,CAChC,KAAK,mBAAmBD,EAAKC,CAAI,CACnC,EACA,KAAK,WAAgB,SAAUD,EAAKC,EAAI,CACtC,KAAK,yBAAyBD,EAAKC,CAAI,CACzC,EAEA,KAAK,OAAS,KAAK,cACnB,KAAK,UAAY,KAAK,iBACtB,KAAK,GAAK,KAAK,SACjB,CAAC,CACH,CAEA,kBAAgB,CACd,KAAK,gBAAkB,GAKvB,KAAK,WAAW,6BAA8B,IAAK,CACjD,IAAME,EAAY,KAElB,QAASJ,EAAI,EAAGA,EAAI,GAAIA,IAAK,CAC3B,IAAMC,EAAMD,EAAI,EAAIA,EAAI,GACxB,OAAOI,EAAK,UAAUH,CAAG,EAAE,EAC3B,OAAOG,EAAK,UAAUH,CAAG,EAAE,EAC3B,OAAOG,EAAK,SAASH,CAAG,EAAE,EAC1B,OAAOG,EAAK,KAAKH,CAAG,EAAE,EACtB,OAAOG,EAAK,OAAOH,CAAG,EAAE,EACxB,OAAOG,EAAK,WAAWH,CAAG,EAAE,EAC5B,OAAOG,EAAK,eAAeH,CAAG,EAAE,EAChC,OAAOG,EAAK,mBAAmBH,CAAG,EAAE,EAGtC,OAAOG,EAAK,QACZ,OAAOA,EAAK,QACZ,OAAOA,EAAK,OACZ,OAAOA,EAAK,GACZ,OAAOA,EAAK,KACZ,OAAOA,EAAK,WAEZ,OAAOA,EAAK,OACZ,OAAOA,EAAK,UACZ,OAAOA,EAAK,EACd,CAAC,CACH,CAKA,cAAsCC,EAAa,CAEnD,CAGA,iBACEC,EACAC,EAAY,CAEZ,MAAO,IAAM,EACf,CAIA,UAAUC,EAAe,CAGvB,OAAOC,EACT,CAEA,mBAAmBC,EAAcC,EAAa,CAC5C,GAAI,CACF,IAAMC,EAAkB,IAAIC,GAAK,CAAE,WAAY,CAAA,EAAI,KAAMH,CAAI,CAAE,EAC/D,OAAAE,EAAgB,KAAOF,EACvB,KAAK,mBAAmB,KAAKE,CAAe,EAC5CD,EAAI,KAAK,IAAI,EACb,KAAK,mBAAmB,IAAG,EACpBC,QACAE,EAAe,CACtB,GAAIA,EAAc,uBAAyB,GACzC,GAAI,CACFA,EAAc,QACZA,EAAc,QACd;;yEAEsB,CAExB,MAAMA,EAGV,MAAMA,EAEV,CAGA,qBAEEC,EACAC,EAAkB,CAElB,OAAOC,GAAW,KAAK,KAAMC,EAAQH,EAAmBC,CAAU,CACpE,CAEA,yBAEEA,EACAD,EAAiE,CAEjEE,GAAW,KAAK,KAAME,EAAqBJ,EAAmBC,CAAU,CAC1E,CAEA,iCAEEA,EACAI,EAAqC,CAErCH,GAAW,KACT,KACAI,EACAD,EACAJ,EACA5B,EAAg
B,CAEpB,CAEA,mBAEE4B,EACAD,EAA0D,CAE1DE,GAAW,KAAK,KAAMK,EAAYP,EAAmBC,CAAU,CACjE,CAEA,2BAEEA,EACAI,EAA+B,CAE/BH,GAAW,KACT,KACAM,EACAH,EACAJ,EACA5B,EAAgB,CAEpB,CAEA,iBAEEoC,EACAR,EAAkB,CAElB,OAAOS,GAAa,KAAK,KAAMD,EAAYR,CAAU,CACvD,CAEA,sBAEEU,EACAV,EACAI,EAAiC,CAGjC,GADAO,GAAuBX,CAAU,EAC7B,CAACU,GAAcE,EAAIF,EAAY,UAAU,IAAM,GAAO,CACxD,IAAMG,EAAa,IAAI,MACrB,WAAWC,GAAad,CAAU,CAAC,uEACiB,KAAK,UACrDU,CAAU,CACX;2BAEQ,KAAK,mBAAmB,CAAC,EAAG,IACrC,GAAG,EAEP,MAAAG,EAAM,qBAAuB,GACvBA,EAGR,IAAME,EAAgBC,GAAK,KAAK,kBAAkB,EAC5CC,EAAWP,EAAW,SACtBQ,EAAkB,IAAIC,EAAY,CACtC,IAAKnB,EACL,gBAAiBiB,EACjB,MAAOb,GAAS,MAEhB,eAAgB,OACjB,EACD,OAAAW,EAAS,WAAW,KAAKG,CAAe,EAEjC,KAAK,UACRtC,GACKT,EACX,CAEA,sBAEEiD,EACApB,EACAI,EAA2B,CAG3B,GADAO,GAAuBX,CAAU,EAC7B,CAACqB,GAAoBD,CAAO,EAAG,CACjC,IAAMP,EAAa,IAAI,MACrB,WAAWC,GAAad,CAAU,CAAC,mEACa,KAAK,UACjDoB,CAAO,CACR;2BAEQ,KAAK,mBAAmB,CAAC,EAAG,IACrC,GAAG,EAEP,MAAAP,EAAM,qBAAuB,GACvBA,EAER,IAAME,EAAgBC,GAAK,KAAK,kBAAkB,EAC5CE,EAAkB,IAAII,EAAS,CACnC,IAAKtB,EACL,aAAcoB,EACd,MAAOhB,GAAS,MACjB,EACD,OAAAW,EAAS,WAAW,KAAKG,CAAe,EAEjCxC,EACT,GAGF,SAASuB,GACPsB,EACAC,EACAxB,EACAyB,EAAqB,GAAK,CAE1Bd,GAAuBX,CAAU,EACjC,IAAMe,EAAgBC,GAAK,KAAK,kBAAkB,EAC5CU,EAAgBC,GAAWH,CAAW,EAAIA,EAAcA,EAAY,IAEpEI,EAAU,IAAIL,EAAgB,CAAE,WAAY,CAAA,EAAI,IAAKvB,CAAU,CAAE,EACvE,OAAIyB,IACFG,EAAQ,UAAYJ,EAAY,KAE9BZ,EAAIY,EAAa,eAAe,IAClCI,EAAQ,aAAeJ,EAAY,eAGrC,KAAK,mBAAmB,KAAKI,CAAO,EACpCF,EAAc,KAAK,IAAI,EACvBX,EAAS,WAAW,KAAKa,CAAO,EAChC,KAAK,mBAAmB,IAAG,EAEpBzD,EACT,CAxBSW,EAAAmB,GAAA,cA0BT,SAASQ,GAAae,EAAkBxB,EAAkB,CACxDW,GAAuBX,CAAU,EACjC,IAAMe,EAAgBC,GAAK,KAAK,kBAAkB,EAE5Ca,EAAaC,GAAQN,CAAW,IAAM,GACtCO,EACJF,IAAe,GAAQL,EAAcA,EAAY,IAE7CQ,EAAY,IAAIC,EAAY,CAChC,WAAY,CAAA,EACZ,IAAKjC,EACL,kBAAmB6B,GAAcL,EAAY,qBAAuB,GACrE,EACGZ,EAAIY,EAAa,eAAe,IAClCQ,EAAU,aAAeR,EAAY,eAGvC,IAAMU,EAAgBC,GAAKJ,EAAOK,GAAiBT,GAAWS,EAAQ,IAAI,CAAC,EAC3E,OAAAJ,EAAU,cAAgBE,EAE1BnB,EAAS,WAAW,KAAKiB,CAAS,EAElCK,EAAQN,EAAOK,GAAW,CACxB,IAAME,EAAc,IAAIC,EAAY,CAAE,WAAY,CAAA,CAAE,CAAE,EACtDP,EAAU,WAAW,KAAKM,CAAW,EACjC1B,EAAIwB,EAAS,oBAAoB,EACnCE,EAAY,kBAAoBF,EAAQ,mBAGjCxB,EAAIwB,EAAS,MAAM,IAC1BE,EAAY,kBAAoB,IAElC,KAAK,mBAAmB,KAAKA,CAAW,EACxCF,EAAQ,IAAI,KAAK,IAAI,EACrB,KAAK,mBAAmB,IAAG,CAC7B,CAAC,EACMjE,EACT,CArCSW,EAAA2B,GAAA,gBAuCT,SAASK,GAAa7B,EAAW,CAC/B,OAAOA,IAAQ,EAAI,GAAK,GAAGA,CAAG,EAChC,CAFSH,EAAAgC,GAAA,gBAIT,SAASH,GAAuB1B,EAAW,CACzC,GAAIA,EAAM,GAAKA,EAAMZ,GAAgB,CACnC,IAAMwC,EAAa,IAAI,MAErB,kCAAkC5B,CAAG;wDAEjCZ,GAAiB,CACnB,EAAE,EAEN,MAAAwC,EAAM,qBAAuB,GACvBA,EAEV,CAZS/B,EAAA6B,GAAA,0BClbH,IAAO6B,GAAP,KAAwB,CAR9B,MAQ8B,CAAAC,EAAA,0BAK5B,sBAAsBC,EAAqB,CACzC,GAAIC,EAAID,EAAQ,eAAe,EAAG,CAChC,IAAME,EAAoBF,EAAO,cAC3BG,EAAgB,OAAOD,GAAsB,SACnD,KAAK,kBAAoBC,EACbD,EACR,IACJ,KAAK,cAAgBC,EACjBD,EAAoB,EACnBA,OAEL,KAAK,kBAAoB,EACzB,KAAK,cAAgBE,GAAsB,cAG7C,KAAK,gBAAkB,EACzB,CAEA,WAAmCC,EAAmBC,EAAkB,CAGtE,GAAI,KAAK,gBAAkB,GAAM,CAC/B,KAAK,kBACL,IAAMC,EAAS,IAAI,MAAM,KAAK,gBAAkB,CAAC,EAAE,KAAK,GAAI,EACxD,KAAK,gBAAkB,KAAK,mBAC9B,QAAQ,IAAI,GAAGA,CAAM,QAAQF,CAAS,GAAG,EAE3C,GAAM,CAAE,KAAAG,EAAM,MAAAC,CAAK,EAAKC,GAAMJ,CAAS,EAEjCK,EAAcH,EAAO,GAAK,QAAQ,KAAO,QAAQ,IACvD,OAAI,KAAK,gBAAkB,KAAK,mBAC9BG,EAAY,GAAGJ,CAAM,QAAQF,CAAS,WAAWG,CAAI,IAAI,EAE3D,KAAK,kBACEC,MAEP,QAAOH,EAAS,CAEpB,GCpDI,SAAUM,GAAYC,EAAkBC,EAAgB,CAC5DA,EAAU,QAASC,GAAY,CAC7B,IAAMC,EAAYD,EAAS,UAC3B,OAAO,oBAAoBC,CAAS,EAAE,QAASC,GAAY,CACzD,GAAIA,IAAa,cACf,OAGF,IAAMC,EAAqB,OAAO,yBAChCF,EACAC,CAAQ,EAIRC,IACCA,EAAmB,KAAOA,EAAmB,KAE9C,OAAO,eACLL,EAAY,UACZI,EACAC,CAAkB,EAGpBL,EAAY,UAAUI,CAAQ,EAAIF,EAAS,UAAUE,CAAQ,CAEjE,CAAC,CACH,CAAC,CACH,CA3BgBE,EAAAP,GAAA,eCuCT,IAAMQ,GAAcC,GACzBC,GACA,GACA,IACA,IACA,IACA,IACA,I
ACA,GAAG,EAEL,OAAO,OAAOF,EAAW,EAIlB,IAAMG,GAET,OAAO,OAAO,CAChB,gBAAiB,GACjB,aAAc,EACd,qBAAsB,GACtB,UAAW,GACX,qBAAsBC,GACtB,qBAAsB,OACtB,cAAe,GACf,gBAAiB,GAClB,EAEYC,GAAkD,OAAO,OAAO,CAC3E,kBAAmBC,EAAA,IAAG,GAAH,qBACnB,cAAe,GAChB,EAEWC,IAAZ,SAAYA,EAAyB,CACnCA,EAAAA,EAAA,kBAAA,CAAA,EAAA,oBACAA,EAAAA,EAAA,oBAAA,CAAA,EAAA,sBACAA,EAAAA,EAAA,sBAAA,CAAA,EAAA,wBACAA,EAAAA,EAAA,sBAAA,CAAA,EAAA,wBACAA,EAAAA,EAAA,uBAAA,CAAA,EAAA,yBACAA,EAAAA,EAAA,eAAA,CAAA,EAAA,iBACAA,EAAAA,EAAA,oBAAA,CAAA,EAAA,sBACAA,EAAAA,EAAA,eAAA,CAAA,EAAA,iBACAA,EAAAA,EAAA,gCAAA,CAAA,EAAA,kCACAA,EAAAA,EAAA,mBAAA,CAAA,EAAA,qBACAA,EAAAA,EAAA,uBAAA,EAAA,EAAA,yBACAA,EAAAA,EAAA,sBAAA,EAAA,EAAA,wBACAA,EAAAA,EAAA,cAAA,EAAA,EAAA,gBACAA,EAAAA,EAAA,4BAAA,EAAA,EAAA,6BACF,GAfYA,KAAAA,GAAyB,CAAA,EAAA,EAoD/B,SAAUC,GAAUC,EAAa,OAAS,CAC9C,OAAO,UAAA,CACL,OAAOA,CACT,CACF,CAJgBH,EAAAE,GAAA,aAMV,IAAOE,GAAP,MAAOC,CAAM,CAjInB,MAiImB,CAAAL,EAAA,eAYjB,OAAO,oBAAoBM,EAAsB,CAC/C,MAAM,MACJ,4HAC+D,CAEnE,CAEO,qBAAmB,CACxB,KAAK,WAAW,sBAAuB,IAAK,CAC1C,IAAIC,EAEJ,KAAK,iBAAmB,GACxB,IAAMC,EAAY,KAAK,UAEvB,KAAK,WAAW,cAAe,IAAK,CAIlCC,GAAiB,IAAI,CACvB,CAAC,EAED,KAAK,WAAW,oBAAqB,IAAK,CACxC,GAAI,CACF,KAAK,gBAAe,EAEpBC,EAAQ,KAAK,kBAAoBC,GAAgB,CAI/C,IAAMC,EAHe,KACnBD,CAAY,EAE4B,sBACtCE,EACJ,KAAK,WAAW,GAAGF,CAAY,QAAS,IAAK,CAC3CE,EAAmB,KAAK,mBACtBF,EACAC,CAAqB,CAEzB,CAAC,EACD,KAAK,qBAAqBD,CAAY,EAAIE,CAC5C,CAAC,UAED,KAAK,iBAAgB,EAEzB,CAAC,EAED,IAAIC,EAA2C,CAAA,EAmD/C,GAlDA,KAAK,WAAW,oBAAqB,IAAK,CACxCA,EAAiBC,GAAe,CAC9B,MAAOC,GAAO,KAAK,oBAAoB,EACxC,EACD,KAAK,iBAAmB,KAAK,iBAAiB,OAAOF,CAAc,CACrE,CAAC,EAED,KAAK,WAAW,sBAAuB,IAAK,CAG1C,GAAIG,EAAQH,CAAc,GAAK,KAAK,kBAAoB,GAAO,CAC7D,IAAMI,EAAmBC,GAAgB,CACvC,MAAOH,GAAO,KAAK,oBAAoB,EACvC,WAAYA,GAAO,KAAK,SAAS,EACjC,eAAgBI,GAChB,YAAaZ,EACd,EACKa,EAA4BC,GAAkB,CAClD,kBAAmB,KAAK,kBACxB,MAAON,GAAO,KAAK,oBAAoB,EACvC,WAAYA,GAAO,KAAK,SAAS,EACjC,YAAaR,EACd,EACD,KAAK,iBAAmB,KAAK,iBAAiB,OAC5CU,EACAG,CAAyB,EAG/B,CAAC,EAGGJ,EAAQ,KAAK,gBAAgB,IAE3B,KAAK,iBACP,KAAK,WAAW,yBAA0B,IAAK,CAC7C,IAAMM,EAAaC,GACjBR,GAAO,KAAK,oBAAoB,CAAC,EAEnC,KAAK,cAAgBO,CACvB,CAAC,EAGH,KAAK,WAAW,4BAA6B,IAAK,UAChDE,GAAAC,EAAA,KAAK,mBAAkB,cAAU,MAAAD,IAAA,QAAAA,EAAA,KAAAC,EAAG,CAClC,MAAOV,GAAO,KAAK,oBAAoB,EACxC,EACD,KAAK,6BAA6BA,GAAO,KAAK,oBAAoB,CAAC,CACrE,CAAC,GAID,CAACX,EAAO,kCACR,CAACY,EAAQ,KAAK,gBAAgB,EAE9B,MAAAV,EAAgBoB,EACd,KAAK,iBACJC,GAAaA,EAAS,OAAO,EAE1B,IAAI,MACR;GAAwCrB,EAAc,KACpD;;CAAqC,CACtC,EAAE,CAGT,CAAC,CACH,CAMA,YAAYsB,EAAkCC,EAAqB,CAJnE,KAAA,iBAA6C,CAAA,EAC7C,KAAA,iBAAmB,GAIjB,IAAMC,EAAsB,KAW5B,GAVAA,EAAK,iBAAiBD,CAAM,EAC5BC,EAAK,iBAAgB,EACrBA,EAAK,eAAeD,CAAM,EAC1BC,EAAK,qBAAqBF,EAAiBC,CAAM,EACjDC,EAAK,gBAAgBD,CAAM,EAC3BC,EAAK,gBAAgBD,CAAM,EAC3BC,EAAK,kBAAiB,EACtBA,EAAK,iBAAiBD,CAAM,EAC5BC,EAAK,sBAAsBD,CAAM,EAE7BE,EAAIF,EAAQ,eAAe,EAC7B,MAAM,IAAI,MACR;;;sBAGwB,EAI5B,KAAK,gBAAkBE,EAAIF,EAAQ,iBAAiB,EAC/CA,EAAO,gBACRjC,GAAsB,eAC5B,GAjJOO,GAAA,iCAA4C,GAoJrD6B,GAAY7B,GAAQ,CAClB8B,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACD,EAaK,IAAOC,GAAP,cAAqCC,EAAM,CApTjD,MAoTiD,CAAAC,EAAA,8BAC/C,YACEC,EACAC,EAAgCC,GAAqB,CAErD,IAAMC,EAAcC,GAAMH,CAAM,EAChCE,EAAY,UAAY,GACxB,MAAMH,EAAiBG,CAAW,CACpC,GCnSI,SAAUE,GAAYC,EAAYC,EAA+BC,EAAkB,CACrF,MAAO,GAAGF,EAAK,IAAI,IAAIC,CAAI,IAAIC,CAAU,EAC7C,CAFgBC,EAAAJ,GAAA,eAaT,IAAMK,GAAY,EACZC,GAAiB,EACjBC,GAAuB,EACvBC,GAAuB,EAG7B,IAAMC,GAAgB,EAChBC,GAAgB,EAChBC,GAAqB,EACrBC,GAAsB,GACtBC,GAAqB,GACrBC,GAAe,GAuFNC,GAAhB,KAAkC,CAxIxC,MAwIwC,CAAAC,EAAA,2BAGpC,YAAYC,EAAgB,CACxB,KAAK,OAASA,CAClB,CAEA,WAAS,CACL,MAAO,EACX,GAGSC,GAAP,cAA8BH,EAAkB,CApJtD,MAoJsD,CAAAC,EAAA,uBAGlD,YAAYC,EAAkBE,EAAoB,CAC9C,MAAMF,CAAM,EACZ,KAAK,UAAYE,CACrB,GAGSC,GAAP,c
AAiCL,EAAkB,CA7JzD,MA6JyD,CAAAC,EAAA,0BACrD,YAAYC,EAAgB,CACxB,MAAMA,CAAM,CAChB,CAEA,WAAS,CACL,MAAO,EACX,GAGSI,GAAP,cAA8BN,EAAkB,CAvKtD,MAuKsD,CAAAC,EAAA,uBAIlD,YAAYM,EAA2BC,EAAYC,EAAqB,CACpE,MAAMF,CAAS,EACf,KAAK,KAAOC,EACZ,KAAK,YAAcC,CACvB,CAEA,WAAS,CACL,MAAO,EACX,GAQE,SAAUC,GAAUC,EAAa,CACnC,IAAMC,EAAW,CACb,YAAa,CAAA,EACb,eAAgB,CAAA,EAChB,iBAAkB,IAAI,IACtB,gBAAiB,IAAI,IACrB,OAAQ,CAAA,GAEZC,GAAgCD,EAAKD,CAAK,EAC1C,IAAMG,EAAaH,EAAM,OACzB,QAASI,EAAI,EAAGA,EAAID,EAAYC,IAAK,CACjC,IAAMP,EAAOG,EAAMI,CAAC,EACdC,EAAYC,GAAML,EAAKJ,EAAMA,CAAI,EACnCQ,IAAc,QAGlBE,GAAgBN,EAAKJ,EAAMQ,CAAS,EAExC,OAAOJ,CACX,CAnBgBX,EAAAS,GAAA,aAqBhB,SAASG,GAAgCD,EAAUD,EAAa,CAC5D,IAAMG,EAAaH,EAAM,OACzB,QAASI,EAAI,EAAGA,EAAID,EAAYC,IAAK,CACjC,IAAMP,EAAOG,EAAMI,CAAC,EACdI,EAAQC,GAAyBR,EAAKJ,EAAM,OAAW,CACzD,KAAMa,GACT,EACKC,EAAOF,GAAwBR,EAAKJ,EAAM,OAAW,CACvD,KAAMd,GACT,EACDyB,EAAM,KAAOG,EACbV,EAAI,iBAAiB,IAAIJ,EAAMW,CAAK,EACpCP,EAAI,gBAAgB,IAAIJ,EAAMc,CAAI,EAE1C,CAdSrB,EAAAY,GAAA,mCAgBT,SAASU,GACLX,EACAJ,EACAgB,EAAuB,CAEvB,OAAIA,aAAsBC,EACfC,GAASd,EAAKJ,EAAMgB,EAAW,aAAcA,CAAU,EACvDA,aAAsBG,EACtBC,GAAQhB,EAAKJ,EAAMgB,CAAU,EAC7BA,aAAsBK,EACtBC,GAAYlB,EAAKJ,EAAMgB,CAAU,EACjCA,aAAsBO,EACtBC,GAAOpB,EAAKJ,EAAMgB,CAAU,EAC5BA,aAAsBS,EACtBC,GAAWtB,EAAKJ,EAAMgB,CAAU,EAChCA,aAAsBW,EACtBC,GAAcxB,EAAKJ,EAAMgB,CAAU,EACnCA,aAAsBa,EACtBC,GAAoB1B,EAAKJ,EAAMgB,CAAU,EACzCA,aAAsBe,EACtBC,GAAuB5B,EAAKJ,EAAMgB,CAAU,EAE5CP,GAAML,EAAKJ,EAAMgB,CAAyB,CAEzD,CAxBSvB,EAAAsB,GAAA,QA0BT,SAASW,GAAWtB,EAAUJ,EAAY0B,EAAsB,CAC5D,IAAMO,EAAYrB,GAA8BR,EAAKJ,EAAM0B,EAAY,CACnE,KAAMQ,GACT,EACDC,GAAoB/B,EAAK6B,CAAS,EAClC,IAAMG,EAASC,GACXjC,EACAJ,EACAiC,EACAP,EACAjB,GAAML,EAAKJ,EAAM0B,CAAU,CAAC,EAEhC,OAAOY,GAAKlC,EAAKJ,EAAM0B,EAAYU,CAAM,CAC7C,CAbS3C,EAAAiC,GAAA,cAeT,SAASE,GACLxB,EACAJ,EACA0B,EAAmC,CAEnC,IAAMO,EAAYrB,GAA8BR,EAAKJ,EAAM0B,EAAY,CACnE,KAAMQ,GACT,EACDC,GAAoB/B,EAAK6B,CAAS,EAClC,IAAMG,EAASC,GACXjC,EACAJ,EACAiC,EACAP,EACAjB,GAAML,EAAKJ,EAAM0B,CAAU,CAAC,EAE1Ba,EAAMrB,GAASd,EAAKJ,EAAM0B,EAAW,UAAWA,CAAU,EAChE,OAAOY,GAAKlC,EAAKJ,EAAM0B,EAAYU,EAAQG,CAAG,CAClD,CAlBS9C,EAAAmC,GAAA,iBAoBT,SAASE,GACL1B,EACAJ,EACA0B,EAA+B,CAE/B,IAAMc,EAAY5B,GAA8BR,EAAKJ,EAAM0B,EAAY,CACnE,KAAMe,GACT,EACDN,GAAoB/B,EAAKoC,CAAS,EAClC,IAAMJ,EAASC,GACXjC,EACAJ,EACAwC,EACAd,EACAjB,GAAML,EAAKJ,EAAM0B,CAAU,CAAC,EAEhC,OAAOgB,GAAKtC,EAAKJ,EAAM0B,EAAYU,CAAM,CAC7C,CAjBS3C,EAAAqC,GAAA,uBAmBT,SAASE,GACL5B,EACAJ,EACA0B,EAA4C,CAE5C,IAAMc,EAAY5B,GAA8BR,EAAKJ,EAAM0B,EAAY,CACnE,KAAMe,GACT,EACDN,GAAoB/B,EAAKoC,CAAS,EAClC,IAAMJ,EAASC,GACXjC,EACAJ,EACAwC,EACAd,EACAjB,GAAML,EAAKJ,EAAM0B,CAAU,CAAC,EAE1Ba,EAAMrB,GAASd,EAAKJ,EAAM0B,EAAW,UAAWA,CAAU,EAChE,OAAOgB,GAAKtC,EAAKJ,EAAM0B,EAAYU,EAAQG,CAAG,CAClD,CAlBS9C,EAAAuC,GAAA,0BAoBT,SAASV,GACLlB,EACAJ,EACAsB,EAAwB,CAExB,IAAMX,EAAQC,GAA+BR,EAAKJ,EAAMsB,EAAa,CACjE,KAAMqB,GACT,EACDR,GAAoB/B,EAAKO,CAAK,EAC9B,IAAMiC,EAAOC,EAAIvB,EAAY,WAAawB,GAAM/B,GAAKX,EAAKJ,EAAM8C,CAAC,CAAC,EAElE,OADeT,GAASjC,EAAKJ,EAAMW,EAAOW,EAAa,GAAGsB,CAAI,CAElE,CAZSnD,EAAA6B,GAAA,eAcT,SAASE,GAAOpB,EAAUJ,EAAYwB,EAAc,CAChD,IAAMb,EAAQC,GAA+BR,EAAKJ,EAAMwB,EAAQ,CAC5D,KAAMmB,GACT,EACDR,GAAoB/B,EAAKO,CAAK,EAC9B,IAAMyB,EAASC,GAASjC,EAAKJ,EAAMW,EAAOa,EAAQf,GAAML,EAAKJ,EAAMwB,CAAM,CAAC,EAC1E,OAAOuB,GAAS3C,EAAKJ,EAAMwB,EAAQY,CAAM,CAC7C,CAPS3C,EAAA+B,GAAA,UAST,SAASf,GACLL,EACAJ,EACAS,EAAoC,CAEpC,IAAMuC,EAAUC,GACZJ,EAAIpC,EAAM,WAAaqC,GAAM/B,GAAKX,EAAKJ,EAAM8C,CAAC,CAAC,EAC9CA,GAAMA,IAAM,MAAS,EAE1B,OAAIE,EAAQ,SAAW,EACZA,EAAQ,CAAC,EACTA,EAAQ,SAAW,EAC1B,OAEOE,GAAU9C,EAAK4C,CAAO,CAErC,CAhBSvD,EAAAgB,GAAA,SAkBT,SAASiC,GACLtC,EACAJ,EACA0C,EACAN,EACAG,EAAe,CAEf,IAAMY,EAAWf,EAAO,KAClBgB,EAAShB,EAAO,MAEhBiB,EAAOzC,GAA4BR
,EAAKJ,EAAM0C,EAAM,CACtD,KAAMpD,GACT,EACD6C,GAAoB/B,EAAKiD,CAAI,EAC7B,IAAMC,EAAM1C,GAAuBR,EAAKJ,EAAM0C,EAAM,CAChD,KAAMnD,GACT,EACD,OAAA4D,EAAS,SAAWE,EACpBC,EAAI,SAAWD,EACfjD,EAAI,YAAYmD,GAAYvD,EAAMuC,EAAM,mCAAqC,sBAAuBG,EAAK,GAAG,CAAC,EAAIW,EACjHG,GAAQJ,EAAQC,CAAI,EAIhBd,IAAQ,QACRiB,GAAQH,EAAMF,CAAQ,EACtBK,GAAQH,EAAMC,CAAG,IAEjBE,GAAQH,EAAMC,CAAG,EAEjBE,GAAQH,EAAMd,EAAI,IAAI,EACtBiB,GAAQjB,EAAI,MAAOY,CAAQ,GAGxB,CACH,KAAMA,EACN,MAAOG,EAEf,CAtCS7D,EAAAiD,GAAA,QAwCT,SAASJ,GACLlC,EACAJ,EACAsC,EACAF,EACAG,EAAe,CAEf,IAAM5B,EAAQyB,EAAO,KACfkB,EAAMlB,EAAO,MAEbqB,EAAQ7C,GAA6BR,EAAKJ,EAAMsC,EAAM,CACxD,KAAMjD,GACT,EACD8C,GAAoB/B,EAAKqD,CAAK,EAC9B,IAAMC,EAAU9C,GAAuBR,EAAKJ,EAAMsC,EAAM,CACpD,KAAM/C,GACT,EACK8D,EAAOzC,GAA4BR,EAAKJ,EAAMsC,EAAM,CACtD,KAAMlD,GACT,EACD,OAAAqE,EAAM,SAAWJ,EACjBK,EAAQ,SAAWL,EAEnBG,GAAQC,EAAO9C,CAAK,EACpB6C,GAAQC,EAAOC,CAAO,EACtBF,GAAQF,EAAKD,CAAI,EAEbd,IAAQ,QACRiB,GAAQH,EAAMK,CAAO,EAErBF,GAAQH,EAAMd,EAAI,IAAI,EACtBiB,GAAQjB,EAAI,MAAO5B,CAAK,GAExB6C,GAAQH,EAAMI,CAAK,EAGvBrD,EAAI,YAAYmD,GAAYvD,EAAMuC,EAAM,0BAA4B,aAAcD,EAAK,GAAG,CAAC,EAAImB,EACxF,CACH,KAAMA,EACN,MAAOC,EAEf,CAzCSjE,EAAA6C,GAAA,QA2CT,SAASS,GAAS3C,EAAUJ,EAAY+C,EAAkBX,EAAiB,CACvE,IAAMzB,EAAQyB,EAAO,KACfkB,EAAMlB,EAAO,MAEnB,OAAAoB,GAAQ7C,EAAO2C,CAAG,EAElBlD,EAAI,YAAYmD,GAAYvD,EAAM,SAAU+C,EAAS,GAAG,CAAC,EAAIpC,EACtDyB,CACX,CARS3C,EAAAsD,GAAA,YAUT,SAASZ,GAAoB/B,EAAUuD,EAAoB,CACvD,OAAAvD,EAAI,eAAe,KAAKuD,CAAK,EAC7BA,EAAM,SAAWvD,EAAI,eAAe,OAAS,EACtCuD,EAAM,QACjB,CAJSlE,EAAA0C,GAAA,uBAMT,SAASE,GACLjC,EACAJ,EACAW,EACAK,KACG4B,EAA+B,CAElC,IAAMU,EAAM1C,GAAwBR,EAAKJ,EAAMgB,EAAY,CACvD,KAAM7B,GACN,MAAAwB,EACH,EACDA,EAAM,IAAM2C,EACZ,QAAWM,KAAOhB,EACVgB,IAAQ,QAERJ,GAAQ7C,EAAOiD,EAAI,IAAI,EACvBJ,GAAQI,EAAI,MAAON,CAAG,GAEtBE,GAAQ7C,EAAO2C,CAAG,EAI1B,IAAMlB,EAAoB,CACtB,KAAMzB,EACN,MAAO2C,GAEX,OAAAlD,EAAI,YAAYmD,GAAYvD,EAAM6D,GAAY7C,CAAU,EAAGA,EAAW,GAAG,CAAC,EAAIL,EACvEyB,CACX,CA5BS3C,EAAA4C,GAAA,YA8BT,SAASwB,GAAY7C,EAAuB,CACxC,GAAIA,aAAsBK,EACtB,MAAO,cACJ,GAAIL,aAAsBO,EAC7B,MAAO,SACJ,GAAIP,aAAsBS,EAC7B,MAAO,aACJ,GAAIT,aAAsBW,EAC7B,MAAO,0BACJ,GAAIX,aAAsBa,EAC7B,MAAO,sBACJ,GAAIb,aAAsBe,EAC7B,MAAO,mCAEP,MAAM,IAAI,MAAM,qCAAqC,CAE7D,CAhBStC,EAAAoE,GAAA,eAkBT,SAASX,GAAU9C,EAAUwC,EAAiB,CAC1C,IAAMkB,EAAalB,EAAK,OACxB,QAASrC,EAAI,EAAGA,EAAIuD,EAAa,EAAGvD,IAAK,CACrC,IAAM6B,EAASQ,EAAKrC,CAAC,EACjBwD,EACA3B,EAAO,KAAK,YAAY,SAAW,IACnC2B,EAAa3B,EAAO,KAAK,YAAY,CAAC,GAE1C,IAAM4B,EAAmBD,aAAsBjE,GACzCmE,EAAiBF,EACjBG,EAAOtB,EAAKrC,EAAI,CAAC,EAAE,KAErB6B,EAAO,KAAK,OAASO,IACrBP,EAAO,MAAM,OAASO,IACtBoB,IAAe,SACbC,GAAoBC,EAAe,cAAgB7B,EAAO,OACxD2B,EAAW,SAAW3B,EAAO,QAG7B4B,EACAC,EAAe,YAAcC,EAE7BH,EAAW,OAASG,EAExBC,GAAY/D,EAAKgC,EAAO,KAAK,GAG7BoB,GAAQpB,EAAO,MAAO8B,CAAI,EAIlC,IAAME,EAAQxB,EAAK,CAAC,EACdyB,EAAOzB,EAAKkB,EAAa,CAAC,EAChC,MAAO,CACH,KAAMM,EAAM,KACZ,MAAOC,EAAK,MAEpB,CArCS5E,EAAAyD,GAAA,aAuCT,SAAShC,GACLd,EACAJ,EACAJ,EACAoB,EAAqC,CAErC,IAAMsD,EAAO1D,GAAqBR,EAAKJ,EAAMgB,EAAY,CACrD,KAAM2B,GACT,EACK4B,EAAQ3D,GAAqBR,EAAKJ,EAAMgB,EAAY,CACtD,KAAM2B,GACT,EACD,OAAA6B,GAAcF,EAAM,IAAI3E,GAAe4E,EAAO3E,CAAS,CAAC,EACjD,CACH,KAAA0E,EACA,MAAAC,EAER,CAjBS9E,EAAAyB,GAAA,YAmBT,SAASE,GACLhB,EACAqE,EACAC,EAAwB,CAExB,IAAM1E,EAAO0E,EAAY,eACnB/D,EAAQP,EAAI,iBAAiB,IAAIJ,CAAI,EACrCsE,EAAO1D,GAA+BR,EAAKqE,EAAaC,EAAa,CACvE,KAAM/B,GACT,EACK4B,EAAQ3D,GAA+BR,EAAKqE,EAAaC,EAAa,CACxE,KAAM/B,GACT,EAEKgC,EAAO,IAAI7E,GAAea,EAAOX,EAAMuE,CAAK,EAClD,OAAAC,GAAcF,EAAMK,CAAI,EAEjB,CACH,KAAAL,EACA,MAAAC,EAER,CArBS9E,EAAA2B,GAAA,WAuBT,SAASV,GAAgBN,EAAUJ,EAAYS,EAAgB,CAC3D,IAAME,EAAQP,EAAI,iBAAiB,IAAIJ,CAAI,EAC3CwD,GAAQ7C,EAAOF,EAAM,IAAI,EACzB,IAAMK,EAAOV,EAAI,gBAAgB,IAAIJ,CAAI,EACzC,OAAAwD,GAA
Q/C,EAAM,MAAOK,CAAI,EACC,CACtB,KAAMH,EACN,MAAOG,EAGf,CAVSrB,EAAAiB,GAAA,mBAYT,SAAS8C,GAAQoB,EAAiBC,EAAe,CAC7C,IAAMd,EAAa,IAAIlE,GAAkBgF,CAAa,EACtDL,GAAcI,EAAGb,CAAU,CAC/B,CAHStE,EAAA+D,GAAA,WAKT,SAAS5C,GACLR,EACAJ,EACAgB,EACA8D,EAAmB,CAEnB,IAAMC,EAAO,OAAA,OAAA,CACT,IAAA3E,EACA,WAAAY,EACA,uBAAwB,GACxB,KAAAhB,EACA,YAAa,CAAA,EACb,oBAAqB,CAAA,EACrB,YAAaI,EAAI,OAAO,MAAM,EAC3B0E,CAAO,EAEd,OAAA1E,EAAI,OAAO,KAAK2E,CAAC,EACVA,CACX,CAlBStF,EAAAmB,GAAA,YAoBT,SAAS4D,GAAcb,EAAqBI,EAAsB,CAG1DJ,EAAM,YAAY,SAAW,IAC7BA,EAAM,uBAAyBI,EAAW,UAAS,GAEvDJ,EAAM,YAAY,KAAKI,CAAU,CACrC,CAPStE,EAAA+E,GAAA,iBAST,SAASL,GAAY/D,EAAUuD,EAAe,CAC1CvD,EAAI,OAAO,OAAOA,EAAI,OAAO,QAAQuD,CAAK,EAAG,CAAC,CAClD,CAFSlE,EAAA0E,GAAA,eCxmBF,IAAMa,GAAY,CAAA,EAQZC,GAAP,KAAmB,CA/BzB,MA+ByB,CAAAC,EAAA,qBAAzB,aAAA,CACU,KAAA,IAA8B,CAAA,EAC9B,KAAA,QAAuB,CAAA,CAsCjC,CAlCE,IAAI,MAAI,CACN,OAAO,KAAK,QAAQ,MACtB,CAEA,UAAQ,CAEN,KAAK,IAAM,CAAA,CACb,CAEA,IAAIC,EAAiB,CACnB,IAAMC,EAAMC,GAAgBF,CAAM,EAG5BC,KAAO,KAAK,MAChB,KAAK,IAAIA,CAAG,EAAI,KAAK,QAAQ,OAC7B,KAAK,QAAQ,KAAKD,CAAM,EAE5B,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,OACd,CAEA,IAAI,MAAI,CACN,OAAOG,EAAI,KAAK,QAAU,GAAM,EAAE,GAAG,CACvC,CAEA,IAAI,KAAG,CACL,IAAIC,EAAQ,GACZ,QAAWC,KAAK,KAAK,IACnBD,GAASC,EAAI,IAEf,OAAOD,CACT,GAGI,SAAUF,GAAgBF,EAAmBM,EAAM,GAAI,CAC3D,MAAO,GAAGA,EAAM,IAAIN,EAAO,GAAG,GAAK,EAAE,IACnCA,EAAO,MAAM,WACf,IAAIA,EAAO,MAAM,IAAKO,GAAMA,EAAE,YAAY,SAAQ,CAAE,EAAE,KAAK,GAAG,CAAC,EACjE,CAJgBR,EAAAG,GAAA,mBCZhB,SAASM,GAAeC,EAA2BC,EAAgB,CAC/D,IAAMC,EAAuC,CAAA,EAC7C,OAAQC,GAAgB,CACpB,IAAMC,EAAMD,EAAa,SAAQ,EAC7BE,EAAWH,EAAIE,CAAG,EACtB,OAAIC,IAAa,SAGbA,EAAW,CACP,cAAeL,EACf,SAAAC,EACA,OAAQ,CAAA,GAEZC,EAAIE,CAAG,EAAIC,GACJA,CAEf,CACJ,CAjBSC,EAAAP,GAAA,kBAmBT,IAAMQ,GAAN,KAAkB,CAhFlB,MAgFkB,CAAAD,EAAA,qBAAlB,aAAA,CACY,KAAA,WAAwB,CAAA,CAkBpC,CAhBI,GAAGE,EAAa,CACZ,OAAOA,GAAS,KAAK,WAAW,QAAU,KAAK,WAAWA,CAAK,CACnE,CAEA,IAAIA,EAAeC,EAAc,CAC7B,KAAK,WAAWD,CAAK,EAAIC,CAC7B,CAEA,UAAQ,CACJ,IAAIA,EAAQ,GACNC,EAAO,KAAK,WAAW,OAC7B,QAASC,EAAI,EAAGA,EAAID,EAAMC,IACtBF,GAAS,KAAK,WAAWE,CAAC,IAAM,GAAO,IAAM,IAEjD,OAAOF,CACX,GASEG,GAAmB,IAAIL,GAMhBM,GAAP,cAAuCC,EAAoB,CAjHjE,MAiHiE,CAAAR,EAAA,gCAM7D,YAAYS,EAAgC,OACxC,MAAK,EACL,KAAK,SAAUC,EAAAD,GAAS,WAAO,MAAAC,IAAA,OAAAA,EAAMC,GAAY,QAAQ,IAAIA,CAAO,CACxE,CAES,WAAWF,EAA0B,CAC1C,KAAK,IAAMG,GAAUH,EAAQ,KAAK,EAClC,KAAK,KAAOI,GAAiB,KAAK,GAAG,CACzC,CAES,0CAAwC,CAC7C,MAAO,CAAA,CACX,CAES,6BAA2B,CAChC,MAAO,CAAA,CACX,CAES,6BAA6BJ,EAMrC,CACG,GAAM,CAAE,eAAAK,EAAgB,KAAAC,EAAM,cAAAC,EAAe,qBAAAC,CAAoB,EAAKR,EAChES,EAAO,KAAK,KACZC,EAAU,KAAK,QACfrB,EAAMsB,GAAYL,EAAM,cAAeD,CAAc,EAErDO,EADgB,KAAK,IAAI,YAAYvB,CAAG,EACV,SAC9BwB,EAA2CC,EAC7CC,GAAkB,CACd,aAAc,EACd,WAAYV,EACZ,SAAU,cACV,KAAMC,EACT,EACAU,GAAYF,EAAIE,EAAUC,GAASA,EAAK,CAAC,CAAC,CAAC,EAGhD,GAAIC,GAAcL,EAAa,EAAK,GAAK,CAACL,EAAsB,CAC5D,IAAMW,EAAcC,GAChBP,EACA,CAACQ,EAAQL,EAASM,KACdC,EAAQP,EAAUQ,GAAe,CACzBA,IACAH,EAAOG,EAAY,YAAa,EAAIF,EACpCC,EAAQC,EAAY,gBAAmBC,GAAqB,CACxDJ,EAAOI,CAAiB,EAAIH,CAChC,CAAC,EAET,CAAC,EACMD,GAEX,CAAA,CAA4B,EAGhC,OAAId,EACO,SAA4BmB,EAAM,OACrC,IAAMC,EAAY,KAAK,GAAG,CAAC,EACrBC,EAAiCT,EAAYQ,EAAU,YAAY,EACzE,GAAID,IAAW,QAAaE,IAAe,OAAW,CAClD,IAAMC,GAAO5B,EAAAyB,EAAOE,CAAU,KAAC,MAAA3B,IAAA,OAAA,OAAAA,EAAE,KACjC,GAAI4B,IAAS,QAAaA,EAAK,KAAK,IAAI,IAAM,GAC1C,OAGR,OAAOD,CACX,EAEO,UAAA,CACH,IAAMD,EAAY,KAAK,GAAG,CAAC,EAC3B,OAAOR,EAAYQ,EAAU,YAAY,CAC7C,MAED,QAAIpB,EACA,SAA4BmB,EAAM,CACrC,IAAMI,EAAa,IAAItC,GACjBuC,EAASL,IAAW,OAAY,EAAIA,EAAO,OACjD,QAAS9B,EAAI,EAAGA,EAAImC,EAAQnC,IAAK,CAC7B,IAAMiC,EAAOH,IAAS9B,CAAC,EAAE,KACzBkC,EAAW,IAAIlC,EAAGiC,IAAS,QAAaA,EAAK,KAAK,IAAI,CAAC,EAE3D,IAAMR,EAASW,GAAgB,KAAK,KAAMvB,EAAMG,EAAekB,EAAYpB,CAAO,EAClF,OA
AO,OAAOW,GAAW,SAAWA,EAAS,MACjD,EAEO,UAAA,CACH,IAAMA,EAASW,GAAgB,KAAK,KAAMvB,EAAMG,EAAef,GAAkBa,CAAO,EACxF,OAAO,OAAOW,GAAW,SAAWA,EAAS,MACjD,CAER,CAES,0BAA0BrB,EAMlC,CACG,GAAM,CAAE,eAAAK,EAAgB,KAAAC,EAAM,SAAA2B,EAAU,qBAAAzB,CAAoB,EAAKR,EAC3DS,EAAO,KAAK,KACZC,EAAU,KAAK,QACfrB,EAAMsB,GAAYL,EAAM2B,EAAU5B,CAAc,EAEhDO,EADgB,KAAK,IAAI,YAAYvB,CAAG,EACV,SAC9B6C,EAAOpB,EACTC,GAAkB,CACd,aAAc,EACd,WAAYV,EACZ,SAAA4B,EACA,KAAA3B,EACH,EACA6B,GACQrB,EAAIqB,EAAIC,GAAMA,EAAE,CAAC,CAAC,CAC1B,EAGH,GAAIlB,GAAcgB,CAAI,GAAKA,EAAK,CAAC,EAAE,CAAC,GAAK,CAAC1B,EAAsB,CAC9D,IAAM6B,EAAMH,EAAK,CAAC,EACZI,EAAoBC,GAAQF,CAAG,EAErC,GACEC,EAAkB,SAAW,GAC7BE,EAAQF,EAAkB,CAAC,EAAE,eAAe,EAC5C,CAEA,IAAMG,EADoBH,EAAkB,CAAC,EACI,aAEjD,OAAO,UAAA,CACL,OAAO,KAAK,GAAG,CAAC,EAAE,eAAiBG,CACrC,MACK,CACL,IAAMtB,EAAcC,GAClBkB,EACA,CAACjB,EAAQG,KACHA,IAAgB,SAClBH,EAAOG,EAAY,YAAa,EAAI,GACpCD,EAAQC,EAAY,gBAAkBC,GAAqB,CACzDJ,EAAOI,CAAiB,EAAI,EAC9B,CAAC,GAEIJ,GAET,CAAA,CAA6B,EAG/B,OAAO,UAAA,CACL,IAAMM,EAAY,KAAK,GAAG,CAAC,EAC3B,OAAOR,EAAYQ,EAAU,YAAY,IAAM,EACjD,GAGJ,OAAO,UAAA,CACL,IAAMN,EAASW,GAAgB,KAAK,KAAMvB,EAAMG,EAAef,GAAkBa,CAAO,EACtF,OAAO,OAAOW,GAAW,SAAW,GAAQA,IAAW,CAC3D,CACN,GAIJ,SAASH,GAAcwB,EAAwCC,EAAa,GAAI,CAC5E,IAAMC,EAAU,IAAI,IAEpB,QAAWP,KAAOK,EAAW,CACzB,IAAMG,EAAS,IAAI,IACnB,QAAWC,KAAWT,EAAK,CACvB,GAAIS,IAAY,OAAW,CACvB,GAAIH,EAEA,MAEA,MAAO,GAGf,IAAMI,EAAU,CAACD,EAAQ,YAAa,EAAE,OAAOA,EAAQ,eAAgB,EACvE,QAAWrD,KAASsD,EAChB,GAAIH,EAAQ,IAAInD,CAAK,GACjB,GAAI,CAACoD,EAAO,IAAIpD,CAAK,EACjB,MAAO,QAGXmD,EAAQ,IAAInD,CAAK,EACjBoD,EAAO,IAAIpD,CAAK,GAKhC,MAAO,EACX,CA5BSF,EAAA2B,GAAA,iBA8BT,SAASd,GAAiB4C,EAAQ,CAC9B,IAAMC,EAAiBD,EAAI,eAAe,OACpCE,EAA4B,MAAMD,CAAc,EACtD,QAASrD,EAAI,EAAGA,EAAIqD,EAAgBrD,IAChCsD,EAActD,CAAC,EAAIZ,GAAegE,EAAI,eAAepD,CAAC,EAAGA,CAAC,EAE9D,OAAOsD,CACX,CAPS3D,EAAAa,GAAA,oBAST,SAAS4B,GAELmB,EACAjE,EACAE,EACAsB,EAAwB,CAExB,IAAM0C,EAAMD,EAAUjE,CAAQ,EAAEE,CAAY,EACxCiE,EAAQD,EAAI,MAChB,GAAIC,IAAU,OAAW,CACrB,IAAMC,EAAUC,GAAkBH,EAAI,aAAyB,EAC/DC,EAAQG,GAAYJ,EAAKK,GAAYH,CAAO,CAAC,EAC7CF,EAAI,MAAQC,EAIhB,OADYK,GAAiB,MAAM,KAAM,CAACN,EAAKC,EAAOjE,EAAcsB,CAAO,CAAC,CAEhF,CAjBSnB,EAAAyC,GAAA,mBAmBT,SAAS0B,GAELN,EACAO,EACAvE,EACAsB,EAAwB,CAExB,IAAIkD,EAAYD,EAEZ/D,EAAI,EACFqB,EAAiB,CAAA,EACnB4C,EAAI,KAAK,GAAGjE,GAAG,EAEnB,OAAa,CACT,IAAIkE,EAAIC,GAAuBH,EAAWC,CAAC,EAK3C,GAJIC,IAAM,SACNA,EAAIE,GAAuB,MAAM,KAAM,CAACZ,EAAKQ,EAAWC,EAAGjE,EAAGR,EAAcsB,CAAO,CAAC,GAGpFoD,IAAMG,GACN,OAAOC,GAA0BjD,EAAM2C,EAAWC,CAAC,EAGvD,GAAIC,EAAE,gBAAkB,GACpB,OAAOA,EAAE,WAGbF,EAAYE,EACZ7C,EAAK,KAAK4C,CAAC,EACXA,EAAI,KAAK,GAAGjE,GAAG,EAEvB,CA/BSL,EAAAmE,GAAA,oBAiCT,SAASM,GAELZ,EACAQ,EACAO,EACAC,EACAhF,EACAsB,EAAwB,CAExB,IAAM2D,EAAQC,GAAgBV,EAAU,QAASO,EAAO/E,CAAY,EACpE,GAAIiF,EAAM,OAAS,EACf,OAAAE,GAAWnB,EAAKQ,EAAWO,EAAOF,EAAS,EACpCA,GAGX,IAAIO,EAAWf,GAAYY,CAAK,EAC1BI,EAAeC,GAAaL,EAAOjF,CAAY,EAErD,GAAIqF,IAAiB,OACjBD,EAAS,cAAgB,GACzBA,EAAS,WAAaC,EACtBD,EAAS,QAAQ,UAAYC,UACtBE,GAAiCN,CAAK,EAAG,CAChD,IAAMzC,EAAagD,GAAIP,EAAM,IAAI,EACjCG,EAAS,cAAgB,GACzBA,EAAS,WAAa5C,EACtB4C,EAAS,QAAQ,UAAY5C,EAC7BiD,GAAyB,MAAM,KAAM,CAACzB,EAAKgB,EAAWC,EAAM,KAAM3D,CAAO,CAAC,EAG9E,OAAA8D,EAAWD,GAAWnB,EAAKQ,EAAWO,EAAOK,CAAQ,EAC9CA,CACX,CAhCSjF,EAAAyE,GAAA,0BAkCT,SAASa,GAELzB,EACAgB,EACAU,EACApE,EAAwB,CAExB,IAAMqE,EAA0B,CAAA,EAChC,QAASnF,EAAI,EAAGA,GAAKwE,EAAWxE,IAC5BmF,EAAW,KAAK,KAAK,GAAGnF,CAAC,EAAE,SAAS,EAExC,IAAMoF,EAAW5B,EAAI,cACf6B,EAAeD,EAAS,KACxBE,EAAaF,EAAS,WACtB9E,EAAUiF,GAAoB,CAChC,aAAAF,EACA,iBAAAH,EACA,WAAAI,EACA,WAAAH,EACH,EACDrE,EAAQR,CAAO,CACnB,CArBSX,EAAAsF,GAAA,4BAuBT,SAASM,GAAoBnF,EAK5B,CACG,IAAMoF,EAAUtE,EAAId,EAAQ,WAAaqF,GACrCC,GAAWD,CAAO,CAAC,EACrB,KAAK,IAAI,EACLE,EACFvF,EAAQ
,WAAW,MAAQ,EAAI,GAAKA,EAAQ,WAAW,IACvDwF,EACA,qCAAqCxF,EAAQ,iBAAiB,KAC1D,IAAI,CACP,SAASyF,GAAqBzF,EAAQ,UAAU,CAAC,GAAGuF,CAAU,aACnDvF,EAAQ,aAAa,IAAI;GACjCoF,CAAO;EAEf,OAAAI,EACIA,EACA;sBAEGA,CACX,CAvBSjG,EAAA4F,GAAA,uBAyBT,SAASM,GAAqBC,EAA+B,CACzD,GAAIA,aAAgBC,EAChB,MAAO,UACJ,GAAID,aAAgBE,EACvB,MAAO,SACJ,GAAIF,aAAgBG,EACvB,MAAO,KACJ,GAAIH,aAAgBI,EACvB,MAAO,eACJ,GAAIJ,aAAgBK,EACvB,MAAO,mBACJ,GAAIL,aAAgBM,EACvB,MAAO,WACJ,GAAIN,aAAgBO,EACvB,MAAO,OACJ,GAAIP,aAAgBQ,EACvB,MAAO,UAEP,MAAM,MAAM,sBAAsB,CAE1C,CApBS3G,EAAAkG,GAAA,wBAsBT,SAASvB,GACLjD,EACAkF,EACAC,EAAe,CAEf,IAAMC,EAAkBC,GACpBH,EAAS,QAAQ,SAChBhE,GAAMA,EAAE,MAAM,WAAW,EAExBoE,EAAiBC,GACnBH,EACK,OAAQlE,GAA2BA,aAAasE,EAAc,EAC9D,IAAKtE,GAAMA,EAAE,SAAS,EAC1BA,GAAMA,EAAE,YAAY,EAEzB,MAAO,CACH,YAAaiE,EACb,mBAAoBG,EACpB,UAAWtF,EAEnB,CApBS1B,EAAA2E,GAAA,6BAsBT,SAASH,GACL2C,EACAvC,EAAa,CAEb,OAAOuC,EAAM,MAAMvC,EAAM,YAAY,CACzC,CALS5E,EAAAwE,GAAA,0BAOT,SAASO,GACLqC,EACAxC,EACA/E,EAA0B,CAE1B,IAAMwH,EAAe,IAAIC,GACnBC,EAAiC,CAAA,EAEvC,QAAWC,KAAKJ,EAAQ,SAAU,CAC9B,GAAIvH,EAAa,GAAG2H,EAAE,GAAG,IAAM,GAC3B,SAEJ,GAAIA,EAAE,MAAM,OAASC,GAAe,CAChCF,EAAkB,KAAKC,CAAC,EACxB,SAEJ,IAAME,EAAmBF,EAAE,MAAM,YAAY,OAC7C,QAASnH,EAAI,EAAGA,EAAIqH,EAAkBrH,IAAK,CACvC,IAAMsH,EAAaH,EAAE,MAAM,YAAYnH,CAAC,EAClCuH,EAASC,GAAmBF,EAAY/C,CAAK,EAC/CgD,IAAW,QACXP,EAAa,IAAI,CACb,MAAOO,EACP,IAAKJ,EAAE,IACP,MAAOA,EAAE,MACZ,GAKb,IAAI1C,EAMJ,GAJIyC,EAAkB,SAAW,GAAKF,EAAa,OAAS,IACxDvC,EAAQuC,GAGRvC,IAAU,OAAW,CACrBA,EAAQ,IAAIwC,GACZ,QAAWE,KAAKH,EAAa,SACzBtD,GAAQyD,EAAG1C,CAAK,EAIxB,GAAIyC,EAAkB,OAAS,GAAK,CAACO,GAAyBhD,CAAK,EAC/D,QAAW0C,KAAKD,EACZzC,EAAM,IAAI0C,CAAC,EAInB,OAAO1C,CACX,CAlDS9E,EAAA+E,GAAA,mBAoDT,SAAS8C,GACLF,EACA/C,EAAa,CAEb,GACI+C,aAAsBT,IACtBa,GAAanD,EAAO+C,EAAW,SAAS,EAExC,OAAOA,EAAW,MAG1B,CAXS3H,EAAA6H,GAAA,sBAaT,SAAS1C,GACLiC,EACAvH,EAA0B,CAE1B,IAAIiD,EACJ,QAAW0E,KAAKJ,EAAQ,SACpB,GAAIvH,EAAa,GAAG2H,EAAE,GAAG,IAAM,IAC3B,GAAI1E,IAAQ,OACRA,EAAM0E,EAAE,YACD1E,IAAQ0E,EAAE,IACjB,OAIZ,OAAO1E,CACX,CAfS9C,EAAAmF,GAAA,gBAiBT,SAASjB,GAAYH,EAAqB,CACtC,MAAO,CACH,QAASA,EACT,MAAO,CAAA,EACP,cAAe,GACf,WAAY,GAEpB,CAPS/D,EAAAkE,GAAA,eAST,SAASc,GACLnB,EACAmE,EACApD,EACAqD,EAAY,CAEZ,OAAAA,EAAKhE,GAAYJ,EAAKoE,CAAE,EACxBD,EAAK,MAAMpD,EAAM,YAAY,EAAIqD,EAC1BA,CACX,CATSjI,EAAAgF,GAAA,cAWT,SAASf,GAAYJ,EAAUsD,EAAe,CAC1C,GAAIA,IAAUzC,GACV,OAAOyC,EAIX,IAAMe,EAASf,EAAM,QAAQ,IACvBpH,EAAW8D,EAAI,OAAOqE,CAAM,EAClC,OAAInI,IAAa,OACNA,GAEXoH,EAAM,QAAQ,SAAQ,EACtBtD,EAAI,OAAOqE,CAAM,EAAIf,EACdA,EACX,CAdSnH,EAAAiE,GAAA,eAgBT,SAASD,GAAkByB,EAAkB,CACzC,IAAM2B,EAAU,IAAIE,GAEda,EAAsB1C,EAAS,YAAY,OACjD,QAASpF,EAAI,EAAGA,EAAI8H,EAAqB9H,IAAK,CAE1C,IAAM+H,EAAoB,CACtB,MAFW3C,EAAS,YAAYpF,CAAC,EAAE,OAGnC,IAAKA,EACL,MAAO,CAAA,GAEX0D,GAAQqE,EAAQhB,CAAO,EAG3B,OAAOA,CACX,CAfSpH,EAAAgE,GAAA,qBAiBT,SAASD,GAAQqE,EAAmBhB,EAAqB,CACrD,IAAMiB,EAAID,EAAO,MAEjB,GAAIC,EAAE,OAASZ,GAAe,CAC1B,GAAIW,EAAO,MAAM,OAAS,EAAG,CACzB,IAAME,EAAW,CAAC,GAAGF,EAAO,KAAK,EAE3BG,EAA0B,CAC5B,MAFgBD,EAAS,IAAG,EAG5B,IAAKF,EAAO,IACZ,MAAOE,GAEXvE,GAAQwE,EAAcnB,CAAO,OAI7BA,EAAQ,IAAIgB,CAAM,EAEtB,OAGCC,EAAE,wBACHjB,EAAQ,IAAIgB,CAAM,EAGtB,IAAMV,EAAmBW,EAAE,YAAY,OACvC,QAAS,EAAI,EAAG,EAAIX,EAAkB,IAAK,CACvC,IAAMC,EAAaU,EAAE,YAAY,CAAC,EAC5Bb,EAAIgB,GAAiBJ,EAAQT,CAAU,EAEzCH,IAAM,QACNzD,GAAQyD,EAAGJ,CAAO,EAG9B,CAlCSpH,EAAA+D,GAAA,WAoCT,SAASyE,GACLJ,EACAT,EAAsB,CAEtB,GAAIA,aAAsBc,GACtB,MAAO,CACH,MAAOd,EAAW,OAClB,IAAKS,EAAO,IACZ,MAAOA,EAAO,OAEf,GAAIT,aAAsBe,GAAgB,CAC7C,IAAMC,EAAQ,CAAC,GAAGP,EAAO,MAAOT,EAAW,WAAW,EACtD,MAAO,CACH,MAAOA,EAAW,OAClB,IAAKS,EAAO,IACZ,MAAAO,GAIZ,CAnBS3I,EAAAwI,GAAA,oBAqBT,SAASV,GAAyBV,EAAqB,CACnD,QAAWI,KAAKJ,EAAQ,SACpB,GAAII,EAAE,MAAM,OAASC,GACjB,MAAO,GAGf
,MAAO,EACX,CAPSzH,EAAA8H,GAAA,4BAST,SAASc,GAA2BxB,EAAqB,CACrD,QAAWI,KAAKJ,EAAQ,SACpB,GAAII,EAAE,MAAM,OAASC,GACjB,MAAO,GAGf,MAAO,EACX,CAPSzH,EAAA4I,GAAA,8BAST,SAASxD,GAAiCgC,EAAqB,CAC3D,GAAIwB,GAA2BxB,CAAO,EAClC,MAAO,GAEX,IAAMyB,EAAUC,GAAsB1B,EAAQ,QAAQ,EAGtD,OADI2B,GAAqBF,CAAO,GAAK,CAACG,GAA6BH,CAAO,CAE9E,CARS7I,EAAAoF,GAAA,oCAUT,SAAS0D,GACL1B,EAA6B,CAE7B,IAAM6B,EAAe,IAAI,IACzB,QAAWzB,KAAKJ,EAAS,CACrB,IAAMtH,EAAMoJ,GAAgB1B,EAAG,EAAK,EAChC7E,EAAOsG,EAAa,IAAInJ,CAAG,EAC3B6C,IAAS,SACTA,EAAO,CAAA,EACPsG,EAAa,IAAInJ,EAAK6C,CAAI,GAE9BA,EAAK6E,EAAE,GAAG,EAAI,GAElB,OAAOyB,CACX,CAdSjJ,EAAA8I,GAAA,yBAgBT,SAASC,GACLF,EAA6C,CAE7C,QAAW1I,KAAS,MAAM,KAAK0I,EAAQ,OAAM,CAAE,EAC3C,GAAI,OAAO,KAAK1I,CAAK,EAAE,OAAS,EAC5B,MAAO,GAGf,MAAO,EACX,CATSH,EAAA+I,GAAA,wBAWT,SAASC,GACLH,EAA6C,CAE7C,QAAW1I,KAAS,MAAM,KAAK0I,EAAQ,OAAM,CAAE,EAC3C,GAAI,OAAO,KAAK1I,CAAK,EAAE,SAAW,EAC9B,MAAO,GAGf,MAAO,EACX,CATSH,EAAAgJ,GAAA,gCC5uBF,IAAIG,IACV,SAAUA,EAAa,CACpB,SAASC,EAAGC,EAAO,CACf,OAAO,OAAOA,GAAU,QAC5B,CAFSC,EAAAF,EAAA,MAGTD,EAAY,GAAKC,CACrB,GAAGD,KAAgBA,GAAc,CAAC,EAAE,EAC7B,IAAII,IACV,SAAUA,EAAK,CACZ,SAASH,EAAGC,EAAO,CACf,OAAO,OAAOA,GAAU,QAC5B,CAFSC,EAAAF,EAAA,MAGTG,EAAI,GAAKH,CACb,GAAGG,KAAQA,GAAM,CAAC,EAAE,EACb,IAAIC,IACV,SAAUA,EAAS,CAChBA,EAAQ,UAAY,YACpBA,EAAQ,UAAY,WACpB,SAASJ,EAAGC,EAAO,CACf,OAAO,OAAOA,GAAU,UAAYG,EAAQ,WAAaH,GAASA,GAASG,EAAQ,SACvF,CAFSF,EAAAF,EAAA,MAGTI,EAAQ,GAAKJ,CACjB,GAAGI,KAAYA,GAAU,CAAC,EAAE,EACrB,IAAIC,IACV,SAAUA,EAAU,CACjBA,EAAS,UAAY,EACrBA,EAAS,UAAY,WACrB,SAASL,EAAGC,EAAO,CACf,OAAO,OAAOA,GAAU,UAAYI,EAAS,WAAaJ,GAASA,GAASI,EAAS,SACzF,CAFSH,EAAAF,EAAA,MAGTK,EAAS,GAAKL,CAClB,GAAGK,KAAaA,GAAW,CAAC,EAAE,EAKvB,IAAIC,GACV,SAAUA,EAAU,CAMjB,SAASC,EAAOC,EAAMC,EAAW,CAC7B,OAAID,IAAS,OAAO,YAChBA,EAAOH,GAAS,WAEhBI,IAAc,OAAO,YACrBA,EAAYJ,GAAS,WAElB,CAAE,KAAAG,EAAM,UAAAC,CAAU,CAC7B,CARSP,EAAAK,EAAA,UASTD,EAAS,OAASC,EAIlB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,SAASD,EAAU,IAAI,GAAKC,EAAG,SAASD,EAAU,SAAS,CACxG,CAHSR,EAAAF,EAAA,MAITM,EAAS,GAAKN,CAClB,GAAGM,IAAaA,EAAW,CAAC,EAAE,EAKvB,IAAIM,GACV,SAAUA,EAAO,CACd,SAASL,EAAOM,EAAKC,EAAKC,EAAOC,EAAM,CACnC,GAAIL,EAAG,SAASE,CAAG,GAAKF,EAAG,SAASG,CAAG,GAAKH,EAAG,SAASI,CAAK,GAAKJ,EAAG,SAASK,CAAI,EAC9E,MAAO,CAAE,MAAOV,EAAS,OAAOO,EAAKC,CAAG,EAAG,IAAKR,EAAS,OAAOS,EAAOC,CAAI,CAAE,EAE5E,GAAIV,EAAS,GAAGO,CAAG,GAAKP,EAAS,GAAGQ,CAAG,EACxC,MAAO,CAAE,MAAOD,EAAK,IAAKC,CAAI,EAG9B,MAAM,IAAI,MAAM,8CAA8CD,CAAG,KAAKC,CAAG,KAAKC,CAAK,KAAKC,CAAI,GAAG,CAEvG,CAVSd,EAAAK,EAAA,UAWTK,EAAM,OAASL,EAIf,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,cAAcD,CAAS,GAAKJ,EAAS,GAAGI,EAAU,KAAK,GAAKJ,EAAS,GAAGI,EAAU,GAAG,CACnG,CAHSR,EAAAF,EAAA,MAITY,EAAM,GAAKZ,CACf,GAAGY,IAAUA,EAAQ,CAAC,EAAE,EAKjB,IAAIK,IACV,SAAUA,EAAU,CAMjB,SAASV,EAAOW,EAAKC,EAAO,CACxB,MAAO,CAAE,IAAAD,EAAK,MAAAC,CAAM,CACxB,CAFSjB,EAAAK,EAAA,UAGTU,EAAS,OAASV,EAIlB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,cAAcD,CAAS,GAAKE,EAAM,GAAGF,EAAU,KAAK,IAAMC,EAAG,OAAOD,EAAU,GAAG,GAAKC,EAAG,UAAUD,EAAU,GAAG,EAC9H,CAHSR,EAAAF,EAAA,MAITiB,EAAS,GAAKjB,CAClB,GAAGiB,KAAaA,GAAW,CAAC,EAAE,EAKvB,IAAIG,IACV,SAAUA,EAAc,CAQrB,SAASb,EAAOc,EAAWC,EAAaC,EAAsBC,EAAsB,CAChF,MAAO,CAAE,UAAAH,EAAW,YAAAC,EAAa,qBAAAC,EAAsB,qBAAAC,CAAqB,CAChF,CAFStB,EAAAK,EAAA,UAGTa,EAAa,OAASb,EAItB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,cAAcD,CAAS,GAAKE,EAAM,GAAGF,EAAU,WAAW,GAAKC,EAAG,OAAOD,EAAU,SAAS,GAC/FE,EAAM,GAAGF,EAAU,oBAAoB,IACtCE,EAAM,GAAGF,EAAU,oBAAoB,GAAKC,EAAG,UAAUD,EAAU,oBAAoB,EACnG,CALSR,EAAAF,EAAA,MAMToB,EAAa,GAAKpB,CACtB,GAAGoB,KAAiBA,GAAe,CAAC,EAAE,EAK/B,IAAIK,IACV,SAAUA,EAAO,CAId,SAASlB,EAAOmB,EAAKC,EAAOC,EAAMC,EAAO,CACrC,MAAO
,CACH,IAAAH,EACA,MAAAC,EACA,KAAAC,EACA,MAAAC,CACJ,CACJ,CAPS3B,EAAAK,EAAA,UAQTkB,EAAM,OAASlB,EAIf,SAASP,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,YAAYD,EAAU,IAAK,EAAG,CAAC,GACjEC,EAAG,YAAYD,EAAU,MAAO,EAAG,CAAC,GACpCC,EAAG,YAAYD,EAAU,KAAM,EAAG,CAAC,GACnCC,EAAG,YAAYD,EAAU,MAAO,EAAG,CAAC,CAC/C,CANSR,EAAAF,EAAA,MAOTyB,EAAM,GAAKzB,CACf,GAAGyB,KAAUA,GAAQ,CAAC,EAAE,EAKjB,IAAIK,IACV,SAAUA,EAAkB,CAIzB,SAASvB,EAAOY,EAAOY,EAAO,CAC1B,MAAO,CACH,MAAAZ,EACA,MAAAY,CACJ,CACJ,CALS7B,EAAAK,EAAA,UAMTuB,EAAiB,OAASvB,EAI1B,SAASP,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKE,EAAM,GAAGF,EAAU,KAAK,GAAKe,GAAM,GAAGf,EAAU,KAAK,CAC/F,CAHSR,EAAAF,EAAA,MAIT8B,EAAiB,GAAK9B,CAC1B,GAAG8B,KAAqBA,GAAmB,CAAC,EAAE,EAKvC,IAAIE,IACV,SAAUA,EAAmB,CAI1B,SAASzB,EAAO0B,EAAOC,EAAUC,EAAqB,CAClD,MAAO,CACH,MAAAF,EACA,SAAAC,EACA,oBAAAC,CACJ,CACJ,CANSjC,EAAAK,EAAA,UAOTyB,EAAkB,OAASzB,EAI3B,SAASP,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,OAAOD,EAAU,KAAK,IACvDC,EAAG,UAAUD,EAAU,QAAQ,GAAK0B,GAAS,GAAG1B,CAAS,KACzDC,EAAG,UAAUD,EAAU,mBAAmB,GAAKC,EAAG,WAAWD,EAAU,oBAAqB0B,GAAS,EAAE,EACnH,CALSlC,EAAAF,EAAA,MAMTgC,EAAkB,GAAKhC,CAC3B,GAAGgC,KAAsBA,GAAoB,CAAC,EAAE,EAIzC,IAAIK,IACV,SAAUA,EAAkB,CAIzBA,EAAiB,QAAU,UAI3BA,EAAiB,QAAU,UAI3BA,EAAiB,OAAS,QAC9B,GAAGA,KAAqBA,GAAmB,CAAC,EAAE,EAKvC,IAAIC,IACV,SAAUA,EAAc,CAIrB,SAAS/B,EAAOgC,EAAWC,EAASC,EAAgBC,EAAcC,EAAMC,EAAe,CACnF,IAAMC,EAAS,CACX,UAAAN,EACA,QAAAC,CACJ,EACA,OAAI7B,EAAG,QAAQ8B,CAAc,IACzBI,EAAO,eAAiBJ,GAExB9B,EAAG,QAAQ+B,CAAY,IACvBG,EAAO,aAAeH,GAEtB/B,EAAG,QAAQgC,CAAI,IACfE,EAAO,KAAOF,GAEdhC,EAAG,QAAQiC,CAAa,IACxBC,EAAO,cAAgBD,GAEpBC,CACX,CAlBS3C,EAAAK,EAAA,UAmBT+B,EAAa,OAAS/B,EAItB,SAASP,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,SAASD,EAAU,SAAS,GAAKC,EAAG,SAASD,EAAU,SAAS,IACjGC,EAAG,UAAUD,EAAU,cAAc,GAAKC,EAAG,SAASD,EAAU,cAAc,KAC9EC,EAAG,UAAUD,EAAU,YAAY,GAAKC,EAAG,SAASD,EAAU,YAAY,KAC1EC,EAAG,UAAUD,EAAU,IAAI,GAAKC,EAAG,OAAOD,EAAU,IAAI,EACpE,CANSR,EAAAF,EAAA,MAOTsC,EAAa,GAAKtC,CACtB,GAAGsC,KAAiBA,GAAe,CAAC,EAAE,EAK/B,IAAIQ,IACV,SAAUA,EAA8B,CAIrC,SAASvC,EAAOwC,EAAUC,EAAS,CAC/B,MAAO,CACH,SAAAD,EACA,QAAAC,CACJ,CACJ,CALS9C,EAAAK,EAAA,UAMTuC,EAA6B,OAASvC,EAItC,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,QAAQD,CAAS,GAAKO,GAAS,GAAGP,EAAU,QAAQ,GAAKC,EAAG,OAAOD,EAAU,OAAO,CAClG,CAHSR,EAAAF,EAAA,MAIT8C,EAA6B,GAAK9C,CACtC,GAAG8C,KAAiCA,GAA+B,CAAC,EAAE,EAI/D,IAAIG,IACV,SAAUA,EAAoB,CAI3BA,EAAmB,MAAQ,EAI3BA,EAAmB,QAAU,EAI7BA,EAAmB,YAAc,EAIjCA,EAAmB,KAAO,CAC9B,GAAGA,KAAuBA,GAAqB,CAAC,EAAE,EAM3C,IAAIC,IACV,SAAUA,EAAe,CAOtBA,EAAc,YAAc,EAM5BA,EAAc,WAAa,CAC/B,GAAGA,KAAkBA,GAAgB,CAAC,EAAE,EAMjC,IAAIC,IACV,SAAUA,EAAiB,CACxB,SAASnD,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,OAAOD,EAAU,IAAI,CAClE,CAHSR,EAAAF,EAAA,MAITmD,EAAgB,GAAKnD,CACzB,GAAGmD,KAAoBA,GAAkB,CAAC,EAAE,EAKrC,IAAIC,IACV,SAAUA,EAAY,CAInB,SAAS7C,EAAOY,EAAO6B,EAASK,EAAUC,EAAMC,EAAQC,EAAoB,CACxE,IAAIX,EAAS,CAAE,MAAA1B,EAAO,QAAA6B,CAAQ,EAC9B,OAAIrC,EAAG,QAAQ0C,CAAQ,IACnBR,EAAO,SAAWQ,GAElB1C,EAAG,QAAQ2C,CAAI,IACfT,EAAO,KAAOS,GAEd3C,EAAG,QAAQ4C,CAAM,IACjBV,EAAO,OAASU,GAEhB5C,EAAG,QAAQ6C,CAAkB,IAC7BX,EAAO,mBAAqBW,GAEzBX,CACX,CAfS3C,EAAAK,EAAA,UAgBT6C,EAAW,OAAS7C,EAIpB,SAASP,EAAGC,EAAO,CACf,IAAIwD,EACJ,IAAI/C,EAAYT,EAChB,OAAOU,EAAG,QAAQD,CAAS,GACpBE,EAAM,GAAGF,EAAU,KAAK,GACxBC,EAAG,OAAOD,EAAU,OAAO,IAC1BC,EAAG,OAAOD,EAAU,QAAQ,GAAKC,EAAG,UAAUD,EAAU,QAAQ,KAChEC,EAAG,QAAQD,EAAU,IAAI,GAAKC,EAAG,OAAOD,EAAU,IAAI,GAAKC,EAAG,UAAUD,EAAU,IAAI,KACtFC,EAAG,UAAUD,EAAU,eAAe,GAAMC,EAAG,QAAQ8C,EAAK/C,EAAU,mBAAqB,MAAQ+C,IAAO,OAAS,OAASA,EAAG,IAAI,KACnI9C,EAAG,OAAOD,EAAU,M
AAM,GAAKC,EAAG,UAAUD,EAAU,MAAM,KAC5DC,EAAG,UAAUD,EAAU,kBAAkB,GAAKC,EAAG,WAAWD,EAAU,mBAAoBoC,GAA6B,EAAE,EACrI,CAXS5C,EAAAF,EAAA,MAYToD,EAAW,GAAKpD,CACpB,GAAGoD,KAAeA,GAAa,CAAC,EAAE,EAK3B,IAAIM,IACV,SAAUA,EAAS,CAIhB,SAASnD,EAAOoD,EAAOC,KAAYC,EAAM,CACrC,IAAIhB,EAAS,CAAE,MAAAc,EAAO,QAAAC,CAAQ,EAC9B,OAAIjD,EAAG,QAAQkD,CAAI,GAAKA,EAAK,OAAS,IAClChB,EAAO,UAAYgB,GAEhBhB,CACX,CANS3C,EAAAK,EAAA,UAOTmD,EAAQ,OAASnD,EAIjB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,KAAK,GAAKC,EAAG,OAAOD,EAAU,OAAO,CAC7F,CAHSR,EAAAF,EAAA,MAIT0D,EAAQ,GAAK1D,CACjB,GAAG0D,KAAYA,GAAU,CAAC,EAAE,EAKrB,IAAItB,IACV,SAAUA,EAAU,CAMjB,SAAS0B,EAAQ3C,EAAO4C,EAAS,CAC7B,MAAO,CAAE,MAAA5C,EAAO,QAAA4C,CAAQ,CAC5B,CAFS7D,EAAA4D,EAAA,WAGT1B,EAAS,QAAU0B,EAMnB,SAASE,EAAOC,EAAUF,EAAS,CAC/B,MAAO,CAAE,MAAO,CAAE,MAAOE,EAAU,IAAKA,CAAS,EAAG,QAAAF,CAAQ,CAChE,CAFS7D,EAAA8D,EAAA,UAGT5B,EAAS,OAAS4B,EAKlB,SAASE,EAAI/C,EAAO,CAChB,MAAO,CAAE,MAAAA,EAAO,QAAS,EAAG,CAChC,CAFSjB,EAAAgE,EAAA,OAGT9B,EAAS,IAAM8B,EACf,SAASlE,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAC1BC,EAAG,OAAOD,EAAU,OAAO,GAC3BE,EAAM,GAAGF,EAAU,KAAK,CACnC,CALSR,EAAAF,EAAA,MAMToC,EAAS,GAAKpC,CAClB,GAAGoC,KAAaA,GAAW,CAAC,EAAE,EACvB,IAAI+B,IACV,SAAUA,EAAkB,CACzB,SAAS5D,EAAO0B,EAAOmC,EAAmBC,EAAa,CACnD,IAAMxB,EAAS,CAAE,MAAAZ,CAAM,EACvB,OAAImC,IAAsB,SACtBvB,EAAO,kBAAoBuB,GAE3BC,IAAgB,SAChBxB,EAAO,YAAcwB,GAElBxB,CACX,CATS3C,EAAAK,EAAA,UAUT4D,EAAiB,OAAS5D,EAC1B,SAASP,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,cAAcD,CAAS,GAAKC,EAAG,OAAOD,EAAU,KAAK,IAC1DC,EAAG,QAAQD,EAAU,iBAAiB,GAAKA,EAAU,oBAAsB,UAC3EC,EAAG,OAAOD,EAAU,WAAW,GAAKA,EAAU,cAAgB,OACvE,CALSR,EAAAF,EAAA,MAMTmE,EAAiB,GAAKnE,CAC1B,GAAGmE,KAAqBA,GAAmB,CAAC,EAAE,EACvC,IAAIG,IACV,SAAUA,EAA4B,CACnC,SAAStE,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOU,EAAG,OAAOD,CAAS,CAC9B,CAHSR,EAAAF,EAAA,MAITsE,EAA2B,GAAKtE,CACpC,GAAGsE,KAA+BA,GAA6B,CAAC,EAAE,EAC3D,IAAIC,IACV,SAAUA,EAAmB,CAQ1B,SAAST,EAAQ3C,EAAO4C,EAASS,EAAY,CACzC,MAAO,CAAE,MAAArD,EAAO,QAAA4C,EAAS,aAAcS,CAAW,CACtD,CAFStE,EAAA4D,EAAA,WAGTS,EAAkB,QAAUT,EAQ5B,SAASE,EAAOC,EAAUF,EAASS,EAAY,CAC3C,MAAO,CAAE,MAAO,CAAE,MAAOP,EAAU,IAAKA,CAAS,EAAG,QAAAF,EAAS,aAAcS,CAAW,CAC1F,CAFStE,EAAA8D,EAAA,UAGTO,EAAkB,OAASP,EAO3B,SAASE,EAAI/C,EAAOqD,EAAY,CAC5B,MAAO,CAAE,MAAArD,EAAO,QAAS,GAAI,aAAcqD,CAAW,CAC1D,CAFStE,EAAAgE,EAAA,OAGTK,EAAkB,IAAML,EACxB,SAASlE,EAAGC,EAAO,CACf,IAAMS,EAAYT,EAClB,OAAOmC,GAAS,GAAG1B,CAAS,IAAMyD,GAAiB,GAAGzD,EAAU,YAAY,GAAK4D,GAA2B,GAAG5D,EAAU,YAAY,EACzI,CAHSR,EAAAF,EAAA,MAITuE,EAAkB,GAAKvE,CAC3B,GAAGuE,KAAsBA,GAAoB,CAAC,EAAE,EAKzC,IAAIE,IACV,SAAUA,EAAkB,CAIzB,SAASlE,EAAOmE,EAAcC,EAAO,CACjC,MAAO,CAAE,aAAAD,EAAc,MAAAC,CAAM,CACjC,CAFSzE,EAAAK,EAAA,UAGTkE,EAAiB,OAASlE,EAC1B,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOU,EAAG,QAAQD,CAAS,GACpBkE,GAAwC,GAAGlE,EAAU,YAAY,GACjE,MAAM,QAAQA,EAAU,KAAK,CACxC,CALSR,EAAAF,EAAA,MAMTyE,EAAiB,GAAKzE,CAC1B,GAAGyE,KAAqBA,GAAmB,CAAC,EAAE,EACvC,IAAII,IACV,SAAUA,EAAY,CACnB,SAAStE,EAAOW,EAAK4D,EAASN,EAAY,CACtC,IAAI3B,EAAS,CACT,KAAM,SACN,IAAA3B,CACJ,EACA,OAAI4D,IAAY,SAAcA,EAAQ,YAAc,QAAaA,EAAQ,iBAAmB,UACxFjC,EAAO,QAAUiC,GAEjBN,IAAe,SACf3B,EAAO,aAAe2B,GAEnB3B,CACX,CAZS3C,EAAAK,EAAA,UAaTsE,EAAW,OAAStE,EACpB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOS,GAAaA,EAAU,OAAS,UAAYC,EAAG,OAAOD,EAAU,GAAG,IAAMA,EAAU,UAAY,SAChGA,EAAU,QAAQ,YAAc,QAAaC,EAAG,QAAQD,EAAU,QAAQ,SAAS,KAAOA,EAAU,QAAQ,iBAAmB,QAAaC,EAAG,QAAQD,EAAU,QAAQ,cAAc,MAASA,EAAU,eAAiB,QAAa4D,GAA2B,GAAG5D,EAAU,YAAY,EACtS,CAJSR,EAAAF,EAAA,MAKT6E,EAAW,GAAK7E,CACpB,GAAG6E,KAAeA,GAAa,CAAC,EAAE,EAC3B,IAAIE,IACV,SAAUA,EAAY,CACnB,SAASxE,EAAOyE,EAAQC,EAAQH,EAASN,EAAY,CACjD,IAAI3B,EAAS,CACT
,KAAM,SACN,OAAAmC,EACA,OAAAC,CACJ,EACA,OAAIH,IAAY,SAAcA,EAAQ,YAAc,QAAaA,EAAQ,iBAAmB,UACxFjC,EAAO,QAAUiC,GAEjBN,IAAe,SACf3B,EAAO,aAAe2B,GAEnB3B,CACX,CAbS3C,EAAAK,EAAA,UAcTwE,EAAW,OAASxE,EACpB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOS,GAAaA,EAAU,OAAS,UAAYC,EAAG,OAAOD,EAAU,MAAM,GAAKC,EAAG,OAAOD,EAAU,MAAM,IAAMA,EAAU,UAAY,SAClIA,EAAU,QAAQ,YAAc,QAAaC,EAAG,QAAQD,EAAU,QAAQ,SAAS,KAAOA,EAAU,QAAQ,iBAAmB,QAAaC,EAAG,QAAQD,EAAU,QAAQ,cAAc,MAASA,EAAU,eAAiB,QAAa4D,GAA2B,GAAG5D,EAAU,YAAY,EACtS,CAJSR,EAAAF,EAAA,MAKT+E,EAAW,GAAK/E,CACpB,GAAG+E,KAAeA,GAAa,CAAC,EAAE,EAC3B,IAAIG,IACV,SAAUA,EAAY,CACnB,SAAS3E,EAAOW,EAAK4D,EAASN,EAAY,CACtC,IAAI3B,EAAS,CACT,KAAM,SACN,IAAA3B,CACJ,EACA,OAAI4D,IAAY,SAAcA,EAAQ,YAAc,QAAaA,EAAQ,oBAAsB,UAC3FjC,EAAO,QAAUiC,GAEjBN,IAAe,SACf3B,EAAO,aAAe2B,GAEnB3B,CACX,CAZS3C,EAAAK,EAAA,UAaT2E,EAAW,OAAS3E,EACpB,SAASP,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOS,GAAaA,EAAU,OAAS,UAAYC,EAAG,OAAOD,EAAU,GAAG,IAAMA,EAAU,UAAY,SAChGA,EAAU,QAAQ,YAAc,QAAaC,EAAG,QAAQD,EAAU,QAAQ,SAAS,KAAOA,EAAU,QAAQ,oBAAsB,QAAaC,EAAG,QAAQD,EAAU,QAAQ,iBAAiB,MAASA,EAAU,eAAiB,QAAa4D,GAA2B,GAAG5D,EAAU,YAAY,EAC5S,CAJSR,EAAAF,EAAA,MAKTkF,EAAW,GAAKlF,CACpB,GAAGkF,KAAeA,GAAa,CAAC,EAAE,EAC3B,IAAIC,IACV,SAAUA,EAAe,CACtB,SAASnF,EAAGC,EAAO,CACf,IAAIS,EAAYT,EAChB,OAAOS,IACFA,EAAU,UAAY,QAAaA,EAAU,kBAAoB,UACjEA,EAAU,kBAAoB,QAAaA,EAAU,gBAAgB,MAAO0E,GACrEzE,EAAG,OAAOyE,EAAO,IAAI,EACdP,GAAW,GAAGO,CAAM,GAAKL,GAAW,GAAGK,CAAM,GAAKF,GAAW,GAAGE,CAAM,EAGtEX,GAAiB,GAAGW,CAAM,CAExC,EACT,CAZSlF,EAAAF,EAAA,MAaTmF,EAAc,GAAKnF,CACvB,GAAGmF,KAAkBA,GAAgB,CAAC,EAAE,EAuSjC,IAAIE,IACV,SAAUA,EAAwB,CAK/B,SAASC,EAAOC,EAAK,CACjB,MAAO,CAAE,IAAAA,CAAI,CACjB,CAFSC,EAAAF,EAAA,UAGTD,EAAuB,OAASC,EAIhC,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,GAAG,CAC3D,CAHSH,EAAAC,EAAA,MAITJ,EAAuB,GAAKI,CAChC,GAAGJ,KAA2BA,GAAyB,CAAC,EAAE,EAKnD,IAAIQ,IACV,SAAUA,EAAiC,CAMxC,SAASP,EAAOC,EAAKO,EAAS,CAC1B,MAAO,CAAE,IAAAP,EAAK,QAAAO,CAAQ,CAC1B,CAFSN,EAAAF,EAAA,UAGTO,EAAgC,OAASP,EAIzC,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,GAAG,GAAKC,EAAG,QAAQD,EAAU,OAAO,CAC5F,CAHSH,EAAAC,EAAA,MAITI,EAAgC,GAAKJ,CACzC,GAAGI,KAAoCA,GAAkC,CAAC,EAAE,EAKrE,IAAIE,IACV,SAAUA,EAAyC,CAMhD,SAAST,EAAOC,EAAKO,EAAS,CAC1B,MAAO,CAAE,IAAAP,EAAK,QAAAO,CAAQ,CAC1B,CAFSN,EAAAF,EAAA,UAGTS,EAAwC,OAAST,EAIjD,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,GAAG,IAAMA,EAAU,UAAY,MAAQC,EAAG,QAAQD,EAAU,OAAO,EAC3H,CAHSH,EAAAC,EAAA,MAITM,EAAwC,GAAKN,CACjD,GAAGM,KAA4CA,GAA0C,CAAC,EAAE,EAKrF,IAAIC,IACV,SAAUA,EAAkB,CAQzB,SAASV,EAAOC,EAAKU,EAAYH,EAASI,EAAM,CAC5C,MAAO,CAAE,IAAAX,EAAK,WAAAU,EAAY,QAAAH,EAAS,KAAAI,CAAK,CAC5C,CAFSV,EAAAF,EAAA,UAGTU,EAAiB,OAASV,EAI1B,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,GAAG,GAAKC,EAAG,OAAOD,EAAU,UAAU,GAAKC,EAAG,QAAQD,EAAU,OAAO,GAAKC,EAAG,OAAOD,EAAU,IAAI,CAC5J,CAHSH,EAAAC,EAAA,MAITO,EAAiB,GAAKP,CAC1B,GAAGO,KAAqBA,GAAmB,CAAC,EAAE,EAQvC,IAAIG,IACV,SAAUA,EAAY,CAInBA,EAAW,UAAY,YAIvBA,EAAW,SAAW,WAItB,SAASV,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOC,IAAcQ,EAAW,WAAaR,IAAcQ,EAAW,QAC1E,CAHSX,EAAAC,EAAA,MAITU,EAAW,GAAKV,CACpB,GAAGU,KAAeA,GAAa,CAAC,EAAE,EAC3B,IAAIC,IACV,SAAUA,EAAe,CAItB,SAASX,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,cAAcF,CAAK,GAAKS,GAAW,GAAGR,EAAU,IAAI,GAAKC,EAAG,OAAOD,EAAU,KAAK,CAChG,CAHSH,EAAAC,EAAA,MAITW,EAAc,GAAKX,CACvB,GAAGW,KAAkBA,GAAgB,CAAC,EAAE,EAIjC,IAAIC,IACV,SAAUA,EAAoB,CAC3BA,EAAmB,KAAO,EAC1BA,EAAmB,OAAS,EAC5BA,EAAmB,SAAW,EAC9BA,EAAmB,YAAc,EACjCA,EAAmB,MAAQ,EAC3BA,EAAmB,SAAW,EAC9BA,EAAmB,MAAQ,EAC3BA,EAAmB,UAAY,EAC/BA,EAAmB,OAA
S,EAC5BA,EAAmB,SAAW,GAC9BA,EAAmB,KAAO,GAC1BA,EAAmB,MAAQ,GAC3BA,EAAmB,KAAO,GAC1BA,EAAmB,QAAU,GAC7BA,EAAmB,QAAU,GAC7BA,EAAmB,MAAQ,GAC3BA,EAAmB,KAAO,GAC1BA,EAAmB,UAAY,GAC/BA,EAAmB,OAAS,GAC5BA,EAAmB,WAAa,GAChCA,EAAmB,SAAW,GAC9BA,EAAmB,OAAS,GAC5BA,EAAmB,MAAQ,GAC3BA,EAAmB,SAAW,GAC9BA,EAAmB,cAAgB,EACvC,GAAGA,KAAuBA,GAAqB,CAAC,EAAE,EAK3C,IAAIC,IACV,SAAUA,EAAkB,CAIzBA,EAAiB,UAAY,EAW7BA,EAAiB,QAAU,CAC/B,GAAGA,KAAqBA,GAAmB,CAAC,EAAE,EAOvC,IAAIC,IACV,SAAUA,EAAmB,CAI1BA,EAAkB,WAAa,CACnC,GAAGA,KAAsBA,GAAoB,CAAC,EAAE,EAMzC,IAAIC,IACV,SAAUA,EAAmB,CAI1B,SAASlB,EAAOmB,EAASC,EAAQC,EAAS,CACtC,MAAO,CAAE,QAAAF,EAAS,OAAAC,EAAQ,QAAAC,CAAQ,CACtC,CAFSnB,EAAAF,EAAA,UAGTkB,EAAkB,OAASlB,EAI3B,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOC,GAAaC,EAAG,OAAOD,EAAU,OAAO,GAAKiB,EAAM,GAAGjB,EAAU,MAAM,GAAKiB,EAAM,GAAGjB,EAAU,OAAO,CAChH,CAHSH,EAAAC,EAAA,MAITe,EAAkB,GAAKf,CAC3B,GAAGe,KAAsBA,GAAoB,CAAC,EAAE,EAOzC,IAAIK,IACV,SAAUA,EAAgB,CAQvBA,EAAe,KAAO,EAUtBA,EAAe,kBAAoB,CACvC,GAAGA,KAAmBA,GAAiB,CAAC,EAAE,EACnC,IAAIC,IACV,SAAUA,EAA4B,CACnC,SAASrB,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOC,IAAcC,EAAG,OAAOD,EAAU,MAAM,GAAKA,EAAU,SAAW,UACpEC,EAAG,OAAOD,EAAU,WAAW,GAAKA,EAAU,cAAgB,OACvE,CAJSH,EAAAC,EAAA,MAKTqB,EAA2B,GAAKrB,CACpC,GAAGqB,KAA+BA,GAA6B,CAAC,EAAE,EAK3D,IAAIC,IACV,SAAUA,EAAgB,CAKvB,SAASzB,EAAO0B,EAAO,CACnB,MAAO,CAAE,MAAAA,CAAM,CACnB,CAFSxB,EAAAF,EAAA,UAGTyB,EAAe,OAASzB,CAC5B,GAAGyB,KAAmBA,GAAiB,CAAC,EAAE,EAKnC,IAAIE,IACV,SAAUA,EAAgB,CAOvB,SAAS3B,EAAO4B,EAAOC,EAAc,CACjC,MAAO,CAAE,MAAOD,GAAgB,CAAC,EAAG,aAAc,CAAC,CAACC,CAAa,CACrE,CAFS3B,EAAAF,EAAA,UAGT2B,EAAe,OAAS3B,CAC5B,GAAG2B,KAAmBA,GAAiB,CAAC,EAAE,EACnC,IAAIG,IACV,SAAUA,EAAc,CAMrB,SAASC,EAAcC,EAAW,CAC9B,OAAOA,EAAU,QAAQ,wBAAyB,MAAM,CAC5D,CAFS9B,EAAA6B,EAAA,iBAGTD,EAAa,cAAgBC,EAI7B,SAAS5B,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,OAAOD,CAAS,GAAMC,EAAG,cAAcD,CAAS,GAAKC,EAAG,OAAOD,EAAU,QAAQ,GAAKC,EAAG,OAAOD,EAAU,KAAK,CAC7H,CAHSH,EAAAC,EAAA,MAIT2B,EAAa,GAAK3B,CACtB,GAAG2B,KAAiBA,GAAe,CAAC,EAAE,EAC/B,IAAIG,IACV,SAAUA,EAAO,CAId,SAAS9B,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,MAAO,CAAC,CAACC,GAAaC,EAAG,cAAcD,CAAS,IAAMS,GAAc,GAAGT,EAAU,QAAQ,GACrFyB,GAAa,GAAGzB,EAAU,QAAQ,GAClCC,EAAG,WAAWD,EAAU,SAAUyB,GAAa,EAAE,KAAO1B,EAAM,QAAU,QAAakB,EAAM,GAAGlB,EAAM,KAAK,EACjH,CALSF,EAAAC,EAAA,MAMT8B,EAAM,GAAK9B,CACf,GAAG8B,KAAUA,GAAQ,CAAC,EAAE,EAKjB,IAAIC,IACV,SAAUA,EAAsB,CAO7B,SAASlC,EAAO0B,EAAOS,EAAe,CAClC,OAAOA,EAAgB,CAAE,MAAAT,EAAO,cAAAS,CAAc,EAAI,CAAE,MAAAT,CAAM,CAC9D,CAFSxB,EAAAF,EAAA,UAGTkC,EAAqB,OAASlC,CAClC,GAAGkC,KAAyBA,GAAuB,CAAC,EAAE,EAK/C,IAAIE,IACV,SAAUA,EAAsB,CAC7B,SAASpC,EAAO0B,EAAOS,KAAkBE,EAAY,CACjD,IAAIC,EAAS,CAAE,MAAAZ,CAAM,EACrB,OAAIpB,EAAG,QAAQ6B,CAAa,IACxBG,EAAO,cAAgBH,GAEvB7B,EAAG,QAAQ+B,CAAU,EACrBC,EAAO,WAAaD,EAGpBC,EAAO,WAAa,CAAC,EAElBA,CACX,CAZSpC,EAAAF,EAAA,UAaToC,EAAqB,OAASpC,CAClC,GAAGoC,KAAyBA,GAAuB,CAAC,EAAE,EAI/C,IAAIG,IACV,SAAUA,EAAuB,CAI9BA,EAAsB,KAAO,EAI7BA,EAAsB,KAAO,EAI7BA,EAAsB,MAAQ,CAClC,GAAGA,KAA0BA,GAAwB,CAAC,EAAE,EAKjD,IAAIC,IACV,SAAUA,EAAmB,CAM1B,SAASxC,EAAOyC,EAAOC,EAAM,CACzB,IAAIJ,EAAS,CAAE,MAAAG,CAAM,EACrB,OAAInC,EAAG,OAAOoC,CAAI,IACdJ,EAAO,KAAOI,GAEXJ,CACX,CANSpC,EAAAF,EAAA,UAOTwC,EAAkB,OAASxC,CAC/B,GAAGwC,KAAsBA,GAAoB,CAAC,EAAE,EAIzC,IAAIG,IACV,SAAUA,EAAY,CACnBA,EAAW,KAAO,EAClBA,EAAW,OAAS,EACpBA,EAAW,UAAY,EACvBA,EAAW,QAAU,EACrBA,EAAW,MAAQ,EACnBA,EAAW,OAAS,EACpBA,EAAW,SAAW,EACtBA,EAAW,MAAQ,EACnBA,EAAW,YAAc,EACzBA,EAAW,KAAO,GAClBA,EAAW,UAAY,GACvBA,EAAW,SAAW,GACtBA,EAAW,SAAW,GACtBA,EAAW,SAAW,GACtBA,EAAW,OAAS,GACpBA,EAAW,OAAS,GACpBA,EAAW,QAAU,GACrBA,EAAW,MAAQ,GACnBA,EAAW,OAAS,GACpBA,EAAW,IAAM,GACjBA,EAAW,KAAO,GAClBA,EAAW,WAAa,GACxBA,EAAW,OAAS,GACpBA,EAAW,MAAQ,GA
CnBA,EAAW,SAAW,GACtBA,EAAW,cAAgB,EAC/B,GAAGA,KAAeA,GAAa,CAAC,EAAE,EAM3B,IAAIC,IACV,SAAUA,EAAW,CAIlBA,EAAU,WAAa,CAC3B,GAAGA,KAAcA,GAAY,CAAC,EAAE,EACzB,IAAIC,IACV,SAAUA,EAAmB,CAU1B,SAAS7C,EAAO8C,EAAMJ,EAAMD,EAAOxC,EAAK8C,EAAe,CACnD,IAAIT,EAAS,CACT,KAAAQ,EACA,KAAAJ,EACA,SAAU,CAAE,IAAAzC,EAAK,MAAAwC,CAAM,CAC3B,EACA,OAAIM,IACAT,EAAO,cAAgBS,GAEpBT,CACX,CAVSpC,EAAAF,EAAA,UAWT6C,EAAkB,OAAS7C,CAC/B,GAAG6C,KAAsBA,GAAoB,CAAC,EAAE,EACzC,IAAIG,IACV,SAAUA,EAAiB,CAUxB,SAAShD,EAAO8C,EAAMJ,EAAMzC,EAAKwC,EAAO,CACpC,OAAOA,IAAU,OACX,CAAE,KAAAK,EAAM,KAAAJ,EAAM,SAAU,CAAE,IAAAzC,EAAK,MAAAwC,CAAM,CAAE,EACvC,CAAE,KAAAK,EAAM,KAAAJ,EAAM,SAAU,CAAE,IAAAzC,CAAI,CAAE,CAC1C,CAJSC,EAAAF,EAAA,UAKTgD,EAAgB,OAAShD,CAC7B,GAAGgD,KAAoBA,GAAkB,CAAC,EAAE,EACrC,IAAIC,IACV,SAAUA,EAAgB,CAWvB,SAASjD,EAAO8C,EAAMI,EAAQR,EAAMD,EAAOU,EAAgBC,EAAU,CACjE,IAAId,EAAS,CACT,KAAAQ,EACA,OAAAI,EACA,KAAAR,EACA,MAAAD,EACA,eAAAU,CACJ,EACA,OAAIC,IAAa,SACbd,EAAO,SAAWc,GAEfd,CACX,CAZSpC,EAAAF,EAAA,UAaTiD,EAAe,OAASjD,EAIxB,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOC,GACHC,EAAG,OAAOD,EAAU,IAAI,GAAKC,EAAG,OAAOD,EAAU,IAAI,GACrDiB,EAAM,GAAGjB,EAAU,KAAK,GAAKiB,EAAM,GAAGjB,EAAU,cAAc,IAC7DA,EAAU,SAAW,QAAaC,EAAG,OAAOD,EAAU,MAAM,KAC5DA,EAAU,aAAe,QAAaC,EAAG,QAAQD,EAAU,UAAU,KACrEA,EAAU,WAAa,QAAa,MAAM,QAAQA,EAAU,QAAQ,KACpEA,EAAU,OAAS,QAAa,MAAM,QAAQA,EAAU,IAAI,EACrE,CATSH,EAAAC,EAAA,MAUT8C,EAAe,GAAK9C,CACxB,GAAG8C,KAAmBA,GAAiB,CAAC,EAAE,EAInC,IAAII,IACV,SAAUA,EAAgB,CAIvBA,EAAe,MAAQ,GAIvBA,EAAe,SAAW,WAI1BA,EAAe,SAAW,WAY1BA,EAAe,gBAAkB,mBAWjCA,EAAe,eAAiB,kBAahCA,EAAe,gBAAkB,mBAMjCA,EAAe,OAAS,SAIxBA,EAAe,sBAAwB,yBASvCA,EAAe,aAAe,eAClC,GAAGA,KAAmBA,GAAiB,CAAC,EAAE,EAMnC,IAAIC,IACV,SAAUA,EAAuB,CAI9BA,EAAsB,QAAU,EAOhCA,EAAsB,UAAY,CACtC,GAAGA,KAA0BA,GAAwB,CAAC,EAAE,EAKjD,IAAIC,IACV,SAAUA,EAAmB,CAI1B,SAASvD,EAAOwD,EAAaC,EAAMC,EAAa,CAC5C,IAAIpB,EAAS,CAAE,YAAAkB,CAAY,EAC3B,OAA0BC,GAAS,OAC/BnB,EAAO,KAAOmB,GAEeC,GAAgB,OAC7CpB,EAAO,YAAcoB,GAElBpB,CACX,CATSpC,EAAAF,EAAA,UAUTuD,EAAkB,OAASvD,EAI3B,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,WAAWD,EAAU,YAAasD,GAAW,EAAE,IAC1EtD,EAAU,OAAS,QAAaC,EAAG,WAAWD,EAAU,KAAMC,EAAG,MAAM,KACvED,EAAU,cAAgB,QAAaA,EAAU,cAAgBiD,GAAsB,SAAWjD,EAAU,cAAgBiD,GAAsB,UAC9J,CALSpD,EAAAC,EAAA,MAMToD,EAAkB,GAAKpD,CAC3B,GAAGoD,KAAsBA,GAAoB,CAAC,EAAE,EACzC,IAAIK,IACV,SAAUA,EAAY,CACnB,SAAS5D,EAAO6D,EAAOC,EAAqBpB,EAAM,CAC9C,IAAIJ,EAAS,CAAE,MAAAuB,CAAM,EACjBE,EAAY,GAChB,OAAI,OAAOD,GAAwB,UAC/BC,EAAY,GACZzB,EAAO,KAAOwB,GAETE,GAAQ,GAAGF,CAAmB,EACnCxB,EAAO,QAAUwB,EAGjBxB,EAAO,KAAOwB,EAEdC,GAAarB,IAAS,SACtBJ,EAAO,KAAOI,GAEXJ,CACX,CAjBSpC,EAAAF,EAAA,UAkBT4D,EAAW,OAAS5D,EACpB,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOC,GAAaC,EAAG,OAAOD,EAAU,KAAK,IACxCA,EAAU,cAAgB,QAAaC,EAAG,WAAWD,EAAU,YAAasD,GAAW,EAAE,KACzFtD,EAAU,OAAS,QAAaC,EAAG,OAAOD,EAAU,IAAI,KACxDA,EAAU,OAAS,QAAaA,EAAU,UAAY,UACtDA,EAAU,UAAY,QAAa2D,GAAQ,GAAG3D,EAAU,OAAO,KAC/DA,EAAU,cAAgB,QAAaC,EAAG,QAAQD,EAAU,WAAW,KACvEA,EAAU,OAAS,QAAa4D,GAAc,GAAG5D,EAAU,IAAI,EACxE,CATSH,EAAAC,EAAA,MAUTyD,EAAW,GAAKzD,CACpB,GAAGyD,KAAeA,GAAa,CAAC,EAAE,EAK3B,IAAIM,IACV,SAAUA,EAAU,CAIjB,SAASlE,EAAOyC,EAAO0B,EAAM,CACzB,IAAI7B,EAAS,CAAE,MAAAG,CAAM,EACrB,OAAInC,EAAG,QAAQ6D,CAAI,IACf7B,EAAO,KAAO6B,GAEX7B,CACX,CANSpC,EAAAF,EAAA,UAOTkE,EAAS,OAASlE,EAIlB,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKiB,EAAM,GAAGjB,EAAU,KAAK,IAAMC,EAAG,UAAUD,EAAU,OAAO,GAAK2D,GAAQ,GAAG3D,EAAU,OAAO,EACjI,CAHSH,EAAAC,EAAA,MAIT+D,EAAS,GAAK/D,CAClB,GAAG+D,KAAaA,GAAW,CAAC,EAAE,EAKvB,IAAIE,IACV,SAAUA,EAAmB,CAI1B,SAASpE,EAAOqE,EAASC,EAAc,CACnC,MAAO,CAAE,QAAAD,EAAS,aAAAC,CAAa,CACnC,CAFSpE,EAAAF,EAAA,UAGToE,EAAkB,OAASpE,EAI
3B,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKC,EAAG,SAASD,EAAU,OAAO,GAAKC,EAAG,QAAQD,EAAU,YAAY,CACvG,CAHSH,EAAAC,EAAA,MAITiE,EAAkB,GAAKjE,CAC3B,GAAGiE,KAAsBA,GAAoB,CAAC,EAAE,EAKzC,IAAIG,IACV,SAAUA,EAAc,CAIrB,SAASvE,EAAOyC,EAAO+B,EAAQL,EAAM,CACjC,MAAO,CAAE,MAAA1B,EAAO,OAAA+B,EAAQ,KAAAL,CAAK,CACjC,CAFSjE,EAAAF,EAAA,UAGTuE,EAAa,OAASvE,EAItB,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,QAAQD,CAAS,GAAKiB,EAAM,GAAGjB,EAAU,KAAK,IAAMC,EAAG,UAAUD,EAAU,MAAM,GAAKC,EAAG,OAAOD,EAAU,MAAM,EAC9H,CAHSH,EAAAC,EAAA,MAIToE,EAAa,GAAKpE,CACtB,GAAGoE,KAAiBA,GAAe,CAAC,EAAE,EAK/B,IAAIE,IACV,SAAUA,EAAgB,CAMvB,SAASzE,EAAOyC,EAAOiC,EAAQ,CAC3B,MAAO,CAAE,MAAAjC,EAAO,OAAAiC,CAAO,CAC3B,CAFSxE,EAAAF,EAAA,UAGTyE,EAAe,OAASzE,EACxB,SAASG,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,OAAOE,EAAG,cAAcD,CAAS,GAAKiB,EAAM,GAAGjB,EAAU,KAAK,IAAMA,EAAU,SAAW,QAAaoE,EAAe,GAAGpE,EAAU,MAAM,EAC5I,CAHSH,EAAAC,EAAA,MAITsE,EAAe,GAAKtE,CACxB,GAAGsE,KAAmBA,GAAiB,CAAC,EAAE,EAQnC,IAAIE,IACV,SAAUA,EAAoB,CAC3BA,EAAmB,UAAe,YAKlCA,EAAmB,KAAU,OAC7BA,EAAmB,MAAW,QAC9BA,EAAmB,KAAU,OAC7BA,EAAmB,UAAe,YAClCA,EAAmB,OAAY,SAC/BA,EAAmB,cAAmB,gBACtCA,EAAmB,UAAe,YAClCA,EAAmB,SAAc,WACjCA,EAAmB,SAAc,WACjCA,EAAmB,WAAgB,aACnCA,EAAmB,MAAW,QAC9BA,EAAmB,SAAc,WACjCA,EAAmB,OAAY,SAC/BA,EAAmB,MAAW,QAC9BA,EAAmB,QAAa,UAChCA,EAAmB,SAAc,WACjCA,EAAmB,QAAa,UAChCA,EAAmB,OAAY,SAC/BA,EAAmB,OAAY,SAC/BA,EAAmB,OAAY,SAC/BA,EAAmB,SAAc,WAIjCA,EAAmB,UAAe,WACtC,GAAGA,KAAuBA,GAAqB,CAAC,EAAE,EAQ3C,IAAIC,IACV,SAAUA,EAAwB,CAC/BA,EAAuB,YAAiB,cACxCA,EAAuB,WAAgB,aACvCA,EAAuB,SAAc,WACrCA,EAAuB,OAAY,SACnCA,EAAuB,WAAgB,aACvCA,EAAuB,SAAc,WACrCA,EAAuB,MAAW,QAClCA,EAAuB,aAAkB,eACzCA,EAAuB,cAAmB,gBAC1CA,EAAuB,eAAoB,gBAC/C,GAAGA,KAA2BA,GAAyB,CAAC,EAAE,EAInD,IAAIC,IACV,SAAUA,EAAgB,CACvB,SAAS1E,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,cAAcD,CAAS,IAAMA,EAAU,WAAa,QAAa,OAAOA,EAAU,UAAa,WACrG,MAAM,QAAQA,EAAU,IAAI,IAAMA,EAAU,KAAK,SAAW,GAAK,OAAOA,EAAU,KAAK,CAAC,GAAM,SACtG,CAJSH,EAAAC,EAAA,MAKT0E,EAAe,GAAK1E,CACxB,GAAG0E,KAAmBA,GAAiB,CAAC,EAAE,EAMnC,IAAIC,IACV,SAAUA,EAAiB,CAIxB,SAAS9E,EAAOyC,EAAO7B,EAAM,CACzB,MAAO,CAAE,MAAA6B,EAAO,KAAA7B,CAAK,CACzB,CAFSV,EAAAF,EAAA,UAGT8E,EAAgB,OAAS9E,EACzB,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAkCC,GAAc,MAAQiB,EAAM,GAAGjB,EAAU,KAAK,GAAKC,EAAG,OAAOD,EAAU,IAAI,CACjH,CAHSH,EAAAC,EAAA,MAIT2E,EAAgB,GAAK3E,CACzB,GAAG2E,KAAoBA,GAAkB,CAAC,EAAE,EAMrC,IAAIC,IACV,SAAUA,EAA2B,CAIlC,SAAS/E,EAAOyC,EAAOuC,EAAcC,EAAqB,CACtD,MAAO,CAAE,MAAAxC,EAAO,aAAAuC,EAAc,oBAAAC,CAAoB,CACtD,CAFS/E,EAAAF,EAAA,UAGT+E,EAA0B,OAAS/E,EACnC,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAkCC,GAAc,MAAQiB,EAAM,GAAGjB,EAAU,KAAK,GAAKC,EAAG,QAAQD,EAAU,mBAAmB,IACrHC,EAAG,OAAOD,EAAU,YAAY,GAAKA,EAAU,eAAiB,OAC5E,CAJSH,EAAAC,EAAA,MAKT4E,EAA0B,GAAK5E,CACnC,GAAG4E,KAA8BA,GAA4B,CAAC,EAAE,EAMzD,IAAIG,IACV,SAAUA,EAAkC,CAIzC,SAASlF,EAAOyC,EAAO0C,EAAY,CAC/B,MAAO,CAAE,MAAA1C,EAAO,WAAA0C,CAAW,CAC/B,CAFSjF,EAAAF,EAAA,UAGTkF,EAAiC,OAASlF,EAC1C,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAkCC,GAAc,MAAQiB,EAAM,GAAGjB,EAAU,KAAK,IACxEC,EAAG,OAAOD,EAAU,UAAU,GAAKA,EAAU,aAAe,OACxE,CAJSH,EAAAC,EAAA,MAKT+E,EAAiC,GAAK/E,CAC1C,GAAG+E,KAAqCA,GAAmC,CAAC,EAAE,EAOvE,IAAIE,IACV,SAAUA,EAAoB,CAI3B,SAASpF,EAAOqF,EAASC,EAAiB,CACtC,MAAO,CAAE,QAAAD,EAAS,gBAAAC,CAAgB,CACtC,CAFSpF,EAAAF,EAAA,UAGToF,EAAmB,OAASpF,EAI5B,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,QAAQD,CAAS,GAAKiB,EAAM,GAAGlB,EAAM,eAAe,CAClE,CAHSF,EAAAC,EAAA,MAITiF,EAAmB,GAAKjF,CAC5B,GAAGiF,KAAuBA,GAAqB,CAAC,EAAE,EAM3C,IAAIG,IACV,SAAUA,EAAe,CAItBA,EAAc,KAAO,EAIrBA,EAAc,UAAY,EAC1B,SAASpF,EAAGC,EAAO,CACf,OAAOA,IAAU,GAAKA,IAAU,CACpC,CAFSF,EAAAC,EAAA,MAGToF,EAAc,GAAKpF,CACvB,G
AAGoF,KAAkBA,GAAgB,CAAC,EAAE,EACjC,IAAIC,IACV,SAAUA,EAAoB,CAC3B,SAASxF,EAAOI,EAAO,CACnB,MAAO,CAAE,MAAAA,CAAM,CACnB,CAFSF,EAAAF,EAAA,UAGTwF,EAAmB,OAASxF,EAC5B,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,cAAcD,CAAS,IACzBA,EAAU,UAAY,QAAaC,EAAG,OAAOD,EAAU,OAAO,GAAKS,GAAc,GAAGT,EAAU,OAAO,KACrGA,EAAU,WAAa,QAAaoF,GAAS,GAAGpF,EAAU,QAAQ,KAClEA,EAAU,UAAY,QAAa2D,GAAQ,GAAG3D,EAAU,OAAO,EAC3E,CANSH,EAAAC,EAAA,MAOTqF,EAAmB,GAAKrF,CAC5B,GAAGqF,KAAuBA,GAAqB,CAAC,EAAE,EAC3C,IAAIE,IACV,SAAUA,EAAW,CAClB,SAAS1F,EAAO2F,EAAUjE,EAAOgB,EAAM,CACnC,IAAMJ,EAAS,CAAE,SAAAqD,EAAU,MAAAjE,CAAM,EACjC,OAAIgB,IAAS,SACTJ,EAAO,KAAOI,GAEXJ,CACX,CANSpC,EAAAF,EAAA,UAOT0F,EAAU,OAAS1F,EACnB,SAASG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,cAAcD,CAAS,GAAKuF,EAAS,GAAGvF,EAAU,QAAQ,IAC5DC,EAAG,OAAOD,EAAU,KAAK,GAAKC,EAAG,WAAWD,EAAU,MAAOmF,GAAmB,EAAE,KAClFnF,EAAU,OAAS,QAAakF,GAAc,GAAGlF,EAAU,IAAI,IAC/DA,EAAU,YAAc,QAAcC,EAAG,WAAWD,EAAU,UAAWwF,GAAS,EAAE,IACpFxF,EAAU,UAAY,QAAaC,EAAG,OAAOD,EAAU,OAAO,GAAKS,GAAc,GAAGT,EAAU,OAAO,KACrGA,EAAU,cAAgB,QAAaC,EAAG,QAAQD,EAAU,WAAW,KACvEA,EAAU,eAAiB,QAAaC,EAAG,QAAQD,EAAU,YAAY,EACrF,CATSH,EAAAC,EAAA,MAUTuF,EAAU,GAAKvF,CACnB,GAAGuF,KAAcA,GAAY,CAAC,EAAE,EACzB,IAAII,IACV,SAAUA,EAAa,CACpB,SAASC,EAAc3F,EAAO,CAC1B,MAAO,CAAE,KAAM,UAAW,MAAAA,CAAM,CACpC,CAFSF,EAAA6F,EAAA,iBAGTD,EAAY,cAAgBC,CAChC,GAAGD,KAAgBA,GAAc,CAAC,EAAE,EAC7B,IAAIE,IACV,SAAUA,EAAsB,CAC7B,SAAShG,EAAOiG,EAAYC,EAAYzD,EAAO0D,EAAS,CACpD,MAAO,CAAE,WAAAF,EAAY,WAAAC,EAAY,MAAAzD,EAAO,QAAA0D,CAAQ,CACpD,CAFSjG,EAAAF,EAAA,UAGTgG,EAAqB,OAAShG,CAClC,GAAGgG,KAAyBA,GAAuB,CAAC,EAAE,EAC/C,IAAII,IACV,SAAUA,EAAsB,CAC7B,SAASpG,EAAO4B,EAAO,CACnB,MAAO,CAAE,MAAAA,CAAM,CACnB,CAFS1B,EAAAF,EAAA,UAGToG,EAAqB,OAASpG,CAClC,GAAGoG,KAAyBA,GAAuB,CAAC,EAAE,EAO/C,IAAIC,IACV,SAAUA,EAA6B,CAIpCA,EAA4B,QAAU,EAItCA,EAA4B,UAAY,CAC5C,GAAGA,KAAgCA,GAA8B,CAAC,EAAE,EAC7D,IAAIC,IACV,SAAUA,EAAwB,CAC/B,SAAStG,EAAOyC,EAAO7B,EAAM,CACzB,MAAO,CAAE,MAAA6B,EAAO,KAAA7B,CAAK,CACzB,CAFSV,EAAAF,EAAA,UAGTsG,EAAuB,OAAStG,CACpC,GAAGsG,KAA2BA,GAAyB,CAAC,EAAE,EACnD,IAAIC,IACV,SAAUA,EAAyB,CAChC,SAASvG,EAAO0D,EAAa8C,EAAwB,CACjD,MAAO,CAAE,YAAA9C,EAAa,uBAAA8C,CAAuB,CACjD,CAFStG,EAAAF,EAAA,UAGTuG,EAAwB,OAASvG,CACrC,GAAGuG,KAA4BA,GAA0B,CAAC,EAAE,EACrD,IAAIE,IACV,SAAUA,EAAiB,CACxB,SAAStG,EAAGC,EAAO,CACf,IAAMC,EAAYD,EAClB,OAAOE,EAAG,cAAcD,CAAS,GAAKqG,GAAI,GAAGrG,EAAU,GAAG,GAAKC,EAAG,OAAOD,EAAU,IAAI,CAC3F,CAHSH,EAAAC,EAAA,MAITsG,EAAgB,GAAKtG,CACzB,GAAGsG,KAAoBA,GAAkB,CAAC,EAAE,EAKrC,IAAIE,IACV,SAAUA,EAAc,CAQrB,SAASC,EAAOC,EAAKC,EAAYC,EAASC,EAAS,CAC/C,OAAO,IAAIC,GAAiBJ,EAAKC,EAAYC,EAASC,CAAO,CACjE,CAFSE,EAAAN,EAAA,UAGTD,EAAa,OAASC,EAItB,SAASO,EAAGC,EAAO,CACf,IAAIC,EAAYD,EAChB,MAAO,GAAAE,EAAG,QAAQD,CAAS,GAAKC,EAAG,OAAOD,EAAU,GAAG,IAAMC,EAAG,UAAUD,EAAU,UAAU,GAAKC,EAAG,OAAOD,EAAU,UAAU,IAAMC,EAAG,SAASD,EAAU,SAAS,GAC/JC,EAAG,KAAKD,EAAU,OAAO,GAAKC,EAAG,KAAKD,EAAU,UAAU,GAAKC,EAAG,KAAKD,EAAU,QAAQ,EACpG,CAJSH,EAAAC,EAAA,MAKTR,EAAa,GAAKQ,EAClB,SAASI,EAAWC,EAAUC,EAAO,CACjC,IAAIC,EAAOF,EAAS,QAAQ,EACxBG,EAAcC,EAAUH,EAAO,CAACI,EAAGC,IAAM,CACzC,IAAIC,EAAOF,EAAE,MAAM,MAAM,KAAOC,EAAE,MAAM,MAAM,KAC9C,OAAIC,IAAS,EACFF,EAAE,MAAM,MAAM,UAAYC,EAAE,MAAM,MAAM,UAE5CC,CACX,CAAC,EACGC,EAAqBN,EAAK,OAC9B,QAASO,EAAIN,EAAY,OAAS,EAAGM,GAAK,EAAGA,IAAK,CAC9C,IAAIC,EAAIP,EAAYM,CAAC,EACjBE,EAAcX,EAAS,SAASU,EAAE,MAAM,KAAK,EAC7CE,EAAYZ,EAAS,SAASU,EAAE,MAAM,GAAG,EAC7C,GAAIE,GAAaJ,EACbN,EAAOA,EAAK,UAAU,EAAGS,CAAW,EAAID,EAAE,QAAUR,EAAK,UAAUU,EAAWV,EAAK,MAAM,MAGzF,OAAM,IAAI,MAAM,kBAAkB,EAEtCM,EAAqBG,CACzB,CACA,OAAOT,CACX,CAvBSR,EAAAK,EAAA,cAwBTZ,EAAa,WAAaY,EAC1B,SAASK,EAAUS,EAAMC,EAAS,CAC9B,GAAID,EAAK,QAAU,EAEf,OAAOA,EAEX,IAAME,EAAKF,EAAK,OAAS,EAAK,
EACxBG,EAAOH,EAAK,MAAM,EAAGE,CAAC,EACtBE,EAAQJ,EAAK,MAAME,CAAC,EAC1BX,EAAUY,EAAMF,CAAO,EACvBV,EAAUa,EAAOH,CAAO,EACxB,IAAII,EAAU,EACVC,EAAW,EACXV,EAAI,EACR,KAAOS,EAAUF,EAAK,QAAUG,EAAWF,EAAM,QACnCH,EAAQE,EAAKE,CAAO,EAAGD,EAAME,CAAQ,CAAC,GACrC,EAEPN,EAAKJ,GAAG,EAAIO,EAAKE,GAAS,EAI1BL,EAAKJ,GAAG,EAAIQ,EAAME,GAAU,EAGpC,KAAOD,EAAUF,EAAK,QAClBH,EAAKJ,GAAG,EAAIO,EAAKE,GAAS,EAE9B,KAAOC,EAAWF,EAAM,QACpBJ,EAAKJ,GAAG,EAAIQ,EAAME,GAAU,EAEhC,OAAON,CACX,CA/BSnB,EAAAU,EAAA,YAgCb,GAAGjB,KAAiBA,GAAe,CAAC,EAAE,EAItC,IAAMM,GAAN,KAAuB,CAjiEvB,MAiiEuB,CAAAC,EAAA,yBACnB,YAAYL,EAAKC,EAAYC,EAASC,EAAS,CAC3C,KAAK,KAAOH,EACZ,KAAK,YAAcC,EACnB,KAAK,SAAWC,EAChB,KAAK,SAAWC,EAChB,KAAK,aAAe,MACxB,CACA,IAAI,KAAM,CACN,OAAO,KAAK,IAChB,CACA,IAAI,YAAa,CACb,OAAO,KAAK,WAChB,CACA,IAAI,SAAU,CACV,OAAO,KAAK,QAChB,CACA,QAAQ4B,EAAO,CACX,GAAIA,EAAO,CACP,IAAIC,EAAQ,KAAK,SAASD,EAAM,KAAK,EACjCE,EAAM,KAAK,SAASF,EAAM,GAAG,EACjC,OAAO,KAAK,SAAS,UAAUC,EAAOC,CAAG,CAC7C,CACA,OAAO,KAAK,QAChB,CACA,OAAOC,EAAOhC,EAAS,CACnB,KAAK,SAAWgC,EAAM,KACtB,KAAK,SAAWhC,EAChB,KAAK,aAAe,MACxB,CACA,gBAAiB,CACb,GAAI,KAAK,eAAiB,OAAW,CACjC,IAAIiC,EAAc,CAAC,EACftB,EAAO,KAAK,SACZuB,EAAc,GAClB,QAAS,EAAI,EAAG,EAAIvB,EAAK,OAAQ,IAAK,CAC9BuB,IACAD,EAAY,KAAK,CAAC,EAClBC,EAAc,IAElB,IAAIC,EAAKxB,EAAK,OAAO,CAAC,EACtBuB,EAAeC,IAAO,MAAQA,IAAO;AAAA,EACjCA,IAAO,MAAQ,EAAI,EAAIxB,EAAK,QAAUA,EAAK,OAAO,EAAI,CAAC,IAAM;AAAA,GAC7D,GAER,CACIuB,GAAevB,EAAK,OAAS,GAC7BsB,EAAY,KAAKtB,EAAK,MAAM,EAEhC,KAAK,aAAesB,CACxB,CACA,OAAO,KAAK,YAChB,CACA,WAAWG,EAAQ,CACfA,EAAS,KAAK,IAAI,KAAK,IAAIA,EAAQ,KAAK,SAAS,MAAM,EAAG,CAAC,EAC3D,IAAIH,EAAc,KAAK,eAAe,EAClCI,EAAM,EAAGC,EAAOL,EAAY,OAChC,GAAIK,IAAS,EACT,OAAOC,EAAS,OAAO,EAAGH,CAAM,EAEpC,KAAOC,EAAMC,GAAM,CACf,IAAIE,EAAM,KAAK,OAAOH,EAAMC,GAAQ,CAAC,EACjCL,EAAYO,CAAG,EAAIJ,EACnBE,EAAOE,EAGPH,EAAMG,EAAM,CAEpB,CAGA,IAAIC,EAAOJ,EAAM,EACjB,OAAOE,EAAS,OAAOE,EAAML,EAASH,EAAYQ,CAAI,CAAC,CAC3D,CACA,SAASC,EAAU,CACf,IAAIT,EAAc,KAAK,eAAe,EACtC,GAAIS,EAAS,MAAQT,EAAY,OAC7B,OAAO,KAAK,SAAS,OAEpB,GAAIS,EAAS,KAAO,EACrB,MAAO,GAEX,IAAIC,EAAaV,EAAYS,EAAS,IAAI,EACtCE,EAAkBF,EAAS,KAAO,EAAIT,EAAY,OAAUA,EAAYS,EAAS,KAAO,CAAC,EAAI,KAAK,SAAS,OAC/G,OAAO,KAAK,IAAI,KAAK,IAAIC,EAAaD,EAAS,UAAWE,CAAc,EAAGD,CAAU,CACzF,CACA,IAAI,WAAY,CACZ,OAAO,KAAK,eAAe,EAAE,MACjC,CACJ,EACIpC,GACH,SAAUA,EAAI,CACX,IAAMsC,EAAW,OAAO,UAAU,SAClC,SAASC,EAAQzC,EAAO,CACpB,OAAO,OAAOA,EAAU,GAC5B,CAFSF,EAAA2C,EAAA,WAGTvC,EAAG,QAAUuC,EACb,SAASC,EAAU1C,EAAO,CACtB,OAAO,OAAOA,EAAU,GAC5B,CAFSF,EAAA4C,EAAA,aAGTxC,EAAG,UAAYwC,EACf,SAASC,EAAQ3C,EAAO,CACpB,OAAOA,IAAU,IAAQA,IAAU,EACvC,CAFSF,EAAA6C,EAAA,WAGTzC,EAAG,QAAUyC,EACb,SAASC,EAAO5C,EAAO,CACnB,OAAOwC,EAAS,KAAKxC,CAAK,IAAM,iBACpC,CAFSF,EAAA8C,EAAA,UAGT1C,EAAG,OAAS0C,EACZ,SAASC,EAAO7C,EAAO,CACnB,OAAOwC,EAAS,KAAKxC,CAAK,IAAM,iBACpC,CAFSF,EAAA+C,EAAA,UAGT3C,EAAG,OAAS2C,EACZ,SAASC,EAAY9C,EAAO+C,EAAKC,EAAK,CAClC,OAAOR,EAAS,KAAKxC,CAAK,IAAM,mBAAqB+C,GAAO/C,GAASA,GAASgD,CAClF,CAFSlD,EAAAgD,EAAA,eAGT5C,EAAG,YAAc4C,EACjB,SAASG,EAAQjD,EAAO,CACpB,OAAOwC,EAAS,KAAKxC,CAAK,IAAM,mBAAqB,aAAeA,GAASA,GAAS,UAC1F,CAFSF,EAAAmD,EAAA,WAGT/C,EAAG,QAAU+C,EACb,SAASC,EAASlD,EAAO,CACrB,OAAOwC,EAAS,KAAKxC,CAAK,IAAM,mBAAqB,GAAKA,GAASA,GAAS,UAChF,CAFSF,EAAAoD,EAAA,YAGThD,EAAG,SAAWgD,EACd,SAASC,EAAKnD,EAAO,CACjB,OAAOwC,EAAS,KAAKxC,CAAK,IAAM,mBACpC,CAFSF,EAAAqD,EAAA,QAGTjD,EAAG,KAAOiD,EACV,SAASC,EAAcpD,EAAO,CAI1B,OAAOA,IAAU,MAAQ,OAAOA,GAAU,QAC9C,CALSF,EAAAsD,EAAA,iBAMTlD,EAAG,cAAgBkD,EACnB,SAASC,EAAWrD,EAAOsD,EAAO,CAC9B,OAAO,MAAM,QAAQtD,CAAK,GAAKA,EAAM,MAAMsD,CAAK,CACpD,CAFSxD,EAAAuD,EAAA,cAGTnD,EAAG,WAAamD,CACpB,GAAGnD,IAAOA,EAAK,CAAC,EAAE,EC/pEZ,IAAOqD,GAAP,KAAqB,CAd3B,MAc2B,CAAAC,EAAA,uBAA3B,aAAA,CAGY,KAAA,UA
AoC,CAAA,CAmFhD,CAjFI,IAAY,SAAO,CACf,OAAO,KAAK,UAAU,KAAK,UAAU,OAAS,CAAC,CACnD,CAEA,cAAcC,EAAa,CACvB,YAAK,SAAW,IAAIC,GAAgBD,CAAK,EACzC,KAAK,SAAS,KAAO,KAAK,SAC1B,KAAK,UAAY,CAAC,KAAK,QAAQ,EACxB,KAAK,QAChB,CAEA,mBAAmBE,EAAwB,CACvC,IAAMC,EAAgB,IAAIC,GAC1B,OAAAD,EAAc,cAAgBD,EAC9BC,EAAc,KAAO,KAAK,SAC1B,KAAK,QAAQ,QAAQ,KAAKA,CAAa,EACvC,KAAK,UAAU,KAAKA,CAAa,EAC1BA,CACX,CAEA,cAAcE,EAAeH,EAAwB,CACjD,IAAMI,EAAW,IAAIC,GAAgBF,EAAM,YAAaA,EAAM,MAAM,OAAQG,GAAaH,CAAK,EAAGA,EAAM,UAAW,EAAK,EACvH,OAAAC,EAAS,cAAgBJ,EACzBI,EAAS,KAAO,KAAK,SACrB,KAAK,QAAQ,QAAQ,KAAKA,CAAQ,EAC3BA,CACX,CAEA,WAAWG,EAAa,CACpB,IAAMC,EAASD,EAAK,UACpB,GAAIC,EAAQ,CACR,IAAMC,EAAQD,EAAO,QAAQ,QAAQD,CAAI,EACrCE,GAAS,GACTD,EAAO,QAAQ,OAAOC,EAAO,CAAC,EAG1C,CAEA,UAAUC,EAA+D,CACrE,IAAMC,EAAmB,KAAK,QAG1B,OAAOD,EAAK,OAAU,WACtB,KAAK,QAAQ,QAAmBA,GAEpCA,EAAK,SAAWC,EAChB,IAAMJ,EAAO,KAAK,UAAU,IAAG,EAG3BA,GAAM,QAAQ,SAAW,GACzB,KAAK,WAAWA,CAAI,CAE5B,CAEA,gBAAgBK,EAAsB,CAClC,QAAWT,KAASS,EAAc,CAC9B,IAAMC,EAAa,IAAIR,GAAgBF,EAAM,YAAaA,EAAM,MAAM,OAAQG,GAAaH,CAAK,EAAGA,EAAM,UAAW,EAAI,EACxHU,EAAW,KAAO,KAAK,SACvB,KAAK,eAAe,KAAK,SAAUA,CAAU,EAErD,CAEQ,eAAeN,EAAwBJ,EAAkB,CAC7D,GAAM,CAAE,OAAQW,EAAY,IAAKC,CAAQ,EAAKZ,EAE9C,QAASa,EAAI,EAAGA,EAAIT,EAAK,QAAQ,OAAQS,IAAK,CAC1C,IAAMC,EAAQV,EAAK,QAAQS,CAAC,EACtB,CAAE,OAAQE,EAAY,IAAKC,CAAQ,EAAKF,EAC9C,GAAIG,GAAmBH,CAAK,GAAKH,EAAaI,GAAcH,EAAWI,EAAU,CAC7E,KAAK,eAAeF,EAAOd,CAAK,EAChC,eACOY,GAAYG,EAAY,CAC/BX,EAAK,QAAQ,OAAOS,EAAG,EAAGb,CAAK,EAC/B,QAMRI,EAAK,QAAQ,KAAKJ,CAAK,CAC3B,GAGkBkB,GAAhB,KAA+B,CAtGrC,MAsGqC,CAAAxB,EAAA,wBAYjC,IAAI,QAAM,CACN,OAAO,KAAK,SAChB,CAGA,IAAI,SAAO,CACP,OAAO,KAAK,aAChB,CAEA,IAAI,QAAM,CACN,MAAO,EACX,CAEA,IAAI,SAAO,SACP,IAAMU,EAAO,QAAOe,EAAA,KAAK,YAAQ,MAAAA,IAAA,OAAA,OAAAA,EAAE,QAAU,SAAW,KAAK,UAAWC,EAAA,KAAK,aAAS,MAAAA,IAAA,OAAA,OAAAA,EAAE,QACxF,GAAI,CAAChB,EACD,MAAM,IAAI,MAAM,yCAAyC,EAE7D,OAAOA,CACX,CAEA,IAAI,QAAQiB,EAAc,CACtB,KAAK,SAAWA,CACpB,CAGA,IAAI,SAAO,CACP,OAAO,KAAK,OAChB,CAEA,IAAI,MAAI,CACJ,OAAO,KAAK,KAAK,SAAS,UAAU,KAAK,OAAQ,KAAK,GAAG,CAC7D,GAGSnB,GAAP,cAA+BgB,EAAe,CArJpD,MAqJoD,CAAAxB,EAAA,wBAChD,IAAI,QAAM,CACN,OAAO,KAAK,OAChB,CAEA,IAAI,QAAM,CACN,OAAO,KAAK,OAChB,CAEA,IAAI,KAAG,CACH,OAAO,KAAK,QAAU,KAAK,OAC/B,CAEA,IAAa,QAAM,CACf,OAAO,KAAK,OAChB,CAEA,IAAI,WAAS,CACT,OAAO,KAAK,UAChB,CAEA,IAAI,OAAK,CACL,OAAO,KAAK,MAChB,CAQA,YAAY4B,EAAgBC,EAAgBC,EAAcC,EAAsBC,EAAS,GAAK,CAC1F,MAAK,EACL,KAAK,QAAUA,EACf,KAAK,QAAUJ,EACf,KAAK,WAAaG,EAClB,KAAK,QAAUF,EACf,KAAK,OAASC,CAClB,GAGSzB,GAAP,cAAoCmB,EAAe,CA9LzD,MA8LyD,CAAAxB,EAAA,6BAAzD,aAAA,qBACa,KAAA,QAAqB,IAAIiC,GAAiB,IAAI,CAqD3D,CAjDI,IAAI,UAAQ,CACR,OAAO,KAAK,OAChB,CAEA,IAAI,QAAM,SACN,OAAOP,GAAAD,EAAA,KAAK,sBAAkB,MAAAA,IAAA,OAAA,OAAAA,EAAE,UAAM,MAAAC,IAAA,OAAAA,EAAI,CAC9C,CAEA,IAAI,QAAM,CACN,OAAO,KAAK,IAAM,KAAK,MAC3B,CAEA,IAAI,KAAG,SACH,OAAOA,GAAAD,EAAA,KAAK,qBAAiB,MAAAA,IAAA,OAAA,OAAAA,EAAE,OAAG,MAAAC,IAAA,OAAAA,EAAI,CAC1C,CAEA,IAAI,OAAK,CACL,IAAMQ,EAAY,KAAK,mBACjBC,EAAW,KAAK,kBACtB,GAAID,GAAaC,EAAU,CACvB,GAAI,KAAK,cAAgB,OAAW,CAChC,GAAM,CAAE,MAAOC,CAAU,EAAKF,EACxB,CAAE,MAAOG,CAAS,EAAKF,EAC7B,KAAK,YAAc,CAAE,MAAOC,EAAW,MAAO,IAAKC,EAAU,IAAI,KAAOD,EAAW,MAAM,KAAOA,EAAW,MAAQC,EAAU,GAAG,EAEpI,OAAO,KAAK,gBAEZ,OAAO,CAAE,MAAOC,EAAS,OAAO,EAAG,CAAC,EAAG,IAAKA,EAAS,OAAO,EAAG,CAAC,CAAC,CAEzE,CAEA,IAAY,oBAAkB,CAC1B,QAAWlB,KAAS,KAAK,QACrB,GAAI,CAACA,EAAM,OACP,OAAOA,EAGf,OAAO,KAAK,QAAQ,CAAC,CACzB,CAEA,IAAY,mBAAiB,CACzB,QAASD,EAAI,KAAK,QAAQ,OAAS,EAAGA,GAAK,EAAGA,IAAK,CAC/C,IAAMC,EAAQ,KAAK,QAAQD,CAAC,EAC5B,GAAI,CAACC,EAAM,OACP,OAAOA,EAGf,OAAO,KAAK,QAAQ,KAAK,QAAQ,OAAS,CAAC,CAC/C,GAGEa,GAAN,MAAMM,UAAyB,KAAc,CAtP7C,MAsP6C,CAAAvC,EAAA,yBAGzC,YAAYW,EAAwB,CAChC,MAAK,EACL,KAAK,OAASA,EACd,OAAO,e
AAe,KAAM4B,EAAiB,SAAS,CAC1D,CAES,QAAQC,EAAgB,CAC7B,YAAK,WAAWA,CAAK,EACd,MAAM,KAAK,GAAGA,CAAK,CAC9B,CAES,WAAWA,EAAgB,CAChC,YAAK,WAAWA,CAAK,EACd,MAAM,QAAQ,GAAGA,CAAK,CACjC,CAES,OAAOC,EAAeC,KAAkBF,EAAgB,CAC7D,YAAK,WAAWA,CAAK,EACd,MAAM,OAAOC,EAAOC,EAAO,GAAGF,CAAK,CAC9C,CAEQ,WAAWA,EAAgB,CAC/B,QAAW3B,KAAQ2B,EACG3B,EAAM,UAAY,KAAK,MAEjD,GAGSX,GAAP,cAA+BG,EAAoB,CArRzD,MAqRyD,CAAAL,EAAA,wBAGrD,IAAa,MAAI,CACb,OAAO,KAAK,MAAM,UAAU,KAAK,OAAQ,KAAK,GAAG,CACrD,CAEA,IAAI,UAAQ,CACR,OAAO,KAAK,KAChB,CAEA,YAAYC,EAAc,CACtB,MAAK,EAXD,KAAA,MAAQ,GAYZ,KAAK,MAAQA,GAAS,EAC1B,GCvQG,IAAM0C,GAAiB,OAAO,UAAU,EAU/C,SAASC,GAAeC,EAA4C,CAChE,OAAOA,EAAK,QAAUF,EAC1B,CAFSG,EAAAF,GAAA,kBA8BT,IAAMG,GAAa,SACbC,GAAiBF,EAACG,GAAyBA,EAAK,SAASF,EAAU,EAAIE,EAAOA,EAAOF,GAApE,kBAEDG,GAAhB,KAAqC,CAvE3C,MAuE2C,CAAAJ,EAAA,8BAMvC,YAAYK,EAA6B,CAF/B,KAAA,iBAA2C,IAAI,IAGrD,KAAK,MAAQA,EAAS,OAAO,MAC7B,IAAMC,EAAS,KAAK,MAAM,WAC1B,KAAK,QAAU,IAAIC,GAAkBD,EAAM,OAAA,OAAA,OAAA,OAAA,CAAA,EACpCD,EAAS,OAAO,YAAY,EAAA,CAC/B,qBAAsBA,EAAS,OAAO,0BAA0B,CAAA,CAAA,CAExE,CAEA,aAAaG,EAAaC,EAA2B,CACjD,KAAK,QAAQ,OAAOD,EAAKC,CAAO,CACpC,CAEA,SAASD,EAAaE,EAAgC,CAClD,KAAK,QAAQ,WAAWF,EAAKE,CAAQ,CACzC,CAEA,KAAKF,EAAaE,EAAgC,CAC9C,KAAK,QAAQ,SAASF,EAAKE,CAAQ,CACvC,CAEA,WAAWF,EAAaE,EAAgC,CACpD,KAAK,QAAQ,eAAeF,EAAKE,CAAQ,CAC7C,CAQA,aAAW,CACP,OAAO,KAAK,QAAQ,YACxB,CAEA,IAAI,iBAAe,CACf,OAAO,KAAK,gBAChB,CAEA,cAAY,CACR,OAAQ,KAAK,QAAgB,UACjC,CAEA,UAAQ,CACJ,KAAK,QAAQ,iBAAgB,CACjC,GAGSC,GAAP,cAA6BP,EAAqB,CA7HxD,MA6HwD,CAAAJ,EAAA,sBASpD,IAAY,SAAO,CACf,OAAO,KAAK,MAAM,KAAK,MAAM,OAAS,CAAC,CAC3C,CAEA,YAAYK,EAA6B,CACrC,MAAMA,CAAQ,EAVD,KAAA,YAAc,IAAIO,GAC3B,KAAA,MAAe,CAAA,EAEf,KAAA,cAAgB,IAAI,IAQxB,KAAK,OAASP,EAAS,WAAW,OAClC,KAAK,UAAYA,EAAS,OAAO,eACjC,KAAK,cAAgBA,EAAS,OAAO,aACzC,CAEA,KAAKQ,EAAkBC,EAAc,CACjC,IAAMC,EAAOF,EAAK,SAAW,OAAYG,GAAeH,CAAI,EAAIhB,GAAiBoB,GAAYJ,CAAI,EAC3FK,EAAa,KAAK,QAAQ,YAAYhB,GAAeW,EAAK,IAAI,EAAG,KAAK,oBAAoBE,EAAMD,CAAI,EAAE,KAAK,IAAI,CAAC,EACtH,OAAID,EAAK,QACL,KAAK,SAAWK,GAEbA,CACX,CAEA,MAAmCC,EAAa,CAC5C,KAAK,YAAY,cAAcA,CAAK,EACpC,IAAMC,EAAc,KAAK,MAAM,SAASD,CAAK,EAC7C,KAAK,QAAQ,MAAQC,EAAY,OACjC,IAAMC,EAAS,KAAK,SAAS,KAAK,KAAK,QAAS,CAAA,CAAE,EAClD,YAAK,YAAY,gBAAgBD,EAAY,MAAM,EACnD,KAAK,gBAAgB,MAAK,EACnB,CACH,MAAOC,EACP,YAAaD,EAAY,OACzB,aAAc,KAAK,QAAQ,OAEnC,CAEQ,oBAAoBE,EAAoCC,EAAwB,CACpF,OAAQC,GAAQ,CACZ,GAAI,CAAC,KAAK,YAAW,EAAI,CACrB,IAAMzB,EAAY,CAAE,MAAAuB,CAAK,EACzB,KAAK,MAAM,KAAKvB,CAAI,EAChBuB,IAAUzB,KACVE,EAAK,MAAQ,IAGrB,IAAIsB,EACJ,GAAI,CACAA,EAASE,EAAeC,CAAI,OAClB,CACVH,EAAS,OAEb,MAAI,CAAC,KAAK,YAAW,GAAMA,IAAW,SAClCA,EAAS,KAAK,UAAS,GAEpBA,CACX,CACJ,CAEA,QAAQb,EAAaiB,EAAsBC,EAAwB,CAC/D,IAAMC,EAAQ,KAAK,QAAQ,YAAYnB,EAAKiB,CAAS,EACrD,GAAI,CAAC,KAAK,YAAW,GAAM,KAAK,aAAaE,CAAK,EAAG,CACjD,IAAMC,EAAW,KAAK,YAAY,cAAcD,EAAOD,CAAO,EACxD,CAAE,WAAAG,EAAY,WAAAC,CAAU,EAAK,KAAK,cAAcJ,CAAO,EACvDK,EAAU,KAAK,QACrB,GAAIF,EAAY,CACZ,IAAMG,EAAiBC,GAAUP,CAAO,EAAIC,EAAM,MAAQ,KAAK,UAAU,QAAQA,EAAM,MAAOC,CAAQ,EACtG,KAAK,OAAOC,EAAW,SAAUA,EAAW,QAASG,EAAgBJ,EAAUE,CAAU,UAClFhC,GAAeiC,CAAO,EAAG,CAChC,IAAIG,EAAOP,EAAM,MACZM,GAAUP,CAAO,IAClBQ,EAAO,KAAK,UAAU,QAAQA,EAAMN,CAAQ,EAAE,SAAQ,GAE1DG,EAAQ,OAASG,GAG7B,CAQQ,aAAaP,EAAa,CAC9B,MAAO,CAACA,EAAM,sBAAwB,CAAC,MAAMA,EAAM,WAAW,GAAK,OAAOA,EAAM,WAAc,UAAY,CAAC,MAAMA,EAAM,SAAS,CACpI,CAEA,QAAQnB,EAAaK,EAAkBa,EAA0BF,EAAU,CACvE,IAAIW,EACC,KAAK,YAAW,IACjBA,EAAU,KAAK,YAAY,mBAAmBT,CAAO,GAEzD,IAAMU,EAAgB,KAAK,QAAQ,YAAY5B,EAAKK,EAAMW,CAAI,EAC1D,CAAC,KAAK,YAAW,GAAMW,GAAWA,EAAQ,OAAS,GACnD,KAAK,yBAAyBC,EAAeV,EAASS,CAAO,CAErE,CAEQ,yBAAyBd,EAAaK,EAA0BS,EAAyB,CAC7F,GAAM,CAAE,WAAAN,EAAY,WAAAC,CAAU,EAAK,KAAK,cAAcJ,CAAO,EAC7D,GAAIG,EACA,KAAK,OAAOA,EAAW,SAAUA,EAAW,QAASR,EAAQc,EA
ASL,CAAU,UACzE,CAACD,EAAY,CAMpB,IAAME,EAAU,KAAK,QACrB,GAAIjC,GAAeiC,CAAO,EACtBA,EAAQ,OAASV,EAAO,SAAQ,UACzB,OAAOA,GAAW,UAAYA,EAAQ,CAC7C,IAAMgB,EAAahB,EAAO,MACpBiB,EAAS,KAAK,sBAAsBjB,EAAQU,CAAO,EACrDM,IACAC,EAAO,MAAQD,GAEnB,IAAME,EAAUD,EAChB,KAAK,MAAM,IAAG,EACd,KAAK,MAAM,KAAKC,CAAO,GAGnC,CAEA,OAAOjB,EAAekB,EAAc,CAChC,GAAI,CAAC,KAAK,YAAW,EAAI,CACrB,IAAIC,EAAO,KAAK,QAIhB,GAAI,CAACA,EAAK,UAAYD,EAAO,SAAWA,EAAO,SAAU,CACrDC,EAAO,KAAK,UAAU,EAAK,EAC3B,IAAMf,EAAUe,EAAK,SAAS,QAC9B,KAAK,YAAY,mBAAmBf,CAAO,EAE/C,IAAMa,EAAU,CAAE,MAAAjB,CAAK,EACvB,KAAK,MAAM,IAAG,EACd,KAAK,MAAM,KAAKiB,CAAO,EACnBC,EAAO,SAAWA,EAAO,UACzB,KAAK,OAAOA,EAAO,SAAUA,EAAO,QAASC,EAAMA,EAAK,SAAU,EAAK,EAGnF,CAEA,UAAUC,EAAM,GAAI,CAChB,GAAI,KAAK,YAAW,EAChB,OAEJ,IAAMC,EAAM,KAAK,QAMjB,OALAC,GAAuBD,CAAG,EAC1B,KAAK,YAAY,UAAUA,CAAG,EAC1BD,GACA,KAAK,MAAM,IAAG,EAEd5C,GAAe6C,CAAG,EACX,KAAK,UAAU,QAAQA,EAAI,MAAOA,EAAI,QAAQ,GAErDE,GAA0B,KAAK,cAAeF,CAAG,EAE9CA,EACX,CAEQ,cAAcjB,EAAwB,CAC1C,GAAI,CAAC,KAAK,cAAc,IAAIA,CAAO,EAAG,CAClC,IAAMG,EAAaiB,GAAmBpB,EAASqB,EAAY,EAC3D,KAAK,cAAc,IAAIrB,EAAS,CAC5B,WAAYG,EACZ,WAAYA,EAAamB,GAAiBnB,EAAW,QAAQ,EAAI,GACpE,EAEL,OAAO,KAAK,cAAc,IAAIH,CAAO,CACzC,CAEQ,OAAOuB,EAAkBvB,EAAiBwB,EAAgBf,EAAkBL,EAAmB,CACnG,IAAMa,EAAM,KAAK,QACbQ,EAMJ,OALIrB,GAAc,OAAOoB,GAAU,SAC/BC,EAAO,KAAK,OAAO,eAAeR,EAAKjB,EAASS,EAASe,CAAK,EAE9DC,EAAOD,EAEHD,EAAU,CACd,IAAK,IAAK,CACNN,EAAIjB,CAAO,EAAIyB,EACf,MAEJ,IAAK,KAAM,CACPR,EAAIjB,CAAO,EAAI,GACf,MAEJ,IAAK,KACI,MAAM,QAAQiB,EAAIjB,CAAO,CAAC,IAC3BiB,EAAIjB,CAAO,EAAI,CAAA,GAEnBiB,EAAIjB,CAAO,EAAE,KAAKyB,CAAI,EAGlC,CAEQ,sBAAsBC,EAAaC,EAAW,CAClD,OAAW,CAAClD,EAAMmD,CAAa,IAAK,OAAO,QAAQD,CAAM,EAAG,CACxD,IAAME,EAAWH,EAAOjD,CAAI,EACxBoD,IAAa,OACbH,EAAOjD,CAAI,EAAImD,EACR,MAAM,QAAQC,CAAQ,GAAK,MAAM,QAAQD,CAAa,IAC7DA,EAAc,KAAK,GAAGC,CAAQ,EAC9BH,EAAOjD,CAAI,EAAImD,GAGvB,OAAOF,CACX,CAEA,IAAI,kBAAgB,CAChB,OAAO,KAAK,QAAQ,gBACxB,GASkBI,GAAhB,KAAkD,CAnWxD,MAmWwD,CAAAxD,EAAA,2CAEpD,0BAA0ByD,EAKzB,CACG,OAAOC,GAA2B,0BAA0BD,CAAO,CACvE,CAEA,8BAA8BA,EAG7B,CACG,OAAOC,GAA2B,8BAA8BD,CAAO,CAC3E,CAEA,wBAAwBA,EAMvB,CACG,OAAOC,GAA2B,wBAAwBD,CAAO,CACrE,CAEA,sBAAsBA,EAMrB,CACG,OAAOC,GAA2B,sBAAsBD,CAAO,CACnE,GAISE,GAAP,cAAiDH,EAAkC,CA3YzF,MA2YyF,CAAAxD,EAAA,0CAE5E,0BAA0B,CAAE,SAAA4D,EAAU,OAAAC,CAAM,EAKpD,CAMG,MAAO,aALaD,EAAS,MACvB,IAAMA,EAAS,MAAQ,IACvBA,EAAS,KAAK,SAAS,KAAK,EACxB,YAAYA,EAAS,KAAK,UAAU,EAAGA,EAAS,KAAK,OAAS,CAAC,CAAC,IAChE,kBAAkBA,EAAS,IAAI,GACV,gBAAgBC,EAAO,KAAK,KAC/D,CAES,8BAA8B,CAAE,eAAAC,CAAc,EAGtD,CACG,MAAO,qCAAqCA,EAAe,KAAK,KACpE,GASSC,GAAP,cAAuC3D,EAAqB,CAzalE,MAyakE,CAAAJ,EAAA,gCAAlE,aAAA,qBAEY,KAAA,OAAmB,CAAA,EAEnB,KAAA,aAAkC,CAAA,EAClC,KAAA,iBAAsC,CAAA,EACtC,KAAA,eAAiB,EACjB,KAAA,UAAY,CAkGxB,CAhGI,QAAM,CAEN,CAEA,WAAS,CAGT,CAEA,MAAMmB,EAAa,CACf,KAAK,WAAU,EACf,IAAMb,EAAS,KAAK,MAAM,SAASa,CAAK,EACxC,YAAK,OAASb,EAAO,OACrB,KAAK,QAAQ,MAAQ,CAAC,GAAG,KAAK,MAAM,EACpC,KAAK,SAAS,KAAK,KAAK,QAAS,CAAA,CAAE,EACnC,KAAK,gBAAgB,MAAK,EACnB,CACH,OAAQ,KAAK,OACb,aAAc,CAAC,GAAG,KAAK,gBAAgB,EACvC,WAAY,KAAK,eAEzB,CAEA,KAAKO,EAAkBC,EAAc,CACjC,IAAMI,EAAa,KAAK,QAAQ,YAAYhB,GAAeW,EAAK,IAAI,EAAG,KAAK,oBAAoBC,CAAI,EAAE,KAAK,IAAI,CAAC,EAChH,OAAID,EAAK,QACL,KAAK,SAAWK,GAEbA,CACX,CAEQ,YAAU,CACd,KAAK,aAAe,CAAA,EACpB,KAAK,iBAAmB,CAAA,EACxB,KAAK,eAAiB,EACtB,KAAK,UAAY,CACrB,CAEQ,oBAAoBK,EAAwB,CAChD,OAAQC,GAAQ,CACZ,IAAMwC,EAAO,KAAK,cAAa,EAC/B,GAAI,CACAzC,EAAeC,CAAI,UAEnB,KAAK,eAAewC,CAAI,EAEhC,CACJ,CAEQ,0BAAwB,CAC5B,KAAK,aAAa,OAAO,KAAK,SAAS,CAC3C,CAEA,eAAa,CACT,IAAMA,EAAO,KAAK,aAAa,OAC/B,YAAK,UAAYA,EACVA,CACX,CAEA,eAAeA,EAAY,CACvB,KAAK,yBAAwB,EAC7B,KAAK,UAAYA,CACrB,CAEA,QAAQxD,EAAaiB,EAAsBC,EAAwB,CAC/D,KAAK,QAAQ,YAAYlB,EAAKiB,CAAS,EAClC,KAAK,YAAW,IACjB,
KAAK,iBAAmB,CAAC,GAAG,KAAK,aAAcC,CAAO,EACtD,KAAK,eAAiB,KAAK,QAAU,EAE7C,CAEA,QAAQlB,EAAaK,EAAkBa,EAA0BF,EAAU,CACvE,KAAK,OAAOE,CAAO,EACnB,KAAK,QAAQ,YAAYlB,EAAKK,EAAMW,CAAI,EACxC,KAAK,MAAME,CAAO,CACtB,CAEA,OAAOuC,EAAwB,CACtB,KAAK,YAAW,GACjB,KAAK,aAAa,KAAKA,CAAO,CAEtC,CAEA,MAAMA,EAAwB,CAC1B,GAAI,CAAC,KAAK,YAAW,EAAI,CACrB,IAAMC,EAAQ,KAAK,aAAa,YAAYD,CAAO,EAC/CC,GAAS,GACT,KAAK,aAAa,OAAOA,CAAK,EAG1C,CAEA,IAAI,SAAO,CACP,OAAQ,KAAK,QAAgB,OACjC,GAGEC,GAA+B,CACjC,gBAAiB,GACjB,qBAAsB,OACtB,gBAAiB,GACjB,qBAAsB,IAAIR,IAOxBpD,GAAN,cAAgC6D,EAAqB,CA/hBrD,MA+hBqD,CAAApE,EAAA,0BAKjD,YAAYM,EAAyB+D,EAAsB,CACvD,IAAMC,EAAsBD,GAAU,iBAAkBA,EACxD,MAAM/D,EAAM,OAAA,OAAA,OAAA,OAAA,OAAA,OAAA,CAAA,EACL6D,EAAa,EAAA,CAChB,kBAAmBG,EACb,IAAIC,GAAqB,CAAE,aAAcF,EAAO,YAAY,CAAE,EAC9D,IAAIG,EAAyB,CAAA,EAChCH,CAAM,CAAA,CAEjB,CAEA,IAAI,cAAY,CACZ,OAAO,KAAK,eAChB,CAEA,YAAYlE,EAAcW,EAAc,CACpC,OAAO,KAAK,KAAKX,EAAMW,CAAI,CAC/B,CAEA,kBAAgB,CACZ,KAAK,oBAAmB,CAC5B,CAEA,YAAYN,EAAaiB,EAAoB,CACzC,OAAO,KAAK,QAAQjB,EAAKiB,CAAS,CACtC,CAEA,YAAYjB,EAAaK,EAAkBW,EAAU,CACjD,OAAO,KAAK,QAAQhB,EAAKK,EAAM,CAC3B,KAAM,CAACW,CAAI,EACd,CACL,CAEA,OAAOhB,EAAaC,EAA2B,CAC3C,KAAK,GAAGD,EAAKC,CAAO,CACxB,CAEA,WAAWD,EAAaE,EAAgC,CACpD,KAAK,OAAOF,EAAKE,CAAQ,CAC7B,CAEA,SAASF,EAAaE,EAAgC,CAClD,KAAK,KAAKF,EAAKE,CAAQ,CAC3B,CAEA,eAAeF,EAAaE,EAAgC,CACxD,KAAK,WAAWF,EAAKE,CAAQ,CACjC,GC3iBE,SAAU+D,GAAmCC,EAAkBC,EAAWC,EAA2B,CAQvG,OAAAC,GANqC,CACjC,OAAAF,EACA,OAAAC,EACA,MAJU,IAAI,IAKd,UAAW,IAAI,KAEOF,CAAO,EAC1BC,CACX,CAVgBG,EAAAL,GAAA,gBAYhB,SAASI,GAAWE,EAA8BL,EAAgB,CAC9D,IAAMM,EAAYC,GAAqBP,EAAS,EAAK,EAC/CQ,EAAcC,EAAOT,EAAQ,KAAK,EAAE,OAAOU,EAAY,EAAE,OAAOC,GAAQL,EAAU,IAAIK,CAAI,CAAC,EACjG,QAAWA,KAAQH,EAAa,CAC5B,IAAMI,EAAG,OAAA,OAAA,OAAA,OAAA,CAAA,EACFP,CAAa,EAAA,CAChB,QAAS,EACT,SAAU,EACV,QAAS,EACT,KAAM,EACN,GAAI,CAAC,CAAA,EAETO,EAAI,MAAM,IACND,EAAK,KACLN,EAAc,OAAO,KAAKM,EAAME,GAAaD,EAAKD,EAAK,UAAU,CAAC,CAAC,EAG/E,CAjBSP,EAAAD,GAAA,cAmBT,SAASU,GAAaD,EAAkBE,EAA0BC,EAAc,GAAK,CACjF,IAAIC,EACJ,GAAIC,GAAUH,CAAO,EACjBE,EAASE,GAAaN,EAAKE,CAAO,UAC3BK,GAASL,CAAO,EACvBE,EAASI,GAAYR,EAAKE,CAAO,UAC1BO,GAAaP,CAAO,EAC3BE,EAASH,GAAaD,EAAKE,EAAQ,QAAQ,UACpCQ,GAAiBR,CAAO,EAC/BE,EAASO,GAAoBX,EAAKE,CAAO,UAClCU,GAAWV,CAAO,EACzBE,EAASS,GAAcb,EAAKE,CAAO,UAC5BY,GAAeZ,CAAO,EAC7BE,EAASW,GAAkBf,EAAKE,CAAO,UAChCc,GAAiBd,CAAO,EAC/BE,EAASa,GAAoBjB,EAAKE,CAAO,UAClCgB,GAAQhB,CAAO,EACtBE,EAASe,GAAWnB,EAAKE,CAAO,UAC1BkB,GAAYlB,CAAO,EAAG,CAC5B,IAAMmB,EAAMrB,EAAI,UAChBI,EAASZ,EAAA,IAAMQ,EAAI,OAAO,QAAQqB,EAAKC,GAAKpB,CAAO,EAA1C,cAET,OAAM,IAAIqB,GAAkBrB,EAAQ,SAAU,4BAA4BA,EAAQ,KAAK,EAAE,EAE7F,OAAOsB,GAAKxB,EAAKG,EAAc,OAAYsB,GAAkBvB,CAAO,EAAGE,EAAQF,EAAQ,WAAW,CACtG,CAzBSV,EAAAS,GAAA,gBA2BT,SAASO,GAAYR,EAAkB0B,EAAc,CACjD,IAAMC,EAAaC,GAAYF,CAAM,EACrC,MAAO,IAAM1B,EAAI,OAAO,OAAO2B,EAAYD,CAAM,CACrD,CAHSlC,EAAAgB,GAAA,eAKT,SAASK,GAAcb,EAAkB6B,EAAkB,CACvD,IAAM9B,EAAO8B,EAAS,KAAK,IAC3B,GAAI/B,GAAaC,CAAI,EAAG,CACpB,IAAMsB,EAAMrB,EAAI,UACV8B,EAAYD,EAAS,UAAU,OAAS,EAAIE,GAAuBhC,EAAM8B,EAAS,SAAS,EAAI,KAAO,CAAA,GAC5G,OAAQG,GAAShC,EAAI,OAAO,QAAQqB,EAAKY,GAAQjC,EAAKD,CAAI,EAAG8B,EAAUC,EAAUE,CAAI,CAAC,UAC/EE,GAAenC,CAAI,EAAG,CAC7B,IAAMsB,EAAMrB,EAAI,UACVI,EAAS+B,GAASnC,EAAKD,EAAK,IAAI,EACtC,MAAO,IAAMC,EAAI,OAAO,QAAQqB,EAAKjB,EAAQyB,CAAQ,UAC7C9B,EAGRqC,GAAkBrC,CAAI,MAFtB,OAAM,IAAIwB,GAAkBM,EAAS,SAAU,wBAAwBA,EAAS,KAAK,EAAE,CAI/F,CAfSrC,EAAAqB,GAAA,iBAiBT,SAASkB,GAAuBhC,EAAkBsC,EAA0B,CACxE,IAAMC,EAAaD,EAAU,IAAIE,GAAKC,GAAeD,EAAE,KAAK,CAAC,EAC7D,OAAQP,GAAQ,CACZ,IAAMS,EAAiB,CAAA,EACvB,QAASC,EAAI,EAAGA,EAAIJ,EAAW,OAAQI,IAAK,CACxC,IAAMC,EAAa5C,EAAK,WAAW2C,CAAC,EAC9BZ,EAAYQ,EAAWI,CAAC,EAC9BD,EAASE,EAAW,IAAI,EAAIb,EAAUE,CAAI,EAE9C,OAAOS,CACX,CACJ,
CAXSjD,EAAAuC,GAAA,0BAkBT,SAASS,GAAeI,EAAoB,CACxC,GAAIC,GAAcD,CAAS,EAAG,CAC1B,IAAME,EAAON,GAAeI,EAAU,IAAI,EACpCG,EAAQP,GAAeI,EAAU,KAAK,EAC5C,OAAQZ,GAAUc,EAAKd,CAAI,GAAKe,EAAMf,CAAI,UACnCgB,GAAcJ,CAAS,EAAG,CACjC,IAAME,EAAON,GAAeI,EAAU,IAAI,EACpCG,EAAQP,GAAeI,EAAU,KAAK,EAC5C,OAAQZ,GAAUc,EAAKd,CAAI,GAAKe,EAAMf,CAAI,UACnCiB,GAAWL,CAAS,EAAG,CAC9B,IAAMM,EAAQV,GAAeI,EAAU,KAAK,EAC5C,OAAQZ,GAAS,CAACkB,EAAMlB,CAAI,UACrBmB,GAAqBP,CAAS,EAAG,CACxC,IAAMQ,EAAOR,EAAU,UAAU,IAAK,KACtC,OAAQZ,GAASA,IAAS,QAAaA,EAAKoB,CAAI,IAAM,WAC/CC,GAAiBT,CAAS,EAAG,CACpC,IAAMM,EAAQ,EAAQN,EAAU,KAChC,MAAO,IAAMM,EAEjBd,GAAkBQ,CAAS,CAC/B,CApBSpD,EAAAgD,GAAA,kBAsBT,SAASzB,GAAkBf,EAAkBsD,EAA0B,CACnE,GAAIA,EAAa,SAAS,SAAW,EACjC,OAAOrD,GAAaD,EAAKsD,EAAa,SAAS,CAAC,CAAC,EAC9C,CACH,IAAMC,EAA8B,CAAA,EAEpC,QAAWrD,KAAWoD,EAAa,SAAU,CACzC,IAAME,EAAqC,CAGvC,IAAKvD,GAAaD,EAAKE,EAAS,EAAI,GAElCuD,EAAQhC,GAAkBvB,CAAO,EACnCuD,IACAD,EAAiB,KAAOhB,GAAeiB,CAAK,GAEhDF,EAAQ,KAAKC,CAAgB,EAGjC,IAAMnC,EAAMrB,EAAI,KAChB,OAAQgC,GAAShC,EAAI,OAAO,aAAaqB,EAAKkC,EAAQ,IAAInD,GAAS,CAC/D,IAAMsD,EAAuB,CACzB,IAAKlE,EAAA,IAAMY,EAAO,IAAI4B,CAAI,EAArB,QAEH2B,EAAOvD,EAAO,KACpB,OAAIuD,IACAD,EAAI,KAAO,IAAMC,EAAK3B,CAAI,GAEvB0B,CACX,CAAC,CAAC,EAEV,CA/BSlE,EAAAuB,GAAA,qBAiCT,SAASE,GAAoBjB,EAAkB4D,EAAqB,CAChE,GAAIA,EAAM,SAAS,SAAW,EAC1B,OAAO3D,GAAaD,EAAK4D,EAAM,SAAS,CAAC,CAAC,EAE9C,IAAML,EAA8B,CAAA,EAEpC,QAAWrD,KAAW0D,EAAM,SAAU,CAClC,IAAMJ,EAAqC,CAGvC,IAAKvD,GAAaD,EAAKE,EAAS,EAAI,GAElCuD,EAAQhC,GAAkBvB,CAAO,EACnCuD,IACAD,EAAiB,KAAOhB,GAAeiB,CAAK,GAEhDF,EAAQ,KAAKC,CAAgB,EAGjC,IAAMK,EAAQ7D,EAAI,KAEZ8D,EAAStE,EAAA,CAACuE,EAAkBC,IAAuB,CACrD,IAAMC,EAAUD,EAAQ,aAAY,EAAG,KAAK,GAAG,EAC/C,MAAO,UAAUD,CAAQ,IAAIE,CAAO,EACxC,EAHe,UAITX,EAAuB9D,EAACwC,GAAShC,EAAI,OAAO,aAAa6D,EAAON,EAAQ,IAAI,CAACnD,EAAQiB,IAAO,CAC9F,IAAMqC,EAAuB,CAAE,IAAKlE,EAAA,IAAM,GAAN,MAAU,EACxCH,EAASW,EAAI,OACnB0D,EAAI,IAAM,IAAK,CAEX,GADAtD,EAAO,IAAI4B,CAAI,EACX,CAAC3C,EAAO,YAAW,EAAI,CACvB,IAAM6E,EAAMJ,EAAOD,EAAOxE,CAAM,EAC3BA,EAAO,gBAAgB,IAAI6E,CAAG,GAE/B7E,EAAO,gBAAgB,IAAI6E,EAAK,CAAA,CAAE,EAEtC,IAAMC,EAAa9E,EAAO,gBAAgB,IAAI6E,CAAG,EAC7C,OAAOC,IAAa9C,CAAG,EAAM,MAE7B8C,EAAW9C,CAAG,EAAI,IAG9B,EACA,IAAMsC,EAAOvD,EAAO,KACpB,OAAIuD,EACAD,EAAI,KAAO,IAAMC,EAAK3B,CAAI,EAE1B0B,EAAI,KAAO,IAAK,CACZ,IAAMU,EAAsB/E,EAAO,gBAAgB,IAAIyE,EAAOD,EAAOxE,CAAM,CAAC,EAE5E,MADc,CAAC+E,IAAsB/C,CAAG,CAE5C,EAEGqC,CACX,CAAC,CAAC,EA7B2B,gBA8BvBW,EAAU7C,GAAKxB,EAAKyB,GAAkBmC,CAAK,EAAGN,EAAc,GAAG,EACrE,OAAQtB,GAAQ,CACZqC,EAAQrC,CAAI,EACPhC,EAAI,OAAO,YAAW,GACvBA,EAAI,OAAO,gBAAgB,OAAO8D,EAAOD,EAAO7D,EAAI,MAAM,CAAC,CAEnE,CACJ,CA9DSR,EAAAyB,GAAA,uBAgET,SAASE,GAAWnB,EAAkB4D,EAAY,CAC9C,IAAML,EAAUK,EAAM,SAAS,IAAIrB,GAAKtC,GAAaD,EAAKuC,CAAC,CAAC,EAC5D,OAAQP,GAASuB,EAAQ,QAAQnD,GAAUA,EAAO4B,CAAI,CAAC,CAC3D,CAHSxC,EAAA2B,GAAA,cAKT,SAASM,GAAkBvB,EAAwB,CAC/C,GAAIgB,GAAQhB,CAAO,EACf,OAAOA,EAAQ,cAGvB,CALSV,EAAAiC,GAAA,qBAOT,SAASd,GAAoBX,EAAkBsE,EAA0BC,EAAWD,EAAS,SAAQ,CACjG,GAAKC,EAUE,GAAI3D,GAAW2D,CAAQ,GAAKzE,GAAayE,EAAS,KAAK,GAAG,EAAG,CAChE,IAAMlD,EAAMrB,EAAI,UAChB,OAAQgC,GAAShC,EAAI,OAAO,QAAQqB,EAAKY,GAAQjC,EAAKuE,EAAS,KAAK,GAAiB,EAAGD,EAAUtC,CAAI,UAC/FpB,GAAW2D,CAAQ,GAAKrC,GAAeqC,EAAS,KAAK,GAAG,EAAG,CAClE,IAAMlD,EAAMrB,EAAI,UACVwE,EAAerC,GAASnC,EAAKuE,EAAS,KAAK,IAAI,IAAI,EACzD,MAAO,IAAMvE,EAAI,OAAO,QAAQqB,EAAKmD,EAAcF,CAAQ,UACpDjE,GAAUkE,CAAQ,EAAG,CAC5B,IAAMlD,EAAMrB,EAAI,UACVyE,EAAUtC,GAASnC,EAAKuE,EAAS,KAAK,EAC5C,MAAO,IAAMvE,EAAI,OAAO,QAAQqB,EAAKoD,EAASH,CAAQ,MAGtD,OAAM,IAAI,MAAM,wCAAwC,MAvB7C,CACX,GAAI,CAACA,EAAS,KAAK,IACf,MAAM,IAAI,MAAM,wCAA0CA,EAAS,KAAK,QAAQ,EAEpF,IAAMI,EAAaC,GAAmBL,EAAS,KAAK,GAAG,EACjDM,EAAiBF,GAAY,SACnC,GAAI,CAACE,EACD,MAAM,IAAI,MAAM,4CAA8
ChD,GAAY0C,EAAS,KAAK,GAAG,CAAC,EAEhG,OAAO3D,GAAoBX,EAAKsE,EAAUM,CAAc,EAgBhE,CA1BSpF,EAAAmB,GAAA,uBA4BT,SAASL,GAAaN,EAAkByE,EAAgB,CACpD,IAAMpD,EAAMrB,EAAI,UACV6E,EAAQ7E,EAAI,OAAOyE,EAAQ,KAAK,EACtC,GAAI,CAACI,EACD,MAAM,IAAI,MAAM,qCAAuCJ,EAAQ,KAAK,EAExE,MAAO,IAAMzE,EAAI,OAAO,QAAQqB,EAAKwD,EAAOJ,CAAO,CACvD,CAPSjF,EAAAc,GAAA,gBAST,SAASkB,GAAKxB,EAAkByD,EAA8BrD,EAAgB0E,EAAwB,CAClG,IAAMnB,EAAOF,GAASjB,GAAeiB,CAAK,EAE1C,GAAI,CAACqB,EACD,GAAInB,EAAM,CACN,IAAMtC,EAAMrB,EAAI,KAChB,OAAQgC,GAAShC,EAAI,OAAO,aAAaqB,EAAK,CAC1C,CACI,IAAK7B,EAAA,IAAMY,EAAO4B,CAAI,EAAjB,OACL,KAAMxC,EAAA,IAAMmE,EAAK3B,CAAI,EAAf,SAEV,CACI,IAAK+C,GAAS,EACd,KAAMvF,EAAA,IAAM,CAACmE,EAAK3B,CAAI,EAAhB,SAEb,MAED,QAAO5B,EAIf,GAAI0E,IAAgB,IAAK,CACrB,IAAMzD,EAAMrB,EAAI,OAChB,OAAQgC,GAAShC,EAAI,OAAO,KAAKqB,EAAK,CAClC,IAAK7B,EAAA,IAAMY,EAAO4B,CAAI,EAAjB,OACL,KAAM2B,EAAO,IAAMA,EAAK3B,CAAI,EAAI,OACnC,UACM8C,IAAgB,IAAK,CAC5B,IAAMzD,EAAMrB,EAAI,OAChB,GAAI2D,EAAM,CACN,IAAME,EAAQ7D,EAAI,KAKlB,OAAQgC,GAAShC,EAAI,OAAO,aAAa6D,EAAO,CAC5C,CACI,IAAKrE,EAAA,IAAMQ,EAAI,OAAO,WAAWqB,EAAK,CAClC,IAAK7B,EAAA,IAAMY,EAAO4B,CAAI,EAAjB,OACR,EAFI,OAGL,KAAMxC,EAAA,IAAMmE,EAAK3B,CAAI,EAAf,SAEV,CACI,IAAK+C,GAAS,EACd,KAAMvF,EAAA,IAAM,CAACmE,EAAK3B,CAAI,EAAhB,SAEb,MAED,QAAQA,GAAShC,EAAI,OAAO,WAAWqB,EAAK,CACxC,IAAK7B,EAAA,IAAMY,EAAO4B,CAAI,EAAjB,OACR,UAEE8C,IAAgB,IAAK,CAC5B,IAAMzD,EAAMrB,EAAI,WAChB,OAAQgC,GAAShC,EAAI,OAAO,SAASqB,EAAK,CACtC,IAAK7B,EAAA,IAAMY,EAAO4B,CAAI,EAAjB,OACL,KAAM2B,EAAO,IAAMA,EAAK3B,CAAI,EAAI,OACnC,OAEDI,GAAkB0C,CAAW,CAErC,CA7DStF,EAAAgC,GAAA,QA+DT,SAASS,GAAQjC,EAAoBE,EAAqC,CACtE,IAAMkD,EAAO4B,GAAYhF,EAAKE,CAAO,EAC/BH,EAAOC,EAAI,MAAM,IAAIoD,CAAI,EAC/B,GAAI,CAACrD,EAAM,MAAM,IAAI,MAAM,SAASqD,CAAI,eAAe,EACvD,OAAOrD,CACX,CALSP,EAAAyC,GAAA,WAOT,SAAS+C,GAAYhF,EAAoBE,EAAqC,CAC1E,GAAIJ,GAAaI,CAAO,EACpB,OAAOA,EAAQ,KACZ,GAAIF,EAAI,UAAU,IAAIE,CAAO,EAChC,OAAOF,EAAI,UAAU,IAAIE,CAAO,EAC7B,CACH,IAAI+E,EAAgB/E,EAChBgF,EAAkBD,EAAK,WACvBE,EAAmBjF,EAAQ,MAC/B,KAAO,CAACJ,GAAaoF,CAAM,IACnBhE,GAAQgE,CAAM,GAAKpE,GAAeoE,CAAM,GAAKlE,GAAiBkE,CAAM,KAEpEC,EADcD,EAAO,SAAS,QAAQD,CAAuB,EAC5C,SAAQ,EAAK,IAAME,GAExCF,EAAOC,EACPA,EAASA,EAAO,WAGpB,OAAAC,EADaD,EACG,KAAO,IAAMC,EAC7BnF,EAAI,UAAU,IAAIE,EAASiF,CAAQ,EAC5BA,EAEf,CAtBS3F,EAAAwF,GAAA,eAwBT,SAAS7C,GAASnC,EAAoBoD,EAAY,CAC9C,IAAMyB,EAAQ7E,EAAI,OAAOoD,CAAI,EAC7B,GAAI,CAACyB,EAAO,MAAM,IAAI,MAAM,UAAUzB,CAAI,eAAe,EACzD,OAAOyB,CACX,CAJSrF,EAAA2C,GAAA,YCtYH,SAAUiD,GAAuBC,EAA6B,CAChE,IAAMC,EAAUD,EAAS,QACnBE,EAAQF,EAAS,OAAO,MACxBG,EAAS,IAAIC,GAAwBJ,CAAQ,EACnD,OAAAK,GAAaJ,EAASE,EAAQD,EAAM,UAAU,EAC9CC,EAAO,SAAQ,EACRA,CACX,CAPgBG,EAAAP,GAAA,0BCIV,SAAUQ,GAAoBC,EAA6B,CAC7D,IAAMC,EAASC,GAAqBF,CAAQ,EAC5C,OAAAC,EAAO,SAAQ,EACRA,CACX,CAJgBE,EAAAJ,GAAA,uBAUV,SAAUG,GAAqBF,EAA6B,CAC9D,IAAMI,EAAUJ,EAAS,QACnBK,EAAQL,EAAS,OAAO,MACxBC,EAAS,IAAIK,GAAcN,CAAQ,EACzC,OAAOO,GAAaH,EAASH,EAAQI,EAAM,UAAU,CACzD,CALgBF,EAAAD,GAAA,wBCAV,IAAOM,GAAP,KAA0B,CAxBhC,MAwBgC,CAAAC,EAAA,4BAE5B,YAAYC,EAAkBC,EAA6B,CACvD,IAAMC,EAAiBC,EAAOC,GAAqBJ,EAAS,EAAK,CAAC,EAC5DK,EAA8B,KAAK,oBAAoBH,CAAc,EACrEI,EAAsB,KAAK,mBAAmBJ,EAAgBG,EAAgBJ,CAAO,EAE3F,OAAAI,EAAe,QAAQE,GAAgB,CACnC,IAAMC,EAAUD,EAAc,QAC1B,OAAOC,GAAY,UAAYA,GAAW,SAAUA,GAAWC,GAAaD,CAAO,EACnFF,EAAO,QAAQC,CAAa,EAE5BD,EAAO,KAAKC,CAAa,CAEjC,CAAC,EAGMD,CACX,CAEU,oBAAoBI,EAA2B,CACrD,OAAOA,EAAM,OAAOC,EAAc,EAAE,OAAOC,GAAK,CAACA,EAAE,QAAQ,EACtD,IAAIC,GAAY,KAAK,mBAAmBA,CAAQ,CAAC,EAAE,QAAO,CACnE,CAEU,mBAAmBA,EAAsB,CAC/C,IAAMC,EAAQC,GAAcF,CAAQ,EAC9BL,EAAU,KAAK,sBAAsBM,CAAK,EAAI,KAAK,qBAAqBA,CAAK,EAAIA,EACjFE,EAAuB,CACzB,KAAMH,EAAS,KACf,QAASL,EACT,YAAa,IAEjB,OAAIK,EAAS,SAETG,EAAU,MAAQP,GAAaK,CAAK,EAAIG,GAAM,QAAU,UAErDD,CACX,CAEU,sBAAsBF,
EAAa,CACzC,OAAIA,EAAM,MAAM,SAAS,GAAG,EAEjB,GACA,GAAAA,EAAM,OAAO,SAAS,KAAK,GAAKA,EAAM,OAAO,SAAS,KAAK,EAM1E,CAEU,qBAAqBA,EAAa,CACxC,IAAMI,EAAc,IAAI,OAAOJ,EAAOA,EAAM,MAAQ,GAAG,EACvD,MAAO,CAACK,EAAMC,KACVF,EAAY,UAAYE,EACLF,EAAY,KAAKC,CAAI,EAGhD,CAEU,mBAAmBT,EAA6BL,EAA6BJ,EAA6B,CAChH,OAAOS,EAEF,OAAOW,EAAY,EACnB,QAAQC,GAAQC,GAAkBD,CAAI,EAAE,OAAOE,EAAS,CAAC,EACzD,SAASZ,GAAKA,EAAE,KAAK,EAAE,QAAO,EAE9B,KAAK,CAACa,EAAGC,IAAMA,EAAE,MAAM,OAASD,EAAE,MAAM,MAAM,EAC9C,IAAIE,GAAW,KAAK,kBAAkBA,EAAStB,EAAgB,EAAQJ,GAAS,eAAgB,CAAC,CAC1G,CAEU,kBAAkB0B,EAAkBtB,EAA6BuB,EAAwB,CAC/F,MAAO,CACH,KAAMD,EAAQ,MACd,QAAS,KAAK,oBAAoBA,EAASC,CAAe,EAC1D,WAAY,KAAK,cAAcD,EAAStB,CAAc,EAE9D,CAEU,oBAAoBsB,EAAkBC,EAAwB,CACpE,OAAOA,EACH,IAAI,OAAOC,GAA0BF,EAAQ,KAAK,CAAC,EACnDA,EAAQ,KAChB,CAEU,cAAcA,EAAkBtB,EAA2B,CACjE,OAAOA,EAAe,OAAO,CAACyB,EAAyBC,IAAS,CAC5D,IAAMvB,EAAUuB,GAAO,QACvB,OAAIvB,GAAS,QAAUwB,GAAe,IAAMxB,EAAQ,OAAS,IAAKmB,EAAQ,KAAK,GAC3EG,EAAW,KAAKC,CAAK,EAElBD,CACX,EAAG,CAAA,CAAE,CACT,GC/FE,IAAOG,GAAP,KAA4B,CAvBlC,MAuBkC,CAAAC,EAAA,8BAE9B,QAAQC,EAAeC,EAAgB,CACnC,IAAIC,EAAuCD,EAAQ,cAInD,GAHIE,GAAiBD,CAAO,IACxBA,EAAUE,GAA0BF,CAAO,GAE3CG,GAAWH,CAAO,EAAG,CACrB,IAAMI,EAAOJ,EAAQ,KAAK,IAC1B,GAAI,CAACI,EACD,MAAM,IAAI,MAAM,yCAAyC,EAE7D,OAAO,KAAK,aAAaA,EAAMN,EAAOC,CAAO,EAEjD,OAAOD,CACX,CAGU,aAAaM,EAAoBN,EAAeC,EAAgB,OACtE,OAAQK,EAAK,KAAK,YAAW,EAAI,CAC7B,IAAK,MAAO,OAAOC,GAAe,WAAWP,CAAK,EAClD,IAAK,SAAU,OAAOO,GAAe,cAAcP,CAAK,EACxD,IAAK,KAAM,OAAOO,GAAe,UAAUP,CAAK,EAEpD,QAAQQ,EAAAC,GAAYH,CAAI,KAAC,MAAAE,IAAA,OAAA,OAAAA,EAAE,YAAW,EAAI,CACtC,IAAK,SAAU,OAAOD,GAAe,cAAcP,CAAK,EACxD,IAAK,UAAW,OAAOO,GAAe,eAAeP,CAAK,EAC1D,IAAK,SAAU,OAAOO,GAAe,cAAcP,CAAK,EACxD,IAAK,OAAQ,OAAOO,GAAe,YAAYP,CAAK,EACpD,QAAS,OAAOA,EAExB,GAGaO,IAAjB,SAAiBA,EAAc,CAE3B,SAAgBG,EAAcV,EAAa,CACvC,IAAIW,EAAS,GACb,QAASC,EAAI,EAAGA,EAAIZ,EAAM,OAAS,EAAGY,IAAK,CACvC,IAAMC,EAAIb,EAAM,OAAOY,CAAC,EACxB,GAAIC,IAAM,KAAM,CACZ,IAAMC,EAAKd,EAAM,OAAO,EAAEY,CAAC,EAC3BD,GAAUI,EAAuBD,CAAE,OAEnCH,GAAUE,EAGlB,OAAOF,CACX,CAZgBZ,EAAAW,EAAA,iBAAAH,EAAA,cAAaG,EAc7B,SAASK,EAAuBC,EAAY,CACxC,OAAQA,EAAM,CACV,IAAK,IAAK,MAAO,KACjB,IAAK,IAAK,MAAO,KACjB,IAAK,IAAK,MAAO;EACjB,IAAK,IAAK,MAAO,KACjB,IAAK,IAAK,MAAO,IACjB,IAAK,IAAK,MAAO,KACjB,IAAK,IAAK,MAAO,KACjB,QAAS,OAAOA,EAExB,CAXSjB,EAAAgB,EAAA,0BAaT,SAAgBE,EAAUjB,EAAa,CACnC,OAAIA,EAAM,OAAO,CAAC,IAAM,IACbA,EAAM,UAAU,CAAC,EAEjBA,CAEf,CANgBD,EAAAkB,EAAA,aAAAV,EAAA,UAASU,EAQzB,SAAgBC,EAAWlB,EAAa,CACpC,OAAO,SAASA,CAAK,CACzB,CAFgBD,EAAAmB,EAAA,cAAAX,EAAA,WAAUW,EAI1B,SAAgBC,EAAcnB,EAAa,CACvC,OAAO,OAAOA,CAAK,CACvB,CAFgBD,EAAAoB,EAAA,iBAAAZ,EAAA,cAAaY,EAI7B,SAAgBC,EAAYpB,EAAa,CACrC,OAAO,IAAI,KAAKA,CAAK,CACzB,CAFgBD,EAAAqB,EAAA,eAAAb,EAAA,YAAWa,EAI3B,SAAgBC,EAAcrB,EAAa,CACvC,OAAO,OAAOA,CAAK,CACvB,CAFgBD,EAAAsB,EAAA,iBAAAd,EAAA,cAAac,EAI7B,SAAgBC,EAAetB,EAAa,CACxC,OAAOA,EAAM,YAAW,IAAO,MACnC,CAFgBD,EAAAuB,EAAA,kBAAAf,EAAA,eAAce,CAIlC,GAzDiBf,KAAAA,GAAc,CAAA,EAAA,ECzD/B,IAAAgB,EAAA,GAOAC,EAAAD,EAAc,YCOR,SAAUE,IAAa,CACzB,OAAO,IAAI,QAAQC,GAAU,CAGrB,OAAO,aAAiB,IACxB,WAAWA,EAAS,CAAC,EAErB,aAAaA,CAAO,CAE5B,CAAC,CACL,CAVgBC,EAAAF,GAAA,iBAYhB,IAAIG,GAAW,EACXC,GAA2B,GAKzB,SAAUC,IAAwB,CACpC,OAAAF,GAAW,KAAK,IAAG,EACZ,IAAI,yBACf,CAHgBD,EAAAG,GAAA,4BASV,SAAUC,GAAsBC,EAAc,CAChDH,GAA2BG,CAC/B,CAFgBL,EAAAI,GAAA,yBAST,IAAME,GAAqB,OAAO,oBAAoB,EAMvD,SAAUC,GAAqBC,EAAY,CAC7C,OAAOA,IAAQF,EACnB,CAFgBN,EAAAO,GAAA,wBAehB,eAAsBE,GAAkBC,EAAwB,CAC5D,GAAIA,IAAU,oBAAkB,KAE5B,OAEJ,IAAMC,EAAU,KAAK,IAAG,EAKxB,GAJIA,EAAUV,IAAYC,KACtBD,GAAWU,EACX,MAAMb,GAAa,GAEnBY,EAAM,wBACN,MAAMJ,EAEd,CAbsBN,EAAAS,GAAA,qBAmBhB,IAAOG,GAAP,KAAe,CA1FrB,MA0FqB,CAAAZ,EAAA,iBAArB,aAAA,CAII,KAAA,QAAU,IAAI,QAAW,CAACD,EAASc,IAAU,CAC
zC,KAAK,QAAWC,IACZf,EAAQe,CAAG,EACJ,MAEX,KAAK,OAAUN,IACXK,EAAOL,CAAG,EACH,KAEf,CAAC,CACL,GCnGA,IAAMO,GAAN,MAAMC,CAAiB,CALvB,MAKuB,CAAAC,EAAA,yBACnB,YAAYC,EAAKC,EAAYC,EAASC,EAAS,CAC3C,KAAK,KAAOH,EACZ,KAAK,YAAcC,EACnB,KAAK,SAAWC,EAChB,KAAK,SAAWC,EAChB,KAAK,aAAe,MACxB,CACA,IAAI,KAAM,CACN,OAAO,KAAK,IAChB,CACA,IAAI,YAAa,CACb,OAAO,KAAK,WAChB,CACA,IAAI,SAAU,CACV,OAAO,KAAK,QAChB,CACA,QAAQC,EAAO,CACX,GAAIA,EAAO,CACP,IAAMC,EAAQ,KAAK,SAASD,EAAM,KAAK,EACjCE,EAAM,KAAK,SAASF,EAAM,GAAG,EACnC,OAAO,KAAK,SAAS,UAAUC,EAAOC,CAAG,CAC7C,CACA,OAAO,KAAK,QAChB,CACA,OAAOC,EAASL,EAAS,CACrB,QAAWM,KAAUD,EACjB,GAAIT,EAAiB,cAAcU,CAAM,EAAG,CAExC,IAAMJ,EAAQK,GAAmBD,EAAO,KAAK,EAEvCE,EAAc,KAAK,SAASN,EAAM,KAAK,EACvCO,EAAY,KAAK,SAASP,EAAM,GAAG,EACzC,KAAK,SAAW,KAAK,SAAS,UAAU,EAAGM,CAAW,EAAIF,EAAO,KAAO,KAAK,SAAS,UAAUG,EAAW,KAAK,SAAS,MAAM,EAE/H,IAAMC,EAAY,KAAK,IAAIR,EAAM,MAAM,KAAM,CAAC,EACxCS,EAAU,KAAK,IAAIT,EAAM,IAAI,KAAM,CAAC,EACtCU,EAAc,KAAK,aACjBC,EAAmBC,GAAmBR,EAAO,KAAM,GAAOE,CAAW,EAC3E,GAAIG,EAAUD,IAAcG,EAAiB,OACzC,QAASE,EAAI,EAAGC,EAAMH,EAAiB,OAAQE,EAAIC,EAAKD,IACpDH,EAAYG,EAAIL,EAAY,CAAC,EAAIG,EAAiBE,CAAC,OAInDF,EAAiB,OAAS,IAC1BD,EAAY,OAAOF,EAAY,EAAGC,EAAUD,EAAW,GAAGG,CAAgB,EAG1E,KAAK,aAAeD,EAAcA,EAAY,MAAM,EAAGF,EAAY,CAAC,EAAE,OAAOG,EAAkBD,EAAY,MAAMD,EAAU,CAAC,CAAC,EAGrI,IAAMM,EAAOX,EAAO,KAAK,QAAUG,EAAYD,GAC/C,GAAIS,IAAS,EACT,QAASF,EAAIL,EAAY,EAAIG,EAAiB,OAAQG,EAAMJ,EAAY,OAAQG,EAAIC,EAAKD,IACrFH,EAAYG,CAAC,EAAIH,EAAYG,CAAC,EAAIE,CAG9C,SACSrB,EAAiB,OAAOU,CAAM,EACnC,KAAK,SAAWA,EAAO,KACvB,KAAK,aAAe,WAGpB,OAAM,IAAI,MAAM,+BAA+B,EAGvD,KAAK,SAAWN,CACpB,CACA,gBAAiB,CACb,OAAI,KAAK,eAAiB,SACtB,KAAK,aAAec,GAAmB,KAAK,SAAU,EAAI,GAEvD,KAAK,YAChB,CACA,WAAWI,EAAQ,CACfA,EAAS,KAAK,IAAI,KAAK,IAAIA,EAAQ,KAAK,SAAS,MAAM,EAAG,CAAC,EAC3D,IAAMN,EAAc,KAAK,eAAe,EACpCO,EAAM,EAAGC,EAAOR,EAAY,OAChC,GAAIQ,IAAS,EACT,MAAO,CAAE,KAAM,EAAG,UAAWF,CAAO,EAExC,KAAOC,EAAMC,GAAM,CACf,IAAMC,EAAM,KAAK,OAAOF,EAAMC,GAAQ,CAAC,EACnCR,EAAYS,CAAG,EAAIH,EACnBE,EAAOC,EAGPF,EAAME,EAAM,CAEpB,CAGA,IAAMC,EAAOH,EAAM,EACnB,OAAAD,EAAS,KAAK,gBAAgBA,EAAQN,EAAYU,CAAI,CAAC,EAChD,CAAE,KAAAA,EAAM,UAAWJ,EAASN,EAAYU,CAAI,CAAE,CACzD,CACA,SAASC,EAAU,CACf,IAAMX,EAAc,KAAK,eAAe,EACxC,GAAIW,EAAS,MAAQX,EAAY,OAC7B,OAAO,KAAK,SAAS,OAEpB,GAAIW,EAAS,KAAO,EACrB,MAAO,GAEX,IAAMC,EAAaZ,EAAYW,EAAS,IAAI,EAC5C,GAAIA,EAAS,WAAa,EACtB,OAAOC,EAEX,IAAMC,EAAkBF,EAAS,KAAO,EAAIX,EAAY,OAAUA,EAAYW,EAAS,KAAO,CAAC,EAAI,KAAK,SAAS,OAC3GL,EAAS,KAAK,IAAIM,EAAaD,EAAS,UAAWE,CAAc,EACvE,OAAO,KAAK,gBAAgBP,EAAQM,CAAU,CAClD,CACA,gBAAgBN,EAAQM,EAAY,CAChC,KAAON,EAASM,GAAcE,GAAM,KAAK,SAAS,WAAWR,EAAS,CAAC,CAAC,GACpEA,IAEJ,OAAOA,CACX,CACA,IAAI,WAAY,CACZ,OAAO,KAAK,eAAe,EAAE,MACjC,CACA,OAAO,cAAcS,EAAO,CACxB,IAAMC,EAAYD,EAClB,OAAkCC,GAAc,MAC5C,OAAOA,EAAU,MAAS,UAAYA,EAAU,QAAU,SACzDA,EAAU,cAAgB,QAAa,OAAOA,EAAU,aAAgB,SACjF,CACA,OAAO,OAAOD,EAAO,CACjB,IAAMC,EAAYD,EAClB,OAAkCC,GAAc,MAC5C,OAAOA,EAAU,MAAS,UAAYA,EAAU,QAAU,QAAaA,EAAU,cAAgB,MACzG,CACJ,EACWC,IACV,SAAUA,EAAc,CASrB,SAASC,EAAOhC,EAAKC,EAAYC,EAASC,EAAS,CAC/C,OAAO,IAAIN,GAAiBG,EAAKC,EAAYC,EAASC,CAAO,CACjE,CAFSJ,EAAAiC,EAAA,UAGTD,EAAa,OAASC,EAUtB,SAASC,EAAOC,EAAU3B,EAASL,EAAS,CACxC,GAAIgC,aAAoBrC,GACpB,OAAAqC,EAAS,OAAO3B,EAASL,CAAO,EACzBgC,EAGP,MAAM,IAAI,MAAM,sEAAsE,CAE9F,CARSnC,EAAAkC,EAAA,UASTF,EAAa,OAASE,EACtB,SAASE,EAAWD,EAAUE,EAAO,CACjC,IAAMC,EAAOH,EAAS,QAAQ,EACxBI,EAAcC,GAAUH,EAAM,IAAII,EAAiB,EAAG,CAACC,EAAGC,IAAM,CAClE,IAAMvB,EAAOsB,EAAE,MAAM,MAAM,KAAOC,EAAE,MAAM,MAAM,KAChD,OAAIvB,IAAS,EACFsB,EAAE,MAAM,MAAM,UAAYC,EAAE,MAAM,MAAM,UAE5CvB,CACX,CAAC,EACGwB,EAAqB,EACnBC,EAAQ,CAAC,EACf,QAAWC,KAAKP,EAAa,CACzB,IAAM5B,EAAcwB,EAAS,SAASW,EAAE,MAAM,KAAK,EACnD,GAAInC,EAAciC,EACd,MAAM,IAAI,MAAM,
kBAAkB,EAE7BjC,EAAciC,GACnBC,EAAM,KAAKP,EAAK,UAAUM,EAAoBjC,CAAW,CAAC,EAE1DmC,EAAE,QAAQ,QACVD,EAAM,KAAKC,EAAE,OAAO,EAExBF,EAAqBT,EAAS,SAASW,EAAE,MAAM,GAAG,CACtD,CACA,OAAAD,EAAM,KAAKP,EAAK,OAAOM,CAAkB,CAAC,EACnCC,EAAM,KAAK,EAAE,CACxB,CA1BS7C,EAAAoC,EAAA,cA2BTJ,EAAa,WAAaI,CAC9B,GAAGJ,KAAiBA,GAAe,CAAC,EAAE,EACtC,SAASQ,GAAUO,EAAMC,EAAS,CAC9B,GAAID,EAAK,QAAU,EAEf,OAAOA,EAEX,IAAME,EAAKF,EAAK,OAAS,EAAK,EACxBG,EAAOH,EAAK,MAAM,EAAGE,CAAC,EACtBE,EAAQJ,EAAK,MAAME,CAAC,EAC1BT,GAAUU,EAAMF,CAAO,EACvBR,GAAUW,EAAOH,CAAO,EACxB,IAAII,EAAU,EACVC,EAAW,EACXnC,EAAI,EACR,KAAOkC,EAAUF,EAAK,QAAUG,EAAWF,EAAM,QACjCH,EAAQE,EAAKE,CAAO,EAAGD,EAAME,CAAQ,CAAC,GACvC,EAEPN,EAAK7B,GAAG,EAAIgC,EAAKE,GAAS,EAI1BL,EAAK7B,GAAG,EAAIiC,EAAME,GAAU,EAGpC,KAAOD,EAAUF,EAAK,QAClBH,EAAK7B,GAAG,EAAIgC,EAAKE,GAAS,EAE9B,KAAOC,EAAWF,EAAM,QACpBJ,EAAK7B,GAAG,EAAIiC,EAAME,GAAU,EAEhC,OAAON,CACX,CA/BS/C,EAAAwC,GAAA,aAgCT,SAASvB,GAAmBqB,EAAMgB,EAAeC,EAAa,EAAG,CAC7D,IAAMC,EAASF,EAAgB,CAACC,CAAU,EAAI,CAAC,EAC/C,QAAS,EAAI,EAAG,EAAIjB,EAAK,OAAQ,IAAK,CAClC,IAAMmB,EAAKnB,EAAK,WAAW,CAAC,EACxBT,GAAM4B,CAAE,IACJA,IAAO,IAAoC,EAAI,EAAInB,EAAK,QAAUA,EAAK,WAAW,EAAI,CAAC,IAAM,IAC7F,IAEJkB,EAAO,KAAKD,EAAa,EAAI,CAAC,EAEtC,CACA,OAAOC,CACX,CAZSxD,EAAAiB,GAAA,sBAaT,SAASY,GAAM6B,EAAM,CACjB,OAAOA,IAAS,IAAoCA,IAAS,EACjE,CAFS1D,EAAA6B,GAAA,SAGT,SAASnB,GAAmBL,EAAO,CAC/B,IAAMC,EAAQD,EAAM,MACdE,EAAMF,EAAM,IAClB,OAAIC,EAAM,KAAOC,EAAI,MAASD,EAAM,OAASC,EAAI,MAAQD,EAAM,UAAYC,EAAI,UACpE,CAAE,MAAOA,EAAK,IAAKD,CAAM,EAE7BD,CACX,CAPSL,EAAAU,GAAA,sBAQT,SAAS+B,GAAkBkB,EAAU,CACjC,IAAMtD,EAAQK,GAAmBiD,EAAS,KAAK,EAC/C,OAAItD,IAAUsD,EAAS,MACZ,CAAE,QAASA,EAAS,QAAS,MAAAtD,CAAM,EAEvCsD,CACX,CANS3D,EAAAyC,GAAA,8DCvOT,SAASmB,EAAWC,EAAAA,CAClB,GAAoB,OAATA,GAAS,SAClB,MAAM,IAAIC,UAAU,mCAAqCC,KAAKC,UAAUH,CAAAA,CAAAA,CAE5E,CAJSD,EAAAA,EAAAA,KAOT,SAASK,EAAqBJ,EAAMK,EAAAA,CAMlC,QADIC,EAJAC,EAAM,GACNC,EAAoB,EACpBC,EAAAA,GACAC,EAAO,EAEFC,EAAI,EAAGA,GAAKX,EAAKY,OAAAA,EAAUD,EAAG,CACrC,GAAIA,EAAIX,EAAKY,OACXN,EAAON,EAAKa,WAAWF,CAAAA,MACpB,CAAA,GAAIL,IAAS,GAChB,MAEAA,EAAO,EAAQ,CACjB,GAAIA,IAAS,GAAU,CACrB,GAAIG,EAAAA,IAAcE,EAAI,GAAKD,IAAS,GAE7B,GAAID,IAAcE,EAAI,GAAKD,IAAS,EAAG,CAC5C,GAAIH,EAAIK,OAAS,GAAKJ,IAAsB,GAAKD,EAAIM,WAAWN,EAAIK,OAAS,CAAA,IAAO,IAAYL,EAAIM,WAAWN,EAAIK,OAAS,CAAA,IAAO,IACjI,GAAIL,EAAIK,OAAS,EAAG,CAClB,IAAIE,EAAiBP,EAAIQ,YAAY,GAAA,EACrC,GAAID,IAAmBP,EAAIK,OAAS,EAAG,CACjCE,IADiC,IAEnCP,EAAM,GACNC,EAAoB,GAGpBA,GADAD,EAAMA,EAAIS,MAAM,EAAGF,CAAAA,GACKF,OAAS,EAAIL,EAAIQ,YAAY,GAAA,EAEvDN,EAAYE,EACZD,EAAO,EACP,QACF,CACF,SAAWH,EAAIK,SAAW,GAAKL,EAAIK,SAAW,EAAG,CAC/CL,EAAM,GACNC,EAAoB,EACpBC,EAAYE,EACZD,EAAO,EACP,QACF,EAEEL,IACEE,EAAIK,OAAS,EACfL,GAAO,MAEPA,EAAM,KACRC,EAAoB,EAExB,MACMD,EAAIK,OAAS,EACfL,GAAO,IAAMP,EAAKgB,MAAMP,EAAY,EAAGE,CAAAA,EAEvCJ,EAAMP,EAAKgB,MAAMP,EAAY,EAAGE,CAAAA,EAClCH,EAAoBG,EAAIF,EAAY,EAEtCA,EAAYE,EACZD,EAAO,CACT,MAAWJ,IAAS,IAAYI,IAArBJ,GAAqBI,EAC5BA,EAEFA,EAAAA,EAEJ,CACA,OAAOH,CACT,CA/DSH,EAAAA,EAAAA,KA6ET,IAAIa,EAAQ,CAEVC,QAASC,EAAA,UAAA,CAKP,QAFIC,EAFAC,EAAe,GACfC,EAAAA,GAGKX,EAAIY,UAAUX,OAAS,EAAGD,GAAAA,IAAM,CAAMW,EAAkBX,IAAK,CACpE,IAAIX,EACAW,GAAK,EACPX,EAAOuB,UAAUZ,CAAAA,GAEbS,IAFaT,SAGfS,EAAMI,QAAQJ,IAAAA,GAChBpB,EAAOoB,GAGTrB,EAAWC,CAAAA,EAGPA,EAAKY,SAAW,IAIpBS,EAAerB,EAAO,IAAMqB,EAC5BC,EAAmBtB,EAAKa,WAAW,CAAA,IAAO,GAC5C,CAQA,OAFAQ,EAAejB,EAAqBiB,EAAAA,CAAeC,CAAAA,EAE/CA,EACED,EAAaT,OAAS,EACjB,IAAMS,EAEN,IACAA,EAAaT,OAAS,EACxBS,EAEA,GAEX,EA1CS,WA4CTI,UAAWN,EAAA,SAAmBnB,EAAAA,CAG5B,GAFAD,EAAWC,CAAAA,EAEPA,EAAKY,SAAW,EAAG,MAAO,IAE9B,IAAIc,EAAa1B,EAAKa,WAAW,CAAA,IAAO,GACpCc,EAAoB3B,EAAKa,WAAWb,EAAKY,OAAS,CAAA,IAAO,GAQ7D,OALAZ,EAAOI,EAAqBJ,EAAAA,CAAO0B,CAAAA,GAE1Bd,SAAW
,GAAMc,IAAY1B,EAAO,KACzCA,EAAKY,OAAS,GAAKe,IAAmB3B,GAAQ,KAE9C0B,EAAmB,IAAM1B,EACtBA,CACT,EAhBW,aAkBX0B,WAAYP,EAAA,SAAoBnB,EAAAA,CAE9B,OADAD,EAAWC,CAAAA,EACJA,EAAKY,OAAS,GAAKZ,EAAKa,WAAW,CAAA,IAAO,EACnD,EAHY,cAKZe,KAAMT,EAAA,UAAA,CACJ,GAAII,UAAUX,SAAW,EACvB,MAAO,IAET,QADIiB,EACKlB,EAAI,EAAGA,EAAIY,UAAUX,OAAAA,EAAUD,EAAG,CACzC,IAAImB,EAAMP,UAAUZ,CAAAA,EACpBZ,EAAW+B,CAAAA,EACPA,EAAIlB,OAAS,IACXiB,IADW,OAEbA,EAASC,EAETD,GAAU,IAAMC,EAEtB,CACA,OAAID,IAAJ,OACS,IACFZ,EAAMQ,UAAUI,CAAAA,CACzB,EAjBM,QAmBNE,SAAUZ,EAAA,SAAkBa,EAAMC,EAAAA,CAShC,GARAlC,EAAWiC,CAAAA,EACXjC,EAAWkC,CAAAA,EAEPD,IAASC,IAEbD,EAAOf,EAAMC,QAAQc,CAAAA,MACrBC,EAAKhB,EAAMC,QAAQe,CAAAA,GAEF,MAAO,GAIxB,QADIC,EAAY,EACTA,EAAYF,EAAKpB,QAClBoB,EAAKnB,WAAWqB,CAAAA,IAAe,GAAfA,EADYA,EAAAA,CASlC,QALIC,EAAUH,EAAKpB,OACfwB,EAAUD,EAAUD,EAGpBG,EAAU,EACPA,EAAUJ,EAAGrB,QACdqB,EAAGpB,WAAWwB,CAAAA,IAAa,GAAbA,EADUA,EAAAA,CAW9B,QANIC,EADQL,EAAGrB,OACKyB,EAGhBzB,EAASwB,EAAUE,EAAQF,EAAUE,EACrCC,EAAAA,GACA5B,EAAI,EACDA,GAAKC,EAAAA,EAAUD,EAAG,CACvB,GAAIA,IAAMC,EAAQ,CAChB,GAAI0B,EAAQ1B,EAAQ,CAClB,GAAIqB,EAAGpB,WAAWwB,EAAU1B,CAAAA,IAAO,GAGjC,OAAOsB,EAAGjB,MAAMqB,EAAU1B,EAAI,CAAA,EACzB,GAAIA,IAAM,EAGf,OAAOsB,EAAGjB,MAAMqB,EAAU1B,CAAAA,CAE9B,MAAWyB,EAAUxB,IACfoB,EAAKnB,WAAWqB,EAAYvB,CAAAA,IAAO,GAGrC4B,EAAgB5B,EACPA,IAAM,IAGf4B,EAAgB,IAGpB,KACF,CACA,IAAIC,EAAWR,EAAKnB,WAAWqB,EAAYvB,CAAAA,EAE3C,GAAI6B,IADSP,EAAGpB,WAAWwB,EAAU1B,CAAAA,EAEnC,MACO6B,IAAa,KACpBD,EAAgB5B,EACpB,CAEA,IAAI8B,EAAM,GAGV,IAAK9B,EAAIuB,EAAYK,EAAgB,EAAG5B,GAAKwB,EAAAA,EAAWxB,EAClDA,IAAMwB,GAAWH,EAAKnB,WAAWF,CAAAA,IAAO,KACtC8B,EAAI7B,SAAW,EACjB6B,GAAO,KAEPA,GAAO,OAMb,OAAIA,EAAI7B,OAAS,EACR6B,EAAMR,EAAGjB,MAAMqB,EAAUE,CAAAA,GAEhCF,GAAWE,EACPN,EAAGpB,WAAWwB,CAAAA,IAAa,IAAbA,EACdA,EACGJ,EAAGjB,MAAMqB,CAAAA,EAEpB,EAxFU,YA0FVK,UAAWvB,EAAA,SAAmBnB,EAAAA,CAC5B,OAAOA,CACT,EAFW,aAIX2C,QAASxB,EAAA,SAAiBnB,EAAAA,CAExB,GADAD,EAAWC,CAAAA,EACPA,EAAKY,SAAW,EAAG,MAAO,IAK9B,QAJIN,EAAON,EAAKa,WAAW,CAAA,EACvB+B,EAAUtC,IAAS,GACnBuC,EAAAA,GACAC,EAAAA,GACKnC,EAAIX,EAAKY,OAAS,EAAGD,GAAK,EAAA,EAAKA,EAEtC,IADAL,EAAON,EAAKa,WAAWF,CAAAA,KACV,IACT,GAAA,CAAKmC,EAAc,CACjBD,EAAMlC,EACN,KACF,OAGFmC,EAAAA,GAIJ,OAAID,IAAJ,GAAuBD,EAAU,IAAM,IACnCA,GAAWC,IAAQ,EAAU,KAC1B7C,EAAKgB,MAAM,EAAG6B,CAAAA,CACvB,EAvBS,WAyBTE,SAAU5B,EAAA,SAAkBnB,EAAMgD,EAAAA,CAChC,GAAIA,IAAJ,QAAwC,OAARA,GAAQ,SAAU,MAAM,IAAI/C,UAAU,iCAAA,EACtEF,EAAWC,CAAAA,EAEX,IAGIW,EAHAsC,EAAQ,EACRJ,EAAAA,GACAC,EAAAA,GAGJ,GAAIE,IAAJ,QAAyBA,EAAIpC,OAAS,GAAKoC,EAAIpC,QAAUZ,EAAKY,OAAQ,CACpE,GAAIoC,EAAIpC,SAAWZ,EAAKY,QAAUoC,IAAQhD,EAAM,MAAO,GACvD,IAAIkD,EAASF,EAAIpC,OAAS,EACtBuC,EAAAA,GACJ,IAAKxC,EAAIX,EAAKY,OAAS,EAAGD,GAAK,EAAA,EAAKA,EAAG,CACrC,IAAIL,EAAON,EAAKa,WAAWF,CAAAA,EAC3B,GAAIL,IAAS,IAGT,GAAA,CAAKwC,EAAc,CACjBG,EAAQtC,EAAI,EACZ,KACF,OAEEwC,IAFF,KAKAL,EAAAA,GACAK,EAAmBxC,EAAI,GAErBuC,GAAU,IAER5C,IAAS0C,EAAInC,WAAWqC,CAAAA,EACR,EAAZA,GADoBA,KAIxBL,EAAMlC,IAKRuC,EAAAA,GACAL,EAAMM,GAId,CAGA,OADIF,IAAUJ,EAAKA,EAAMM,EAA0BN,IAA1BM,KAAsCN,EAAM7C,EAAKY,QACnEZ,EAAKgB,MAAMiC,EAAOJ,CAAAA,CAC3B,CACE,IAAKlC,EAAIX,EAAKY,OAAS,EAAGD,GAAK,EAAA,EAAKA,EAClC,GAAIX,EAAKa,WAAWF,CAAAA,IAAO,IAGvB,GAAA,CAAKmC,EAAc,CACjBG,EAAQtC,EAAI,EACZ,KACF,OACSkC,IADT,KAIFC,EAAAA,GACAD,EAAMlC,EAAI,GAId,OAAIkC,IAAJ,GAAuB,GAChB7C,EAAKgB,MAAMiC,EAAOJ,CAAAA,CAE7B,EArEU,YAuEVO,QAASjC,EAAA,SAAiBnB,EAAAA,CACxBD,EAAWC,CAAAA,EAQX,QAPIqD,EAAAA,GACAC,EAAY,EACZT,EAAAA,GACAC,EAAAA,GAGAS,EAAc,EACT5C,EAAIX,EAAKY,OAAS,EAAGD,GAAK,EAAA,EAAKA,EAAG,CACzC,IAAIL,EAAON,EAAKa,WAAWF,CAAAA,EAC3B,GAAIL,IAAS,GASTuC,IATAvC,KAYFwC,EAAAA,GACAD,EAAMlC,EAAI,GAERL,IAAS,GAEL+C,IAFJ/C,GAGE+C,EAAW1C,EACJ4C,IAAgB,IACvBA,EAAc,GACTF,IADS,KAIlBE,E
AAAA,YArBE,CAAKT,EAAc,CACjBQ,EAAY3C,EAAI,EAChB,KACF,CAoBN,CAEA,OAAI0C,IAAJ,IAAuBR,IAAnBQ,IAEAE,IAAgB,GAEhBA,IAAgB,GAAKF,IAAaR,EAAM,GAAKQ,IAAaC,EAAY,EACjE,GAEFtD,EAAKgB,MAAMqC,EAAUR,CAAAA,CAC9B,EA/CS,WAiDTW,OAAQrC,EAAA,SAAgBsC,EAAAA,CACtB,GAAIA,IAAe,MAA8B,OAAfA,GAAe,SAC/C,MAAM,IAAIxD,UAAU,mEAAA,OAA4EwD,CAAAA,EAElG,OAvVJ,SAAiBC,EAAKD,EAAAA,CACpB,IAAIE,EAAMF,EAAWE,KAAOF,EAAWG,KACnCC,EAAOJ,EAAWI,OAASJ,EAAWK,MAAQ,KAAOL,EAAWT,KAAO,IAC3E,OAAKW,EAGDA,IAAQF,EAAWG,KACdD,EAAME,EAERF,EA8UU,IA9UEE,EALVA,CAMX,EA6UmB,EAAKJ,CAAAA,CACtB,EALQ,UAORM,MAAO5C,EAAA,SAAenB,EAAAA,CACpBD,EAAWC,CAAAA,EAEX,IAAIgE,EAAM,CAAEJ,KAAM,GAAID,IAAK,GAAIE,KAAM,GAAIb,IAAK,GAAIc,KAAM,EAAA,EACxD,GAAI9D,EAAKY,SAAW,EAAG,OAAOoD,EAC9B,IAEIf,EAFA3C,EAAON,EAAKa,WAAW,CAAA,EACvBa,EAAapB,IAAS,GAEtBoB,GACFsC,EAAIJ,KAAO,IACXX,EAAQ,GAERA,EAAQ,EAaV,QAXII,EAAAA,GACAC,EAAY,EACZT,EAAAA,GACAC,EAAAA,GACAnC,EAAIX,EAAKY,OAAS,EAIlB2C,EAAc,EAGX5C,GAAKsC,EAAAA,EAAStC,EAEnB,IADAL,EAAON,EAAKa,WAAWF,CAAAA,KACV,GASTkC,IAVmBlC,KAarBmC,EAAAA,GACAD,EAAMlC,EAAI,GAERL,IAAS,GAEL+C,IAFJ/C,GAEqB+C,EAAW1C,EAAW4C,IAAgB,IAAGA,EAAc,GACnEF,IADmE,KAI9EE,EAAAA,YAlBE,CAAKT,EAAc,CACjBQ,EAAY3C,EAAI,EAChB,KACF,CAwCN,OArBI0C,IAqBJ,IArBuBR,IAAnBQ,IAEJE,IAAgB,GAEhBA,IAAgB,GAAKF,IAAaR,EAAM,GAAKQ,IAAaC,EAAY,EAChET,IADgE,KAE/BmB,EAAIH,KAAOG,EAAIF,KAA9CR,IAAc,GAAK5B,EAAkC1B,EAAKgB,MAAM,EAAG6B,CAAAA,EAAgC7C,EAAKgB,MAAMsC,EAAWT,CAAAA,IAG3HS,IAAc,GAAK5B,GACrBsC,EAAIF,KAAO9D,EAAKgB,MAAM,EAAGqC,CAAAA,EACzBW,EAAIH,KAAO7D,EAAKgB,MAAM,EAAG6B,CAAAA,IAEzBmB,EAAIF,KAAO9D,EAAKgB,MAAMsC,EAAWD,CAAAA,EACjCW,EAAIH,KAAO7D,EAAKgB,MAAMsC,EAAWT,CAAAA,GAEnCmB,EAAIhB,IAAMhD,EAAKgB,MAAMqC,EAAUR,CAAAA,GAG7BS,EAAY,EAAGU,EAAIL,IAAM3D,EAAKgB,MAAM,EAAGsC,EAAY,CAAA,EAAY5B,IAAYsC,EAAIL,IAAM,KAElFK,CACT,EA1EO,SA4EPN,IAAK,IACLO,UAAW,IACXC,MAAO,KACPjD,MAAO,IAAA,EAGTA,EAAMA,MAAQA,EAEdkD,EAAOC,QAAUnD,CAAAA,CAAAA,EC/gBboD,EAA2B,CAAC,EAGhC,SAASC,EAAoBC,EAAAA,CAE5B,IAAIC,EAAeH,EAAyBE,CAAAA,EAC5C,GAAIC,IAAJ,OACC,OAAOA,EAAaJ,QAGrB,IAAID,EAASE,EAAyBE,CAAAA,EAAY,CAGjDH,QAAS,CAAC,CAAA,EAOX,OAHAK,EAAoBF,CAAAA,EAAUJ,EAAQA,EAAOC,QAASE,CAAAA,EAG/CH,EAAOC,OACf,CAlBSE,EAAAA,EAAAA,KCHTA,EAAoBI,EAAI,CAACN,EAASO,IAAAA,CACjC,QAAQC,KAAOD,EACXL,EAAoBO,EAAEF,EAAYC,CAAAA,GAAAA,CAASN,EAAoBO,EAAET,EAASQ,CAAAA,GAC5EE,OAAOC,eAAeX,EAASQ,EAAK,CAAEI,WAAAA,GAAkBC,IAAKN,EAAWC,CAAAA,CAAAA,CAAAA,CAE1E,ECNDN,EAAoBO,EAAI,CAACK,EAAKC,IAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,EAAKC,CAAAA,ECClFb,EAAoBiB,EAAKnB,GAAAA,CACH,OAAXoB,OAAW,KAAeA,OAAOC,aAC1CX,OAAOC,eAAeX,EAASoB,OAAOC,YAAa,CAAEC,MAAO,QAAA,CAAA,EAE7DZ,OAAOC,eAAeX,EAAS,aAAc,CAAEsB,MAAAA,EAAO,CAAA,CAAO,EAAA,IAAA,EAAA,CAAA,GAAA,IAAA,CCQvD,IAAIC,EAEXJ,EAAA,EAAA,CAAA,EAAAA,EAAA,EAAA,EAAA,CAAA,IAAApE,EAAA,IAAAyE,EAAA,OAAA,MAAAzE,EAAA,IAAA0E,GAAA,QAAA,CAAA,EAAuB,OAAZrE,SAAY,SACtBmE,EAAYnE,QAAQsE,WAAa,QACF,OAAdC,WAAc,WAE/BJ,EADgBI,UAAUC,UACJC,QAAQ,SAAA,GAAc,GCV7C,IAAMC,EAAiB,iBACjBC,EAAoB,MACpBC,EAAoB,QAE1B,SAASC,EAAarC,EAAUsC,EAAAA,CAG/B,GAAA,CAAKtC,EAAIuC,QAAUD,EAClB,MAAM,IAAIE,MAAM,2DAA2DxC,EAAIyC,SAAAA,aAAsBzC,EAAIhE,IAAAA,cAAkBgE,EAAI0C,KAAAA,iBAAsB1C,EAAI2C,QAAAA,IAAAA,EAK1J,GAAI3C,EAAIuC,QAAAA,CAAWL,EAAeU,KAAK5C,EAAIuC,MAAAA,EAC1C,MAAM,IAAIC,MAAM,iDAAA,EAQjB,GAAIxC,EAAIhE,MACP,GAAIgE,EAAIyC,WACP,GAAA,CAAKN,EAAkBS,KAAK5C,EAAIhE,IAAAA,EAC/B,MAAM,IAAIwG,MAAM,0IAAA,UAGbJ,EAAkBQ,KAAK5C,EAAIhE,IAAAA,EAC9B,MAAM,IAAIwG,MAAM,2HAAA,EAIpB,CA7BSH,EAAAA,EAAAA,KA+DT,IAAMQ,EAAS,GACTC,EAAS,IACTC,EAAU,+DAkBT,MAAMC,CAAAA,OAAAA,CAAAA,EAAAA,UAEZ,OAAA,MAAaC,EAAAA,CACZ,OAAIA,aAAiBD,GAAAA,CAAAA,CAGhBC,GAGoC,OAArBA,EAAOR,WAAc,UACJ,OAApBQ,EAAON,UAAa,UACJ,OAAhBM,EAAOjH,MAAS,UACC,OAAjBiH,EAAOP,OAAU,UACC,OAAlBO,EAAOV,QAAW,UACA,OAAlBU,EAA
OC,QAAW,UACF,OAAhBD,EAAOE,MAAS,YACI,OAApBF,EAAOG,UAAa,UACtC,CAMSb,OAMAE,UAKAzG,KAKA0G,MAKAC,SAeT,YAAsBU,EAAsCZ,EAAoBzG,EAAe0G,EAAgBC,EAAmBL,EAAAA,GAAmB,CAExH,OAAjBe,GAAiB,UAC3BC,KAAKf,OAASc,EAAad,QAAUM,EACrCS,KAAKb,UAAYY,EAAaZ,WAAaI,EAC3CS,KAAKtH,KAAOqH,EAAarH,MAAQ6G,EACjCS,KAAKZ,MAAQW,EAAaX,OAASG,EACnCS,KAAKX,SAAWU,EAAaV,UAAYE,IAKzCS,KAAKf,OAvHR,SAAoBA,GAAgBD,GAAAA,CACnC,OAAKC,IAAWD,GAGTC,GAFC,MAGT,EAkH4Bc,EAAcf,CAAAA,EACvCgB,KAAKb,UAAYA,GAAaI,EAC9BS,KAAKtH,KAjHR,SAA8BuG,GAAgBvG,GAAAA,CAM7C,OAAQuG,GAAAA,CACP,IAAK,QACL,IAAK,OACL,IAAK,OACCvG,GAEMA,GAAK,CAAA,IAAO8G,IACtB9G,GAAO8G,EAAS9G,IAFhBA,GAAO8G,CAAAA,CAMV,OAAO9G,EACR,EA+FoCsH,KAAKf,OAAQvG,GAAQ6G,CAAAA,EACtDS,KAAKZ,MAAQA,GAASG,EACtBS,KAAKX,SAAWA,GAAYE,EAE5BR,EAAaiB,KAAMhB,CAAAA,EAErB,CA4BA,IAAA,QAAIY,CAIH,OAAOK,EAAYD,KAAAA,EAAM,CAC1B,CAIA,KAAKE,EAAAA,CAEJ,GAAA,CAAKA,EACJ,OAAOF,KAGR,GAAA,CAAI,OAAEf,EAAM,UAAEE,EAAS,KAAEzG,EAAI,MAAE0G,EAAK,SAAEC,CAAAA,EAAaa,EA2BnD,OA1BIjB,IA0BJ,OAzBCA,EAASe,KAAKf,OACJA,IAAW,OACrBA,EAASM,GAENJ,IAFMI,OAGTJ,EAAYa,KAAKb,UACPA,IAAc,OACxBA,EAAYI,GAET7G,IAFS6G,OAGZ7G,EAAOsH,KAAKtH,KACFA,IAAS,OACnBA,EAAO6G,GAEJH,IAFIG,OAGPH,EAAQY,KAAKZ,MACHA,IAAU,OACpBA,EAAQG,GAELF,IAFKE,OAGRF,EAAWW,KAAKX,SACNA,IAAa,OACvBA,EAAWE,GAGRN,IAAWe,KAAKf,QAChBE,IAAca,KAAKb,WACnBzG,IAASsH,KAAKtH,MACd0G,IAAUY,KAAKZ,OACfC,IAAaW,KAAKX,SAEdW,KAGD,IAAIG,EAAIlB,EAAQE,EAAWzG,EAAM0G,EAAOC,CAAAA,CAChD,CAUA,OAAA,MAAajB,EAAeY,EAAAA,GAAmB,CAC9C,IAAMoB,EAAQX,EAAQY,KAAKjC,CAAAA,EAC3B,OAAKgC,EAGE,IAAID,EACVC,EAAM,CAAA,GAAMb,EACZe,GAAcF,EAAM,CAAA,GAAMb,CAAAA,EAC1Be,GAAcF,EAAM,CAAA,GAAMb,CAAAA,EAC1Be,GAAcF,EAAM,CAAA,GAAMb,CAAAA,EAC1Be,GAAcF,EAAM,CAAA,GAAMb,CAAAA,EAC1BP,CAAAA,EARO,IAAImB,EAAIZ,EAAQA,EAAQA,EAAQA,EAAQA,CAAAA,CAUjD,CAuBA,OAAA,KAAY7G,EAAAA,CAEX,IAAIyG,EAAYI,EAWhB,GANIlB,IACH3F,EAAOA,EAAK6H,QAAQ,MAAOf,CAAAA,GAKxB9G,EAAK,CAAA,IAAO8G,GAAU9G,EAAK,CAAA,IAAO8G,EAAQ,CAC7C,IAAMgB,EAAM9H,EAAKiG,QAAQa,EAAQ,CAAA,EAC7BgB,IAD6B,IAEhCrB,EAAYzG,EAAK+H,UAAU,CAAA,EAC3B/H,EAAO8G,IAEPL,EAAYzG,EAAK+H,UAAU,EAAGD,CAAAA,EAC9B9H,EAAOA,EAAK+H,UAAUD,CAAAA,GAAQhB,EAAAA,CAIhC,OAAO,IAAIW,EAAI,OAAQhB,EAAWzG,EAAM6G,EAAQA,CAAAA,CACjD,CAEA,OAAA,KAAYmB,EAAAA,CACX,IAAMC,EAAS,IAAIR,EAClBO,EAAWzB,OACXyB,EAAWvB,UACXuB,EAAWhI,KACXgI,EAAWtB,MACXsB,EAAWrB,QAAAA,EAGZ,OADAN,EAAa4B,EAAAA,EAAQ,EACdA,CACR,CAeA,SAASC,EAAAA,GAAwB,CAChC,OAAOC,EAAab,KAAMY,CAAAA,CAC3B,CAEA,QAAAE,CACC,OAAOd,IACR,CAMA,OAAA,OAAce,EAAAA,CACb,GAAKA,EAEE,CAAA,GAAIA,aAAgBrB,EAC1B,OAAOqB,EACD,CACN,IAAMJ,EAAS,IAAIR,EAAIY,CAAAA,EAGvB,OAFAJ,EAAOK,WAAwBD,EAAME,SACrCN,EAAOO,QAAqBH,EAAMI,OAASC,EAA4BL,EAAMnB,OAAS,KAC/Ee,CAAAA,CAAAA,CAPP,OAAYI,CASd,CAAA,CAkBD,IAAMK,EAAiB/C,EAAY,EAAA,OAGnC,MAAM8B,UAAYT,CAAAA,OAAAA,CAAAA,EAAAA,UAEjBsB,WAA4B,KAC5BE,QAAyB,KAEzB,IAAA,QAAatB,CAIZ,OAHKI,KAAKkB,UACTlB,KAAKkB,QAAUjB,EAAYD,KAAAA,EAAM,GAE3BA,KAAKkB,OACb,CAES,SAASN,EAAAA,GAAwB,CACzC,OAAKA,EAOGC,EAAab,KAAAA,EAAM,GANrBA,KAAKgB,aACThB,KAAKgB,WAAaH,EAAab,KAAAA,EAAM,GAE/BA,KAAKgB,WAKd,CAES,QAAAF,CACR,IAAM7H,EAAgB,CACrBoI,KAAM,CAAA,EA0BP,OAvBIrB,KAAKkB,UACRjI,EAAI2G,OAASI,KAAKkB,QAClBjI,EAAIkI,KAAOC,GAERpB,KAAKgB,aACR/H,EAAIgI,SAAWjB,KAAKgB,YAGjBhB,KAAKtH,OACRO,EAAIP,KAAOsH,KAAKtH,MAEbsH,KAAKf,SACRhG,EAAIgG,OAASe,KAAKf,QAEfe,KAAKb,YACRlG,EAAIkG,UAAYa,KAAKb,WAElBa,KAAKZ,QACRnG,EAAImG,MAAQY,KAAKZ,OAEdY,KAAKX,WACRpG,EAAIoG,SAAWW,KAAKX,UAEdpG,CACR,CAAA,CAID,IAAMqI,EAAwC,CAC7C,GAAkB,MAClB,GAAkB,MAClB,GAAyB,MACzB,GAAiB,MACjB,GAA8B,MAC9B,GAA+B,MAC/B,GAAmB,MAEnB,GAA4B,MAC5B,GAAuB,MACvB,GAAsB,MACtB,GAAwB,MACxB,GAAsB,MACtB,GAAuB,MACvB,GAAqB,MACrB,GAAiB,MACjB,GAAkB,MAClB,GAAsB,MACtB,GAAmB,MAEnB,GAAkB,KAAA,EAGnB,SAASC,EAAuBC,EAAsBC,EAAiBC,EAAAA,CACtE,IAAIzI
,EACA0I,EAAAA,GAEJ,QAASC,EAAM,EAAGA,EAAMJ,EAAalI,OAAQsI,IAAO,CACnD,IAAM5I,EAAOwI,EAAajI,WAAWqI,CAAAA,EAGrC,GACE5I,GAAQ,IAAcA,GAAQ,KAC3BA,GAAQ,IAAcA,GAAQ,IAC9BA,GAAQ,IAAmBA,GAAQ,IACpCA,IAAS,IACTA,IAAS,IACTA,IAAS,IACTA,IAAS,KACRyI,GAAUzI,IAAS,IACnB0I,GAAe1I,IAAS,IACxB0I,GAAe1I,IAAS,IACxB0I,GAAe1I,IAAS,GAGxB2I,IAHe3I,KAIlBC,GAAO4I,mBAAmBL,EAAaf,UAAUkB,EAAiBC,CAAAA,CAAAA,EAClED,EAAAA,IAGG1I,IAHgB,SAInBA,GAAOuI,EAAaM,OAAOF,CAAAA,OAGtB,CAEF3I,IAFE,SAGLA,EAAMuI,EAAaO,OAAO,EAAGH,CAAAA,GAI9B,IAAMI,GAAUV,EAAYtI,CAAAA,EACxBgJ,KADwBhJ,QAIvB2I,IAHDK,KAIF/I,GAAO4I,mBAAmBL,EAAaf,UAAUkB,EAAiBC,CAAAA,CAAAA,EAClED,EAAAA,IAID1I,GAAO+I,IAEGL,IAFHK,KAIPL,EAAkBC,EAAAA,CAAAA,CASrB,OAJID,IAIJ,KAHC1I,GAAO4I,mBAAmBL,EAAaf,UAAUkB,CAAAA,CAAAA,GAG3C1I,IAH2C0I,OAGvB1I,EAAMuI,CAClC,CA9DSD,EAAAA,EAAAA,KAgET,SAASU,EAA0BvJ,EAAAA,CAClC,IAAIO,EACJ,QAAS2I,EAAM,EAAGA,EAAMlJ,EAAKY,OAAQsI,IAAO,CAC3C,IAAM5I,EAAON,EAAKa,WAAWqI,CAAAA,EACzB5I,IAAS,IAAiBA,IAAS,IAClCC,IADyBD,SAE5BC,EAAMP,EAAKqJ,OAAO,EAAGH,CAAAA,GAEtB3I,GAAOqI,EAAYtI,CAAAA,GAEfC,IAFeD,SAGlBC,GAAOP,EAAKkJ,CAAAA,EAAAA,CAIf,OAAO3I,IAAP,OAA2BA,EAAMP,CAClC,CAhBSuJ,EAAAA,EAAAA,KAqBF,SAAShC,EAAYiC,EAAUC,EAAAA,CAErC,IAAI/D,EAsBJ,OAnBCA,EAFG8D,EAAI/C,WAAa+C,EAAIxJ,KAAKY,OAAS,GAAK4I,EAAIjD,SAAW,OAElD,KAAKiD,EAAI/C,SAAAA,GAAY+C,EAAIxJ,IAAAA,GAEjCwJ,EAAIxJ,KAAKa,WAAW,CAAA,IAAO,KACvB2I,EAAIxJ,KAAKa,WAAW,CAAA,GAAM,IAAc2I,EAAIxJ,KAAKa,WAAW,CAAA,GAAM,IAAc2I,EAAIxJ,KAAKa,WAAW,CAAA,GAAM,IAAc2I,EAAIxJ,KAAKa,WAAW,CAAA,GAAM,MACnJ2I,EAAIxJ,KAAKa,WAAW,CAAA,IAAO,GAEzB4I,EAIID,EAAIxJ,KAAKqJ,OAAO,CAAA,EAFhBG,EAAIxJ,KAAK,CAAA,EAAG0J,YAAAA,EAAgBF,EAAIxJ,KAAKqJ,OAAO,CAAA,EAM7CG,EAAIxJ,KAET2F,IACHD,EAAQA,EAAMmC,QAAQ,MAAO,IAAA,GAEvBnC,CACR,CAzBgB6B,EAAAA,EAAAA,KA8BhB,SAASY,EAAaqB,EAAUtB,EAAAA,CAE/B,IAAMyB,EAAWzB,EAEdqB,EADAV,EAGCtI,EAAM,GAAA,CACN,OAAEgG,EAAM,UAAEE,EAAS,KAAEzG,EAAI,MAAE0G,GAAK,SAAEC,EAAAA,EAAa6C,EASnD,GARIjD,IACHhG,GAAOgG,EACPhG,GAAO,MAEJkG,GAAaF,IAAW,UAC3BhG,GAAOuG,EACPvG,GAAOuG,GAEJL,EAAW,CACd,IAAIqB,GAAMrB,EAAUR,QAAQ,GAAA,EAC5B,GAAI6B,KAAJ,GAAgB,CAEf,IAAM8B,GAAWnD,EAAU4C,OAAO,EAAGvB,EAAAA,EACrCrB,EAAYA,EAAU4C,OAAOvB,GAAM,CAAA,EACnCA,GAAM8B,GAAS7I,YAAY,GAAA,EACvB+G,KADuB,GAE1BvH,GAAOoJ,EAAQC,GAAAA,GAAU,EAAO,GAGhCrJ,GAAOoJ,EAAQC,GAASP,OAAO,EAAGvB,EAAAA,EAAAA,GAAM,EAAO,EAC/CvH,GAAO,IACPA,GAAOoJ,EAAQC,GAASP,OAAOvB,GAAM,CAAA,EAAA,GAAI,EAAO,GAEjDvH,GAAO,GAAA,CAERkG,EAAYA,EAAUiD,YAAAA,EACtB5B,GAAMrB,EAAU1F,YAAY,GAAA,EACxB+G,KADwB,GAE3BvH,GAAOoJ,EAAQlD,EAAAA,GAAW,EAAO,GAGjClG,GAAOoJ,EAAQlD,EAAU4C,OAAO,EAAGvB,EAAAA,EAAAA,GAAM,EAAO,EAChDvH,GAAOkG,EAAU4C,OAAOvB,EAAAA,EAAAA,CAG1B,GAAI9H,EAAM,CAET,GAAIA,EAAKY,QAAU,GAAKZ,EAAKa,WAAW,CAAA,IAAO,IAAkBb,EAAKa,WAAW,CAAA,IAAO,GAAgB,CACvG,IAAMP,GAAON,EAAKa,WAAW,CAAA,EACzBP,IAAQ,IAAcA,IAAQ,KACjCN,EAAO,IAAI6J,OAAOC,aAAaxJ,GAAO,EAAA,CAAA,IAAON,EAAKqJ,OAAO,CAAA,CAAA,GAAA,SAEhDrJ,EAAKY,QAAU,GAAKZ,EAAKa,WAAW,CAAA,IAAO,GAAgB,CACrE,IAAMP,GAAON,EAAKa,WAAW,CAAA,EACzBP,IAAQ,IAAcA,IAAQ,KACjCN,EAAO,GAAG6J,OAAOC,aAAaxJ,GAAO,EAAA,CAAA,IAAON,EAAKqJ,OAAO,CAAA,CAAA,GAAA,CAI1D9I,GAAOoJ,EAAQ3J,EAAAA,GAAM,EAAM,CAAA,CAU5B,OARI0G,KACHnG,GAAO,IACPA,GAAOoJ,EAAQjD,GAAAA,GAAO,EAAO,GAE1BC,KACHpG,GAAO,IACPA,GAAQ2H,EAAgEvB,GAAjDkC,EAAuBlC,GAAAA,GAAU,EAAO,GAEzDpG,CACR,CApES4H,EAAAA,EAAAA,KAwET,SAAS4B,EAA2BC,EAAAA,CACnC,GAAA,CACC,OAAOC,mBAAmBD,CAAAA,CAAAA,MACzB,CACD,OAAIA,EAAIpJ,OAAS,EACToJ,EAAIX,OAAO,EAAG,CAAA,EAAKU,EAA2BC,EAAIX,OAAO,CAAA,CAAA,EAEzDW,CAAAA,CAGV,CAVSD,EAAAA,EAAAA,KAYT,IAAMG,EAAiB,8BAEvB,SAAStC,GAAcoC,EAAAA,CACtB,OAAKA,EAAItC,MAAMwC,CAAAA,EAGRF,EAAInC,QAAQqC,EAAiBxC,GAAUqC,EAA2BrC,CAAAA,CAAAA,EAFjEsC,CAGT,CALSpC,EAAAA,GAAAA,KAKT,IAAAuC,GAAA5E,EAAA,GAAA,ECjqBA,IAAM6E,GA
AYD,GAAA,OAAkBA,GAC9BE,GAAQ,IAEP,IAAUC,IAAjB,SAAiBA,EAAAA,CAeGC,EAAAC,SAAhB,SAAyBhB,KAAaiB,EAAAA,CAClC,OAAOjB,EAAIrC,KAAK,CAAEnH,KAAMoK,GAAUxI,KAAK4H,EAAIxJ,KAAAA,GAASyK,CAAAA,CAAAA,CAAAA,CACxD,EAgBgBF,EAAAG,YAAhB,SAA4BlB,KAAaiB,EAAAA,CACrC,IAAIzK,EAAOwJ,EAAIxJ,KACX2K,EAAAA,GACA3K,EAAK,CAAA,IAAOqK,KACZrK,EAAOqK,GAAQrK,EACf2K,EAAAA,IAEJ,IAAItJ,EAAe+I,GAAUlJ,QAAQlB,EAAAA,GAASyK,CAAAA,EAI9C,OAHIE,GAActJ,EAAa,CAAA,IAAOgJ,IAAAA,CAAUb,EAAI/C,YAChDpF,EAAeA,EAAa0G,UAAU,CAAA,GAEnCyB,EAAIrC,KAAK,CAAEnH,KAAMqB,CAAAA,CAAAA,CAC5B,EAUgBkJ,EAAA5H,QAAhB,SAAwB6G,EAAAA,CACpB,GAAIA,EAAIxJ,KAAKY,SAAW,GAAK4I,EAAIxJ,OAASqK,GACtC,OAAOb,EAEX,IAAIxJ,EAAOoK,GAAUzH,QAAQ6G,EAAIxJ,IAAAA,EAIjC,OAHIA,EAAKY,SAAW,GAAKZ,EAAKa,WAAW,CAAA,IAAO,KAC5Cb,EAAO,IAEJwJ,EAAIrC,KAAK,CAAEnH,KAAAA,CAAAA,CAAAA,CACtB,EAUgBuK,EAAAxH,SAAhB,SAAyByG,EAAAA,CACrB,OAAOY,GAAUrH,SAASyG,EAAIxJ,IAAAA,CAClC,EAUgBuK,EAAAnH,QAAhB,SAAwBoG,EAAAA,CACpB,OAAOY,GAAUhH,QAAQoG,EAAIxJ,IAAAA,CACjC,CACH,GAzFgBsK,KAAAA,GAAK,CAAA,EAAA,CAAA,GAAA,EAAAM,GAAA,CAAA,GAAA,EAAA,GAAA,CAAA,IAAA5D,GAAA,MAAAsD,EAAA,EAAAM,GCJhB,IAAWC,IAAjB,SAAiBA,EAAQ,CAERA,EAAA,SAAWC,GAAM,SACjBD,EAAA,QAAUC,GAAM,QAChBD,EAAA,QAAUC,GAAM,QAChBD,EAAA,SAAWC,GAAM,SACjBD,EAAA,YAAcC,GAAM,YAEjC,SAAgBC,EAAOC,EAAkBC,EAAgB,CACrD,OAAOD,GAAG,SAAQ,IAAOC,GAAG,SAAQ,CACxC,CAFgBC,EAAAH,EAAA,UAAAF,EAAA,OAAME,EAItB,SAAgBI,EAASC,EAAoBC,EAAgB,CACzD,IAAMC,EAAW,OAAOF,GAAS,SAAWA,EAAOA,EAAK,KAClDG,EAAS,OAAOF,GAAO,SAAWA,EAAKA,EAAG,KAC1CG,EAAYF,EAAS,MAAM,GAAG,EAAE,OAAOG,GAAKA,EAAE,OAAS,CAAC,EACxDC,EAAUH,EAAO,MAAM,GAAG,EAAE,OAAOE,GAAKA,EAAE,OAAS,CAAC,EACtDE,EAAI,EACR,KAAOA,EAAIH,EAAU,QACbA,EAAUG,CAAC,IAAMD,EAAQC,CAAC,EADLA,IACzB,CAIJ,IAAMC,EAAW,MAAM,OAAOJ,EAAU,OAASG,CAAC,EAC5CE,EAASH,EAAQ,MAAMC,CAAC,EAAE,KAAK,GAAG,EACxC,OAAOC,EAAWC,CACtB,CAdgBX,EAAAC,EAAA,YAAAN,EAAA,SAAQM,CAgB5B,GA5BiBN,KAAAA,GAAQ,CAAA,EAAA,EC2CzB,IAAYiB,GAAZ,SAAYA,EAAa,CAKrBA,EAAAA,EAAA,QAAA,CAAA,EAAA,UAMAA,EAAAA,EAAA,OAAA,CAAA,EAAA,SAKAA,EAAAA,EAAA,eAAA,CAAA,EAAA,iBAQAA,EAAAA,EAAA,eAAA,CAAA,EAAA,iBAKAA,EAAAA,EAAA,OAAA,CAAA,EAAA,SAMAA,EAAAA,EAAA,kBAAA,CAAA,EAAA,oBAKAA,EAAAA,EAAA,UAAA,CAAA,EAAA,WACJ,GAzCYA,IAAAA,EAAa,CAAA,EAAA,EA8GnB,IAAOC,GAAP,KAAoC,CAnK1C,MAmK0C,CAAAC,EAAA,sCAMtC,YAAYC,EAAmC,CAC3C,KAAK,gBAAkBA,EAAS,gBAChC,KAAK,cAAgBA,EAAS,UAAU,cACxC,KAAK,mBAAqBA,EAAS,UAAU,kBACjD,CAEA,MAAM,QAAqCC,EAAUC,EAAoB,oBAAkB,KAAI,CAC3F,IAAMC,EAAU,MAAM,KAAK,mBAAmB,SAASF,CAAG,EAC1D,OAAO,KAAK,YAAeA,EAAKE,EAASD,CAAiB,CAC9D,CAIA,iBAA8CE,EAA4BH,EAAWC,EAAqC,CAEtH,OADAD,EAAMA,GAAOI,GAAI,MAAMD,EAAa,GAAG,EACnCF,EACO,KAAK,YAAeD,EAAKG,EAAcF,CAAiB,EAExD,KAAK,OAAUD,EAAKG,CAAY,CAE/C,CAIA,WAAwCE,EAAcL,EAAUC,EAAqC,CACjG,OAAIA,EACO,KAAK,YAAeD,EAAKK,EAAMJ,CAAiB,EAEhD,KAAK,OAAUD,EAAKK,CAAI,CAEvC,CAEA,UAAuCC,EAAUN,EAAQ,CACrD,OAAO,KAAK,OAAUA,EAAK,CAAE,OAAQM,CAAK,CAAE,CAChD,CAEU,OAAoCN,EAAUE,EAA8C,CAClG,GAAI,OAAOA,GAAY,SAAU,CAC7B,IAAMK,EAAc,KAAK,MAASP,EAAKE,CAAO,EAC9C,OAAO,KAAK,sBAAyBK,EAAaP,EAAK,OAAWE,CAAO,UAElE,WAAYA,EAAS,CAC5B,IAAMK,EAAc,CAAE,MAAOL,EAAQ,OAAQ,aAAc,CAAA,EAAI,YAAa,CAAA,CAAE,EAC9E,OAAO,KAAK,sBAAyBK,EAAaP,CAAG,MAElD,CACH,IAAMO,EAAc,KAAK,MAASP,EAAKE,EAAQ,QAAO,CAAE,EACxD,OAAO,KAAK,sBAAsBK,EAAaP,EAAKE,CAAO,EAEnE,CAEU,MAAM,YAAyCF,EAAUE,EAAgCM,EAA8B,CAC7H,GAAI,OAAON,GAAY,SAAU,CAC7B,IAAMK,EAAc,MAAM,KAAK,WAAcP,EAAKE,EAASM,CAAW,EACtE,OAAO,KAAK,sBAAyBD,EAAaP,EAAK,OAAWE,CAAO,MACtE,CACH,IAAMK,EAAc,MAAM,KAAK,WAAcP,EAAKE,EAAQ,QAAO,EAAIM,CAAW,EAChF,OAAO,KAAK,sBAAsBD,EAAaP,EAAKE,CAAO,EAEnE,CAaU,sBAAmDK,EAA6BP,EAAUG,EAA6BE,EAAa,CAC1I,IAAII,EACJ,GAAIN,EACAM,EAAW,CACP,YAAAF,EACA,IAAAP,EACA,MAAOJ,EAAc,OACrB,WAAY,CAAA,EACZ,aAAAO,OAED,CACH,IAAMO,EAAqB,KAAK,yBAAyBV,EAAKK,CAAI,EAClEI,EAAW,CACP,YAA
AF,EACA,IAAAP,EACA,MAAOJ,EAAc,OACrB,WAAY,CAAA,EACZ,IAAI,cAAY,CACZ,OAAOc,EAAkB,CAC7B,GAGP,OAAAH,EAAY,MAA2B,UAAYE,EAC7CA,CACX,CAEA,MAAM,OAAoCA,EAAuCR,EAAoC,SAEjH,IAAMU,GAAUC,EAAAH,EAAS,YAAY,MAAM,YAAQ,MAAAG,IAAA,OAAA,OAAAA,EAAE,KAAK,SACpDT,GAAeU,EAAA,KAAK,iBAAa,MAAAA,IAAA,OAAA,OAAAA,EAAE,IAAIJ,EAAS,IAAI,SAAQ,CAAE,EAC9DJ,EAAOF,EAAeA,EAAa,QAAO,EAAK,MAAM,KAAK,mBAAmB,SAASM,EAAS,GAAG,EAExG,GAAIN,EACA,OAAO,eACHM,EACA,eACA,CACI,MAAON,EACV,MAEF,CACH,IAAMO,EAAqB,KAAK,yBAAyBD,EAAS,IAAKJ,CAAI,EAC3E,OAAO,eACHI,EACA,eACA,CACI,IAAKC,EACR,EAMT,OAAIC,IAAYN,IACZI,EAAS,YAAc,MAAM,KAAK,WAAWA,EAAS,IAAKJ,EAAMJ,CAAiB,EACjFQ,EAAS,YAAY,MAA2B,UAAYA,GAEjEA,EAAS,MAAQb,EAAc,OACxBa,CACX,CAEU,MAAyBT,EAAUK,EAAY,CAErD,OADiB,KAAK,gBAAgB,YAAYL,CAAG,EACrC,OAAO,cAAc,MAASK,CAAI,CACtD,CAEU,WAA8BL,EAAUK,EAAcJ,EAAoC,CAEhG,OADiB,KAAK,gBAAgB,YAAYD,CAAG,EACrC,OAAO,YAAY,MAASK,EAAMJ,CAAiB,CACvE,CAEU,yBAAyBD,EAAUK,EAAa,CACtD,IAAMS,EAAkB,KAAK,gBACzBC,EACJ,MAAO,IACIA,IAAAA,EAAYC,GAAa,OAC5BhB,EAAI,SAAQ,EAAIc,EAAgB,YAAYd,CAAG,EAAE,iBAAiB,WAAY,EAAGK,GAAQ,EAAE,EAGvG,GAuESY,GAAP,KAA8B,CAvYpC,MAuYoC,CAAAnB,EAAA,gCAMhC,YAAYC,EAAmC,CAF5B,KAAA,YAA4C,IAAI,IAG/D,KAAK,uBAAyBA,EAAS,UAAU,sBACrD,CAEA,IAAI,KAAG,CACH,OAAOmB,EAAO,KAAK,YAAY,OAAM,CAAE,CAC3C,CAEA,YAAYT,EAAyB,CACjC,IAAMU,EAAYV,EAAS,IAAI,SAAQ,EACvC,GAAI,KAAK,YAAY,IAAIU,CAAS,EAC9B,MAAM,IAAI,MAAM,4BAA4BA,CAAS,uBAAuB,EAEhF,KAAK,YAAY,IAAIA,EAAWV,CAAQ,CAC5C,CAEA,YAAYT,EAAQ,CAChB,IAAMmB,EAAYnB,EAAI,SAAQ,EAC9B,OAAO,KAAK,YAAY,IAAImB,CAAS,CACzC,CAEA,MAAM,oBAAoBnB,EAAUC,EAAqC,CACrE,IAAIQ,EAAW,KAAK,YAAYT,CAAG,EACnC,OAAIS,IAGJA,EAAW,MAAM,KAAK,uBAAuB,QAAQT,EAAKC,CAAiB,EAC3E,KAAK,YAAYQ,CAAQ,EAClBA,EACX,CAIA,eAAeT,EAAUK,EAAcJ,EAAqC,CACxE,GAAIA,EACA,OAAO,KAAK,uBAAuB,WAAWI,EAAML,EAAKC,CAAiB,EAAE,KAAKQ,IAC7E,KAAK,YAAYA,CAAQ,EAClBA,EACV,EACE,CACH,IAAMA,EAAW,KAAK,uBAAuB,WAAWJ,EAAML,CAAG,EACjE,YAAK,YAAYS,CAAQ,EAClBA,EAEf,CAEA,YAAYT,EAAQ,CAChB,OAAO,KAAK,YAAY,IAAIA,EAAI,SAAQ,CAAE,CAC9C,CAEA,mBAAmBA,EAAQ,CACvB,IAAMmB,EAAYnB,EAAI,SAAQ,EACxBoB,EAAa,KAAK,YAAY,IAAID,CAAS,EACjD,OAAIC,IACAA,EAAW,MAAQxB,EAAc,QACjCwB,EAAW,kBAAoB,OAC/BA,EAAW,WAAa,CAAA,EACxBA,EAAW,YAAc,QAEtBA,CACX,CAEA,eAAepB,EAAQ,CACnB,IAAMmB,EAAYnB,EAAI,SAAQ,EACxBoB,EAAa,KAAK,YAAY,IAAID,CAAS,EACjD,OAAIC,IACAA,EAAW,MAAQxB,EAAc,QACjC,KAAK,YAAY,OAAOuB,CAAS,GAE9BC,CACX,GCzYE,IAAOC,GAAP,KAAoB,CA1E1B,MA0E0B,CAAAC,EAAA,sBAMtB,YAAYC,EAA6B,CACrC,KAAK,WAAaA,EAAS,OAAO,cAClC,KAAK,iBAAmB,IAAMA,EAAS,OAAO,UAAU,iBACxD,KAAK,cAAgBA,EAAS,WAAW,cACzC,KAAK,eAAiBA,EAAS,UAAU,cAC7C,CAEA,MAAM,KAAKC,EAA2BC,EAAc,oBAAkB,KAAI,CACtE,QAAWC,KAAQC,GAAUH,EAAS,YAAY,KAAK,EACnD,MAAMI,GAAkBH,CAAW,EACnCI,GAAiBH,CAAI,EAAE,QAAQI,GAAO,KAAK,OAAOA,EAAKN,CAAQ,CAAC,CAExE,CAEU,OAAOO,EAAwBP,EAAyB,CAC9D,IAAMM,EAAMC,EAAQ,UAEpB,GAAID,EAAI,OAAS,OACb,GAAI,CACA,IAAME,EAAc,KAAK,aAAaD,CAAO,EAC7C,GAAIE,GAAeD,CAAW,EAC1BF,EAAI,KAAOE,UAEXF,EAAI,iBAAmBE,EACnB,KAAK,iBAAgB,EAAG,YAAYA,EAAY,WAAW,EAAG,CAE9D,IAAME,EAAa,KAAK,YAAYF,CAAW,EAC/CF,EAAI,KAAOI,GAAc,KAAK,mBAAmBH,EAASC,CAAW,SAGxEG,EAAK,CACVL,EAAI,KAAI,OAAA,OAAA,OAAA,OAAA,CAAA,EACDC,CAAO,EAAA,CACV,QAAS,mDAAmDD,EAAI,QAAQ,MAAMK,CAAG,EAAE,CAAA,EAK/FX,EAAS,WAAW,KAAKM,CAAG,CAChC,CAEA,OAAON,EAAyB,CAC5B,QAAWM,KAAON,EAAS,WACvB,OAAQM,EAAyB,KACjC,OAAQA,EAAyB,iBAErCN,EAAS,WAAa,CAAA,CAC1B,CAEA,aAAaO,EAAsB,CAE/B,IAAMC,EADQ,KAAK,cAAc,SAASD,CAAO,EACvB,WAAWA,EAAQ,UAAU,QAAQ,EAC/D,OAAOC,GAAe,KAAK,mBAAmBD,CAAO,CACzD,CAEA,eAAeL,EAAeU,EAAkBC,EAA8BC,EAAe,CAGzF,IAAMC,EAAS,KACTC,EAA8B,CAChC,SAAUH,EACV,SAAUC,EAEV,IAAI,KAAG,OACH,GAAIG,GAAU,KAAK,IAAI,EAEnB,OAAO,KAAK,KACT,GAAIC,GAAqB,KAAK,gBAAgB,EAAG,CAEpD,IAAMR,EAAaK,EAAO,YAAY,KAAK,gBAAgB,EAC3D,KAAK,KAAOL,GACRK,EAAO,mBAAmB,CAAE,UAAAC,EAAW,UAAWd,EAAM
,SAAAU,CAAQ,EAAI,KAAK,gBAAgB,UACtF,KAAK,OAAS,OAAW,CAEhC,IAAMO,EAAUJ,EAAO,cAAc,CAAE,UAAAC,EAAW,UAAWd,EAAM,SAAAU,CAAQ,CAAE,EAC7E,GAAIO,EAAQ,OAASC,GAAYlB,CAAI,EAAE,MAAQmB,EAAc,eAEzD,OAEJ,KAAK,MAAOC,EAAAH,EAAQ,QAAI,MAAAG,IAAA,OAAAA,EAAIH,EAAQ,MACpC,KAAK,iBAAmBA,EAAQ,MAEpC,OAAOF,GAAU,KAAK,IAAI,EAAI,KAAK,KAAO,MAC9C,EACA,IAAI,kBAAgB,CAChB,OAAO,KAAK,gBAChB,EACA,IAAI,OAAK,CACL,OAAOR,GAAe,KAAK,IAAI,EAAI,KAAK,KAAO,MACnD,GAEJ,OAAOO,CACX,CAEU,cAAcT,EAAsB,CAC1C,GAAI,CACA,IAAMC,EAAc,KAAK,aAAaD,CAAO,EAC7C,GAAIE,GAAeD,CAAW,EAC1B,MAAO,CAAE,MAAOA,CAAW,EAE/B,IAAME,EAAa,KAAK,YAAYF,CAAW,EAC/C,OAAIE,EACO,CAAE,KAAMA,EAAY,MAAOF,CAAW,EAGtC,CACH,MAAOA,EACP,MACI,KAAK,mBAAmBD,EAASC,CAAW,SAGnDG,EAAK,CACV,MAAO,CACH,MAAK,OAAA,OAAA,OAAA,OAAA,CAAA,EACEJ,CAAO,EAAA,CACV,QAAS,mDAAmDA,EAAQ,UAAU,QAAQ,MAAMI,CAAG,EAAE,CAAA,GAIjH,CAEU,YAAYY,EAAmC,CACrD,GAAIA,EAAgB,KAChB,OAAOA,EAAgB,KAE3B,IAAMC,EAAM,KAAK,iBAAgB,EAAG,YAAYD,EAAgB,WAAW,EAC3E,GAAKC,EAGL,OAAO,KAAK,eAAe,WAAWA,EAAI,YAAY,MAAOD,EAAgB,IAAI,CACrF,CAEU,mBAAmBhB,EAAwBkB,EAAsC,CAGvF,IAAMzB,EAAWoB,GAAYb,EAAQ,SAAS,EAC1CP,EAAS,MAAQqB,EAAc,gBAC/B,QAAQ,KAAK,gFAAgFrB,EAAS,GAAG,IAAI,EAEjH,IAAM0B,EAAgB,KAAK,WAAW,iBAAiBnB,CAAO,EAC9D,OAAA,OAAA,OAAA,OAAA,OAAA,CAAA,EACOA,CAAO,EAAA,CACV,QAAS,kCAAkCmB,CAAa,WAAWnB,EAAQ,UAAU,QAAQ,KAC7F,kBAAAkB,CAAiB,CAAA,CAEzB,GCpNE,SAAUE,GAAQC,EAAa,CACjC,OAAO,OAAQA,EAAsB,MAAS,QAClD,CAFgBC,EAAAF,GAAA,WAoBV,IAAOG,GAAP,KAA0B,CAjChC,MAiCgC,CAAAD,EAAA,4BAC5B,QAAQD,EAAa,CACjB,GAAID,GAAQC,CAAI,EACZ,OAAOA,EAAK,IAGpB,CAEA,YAAYA,EAAa,CACrB,OAAOG,GAAoBH,EAAK,SAAU,MAAM,CACpD,GCsBE,IAAOI,GAAP,KAAwB,CAjE9B,MAiE8B,CAAAC,EAAA,0BAK1B,YAAYC,EAA6B,CACrC,KAAK,aAAeA,EAAS,WAAW,aACxC,KAAK,MAAQA,EAAS,OAAO,UAAU,aACvC,KAAK,YAAcA,EAAS,UAAU,cAC1C,CAEA,gBAAgBC,EAAsB,CAClC,GAAIA,EAAe,CACf,IAAMC,EAAaC,GAAeF,CAAa,EACzCG,EAAWH,EAAc,QAC/B,GAAIC,GAAcE,EAAU,CACxB,IAAMC,EAAaD,EAA4BF,EAAW,OAAO,EAEjE,GAAII,GAAYD,CAAS,EACrB,OAAOA,EAAU,IACd,GAAI,MAAM,QAAQA,CAAS,GAC9B,QAAWE,KAAOF,EACd,GAAIC,GAAYC,CAAG,GAAKA,EAAI,UACrBA,EAAI,SAAS,QAAUN,EAAc,QACrCM,EAAI,SAAS,KAAON,EAAc,IACrC,OAAOM,EAAI,KAK3B,GAAIH,EAAU,CACV,IAAMI,EAAW,KAAK,aAAa,YAAYJ,CAAQ,EAEvD,GAAII,IAAaA,IAAaP,GAAiBQ,GAAYR,EAAeO,CAAQ,GAC9E,OAAOJ,GAKvB,CAEA,oBAAoBH,EAAsB,CACtC,IAAMS,EAAU,KAAK,gBAAgBT,CAAa,EAClD,GAAIS,GAAS,SAAU,CACnB,IAAMC,EAAa,KAAK,aAAa,YAAYD,CAAO,EACxD,OAAOC,GAAcD,EAAQ,SAGrC,CAEA,eAAeC,EAAqBC,EAA8B,CAC9D,IAAMC,EAA+B,CAAA,EACrC,GAAID,EAAQ,mBAAoB,CAC5B,IAAML,EAAM,KAAK,mBAAmBI,CAAU,EAC1CJ,GACAM,EAAK,KAAKN,CAAG,EAGrB,IAAIO,EAAkB,KAAK,MAAM,kBAAkBH,EAAY,KAAK,YAAY,eAAeA,CAAU,CAAC,EAC1G,OAAIC,EAAQ,cACRE,EAAkBA,EAAgB,OAAOP,GAAOQ,GAAS,OAAOR,EAAI,UAAWK,EAAQ,WAAW,CAAC,GAEvGC,EAAK,KAAK,GAAGC,CAAe,EACrBE,EAAOH,CAAI,CACtB,CAEU,mBAAmBF,EAAmB,CAC5C,IAAMH,EAAW,KAAK,aAAa,YAAYG,CAAU,EACzD,GAAIH,EAAU,CACV,IAAMS,EAAMC,GAAYP,CAAU,EAC5BQ,EAAO,KAAK,YAAY,eAAeR,CAAU,EACvD,MAAO,CACH,UAAWM,EAAI,IACf,WAAYE,EACZ,UAAWF,EAAI,IACf,WAAYE,EACZ,QAASC,GAAkBZ,CAAQ,EACnC,MAAO,IAInB,GCtIE,IAAOa,GAAP,KAAe,CAZrB,MAYqB,CAAAC,EAAA,iBAMjB,YAAYC,EAAwB,CAChC,GALI,KAAA,IAAM,IAAI,IAKVA,EACA,OAAW,CAACC,EAAKC,CAAK,IAAKF,EACvB,KAAK,IAAIC,EAAKC,CAAK,CAG/B,CAKA,IAAI,MAAI,CACJ,OAAOC,GAAU,IAAIC,EAAO,KAAK,IAAI,OAAM,CAAE,EAAE,IAAIC,GAAKA,EAAE,MAAM,CAAC,CACrE,CAKA,OAAK,CACD,KAAK,IAAI,MAAK,CAClB,CAUA,OAAOJ,EAAQC,EAAS,CACpB,GAAIA,IAAU,OACV,OAAO,KAAK,IAAI,OAAOD,CAAG,EACvB,CACH,IAAMK,EAAS,KAAK,IAAI,IAAIL,CAAG,EAC/B,GAAIK,EAAQ,CACR,IAAMC,EAAQD,EAAO,QAAQJ,CAAK,EAClC,GAAIK,GAAS,EACT,OAAID,EAAO,SAAW,EAClB,KAAK,IAAI,OAAOL,CAAG,EAEnBK,EAAO,OAAOC,EAAO,CAAC,EAEnB,GAGf,MAAO,GAEf,CASA,IAAIN,EAAM,OACN,OAAOO,EAAA,KAAK,IAAI,IAAIP,CAAG,KAAC,MAAAO,IAAA,OAAAA,EAAI,CAAA,CAChC,CAOA,IAAIP,EAAQC,EAAS,CACjB,GAAIA,IAAU,
OACV,OAAO,KAAK,IAAI,IAAID,CAAG,EACpB,CACH,IAAMK,EAAS,KAAK,IAAI,IAAIL,CAAG,EAC/B,OAAIK,EACOA,EAAO,QAAQJ,CAAK,GAAK,EAE7B,GAEf,CAKA,IAAID,EAAQC,EAAQ,CAChB,OAAI,KAAK,IAAI,IAAID,CAAG,EAChB,KAAK,IAAI,IAAIA,CAAG,EAAG,KAAKC,CAAK,EAE7B,KAAK,IAAI,IAAID,EAAK,CAACC,CAAK,CAAC,EAEtB,IACX,CAKA,OAAOD,EAAQK,EAAmB,CAC9B,OAAI,KAAK,IAAI,IAAIL,CAAG,EAChB,KAAK,IAAI,IAAIA,CAAG,EAAG,KAAK,GAAGK,CAAM,EAEjC,KAAK,IAAI,IAAIL,EAAK,MAAM,KAAKK,CAAM,CAAC,EAEjC,IACX,CAKA,QAAQG,EAAiD,CACrD,KAAK,IAAI,QAAQ,CAACC,EAAOT,IACrBS,EAAM,QAAQR,GAASO,EAAWP,EAAOD,EAAK,IAAI,CAAC,CAAC,CAE5D,CAKA,CAAC,OAAO,QAAQ,GAAC,CACb,OAAO,KAAK,QAAO,EAAG,SAAQ,CAClC,CAKA,SAAO,CACH,OAAOG,EAAO,KAAK,IAAI,QAAO,CAAE,EAC3B,QAAQ,CAAC,CAACH,EAAKS,CAAK,IAAMA,EAAM,IAAIR,GAAS,CAACD,EAAKC,CAAK,CAAW,CAAC,CAC7E,CAKA,MAAI,CACA,OAAOE,EAAO,KAAK,IAAI,KAAI,CAAE,CACjC,CAKA,QAAM,CACF,OAAOA,EAAO,KAAK,IAAI,OAAM,CAAE,EAAE,KAAI,CACzC,CAKA,qBAAmB,CACf,OAAOA,EAAO,KAAK,IAAI,QAAO,CAAE,CACpC,GAISO,GAAP,KAAY,CAvKlB,MAuKkB,CAAAZ,EAAA,cAKd,IAAI,MAAI,CACJ,OAAO,KAAK,IAAI,IACpB,CAIA,YAAYC,EAAwB,CAChC,GAVI,KAAA,IAAM,IAAI,IACV,KAAA,QAAU,IAAI,IASdA,EACA,OAAW,CAACC,EAAKC,CAAK,IAAKF,EACvB,KAAK,IAAIC,EAAKC,CAAK,CAG/B,CAEA,OAAK,CACD,KAAK,IAAI,MAAK,EACd,KAAK,QAAQ,MAAK,CACtB,CAEA,IAAID,EAAQC,EAAQ,CAChB,YAAK,IAAI,IAAID,EAAKC,CAAK,EACvB,KAAK,QAAQ,IAAIA,EAAOD,CAAG,EACpB,IACX,CAEA,IAAIA,EAAM,CACN,OAAO,KAAK,IAAI,IAAIA,CAAG,CAC3B,CAEA,OAAOC,EAAQ,CACX,OAAO,KAAK,QAAQ,IAAIA,CAAK,CACjC,CAEA,OAAOD,EAAM,CACT,IAAMC,EAAQ,KAAK,IAAI,IAAID,CAAG,EAC9B,OAAIC,IAAU,QACV,KAAK,IAAI,OAAOD,CAAG,EACnB,KAAK,QAAQ,OAAOC,CAAK,EAClB,IAEJ,EACX,GCpJE,IAAOU,GAAP,KAA8B,CAjEpC,MAiEoC,CAAAC,EAAA,gCAKhC,YAAYC,EAA6B,CACrC,KAAK,aAAeA,EAAS,WAAW,aACxC,KAAK,aAAeA,EAAS,UAAU,0BAC3C,CAEA,MAAM,eAAeC,EAA2BC,EAAc,oBAAkB,KAAI,CAChF,OAAO,KAAK,sBAAsBD,EAAS,YAAY,MAAOA,EAAU,OAAWC,CAAW,CAClG,CAcA,MAAM,sBAAsBC,EAAqBF,EAAoCG,EAAiDC,GAAgBH,EAAiC,oBAAkB,KAAI,CACzM,IAAMI,EAAgC,CAAA,EAEtC,KAAK,WAAWH,EAAYG,EAASL,CAAQ,EAC7C,QAAWM,KAAQH,EAASD,CAAU,EAClC,MAAMK,GAAkBN,CAAW,EACnC,KAAK,WAAWK,EAAMD,EAASL,CAAQ,EAE3C,OAAOK,CACX,CAMU,WAAWC,EAAeD,EAA+BL,EAAyB,CACxF,IAAMQ,EAAO,KAAK,aAAa,QAAQF,CAAI,EACvCE,GACAH,EAAQ,KAAK,KAAK,aAAa,kBAAkBC,EAAME,EAAMR,CAAQ,CAAC,CAE9E,CAEA,MAAM,mBAAmBA,EAA2BC,EAAc,oBAAkB,KAAI,CACpF,IAAMQ,EAAWT,EAAS,YAAY,MAChCU,EAAS,IAAIC,GAEnB,QAAWL,KAAQM,GAAkBH,CAAQ,EACzC,MAAMF,GAAkBN,CAAW,EACnC,KAAK,YAAYK,EAAMN,EAAUU,CAAM,EAE3C,OAAOA,CACX,CAOU,YAAYJ,EAAeN,EAA2BU,EAAyB,CACrF,IAAMG,EAAYP,EAAK,WACvB,GAAIO,EAAW,CACX,IAAML,EAAO,KAAK,aAAa,QAAQF,CAAI,EACvCE,GACAE,EAAO,IAAIG,EAAW,KAAK,aAAa,kBAAkBP,EAAME,EAAMR,CAAQ,CAAC,EAG3F,GChGE,IAAOc,GAAP,KAAkB,CAzCxB,MAyCwB,CAAAC,EAAA,oBAKpB,YAAYC,EAAsCC,EAAoBC,EAAsB,OACxF,KAAK,SAAWF,EAChB,KAAK,WAAaC,EAClB,KAAK,iBAAkBE,EAAAD,GAAS,mBAAe,MAAAC,IAAA,OAAAA,EAAI,EACvD,CAEA,gBAAc,CACV,OAAI,KAAK,WACE,KAAK,SAAS,OAAO,KAAK,WAAW,eAAc,CAAE,EAErD,KAAK,QAEpB,CAEA,WAAWC,EAAY,CACnB,IAAMC,EAAQ,KAAK,gBACb,KAAK,SAAS,KAAKC,GAAKA,EAAE,KAAK,YAAW,IAAOF,EAAK,YAAW,CAAE,EACnE,KAAK,SAAS,KAAKE,GAAKA,EAAE,OAASF,CAAI,EAC7C,GAAIC,EACA,OAAOA,EAEX,GAAI,KAAK,WACL,OAAO,KAAK,WAAW,WAAWD,CAAI,CAG9C,GAGSG,GAAP,KAAe,CA1ErB,MA0EqB,CAAAR,EAAA,iBAKjB,YAAYC,EAAwCC,EAAoBC,EAAsB,OAC1F,KAAK,SAAW,IAAI,IACpB,KAAK,iBAAkBC,EAAAD,GAAS,mBAAe,MAAAC,IAAA,OAAAA,EAAI,GACnD,QAAWK,KAAWR,EAAU,CAC5B,IAAMI,EAAO,KAAK,gBACZI,EAAQ,KAAK,YAAW,EACxBA,EAAQ,KACd,KAAK,SAAS,IAAIJ,EAAMI,CAAO,EAEnC,KAAK,WAAaP,CACtB,CAEA,WAAWG,EAAY,CACnB,IAAMK,EAAY,KAAK,gBAAkBL,EAAK,YAAW,EAAKA,EACxDC,EAAQ,KAAK,SAAS,IAAII,CAAS,EACzC,GAAIJ,EACA,OAAOA,EAEX,GAAI,KAAK,WACL,OAAO,KAAK,WAAW,WAAWD,CAAI,CAG9C,CAEA,gBAAc,CACV,IAAIM,EAAgBC,EAAO,KAAK,SAAS,OAAM,CAAE,EACjD,OAAI,KAAK,aACLD,EAAgBA,EAAc,OAAO,KAAK,WAAW,eAAc,CAAE,GAElE
A,CACX,GAISE,GAAqB,CAC9B,YAAU,CAEV,EACA,gBAAc,CACV,OAAOC,EACX,GC7GE,IAAgBC,GAAhB,KAA+B,CAVrC,MAUqC,CAAAC,EAAA,wBAArC,aAAA,CAEc,KAAA,UAA0B,CAAA,EAC1B,KAAA,WAAa,EAoB3B,CAlBI,UAAUC,EAAsB,CAC5B,KAAK,UAAU,KAAKA,CAAU,CAClC,CAEA,SAAO,CACH,KAAK,gBAAe,EACpB,KAAK,MAAK,EACV,KAAK,WAAa,GAClB,KAAK,UAAU,QAAQA,GAAcA,EAAW,QAAO,CAAE,CAC7D,CAEU,iBAAe,CACrB,GAAI,KAAK,WACL,MAAM,IAAI,MAAM,sCAAsC,CAE9D,GAKSC,GAAP,cAAiCH,EAAe,CAnCtD,MAmCsD,CAAAC,EAAA,oBAAtD,aAAA,qBACuB,KAAA,MAAQ,IAAI,GAoCnC,CAlCI,IAAIG,EAAM,CACN,YAAK,gBAAe,EACb,KAAK,MAAM,IAAIA,CAAG,CAC7B,CAEA,IAAIA,EAAQC,EAAQ,CAChB,KAAK,gBAAe,EACpB,KAAK,MAAM,IAAID,EAAKC,CAAK,CAC7B,CAIA,IAAID,EAAQE,EAAkB,CAE1B,GADA,KAAK,gBAAe,EAChB,KAAK,MAAM,IAAIF,CAAG,EAClB,OAAO,KAAK,MAAM,IAAIA,CAAG,EACtB,GAAIE,EAAU,CACjB,IAAMD,EAAQC,EAAQ,EACtB,YAAK,MAAM,IAAIF,EAAKC,CAAK,EAClBA,MAEP,OAER,CAEA,OAAOD,EAAM,CACT,YAAK,gBAAe,EACb,KAAK,MAAM,OAAOA,CAAG,CAChC,CAEA,OAAK,CACD,KAAK,gBAAe,EACpB,KAAK,MAAM,MAAK,CACpB,GAGSG,GAAP,cAAuEP,EAAe,CA1E5F,MA0E4F,CAAAC,EAAA,qBAKxF,YAAYO,EAA0C,CAClD,MAAK,EAJQ,KAAA,MAAQ,IAAI,IAKzB,KAAK,UAAYA,IAAcH,GAASA,EAC5C,CAEA,IAAII,EAAqBL,EAAQ,CAC7B,YAAK,gBAAe,EACb,KAAK,gBAAgBK,CAAU,EAAE,IAAIL,CAAG,CACnD,CAEA,IAAIK,EAAqBL,EAAUC,EAAY,CAC3C,KAAK,gBAAe,EACpB,KAAK,gBAAgBI,CAAU,EAAE,IAAIL,EAAKC,CAAK,CACnD,CAIA,IAAII,EAAqBL,EAAUE,EAAsB,CACrD,KAAK,gBAAe,EACpB,IAAMI,EAAe,KAAK,gBAAgBD,CAAU,EACpD,GAAIC,EAAa,IAAIN,CAAG,EACpB,OAAOM,EAAa,IAAIN,CAAG,EACxB,GAAIE,EAAU,CACjB,IAAMD,EAAQC,EAAQ,EACtB,OAAAI,EAAa,IAAIN,EAAKC,CAAK,EACpBA,MAEP,OAER,CAEA,OAAOI,EAAqBL,EAAQ,CAChC,YAAK,gBAAe,EACb,KAAK,gBAAgBK,CAAU,EAAE,OAAOL,CAAG,CACtD,CAIA,MAAMK,EAAoB,CAEtB,GADA,KAAK,gBAAe,EAChBA,EAAY,CACZ,IAAME,EAAS,KAAK,UAAUF,CAAU,EACxC,KAAK,MAAM,OAAOE,CAAM,OAExB,KAAK,MAAM,MAAK,CAExB,CAEU,gBAAgBF,EAAmB,CACzC,IAAME,EAAS,KAAK,UAAUF,CAAU,EACpCG,EAAgB,KAAK,MAAM,IAAID,CAAM,EACzC,OAAKC,IACDA,EAAgB,IAAI,IACpB,KAAK,MAAM,IAAID,EAAQC,CAAa,GAEjCA,CACX,GAOSC,GAAP,cAAmCN,EAAwC,CA9IjF,MA8IiF,CAAAN,EAAA,sBAC7E,YAAYa,EAAyC,CACjD,MAAMC,GAAOA,EAAI,SAAQ,CAAE,EAC3B,KAAK,UAAUD,EAAe,UAAU,gBAAgB,SAAS,CAACE,EAASC,IAAW,CAClF,IAAMC,EAAUF,EAAQ,OAAOC,CAAO,EACtC,QAAWF,KAAOG,EACd,KAAK,MAAMH,CAAG,CAEtB,CAAC,CAAC,CACN,GAOSI,GAAP,cAAoChB,EAAiB,CA9J3D,MA8J2D,CAAAF,EAAA,uBACvD,YAAYa,EAAyC,CACjD,MAAK,EACL,KAAK,UAAUA,EAAe,UAAU,gBAAgB,SAAS,IAAK,CAClE,KAAK,MAAK,CACd,CAAC,CAAC,CACN,GCnIE,IAAOM,GAAP,KAA2B,CAjCjC,MAiCiC,CAAAC,EAAA,6BAS7B,YAAYC,EAA6B,CACrC,KAAK,WAAaA,EAAS,OAAO,cAClC,KAAK,aAAeA,EAAS,WAAW,aACxC,KAAK,aAAeA,EAAS,UAAU,2BACvC,KAAK,aAAeA,EAAS,OAAO,UAAU,aAC9C,KAAK,iBAAmB,IAAIC,GAA8BD,EAAS,MAAM,CAC7E,CAEA,SAASE,EAAsB,CAC3B,IAAMC,EAA4C,CAAA,EAC5CC,EAAgB,KAAK,WAAW,iBAAiBF,CAAO,EAExDG,EAAcC,GAAYJ,EAAQ,SAAS,EAAE,kBACnD,GAAIG,EAAa,CACb,IAAIE,EAAmCL,EAAQ,UAC/C,EAAG,CACC,IAAMM,EAAkBH,EAAY,IAAIE,CAAW,EAC/CC,EAAgB,OAAS,GACzBL,EAAO,KAAKM,EAAOD,CAAe,EAAE,OAChCE,GAAQ,KAAK,WAAW,UAAUA,EAAK,KAAMN,CAAa,CAAC,CAAC,EAEpEG,EAAcA,EAAY,iBACrBA,GAGb,IAAII,EAAgB,KAAK,eAAeP,EAAeF,CAAO,EAC9D,QAASU,EAAIT,EAAO,OAAS,EAAGS,GAAK,EAAGA,IACpCD,EAAS,KAAK,YAAYR,EAAOS,CAAC,EAAGD,CAAM,EAE/C,OAAOA,CACX,CAKU,YAAYE,EAAwCC,EAAoBC,EAAsB,CACpG,OAAO,IAAIC,GAAYP,EAAOI,CAAQ,EAAGC,EAAYC,CAAO,CAChE,CAMU,oBAAoBF,EAA6BC,EAAoBC,EAAsB,CACjG,IAAME,EAAIR,EAAOI,CAAQ,EAAE,IAAIK,GAAI,CAC/B,IAAMC,EAAO,KAAK,aAAa,QAAQD,CAAC,EACxC,GAAIC,EACA,OAAO,KAAK,aAAa,kBAAkBD,EAAGC,CAAI,CAG1D,CAAC,EAAE,YAAW,EACd,OAAO,IAAIH,GAAYC,EAAGH,EAAYC,CAAO,CACjD,CAKU,eAAeX,EAAuBgB,EAAuB,CACnE,OAAO,KAAK,iBAAiB,IAAIhB,EAAe,IAAM,IAAIiB,GAAS,KAAK,aAAa,YAAYjB,CAAa,CAAC,CAAC,CACpH,GC/CE,SAAUkB,GAAqBC,EAAa,CAC9C,OAAO,OAAQA,EAA4B,UAAa,QAC5D,CAFgBC,EAAAF,GAAA,wBAkDhB,SAASG,GAAwBC,EAAY,CACzC,OAAO,OAAOA,GAAQ,UAAY,CAAC,CAACA,IAAQ,SAAUA,GA
AO,WAAYA,EAC7E,CAFSF,EAAAC,GAAA,2BAIH,IAAOE,GAAP,KAA4B,CA5GlC,MA4GkC,CAAAH,EAAA,8BAa9B,YAAYI,EAA6B,CAVzC,KAAA,iBAAmB,IAAI,IAAI,CAAC,aAAc,qBAAsB,kBAAmB,YAAa,UAAU,CAAC,EAWvG,KAAK,iBAAmBA,EAAS,OAAO,UAAU,iBAClD,KAAK,eAAiBA,EAAS,UAAU,eACzC,KAAK,aAAeA,EAAS,WAAW,aACxC,KAAK,gBAAkBA,EAAS,cAAc,eAClD,CAEA,UAAUL,EAAeM,EAAgC,CAAA,EAAE,CACvD,IAAMC,EAAmBD,GAAS,SAC5BE,EAAkBP,EAAA,CAACQ,EAAaC,IAAmB,KAAK,SAASD,EAAKC,EAAOJ,CAAO,EAAlE,mBAClBK,EAAWJ,EAAmB,CAACE,EAAaC,IAAmBH,EAAiBE,EAAKC,EAAOF,CAAe,EAAIA,EAErH,GAAI,CACA,YAAK,gBAAkBI,GAAYZ,CAAI,EAChC,KAAK,UAAUA,EAAMW,EAAUL,GAAS,KAAK,UAEpD,KAAK,gBAAkB,OAE/B,CAEA,YAAyCO,EAAiBP,EAAkC,CAAA,EAAE,CAC1F,IAAMQ,EAAO,KAAK,MAAMD,CAAO,EAC/B,YAAK,SAASC,EAAMA,EAAMR,CAAO,EAC1BQ,CACX,CAEU,SAASL,EAAaC,EAAgB,CAAE,QAAAK,EAAS,WAAAC,EAAY,YAAAC,EAAa,SAAAC,EAAU,aAAAC,CAAY,EAAwB,aAC9H,GAAI,MAAK,iBAAiB,IAAIV,CAAG,EAE1B,GAAIW,GAAYV,CAAK,EAAG,CAC3B,IAAMW,EAAWX,EAAM,IACjBY,EAAWP,EAAUL,EAAM,SAAW,OAC5C,GAAIW,EAAU,CACV,IAAME,EAAiBX,GAAYS,CAAQ,EACvCG,EAAY,GACZ,KAAK,iBAAmB,KAAK,kBAAoBD,IAC7CJ,EACAK,EAAYL,EAAaI,EAAe,IAAKb,CAAK,EAElDc,EAAYD,EAAe,IAAI,SAAQ,GAG/C,IAAME,EAAa,KAAK,eAAe,eAAeJ,CAAQ,EAC9D,MAAO,CACH,KAAM,GAAGG,CAAS,IAAIC,CAAU,GAChC,SAAAH,OAGJ,OAAO,CACH,QAAQI,GAAAC,EAAAjB,EAAM,SAAK,MAAAiB,IAAA,OAAA,OAAAA,EAAE,WAAO,MAAAD,IAAA,OAAAA,EAAI,8BAChC,SAAAJ,WAGDM,GAAUlB,CAAK,EAAG,CACzB,IAAImB,EAYJ,GAXIZ,IACAY,EAAU,KAAK,kCAAiC,OAAA,OAAA,CAAA,EAAMnB,CAAK,CAAA,GACtD,CAACD,GAAOC,EAAM,YAAcmB,GAAS,cAEtCA,EAAQ,YAAY,aAAcC,EAAA,KAAK,mBAAe,MAAAA,IAAA,OAAA,OAAAA,EAAE,IAAI,SAAQ,IAGxEd,GAAc,CAACP,IACfoB,IAAAA,EAAO,OAAA,OAAA,CAAA,EAAUnB,CAAK,GACtBmB,EAAQ,aAAcE,EAAArB,EAAM,YAAQ,MAAAqB,IAAA,OAAA,OAAAA,EAAE,MAEtCb,EAAU,CACVW,IAAAA,EAAO,OAAA,OAAA,CAAA,EAAUnB,CAAK,GACtB,IAAMsB,EAAU,KAAK,gBAAgB,WAAWtB,CAAK,EACjDsB,IACCH,EAA+B,SAAWG,EAAQ,QAAQ,MAAO,EAAE,GAG5E,OAAOH,GAAWnB,MAElB,QAAOA,CAEf,CAEU,kCAAkCV,EAA2B,CACnE,IAAMiC,EAA4EhC,EAAAiC,IAA4B,CAC1G,OAAQA,EAAQ,OAChB,IAAKA,EAAQ,IACb,OAAQA,EAAQ,OAChB,MAAOA,EAAQ,QAJ+D,yBAOlF,GAAIlC,EAAK,SAAU,CACf,IAAMmC,EAAanC,EAAK,YAAciC,EAAsBjC,EAAK,QAAQ,EACnEoC,EAAiDD,EAAW,YAAc,CAAA,EAEhF,cAAO,KAAKnC,CAAI,EAAE,OAAOS,GAAO,CAACA,EAAI,WAAW,GAAG,CAAC,EAAE,QAAQA,GAAM,CAChE,IAAM4B,EAAsBC,GAAqBtC,EAAK,SAAUS,CAAG,EAAE,IAAIwB,CAAqB,EAC1FI,EAAoB,SAAW,IAC/BD,EAAY3B,CAAG,EAAI4B,EAE3B,CAAC,EAEMrC,EAGf,CAEU,SAASA,EAAsBc,EAAeR,EAAiCiC,EAAqBC,EAA4BC,EAAuB,CAC7J,OAAW,CAACC,EAAcC,CAAI,IAAK,OAAO,QAAQ3C,CAAI,EAClD,GAAI,MAAM,QAAQ2C,CAAI,EAClB,QAASC,EAAQ,EAAGA,EAAQD,EAAK,OAAQC,IAAS,CAC9C,IAAMC,EAAUF,EAAKC,CAAK,EACtB1C,GAAwB2C,CAAO,EAC/BF,EAAKC,CAAK,EAAI,KAAK,gBAAgB5C,EAAM0C,EAAc5B,EAAM+B,EAASvC,CAAO,EACtEsB,GAAUiB,CAAO,GACxB,KAAK,SAASA,EAA2B/B,EAAMR,EAASN,EAAM0C,EAAcE,CAAK,OAGlF1C,GAAwByC,CAAI,EACnC3C,EAAK0C,CAAY,EAAI,KAAK,gBAAgB1C,EAAM0C,EAAc5B,EAAM6B,EAAMrC,CAAO,EAC1EsB,GAAUe,CAAI,GACrB,KAAK,SAASA,EAAwB7B,EAAMR,EAASN,EAAM0C,CAAY,EAG/E,IAAMI,EAAU9C,EAChB8C,EAAQ,WAAaP,EACrBO,EAAQ,mBAAqBN,EAC7BM,EAAQ,gBAAkBL,CAC9B,CAEU,gBAAgBF,EAAoBQ,EAAkBjC,EAAekC,EAAkC1C,EAA+B,CAC5I,IAAIS,EAAUiC,EAAU,SACpBC,EAAQD,EAAU,OACtB,GAAIA,EAAU,KAAM,CAChB,IAAME,EAAM,KAAK,WAAWpC,EAAMkC,EAAU,KAAM1C,EAAQ,YAAY,EACtE,GAAIsB,GAAUsB,CAAG,EACb,OAAKnC,IACDA,EAAU,KAAK,aAAa,QAAQmC,CAAG,GAEpC,CACH,SAAUnC,GAAW,GACrB,IAAAmC,GAGJD,EAAQC,EAGhB,GAAID,EAAO,CACP,IAAMC,EAA0B,CAC5B,SAAUnC,GAAW,IAEzB,OAAAmC,EAAI,MAAQ,CACR,UAAAX,EACA,SAAAQ,EACA,QAASE,EACT,UAAWC,GAERA,MAEP,OAER,CAEU,WAAWpC,EAAeqC,EAAahC,EAAmC,CAChF,GAAI,CACA,IAAMiC,EAAgBD,EAAI,QAAQ,GAAG,EACrC,GAAIC,IAAkB,EAAG,CACrB,IAAMpD,EAAO,KAAK,eAAe,WAAWc,EAAMqC,EAAI,UAAU,CAAC,CAAC,EAClE,OAAKnD,GACM,2BAA6BmD,EAI5C,GAAIC,EAAgB,EAAG,CACnB,IAAMC,EAAclC,EAAeA,EAAagC,CAAG,EAAIG,GAAI,MAAMH,CAAG,EAC9DI,EAAW,KAAK,
iBAAiB,YAAYF,CAAW,EAC9D,OAAKE,EAGEA,EAAS,YAAY,MAFjB,oCAAsCJ,EAIrD,IAAME,EAAclC,EAAeA,EAAagC,EAAI,UAAU,EAAGC,CAAa,CAAC,EAAIE,GAAI,MAAMH,EAAI,UAAU,EAAGC,CAAa,CAAC,EACtHG,EAAW,KAAK,iBAAiB,YAAYF,CAAW,EAC9D,GAAI,CAACE,EACD,MAAO,oCAAsCJ,EAEjD,GAAIC,IAAkBD,EAAI,OAAS,EAC/B,OAAOI,EAAS,YAAY,MAEhC,IAAMvD,EAAO,KAAK,eAAe,WAAWuD,EAAS,YAAY,MAAOJ,EAAI,UAAUC,EAAgB,CAAC,CAAC,EACxG,OAAKpD,GACM,0BAA4BmD,QAGlCK,EAAK,CACV,OAAO,OAAOA,CAAG,EAEzB,GCvRE,IAAOC,GAAP,KAA6B,CAnCnC,MAmCmC,CAAAC,EAAA,+BAK/B,SAASC,EAA6B,CAClC,GAAI,CAAC,KAAK,WAAa,CAAC,KAAK,IAAK,CAE9B,KAAK,UAAYA,EACjB,OAEJ,GAAI,CAAC,KAAK,MACN,KAAK,IAAM,CAAA,EACP,KAAK,WAAW,CAEhB,QAAWC,KAAO,KAAK,UAAU,iBAAiB,eAC9C,KAAK,IAAIA,CAAG,EAAI,KAAK,UAEzB,KAAK,UAAY,OAIzB,QAAWA,KAAOD,EAAS,iBAAiB,eACpC,KAAK,IAAIC,CAAG,IAAM,QAAa,KAAK,IAAIA,CAAG,IAAMD,GACjD,QAAQ,KAAK,sBAAsBC,CAAG,0DAA0DD,EAAS,iBAAiB,UAAU,IAAI,EAE5I,KAAK,IAAIC,CAAG,EAAID,CAExB,CAEA,YAAYE,EAAQ,CAChB,GAAI,KAAK,YAAc,OACnB,OAAO,KAAK,UAEhB,GAAI,KAAK,MAAQ,OACb,MAAM,IAAI,MAAM,uFAAuF,EAE3G,IAAMD,EAAME,GAAS,QAAQD,CAAG,EAC1BE,EAAW,KAAK,IAAIH,CAAG,EAC7B,GAAI,CAACG,EACD,MAAM,IAAI,MAAM,gEAAgEH,CAAG,IAAI,EAE3F,OAAOG,CACX,CAEA,IAAI,KAAG,CACH,OAAI,KAAK,YAAc,OACZ,CAAC,KAAK,SAAS,EAEtB,KAAK,MAAQ,OACN,OAAO,OAAO,KAAK,GAAG,EAE1B,CAAA,CACX,GCjCE,SAAUC,GAAeC,EAAY,CACvC,MAAO,CAAE,KAAAA,CAAI,CACjB,CAFgBC,EAAAF,GAAA,kBAuCV,IAAWG,IAAjB,SAAiBA,EAAkB,CAClBA,EAAA,IAAqC,CAAC,OAAQ,OAAQ,UAAU,CACjF,GAFiBA,KAAAA,GAAkB,CAAA,EAAA,EAY7B,IAAOC,GAAP,KAAyB,CA1G/B,MA0G+B,CAAAF,EAAA,2BAI3B,YAAYG,EAA6B,CAHxB,KAAA,QAAU,IAAIC,GAI3B,KAAK,WAAaD,EAAS,OAAO,aACtC,CAUA,SAAYE,EAAmCC,EAAsC,KAAMC,EAA+B,OAAM,CAC5H,GAAIA,IAAa,WACb,MAAM,IAAI,MAAM,2EAA2E,EAE/F,OAAW,CAACC,EAAMC,CAAE,IAAK,OAAO,QAAQJ,CAAY,EAAG,CACnD,IAAMK,EAAYD,EAClB,GAAI,MAAM,QAAQC,CAAS,EACvB,QAAWC,KAASD,EAAW,CAC3B,IAAME,EAA8B,CAChC,MAAO,KAAK,wBAAwBD,EAAOL,CAAO,EAClD,SAAAC,GAEJ,KAAK,SAASC,EAAMI,CAAK,UAEtB,OAAOF,GAAc,WAAY,CACxC,IAAME,EAA8B,CAChC,MAAO,KAAK,wBAAwBF,EAAWJ,CAAO,EACtD,SAAAC,GAEJ,KAAK,SAASC,EAAMI,CAAK,GAGrC,CAEU,wBAAwBD,EAAwBL,EAAgB,CACtE,MAAO,OAAOO,EAAMC,EAAQC,IAAe,CACvC,GAAI,CACA,MAAMJ,EAAM,KAAKL,EAASO,EAAMC,EAAQC,CAAW,QAC9CC,EAAK,CACV,GAAIC,GAAqBD,CAAG,EACxB,MAAMA,EAEV,QAAQ,MAAM,uCAAwCA,CAAG,EACzD,IAAME,EAAUF,aAAe,MAAQA,EAAI,QAAU,OAAOA,CAAG,EAC3DA,aAAe,OAASA,EAAI,OAC5B,QAAQ,MAAMA,EAAI,KAAK,EAE3BF,EAAO,QAAS,wCAA0CI,EAAS,CAAE,KAAAL,CAAI,CAAE,EAEnF,CACJ,CAEU,SAASL,EAAcI,EAA2B,CACxD,GAAIJ,IAAS,UAAW,CACpB,KAAK,QAAQ,IAAI,UAAWI,CAAK,EACjC,OAEJ,QAAWO,KAAW,KAAK,WAAW,eAAeX,CAAI,EACrD,KAAK,QAAQ,IAAIW,EAASP,CAAK,CAEvC,CAEA,UAAUJ,EAAcY,EAAiC,CACrD,IAAIC,EAASC,EAAO,KAAK,QAAQ,IAAId,CAAI,CAAC,EACrC,OAAO,KAAK,QAAQ,IAAI,SAAS,CAAC,EACvC,OAAIY,IACAC,EAASA,EAAO,OAAOT,GAASQ,EAAW,SAASR,EAAM,QAAQ,CAAC,GAEhES,EAAO,IAAIT,GAASA,EAAM,KAAK,CAC1C,GCnIE,IAAOW,GAAP,KAA+B,CAlDrC,MAkDqC,CAAAC,EAAA,iCAKjC,YAAYC,EAA6B,CACrC,KAAK,mBAAqBA,EAAS,WAAW,mBAC9C,KAAK,SAAWA,EAAS,gBAC7B,CAEA,MAAM,iBAAiBC,EAA2BC,EAA6B,CAAA,EAAIC,EAAc,oBAAkB,KAAI,CACnH,IAAMC,EAAcH,EAAS,YACvBI,EAA4B,CAAA,EAIlC,GAFA,MAAMC,GAAkBH,CAAW,GAE/B,CAACD,EAAQ,YAAcA,EAAQ,WAAW,SAAS,UAAU,KAC7D,KAAK,oBAAoBE,EAAaC,EAAaH,CAAO,EACtDA,EAAQ,uBAAyBG,EAAY,KAAKE,GAAI,CAAA,IAAAC,EAAC,QAAAA,EAAAD,EAAE,QAAI,MAAAC,IAAA,OAAA,OAAAA,EAAE,QAASC,GAAkB,WAAW,CAAA,IAIzG,KAAK,qBAAqBL,EAAaC,EAAaH,CAAO,EACvDA,EAAQ,wBAA0BG,EAAY,KAAKE,GAAI,CAAA,IAAAC,EAAC,QAAAA,EAAAD,EAAE,QAAI,MAAAC,IAAA,OAAA,OAAAA,EAAE,QAASC,GAAkB,YAAY,CAAA,KAI3G,KAAK,qBAAqBR,EAAUI,EAAaH,CAAO,EACpDA,EAAQ,wBAA0BG,EAAY,KAAKE,GAAI,CAAA,IAAAC,EAAC,QAAAA,EAAAD,EAAE,QAAI,MAAAC,IAAA,OAAA,OAAAA,EAAE,QAASC,GAAkB,YAAY,CAAA,IACvG,OAAOJ,EAKf,GAAI,CACAA,EAAY,KAAK,GAAG,MAAM,KAAK,YAAYD,EAAY,MAAOF,EAASC,CAAW,CAAC,QAC9EO,EAAK,CACV,GAAIC,GAAqBD
,CAAG,EACxB,MAAMA,EAEV,QAAQ,MAAM,uCAAwCA,CAAG,EAG7D,aAAMJ,GAAkBH,CAAW,EAE5BE,CACX,CAEU,oBAAoBD,EAA0BC,EAA2BO,EAA2B,CAC1G,QAAWC,KAAcT,EAAY,YAAa,CAC9C,IAAMU,EAAyB,CAC3B,SAAUC,GAAqB,OAAO,EACtC,MAAO,CACH,MAAO,CACH,KAAMF,EAAW,KAAQ,EACzB,UAAWA,EAAW,OAAU,GAEpC,IAAK,CACD,KAAMA,EAAW,KAAQ,EACzB,UAAWA,EAAW,OAAUA,EAAW,OAAS,IAG5D,QAASA,EAAW,QACpB,KAAMG,GAAeP,GAAkB,WAAW,EAClD,OAAQ,KAAK,UAAS,GAE1BJ,EAAY,KAAKS,CAAU,EAEnC,CAEU,qBAAqBV,EAA0BC,EAA2BO,EAA2B,CAC3G,QAAWK,KAAeb,EAAY,aAAc,CAChD,IAAIc,EAIJ,GAAI,MAAMD,EAAY,MAAM,WAAW,GAGnC,GAAI,kBAAmBA,EAAa,CAChC,IAAME,EAASF,EAAyC,cACxD,GAAK,MAAME,EAAM,WAAW,EAGrB,CAGH,IAAMC,EAAqB,CAAE,KAAM,EAAG,UAAW,CAAC,EAClDF,EAAQ,CAAE,MAAOE,EAAU,IAAKA,CAAQ,MAPb,CAC3B,IAAMA,EAAqB,CAAE,KAAMD,EAAM,QAAW,EAAG,UAAWA,EAAM,SAAU,EAClFD,EAAQ,CAAE,MAAOE,EAAU,IAAKA,CAAQ,SAShDF,EAAQG,GAAaJ,EAAY,KAAK,EAE1C,GAAIC,EAAO,CACP,IAAMJ,EAAyB,CAC3B,SAAUC,GAAqB,OAAO,EACtC,MAAAG,EACA,QAASD,EAAY,QACrB,KAAMD,GAAeP,GAAkB,YAAY,EACnD,OAAQ,KAAK,UAAS,GAE1BJ,EAAY,KAAKS,CAAU,GAGvC,CAEU,qBAAqBb,EAA2BI,EAA2BO,EAA2B,CAC5G,QAAWU,KAAarB,EAAS,WAAY,CACzC,IAAMsB,EAAeD,EAAU,MAC/B,GAAIC,EAAc,CACd,IAAMC,EAAwC,CAC1C,KAAMD,EAAa,UACnB,SAAUA,EAAa,SACvB,MAAOA,EAAa,MACpB,KAAM,CACF,KAAMd,GAAkB,aACxB,cAAec,EAAa,UAAU,MACtC,SAAUA,EAAa,SACvB,QAASA,EAAa,UAAU,WAGxClB,EAAY,KAAK,KAAK,aAAa,QAASkB,EAAa,QAASC,CAAI,CAAC,GAGnF,CAEU,MAAM,YAAYC,EAAmBvB,EAA4BC,EAAc,oBAAkB,KAAI,CAC3G,IAAMuB,EAAgC,CAAA,EAChCC,EAA+B5B,EAAA,CAAoB6B,EAAiDC,EAAiBL,IAA2B,CAClJE,EAAgB,KAAK,KAAK,aAAaE,EAAUC,EAASL,CAAI,CAAC,CACnE,EAFqC,YAIrC,aAAM,QAAQ,IAAIM,GAAUL,CAAQ,EAAE,IAAI,MAAMM,GAAO,CACnD,MAAMzB,GAAkBH,CAAW,EACnC,IAAM6B,EAAS,KAAK,mBAAmB,UAAUD,EAAK,MAAO7B,EAAQ,UAAU,EAC/E,QAAW+B,KAASD,EAChB,MAAMC,EAAMF,EAAMJ,EAAUxB,CAAW,CAE/C,CAAC,CAAC,EACKuB,CACX,CAEU,aAAgCE,EAAiDC,EAAiBL,EAA+B,CACvI,MAAO,CACH,QAAAK,EACA,MAAOK,GAAmBV,CAAI,EAC9B,SAAUT,GAAqBa,CAAQ,EACvC,KAAMJ,EAAK,KACX,gBAAiBA,EAAK,gBACtB,KAAMA,EAAK,KACX,mBAAoBA,EAAK,mBACzB,KAAMA,EAAK,KACX,OAAQ,KAAK,UAAS,EAE9B,CAEU,WAAS,CACf,OAAO,KAAK,SAAS,UACzB,GAGE,SAAUU,GAAsCV,EAA+B,CACjF,GAAIA,EAAK,MACL,OAAOA,EAAK,MAEhB,IAAIW,EAOJ,OANI,OAAOX,EAAK,UAAa,SACzBW,EAAUC,GAAoBZ,EAAK,KAAK,SAAUA,EAAK,SAAUA,EAAK,KAAK,EACpE,OAAOA,EAAK,SAAY,WAC/BW,EAAUE,GAAmBb,EAAK,KAAK,SAAUA,EAAK,QAASA,EAAK,KAAK,GAE7EW,IAAAA,EAAYX,EAAK,KAAK,UACjBW,EAMEA,EAAQ,MALJ,CACH,MAAO,CAAE,KAAM,EAAG,UAAW,CAAC,EAC9B,IAAK,CAAE,KAAM,EAAG,UAAW,CAAC,EAIxC,CAlBgBpC,EAAAmC,GAAA,sBAoBV,SAAUnB,GAAqBa,EAA+C,CAChF,OAAQA,EAAU,CACd,IAAK,QACD,MAAO,GACX,IAAK,UACD,MAAO,GACX,IAAK,OACD,MAAO,GACX,IAAK,OACD,MAAO,GACX,QACI,MAAM,IAAI,MAAM,gCAAkCA,CAAQ,EAEtE,CAbgB7B,EAAAgB,GAAA,wBAeV,IAAWN,IAAjB,SAAiBA,EAAiB,CACjBA,EAAA,YAAc,eACdA,EAAA,aAAe,gBACfA,EAAA,aAAe,eAChC,GAJiBA,KAAAA,GAAiB,CAAA,EAAA,ECjN5B,IAAO6B,GAAP,KAAwC,CAtC9C,MAsC8C,CAAAC,EAAA,0CAK1C,YAAYC,EAA6B,CACrC,KAAK,eAAiBA,EAAS,UAAU,eACzC,KAAK,aAAeA,EAAS,WAAW,YAC5C,CAEA,kBAAkBC,EAAeC,EAA0BC,EAA4BC,GAAYH,CAAI,EAAC,CACpGC,IAAAA,EAAS,KAAK,aAAa,QAAQD,CAAI,GACvC,IAAMI,EAAO,KAAK,eAAe,eAAeJ,CAAI,EACpD,GAAI,CAACC,EACD,MAAM,IAAI,MAAM,gBAAgBG,CAAI,eAAe,EAEvD,IAAIC,EACEC,EAAoBR,EAAA,IAAK,CAAA,IAAAS,EAAA,OAACF,IAAAA,EAAoBG,IAAkBD,EAAA,KAAK,aAAa,YAAYP,CAAI,KAAC,MAAAO,IAAA,OAAAA,EAAIP,EAAK,QAAQ,EAAC,EAAjG,qBAC1B,MAAO,CACH,KAAAA,EACA,KAAAC,EACA,IAAI,aAAW,CACX,OAAOK,EAAiB,CAC5B,EACA,iBAAkBE,GAAkBR,EAAK,QAAQ,EACjD,KAAMA,EAAK,MACX,YAAaE,EAAS,IACtB,KAAAE,EAER,GAuCSK,GAAP,KAA0C,CA1GhD,MA0GgD,CAAAX,EAAA,4CAI5C,YAAYC,EAA6B,CACrC,KAAK,YAAcA,EAAS,UAAU,cAC1C,CAEA,MAAM,mBAAmBG,EAA2BQ,EAAc,oBAAkB,KAAI,CACpF,IAAMC,EAAgC,CAAA,EAChCC,EAAWV,EAAS,YAAY,MACtC,QAAWW,KAAWC,GAAUF,CAAQ,EACpC,MAAMG,GAAkBL,CAAW,EACnCM,GAAiBH,CAAO,EAAE,OAAOI,GAAW,CAACC,GAAeD,CAAO,CAAC,EAAE,QAAQA,GA
AU,CAEpF,IAAME,EAAc,KAAK,kBAAkBF,CAAO,EAC9CE,GACAR,EAAM,KAAKQ,CAAW,CAE9B,CAAC,EAEL,OAAOR,CACX,CAEU,kBAAkBM,EAAsB,CAC9C,IAAMG,EAAkBH,EAAQ,UAAU,iBACpCI,EAAaJ,EAAQ,UAAU,SACrC,GAAI,CAACG,GAAmB,CAACC,EACrB,OAEJ,IAAMC,EAASnB,GAAYc,EAAQ,SAAS,EAAE,IAC9C,MAAO,CACH,UAAWK,EACX,WAAY,KAAK,YAAY,eAAeL,EAAQ,SAAS,EAC7D,UAAWG,EAAgB,YAC3B,WAAYA,EAAgB,KAC5B,QAASZ,GAAkBa,CAAU,EACrC,MAAOE,GAAS,OAAOH,EAAgB,YAAaE,CAAM,EAElE,GC9GE,IAAOE,GAAP,KAA4B,CAnClC,MAmCkC,CAAAC,EAAA,8BAAlC,aAAA,CACc,KAAA,iBAAmB,IACnB,KAAA,eAAiB,GAuC/B,CArCI,eAAeC,EAAa,CACxB,GAAIA,EAAK,WAAY,CACjB,IAAMC,EAAgB,KAAK,eAAeD,EAAK,UAAU,EACnDE,EAAa,KAAK,eAAeF,CAAI,EAE3C,OADiBC,EAAgB,KAAK,iBAAmBC,EAG7D,MAAO,EACX,CAEU,eAAe,CAAE,mBAAAC,EAAoB,gBAAAC,CAAe,EAAW,CACrE,GAAI,CAACD,EACD,MAAM,IAAI,MAAM,2CAA2C,EAE/D,OAAIC,IAAoB,OACbD,EAAqB,KAAK,eAAiBC,EAE/CD,CACX,CAEA,WAAwCH,EAAeK,EAAY,CAE/D,OADiBA,EAAK,MAAM,KAAK,gBAAgB,EACjC,OAAO,CAACC,EAAeC,IAAgB,CACnD,GAAI,CAACD,GAAiBC,EAAa,SAAW,EAC1C,OAAOD,EAEX,IAAME,EAAgBD,EAAa,QAAQ,KAAK,cAAc,EAC9D,GAAIC,EAAgB,EAAG,CACnB,IAAMC,EAAWF,EAAa,UAAU,EAAGC,CAAa,EAClDE,EAAa,SAASH,EAAa,UAAUC,EAAgB,CAAC,CAAC,EAC/DG,EAASL,EAAuDG,CAAQ,EAC9E,OAAOE,IAAQD,CAAU,EAE7B,OAAQJ,EAAqDC,CAAY,CAC7E,EAAGP,CAAI,CACX,GCjBE,IAAOY,GAAP,KAAmC,CAzDzC,MAyDyC,CAAAC,EAAA,qCAOrC,YAAYC,EAAmC,CAJ5B,KAAA,OAAS,IAAIC,GACtB,KAAA,SAAgD,CAAA,EAChD,KAAA,gBAAkB,GAGxB,KAAK,gBAAkBD,EAAS,eACpC,CAEA,IAAI,OAAK,CACL,OAAO,KAAK,OAAO,OACvB,CAEA,WAAWE,EAAwB,SAC/B,KAAK,iBAAkBC,GAAAC,EAAAF,EAAO,aAAa,aAAS,MAAAE,IAAA,OAAA,OAAAA,EAAE,iBAAa,MAAAD,IAAA,OAAAA,EAAI,EAC3E,CAEA,MAAM,YAAYD,EAAsC,CACpD,GAAI,KAAK,gBAAiB,CACtB,GAAIA,EAAO,SAAU,CAIjB,IAAMG,EAAY,KAAK,gBAAgB,IACvCH,EAAO,SAAS,CAEZ,QAASG,EAAU,IAAIC,GAAQ,KAAK,cAAcA,EAAK,iBAAiB,UAAU,CAAC,EACtF,EAGL,GAAIJ,EAAO,mBAAoB,CAG3B,IAAMK,EAAiB,KAAK,gBAAgB,IAAI,IAAID,IAA2B,CAE3E,QAAS,KAAK,cAAcA,EAAK,iBAAiB,UAAU,GAC/D,EAGKE,EAAU,MAAMN,EAAO,mBAAmBK,CAAc,EAC9DA,EAAe,QAAQ,CAACE,EAAMC,IAAO,CACjC,KAAK,2BAA2BD,EAAK,QAAUD,EAAQE,CAAG,CAAC,CAC/D,CAAC,GAGT,KAAK,OAAO,QAAO,CACvB,CAQA,oBAAoBC,EAAoC,CAC/CA,EAAO,UAGZ,OAAO,KAAKA,EAAO,QAAQ,EAAE,QAAQC,GAAU,CAC3C,KAAK,2BAA2BA,EAASD,EAAO,SAASC,CAAO,CAAC,CACrE,CAAC,CACL,CAEU,2BAA2BA,EAAiBC,EAAkB,CACpE,KAAK,SAASD,CAAO,EAAIC,CAC7B,CAQA,MAAM,iBAAiBC,EAAkBD,EAAqB,CAC1D,MAAM,KAAK,MAEX,IAAME,EAAc,KAAK,cAAcD,CAAQ,EAC/C,GAAI,KAAK,SAASC,CAAW,EACzB,OAAO,KAAK,SAASA,CAAW,EAAEF,CAAa,CAEvD,CAEU,cAAcG,EAAkB,CACtC,MAAO,GAAGA,CAAU,EACxB,GC3HE,IAAWC,IAAjB,SAAiBA,EAAU,CAGvB,SAAgBC,EAAOC,EAAoC,CACvD,MAAO,CACH,QAASC,EAAA,SAAY,MAAMD,EAAQ,EAA1B,WAEjB,CAJgBC,EAAAF,EAAA,UAAAD,EAAA,OAAMC,CAK1B,GARiBD,KAAAA,GAAU,CAAA,EAAA,ECuFrB,IAAOI,GAAP,KAA6B,CA3GnC,MA2GmC,CAAAC,EAAA,+BAmB/B,YAAYC,EAAmC,CAjB/C,KAAA,mBAAmC,CAE/B,WAAY,CACR,WAAY,CAAC,WAAY,MAAM,IAQpB,KAAA,gBAA4C,CAAA,EAC5C,KAAA,oBAAsB,IAAIC,GAC1B,KAAA,WAAa,IAAI,IACjB,KAAA,qBAAuB,IAAI,IACpC,KAAA,aAAeC,EAAc,QAGnC,KAAK,iBAAmBF,EAAS,UAAU,iBAC3C,KAAK,uBAAyBA,EAAS,UAAU,uBACjD,KAAK,aAAeA,EAAS,UAAU,aACvC,KAAK,gBAAkBA,EAAS,eACpC,CAEA,MAAM,MAAyBG,EAAsCC,EAAwB,CAAA,EAAIC,EAAc,oBAAkB,KAAI,SACjI,QAAWC,KAAYH,EAAW,CAC9B,IAAMI,EAAMD,EAAS,IAAI,SAAQ,EACjC,GAAIA,EAAS,QAAUJ,EAAc,WACjC,GAAI,OAAOE,EAAQ,YAAe,WAAaA,EAAQ,WAEnDE,EAAS,MAAQJ,EAAc,kBAC/BI,EAAS,YAAc,OACvB,KAAK,WAAW,OAAOC,CAAG,UACnB,OAAOH,EAAQ,YAAe,SAAU,CAC/C,IAAMI,EAAa,KAAK,WAAW,IAAID,CAAG,EACpCE,GAAqBC,EAAAF,GAAY,UAAM,MAAAE,IAAA,OAAA,OAAAA,EAAE,iBAC/C,GAAID,EAAoB,CAIpB,IAAME,IADgBC,EAAAR,EAAQ,WAAW,cAAU,MAAAQ,IAAA,OAAAA,EAAIC,GAAmB,KACzC,OAAOC,GAAK,CAACL,EAAmB,SAASK,CAAC,CAAC,EACxEH,EAAW,OAAS,IACpB,KAAK,WAAW,IAAIJ,EAAK,CACrB,UAAW,GACX,QAAS,CACL,WAAU,OAAA,OAAA,OAAA,OAAA,CAAA,EACHH,EAAQ,UAAU,EAAA,CACrB,WAAAO,CAAU,CAAA,GAGlB,OAAQH,EAAW,OACtB,EACDF,EAAS,MAAQJ,EA
Ac,0BAM3C,KAAK,WAAW,OAAOK,CAAG,EAGlC,KAAK,aAAeL,EAAc,QAClC,MAAM,KAAK,WAAWC,EAAU,IAAIY,GAAKA,EAAE,GAAG,EAAG,CAAA,CAAE,EACnD,MAAM,KAAK,eAAeZ,EAAWC,EAASC,CAAW,CAC7D,CAEA,MAAM,OAAOW,EAAgBC,EAAgBZ,EAAc,oBAAkB,KAAI,CAC7E,KAAK,aAAeH,EAAc,QAElC,QAAWgB,KAAcD,EACrB,KAAK,iBAAiB,eAAeC,CAAU,EAC/C,KAAK,WAAW,OAAOA,EAAW,SAAQ,CAAE,EAC5C,KAAK,aAAa,OAAOA,CAAU,EAGvC,QAAWC,KAAcH,EAAS,CAE9B,GAAI,CADgB,KAAK,iBAAiB,mBAAmBG,CAAU,EACrD,CAId,IAAMC,EAAc,KAAK,uBAAuB,UAAU,CAAE,MAAO,SAAS,EAAID,CAAU,EAC1FC,EAAY,MAAQlB,EAAc,QAClC,KAAK,iBAAiB,YAAYkB,CAAW,EAEjD,KAAK,WAAW,OAAOD,EAAW,SAAQ,CAAE,EAGhD,IAAME,EAAiBC,EAAON,CAAO,EAAE,OAAOC,CAAO,EAAE,IAAIM,GAAOA,EAAI,SAAQ,CAAE,EAAE,MAAK,EACvF,KAAK,iBAAiB,IACjB,OAAOC,GAAO,CAACH,EAAe,IAAIG,EAAI,IAAI,SAAQ,CAAE,GAAK,KAAK,aAAaA,EAAKH,CAAc,CAAC,EAC/F,QAAQG,GAAM,CACI,KAAK,gBAAgB,YAAYA,EAAI,GAAG,EAAE,WAAW,OAC7D,OAAOA,CAAG,EACjBA,EAAI,MAAQ,KAAK,IAAIA,EAAI,MAAOtB,EAAc,cAAc,EAC5DsB,EAAI,YAAc,MACtB,CAAC,EAEL,MAAM,KAAK,WAAWR,EAASC,CAAO,EAEtC,MAAMQ,GAAkBpB,CAAW,EAGnC,IAAMqB,EAAmB,KAAK,iBAAiB,IAC1C,OAAOF,GAAM,OAEV,OAAAA,EAAI,MAAQtB,EAAc,QAEvB,EAAC,GAAAQ,EAAA,KAAK,WAAW,IAAIc,EAAI,IAAI,SAAQ,CAAE,KAAC,MAAAd,IAAA,SAAAA,EAAE,WAAS,EAEzD,QAAO,EACZ,MAAM,KAAK,eAAegB,EAAkB,KAAK,mBAAoBrB,CAAW,CACpF,CAEU,MAAM,WAAWW,EAAgBC,EAAc,CACrD,MAAM,QAAQ,IAAI,KAAK,gBAAgB,IAAIU,GAAYA,EAASX,EAASC,CAAO,CAAC,CAAC,CACtF,CAKU,aAAaX,EAA2BsB,EAAwB,CAEtE,OAAItB,EAAS,WAAW,KAAKuB,GAAOA,EAAI,QAAU,MAAS,EAChD,GAGJ,KAAK,aAAa,WAAWvB,EAAUsB,CAAW,CAC7D,CAEA,SAASE,EAAgC,CACrC,YAAK,gBAAgB,KAAKA,CAAQ,EAC3BC,GAAW,OAAO,IAAK,CAC1B,IAAMC,EAAQ,KAAK,gBAAgB,QAAQF,CAAQ,EAC/CE,GAAS,GACT,KAAK,gBAAgB,OAAOA,EAAO,CAAC,CAE5C,CAAC,CACL,CAMU,MAAM,eAAe7B,EAA8BC,EAAuBC,EAA8B,CAC9G,KAAK,aAAaF,EAAWC,CAAO,EAEpC,MAAM,KAAK,cAAcD,EAAWD,EAAc,OAAQG,EAAamB,GACnE,KAAK,uBAAuB,OAAOA,EAAKnB,CAAW,CAAC,EAGxD,MAAM,KAAK,cAAcF,EAAWD,EAAc,eAAgBG,EAAamB,GAC3E,KAAK,aAAa,cAAcA,EAAKnB,CAAW,CAAC,EAGrD,MAAM,KAAK,cAAcF,EAAWD,EAAc,eAAgBG,EAAa,MAAMmB,GAAM,CACvF,IAAMS,EAAmB,KAAK,gBAAgB,YAAYT,EAAI,GAAG,EAAE,WAAW,iBAC9EA,EAAI,kBAAoB,MAAMS,EAAiB,mBAAmBT,EAAKnB,CAAW,CACtF,CAAC,EAED,MAAM,KAAK,cAAcF,EAAWD,EAAc,OAAQG,EAAamB,GACpD,KAAK,gBAAgB,YAAYA,EAAI,GAAG,EAAE,WAAW,OACtD,KAAKA,EAAKnB,CAAW,CACtC,EAED,MAAM,KAAK,cAAcF,EAAWD,EAAc,kBAAmBG,EAAamB,GAC9E,KAAK,aAAa,iBAAiBA,EAAKnB,CAAW,CAAC,EAGxD,IAAM6B,EAAgB/B,EAAU,OAAOqB,GAAO,KAAK,eAAeA,CAAG,CAAC,EACtE,MAAM,KAAK,cAAcU,EAAehC,EAAc,UAAWG,EAAamB,GAC1E,KAAK,SAASA,EAAKnB,CAAW,CAAC,EAInC,QAAWmB,KAAOrB,EAAW,CACzB,IAAMgC,EAAQ,KAAK,WAAW,IAAIX,EAAI,IAAI,SAAQ,CAAE,EAChDW,IACAA,EAAM,UAAY,IAG9B,CAEU,aAAahC,EAA8BC,EAAqB,CACtE,QAAWoB,KAAOrB,EAAW,CACzB,IAAMI,EAAMiB,EAAI,IAAI,SAAQ,EACtBW,EAAQ,KAAK,WAAW,IAAI5B,CAAG,GAIjC,CAAC4B,GAASA,EAAM,YAChB,KAAK,WAAW,IAAI5B,EAAK,CACrB,UAAW,GACX,QAAAH,EACA,OAAQ+B,GAAO,OAClB,EAGb,CAEU,MAAM,cAAchC,EAA8BiC,EAA4B/B,EACpFyB,EAA8D,CAC9D,IAAMO,EAAWlC,EAAU,OAAOY,GAAKA,EAAE,MAAQqB,CAAW,EAC5D,QAAW9B,KAAY+B,EACnB,MAAMZ,GAAkBpB,CAAW,EACnC,MAAMyB,EAASxB,CAAQ,EACvBA,EAAS,MAAQ8B,EAErB,MAAM,KAAK,iBAAiBC,EAAUD,EAAa/B,CAAW,EAC9D,KAAK,aAAe+B,CACxB,CAEA,aAAaA,EAA4BN,EAA+B,CACpE,YAAK,oBAAoB,IAAIM,EAAaN,CAAQ,EAC3CC,GAAW,OAAO,IAAK,CAC1B,KAAK,oBAAoB,OAAOK,EAAaN,CAAQ,CACzD,CAAC,CACL,CAIA,UAAUK,EAAsBG,EAAsCjC,EAA+B,CACjG,IAAIkB,EAOJ,GANIe,GAAc,SAAUA,EACxBf,EAAMe,EAENjC,EAAciC,EAElBjC,IAAAA,EAAgB,oBAAkB,MAC9BkB,EAAK,CACL,IAAMjB,EAAW,KAAK,iBAAiB,YAAYiB,CAAG,EACtD,GAAIjB,GAAYA,EAAS,MAAQ6B,EAC7B,OAAO,QAAQ,QAAQZ,CAAG,EAGlC,OAAI,KAAK,cAAgBY,EACd,QAAQ,QAAQ,MAAS,EACzB9B,EAAY,wBACZ,QAAQ,OAAOkC,EAAkB,EAErC,IAAI,QAAQ,CAACC,EAASC,IAAU,CACnC,IAAMC,EAAkB,KAAK,aAAaP,EAAO,IAAK,CAGlD,GAFAO,EAAgB,QAAO,EACvBC,EAAiB,QAAO,EACpBpB,EAAK,CACL,IAAMjB,EAAW,KAAK,iBAAiB,YAAYiB,CAAG,EAC
tDiB,EAAQlC,GAAU,GAAG,OAErBkC,EAAQ,MAAS,CAEzB,CAAC,EACKG,EAAmBtC,EAAa,wBAAwB,IAAK,CAC/DqC,EAAgB,QAAO,EACvBC,EAAiB,QAAO,EACxBF,EAAOF,EAAkB,CAC7B,CAAC,CACL,CAAC,CACL,CAEU,MAAM,iBAAiBpC,EAA8BgC,EAAsB9B,EAA8B,CAC/G,GAAIF,EAAU,SAAW,EAErB,OAEJ,IAAMyC,EAAY,KAAK,oBAAoB,IAAIT,CAAK,EACpD,QAAWR,KAAYiB,EACnB,MAAMnB,GAAkBpB,CAAW,EACnC,MAAMsB,EAASxB,EAAWE,CAAW,CAE7C,CAOU,eAAeC,EAAyB,CAC9C,MAAO,EAAQ,KAAK,gBAAgBA,CAAQ,EAAE,UAClD,CAMU,MAAM,SAASA,EAA2BD,EAA8B,SAC9E,IAAMwC,EAAY,KAAK,gBAAgB,YAAYvC,EAAS,GAAG,EAAE,WAAW,kBACtEwC,EAAoB,KAAK,gBAAgBxC,CAAQ,EAAE,WACnDF,EAAU,OAAO0C,GAAsB,SAAWA,EAAoB,OACtEC,EAAc,MAAMF,EAAU,iBAAiBvC,EAAUF,EAASC,CAAW,EAC/EC,EAAS,YACTA,EAAS,YAAY,KAAK,GAAGyC,CAAW,EAExCzC,EAAS,YAAcyC,EAI3B,IAAMZ,EAAQ,KAAK,WAAW,IAAI7B,EAAS,IAAI,SAAQ,CAAE,EACzD,GAAI6B,EAAO,EACPzB,EAAAyB,EAAM,UAAM,MAAAzB,IAAA,SAAZyB,EAAM,OAAW,CAAA,GACjB,IAAMa,GAAgBpC,EAAAR,GAAS,cAAU,MAAAQ,IAAA,OAAAA,EAAIC,GAAmB,IAC5DsB,EAAM,OAAO,iBACbA,EAAM,OAAO,iBAAiB,KAAK,GAAGa,CAAa,EAEnDb,EAAM,OAAO,iBAAmB,CAAC,GAAGa,CAAa,EAG7D,CAEU,gBAAgB1C,EAAyB,SAC/C,OAAOM,GAAAF,EAAA,KAAK,WAAW,IAAIJ,EAAS,IAAI,SAAQ,CAAE,KAAC,MAAAI,IAAA,OAAA,OAAAA,EAAE,WAAO,MAAAE,IAAA,OAAAA,EAAI,CAAA,CACpE,GChVE,IAAOqC,GAAP,KAA0B,CAnFhC,MAmFgC,CAAAC,EAAA,4BAuB5B,YAAYC,EAAmC,CAb5B,KAAA,YAAc,IAAI,IAKlB,KAAA,kBAAoB,IAAIC,GAMxB,KAAA,eAAiB,IAAI,IAGpC,KAAK,UAAYD,EAAS,UAAU,iBACpC,KAAK,gBAAkBA,EAAS,gBAChC,KAAK,cAAgBA,EAAS,aAClC,CAEA,kBAAkBE,EAAqBC,EAAmB,CACtD,IAAMC,EAAeC,GAAYH,CAAU,EAAE,IACvCI,EAAiC,CAAA,EACvC,YAAK,eAAe,QAAQC,GAAU,CAClCA,EAAQ,QAAQC,GAAW,CACnBC,GAAS,OAAOD,EAAS,UAAWJ,CAAY,GAAKI,EAAS,aAAeL,GAC7EG,EAAO,KAAKE,CAAQ,CAE5B,CAAC,CACL,CAAC,EACME,EAAOJ,CAAM,CACxB,CAEA,YAAYK,EAAmBC,EAAkB,CAC7C,IAAIC,EAAeH,EAAO,KAAK,YAAY,KAAI,CAAE,EACjD,OAAIE,IACAC,EAAeA,EAAa,OAAOC,GAAO,CAACF,GAAQA,EAAK,IAAIE,CAAG,CAAC,GAE7DD,EACF,IAAIC,GAAO,KAAK,oBAAoBA,EAAKH,CAAQ,CAAC,EAClD,KAAI,CACb,CAEU,oBAAoBG,EAAaH,EAAiB,OACxD,OAAKA,EAGgB,KAAK,kBAAkB,IAAIG,EAAKH,EAAU,IAAK,OAEhE,QAD4BI,EAAA,KAAK,YAAY,IAAID,CAAG,KAAC,MAAAC,IAAA,OAAAA,EAAI,CAAA,GAC9B,OAAOC,GAAK,KAAK,cAAc,UAAUA,EAAE,KAAML,CAAQ,CAAC,CACzF,CAAC,GALUI,EAAA,KAAK,YAAY,IAAID,CAAG,KAAC,MAAAC,IAAA,OAAAA,EAAI,CAAA,CAO5C,CAEA,OAAOD,EAAQ,CACX,IAAMG,EAAYH,EAAI,SAAQ,EAC9B,KAAK,YAAY,OAAOG,CAAS,EACjC,KAAK,kBAAkB,MAAMA,CAAS,EACtC,KAAK,eAAe,OAAOA,CAAS,CACxC,CAEA,MAAM,cAAcC,EAA2BC,EAAc,oBAAkB,KAAI,CAE/E,IAAMC,EAAU,MADC,KAAK,gBAAgB,YAAYF,EAAS,GAAG,EAC/B,WAAW,iBAAiB,eAAeA,EAAUC,CAAW,EACzFL,EAAMI,EAAS,IAAI,SAAQ,EACjC,KAAK,YAAY,IAAIJ,EAAKM,CAAO,EACjC,KAAK,kBAAkB,MAAMN,CAAG,CACpC,CAEA,MAAM,iBAAiBI,EAA2BC,EAAc,oBAAkB,KAAI,CAElF,IAAME,EAAY,MADD,KAAK,gBAAgB,YAAYH,EAAS,GAAG,EAC7B,UAAU,6BAA6B,mBAAmBA,EAAUC,CAAW,EAChH,KAAK,eAAe,IAAID,EAAS,IAAI,SAAQ,EAAIG,CAAS,CAC9D,CAEA,WAAWH,EAA2BI,EAAwB,CAC1D,IAAMC,EAAa,KAAK,eAAe,IAAIL,EAAS,IAAI,SAAQ,CAAE,EAClE,OAAKK,EAGEA,EAAW,KAAKC,GAAO,CAACA,EAAI,OAASF,EAAY,IAAIE,EAAI,UAAU,SAAQ,CAAE,CAAC,EAF1E,EAGf,GCjHE,IAAOC,GAAP,KAA8B,CA5DpC,MA4DoC,CAAAC,EAAA,gCAYhC,YAAYC,EAAmC,CAV/C,KAAA,oBAAoC,CAAA,EAOjB,KAAA,OAAS,IAAIC,GAI5B,KAAK,gBAAkBD,EAAS,gBAChC,KAAK,iBAAmBA,EAAS,UAAU,iBAC3C,KAAK,gBAAkBA,EAAS,UAAU,gBAC1C,KAAK,mBAAqBA,EAAS,UAAU,mBAC7C,KAAK,MAAQA,EAAS,UAAU,aACpC,CAEA,IAAI,OAAK,CACL,OAAO,KAAK,OAAO,OACvB,CAEA,WAAWE,EAAwB,OAC/B,KAAK,SAAUC,EAAAD,EAAO,oBAAgB,MAAAC,IAAA,OAAAA,EAAI,MAC9C,CAEA,YAAYC,EAA0B,CAGlC,OAAO,KAAK,MAAM,MAAMC,GAAQ,CAAA,IAAAF,EAAC,OAAA,KAAK,qBAAoBA,EAAA,KAAK,WAAO,MAAAA,IAAA,OAAAA,EAAI,CAAA,EAAIE,CAAK,CAAC,CAAA,CACxF,CAEA,MAAM,oBAAoBC,EAA4BC,EAAc,oBAAkB,KAAI,CACtF,IAAMC,EAAY,MAAM,KAAK,eAAeF,CAAO,EAGnD,MAAMG,GAAkBF,CAAW,EACnC,MAAM,KAAK,gBAAgB,MAAMC,EAAW,KAAK,oBAAqBD,CAAW,CACrF,CAMU,MAAM,eAAeD,EAA0B,CACrD,IAAMI,EAAiB,KAAK,gBAAgB,I
AAI,QAAQC,GAAKA,EAAE,iBAAiB,cAAc,EACxFH,EAA+B,CAAA,EAC/BI,EAAYb,EAACc,GAA6B,CAC5CL,EAAU,KAAKK,CAAQ,EAClB,KAAK,iBAAiB,YAAYA,EAAS,GAAG,GAC/C,KAAK,iBAAiB,YAAYA,CAAQ,CAElD,EALkB,aASlB,aAAM,KAAK,wBAAwBP,EAASM,CAAS,EACrD,MAAM,QAAQ,IACVN,EAAQ,IAAIQ,GAAM,CAACA,EAAI,KAAK,cAAcA,CAAE,CAAC,CAA2B,EACnE,IAAI,MAAMC,GAAS,KAAK,eAAe,GAAGA,EAAOL,EAAgBE,CAAS,CAAC,CAAC,EAErF,KAAK,OAAO,QAAO,EACZJ,CACX,CAOU,wBAAwBQ,EAA6BC,EAA+C,CAC1G,OAAO,QAAQ,QAAO,CAC1B,CAOU,cAAcC,EAAgC,CACpD,OAAOC,GAAI,MAAMD,EAAgB,GAAG,CACxC,CAMU,MAAM,eAAeA,EAAkCE,EAAiBV,EAA0BE,EAA8C,CACtJ,IAAMS,EAAU,MAAM,KAAK,mBAAmB,cAAcD,CAAU,EACtE,MAAM,QAAQ,IAAIC,EAAQ,IAAI,MAAMN,GAAQ,CACxC,GAAI,KAAK,aAAaG,EAAiBH,EAAOL,CAAc,GACxD,GAAIK,EAAM,YACN,MAAM,KAAK,eAAeG,EAAiBH,EAAM,IAAKL,EAAgBE,CAAS,UACxEG,EAAM,OAAQ,CACrB,IAAMF,EAAW,MAAM,KAAK,iBAAiB,oBAAoBE,EAAM,GAAG,EAC1EH,EAAUC,CAAQ,GAG9B,CAAC,CAAC,CACN,CAKU,aAAaS,EAAmCP,EAAuBL,EAAwB,CACrG,IAAMa,EAAOC,GAAS,SAAST,EAAM,GAAG,EACxC,GAAIQ,EAAK,WAAW,GAAG,EACnB,MAAO,GAEX,GAAIR,EAAM,YACN,OAAOQ,IAAS,gBAAkBA,IAAS,MACxC,GAAIR,EAAM,OAAQ,CACrB,IAAMU,EAAUD,GAAS,QAAQT,EAAM,GAAG,EAC1C,OAAOL,EAAe,SAASe,CAAO,EAE1C,MAAO,EACX,GCpJE,IAAOC,GAAP,KAAmB,CA9BzB,MA8ByB,CAAAC,EAAA,qBAKrB,YAAYC,EAA6B,CACrC,IAAMC,EAASD,EAAS,OAAO,aAAa,YAAYA,EAAS,QAAS,CACtE,gBAAiBA,EAAS,iBAAiB,gBAC9C,EACD,KAAK,WAAa,KAAK,sBAAsBC,CAAM,EACnD,IAAMC,EAAcC,GAAsBF,CAAM,EAAI,OAAO,OAAOA,CAAM,EAAIA,EAC5E,KAAK,gBAAkB,IAAIG,GAAgBF,EAAa,CACpD,iBAAkB,OACrB,CACL,CAEA,IAAI,YAAU,CACV,OAAO,KAAK,UAChB,CAEA,SAASG,EAAY,OACjB,IAAMC,EAAmB,KAAK,gBAAgB,SAASD,CAAI,EAC3D,MAAO,CACH,OAAQC,EAAiB,OACzB,OAAQA,EAAiB,OACzB,QAAQC,EAAAD,EAAiB,OAAO,UAAM,MAAAC,IAAA,OAAAA,EAAI,CAAA,EAElD,CAEU,sBAAsBC,EAA4B,CACxD,GAAIL,GAAsBK,CAAW,EAAG,OAAOA,EAC/C,IAAMP,EAASQ,GAA4BD,CAAW,EAAI,OAAO,OAAOA,EAAY,KAAK,EAAE,KAAI,EAAKA,EAC9FE,EAA2B,CAAA,EACjC,OAAAT,EAAO,QAAQU,GAASD,EAAIC,EAAM,IAAI,EAAIA,CAAK,EACxCD,CACX,GAME,SAAUE,GAAiBC,EAAgC,CAC7D,OAAO,MAAM,QAAQA,CAAe,IAAMA,EAAgB,SAAW,GAAK,SAAUA,EAAgB,CAAC,EACzG,CAFgBd,EAAAa,GAAA,oBAOV,SAAUH,GAA4BI,EAAgC,CACxE,OAAOA,GAAmB,UAAWA,GAAmB,gBAAiBA,CAC7E,CAFgBd,EAAAU,GAAA,+BAOV,SAAUN,GAAsBU,EAAgC,CAClE,MAAO,CAACD,GAAiBC,CAAe,GAAK,CAACJ,GAA4BI,CAAe,CAC7F,CAFgBd,EAAAI,GAAA,yBCmBV,SAAUW,GAAWC,EAAwBC,EAAsCC,EAA2B,CAChH,IAAIC,EACAC,EACA,OAAOJ,GAAS,UAChBI,EAAWH,EACXE,EAAOD,IAEPE,EAAWJ,EAAK,MAAM,MACtBG,EAAOF,GAENG,IACDA,EAAWC,EAAS,OAAO,EAAG,CAAC,GAGnC,IAAMC,EAAQC,GAASP,CAAI,EACrBQ,EAAoBC,GAAiBN,CAAI,EAEzCO,EAASC,GAAS,CACpB,MAAAL,EACA,SAAAF,EACA,QAASI,EACZ,EAED,OAAOI,GAAkB,CACrB,MAAO,EACP,OAAAF,EACA,SAAAN,EACH,CACL,CA5BgBS,EAAAd,GAAA,cA8BV,SAAUe,GAAQd,EAAwBE,EAA2B,CACvE,IAAMM,EAAoBC,GAAiBP,CAAO,EAC5CI,EAAQC,GAASP,CAAI,EAC3B,GAAIM,EAAM,SAAW,EACjB,MAAO,GAGX,IAAMS,EAAQT,EAAM,CAAC,EACfU,EAAOV,EAAMA,EAAM,OAAS,CAAC,EAC7BW,EAAaT,EAAkB,MAC/BU,EAAYV,EAAkB,IAEpC,MAAO,EAAQS,GAAY,KAAKF,CAAK,GAAM,EAAQG,GAAW,KAAKF,CAAI,CAC3E,CAbgBH,EAAAC,GAAA,WAehB,SAASP,GAASP,EAAsB,CACpC,IAAImB,EAAU,GACd,OAAI,OAAOnB,GAAS,SAChBmB,EAAUnB,EAEVmB,EAAUnB,EAAK,KAELmB,EAAQ,MAAMC,EAAc,CAE9C,CATSP,EAAAN,GAAA,YAmBT,IAAMc,GAAW,kCACXC,GAAiB,iDAEvB,SAASX,GAASY,EAA4B,WAC1C,IAAMb,EAAuB,CAAA,EACzBc,EAAcD,EAAQ,SAAS,KAC/BE,EAAmBF,EAAQ,SAAS,UACxC,QAASG,EAAI,EAAGA,EAAIH,EAAQ,MAAM,OAAQG,IAAK,CAC3C,IAAMX,EAAQW,IAAM,EACdV,EAAOU,IAAMH,EAAQ,MAAM,OAAS,EACtCI,EAAOJ,EAAQ,MAAMG,CAAC,EACtBE,EAAQ,EAEZ,GAAIb,GAASQ,EAAQ,QAAQ,MAAO,CAChC,IAAMM,GAAQC,EAAAP,EAAQ,QAAQ,SAAK,MAAAO,IAAA,OAAA,OAAAA,EAAE,KAAKH,CAAI,EAC1CE,IACAD,EAAQC,EAAM,MAAQA,EAAM,CAAC,EAAE,YAEhC,CACH,IAAMA,GAAQE,EAAAR,EAAQ,QAAQ,QAAI,MAAAQ,IAAA,OAAA,OAAAA,EAAE,KAAKJ,CAAI,EACzCE,IACAD,EAAQC,EAAM,MAAQA,EAAM,CAAC,EAAE,QAGvC,GAAIb,EAAM,CACN,IAAMa,GAAQG,EAAAT,EAAQ,QAAQ,OAAG,MAAAS,IAAA,OAAA,OAAAA,E
AAE,KAAKL,CAAI,EACxCE,IACAF,EAAOA,EAAK,UAAU,EAAGE,EAAM,KAAK,GAO5C,GAHAF,EAAOA,EAAK,UAAU,EAAGM,GAAcN,CAAI,CAAC,EACtBO,GAAeP,EAAMC,CAAK,GAE3BD,EAAK,QAEtB,GAAIjB,EAAO,OAAS,EAAG,CACnB,IAAMN,EAAWC,EAAS,OAAOmB,EAAaC,CAAgB,EAC9Df,EAAO,KAAK,CACR,KAAM,QACN,QAAS,GACT,MAAOyB,EAAM,OAAO/B,EAAUA,CAAQ,EACzC,OAEF,CACHiB,GAAS,UAAYO,EACrB,IAAMQ,EAAWf,GAAS,KAAKM,CAAI,EACnC,GAAIS,EAAU,CACV,IAAMC,EAAYD,EAAS,CAAC,EACtBE,EAAQF,EAAS,CAAC,EAClBnC,EAAQI,EAAS,OAAOmB,EAAaC,EAAmBG,CAAK,EAC7DW,EAAMlC,EAAS,OAAOmB,EAAaC,EAAmBG,EAAQS,EAAU,MAAM,EACpF3B,EAAO,KAAK,CACR,KAAM,MACN,QAAS4B,EACT,MAAOH,EAAM,OAAOlC,EAAOsC,CAAG,EACjC,EACDX,GAASS,EAAU,OACnBT,EAAQM,GAAeP,EAAMC,CAAK,EAGtC,GAAIA,EAAQD,EAAK,OAAQ,CACrB,IAAMa,EAAOb,EAAK,UAAUC,CAAK,EAC3Ba,EAAmB,MAAM,KAAKD,EAAK,SAASlB,EAAc,CAAC,EACjEZ,EAAO,KAAK,GAAGgC,GAAkBD,EAAkBD,EAAMhB,EAAaC,EAAmBG,CAAK,CAAC,GAIvGJ,IACAC,EAAmB,EAIvB,OAAIf,EAAO,OAAS,GAAKA,EAAOA,EAAO,OAAS,CAAC,EAAE,OAAS,QACjDA,EAAO,MAAM,EAAG,EAAE,EAGtBA,CACX,CA3ESG,EAAAF,GAAA,YA6ET,SAAS+B,GAAkBC,EAA0BhB,EAAciB,EAAmBC,EAAsB,CACxG,IAAMnC,EAAuB,CAAA,EAE7B,GAAIiC,EAAK,SAAW,EAAG,CACnB,IAAM1C,EAAQI,EAAS,OAAOuC,EAAWC,CAAc,EACjDN,EAAMlC,EAAS,OAAOuC,EAAWC,EAAiBlB,EAAK,MAAM,EACnEjB,EAAO,KAAK,CACR,KAAM,OACN,QAASiB,EACT,MAAOQ,EAAM,OAAOlC,EAAOsC,CAAG,EACjC,MACE,CACH,IAAIO,EAAY,EAChB,QAAWjB,KAASc,EAAM,CACtB,IAAMI,EAAalB,EAAM,MACnBmB,EAAerB,EAAK,UAAUmB,EAAWC,CAAU,EACrDC,EAAa,OAAS,GACtBtC,EAAO,KAAK,CACR,KAAM,OACN,QAASiB,EAAK,UAAUmB,EAAWC,CAAU,EAC7C,MAAOZ,EAAM,OACT9B,EAAS,OAAOuC,EAAWE,EAAYD,CAAc,EACrDxC,EAAS,OAAOuC,EAAWG,EAAaF,CAAc,CAAC,EAE9D,EAEL,IAAII,EAASD,EAAa,OAAS,EAC7BE,EAAUrB,EAAM,CAAC,EAUvB,GATAnB,EAAO,KAAK,CACR,KAAM,aACN,QAASwC,EACT,MAAOf,EAAM,OACT9B,EAAS,OAAOuC,EAAWE,EAAYG,EAASJ,CAAc,EAC9DxC,EAAS,OAAOuC,EAAWE,EAAYG,EAASC,EAAQ,OAASL,CAAc,CAAC,EAEvF,EACDI,GAAUC,EAAQ,OACdrB,EAAM,SAAW,EAAG,CACpBoB,GAAUpB,EAAM,CAAC,EAAE,OACnB,IAAMS,EAAQT,EAAM,CAAC,EACrBnB,EAAO,KAAK,CACR,KAAM,OACN,QAAS4B,EACT,MAAOH,EAAM,OACT9B,EAAS,OAAOuC,EAAWE,EAAYG,EAASJ,CAAc,EAC9DxC,EAAS,OAAOuC,EAAWE,EAAYG,EAASX,EAAM,OAASO,CAAc,CAAC,EAErF,OAEDnC,EAAO,KAAK,CACR,KAAM,OACN,QAAS,GACT,MAAOyB,EAAM,OACT9B,EAAS,OAAOuC,EAAWE,EAAYG,EAASJ,CAAc,EAC9DxC,EAAS,OAAOuC,EAAWE,EAAYG,EAASJ,CAAc,CAAC,EAEtE,EAELC,EAAYC,EAAalB,EAAM,CAAC,EAAE,OAEtC,IAAMsB,EAAaxB,EAAK,UAAUmB,CAAS,EACvCK,EAAW,OAAS,GACpBzC,EAAO,KAAK,CACR,KAAM,OACN,QAASyC,EACT,MAAOhB,EAAM,OACT9B,EAAS,OAAOuC,EAAWE,EAAYD,CAAc,EACrDxC,EAAS,OAAOuC,EAAWE,EAAYD,EAAiBM,EAAW,MAAM,CAAC,EAEjF,EAIT,OAAOzC,CACX,CA1ESG,EAAA6B,GAAA,qBA4ET,IAAMU,GAAqB,KACrBC,GAAqB,OAE3B,SAASnB,GAAeP,EAAcC,EAAa,CAC/C,IAAMC,EAAQF,EAAK,UAAUC,CAAK,EAAE,MAAMwB,EAAkB,EAC5D,OAAIvB,EACOD,EAAQC,EAAM,MAEdF,EAAK,MAEpB,CAPSd,EAAAqB,GAAA,kBAST,SAASD,GAAcN,EAAY,CAC/B,IAAME,EAAQF,EAAK,MAAM0B,EAAkB,EAC3C,GAAIxB,GAAS,OAAOA,EAAM,OAAU,SAChC,OAAOA,EAAM,KAGrB,CANShB,EAAAoB,GAAA,iBAUT,SAASrB,GAAkBW,EAAqB,aAC5C,IAAM+B,EAA0BjD,EAAS,OAAOkB,EAAQ,SAAS,KAAMA,EAAQ,SAAS,SAAS,EACjG,GAAIA,EAAQ,OAAO,SAAW,EAC1B,OAAO,IAAIgC,GAAiB,CAAA,EAAIpB,EAAM,OAAOmB,EAAeA,CAAa,CAAC,EAE9E,IAAME,EAA2B,CAAA,EACjC,KAAOjC,EAAQ,MAAQA,EAAQ,OAAO,QAAQ,CAC1C,IAAMkC,EAAUC,GAAkBnC,EAASiC,EAASA,EAAS,OAAS,CAAC,CAAC,EACpEC,GACAD,EAAS,KAAKC,CAAO,EAG7B,IAAMxD,GAAQ8B,GAAAD,EAAA0B,EAAS,CAAC,KAAC,MAAA1B,IAAA,OAAA,OAAAA,EAAE,MAAM,SAAK,MAAAC,IAAA,OAAAA,EAAIuB,EACpCf,GAAMoB,GAAA3B,EAAAwB,EAASA,EAAS,OAAS,CAAC,KAAC,MAAAxB,IAAA,OAAA,OAAAA,EAAE,MAAM,OAAG,MAAA2B,IAAA,OAAAA,EAAIL,EACxD,OAAO,IAAIC,GAAiBC,EAAUrB,EAAM,OAAOlC,EAAOsC,CAAG,CAAC,CAClE,CAfS1B,EAAAD,GAAA,qBAiBT,SAAS8C,GAAkBnC,EAAuBP,EAAmB,CACjE,IAAM4C,EAAOrC,EAAQ,OAAOA,EAAQ,KAAK,EACzC,GAAIqC,EAAK,OAAS,MACd,OAAOC,GAActC,EAAS,EAAK,EAChC,GAAIqC,EAAK,OAAS,QAAUA,EAAK,OAAS,aAC7C,OAAO
E,GAAevC,CAAO,EAE7BwC,GAAgBH,EAAM5C,CAAI,EAC1BO,EAAQ,OAGhB,CAXSV,EAAA6C,GAAA,qBAaT,SAASK,GAAgBC,EAAmBP,EAAsB,CAC9D,GAAIA,EAAS,CACT,IAAM9B,EAAO,IAAIsC,GAAc,GAAID,EAAM,KAAK,EAC1C,YAAaP,EACbA,EAAQ,QAAQ,KAAK9B,CAAI,EAEzB8B,EAAQ,QAAQ,QAAQ,KAAK9B,CAAI,EAG7C,CATSd,EAAAkD,GAAA,mBAWT,SAASD,GAAevC,EAAqB,CACzC,IAAIyC,EAAQzC,EAAQ,OAAOA,EAAQ,KAAK,EAClC2C,EAAaF,EACfG,EAAYH,EACV1D,EAAuB,CAAA,EAC7B,KAAO0D,GAASA,EAAM,OAAS,SAAWA,EAAM,OAAS,OACrD1D,EAAM,KAAK8D,GAAiB7C,CAAO,CAAC,EACpC4C,EAAYH,EACZA,EAAQzC,EAAQ,OAAOA,EAAQ,KAAK,EAExC,OAAO,IAAI8C,GAAc/D,EAAO6B,EAAM,OAAO+B,EAAW,MAAM,MAAOC,EAAU,MAAM,GAAG,CAAC,CAC7F,CAXStD,EAAAiD,GAAA,kBAaT,SAASM,GAAiB7C,EAAqB,CAE3C,OADcA,EAAQ,OAAOA,EAAQ,KAAK,EAChC,OAAS,aACRsC,GAActC,EAAS,EAAI,EAE3B+C,GAAe/C,CAAO,CAErC,CAPSV,EAAAuD,GAAA,oBAST,SAASP,GAActC,EAAuBgD,EAAe,CACzD,IAAMC,EAAWjD,EAAQ,OAAOA,EAAQ,OAAO,EACzCkD,EAAOD,EAAS,QAAQ,UAAU,CAAC,EACnCE,EAAYnD,EAAQ,OAAOA,EAAQ,KAAK,EAC9C,GAAImD,GAAW,OAAS,OACpB,GAAIH,EAAQ,CACR,IAAMI,EAAUL,GAAe/C,CAAO,EACtC,OAAO,IAAIqD,GACPH,EACA,IAAIJ,GAAc,CAACM,CAAO,EAAGA,EAAQ,KAAK,EAC1CJ,EACApC,EAAM,OAAOqC,EAAS,MAAM,MAAOG,EAAQ,MAAM,GAAG,CAAC,MAEtD,CACH,IAAME,EAAUf,GAAevC,CAAO,EACtC,OAAO,IAAIqD,GACPH,EACAI,EACAN,EACApC,EAAM,OAAOqC,EAAS,MAAM,MAAOK,EAAQ,MAAM,GAAG,CAAC,MAG1D,CACH,IAAMC,EAAQN,EAAS,MACvB,OAAO,IAAII,GAAaH,EAAM,IAAIJ,GAAc,CAAA,EAAIS,CAAK,EAAGP,EAAQO,CAAK,EAEjF,CA1BSjE,EAAAgD,GAAA,iBA4BT,SAASS,GAAe/C,EAAqB,CACzC,IAAMyC,EAAQzC,EAAQ,OAAOA,EAAQ,OAAO,EAC5C,OAAO,IAAI0C,GAAcD,EAAM,QAASA,EAAM,KAAK,CACvD,CAHSnD,EAAAyD,GAAA,kBAuBT,SAAS7D,GAAiBP,EAA2B,CACjD,GAAI,CAACA,EACD,OAAOO,GAAiB,CACpB,MAAO,MACP,IAAK,KACL,KAAM,IACT,EAEL,GAAM,CAAE,MAAAR,EAAO,IAAAsC,EAAK,KAAAZ,CAAI,EAAKzB,EAC7B,MAAO,CACH,MAAO6E,GAAgB9E,EAAO,EAAI,EAClC,IAAK8E,GAAgBxC,EAAK,EAAK,EAC/B,KAAMwC,GAAgBpD,EAAM,EAAI,EAExC,CAdSd,EAAAJ,GAAA,oBAgBT,SAASsE,GAAgBC,EAAqC/E,EAAc,CACxE,GAAI,OAAO+E,GAAW,UAAY,OAAOA,GAAW,SAAU,CAC1D,IAAMC,EAAU,OAAOD,GAAW,SAAWE,GAAaF,CAAM,EAAIA,EAAO,OAC3E,OAAI/E,EACO,IAAI,OAAO,QAAQgF,CAAO,EAAE,EAE5B,IAAI,OAAO,OAAOA,CAAO,OAAO,MAG3C,QAAOD,CAEf,CAXSnE,EAAAkE,GAAA,mBAaT,IAAMxB,GAAN,KAAsB,CAzetB,MAyesB,CAAA1C,EAAA,yBAKlB,YAAY2C,EAA0BsB,EAAY,CAC9C,KAAK,SAAWtB,EAChB,KAAK,MAAQsB,CACjB,CAEA,OAAOL,EAAY,CACf,OAAO,KAAK,WAAU,EAAG,KAAKU,GAAKA,EAAE,OAASV,CAAI,CACtD,CAEA,QAAQA,EAAY,CAChB,OAAO,KAAK,WAAU,EAAG,OAAOU,GAAKA,EAAE,OAASV,CAAI,CACxD,CAEQ,YAAU,CACd,OAAO,KAAK,SAAS,OAAQ,GAAqB,SAAU,CAAC,CACjE,CAEA,UAAQ,CACJ,IAAInC,EAAQ,GACZ,QAAWmB,KAAW,KAAK,SACvB,GAAInB,EAAM,SAAW,EACjBA,EAAQmB,EAAQ,SAAQ,MACrB,CACH,IAAM2B,EAAO3B,EAAQ,SAAQ,EAC7BnB,GAAS+C,GAAa/C,CAAK,EAAI8C,EAGvC,OAAO9C,EAAM,KAAI,CACrB,CAEA,WAAWpC,EAA4B,CACnC,IAAIoC,EAAQ,GACZ,QAAWmB,KAAW,KAAK,SACvB,GAAInB,EAAM,SAAW,EACjBA,EAAQmB,EAAQ,WAAWvD,CAAO,MAC/B,CACH,IAAMkF,EAAO3B,EAAQ,WAAWvD,CAAO,EACvCoC,GAAS+C,GAAa/C,CAAK,EAAI8C,EAGvC,OAAO9C,EAAM,KAAI,CACrB,GAGEsC,GAAN,KAAkB,CA1hBlB,MA0hBkB,CAAA/D,EAAA,qBAMd,YAAY4D,EAActD,EAAyBoD,EAAiBO,EAAY,CAC5E,KAAK,KAAOL,EACZ,KAAK,QAAUtD,EACf,KAAK,OAASoD,EACd,KAAK,MAAQO,CACjB,CAEA,UAAQ,CACJ,IAAIM,EAAO,IAAI,KAAK,IAAI,GAClBjE,EAAU,KAAK,QAAQ,SAAQ,EAMrC,OALI,KAAK,QAAQ,QAAQ,SAAW,EAChCiE,EAAO,GAAGA,CAAI,IAAIjE,CAAO,GAClB,KAAK,QAAQ,QAAQ,OAAS,IACrCiE,EAAO,GAAGA,CAAI;EAAKjE,CAAO,IAE1B,KAAK,OAEE,IAAIiE,CAAI,IAERA,CAEf,CAEA,WAAWlF,EAA4B,SACnC,OAAO6B,GAAAD,EAAA5B,GAAS,aAAS,MAAA4B,IAAA,OAAA,OAAAA,EAAA,KAAA5B,EAAG,IAAI,KAAC,MAAA6B,IAAA,OAAAA,EAAI,KAAK,kBAAkB7B,CAAO,CACvE,CAEQ,kBAAkBA,EAA4B,CAClD,IAAMiB,EAAU,KAAK,QAAQ,WAAWjB,CAAO,EAC/C,GAAI,KAAK,OAAQ,CACb,IAAMoF,EAAWC,GAAgB,KAAK,KAAMpE,EAASjB,GAAW,CAAA,CAAE,EAClE,GAAI,OAAOoF,GAAa,SACpB,OAAOA,EAGf,IAAIE,EAAS,GACTtF,GAAS,MAAQ,UAAYA,GAAS,MAAQ,OAC9CsF,EAAS,IACFtF,GAAS,MAAQ,OACxBsF,E
AAS,KACFtF,GAAS,MAAQ,gBACxBsF,EAAS,OAEb,IAAIJ,EAAO,GAAGI,CAAM,IAAI,KAAK,IAAI,GAAGA,CAAM,GAM1C,OALI,KAAK,QAAQ,QAAQ,SAAW,EAChCJ,EAAO,GAAGA,CAAI,WAAMjE,CAAO,GACpB,KAAK,QAAQ,QAAQ,OAAS,IACrCiE,EAAO,GAAGA,CAAI;EAAKjE,CAAO,IAE1B,KAAK,OAEE,IAAIiE,CAAI,IAERA,CAEf,GAGJ,SAASG,GAAgBE,EAAatE,EAAiBjB,EAA2B,SAC9E,GAAIuF,IAAQ,aAAeA,IAAQ,YAAcA,IAAQ,OAAQ,CAC7D,IAAM7D,EAAQT,EAAQ,QAAQ,GAAG,EAC7BuE,EAAUvE,EACd,GAAIS,EAAQ,EAAG,CACX,IAAM+D,EAAezD,GAAef,EAASS,CAAK,EAClD8D,EAAUvE,EAAQ,UAAUwE,CAAY,EACxCxE,EAAUA,EAAQ,UAAU,EAAGS,CAAK,EAExC,OAAI6D,IAAQ,YAAeA,IAAQ,QAAUvF,EAAQ,OAAS,UAE1DwF,EAAU,KAAKA,CAAO,OAEL3D,GAAAD,EAAA5B,EAAQ,cAAU,MAAA4B,IAAA,OAAA,OAAAA,EAAA,KAAA5B,EAAGiB,EAASuE,CAAO,KAAC,MAAA3D,IAAA,OAAAA,EAAI6D,GAAkBzE,EAASuE,CAAO,EAIzG,CAjBS7E,EAAA0E,GAAA,mBAmBT,SAASK,GAAkBzE,EAAiBuE,EAAe,CACvD,GAAI,CACA,OAAAG,GAAI,MAAM1E,EAAS,EAAI,EAChB,IAAIuE,CAAO,KAAKvE,CAAO,SAC1B,CACJ,OAAOA,EAEf,CAPSN,EAAA+E,GAAA,qBAST,IAAMvB,GAAN,KAAmB,CAtnBnB,MAsnBmB,CAAAxD,EAAA,sBAIf,YAAYP,EAAsBwE,EAAY,CAC1C,KAAK,QAAUxE,EACf,KAAK,MAAQwE,CACjB,CAEA,UAAQ,CACJ,IAAIM,EAAO,GACX,QAAS1D,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IAAK,CAC1C,IAAM6C,EAAS,KAAK,QAAQ7C,CAAC,EACvBkC,EAAO,KAAK,QAAQlC,EAAI,CAAC,EAC/B0D,GAAQb,EAAO,SAAQ,EACnBX,GAAQA,EAAK,MAAM,MAAM,KAAOW,EAAO,MAAM,MAAM,OACnDa,GAAQ;GAGhB,OAAOA,CACX,CAEA,WAAWlF,EAA4B,CACnC,IAAIkF,EAAO,GACX,QAAS1D,EAAI,EAAGA,EAAI,KAAK,QAAQ,OAAQA,IAAK,CAC1C,IAAM6C,EAAS,KAAK,QAAQ7C,CAAC,EACvBkC,EAAO,KAAK,QAAQlC,EAAI,CAAC,EAC/B0D,GAAQb,EAAO,WAAWrE,CAAO,EAC7B0D,GAAQA,EAAK,MAAM,MAAM,KAAOW,EAAO,MAAM,MAAM,OACnDa,GAAQ;GAGhB,OAAOA,CACX,GAGEnB,GAAN,KAAmB,CA1pBnB,MA0pBmB,CAAApD,EAAA,sBAIf,YAAYuE,EAAcN,EAAY,CAClC,KAAK,KAAOM,EACZ,KAAK,MAAQN,CACjB,CAEA,UAAQ,CACJ,OAAO,KAAK,IAChB,CACA,YAAU,CACN,OAAO,KAAK,IAChB,GAIJ,SAASO,GAAaD,EAAY,CAC9B,OAAIA,EAAK,SAAS;CAAI,EACX;EAEA;;CAEf,CANSvE,EAAAwE,GAAA,gBClpBH,IAAOS,GAAP,KAAiC,CA1BvC,MA0BuC,CAAAC,EAAA,mCAKnC,YAAYC,EAA6B,CACrC,KAAK,aAAeA,EAAS,OAAO,UAAU,aAC9C,KAAK,gBAAkBA,EAAS,cAAc,eAClD,CAEA,iBAAiBC,EAAa,CAC1B,IAAMC,EAAU,KAAK,gBAAgB,WAAWD,CAAI,EACpD,GAAIC,GAAWC,GAAQD,CAAO,EAE1B,OADoBE,GAAWF,CAAO,EACnB,WAAW,CAC1B,WAAYH,EAAA,CAACM,EAAMC,IACR,KAAK,0BAA0BL,EAAMI,EAAMC,CAAO,EADjD,cAGZ,UAAWP,EAACQ,GACD,KAAK,yBAAyBN,EAAMM,CAAG,EADvC,aAGd,CAGT,CAEU,0BAA0BN,EAAeO,EAAcF,EAAe,OAC5E,IAAMG,GAAcC,EAAA,KAAK,4BAA4BT,EAAMO,CAAI,KAAC,MAAAE,IAAA,OAAAA,EAAI,KAAK,sBAAsBT,EAAMO,CAAI,EACzG,GAAIC,GAAeA,EAAY,YAAa,CACxC,IAAME,EAAOF,EAAY,YAAY,MAAM,MAAM,KAAO,EAClDG,EAAYH,EAAY,YAAY,MAAM,MAAM,UAAY,EAC5DI,EAAMJ,EAAY,YAAY,KAAK,CAAE,SAAU,IAAIE,CAAI,IAAIC,CAAS,EAAE,CAAE,EAC9E,MAAO,IAAIN,CAAO,KAAKO,EAAI,SAAQ,CAAE,QAErC,OAER,CAEU,yBAAyBC,EAAgBC,EAAc,CAGjE,CAEU,4BAA4Bd,EAAeO,EAAY,CAE7D,IAAMQ,EADWC,GAAYhB,CAAI,EACJ,kBAC7B,GAAI,CAACe,EACD,OAEJ,IAAIE,EAAmCjB,EACvC,EAAG,CAEC,IAAMQ,EADkBO,EAAY,IAAIE,CAAW,EACf,KAAKC,GAAKA,EAAE,OAASX,CAAI,EAC7D,GAAIC,EACA,OAAOA,EAEXS,EAAcA,EAAY,iBACrBA,EAGb,CAEU,sBAAsBjB,EAAeO,EAAY,CAEvD,OADoB,KAAK,aAAa,YAAW,EAAG,KAAKW,GAAKA,EAAE,OAASX,CAAI,CAEjF,GCnEE,IAAOY,GAAP,KAA6B,CAxBnC,MAwBmC,CAAAC,EAAA,+BAE/B,YAAYC,EAA6B,CACrC,KAAK,cAAgB,IAAMA,EAAS,OAAO,aAC/C,CACA,WAAWC,EAAa,OACpB,OAAGC,GAAqBD,CAAI,EACjBA,EAAK,UAETE,EAAAC,GAAgBH,EAAK,SAAU,KAAK,cAAa,EAAG,qBAAqB,KAAC,MAAAE,IAAA,OAAA,OAAAA,EAAE,IACvF,GClCJ,IAAAE,GAAA,GAOAC,EAAAD,GAAc,YCwBR,IAAOE,GAAP,KAAyB,CA/B/B,MA+B+B,CAAAC,EAAA,2BAI3B,YAAYC,EAA6B,CACrC,KAAK,WAAaA,EAAS,OAAO,aACtC,CAEA,MAAyBC,EAAY,CACjC,OAAO,QAAQ,QAAQ,KAAK,WAAW,MAASA,CAAI,CAAC,CACzD,GAGkBC,GAAhB,KAA2C,CA5CjD,MA4CiD,CAAAH,EAAA,oCAiB7C,YAAYC,EAA6B,CAX/B,KAAA,YAAc,EAKd,KAAA,iBAAmB,IACnB,KAAA,WAA6B,CAAA,EAC7B,KAAA,MAAuC,CAAA,EAK7C,KAAK,SAAWA,EAAS,WAAW,QACxC,CAEU,mBAAiB,CACvB,KAAO,KAAK,WAAW,OAAS,KAAK,aAAa,CA
C9C,IAAMG,EAAS,KAAK,aAAY,EAChCA,EAAO,QAAQ,IAAK,CAChB,GAAI,KAAK,MAAM,OAAS,EAAG,CACvB,IAAMC,EAAW,KAAK,MAAM,MAAK,EAC7BA,IACAD,EAAO,KAAI,EACXC,EAAS,QAAQD,CAAM,GAGnC,CAAC,EACD,KAAK,WAAW,KAAKA,CAAM,EAEnC,CAEA,MAAM,MAAyBF,EAAcI,EAA8B,CACvE,IAAMF,EAAS,MAAM,KAAK,oBAAoBE,CAAW,EACnDD,EAAW,IAAIE,GACjBC,EAIEC,EAAeH,EAAY,wBAAwB,IAAK,CAC1DE,EAAU,WAAW,IAAK,CACtB,KAAK,gBAAgBJ,CAAM,CAC/B,EAAG,KAAK,gBAAgB,CAC5B,CAAC,EACD,OAAAA,EAAO,MAAMF,CAAI,EAAE,KAAKQ,GAAS,CAC7B,IAAMC,EAAW,KAAK,SAAS,QAAWD,CAAM,EAChDL,EAAS,QAAQM,CAAQ,CAC7B,CAAC,EAAE,MAAMC,GAAM,CACXP,EAAS,OAAOO,CAAG,CACvB,CAAC,EAAE,QAAQ,IAAK,CACZH,EAAa,QAAO,EACpB,aAAaD,CAAO,CACxB,CAAC,EACMH,EAAS,OACpB,CAEU,gBAAgBD,EAAoB,CAC1CA,EAAO,UAAS,EAChB,IAAMS,EAAQ,KAAK,WAAW,QAAQT,CAAM,EACxCS,GAAS,GACT,KAAK,WAAW,OAAOA,EAAO,CAAC,CAEvC,CAEU,MAAM,oBAAoBP,EAA8B,CAC9D,KAAK,kBAAiB,EACtB,QAAWF,KAAU,KAAK,WACtB,GAAIA,EAAO,MACP,OAAAA,EAAO,KAAI,EACJA,EAGf,IAAMC,EAAW,IAAIE,GACrB,OAAAD,EAAY,wBAAwB,IAAK,CACrC,IAAMO,EAAQ,KAAK,MAAM,QAAQR,CAAQ,EACrCQ,GAAS,GACT,KAAK,MAAM,OAAOA,EAAO,CAAC,EAE9BR,EAAS,OAAOS,EAAkB,CACtC,CAAC,EACD,KAAK,MAAM,KAAKT,CAAQ,EACjBA,EAAS,OACpB,GAQSU,GAAP,KAAmB,CA3IzB,MA2IyB,CAAAf,EAAA,qBAUrB,IAAI,OAAK,CACL,OAAO,KAAK,MAChB,CAEA,IAAI,SAAO,CACP,OAAO,KAAK,eAAe,KAC/B,CAEA,YAAYgB,EAAgCC,EAAkCC,EAAgCC,EAAqB,CAdhH,KAAA,eAAiB,IAAI,WAE9B,KAAA,SAAW,IAAIZ,GACf,KAAA,OAAS,GACT,KAAA,SAAW,GAWjB,KAAK,YAAcS,EACnB,KAAK,WAAaG,EAClBF,EAAUP,GAAS,CACf,IAAMU,EAAcV,EACpB,KAAK,SAAS,QAAQU,CAAW,EACjC,KAAK,OAAM,CACf,CAAC,EACDF,EAAQG,GAAQ,CACZ,KAAK,SAAS,OAAOA,CAAK,EAC1B,KAAK,OAAM,CACf,CAAC,CACL,CAEA,WAAS,CACL,KAAK,SAAS,OAAOP,EAAkB,EACvC,KAAK,WAAU,CACnB,CAEA,MAAI,CACA,KAAK,OAAS,EAClB,CAEA,QAAM,CACF,KAAK,SAAW,GAChB,KAAK,OAAS,GACd,KAAK,eAAe,KAAI,CAC5B,CAEA,MAAMZ,EAAY,CACd,GAAI,KAAK,SACL,MAAM,IAAI,MAAM,uBAAuB,EAE3C,YAAK,SAAW,GAChB,KAAK,SAAW,IAAIK,GACpB,KAAK,YAAYL,CAAI,EACd,KAAK,SAAS,OACzB,GCnJE,IAAOoB,GAAP,KAA2B,CA/CjC,MA+CiC,CAAAC,EAAA,6BAAjC,aAAA,CAEY,KAAA,oBAAsB,IAAI,0BAC1B,KAAA,WAA0B,CAAA,EAC1B,KAAA,UAAyB,CAAA,EACzB,KAAA,KAAO,EA6DnB,CA3DI,MAAMC,EAAwD,CAC1D,KAAK,YAAW,EAChB,IAAMC,EAAc,IAAI,0BACxB,YAAK,oBAAsBA,EACpB,KAAK,QAAQ,KAAK,WAAYD,EAAQC,EAAY,KAAK,CAClE,CAEA,KAAQD,EAA6B,CACjC,OAAO,KAAK,QAAQ,KAAK,UAAWA,CAAM,CAC9C,CAEQ,QAAkBE,EAAoBF,EAAuBG,EAAqC,CACtG,IAAMC,EAAW,IAAIC,GACfC,EAAmB,CACrB,OAAAN,EACA,SAAAI,EACA,kBAAmBD,GAAqB,oBAAkB,MAE9D,OAAAD,EAAM,KAAKI,CAAK,EAChB,KAAK,qBAAoB,EAClBF,EAAS,OACpB,CAEQ,MAAM,sBAAoB,CAC9B,GAAI,CAAC,KAAK,KACN,OAEJ,IAAMG,EAAuB,CAAA,EAC7B,GAAI,KAAK,WAAW,OAAS,EAEzBA,EAAQ,KAAK,KAAK,WAAW,MAAK,CAAG,UAC9B,KAAK,UAAU,OAAS,EAE/BA,EAAQ,KAAK,GAAG,KAAK,UAAU,OAAO,EAAG,KAAK,UAAU,MAAM,CAAC,MAE/D,QAEJ,KAAK,KAAO,GACZ,MAAM,QAAQ,IAAIA,EAAQ,IAAI,MAAO,CAAE,OAAAP,EAAQ,SAAAI,EAAU,kBAAAD,CAAiB,IAAM,CAC5E,GAAI,CAEA,IAAMK,EAAS,MAAM,QAAQ,QAAO,EAAG,KAAK,IAAMR,EAAOG,CAAiB,CAAC,EAC3EC,EAAS,QAAQI,CAAM,QAClBC,EAAK,CACNC,GAAqBD,CAAG,EAExBL,EAAS,QAAQ,MAAS,EAE1BA,EAAS,OAAOK,CAAG,EAG/B,CAAC,CAAC,EACF,KAAK,KAAO,GACZ,KAAK,qBAAoB,CAC7B,CAEA,aAAW,CACP,KAAK,oBAAoB,OAAM,CACnC,GClEE,IAAOE,GAAP,KAAsB,CA9C5B,MA8C4B,CAAAC,EAAA,wBASxB,YAAYC,EAA6B,CAHtB,KAAA,oBAAsB,IAAIC,GAC1B,KAAA,eAAiB,IAAIA,GAGpC,KAAK,QAAUD,EAAS,QACxB,KAAK,MAAQA,EAAS,OAAO,MAC7B,KAAK,OAASA,EAAS,WAAW,MACtC,CAEA,UAAUE,EAA4B,CAClC,MAAO,CAGH,YAAaA,EAAO,YAAY,IAAIC,GAAK,OAAA,OAAA,CAAA,EAAMA,CAAC,CAAG,EACnD,aAAcD,EAAO,aAAa,IAAIC,GAAK,OAAA,OAAA,CAAA,EAAMA,CAAC,CAAG,EACrD,MAAO,KAAK,iBAAiBD,EAAO,MAAO,KAAK,wBAAwBA,EAAO,KAAK,CAAC,EAE7F,CAEU,wBAAwBE,EAAa,CAC3C,IAAMC,EAAW,IAAI,IACfC,EAAW,IAAI,IACrB,QAAWC,KAAWC,GAAUJ,CAAI,EAChCC,EAAS,IAAIE,EAAS,CAAA,CAAE,EAE5B,GAAIH,EAAK,SACL,QAAWK,KAAWC,GAAUN,EAAK,QAAQ,EACzCE,EAAS,IAAIG,EAAS,CAAA,CAAE,EAGhC,MAAO,CACH,SAAAJ,EACA,SAAAC
,EAER,CAEU,iBAAiBF,EAAeO,EAAyB,CAC/D,IAAMC,EAAMD,EAAQ,SAAS,IAAIP,CAAI,EACrCQ,EAAI,MAAQR,EAAK,MACjBQ,EAAI,gBAAkBR,EAAK,gBAC3BQ,EAAI,mBAAqBR,EAAK,mBAC1BA,EAAK,WAAa,SAClBQ,EAAI,SAAW,KAAK,iBAAiBR,EAAK,SAAUO,CAAO,GAE/D,OAAW,CAACE,EAAMC,CAAK,IAAK,OAAO,QAAQV,CAAI,EAC3C,GAAI,CAAAS,EAAK,WAAW,GAAG,EAGvB,GAAI,MAAM,QAAQC,CAAK,EAAG,CACtB,IAAMC,EAAa,CAAA,EACnBH,EAAIC,CAAI,EAAIE,EACZ,QAAWC,KAAQF,EACXG,GAAUD,CAAI,EACdD,EAAI,KAAK,KAAK,iBAAiBC,EAAML,CAAO,CAAC,EACtCO,GAAYF,CAAI,EACvBD,EAAI,KAAK,KAAK,mBAAmBC,EAAML,CAAO,CAAC,EAE/CI,EAAI,KAAKC,CAAI,OAGdC,GAAUH,CAAK,EACtBF,EAAIC,CAAI,EAAI,KAAK,iBAAiBC,EAAOH,CAAO,EACzCO,GAAYJ,CAAK,EACxBF,EAAIC,CAAI,EAAI,KAAK,mBAAmBC,EAAOH,CAAO,EAC3CG,IAAU,SACjBF,EAAIC,CAAI,EAAIC,GAGpB,OAAOF,CACX,CAEU,mBAAmBO,EAAsBR,EAAyB,CACxE,IAAMC,EAA+B,CAAA,EACrC,OAAAA,EAAI,SAAWO,EAAU,SACrBA,EAAU,WACVP,EAAI,SAAWD,EAAQ,SAAS,IAAIQ,EAAU,QAAQ,GAEnDP,CACX,CAEU,iBAAiBR,EAAeO,EAAyB,CAC/D,IAAMF,EAAUE,EAAQ,SAAS,IAAIP,CAAI,EACzC,OAAIgB,GAAchB,CAAI,EAClBK,EAAQ,SAAWL,EAAK,SAGxBK,EAAQ,cAAgB,KAAK,oBAAoBL,EAAK,aAAa,EAEvEK,EAAQ,OAASL,EAAK,OACtBK,EAAQ,QAAUE,EAAQ,SAAS,IAAIP,EAAK,OAAO,EAC/CiB,GAAmBjB,CAAI,EACvBK,EAAQ,QAAUL,EAAK,QAAQ,IAAIkB,GAAS,KAAK,iBAAiBA,EAAOX,CAAO,CAAC,EAC1EY,GAAcnB,CAAI,IACzBK,EAAQ,UAAYL,EAAK,UAAU,KACnCK,EAAQ,OAASL,EAAK,OACtBK,EAAQ,OAASL,EAAK,OACtBK,EAAQ,UAAYL,EAAK,MAAM,MAAM,KACrCK,EAAQ,YAAcL,EAAK,MAAM,MAAM,UACvCK,EAAQ,QAAUL,EAAK,MAAM,IAAI,KACjCK,EAAQ,UAAYL,EAAK,MAAM,IAAI,WAEhCK,CACX,CAEA,QAAqCP,EAA2B,CAC5D,IAAME,EAAOF,EAAO,MACdS,EAAU,KAAK,uBAAuBP,CAAI,EAChD,MAAI,aAAcA,GACd,KAAK,eAAeA,EAAK,SAAUO,CAAO,EAEvC,CACH,YAAaT,EAAO,YACpB,aAAcA,EAAO,aACrB,MAAO,KAAK,eAAeE,EAAMO,CAAO,EAEhD,CAEU,uBAAuBP,EAAS,CACtC,IAAMC,EAAW,IAAI,IACfC,EAAW,IAAI,IACrB,QAAWC,KAAWC,GAAUJ,CAAI,EAChCC,EAAS,IAAIE,EAAS,CAAA,CAAa,EAEvC,IAAIiB,EACJ,GAAIpB,EAAK,SACL,QAAWK,KAAWC,GAAUN,EAAK,QAAQ,EAAG,CAC5C,IAAIqB,EACA,aAAchB,GACdgB,EAAM,IAAIC,GAAgBjB,EAAQ,QAAkB,EACpDe,EAAOC,GACA,YAAahB,EACpBgB,EAAM,IAAIE,GACH,cAAelB,IACtBgB,EAAM,KAAK,mBAAmBhB,CAAO,GAErCgB,IACAnB,EAAS,IAAIG,EAASgB,CAAG,EACzBA,EAAI,KAAOD,GAIvB,MAAO,CACH,SAAAnB,EACA,SAAAC,EAER,CAEU,eAAeF,EAAWO,EAAuB,CACvD,IAAMJ,EAAUI,EAAQ,SAAS,IAAIP,CAAI,EACzCG,EAAQ,MAAQH,EAAK,MACrBG,EAAQ,gBAAkBH,EAAK,gBAC/BG,EAAQ,mBAAqBH,EAAK,mBAC9BA,EAAK,WACLG,EAAQ,SAAWI,EAAQ,SAAS,IAAIP,EAAK,QAAQ,GAEzD,OAAW,CAACS,EAAMC,CAAK,IAAK,OAAO,QAAQV,CAAI,EAC3C,GAAI,CAAAS,EAAK,WAAW,GAAG,EAGvB,GAAI,MAAM,QAAQC,CAAK,EAAG,CACtB,IAAMC,EAAiB,CAAA,EACvBR,EAAQM,CAAI,EAAIE,EAChB,QAAWC,KAAQF,EACXG,GAAUD,CAAI,EACdD,EAAI,KAAK,KAAK,UAAU,KAAK,eAAeC,EAAML,CAAO,EAAGJ,CAAO,CAAC,EAC7DW,GAAYF,CAAI,EACvBD,EAAI,KAAK,KAAK,iBAAiBC,EAAMT,EAASM,EAAMF,CAAO,CAAC,EAE5DI,EAAI,KAAKC,CAAI,OAGdC,GAAUH,CAAK,EACtBP,EAAQM,CAAI,EAAI,KAAK,UAAU,KAAK,eAAeC,EAAOH,CAAO,EAAGJ,CAAO,EACpEW,GAAYJ,CAAK,EACxBP,EAAQM,CAAI,EAAI,KAAK,iBAAiBC,EAAOP,EAASM,EAAMF,CAAO,EAC5DG,IAAU,SACjBP,EAAQM,CAAI,EAAIC,GAGxB,OAAOP,CACX,CAEU,UAAUH,EAAWwB,EAAW,CACtC,OAAAxB,EAAK,WAAawB,EACXxB,CACX,CAEU,iBAAiBe,EAAgBf,EAAeS,EAAcF,EAAuB,CAC3F,OAAO,KAAK,OAAO,eAAeP,EAAMS,EAAMF,EAAQ,SAAS,IAAIQ,EAAU,QAAQ,EAAIA,EAAU,QAAQ,CAC/G,CAEU,eAAeV,EAAcE,EAAyBkB,EAAM,EAAC,CACnE,IAAMC,EAAanB,EAAQ,SAAS,IAAIF,CAAO,EAK/C,GAJI,OAAOA,EAAQ,eAAkB,WACjCqB,EAAW,cAAgB,KAAK,kBAAkBrB,EAAQ,aAAa,GAE3EqB,EAAW,QAAUnB,EAAQ,SAAS,IAAIF,EAAQ,OAAO,EACrDY,GAAmBS,CAAU,EAC7B,QAAWR,KAASb,EAAQ,QAAS,CACjC,IAAMsB,EAAW,KAAK,eAAeT,EAAOX,EAASkB,GAAK,EAC1DC,EAAW,QAAQ,KAAKC,CAAQ,EAGxC,OAAOD,CACX,CAEU,mBAAmBrB,EAAY,CACrC,IAAMuB,EAAY,KAAK,aAAavB,EAAQ,SAAS,EAC/CwB,EAASxB,EAAQ,OACjByB,EAASzB,EAAQ,OACjB0B,EAAY1B,EAAQ,UACpB2B,EAAc3B,EAAQ,YACtB4B,EAAU5B,EAAQ,QAClB6B,EAAY7B,EAAQ,UACpB8B,EAAS9B,EAAQ,OAiBvB,OAhBa,IAAI+B,GACbP,EACAC,EACA,CACI,M
AAO,CACH,KAAMC,EACN,UAAWC,GAEf,IAAK,CACD,KAAMC,EACN,UAAWC,IAGnBN,EACAO,CAAM,CAGd,CAEU,aAAa1B,EAAY,CAC/B,OAAO,KAAK,MAAM,WAAWA,CAAI,CACrC,CAEU,oBAAoBT,EAAqB,CAC/C,OAAI,KAAK,oBAAoB,OAAS,GAClC,KAAK,0BAAyB,EAE3B,KAAK,oBAAoB,IAAIA,CAAI,CAC5C,CAEU,kBAAkBqC,EAAU,CAC9B,KAAK,oBAAoB,OAAS,GAClC,KAAK,0BAAyB,EAElC,IAAMC,EAAU,KAAK,oBAAoB,OAAOD,CAAE,EAClD,GAAIC,EACA,OAAOA,EAEP,MAAM,IAAI,MAAM,+BAAiCD,CAAE,CAE3D,CAEU,2BAAyB,CAC/B,IAAIA,EAAK,EACT,QAAWC,KAAWlC,GAAU,KAAK,OAAO,EACpCmC,GAAkBD,CAAO,GACzB,KAAK,oBAAoB,IAAIA,EAASD,GAAI,CAGtD,GC3QE,SAAUG,GAAwBC,EAAiC,CACrE,MAAO,CACH,cAAe,CACX,gBAAiBC,EAACC,GAAa,IAAIC,GAAuBD,CAAQ,EAAjD,mBACjB,sBAAuBD,EAACC,GAAa,IAAIE,GAA2BF,CAAQ,EAArD,0BAE3B,OAAQ,CACJ,YAAaD,EAACC,GAAa,IAAIG,GAAmBH,CAAQ,EAA7C,eACb,cAAeD,EAACC,GAAaI,GAAoBJ,CAAQ,EAA1C,iBACf,cAAeD,EAACC,GAAaK,GAAoBL,CAAQ,EAA1C,iBACf,iBAAkBD,EAACC,GAAaM,GAAuBN,CAAQ,EAA7C,oBAClB,eAAgBD,EAAA,IAAM,IAAIQ,GAAV,kBAChB,aAAcR,EAAA,IAAM,IAAIS,GAAV,gBACd,MAAOT,EAACC,GAAa,IAAIS,GAAaT,CAAQ,EAAvC,SACP,2BAA4BD,EAAA,IAAM,IAAIW,GAAV,+BAEhC,UAAW,CACP,eAAgBX,EAAA,IAAM,IAAIY,GAAV,kBAChB,2BAA4BZ,EAACC,GAAa,IAAIY,GAAkCZ,CAAQ,EAA5D,8BAC5B,6BAA8BD,EAACC,GAAa,IAAIa,GAAoCb,CAAQ,EAA9D,iCAElC,WAAY,CACR,OAAQD,EAACC,GAAa,IAAIc,GAAcd,CAAQ,EAAxC,UACR,aAAcD,EAAA,IAAM,IAAIgB,GAAV,gBACd,cAAehB,EAACC,GAAa,IAAIgB,GAAqBhB,CAAQ,EAA/C,iBACf,iBAAkBD,EAACC,GAAa,IAAIiB,GAAwBjB,CAAQ,EAAlD,oBAClB,WAAYD,EAACC,GAAa,IAAIkB,GAAkBlB,CAAQ,EAA5C,eAEhB,WAAY,CACR,SAAUD,EAACC,GAAa,IAAImB,GAAgBnB,CAAQ,EAA1C,YACV,eAAgBD,EAACC,GAAa,IAAIoB,GAAsBpB,CAAQ,EAAhD,mBAEpB,WAAY,CACR,kBAAmBD,EAACC,GAAa,IAAIqB,GAAyBrB,CAAQ,EAAnD,qBACnB,mBAAoBD,EAACC,GAAa,IAAIsB,GAAmBtB,CAAQ,EAA7C,uBAExB,OAAQD,EAAA,IAAMD,EAAQ,OAAd,UAEhB,CAtCgBC,EAAAF,GAAA,2BA0DV,SAAU0B,GAA8BzB,EAAuC,CACjF,MAAO,CACH,gBAAiBC,EAAA,IAAM,IAAIyB,GAAV,mBACjB,UAAW,CACP,iBAAkBzB,EAACC,GAAa,IAAIyB,GAAwBzB,CAAQ,EAAlD,oBAClB,uBAAwBD,EAACC,GAAa,IAAI0B,GAA8B1B,CAAQ,EAAxD,0BACxB,gBAAiBD,EAACC,GAAa,IAAI2B,GAAuB3B,CAAQ,EAAjD,mBACjB,aAAcD,EAACC,GAAa,IAAI4B,GAAoB5B,CAAQ,EAA9C,gBACd,iBAAkBD,EAACC,GAAa,IAAI6B,GAAwB7B,CAAQ,EAAlD,oBAClB,mBAAoBD,EAACC,GAAaF,EAAQ,mBAAmBE,CAAQ,EAAjD,sBACpB,cAAeD,EAAA,IAAM,IAAI+B,GAAV,iBACf,sBAAuB/B,EAACC,GAAa,IAAI+B,GAA6B/B,CAAQ,EAAvD,0BAGnC,CAdgBD,EAAAwB,GAAA,iCCtFV,IAAWS,IAAjB,SAAiBA,EAAM,CACNA,EAAA,MAAQ,CAA4BC,EAAmBC,IAAuBC,GAAOA,GAAO,CAAA,EAAIF,CAAE,EAAGC,CAAE,CACxH,GAFiBF,KAAAA,GAAM,CAAA,EAAA,EA0BjB,SAAUI,GACZC,EAAwBC,EAAyBC,EAAyBC,EAAyBC,EAAyBC,EAAyBC,EAAyBC,EAAyBC,EAAuB,CAE9N,IAAMC,EAAS,CAACT,EAASC,EAASC,EAASC,EAASC,EAASC,EAASC,EAASC,EAASC,CAAO,EAAE,OAAOV,GAAQ,CAAA,CAAE,EAClH,OAAOY,GAAQD,CAAM,CACzB,CALgBE,EAAAZ,GAAA,UAOhB,IAAMa,GAAU,OAAO,SAAS,EAM1B,SAAUC,GAAaC,EAAO,CAChC,GAAIA,GAASA,EAAaF,EAAO,EAC7B,QAAWG,KAAS,OAAO,OAAOD,CAAI,EAClCD,GAAUE,CAAK,EAGvB,OAAOD,CACX,CAPgBH,EAAAE,GAAA,aAahB,SAASH,GAAcD,EAAsBO,EAAc,CACvD,IAAMC,EAAa,IAAI,MAAM,CAAA,EAAW,CACpC,eAAgBN,EAAA,IAAM,GAAN,kBAChB,IAAKA,EAAA,CAACO,EAAKC,IAASC,GAASF,EAAKC,EAAMV,EAAQO,GAAYC,CAAK,EAA5D,OACL,yBAA0BN,EAAA,CAACO,EAAKC,KAAUC,GAASF,EAAKC,EAAMV,EAAQO,GAAYC,CAAK,EAAG,OAAO,yBAAyBC,EAAKC,CAAI,GAAzG,4BAC1B,IAAKR,EAAA,CAACU,EAAGF,IAASA,KAAQV,EAArB,OACL,QAASE,EAAA,IAAM,CAAC,GAAG,QAAQ,QAAQF,CAAM,EAAGG,EAAO,EAA1C,WACZ,EACD,OAAAK,EAAML,EAAO,EAAI,GACVK,CACX,CAVSN,EAAAD,GAAA,WAgBT,IAAMY,GAAgB,OAAM,EAc5B,SAASF,GAAeF,EAAUC,EAAgCV,EAAsBO,EAAW,CAC/F,GAAIG,KAAQD,EAAK,CACb,GAAIA,EAAIC,CAAI,YAAa,MACrB,MAAM,IAAI,MAAM,mFAAoF,CAAC,MAAOD,EAAIC,CAAI,CAAC,CAAC,EAE1H,GAAID,EAAIC,CAAI,IAAMG,GACd,MAAM,IAAI,MAAM,gCAAkC,OAAOH,CAAI,EAAI,4FAA4F,EAEjK,OAAOD,EAAIC,CAAI,UACRA,KAAQV,EAAQ,CACvB,IAAMM,EAA+DN,EAAOU,CAAe,EAC3FD,EAAIC,CAAI,EAAIG,GACZ,GAAI,CACAJ,EAAIC,CAAI,EAAK,OAAOJ,GAAU,WAAcA,
EAAMC,CAAQ,EAAIN,GAAQK,EAAOC,CAAQ,QAChFO,EAAO,CACZ,MAAAL,EAAIC,CAAI,EAAII,aAAiB,MAAQA,EAAQ,OACvCA,EAEV,OAAOL,EAAIC,CAAI,MAEf,OAER,CAtBSR,EAAAS,GAAA,YA+BT,SAAStB,GAAO0B,EAAqBC,EAAoB,CACrD,GAAIA,GACA,OAAW,CAACC,EAAKC,CAAM,IAAK,OAAO,QAAQF,CAAM,EAC7C,GAAIE,IAAW,OAAW,CACtB,IAAMC,EAASJ,EAAOE,CAAG,EACrBE,IAAW,MAAQD,IAAW,MAAQ,OAAOC,GAAW,UAAY,OAAOD,GAAW,SACtFH,EAAOE,CAAG,EAAI5B,GAAO8B,EAAQD,CAAM,EAEnCH,EAAOE,CAAG,EAAIC,GAK9B,OAAOH,CACX,CAdSb,EAAAb,GAAA,UCtIT,IAAA+B,EAAA,GAAAC,GAAAD,EAAA,cAAAE,GAAA,UAAAC,GAAA,iBAAAC,EAAA,iBAAAC,GAAA,aAAAC,GAAA,gBAAAC,GAAA,aAAAC,GAAA,eAAAC,GAAA,oBAAAC,GAAA,kBAAAC,GAAA,iBAAAC,GAAA,sBAAAC,GAAA,iBAAAC,GAAA,aAAAC,GAAA,uBAAAC,GAAA,cAAAC,GAAA,gBAAAC,GAAA,gBAAAC,GAAA,eAAAC,GAAA,mBAAAC,GAAA,QAAAC,GAAA,aAAAC,GAAA,mBAAAC,GAAA,sBAAAC,GAAA,kBAAAC,GAAA,sBAAAC,GAAA,yBAAAC,GAAA,wBAAAC,GAAA,0BAAAC,GAAA,6BAAAC,GAAA,WAAAC,IAOAC,EAAAjC,EAAckC,ICyBR,IAAOC,GAAP,KAA8B,CAhCpC,MAgCoC,CAAAC,EAAA,gCAEhC,UAAQ,CACJ,MAAM,IAAI,MAAM,8BAA8B,CAClD,CAEA,MAAM,eAAa,CACf,MAAO,CAAA,CACX,GAISC,GAAkB,CAC3B,mBAAoBD,EAAA,IAAM,IAAID,GAAV,uBC9BxB,IAAMG,GAAgF,CAClF,QAASC,EAAA,IAAG,GAAH,WACT,iBAAkBA,EAAA,KAAO,CACrB,gBAAiB,GACjB,eAAgB,CAAC,UAAU,EAC3B,WAAY,YAHE,qBAOhBC,GAAkG,CACpG,cAAeD,EAAA,IAAM,IAAQE,GAAd,kBAGnB,SAASC,IAA4B,CACjC,IAAMC,EAASC,GACXC,GAA8BC,EAAe,EAC7CN,EAA0B,EAExBO,EAAUH,GACZI,GAAwB,CAAE,OAAAL,CAAM,CAAE,EAClCL,EAAoB,EAExB,OAAAK,EAAO,gBAAgB,SAASI,CAAO,EAChCA,CACX,CAXSR,EAAAG,GAAA,gCAiBH,SAAUO,GAAoBC,EAAY,OAC5C,IAAMC,EAAWT,GAA4B,EACvCU,EAAUD,EAAS,WAAW,eAAe,YAAYD,CAAI,EACnE,OAAAC,EAAS,OAAO,UAAU,uBAAuB,UAAUC,EAASC,GAAI,MAAM,aAAYC,EAAAF,EAAQ,QAAI,MAAAE,IAAA,OAAAA,EAAI,SAAS,UAAU,CAAC,EACvHF,CACX,CALgBb,EAAAU,GAAA,uB9G7BhBM,EAAAC,GAAcC,G+GhBd,IAAIC,GAAY,OAAO,eACnBC,EAASA,EAAA,CAACC,EAAQC,IAAUH,GAAUE,EAAQ,OAAQ,CAAE,MAAAC,EAAO,aAAc,EAAK,CAAC,EAA1E,UAITC,GAAY,YACZC,GAAe,eACnB,SAASC,GAAeC,EAAM,CAC5B,OAAOC,GAAW,WAAWD,EAAMF,EAAY,CACjD,CAFSJ,EAAAK,GAAA,kBAGTL,EAAOK,GAAgB,gBAAgB,EACvC,IAAIG,GAAS,SACb,SAASC,GAASH,EAAM,CACtB,OAAOC,GAAW,WAAWD,EAAME,EAAM,CAC3C,CAFSR,EAAAS,GAAA,YAGTT,EAAOS,GAAU,UAAU,EAC3B,IAAIC,GAAW,WACXC,GAAgB,gBAChBC,GAAS,SACb,SAASC,GAASP,EAAM,CACtB,OAAOC,GAAW,WAAWD,EAAMM,EAAM,CAC3C,CAFSZ,EAAAa,GAAA,YAGTb,EAAOa,GAAU,UAAU,EAC3B,IAAIC,GAAS,SACb,SAASC,GAAST,EAAM,CACtB,OAAOC,GAAW,WAAWD,EAAMQ,EAAM,CAC3C,CAFSd,EAAAe,GAAA,YAGTf,EAAOe,GAAU,UAAU,EAC3B,IAAIC,GAAW,WACf,SAASC,GAAWX,EAAM,CACxB,OAAOC,GAAW,WAAWD,EAAMU,EAAQ,CAC7C,CAFShB,EAAAiB,GAAA,cAGTjB,EAAOiB,GAAY,YAAY,EAC/B,IAAIC,GAAO,OACX,SAASC,GAAOb,EAAM,CACpB,OAAOC,GAAW,WAAWD,EAAMY,EAAI,CACzC,CAFSlB,EAAAmB,GAAA,UAGTnB,EAAOmB,GAAQ,QAAQ,EACvB,IAAIC,GAAQ,QACZ,SAASC,GAAQf,EAAM,CACrB,OAAOC,GAAW,WAAWD,EAAMc,EAAK,CAC1C,CAFSpB,EAAAqB,GAAA,WAGTrB,EAAOqB,GAAS,SAAS,EACzB,IAAIC,GAAS,SACb,SAASC,GAASjB,EAAM,CACtB,OAAOC,GAAW,WAAWD,EAAMgB,EAAM,CAC3C,CAFStB,EAAAuB,GAAA,YAGTvB,EAAOuB,GAAU,UAAU,EAC3B,IAAIC,GAAc,cAClB,SAASC,GAAcnB,EAAM,CAC3B,OAAOC,GAAW,WAAWD,EAAMkB,EAAW,CAChD,CAFSxB,EAAAyB,GAAA,iBAGTzB,EAAOyB,GAAe,eAAe,EACrC,IAAIC,GAAM,MACV,SAASC,GAAMrB,EAAM,CACnB,OAAOC,GAAW,WAAWD,EAAMoB,EAAG,CACxC,CAFS1B,EAAA2B,GAAA,SAGT3B,EAAO2B,GAAO,OAAO,EACrB,IAAIC,GAAa,aACjB,SAASC,GAAavB,EAAM,CAC1B,OAAOC,GAAW,WAAWD,EAAMsB,EAAU,CAC/C,CAFS5B,EAAA6B,GAAA,gBAGT7B,EAAO6B,GAAc,cAAc,EACnC,IAAIC,GAAY,YACZC,GAAuB,cAAcC,EAAsB,CAhE/D,MAgE+D,CAAAhC,EAAA,6BAC7D,MAAO,CACLA,EAAO,KAAM,sBAAsB,CACrC,CACA,aAAc,CACZ,MAAO,CAAC,eAAgB,SAAU,WAAY,gBAAiB,SAAU,SAAU,YAAa,OAAQ,WAAY,QAAS,OAAQ,WAAY,QAAS,SAAU,cAAe,MAAO,aAAc,UAAW,WAAW,CAChO,CACA,iBAAiBiC,EAASC,EAAW,CACnC,OAAQD,EAAS,CACf,KAAKzB,GACL,KAAKE,GACL,KAAKC,GACL,KAAKC,GACL,KAAKQ,GACH,OAAO,KAAK,UAAUjB,GAAW+B,CAAS,EAE5C,KAAKJ,GACH,OAAO,KAAK,UAAUd,GAAUkB,CAAS,EAE3C,QA
CE,MAAO,EAEX,CACF,CACA,iBAAiBC,EAAS,CACxB,IAAMC,EAAc,GAAGD,EAAQ,UAAU,KAAK,IAAIA,EAAQ,QAAQ,GAClE,OAAQC,EAAa,CACnB,QACE,MAAM,IAAI,MAAM,GAAGA,CAAW,+BAA+B,CAEjE,CACF,CACA,gBAAgBC,EAAM,CACpB,OAAQA,EAAM,CACZ,IAAK,eACH,MAAO,CACL,KAAM,eACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,QAAS,aAAc,CAAC,CAAE,EAClC,CAAE,KAAM,SAAU,aAAc,CAAC,CAAE,EACnC,CAAE,KAAM,YAAa,aAAc,CAAC,CAAE,EACtC,CAAE,KAAM,WAAY,aAAc,CAAC,CAAE,EACrC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,SACH,MAAO,CACL,KAAM,SACN,WAAY,CACV,CAAE,KAAM,MAAO,EACf,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,WACH,MAAO,CACL,KAAM,WACN,WAAY,CACV,CAAE,KAAM,QAAS,CACnB,CACF,EAEF,IAAK,gBACH,MAAO,CACL,KAAM,gBACN,WAAY,CACV,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,QAAS,EACjB,CAAE,KAAM,OAAQ,aAAc,CAAC,CAAE,CACnC,CACF,EAEF,IAAK,SACH,MAAO,CACL,KAAM,SACN,WAAY,CACV,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,SAAU,EAClB,CAAE,KAAM,OAAQ,aAAc,CAAC,CAAE,EACjC,CAAE,KAAM,MAAO,CACjB,CACF,EAEF,IAAK,SACH,MAAO,CACL,KAAM,SACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,OACH,MAAO,CACL,KAAM,OACN,WAAY,CACV,CAAE,KAAM,QAAS,EACjB,CAAE,KAAM,WAAY,aAAc,EAAM,EACxC,CAAE,KAAM,OAAQ,EAChB,CAAE,KAAM,UAAW,aAAc,EAAM,EACvC,CAAE,KAAM,QAAS,EACjB,CAAE,KAAM,WAAY,aAAc,EAAM,EACxC,CAAE,KAAM,OAAQ,EAChB,CAAE,KAAM,UAAW,aAAc,EAAM,EACvC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,WACH,MAAO,CACL,KAAM,WACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,aAAc,aAAc,CAAC,CAAE,EACvC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,QACH,MAAO,CACL,KAAM,QACN,WAAY,CACV,CAAE,KAAM,MAAO,EACf,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,OACH,MAAO,CACL,KAAM,OACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,WACH,MAAO,CACL,KAAM,WACN,WAAY,CACV,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,IAAK,CACf,CACF,EAEF,IAAK,QACH,MAAO,CACL,KAAM,QACN,WAAY,CACV,CAAE,KAAM,QAAS,EACjB,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,OAAQ,aAAc,CAAC,CAAE,EACjC,CAAE,KAAM,MAAO,CACjB,CACF,EAEF,IAAK,SACH,MAAO,CACL,KAAM,SACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,SAAU,aAAc,CAAC,CAAE,EACnC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,cACH,MAAO,CACL,KAAM,cACN,WAAY,CACV,CAAE,KAAM,KAAM,EACd,CAAE,KAAM,OAAQ,EAChB,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,MACH,MAAO,CACL,KAAM,MACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,WAAY,aAAc,CAAC,CAAE,EACrC,CAAE,KAAM,WAAY,aAAc,EAAM,EACxC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,aACH,MAAO,CACL,KAAM,aACN,WAAY,CACV,CAAE,KAAM,OAAQ,EAChB,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,UACH,MAAO,CACL,KAAM,UACN,WAAY,CACV,CAAE,KAAM,MAAO,EACf,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,IAAK,EACb,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,IAAK,YACH,MAAO,CACL,KAAM,YACN,WAAY,CACV,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,UAAW,EACnB,CAAE,KAAM,KAAM,EACd,CAAE,KAAM,aAAc,aAAc,CAAC,CAAE,EACvC,CAAE,KAAM,OAAQ,CAClB,CACF,EAEF,QACE,MAAO,CACL,KAAMA,EACN,WAAY,CAAC,CACf,CAEJ,CACF,CACF,EACI9B,GAAa,IAAIwB,GAIjBO,GACAC,GAA8BvC,EAAO,IAAMsC,KAAsBA,GAAoBE,GAAoB,ulHAAulH,GAAI,aAAa,EACjtHC,GACAC,GAAgC1C,EAAO,IAAMyC,KAAwBA,GAAsBD,GAAoB,2hKAA2hK,GAAI,eAAe,EAC7pKG,GACAC,GAA6B5C,EAAO,IAAM2C,KAAqBA,GAAmBH,GAAoB,+9JAA+9J,GAAI,YAAY,EACrlKK,GACAC,GAAsC9C,EAAO,IAAM6C,KAA8BA,GAA4BL,GAAoB,w5VAAw5V,GAAI,qBAAqB,EACljWO,GACAC,GAAkChD,EAAO,IAAM+C,KAA0BA,GAAwBP,GAAoB,6sUAA6sU,GAAI,iBAAiB,EAGv1US,GAAuB,CACzB,WAAY,OACZ,eAAgB,CAAC,OAAQ,UAAU,EACnC,gBAAiB,EACnB,EACIC,GAAyB,CAC3B,WAAY,SACZ,eAAgB,CAAC,OAAQ,UAAU,EACnC,gBAAiB,EACnB,EACIC,GAAsB,CACxB,WAAY,MACZ,eAAgB,CAAC,OAAQ,UAAU,EACnC,gBAAiB,EACnB,EACIC,GAA+B,CACjC,WAAY,eACZ,eAAgB,CAAC,OAAQ,UAAU,EACnC,gBAAiB,EACnB,EACIC,GAA2B,CAC7B,WAAY,WACZ,eAAgB,CAAC,OAAQ,UAAU,EACnC,gBAAiB,E
ACnB,EACIC,GAA+B,CACjC,cAA+BtD,EAAO,IAAM,IAAI+B,GAAwB,eAAe,CACzF,EACIwB,GAAsB,CACxB,QAAyBvD,EAAO,IAAMuC,GAAY,EAAG,SAAS,EAC9D,iBAAkCvC,EAAO,IAAMiD,GAAsB,kBAAkB,EACvF,OAAQ,CAAC,CACX,EACIO,GAAwB,CAC1B,QAAyBxD,EAAO,IAAM0C,GAAc,EAAG,SAAS,EAChE,iBAAkC1C,EAAO,IAAMkD,GAAwB,kBAAkB,EACzF,OAAQ,CAAC,CACX,EACIO,GAAqB,CACvB,QAAyBzD,EAAO,IAAM4C,GAAW,EAAG,SAAS,EAC7D,iBAAkC5C,EAAO,IAAMmD,GAAqB,kBAAkB,EACtF,OAAQ,CAAC,CACX,EACIO,GAA8B,CAChC,QAAyB1D,EAAO,IAAM8C,GAAoB,EAAG,SAAS,EACtE,iBAAkC9C,EAAO,IAAMoD,GAA8B,kBAAkB,EAC/F,OAAQ,CAAC,CACX,EACIO,GAA0B,CAC5B,QAAyB3D,EAAO,IAAMgD,GAAgB,EAAG,SAAS,EAClE,iBAAkChD,EAAO,IAAMqD,GAA0B,kBAAkB,EAC3F,OAAQ,CAAC,CACX,EAMIO,GAA0B,6CAC1BC,GAA0B,4BAC1BC,GAAa,wBAGbC,GAAe,CACjB,UAAWH,GACX,UAAWC,GACX,MAAOC,EACT,EACIE,GAAgC,cAAcC,EAAsB,CAnYxE,MAmYwE,CAAAjE,EAAA,sCACtE,MAAO,CACLA,EAAO,KAAM,+BAA+B,CAC9C,CACA,aAAakE,EAAMC,EAAOC,EAAS,CACjC,IAAIlE,EAAQ,KAAK,mBAAmBgE,EAAMC,EAAOC,CAAO,EAIxD,OAHIlE,IAAU,SACZA,EAAQ,KAAK,mBAAmBgE,EAAMC,EAAOC,CAAO,GAElDlE,IAAU,OACL,MAAM,aAAagE,EAAMC,EAAOC,CAAO,EAEzClE,CACT,CACA,mBAAmBgE,EAAMC,EAAOE,EAAU,CACxC,IAAMC,EAAQP,GAAaG,EAAK,IAAI,EACpC,GAAII,IAAU,OACZ,OAEF,IAAMC,EAAQD,EAAM,KAAKH,CAAK,EAC9B,GAAII,IAAU,KAGd,IAAIA,EAAM,CAAC,IAAM,OACf,OAAOA,EAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,cAAe,GAAG,EAEnD,GAAIA,EAAM,CAAC,IAAM,OACf,OAAOA,EAAM,CAAC,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,cAAe,GAAG,EAAE,QAAQ,eAAgB;AAAA,CAAI,EAGxH,CACF,EACIC,GAAuB,cAAcR,EAA8B,CAnavE,MAmauE,CAAAhE,EAAA,6BACrE,MAAO,CACLA,EAAO,KAAM,sBAAsB,CACrC,CACA,mBAAmByE,EAAOC,EAAQL,EAAU,CAE5C,CACF,EAIIM,GAA8B,cAAcC,EAAoB,CA9apE,MA8aoE,CAAA5E,EAAA,oCAClE,MAAO,CACLA,EAAO,KAAM,6BAA6B,CAC5C,CACA,YAAY6E,EAAU,CACpB,MAAM,EACN,KAAK,SAAW,IAAI,IAAIA,CAAQ,CAClC,CACA,mBAAmBC,EAAOC,EAAgBC,EAAS,CACjD,IAAMC,EAAa,MAAM,mBAAmBH,EAAOC,EAAgBC,CAAO,EAC1E,OAAAC,EAAW,QAASC,GAAc,CAC5B,KAAK,SAAS,IAAIA,EAAU,IAAI,GAAKA,EAAU,UAAY,SAC7DA,EAAU,QAAU,IAAI,OAAOA,EAAU,QAAQ,SAAS,EAAI,oBAAoB,EAEtF,CAAC,EACMD,CACT,CACF,EACIE,GAAqB,cAAcR,EAA4B,CAhcnE,MAgcmE,CAAA3E,EAAA,2BACjE,MAAO,CACLA,EAAO,KAAM,oBAAoB,CACnC,CACF",
  "names": ["require_ral", "__commonJSMin", "exports", "_ral", "RAL", "__name", "install", "ral", "require_is", "__commonJSMin", "exports", "boolean", "value", "__name", "string", "number", "error", "func", "array", "stringArray", "elem", "require_events", "__commonJSMin", "exports", "ral_1", "Event", "_disposable", "CallbackList", "__name", "callback", "context", "bucket", "foundCallbackWithDifferentContext", "len", "args", "ret", "callbacks", "contexts", "i", "e", "Emitter", "_Emitter", "_options", "listener", "thisArgs", "disposables", "result", "event", "require_cancellation", "__commonJSMin", "exports", "ral_1", "Is", "events_1", "CancellationToken", "is", "value", "candidate", "__name", "shortcutEvent", "callback", "context", "handle", "MutableToken", "CancellationTokenSource", "lib_exports", "__export", "AbstractAstReflection", "AbstractCstNode", "AbstractLangiumParser", "AbstractParserErrorMessageProvider", "AbstractThreadedAsyncParser", "ast_utils_exports", "BiMap", "cancellation_exports", "CompositeCstNodeImpl", "ContextCache", "CstNodeBuilder", "cst_utils_exports", "DONE_RESULT", "DatatypeSymbol", "DefaultAstNodeDescriptionProvider", "DefaultAstNodeLocator", "DefaultAsyncParser", "DefaultCommentProvider", "DefaultConfigurationProvider", "DefaultDocumentBuilder", "DefaultDocumentValidator", "DefaultHydrator", "DefaultIndexManager", "DefaultJsonSerializer", "DefaultLangiumDocumentFactory", "DefaultLangiumDocuments", "DefaultLexer", "DefaultLinker", "DefaultNameProvider", "DefaultReferenceDescriptionProvider", "DefaultReferences", "DefaultScopeComputation", "DefaultScopeProvider", "DefaultServiceRegistry", "DefaultTokenBuilder", "DefaultValueConverter", "DefaultWorkspaceLock", "DefaultWorkspaceManager", "Deferred", "Disposable", "DisposableCache", "DocumentCache", "DocumentState", "DocumentValidator", "EMPTY_SCOPE", "EMPTY_STREAM", "EmptyFileSystem", "EmptyFileSystemProvider", "ErrorWithLocation", "ast_exports", "grammar_utils_exports", "JSDocDocumentationProvider", "LangiumCompletionParser", "LangiumParser", "LangiumParserErrorMessageProvider", "LeafCstNodeImpl", "MapScope", "Module", "MultiMap", "OperationCancelled", "ParserWorker", "Reduction", "regexp_utils_exports", "RootCstNodeImpl", "SimpleCache", "StreamImpl", "StreamScope", "TextDocument", "TreeStreamImpl", "URI", "UriUtils", "ValidationCategory", "ValidationRegistry", "ValueConverter", "WorkspaceCache", "assertUnreachable", "createCompletionParser", "createDefaultCoreModule", "createDefaultSharedCoreModule", "createGrammarConfig", "createLangiumParser", "delayNextTick", "diagnosticData", "eagerLoad", "getDiagnosticRange", "inject", "interruptAndCheck", "isAstNode", "isAstNodeDescription", "isAstNodeWithComment", "isCompositeCstNode", "isIMultiModeLexerDefinition", "isJSDoc", "isLeafCstNode", "isLinkingError", "isNamed", "isOperationCancelled", "isReference", "isRootCstNode", "isTokenTypeArray", "isTokenTypeDictionary", "loadGrammarFromJson", "parseJSDoc", "prepareLangiumParser", "setInterruptionPeriod", "startCancelableOperation", "stream", "toDiagnosticSeverity", "cst_utils_exports", "__export", "DefaultNameRegexp", "RangeComparison", "compareRange", "findCommentNode", "findDeclarationNodeAtOffset", "findLeafNodeAtOffset", "findLeafNodeBeforeOffset", "flattenCst", "getInteriorNodes", "getNextNode", "getPreviousNode", "getStartlineNode", "inRange", "isChildNode", "isCommentNode", "streamCst", "toDocumentSegment", "tokenToRange", "isAstNode", "obj", "__name", "isReference", "isAstNodeDescription", "isLinkingError", 
"AbstractAstReflection", "node", "type", "subtype", "supertype", "nested", "existing", "result", "allTypes", "types", "possibleSubType", "isCompositeCstNode", "isLeafCstNode", "isRootCstNode", "StreamImpl", "_StreamImpl", "__name", "startFn", "nextFn", "iterator", "count", "next", "result", "keyFn", "valueFn", "entryStream", "element", "other", "state", "DONE_RESULT", "separator", "value", "addSeparator", "toString", "searchElement", "fromIndex", "index", "predicate", "callbackfn", "done", "initialValue", "previousValue", "mapped", "isIterable", "depth", "stream", "skipCount", "i", "maxSize", "by", "set", "e", "key", "otherKeySet", "item", "ownKey", "obj", "EMPTY_STREAM", "collections", "collection", "TreeStreamImpl", "root", "children", "options", "Reduction", "sum", "a", "b", "product", "min", "max", "streamCst", "node", "TreeStreamImpl", "element", "isCompositeCstNode", "__name", "flattenCst", "isLeafCstNode", "isChildNode", "child", "parent", "tokenToRange", "token", "toDocumentSegment", "offset", "end", "range", "RangeComparison", "compareRange", "to", "startInside", "endInside", "inRange", "DefaultNameRegexp", "findDeclarationNodeAtOffset", "cstNode", "nameRegexp", "localOffset", "textAtOffset", "findLeafNodeAtOffset", "findCommentNode", "commentNames", "previous", "getPreviousNode", "isCommentNode", "isRootCstNode", "endIndex", "e", "searchResult", "binarySearch", "findLeafNodeBeforeOffset", "closest", "left", "right", "closestNode", "middle", "middleNode", "hidden", "index", "getNextNode", "last", "next", "getStartlineNode", "line", "selfIndex", "getInteriorNodes", "start", "commonParent", "getCommonParent", "a", "b", "aParents", "getParentChain", "bParents", "current", "i", "aParent", "bParent", "chain", "grammar_utils_exports", "__export", "findAssignment", "findNameAssignment", "findNodeForKeyword", "findNodeForProperty", "findNodesForKeyword", "findNodesForKeywordInternal", "findNodesForProperty", "getActionAtElement", "getActionType", "getAllReachableRules", "getCrossReferenceTerminal", "getEntryRule", "getExplicitRuleType", "getHiddenRules", "getRuleType", "getTypeName", "isArrayCardinality", "isArrayOperator", "isCommentTerminal", "isDataType", "isDataTypeRule", "isOptionalCardinality", "terminalRegex", "ErrorWithLocation", "__name", "node", "message", "assertUnreachable", "_", "ast_exports", "__export", "AbstractElement", "AbstractRule", "AbstractType", "Action", "Alternatives", "ArrayLiteral", "ArrayType", "Assignment", "BooleanLiteral", "CharacterRange", "Condition", "Conjunction", "CrossReference", "Disjunction", "EndOfFile", "Grammar", "GrammarImport", "Group", "InferredType", "Interface", "Keyword", "LangiumGrammarAstReflection", "LangiumGrammarTerminals", "NamedArgument", "NegatedToken", "Negation", "NumberLiteral", "Parameter", "ParameterReference", "ParserRule", "ReferenceType", "RegexToken", "ReturnType", "RuleCall", "SimpleType", "StringLiteral", "TerminalAlternatives", "TerminalGroup", "TerminalRule", "TerminalRuleCall", "Type", "TypeAttribute", "TypeDefinition", "UnionType", "UnorderedGroup", "UntilToken", "ValueLiteral", "Wildcard", "isAbstractElement", "isAbstractRule", "isAbstractType", "isAction", "isAlternatives", "isArrayLiteral", "isArrayType", "isAssignment", "isBooleanLiteral", "isCharacterRange", "isCondition", "isConjunction", "isCrossReference", "isDisjunction", "isEndOfFile", "isFeatureName", "isGrammar", "isGrammarImport", "isGroup", "isInferredType", "isInterface", "isKeyword", "isNamedArgument", "isNegatedToken", "isNegation", "isNumberLiteral", 
"isParameter", "isParameterReference", "isParserRule", "isPrimitiveType", "isReferenceType", "isRegexToken", "isReturnType", "isRuleCall", "isSimpleType", "isStringLiteral", "isTerminalAlternatives", "isTerminalGroup", "isTerminalRule", "isTerminalRuleCall", "isType", "isTypeAttribute", "isTypeDefinition", "isUnionType", "isUnorderedGroup", "isUntilToken", "isValueLiteral", "isWildcard", "reflection", "LangiumGrammarTerminals", "AbstractRule", "isAbstractRule", "item", "reflection", "__name", "AbstractType", "isAbstractType", "Condition", "isCondition", "isFeatureName", "isPrimitiveType", "TypeDefinition", "isTypeDefinition", "ValueLiteral", "isValueLiteral", "AbstractElement", "isAbstractElement", "ArrayLiteral", "isArrayLiteral", "ArrayType", "isArrayType", "BooleanLiteral", "isBooleanLiteral", "Conjunction", "isConjunction", "Disjunction", "isDisjunction", "Grammar", "isGrammar", "GrammarImport", "isGrammarImport", "InferredType", "isInferredType", "Interface", "isInterface", "NamedArgument", "isNamedArgument", "Negation", "isNegation", "NumberLiteral", "isNumberLiteral", "Parameter", "isParameter", "ParameterReference", "isParameterReference", "ParserRule", "isParserRule", "ReferenceType", "isReferenceType", "ReturnType", "isReturnType", "SimpleType", "isSimpleType", "StringLiteral", "isStringLiteral", "TerminalRule", "isTerminalRule", "Type", "isType", "TypeAttribute", "isTypeAttribute", "UnionType", "isUnionType", "Action", "isAction", "Alternatives", "isAlternatives", "Assignment", "isAssignment", "CharacterRange", "isCharacterRange", "CrossReference", "isCrossReference", "EndOfFile", "isEndOfFile", "Group", "isGroup", "Keyword", "isKeyword", "NegatedToken", "isNegatedToken", "RegexToken", "isRegexToken", "RuleCall", "isRuleCall", "TerminalAlternatives", "isTerminalAlternatives", "TerminalGroup", "isTerminalGroup", "TerminalRuleCall", "isTerminalRuleCall", "UnorderedGroup", "isUnorderedGroup", "UntilToken", "isUntilToken", "Wildcard", "isWildcard", "LangiumGrammarAstReflection", "AbstractAstReflection", "subtype", "supertype", "refInfo", "referenceId", "type", "ast_utils_exports", "__export", "assignMandatoryProperties", "copyAstNode", "findLocalReferences", "findRootNode", "getContainerOfType", "getDocument", "hasContainerOfType", "linkContentToContainer", "streamAllContents", "streamAst", "streamContents", "streamReferences", "linkContentToContainer", "node", "name", "value", "item", "index", "isAstNode", "__name", "getContainerOfType", "typePredicate", "hasContainerOfType", "predicate", "getDocument", "result", "findRootNode", "streamContents", "options", "range", "StreamImpl", "state", "property", "isAstNodeInRange", "element", "DONE_RESULT", "streamAllContents", "root", "TreeStreamImpl", "streamAst", "astNode", "nodeRange", "_a", "inRange", "streamReferences", "isReference", "findLocalReferences", "targetNode", "lookup", "refs", "refInfo", "stream", "assignMandatoryProperties", "reflection", "typeMetaData", "genericNode", "copyDefaultValue", "propertyType", "copyAstNode", "buildReference", "copy", "copiedArray", "regexp_utils_exports", "__export", "NEWLINE_REGEXP", "escapeRegExp", "getCaseInsensitivePattern", "getTerminalParts", "isMultilineComment", "isWhitespace", "partialMatches", "partialRegExp", "cc", "char", "__name", "insertToSet", "item", "set", "subItem", "addFlag", "flagObj", "flagKey", "x", "ASSERT_EXISTS", "obj", "ASSERT_NEVER_REACH_HERE", "isCharacter", "digitsCharCodes", "i", "cc", "wordCharCodes", "whitespaceCodes", "hexDigitPattern", "decimalPattern", 
"decimalPatternNoZero", "RegExpParser", "__name", "newState", "input", "value", "flags", "addFlag", "alts", "begin", "terms", "type", "ASSERT_EXISTS", "disjunction", "ASSERT_NEVER_REACH_HERE", "isBacktracking", "range", "atLeast", "atMost", "atom", "cc", "set", "complement", "digitsCharCodes", "whitespaceCodes", "wordCharCodes", "escapeCode", "letter", "escapedChar", "nextChar", "from", "isFromSingleChar", "isCharacter", "to", "isToSingleChar", "insertToSet", "capturing", "groupAst", "number", "howMuch", "prevState", "howMany", "hexString", "hexChar", "char", "BaseRegExpVisitor", "__name", "node", "key", "child", "subChild", "NEWLINE_REGEXP", "regexpParser", "RegExpParser", "TerminalRegExpVisitor", "BaseRegExpVisitor", "__name", "regex", "node", "char", "escapedChar", "escapeRegExp", "set", "visitor", "getTerminalParts", "regexp", "pattern", "parts", "alternative", "isMultilineComment", "isWhitespace", "value", "getCaseInsensitivePattern", "keyword", "letter", "partialMatches", "input", "partial", "partialRegExp", "match", "re", "source", "i", "process", "result", "tmp", "appendRaw", "nbChars", "appendOptional", "getEntryRule", "grammar", "isParserRule", "__name", "getHiddenRules", "isTerminalRule", "getAllReachableRules", "allTerminals", "ruleNames", "entryRule", "topMostRules", "rule", "ruleDfs", "rules", "visitedSet", "streamAllContents", "node", "isRuleCall", "isTerminalRuleCall", "refRule", "getCrossReferenceTerminal", "crossRef", "nameAssigment", "findNameAssignment", "isCommentTerminal", "terminalRule", "terminalRegex", "findNodesForProperty", "property", "findNodesForPropertyInternal", "findNodeForProperty", "index", "nodes", "element", "first", "nodeFeature", "getContainerOfType", "isAssignment", "isCompositeCstNode", "e", "findNodesForKeyword", "keyword", "findNodesForKeywordInternal", "findNodeForKeyword", "isKeyword", "treeIterator", "streamCst", "result", "keywordNodes", "childNode", "findAssignment", "cstNode", "astNode", "_a", "assignment", "type", "startNode", "isInferredType", "isAction", "assertUnreachable", "findNameAssignmentInternal", "cache", "go", "refType", "childAssignment", "isSimpleType", "getActionAtElement", "parent", "isGroup", "elements", "item", "action", "isAbstractElement", "isOptionalCardinality", "cardinality", "isArrayCardinality", "isArrayOperator", "operator", "isDataTypeRule", "isDataTypeRuleInternal", "visited", "isDataType", "isDataTypeInternal", "isArrayType", "isReferenceType", "isUnionType", "ref", "isType", "getExplicitRuleType", "isInterface", "getTypeName", "isReturnType", "actionType", "getActionType", "getRuleType", "_b", "_c", "flags", "source", "abstractElementToRegex", "flagText", "value", "name", "WILDCARD", "isTerminalAlternatives", "terminalAlternativesToRegex", "isTerminalGroup", "terminalGroupToRegex", "isCharacterRange", "characterRangeToRegex", "withCardinality", "isNegatedToken", "negateTokenToRegex", "isUntilToken", "untilTokenToRegex", "isRegexToken", "lastSlash", "regexFlags", "isWildcard", "alternatives", "group", "until", "negate", "range", "keywordToRegex", "escapeRegExp", "regex", "options", "createGrammarConfig", "services", "rules", "grammar", "rule", "isTerminalRule", "isCommentTerminal", "isMultilineComment", "terminalRegex", "DefaultNameRegexp", "__name", "PRINT_ERROR", "msg", "__name", "PRINT_WARNING", "timer", "func", "start", "val", "__name", "toFastProperties", "toBecomeFast", "FakeConstructor", "__name", "fakeInstance", "fakeAccess", "tokenLabel", "tokType", "hasTokenLabel", "__name", "obj", "isString_default", 
"AbstractProduction", "value", "_definition", "visitor", "forEach_default", "prod", "NonTerminal", "options", "assign_default", "pickBy_default", "v", "definition", "Rule", "Alternative", "Option", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "Repetition", "RepetitionWithSeparator", "Alternation", "Terminal", "serializeGrammar", "topRules", "map_default", "serializeProduction", "node", "convertDefinition", "serializedNonTerminal", "serializedTerminal", "pattern", "isRegExp_default", "GAstVisitor", "__name", "node", "nodeAny", "NonTerminal", "Alternative", "Option", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Repetition", "Alternation", "Terminal", "Rule", "isSequenceProd", "prod", "Alternative", "Option", "Repetition", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Terminal", "Rule", "__name", "isOptionalProd", "alreadyVisited", "Alternation", "some_default", "subProd", "NonTerminal", "includes_default", "AbstractProduction", "every_default", "isBranchingProd", "getProductionDslName", "RestWalker", "__name", "prod", "prevRest", "forEach_default", "subProd", "index", "currRest", "drop_default", "NonTerminal", "Terminal", "Alternative", "Option", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Repetition", "Alternation", "terminal", "refProd", "flatProd", "fullOrRest", "optionProd", "atLeastOneProd", "fullAtLeastOneRest", "atLeastOneSepProd", "fullAtLeastOneSepRest", "restForRepetitionWithSeparator", "manyProd", "fullManyRest", "manySepProd", "fullManySepRest", "orProd", "alt", "prodWrapper", "repSepProd", "first", "prod", "NonTerminal", "Terminal", "firstForTerminal", "isSequenceProd", "firstForSequence", "isBranchingProd", "firstForBranching", "__name", "firstSet", "seq", "nextSubProdIdx", "hasInnerProdsRemaining", "currSubProd", "isLastInnerProdOptional", "isOptionalProd", "uniq_default", "allAlternativesFirsts", "map_default", "innerProd", "flatten_default", "terminal", "IN", "ResyncFollowsWalker", "RestWalker", "__name", "topProd", "terminal", "currRest", "prevRest", "refProd", "followName", "buildBetweenProdsFollowPrefix", "fullRest", "restProd", "Alternative", "t_in_topProd_follows", "first", "computeAllProdsFollows", "topProductions", "reSyncFollows", "forEach_default", "currRefsFollow", "assign_default", "inner", "occurenceInParent", "IN", "regExpAstCache", "regExpParser", "RegExpParser", "getRegExpAst", "regExp", "regExpStr", "regExpAst", "__name", "clearRegExpParserCache", "complementErrorMessage", "failedOptimizationPrefixMsg", "getOptimizedStartCodesIndices", "regExp", "ensureOptimizations", "ast", "getRegExpAst", "firstCharOptimizedIndices", "e", "PRINT_WARNING", "msgSuffix", "PRINT_ERROR", "__name", "result", "ignoreCase", "terms", "term", "atom", "addOptimizedIdxToResult", "forEach_default", "code", "range", "rangeCode", "minOptimizationVal", "minUnOptVal", "maxUnOptVal", "minOptIdx", "charCodeToOptimizedIndex", "maxOptIdx", "currOptIdx", "isOptionalQuantifier", "isWholeOptional", "values_default", "optimizedCharIdx", "handleIgnoreCase", "char", "upperChar", "lowerChar", "findCode", "setNode", "targetCharCodes", "find_default", "codeOrRange", "includes_default", "targetCode", "quantifier", "isArray_default", "every_default", "CharCodeFinder", "BaseRegExpVisitor", "node", "canMatchCharCode", "charCodes", "pattern", "charCodeFinder", "PATTERN", "DEFAULT_MODE", "MODES", "SUPPORT_STICKY", "analyzeTokenTypes", "tokenTypes", "options", 
"defaults_default", "SUPPORT_STICKY", "__name", "msg", "action", "tracer", "initCharCodeToOptimizedIndexMap", "onlyRelevantTypes", "reject_default", "currType", "PATTERN", "Lexer", "hasCustom", "allTransformedPatterns", "map_default", "currPattern", "isRegExp_default", "regExpSource", "includes_default", "addStickyFlag", "addStartOfInput", "isFunction_default", "escapedRegExpString", "wrappedRegExp", "patternIdxToType", "patternIdxToGroup", "patternIdxToLongerAltIdxArr", "patternIdxToPushMode", "patternIdxToPopMode", "clazz", "groupName", "isString_default", "isUndefined_default", "longerAltType", "isArray_default", "type", "indexOf_default", "has_default", "patternIdxToCanLineTerminator", "lineTerminatorCharCodes", "getCharCodes", "tokType", "checkLineBreaksIssues", "canMatchCharCode", "patternIdxToIsCustom", "patternIdxToShort", "emptyGroups", "patternIdxToConfig", "isCustomPattern", "isShortPattern", "reduce_default", "acc", "x", "idx", "canBeOptimized", "charCodeToPatternIdxToConfig", "result", "currTokType", "charCode", "optimizedIdx", "charCodeToOptimizedIndex", "addToMapOfArrays", "lastOptimizedIdx", "forEach_default", "charOrInt", "currOptimizedIdx", "PRINT_ERROR", "failedOptimizationPrefixMsg", "optimizedCodes", "getOptimizedStartCodesIndices", "isEmpty_default", "code", "validatePatterns", "validModesNames", "errors", "missingResult", "findMissingPatterns", "invalidResult", "findInvalidPatterns", "validTokenTypes", "validateRegExpPattern", "findInvalidGroupType", "findModesThatDoNotExist", "findUnreachablePatterns", "withRegExpPatterns", "filter_default", "findEndOfInputAnchor", "findStartOfInputAnchor", "findUnsupportedFlags", "findDuplicatePatterns", "findEmptyMatchRegExps", "tokenTypesWithMissingPattern", "LexerDefinitionErrorType", "valid", "difference_default", "tokenTypesWithInvalidPattern", "pattern", "end_of_input", "EndAnchorFinder", "BaseRegExpVisitor", "node", "invalidRegex", "regexpAst", "getRegExpAst", "endAnchorVisitor", "matchesEmptyString", "start_of_input", "StartAnchorFinder", "startAnchorVisitor", "invalidFlags", "found", "identicalPatterns", "outerType", "innerType", "compact_default", "duplicatePatterns", "currIdenticalSet", "setOfIdentical", "tokenTypeNames", "head_default", "invalidTypes", "group", "validModes", "invalidModes", "canBeTested", "noMetaChar", "testIdx", "str", "tokenType", "testTokenType", "regExpArray", "regExp", "find_default", "char", "flags", "performRuntimeChecks", "lexerDefinition", "trackLines", "lineTerminatorCharacters", "DEFAULT_MODE", "MODES", "currModeValue", "currModeName", "currIdx", "longerAlt", "currLongerAlt", "performWarningRuntimeChecks", "warnings", "hasAnyLineBreak", "allTokenTypes", "flatten_default", "values_default", "concreteTokenTypes", "terminatorCharCodes", "currIssue", "warningDescriptor", "buildLineBreakIssueMessage", "cloneEmptyGroups", "clonedResult", "groupKeys", "keys_default", "currKey", "currGroupValue", "LineTerminatorOptimizedTester", "text", "len", "i", "c", "e", "details", "charsOrCodes", "numOrString", "map", "key", "value", "minOptimizationVal", "charCodeToOptimizedIdxMap", "tokenStructuredMatcher", "tokInstance", "tokConstructor", "instanceType", "__name", "tokenStructuredMatcherNoCategories", "token", "tokType", "tokenShortNameIdx", "tokenIdxToClass", "augmentTokenTypes", "tokenTypes", "tokenTypesAndParents", "expandCategories", "assignTokenDefaultProps", "assignCategoriesMapProp", "assignCategoriesTokensProp", "forEach_default", "result", "clone_default", "categories", "searching", 
"compact_default", "flatten_default", "map_default", "currTokType", "newCategories", "difference_default", "isEmpty_default", "hasShortKeyProperty", "hasCategoriesProperty", "isArray_default", "hasExtendingTokensTypesProperty", "hasExtendingTokensTypesMapProperty", "val", "key", "singleAssignCategoriesToksMap", "path", "nextNode", "pathNode", "nextCategory", "newPath", "includes_default", "has_default", "isTokenType", "defaultLexerErrorProvider", "token", "fullText", "startOffset", "length", "line", "column", "LexerDefinitionErrorType", "DEFAULT_LEXER_CONFIG", "defaultLexerErrorProvider", "Lexer", "__name", "lexerDefinition", "config", "phaseDesc", "phaseImpl", "indent", "time", "value", "timer", "traceMethod", "assign_default", "traceInitVal", "actualDefinition", "hasOnlySingleMode", "LineTerminatorOptimizedTester", "isArray_default", "clone_default", "DEFAULT_MODE", "performRuntimeChecks", "performWarningRuntimeChecks", "forEach_default", "currModeValue", "currModeName", "reject_default", "currTokType", "isUndefined_default", "allModeNames", "keys_default", "currModDef", "currModName", "validatePatterns", "isEmpty_default", "augmentTokenTypes", "currAnalyzeResult", "analyzeTokenTypes", "allErrMessagesString", "map_default", "error", "warningDescriptor", "PRINT_WARNING", "SUPPORT_STICKY", "identity_default", "noop_default", "unOptimizedModes", "reduce_default", "cannotBeOptimized", "canBeOptimized", "modeName", "clearRegExpParserCache", "toFastProperties", "text", "initialMode", "i", "j", "k", "matchAltImage", "longerAlt", "matchedImage", "payload", "altPayload", "imageLength", "group", "tokType", "newToken", "errLength", "droppedChar", "msg", "match", "orgText", "orgLength", "offset", "matchedTokensIndex", "guessedNumberOfTokens", "matchedTokens", "errors", "line", "column", "groups", "cloneEmptyGroups", "trackLines", "lineTerminatorPattern", "currModePatternsLength", "patternIdxToConfig", "currCharCodeToPatternIdxToConfig", "modeStack", "emptyArray", "getPossiblePatterns", "getPossiblePatternsSlow", "getPossiblePatternsOptimized", "charCode", "optimizedCharIdx", "charCodeToOptimizedIndex", "possiblePatterns", "pop_mode", "popToken", "newMode", "last_default", "modeCanBeOptimized", "push_mode", "currConfig", "recoveryEnabled", "nextCharCode", "chosenPatternIdxToConfig", "chosenPatternsLength", "currPattern", "singleCharCode", "longerAltLength", "longerAltConfig", "longerAltPattern", "numOfLTsInMatch", "foundTerminator", "lastLTEndOffset", "errorStartOffset", "errorLine", "errorColumn", "foundResyncPoint", "pushMode", "length", "regExp", "newLastIndex", "lastLTIdx", "lastCharIsLT", "fixForEndingInLT", "oldColumn", "image", "startOffset", "tokenTypeIdx", "tokenType", "startLine", "startColumn", "tokenVector", "index", "tokenToAdd", "token", "pattern", "regExpArray", "tokenLabel", "tokType", "hasTokenLabel", "__name", "hasTokenLabel", "obj", "isString_default", "__name", "PARENT", "CATEGORIES", "LABEL", "GROUP", "PUSH_MODE", "POP_MODE", "LONGER_ALT", "LINE_BREAKS", "START_CHARS_HINT", "createToken", "config", "createTokenInternal", "pattern", "tokenType", "isUndefined_default", "has_default", "augmentTokenTypes", "EOF", "Lexer", "createTokenInstance", "tokType", "image", "startOffset", "endOffset", "startLine", "endLine", "startColumn", "endColumn", "tokenMatcher", "token", "tokenStructuredMatcher", "defaultParserErrorProvider", "expected", "actual", "previous", "ruleName", "hasTokenLabel", "tokenLabel", "firstRedundant", "expectedPathsPerAlt", "customUserDescription", "errPrefix", 
"errSuffix", "head_default", "allLookAheadPaths", "reduce_default", "result", "currAltPaths", "nextValidTokenSequences", "map_default", "currPath", "currTokenType", "calculatedDescription", "itemMsg", "idx", "expectedIterationPaths", "defaultGrammarResolverErrorProvider", "topLevelRule", "undefinedRule", "defaultGrammarValidatorErrorProvider", "duplicateProds", "getExtraProductionArgument", "prod", "Terminal", "NonTerminal", "__name", "topLevelName", "duplicateProd", "index", "dslName", "getProductionDslName", "extraArgument", "hasExplicitIndex", "msg", "rule", "options", "pathMsg", "currTok", "occurrence", "currtok", "currMessage", "pathNames", "currRule", "leftRecursivePath", "Rule", "resolveGrammar", "topLevels", "errMsgProvider", "refResolver", "GastRefResolverVisitor", "__name", "GAstVisitor", "nameToTopRule", "forEach_default", "values_default", "prod", "node", "ref", "msg", "ParserDefinitionErrorType", "AbstractNextPossibleTokensWalker", "RestWalker", "__name", "topProd", "path", "clone_default", "prod", "prevRest", "refProd", "currRest", "fullRest", "isEmpty_default", "NextAfterTokenWalker", "terminal", "restProd", "Alternative", "first", "AbstractNextTerminalAfterProductionWalker", "topRule", "occurrence", "NextTerminalAfterManyWalker", "manyProd", "firstAfterMany", "head_default", "Terminal", "NextTerminalAfterManySepWalker", "manySepProd", "firstAfterManySep", "NextTerminalAfterAtLeastOneWalker", "atLeastOneProd", "firstAfterAtLeastOne", "NextTerminalAfterAtLeastOneSepWalker", "atleastOneSepProd", "firstAfterfirstAfterAtLeastOneSep", "possiblePathsFrom", "targetDef", "maxLength", "currPath", "result", "remainingPathWith", "nextDef", "drop_default", "getAlternativesForProd", "definition", "alternatives", "NonTerminal", "Option", "RepetitionMandatory", "newDef", "Repetition", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Alternation", "forEach_default", "currAlt", "nextPossibleTokensAfter", "initialDef", "tokenVector", "tokMatcher", "maxLookAhead", "EXIT_NON_TERMINAL", "EXIT_NON_TERMINAL_ARR", "EXIT_ALTERNATIVE", "foundCompletePath", "tokenVectorLength", "minimalAlternativesIndex", "possiblePaths", "last_default", "currDef", "currIdx", "currRuleStack", "currOccurrenceStack", "nextPath", "dropRight_default", "nextIdx", "actualToken", "newRuleStack", "newOccurrenceStack", "nextPathWithout", "nextPathWith", "secondIteration", "separatorGast", "nthRepetition", "i", "currAltPath", "Rule", "expandTopLevelRule", "newCurrOccurrenceStack", "PROD_TYPE", "getProdType", "prod", "Option", "Repetition", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Alternation", "__name", "getLookaheadPaths", "options", "occurrence", "rule", "prodType", "maxLookahead", "type", "getLookaheadPathsForOr", "getLookaheadPathsForOptionalProd", "buildLookaheadFuncForOr", "ruleGrammar", "hasPredicates", "dynamicTokensEnabled", "laFuncBuilder", "lookAheadPaths", "tokenMatcher", "areTokenCategoriesNotUsed", "tokenStructuredMatcherNoCategories", "tokenStructuredMatcher", "buildLookaheadFuncForOptionalProd", "k", "lookaheadBuilder", "buildAlternativesLookAheadFunc", "alts", "numOfAlts", "areAllOneTokenLookahead", "every_default", "currAlt", "currPath", "orAlts", "predicates", "map_default", "t", "currNumOfPaths", "currPredicate", "nextPath", "j", "currPathLength", "i", "nextToken", "singleTokenAlts", "flatten_default", "choiceToAlt", "reduce_default", "result", "idx", "forEach_default", "currTokType", "has_default", "currExtendingType", 
"buildSingleAlternativeLookaheadFunction", "alt", "numOfPaths", "singleTokensTypes", "isEmpty_default", "expectedTokenUniqueKey", "RestDefinitionFinderWalker", "RestWalker", "topProd", "targetOccurrence", "targetProdType", "node", "expectedProdType", "currRest", "prevRest", "optionProd", "atLeastOneProd", "atLeastOneSepProd", "manyProd", "manySepProd", "InsideDefinitionFinderVisitor", "GAstVisitor", "targetRef", "expectedProdName", "initializeArrayOfArrays", "size", "pathToHashKeys", "path", "keys", "tokType", "longerKeys", "currShorterKey", "categoriesKeySuffix", "isUniquePrefixHash", "altKnownPathsKeys", "searchPathKeys", "currAltIdx", "otherAltKnownPathsKeys", "searchIdx", "searchKey", "lookAheadSequenceFromAlternatives", "altsDefs", "partialAlts", "possiblePathsFrom", "finalResult", "altsHashes", "currAltPaths", "dict", "item", "currKey", "newData", "pathLength", "currDataset", "altIdx", "currAltPathsAndSuffixes", "currPathIdx", "currPathPrefix", "suffixDef", "prefixKeys", "currAltResult", "containsPath", "newPartialPathsAndSuffixes", "key", "orProd", "visitor", "insideDefVisitor", "insideDef", "afterDef", "insideFlat", "Alternative", "afterFlat", "alternative", "searchPath", "compareOtherPath", "otherPath", "searchTok", "otherTok", "isStrictPrefixOfPath", "prefix", "other", "otherTokType", "singleAltPaths", "singlePath", "token", "validateLookahead", "options", "lookaheadValidationErrorMessages", "map_default", "errorMessage", "ParserDefinitionErrorType", "__name", "validateGrammar", "topLevels", "tokenTypes", "errMsgProvider", "grammarName", "duplicateErrors", "flatMap_default", "currTopLevel", "validateDuplicateProductions", "termsNamespaceConflictErrors", "checkTerminalAndNoneTerminalsNameSpace", "tooManyAltsErrors", "curRule", "validateTooManyAlts", "duplicateRulesError", "validateRuleDoesNotAlreadyExist", "topLevelRule", "collectorVisitor", "OccurrenceValidationCollector", "allRuleProductions", "productionGroups", "groupBy_default", "identifyProductionForDuplicates", "duplicates", "pickBy_default", "currGroup", "values_default", "currDuplicates", "firstProd", "head_default", "msg", "dslName", "getProductionDslName", "defError", "param", "getExtraProductionArgument", "prod", "Terminal", "NonTerminal", "GAstVisitor", "subrule", "option", "manySep", "atLeastOne", "atLeastOneSep", "many", "or", "terminal", "rule", "allRules", "className", "errors", "reduce_default", "result", "errMsg", "validateRuleIsOverridden", "ruleName", "definedRulesNames", "includes_default", "validateNoLeftRecursion", "topRule", "currRule", "path", "nextNonTerminals", "getFirstNoneTerminal", "isEmpty_default", "validNextSteps", "difference_default", "errorsFromNextSteps", "currRefRule", "newPath", "clone_default", "definition", "Alternative", "Option", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Repetition", "Alternation", "flatten_default", "currSubDef", "isFirstOptional", "isOptionalProd", "hasMore", "rest", "drop_default", "OrCollector", "node", "validateEmptyOrAlternative", "orCollector", "ors", "currOr", "exceptLast", "dropRight_default", "currAlternative", "currAltIdx", "possibleFirstInAlt", "nextPossibleTokensAfter", "tokenStructuredMatcher", "validateAmbiguousAlternationAlternatives", "globalMaxLookahead", "reject_default", "currOccurrence", "actualMaxLookahead", "alternatives", "getLookaheadPathsForOr", "altsAmbiguityErrors", "checkAlternativesAmbiguities", "altsPrefixAmbiguityErrors", "checkPrefixAlternativesAmbiguities", "RepetitionCollector", 
"validateSomeNonEmptyLookaheadPath", "topLevelRules", "maxLookahead", "forEach_default", "currTopRule", "currProd", "prodType", "getProdType", "pathsInsideProduction", "getLookaheadPathsForOptionalProd", "alternation", "foundAmbiguousPaths", "identicalAmbiguities", "currAlt", "currPath", "altsCurrPathAppearsIn", "currOtherAlt", "currOtherAltIdx", "containsPath", "currAmbDescriptor", "ambgIndices", "pathsAndIndices", "idx", "currPathsAndIdx", "compact_default", "currPathAndIdx", "targetIdx", "targetPath", "prefixAmbiguitiesPathsAndIndices", "filter_default", "searchPathAndIdx", "isStrictPrefixOfPath", "currAmbPathAndIdx", "occurrence", "tokenNames", "currToken", "currRuleName", "resolveGrammar", "options", "actualOptions", "defaults_default", "defaultGrammarResolverErrorProvider", "topRulesTable", "forEach_default", "rule", "__name", "validateGrammar", "defaultGrammarValidatorErrorProvider", "MISMATCHED_TOKEN_EXCEPTION", "NO_VIABLE_ALT_EXCEPTION", "EARLY_EXIT_EXCEPTION", "NOT_ALL_INPUT_PARSED_EXCEPTION", "RECOGNITION_EXCEPTION_NAMES", "isRecognitionException", "error", "includes_default", "__name", "RecognitionException", "message", "token", "MismatchedTokenException", "previousToken", "NoViableAltException", "NotAllInputParsedException", "EarlyExitException", "EOF_FOLLOW_KEY", "IN_RULE_RECOVERY_EXCEPTION", "InRuleRecoveryException", "__name", "message", "Recoverable", "config", "has_default", "DEFAULT_PARSER_CONFIG", "attemptInRepetitionRecovery", "tokType", "tokToInsert", "createTokenInstance", "grammarRule", "grammarRuleArgs", "lookAheadFunc", "expectedTokType", "reSyncTokType", "savedLexerState", "resyncedTokens", "passedResyncPoint", "nextTokenWithoutResync", "currToken", "generateErrorMessage", "previousToken", "msg", "error", "MismatchedTokenException", "dropRight_default", "expectTokAfterLastMatch", "nextTokIdx", "notStuck", "tokIdxInRule", "grammarPath", "follows", "nextTok", "expectedToken", "isEmpty_default", "mismatchedTok", "find_default", "possibleFollowsTokType", "tokenTypeIdx", "followKey", "currentRuleReSyncSet", "includes_default", "allPossibleReSyncTokTypes", "nextToken", "k", "foundMatch", "resyncTokType", "tokenMatcher", "currRuleShortName", "currRuleIdx", "prevRuleShortName", "explicitRuleStack", "explicitOccurrenceStack", "map_default", "ruleName", "idx", "followStack", "currKey", "flatten_default", "EOF", "followName", "IN", "token", "resyncTokens", "prodFunc", "args", "lookaheadFunc", "dslMethodIdx", "prodOccurrence", "nextToksWalker", "pathRuleStack", "pathOccurrenceStack", "clone_default", "currShortName", "key", "firstAfterRepInfo", "currRuleName", "ruleGrammar", "isEndOfRule", "getKeyForAutomaticLookahead", "ruleIdx", "dslMethodIdx", "occurrence", "__name", "LLkLookaheadStrategy", "__name", "options", "_a", "DEFAULT_PARSER_CONFIG", "leftRecursionErrors", "isEmpty_default", "emptyAltErrors", "ambiguousAltsErrors", "emptyRepetitionErrors", "rules", "flatMap_default", "currTopRule", "validateNoLeftRecursion", "defaultGrammarValidatorErrorProvider", "validateEmptyOrAlternative", "maxLookahead", "validateAmbiguousAlternationAlternatives", "validateSomeNonEmptyLookaheadPath", "buildLookaheadFuncForOr", "buildAlternativesLookAheadFunc", "buildLookaheadFuncForOptionalProd", "getProdType", "buildSingleAlternativeLookaheadFunction", "LooksAhead", "__name", "config", "has_default", "DEFAULT_PARSER_CONFIG", "LLkLookaheadStrategy", "rules", "forEach_default", "currRule", "alternation", "repetition", "option", "repetitionMandatory", "repetitionMandatoryWithSeparator", 
"repetitionWithSeparator", "collectMethods", "currProd", "prodIdx", "getProductionDslName", "laFunc", "key", "getKeyForAutomaticLookahead", "rule", "prodOccurrence", "prodKey", "prodType", "prodMaxLookahead", "dslMethodName", "dslMethodIdx", "occurrence", "currRuleShortName", "value", "DslMethodsCollectorVisitor", "GAstVisitor", "manySep", "atLeastOne", "atLeastOneSep", "many", "or", "collectorVisitor", "dslMethods", "setNodeLocationOnlyOffset", "currNodeLocation", "newLocationInfo", "__name", "setNodeLocationFull", "addTerminalToCst", "node", "token", "tokenTypeName", "addNoneTerminalToCst", "ruleName", "ruleResult", "NAME", "defineNameProp", "obj", "nameValue", "__name", "defaultVisit", "ctx", "param", "childrenNames", "keys_default", "childrenNamesLength", "currChildName", "currChildArray", "currChildArrayLength", "j", "currChild", "__name", "createBaseSemanticVisitorConstructor", "grammarName", "ruleNames", "derivedConstructor", "defineNameProp", "semanticProto", "cstNode", "isArray_default", "isUndefined_default", "semanticDefinitionErrors", "validateVisitor", "isEmpty_default", "errorMessages", "map_default", "currDefError", "createBaseVisitorConstructorWithDefaults", "baseConstructor", "withDefaultsProto", "forEach_default", "ruleName", "CstVisitorDefinitionError", "visitorInstance", "validateMissingCstMethods", "missingRuleNames", "filter_default", "currRuleName", "isFunction_default", "errors", "compact_default", "TreeBuilder", "__name", "config", "has_default", "DEFAULT_PARSER_CONFIG", "noop_default", "setNodeLocationFull", "setNodeLocationOnlyOffset", "cstNode", "nextToken", "fullRuleName", "ruleCstNode", "prevToken", "loc", "key", "consumedToken", "rootCst", "addTerminalToCst", "ruleCstResult", "ruleName", "preCstNode", "addNoneTerminalToCst", "isUndefined_default", "newBaseCstVisitorConstructor", "createBaseSemanticVisitorConstructor", "keys_default", "newConstructor", "createBaseVisitorConstructorWithDefaults", "ruleStack", "occurrenceStack", "LexerAdapter", "__name", "newInput", "END_OF_FILE", "howMuch", "soughtIdx", "newState", "RecognizerApi", "__name", "impl", "idx", "tokType", "options", "ruleToCall", "actionORMethodDef", "altsOrOpts", "name", "implementation", "config", "DEFAULT_RULE_CONFIG", "includes_default", "error", "defaultGrammarValidatorErrorProvider", "ParserDefinitionErrorType", "ruleImplementation", "ruleErrors", "validateRuleIsOverridden", "grammarRule", "args", "orgState", "e", "isRecognitionException", "serializeGrammar", "values_default", "RecognizerEngine", "__name", "tokenVocabulary", "config", "tokenStructuredMatcherNoCategories", "has_default", "isArray_default", "isEmpty_default", "reduce_default", "acc", "tokType", "every_default", "flatten_default", "values_default", "isTokenType", "allTokenTypes", "uniqueTokens", "uniq_default", "isObject_default", "clone_default", "EOF", "noTokenCategoriesUsed", "tokenConstructor", "tokenStructuredMatcher", "augmentTokenTypes", "ruleName", "impl", "resyncEnabled", "DEFAULT_RULE_CONFIG", "recoveryValueFunc", "shortName", "invokeRuleWithTry", "args", "cst", "e", "resyncEnabledConfig", "isFirstInvokedRule", "reSyncEnabled", "isRecognitionException", "recogError", "reSyncTokType", "partialCstResult", "actionORMethodDef", "occurrence", "key", "lookAheadFunc", "action", "predicate", "orgLookaheadFunction", "prodOccurrence", "laKey", "notStuck", "PROD_TYPE", "NextTerminalAfterAtLeastOneWalker", "options", "separator", "separatorLookAheadFunc", "NextTerminalAfterAtLeastOneSepWalker", "lookaheadFunction", 
"NextTerminalAfterManyWalker", "NextTerminalAfterManySepWalker", "nextTerminalAfterWalker", "beforeIteration", "altsOrOpts", "alts", "altIdxToTake", "firstRedundantTok", "errMsg", "NotAllInputParsedException", "ruleToCall", "idx", "ruleResult", "consumedToken", "nextToken", "eFromConsumption", "msg", "previousToken", "MismatchedTokenException", "follows", "eFromInRuleRecovery", "IN_RULE_RECOVERY_EXCEPTION", "savedErrors", "savedRuleStack", "newState", "fullName", "idxInCallingRule", "ErrorHandler", "__name", "config", "has_default", "DEFAULT_PARSER_CONFIG", "error", "isRecognitionException", "clone_default", "newErrors", "occurrence", "prodType", "userDefinedErrMsg", "ruleName", "ruleGrammar", "insideProdPaths", "getLookaheadPathsForOptionalProd", "actualTokens", "i", "msg", "EarlyExitException", "errMsgTypes", "lookAheadPathsPerAlternative", "getLookaheadPathsForOr", "previousToken", "errMsg", "NoViableAltException", "ContentAssist", "__name", "startRuleName", "precedingInput", "startRuleGast", "isUndefined_default", "nextPossibleTokensAfter", "grammarPath", "topRuleName", "head_default", "topProduction", "NextAfterTokenWalker", "RECORDING_NULL_OBJECT", "HANDLE_SEPARATOR", "MAX_METHOD_IDX", "RFT", "createToken", "Lexer", "augmentTokenTypes", "RECORDING_PHASE_TOKEN", "createTokenInstance", "RECORDING_PHASE_CSTNODE", "GastRecorder", "__name", "config", "i", "idx", "arg1", "arg2", "that", "impl", "grammarRule", "args", "howMuch", "END_OF_FILE", "name", "def", "newTopLevelRule", "Rule", "originalError", "actionORMethodDef", "occurrence", "recordProd", "Option", "RepetitionMandatory", "options", "RepetitionMandatoryWithSeparator", "Repetition", "RepetitionWithSeparator", "altsOrOpts", "recordOrProd", "ruleToCall", "assertMethodIdxIsValid", "has_default", "error", "getIdxSuffix", "prevProd", "last_default", "ruleName", "newNoneTerminal", "NonTerminal", "tokType", "hasShortKeyProperty", "Terminal", "prodConstructor", "mainProdArg", "handleSep", "grammarAction", "isFunction_default", "newProd", "hasOptions", "isArray_default", "alts", "newOrProd", "Alternation", "hasPredicates", "some_default", "currAlt", "forEach_default", "currAltFlat", "Alternative", "PerformanceTracer", "__name", "config", "has_default", "userTraceInitPerf", "traceIsNumber", "DEFAULT_PARSER_CONFIG", "phaseDesc", "phaseImpl", "indent", "time", "value", "timer", "traceMethod", "applyMixins", "derivedCtor", "baseCtors", "baseCtor", "baseProto", "propName", "basePropDescriptor", "__name", "END_OF_FILE", "createTokenInstance", "EOF", "DEFAULT_PARSER_CONFIG", "defaultParserErrorProvider", "DEFAULT_RULE_CONFIG", "__name", "ParserDefinitionErrorType", "EMPTY_ALT", "value", "Parser", "_Parser", "parserInstance", "defErrorsMsgs", "className", "toFastProperties", "forEach_default", "currRuleName", "originalGrammarAction", "recordedRuleGast", "resolverErrors", "resolveGrammar", "values_default", "isEmpty_default", "validationErrors", "validateGrammar", "defaultGrammarValidatorErrorProvider", "lookaheadValidationErrors", "validateLookahead", "allFollows", "computeAllProdsFollows", "_b", "_a", "map_default", "defError", "tokenVocabulary", "config", "that", "has_default", "applyMixins", "Recoverable", "LooksAhead", "TreeBuilder", "LexerAdapter", "RecognizerEngine", "RecognizerApi", "ErrorHandler", "ContentAssist", "GastRecorder", "PerformanceTracer", "EmbeddedActionsParser", "Parser", "__name", "tokenVocabulary", "config", "DEFAULT_PARSER_CONFIG", "configClone", "clone_default", "buildATNKey", "rule", "type", "occurrence", "__name", 
"ATN_BASIC", "ATN_RULE_START", "ATN_PLUS_BLOCK_START", "ATN_STAR_BLOCK_START", "ATN_RULE_STOP", "ATN_BLOCK_END", "ATN_STAR_LOOP_BACK", "ATN_STAR_LOOP_ENTRY", "ATN_PLUS_LOOP_BACK", "ATN_LOOP_END", "AbstractTransition", "__name", "target", "AtomTransition", "tokenType", "EpsilonTransition", "RuleTransition", "ruleStart", "rule", "followState", "createATN", "rules", "atn", "createRuleStartAndStopATNStates", "ruleLength", "i", "ruleBlock", "block", "buildRuleHandle", "start", "newState", "ATN_RULE_START", "stop", "atom", "production", "Terminal", "tokenRef", "NonTerminal", "ruleRef", "Alternation", "alternation", "Option", "option", "Repetition", "repetition", "RepetitionWithSeparator", "repetitionSep", "RepetitionMandatory", "repetitionMandatory", "RepetitionMandatoryWithSeparator", "repetitionMandatorySep", "starState", "ATN_STAR_BLOCK_START", "defineDecisionState", "handle", "makeAlts", "star", "sep", "plusState", "ATN_PLUS_BLOCK_START", "plus", "ATN_BASIC", "alts", "map_default", "e", "optional", "handles", "filter_default", "makeBlock", "blkStart", "blkEnd", "loop", "end", "buildATNKey", "epsilon", "entry", "loopEnd", "state", "alt", "getProdType", "altsLength", "transition", "isRuleTransition", "ruleTransition", "next", "removeState", "first", "last", "left", "right", "addTransition", "currentRule", "nonTerminal", "call", "a", "b", "partial", "t", "DFA_ERROR", "ATNConfigSet", "__name", "config", "key", "getATNConfigKey", "map_default", "value", "k", "alt", "e", "createDFACache", "startState", "decision", "map", "predicateSet", "key", "existing", "__name", "PredicateSet", "index", "value", "size", "i", "EMPTY_PREDICATES", "LLStarLookaheadStrategy", "LLkLookaheadStrategy", "options", "_a", "message", "createATN", "initATNSimulator", "prodOccurrence", "rule", "hasPredicates", "dynamicTokensEnabled", "dfas", "logging", "buildATNKey", "decisionIndex", "partialAlts", "map_default", "getLookaheadPaths", "currAlt", "path", "isLL1Sequence", "choiceToAlt", "reduce_default", "result", "idx", "forEach_default", "currTokType", "currExtendingType", "orAlts", "nextToken", "prediction", "gate", "predicates", "length", "adaptivePredict", "prodType", "alts", "e", "g", "alt", "singleTokensTypes", "flatten_default", "isEmpty_default", "expectedTokenUniqueKey", "sequences", "allowEmpty", "fullSet", "altSet", "tokType", "indices", "atn", "decisionLength", "decisionToDFA", "dfaCaches", "dfa", "start", "closure", "computeStartState", "addDFAState", "newDFAState", "performLookahead", "s0", "previousD", "t", "d", "getExistingTargetState", "computeLookaheadTarget", "DFA_ERROR", "buildAdaptivePredictError", "token", "lookahead", "reach", "computeReachSet", "addDFAEdge", "newState", "predictedAlt", "getUniqueAlt", "hasConflictTerminatingPrediction", "min_default", "reportLookaheadAmbiguity", "ambiguityIndices", "prefixPath", "atnState", "topLevelRule", "production", "buildAmbiguityError", "pathMsg", "currtok", "tokenLabel", "occurrence", "currMessage", "getProductionDslName", "prod", "NonTerminal", "Option", "Alternation", "RepetitionMandatory", "RepetitionMandatoryWithSeparator", "RepetitionWithSeparator", "Repetition", "Terminal", "previous", "current", "nextTransitions", "flatMap_default", "nextTokenTypes", "uniqBy_default", "AtomTransition", "state", "configs", "intermediate", "ATNConfigSet", "skippedStopStates", "c", "ATN_RULE_STOP", "transitionLength", "transition", "target", "getReachableTarget", "hasConfigInRuleStopState", "tokenMatcher", "from", "to", "mapKey", "numberOfTransitions", "config", "p", 
"atnStack", "followConfig", "getEpsilonTarget", "EpsilonTransition", "RuleTransition", "stack", "allConfigsInRuleStopStates", "altSets", "getConflictingAltSets", "hasConflictingAltSet", "hasStateAssociatedWithOneAlt", "configToAlts", "getATNConfigKey", "DocumentUri", "is", "value", "__name", "URI", "integer", "uinteger", "Position", "create", "line", "character", "candidate", "Is", "Range", "one", "two", "three", "four", "Location", "uri", "range", "LocationLink", "targetUri", "targetRange", "targetSelectionRange", "originSelectionRange", "Color", "red", "green", "blue", "alpha", "ColorInformation", "color", "ColorPresentation", "label", "textEdit", "additionalTextEdits", "TextEdit", "FoldingRangeKind", "FoldingRange", "startLine", "endLine", "startCharacter", "endCharacter", "kind", "collapsedText", "result", "DiagnosticRelatedInformation", "location", "message", "DiagnosticSeverity", "DiagnosticTag", "CodeDescription", "Diagnostic", "severity", "code", "source", "relatedInformation", "_a", "Command", "title", "command", "args", "replace", "newText", "insert", "position", "del", "ChangeAnnotation", "needsConfirmation", "description", "ChangeAnnotationIdentifier", "AnnotatedTextEdit", "annotation", "TextDocumentEdit", "textDocument", "edits", "OptionalVersionedTextDocumentIdentifier", "CreateFile", "options", "RenameFile", "oldUri", "newUri", "DeleteFile", "WorkspaceEdit", "change", "TextDocumentIdentifier", "create", "uri", "__name", "is", "value", "candidate", "Is", "VersionedTextDocumentIdentifier", "version", "OptionalVersionedTextDocumentIdentifier", "TextDocumentItem", "languageId", "text", "MarkupKind", "MarkupContent", "CompletionItemKind", "InsertTextFormat", "CompletionItemTag", "InsertReplaceEdit", "newText", "insert", "replace", "Range", "InsertTextMode", "CompletionItemLabelDetails", "CompletionItem", "label", "CompletionList", "items", "isIncomplete", "MarkedString", "fromPlainText", "plainText", "Hover", "ParameterInformation", "documentation", "SignatureInformation", "parameters", "result", "DocumentHighlightKind", "DocumentHighlight", "range", "kind", "SymbolKind", "SymbolTag", "SymbolInformation", "name", "containerName", "WorkspaceSymbol", "DocumentSymbol", "detail", "selectionRange", "children", "CodeActionKind", "CodeActionTriggerKind", "CodeActionContext", "diagnostics", "only", "triggerKind", "Diagnostic", "CodeAction", "title", "kindOrCommandOrEdit", "checkKind", "Command", "WorkspaceEdit", "CodeLens", "data", "FormattingOptions", "tabSize", "insertSpaces", "DocumentLink", "target", "SelectionRange", "parent", "SemanticTokenTypes", "SemanticTokenModifiers", "SemanticTokens", "InlineValueText", "InlineValueVariableLookup", "variableName", "caseSensitiveLookup", "InlineValueEvaluatableExpression", "expression", "InlineValueContext", "frameId", "stoppedLocation", "InlayHintKind", "InlayHintLabelPart", "Location", "InlayHint", "position", "Position", "TextEdit", "StringValue", "createSnippet", "InlineCompletionItem", "insertText", "filterText", "command", "InlineCompletionList", "InlineCompletionTriggerKind", "SelectedCompletionInfo", "InlineCompletionContext", "selectedCompletionInfo", "WorkspaceFolder", "URI", "TextDocument", "create", "uri", "languageId", "version", "content", "FullTextDocument", "__name", "is", "value", "candidate", "Is", "applyEdits", "document", "edits", "text", "sortedEdits", "mergeSort", "a", "b", "diff", "lastModifiedOffset", "i", "e", "startOffset", "endOffset", "data", "compare", "p", "left", "right", "leftIdx", "rightIdx", "range", "start", 
"end", "event", "lineOffsets", "isLineStart", "ch", "offset", "low", "high", "Position", "mid", "line", "position", "lineOffset", "nextLineOffset", "toString", "defined", "undefined", "boolean", "string", "number", "numberRange", "min", "max", "integer", "uinteger", "func", "objectLiteral", "typedArray", "check", "CstNodeBuilder", "__name", "input", "RootCstNodeImpl", "feature", "compositeNode", "CompositeCstNodeImpl", "token", "leafNode", "LeafCstNodeImpl", "tokenToRange", "node", "parent", "index", "item", "current", "hiddenTokens", "hiddenNode", "tokenStart", "tokenEnd", "i", "child", "childStart", "childEnd", "isCompositeCstNode", "AbstractCstNode", "_a", "_b", "value", "offset", "length", "range", "tokenType", "hidden", "CstNodeContainer", "firstNode", "lastNode", "firstRange", "lastRange", "Position", "_CstNodeContainer", "items", "start", "count", "DatatypeSymbol", "isDataTypeNode", "node", "__name", "ruleSuffix", "withRuleSuffix", "name", "AbstractLangiumParser", "services", "tokens", "ChevrotainWrapper", "idx", "choices", "callback", "LangiumParser", "CstNodeBuilder", "rule", "impl", "type", "isDataTypeRule", "getTypeName", "ruleMethod", "input", "lexerResult", "result", "$type", "implementation", "args", "tokenType", "feature", "token", "leafNode", "assignment", "isCrossRef", "current", "convertedValue", "isKeyword", "text", "cstNode", "subruleResult", "resultKind", "object", "newItem", "action", "last", "pop", "obj", "linkContentToContainer", "assignMandatoryProperties", "getContainerOfType", "isAssignment", "isCrossReference", "operator", "value", "item", "target", "source", "existingValue", "newValue", "AbstractParserErrorMessageProvider", "options", "defaultParserErrorProvider", "LangiumParserErrorMessageProvider", "expected", "actual", "firstRedundant", "LangiumCompletionParser", "size", "element", "index", "defaultConfig", "EmbeddedActionsParser", "config", "useDefaultLookahead", "LLkLookaheadStrategy", "LLStarLookaheadStrategy", "createParser", "grammar", "parser", "tokens", "buildRules", "__name", "parserContext", "reachable", "getAllReachableRules", "parserRules", "stream", "isParserRule", "rule", "ctx", "buildElement", "element", "ignoreGuard", "method", "isKeyword", "buildKeyword", "isAction", "buildAction", "isAssignment", "isCrossReference", "buildCrossReference", "isRuleCall", "buildRuleCall", "isAlternatives", "buildAlternatives", "isUnorderedGroup", "buildUnorderedGroup", "isGroup", "buildGroup", "isEndOfFile", "idx", "EOF", "ErrorWithLocation", "wrap", "getGuardCondition", "action", "actionType", "getTypeName", "ruleCall", "predicate", "buildRuleCallPredicate", "args", "getRule", "isTerminalRule", "getToken", "assertUnreachable", "namedArgs", "predicates", "e", "buildPredicate", "ruleArgs", "i", "ruleTarget", "condition", "isDisjunction", "left", "right", "isConjunction", "isNegation", "value", "isParameterReference", "name", "isBooleanLiteral", "alternatives", "methods", "predicatedMethod", "guard", "alt", "gate", "group", "orIdx", "idFunc", "groupIdx", "lParser", "stackId", "key", "groupState", "trackedAlternatives", "wrapped", "crossRef", "terminal", "terminalRule", "keyword", "assignment", "findNameAssignment", "assignTerminal", "token", "cardinality", "EMPTY_ALT", "getRuleName", "item", "parent", "ruleName", "createCompletionParser", "services", "grammar", "lexer", "parser", "LangiumCompletionParser", "createParser", "__name", "createLangiumParser", "services", "parser", "prepareLangiumParser", "__name", "grammar", "lexer", "LangiumParser", "createParser", 
"DefaultTokenBuilder", "__name", "grammar", "options", "reachableRules", "stream", "getAllReachableRules", "terminalTokens", "tokens", "terminalToken", "pattern", "isWhitespace", "rules", "isTerminalRule", "e", "terminal", "regex", "terminalRegex", "tokenType", "Lexer", "stickyRegex", "text", "offset", "isParserRule", "rule", "streamAllContents", "isKeyword", "a", "b", "keyword", "caseInsensitive", "getCaseInsensitivePattern", "longerAlts", "token", "partialMatches", "DefaultValueConverter", "__name", "input", "cstNode", "feature", "isCrossReference", "getCrossReferenceTerminal", "isRuleCall", "rule", "ValueConverter", "_a", "getRuleType", "convertString", "result", "i", "c", "c1", "convertEscapeCharacter", "char", "convertID", "convertInt", "convertBigint", "convertDate", "convertNumber", "convertBoolean", "cancellation_exports", "__reExport", "delayNextTick", "resolve", "__name", "lastTick", "globalInterruptionPeriod", "startCancelableOperation", "setInterruptionPeriod", "period", "OperationCancelled", "isOperationCancelled", "err", "interruptAndCheck", "token", "current", "Deferred", "reject", "arg", "FullTextDocument", "_FullTextDocument", "__name", "uri", "languageId", "version", "content", "range", "start", "end", "changes", "change", "getWellformedRange", "startOffset", "endOffset", "startLine", "endLine", "lineOffsets", "addedLineOffsets", "computeLineOffsets", "i", "len", "diff", "offset", "low", "high", "mid", "line", "position", "lineOffset", "nextLineOffset", "isEOL", "event", "candidate", "TextDocument", "create", "update", "document", "applyEdits", "edits", "text", "sortedEdits", "mergeSort", "getWellformedEdit", "a", "b", "lastModifiedOffset", "spans", "e", "data", "compare", "p", "left", "right", "leftIdx", "rightIdx", "isAtLineStart", "textOffset", "result", "ch", "char", "textEdit", "assertPath", "path", "TypeError", "JSON", "stringify", "normalizeStringPosix", "allowAboveRoot", "code", "res", "lastSegmentLength", "lastSlash", "dots", "i", "length", "charCodeAt", "lastSlashIndex", "lastIndexOf", "slice", "posix", "resolve", "__name", "cwd", "resolvedPath", "resolvedAbsolute", "arguments", "process", "normalize", "isAbsolute", "trailingSeparator", "join", "joined", "arg", "relative", "from", "to", "fromStart", "fromEnd", "fromLen", "toStart", "toLen", "lastCommonSep", "fromCode", "out", "_makeLong", "dirname", "hasRoot", "end", "matchedSlash", "basename", "ext", "start", "extIdx", "firstNonSlashEnd", "extname", "startDot", "startPart", "preDotState", "format", "pathObject", "sep", "dir", "root", "base", "name", "parse", "ret", "delimiter", "win32", "module", "exports", "__webpack_module_cache__", "__webpack_require__", "moduleId", "cachedModule", "__webpack_modules__", "d", "definition", "key", "o", "Object", "defineProperty", "enumerable", "get", "obj", "prop", "prototype", "hasOwnProperty", "call", "r", "Symbol", "toStringTag", "value", "isWindows", "f", "P", "platform", "navigator", "userAgent", "indexOf", "_schemePattern", "_singleSlashStart", "_doubleSlashStart", "_validateUri", "_strict", "scheme", "Error", "authority", "query", "fragment", "test", "_empty", "_slash", "_regexp", "URI", "thing", "fsPath", "with", "toString", "schemeOrData", "this", "uriToFsPath", "change", "Uri", "match", "exec", "percentDecode", "replace", "idx", "substring", "components", "result", "skipEncoding", "_asFormatted", "toJSON", "data", "_formatted", "external", "_fsPath", "_sep", "_pathSepMarker", "$mid", "encodeTable", "encodeURIComponentFast", "uriComponent", "isPath", "isAuthority", 
"nativeEncodePos", "pos", "encodeURIComponent", "charAt", "substr", "escaped", "encodeURIComponentMinimal", "uri", "keepDriveLetterCasing", "toLowerCase", "encoder", "userinfo", "String", "fromCharCode", "decodeURIComponentGraceful", "str", "decodeURIComponent", "_rEncodedAsHex", "A", "posixPath", "slash", "Utils", "t", "joinPath", "paths", "resolvePath", "slashAdded", "LIB", "UriUtils", "Utils", "equals", "a", "b", "__name", "relative", "from", "to", "fromPath", "toPath", "fromParts", "e", "toParts", "i", "backPart", "toPart", "DocumentState", "DefaultLangiumDocumentFactory", "__name", "services", "uri", "cancellationToken", "content", "textDocument", "URI", "text", "model", "parseResult", "cancelToken", "document", "textDocumentGetter", "oldText", "_a", "_b", "serviceRegistry", "textDoc", "TextDocument", "DefaultLangiumDocuments", "stream", "uriString", "langiumDoc", "DefaultLinker", "__name", "services", "document", "cancelToken", "node", "streamAst", "interruptAndCheck", "streamReferences", "ref", "refInfo", "description", "isLinkingError", "linkedNode", "err", "property", "refNode", "refText", "linker", "reference", "isAstNode", "isAstNodeDescription", "refData", "getDocument", "DocumentState", "_a", "nodeDescription", "doc", "targetDescription", "referenceType", "isNamed", "node", "__name", "DefaultNameProvider", "findNodeForProperty", "DefaultReferences", "__name", "services", "sourceCstNode", "assignment", "findAssignment", "nodeElem", "reference", "isReference", "ref", "nameNode", "isChildNode", "astNode", "targetNode", "options", "refs", "indexReferences", "UriUtils", "stream", "doc", "getDocument", "path", "toDocumentSegment", "MultiMap", "__name", "elements", "key", "value", "Reduction", "stream", "a", "values", "index", "_a", "callbackfn", "array", "BiMap", "DefaultScopeComputation", "__name", "services", "document", "cancelToken", "parentNode", "children", "streamContents", "exports", "node", "interruptAndCheck", "name", "rootNode", "scopes", "MultiMap", "streamAllContents", "container", "StreamScope", "__name", "elements", "outerScope", "options", "_a", "name", "local", "e", "MapScope", "element", "localName", "elementStream", "stream", "EMPTY_SCOPE", "EMPTY_STREAM", "DisposableCache", "__name", "disposable", "SimpleCache", "key", "value", "provider", "ContextCache", "converter", "contextKey", "contextCache", "mapKey", "documentCache", "DocumentCache", "sharedServices", "uri", "changed", "deleted", "allUris", "WorkspaceCache", "DefaultScopeProvider", "__name", "services", "WorkspaceCache", "context", "scopes", "referenceType", "precomputed", "getDocument", "currentNode", "allDescriptions", "stream", "desc", "result", "i", "elements", "outerScope", "options", "StreamScope", "s", "e", "name", "_context", "MapScope", "isAstNodeWithComment", "node", "__name", "isIntermediateReference", "obj", "DefaultJsonSerializer", "services", "options", "specificReplacer", "defaultReplacer", "key", "value", "replacer", "getDocument", "content", "root", "refText", "sourceText", "textRegions", "comments", "uriConverter", "isReference", "refValue", "$refText", "targetDocument", "targetUri", "targetPath", "_b", "_a", "isAstNode", "astNode", "_c", "_d", "comment", "createDocumentSegment", "cstNode", "textRegion", "assignments", "propertyAssignments", "findNodesForProperty", "container", "containerProperty", "containerIndex", "propertyName", "item", "index", "element", "mutable", "property", "reference", "error", "ref", "uri", "fragmentIndex", "documentUri", "URI", "document", "err", 
"DefaultServiceRegistry", "__name", "language", "ext", "uri", "UriUtils", "services", "diagnosticData", "code", "__name", "ValidationCategory", "ValidationRegistry", "services", "MultiMap", "checksRecord", "thisObj", "category", "type", "ch", "callbacks", "check", "entry", "node", "accept", "cancelToken", "err", "isOperationCancelled", "message", "subtype", "categories", "checks", "stream", "DefaultDocumentValidator", "__name", "services", "document", "options", "cancelToken", "parseResult", "diagnostics", "interruptAndCheck", "d", "_a", "DocumentValidator", "err", "isOperationCancelled", "_options", "lexerError", "diagnostic", "toDiagnosticSeverity", "diagnosticData", "parserError", "range", "token", "position", "tokenToRange", "reference", "linkingError", "info", "rootNode", "validationItems", "acceptor", "severity", "message", "streamAst", "node", "checks", "check", "getDiagnosticRange", "cstNode", "findNodeForProperty", "findNodeForKeyword", "DefaultAstNodeDescriptionProvider", "__name", "services", "node", "name", "document", "getDocument", "path", "nameNodeSegment", "nameSegmentGetter", "_a", "toDocumentSegment", "DefaultReferenceDescriptionProvider", "cancelToken", "descr", "rootNode", "astNode", "streamAst", "interruptAndCheck", "streamReferences", "refInfo", "isLinkingError", "description", "targetNodeDescr", "refCstNode", "docUri", "UriUtils", "DefaultAstNodeLocator", "__name", "node", "containerPath", "newSegment", "$containerProperty", "$containerIndex", "path", "previousValue", "currentValue", "propertyIndex", "property", "arrayIndex", "array", "DefaultConfigurationProvider", "__name", "services", "Deferred", "params", "_b", "_a", "languages", "lang", "configToUpdate", "configs", "conf", "idx", "change", "section", "configuration", "language", "sectionName", "languageId", "Disposable", "create", "callback", "__name", "DefaultDocumentBuilder", "__name", "services", "MultiMap", "DocumentState", "documents", "options", "cancelToken", "document", "key", "buildState", "previousCategories", "_a", "categories", "_b", "ValidationCategory", "c", "e", "changed", "deleted", "deletedUri", "changedUri", "newDocument", "allChangedUris", "stream", "uri", "doc", "interruptAndCheck", "rebuildDocuments", "listener", "changedUris", "ref", "callback", "Disposable", "index", "scopeComputation", "toBeValidated", "state", "targetState", "filtered", "uriOrToken", "OperationCancelled", "resolve", "reject", "buildDisposable", "cancelDisposable", "listeners", "validator", "validationSetting", "diagnostics", "newCategories", "DefaultIndexManager", "__name", "services", "ContextCache", "targetNode", "astNodePath", "targetDocUri", "getDocument", "result", "docRefs", "refDescr", "UriUtils", "stream", "nodeType", "uris", "documentUris", "uri", "_a", "e", "uriString", "document", "cancelToken", "exports", "indexData", "changedUris", "references", "ref", "DefaultWorkspaceManager", "__name", "services", "Deferred", "params", "_a", "_params", "token", "folders", "cancelToken", "documents", "interruptAndCheck", "fileExtensions", "e", "collector", "document", "wf", "entry", "_folders", "_collector", "workspaceFolder", "URI", "folderPath", "content", "_workspaceFolder", "name", "UriUtils", "extname", "DefaultLexer", "__name", "services", "tokens", "lexerTokens", "isTokenTypeDictionary", "Lexer", "text", "chevrotainResult", "_a", "buildTokens", "isIMultiModeLexerDefinition", "res", "token", "isTokenTypeArray", "tokenVocabulary", "parseJSDoc", "node", "start", "options", "opts", "position", "Position", "lines", 
"getLines", "normalizedOptions", "normalizeOptions", "tokens", "tokenize", "parseJSDocComment", "__name", "isJSDoc", "first", "last", "firstRegex", "lastRegex", "content", "NEWLINE_REGEXP", "tagRegex", "inlineTagRegex", "context", "currentLine", "currentCharacter", "i", "line", "index", "match", "_a", "_b", "_c", "lastCharacter", "skipWhitespace", "Range", "tagMatch", "fullMatch", "value", "end", "rest", "inlineTagMatches", "buildInlineTokens", "tags", "lineIndex", "characterIndex", "lastIndex", "matchIndex", "startContent", "offset", "tagName", "endContent", "nonWhitespaceRegex", "whitespaceEndRegex", "startPosition", "JSDocCommentImpl", "elements", "element", "parseJSDocElement", "_d", "next", "parseJSDocTag", "parseJSDocText", "appendEmptyLine", "token", "JSDocLineImpl", "firstToken", "lastToken", "parseJSDocInline", "JSDocTextImpl", "parseJSDocLine", "inline", "tagToken", "name", "nextToken", "docLine", "JSDocTagImpl", "textDoc", "range", "normalizeOption", "option", "escaped", "escapeRegExp", "e", "text", "fillNewlines", "rendered", "renderInlineTag", "marker", "tag", "display", "displayStart", "renderLinkDefault", "URI", "JSDocDocumentationProvider", "__name", "services", "node", "comment", "isJSDoc", "parseJSDoc", "link", "display", "tag", "name", "description", "_a", "line", "character", "uri", "_node", "_tag", "precomputed", "getDocument", "currentNode", "e", "DefaultCommentProvider", "__name", "services", "node", "isAstNodeWithComment", "_a", "findCommentNode", "event_exports", "__reExport", "DefaultAsyncParser", "__name", "services", "text", "AbstractThreadedAsyncParser", "worker", "deferred", "cancelToken", "Deferred", "timeout", "cancellation", "result", "hydrated", "err", "index", "OperationCancelled", "ParserWorker", "sendMessage", "onMessage", "onError", "terminate", "parseResult", "error", "DefaultWorkspaceLock", "__name", "action", "tokenSource", "queue", "cancellationToken", "deferred", "Deferred", "entry", "entries", "result", "err", "isOperationCancelled", "DefaultHydrator", "__name", "services", "BiMap", "result", "e", "node", "astNodes", "cstNodes", "astNode", "streamAst", "cstNode", "streamCst", "context", "obj", "name", "value", "arr", "item", "isAstNode", "isReference", "reference", "isRootCstNode", "isCompositeCstNode", "child", "isLeafCstNode", "root", "cst", "RootCstNodeImpl", "CompositeCstNodeImpl", "parent", "num", "cstNodeObj", "hydrated", "tokenType", "offset", "length", "startLine", "startColumn", "endLine", "endColumn", "hidden", "LeafCstNodeImpl", "id", "element", "isAbstractElement", "createDefaultCoreModule", "context", "__name", "services", "DefaultCommentProvider", "JSDocDocumentationProvider", "DefaultAsyncParser", "createGrammarConfig", "createLangiumParser", "createCompletionParser", "DefaultValueConverter", "DefaultTokenBuilder", "DefaultLexer", "LangiumParserErrorMessageProvider", "DefaultAstNodeLocator", "DefaultAstNodeDescriptionProvider", "DefaultReferenceDescriptionProvider", "DefaultLinker", "DefaultNameProvider", "DefaultScopeProvider", "DefaultScopeComputation", "DefaultReferences", "DefaultHydrator", "DefaultJsonSerializer", "DefaultDocumentValidator", "ValidationRegistry", "createDefaultSharedCoreModule", "DefaultServiceRegistry", "DefaultLangiumDocuments", "DefaultLangiumDocumentFactory", "DefaultDocumentBuilder", "DefaultIndexManager", "DefaultWorkspaceManager", "DefaultWorkspaceLock", "DefaultConfigurationProvider", "Module", "m1", "m2", "_merge", "inject", "module1", "module2", "module3", "module4", "module5", "module6", "module7", 
"module8", "module9", "module", "_inject", "__name", "isProxy", "eagerLoad", "item", "value", "injector", "proxy", "obj", "prop", "_resolve", "_", "__requested__", "error", "target", "source", "key", "value2", "value1", "utils_exports", "__export", "ast_utils_exports", "BiMap", "cancellation_exports", "ContextCache", "cst_utils_exports", "DONE_RESULT", "Deferred", "Disposable", "DisposableCache", "DocumentCache", "EMPTY_STREAM", "ErrorWithLocation", "grammar_utils_exports", "MultiMap", "OperationCancelled", "Reduction", "regexp_utils_exports", "SimpleCache", "StreamImpl", "TreeStreamImpl", "URI", "UriUtils", "WorkspaceCache", "assertUnreachable", "delayNextTick", "interruptAndCheck", "isOperationCancelled", "loadGrammarFromJson", "setInterruptionPeriod", "startCancelableOperation", "stream", "__reExport", "event_exports", "EmptyFileSystemProvider", "__name", "EmptyFileSystem", "minimalGrammarModule", "__name", "minimalSharedGrammarModule", "LangiumGrammarAstReflection", "createMinimalGrammarServices", "shared", "inject", "createDefaultSharedCoreModule", "EmptyFileSystem", "grammar", "createDefaultCoreModule", "loadGrammarFromJson", "json", "services", "astNode", "URI", "_a", "__reExport", "lib_exports", "utils_exports", "__defProp", "__name", "target", "value", "Statement", "Architecture", "isArchitecture", "item", "reflection", "Branch", "isBranch", "Checkout", "CherryPicking", "Commit", "isCommit", "Common", "isCommon", "GitGraph", "isGitGraph", "Info", "isInfo", "Merge", "isMerge", "Packet", "isPacket", "PacketBlock", "isPacketBlock", "Pie", "isPie", "PieSection", "isPieSection", "Direction", "MermaidAstReflection", "AbstractAstReflection", "subtype", "supertype", "refInfo", "referenceId", "type", "loadedInfoGrammar", "InfoGrammar", "loadGrammarFromJson", "loadedPacketGrammar", "PacketGrammar", "loadedPieGrammar", "PieGrammar", "loadedArchitectureGrammar", "ArchitectureGrammar", "loadedGitGraphGrammar", "GitGraphGrammar", "InfoLanguageMetaData", "PacketLanguageMetaData", "PieLanguageMetaData", "ArchitectureLanguageMetaData", "GitGraphLanguageMetaData", "MermaidGeneratedSharedModule", "InfoGeneratedModule", "PacketGeneratedModule", "PieGeneratedModule", "ArchitectureGeneratedModule", "GitGraphGeneratedModule", "accessibilityDescrRegex", "accessibilityTitleRegex", "titleRegex", "rulesRegexes", "AbstractMermaidValueConverter", "DefaultValueConverter", "rule", "input", "cstNode", "_cstNode", "regex", "match", "CommonValueConverter", "_rule", "_input", "AbstractMermaidTokenBuilder", "DefaultTokenBuilder", "keywords", "rules", "terminalTokens", "options", "tokenTypes", "tokenType", "CommonTokenBuilder"]
}