package.dist.chunks.mermaid.esm.chunk-HBGMPAD7.mjs.map

Markdown-ish syntax for generating flowcharts, mindmaps, sequence diagrams, class diagrams, Gantt charts, git graphs, and more.

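The file below is a standard version-3 source map for the bundled ESM chunk: "sources" lists the original vscode-jsonrpc, langium, and chevrotain modules that were rolled into the chunk, and "sourcesContent" embeds their full source text, so the map is self-contained. As a minimal sketch of how such a map is consumed (assuming Mozilla's source-map npm package and a local copy of the file; the helper name and path are illustrative), a position in the minified chunk can be traced back to the module it came from:

// Sketch only: maps a generated-code position back to its original source.
// Assumes the "source-map" package (>= 0.7) is installed; the file path is illustrative.
import { readFile } from 'node:fs/promises';
import { SourceMapConsumer } from 'source-map';

async function originOf(line: number, column: number) {
    // The JSON below has the shape { "version": 3, "sources": [...], "sourcesContent": [...], ... }
    const raw = JSON.parse(await readFile('chunk-HBGMPAD7.mjs.map', 'utf8'));
    const consumer = await new SourceMapConsumer(raw);
    try {
        // line is 1-based, column is 0-based, per the source-map convention.
        const pos = consumer.originalPositionFor({ line, column });
        // Because sourcesContent is embedded, the original text is available
        // even without the .pnpm store on disk.
        const text = pos.source ? consumer.sourceContentFor(pos.source, true) : null;
        return { ...pos, hasSourceText: text !== null };
    } finally {
        consumer.destroy();
    }
}

Embedding sourcesContent makes the map large (hence a file like this one), but it lets debuggers and error-reporting tools show the original langium/chevrotain sources without resolving anything from node_modules. The map data follows: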
{
  "version": 3,
  "sources": ["../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/ral.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/is.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/events.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-jsonrpc/lib/common/cancellation.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/index.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/cst-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/syntax-tree.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/stream.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/grammar-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/errors.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/languages/generated/ast.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/ast-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/regexp-utils.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/utils.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/character-classes.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/regexp-parser.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/regexp-to-ast/src/base-regexp-visitor.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/languages/grammar-config.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/print.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/timer.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/utils/src/to-fast-properties.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/model.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/visitor.ts", "../../../../../node_modules/.pnpm/@[email protected]/node_modules/@chevrotain/gast/src/helpers.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/rest.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/first.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/constants.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/follow.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/reg_exp_parser.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/reg_exp.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/tokens.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer_errors_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/lexer_public.ts", 
"../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/scan/tokens_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/errors_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/resolver.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/interpreter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/checks.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/exceptions_public.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recoverable.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/keys.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/grammar/llk_lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/looksahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/cst/cst.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/lang/lang_extensions.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/cst/cst_visitor.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/tree_builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/lexer_adapter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recognizer_api.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/recognizer_engine.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/error_handler.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/context_assist.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/gast_recorder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/traits/perf_tracer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/utils/apply_mixins.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/chevrotain/src/parse/parser/parser.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/atn.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/dfa.ts", "../../../../../node_modules/.pnpm/[email protected][email protected]/node_modules/chevrotain-allstar/src/all-star-lookahead.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-languageserver-types/lib/esm/main.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/cst-node-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/langium-parser.ts", 
"../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/parser-builder-base.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/completion-parser-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/langium-parser-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/token-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/value-converter.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/cancellation.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/promise-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-languageserver-textdocument/lib/esm/main.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/node_modules/path-browserify/index.js", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/bootstrap", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/define property getters", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/hasOwnProperty shorthand", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/webpack/runtime/make namespace object", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/platform.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/uri.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/vscode-uri/lib/esm/webpack:/LIB/src/utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/uri-utils.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/documents.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/linker.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/name-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/references.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/collections.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope-computation.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/caching.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/references/scope-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/serializer/json-serializer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/service-registry.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/validation/validation-registry.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/validation/document-validator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/ast-descriptions.ts", "../../../../../node_modules/.pnpm/[email 
protected]/node_modules/langium/src/workspace/ast-node-locator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/configuration.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/disposable.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/document-builder.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/index-manager.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/workspace-manager.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/lexer.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/jsdoc.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/documentation-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/documentation/comment-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/event.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/parser/async-parser.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/workspace-lock.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/serializer/hydrator.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/default-module.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/dependency-injection.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/index.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/workspace/file-system-provider.ts", "../../../../../node_modules/.pnpm/[email protected]/node_modules/langium/src/utils/grammar-loader.ts", "../../../../parser/dist/chunks/mermaid-parser.core/chunk-U3TRWOCV.mjs"],
  "sourcesContent": ["\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nlet _ral;\nfunction RAL() {\n    if (_ral === undefined) {\n        throw new Error(`No runtime abstraction layer installed`);\n    }\n    return _ral;\n}\n(function (RAL) {\n    function install(ral) {\n        if (ral === undefined) {\n            throw new Error(`No runtime abstraction layer provided`);\n        }\n        _ral = ral;\n    }\n    RAL.install = install;\n})(RAL || (RAL = {}));\nexports.default = RAL;\n", "\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.stringArray = exports.array = exports.func = exports.error = exports.number = exports.string = exports.boolean = void 0;\nfunction boolean(value) {\n    return value === true || value === false;\n}\nexports.boolean = boolean;\nfunction string(value) {\n    return typeof value === 'string' || value instanceof String;\n}\nexports.string = string;\nfunction number(value) {\n    return typeof value === 'number' || value instanceof Number;\n}\nexports.number = number;\nfunction error(value) {\n    return value instanceof Error;\n}\nexports.error = error;\nfunction func(value) {\n    return typeof value === 'function';\n}\nexports.func = func;\nfunction array(value) {\n    return Array.isArray(value);\n}\nexports.array = array;\nfunction stringArray(value) {\n    return array(value) && value.every(elem => string(elem));\n}\nexports.stringArray = stringArray;\n", "\"use strict\";\n/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. 
See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Emitter = exports.Event = void 0;\nconst ral_1 = require(\"./ral\");\nvar Event;\n(function (Event) {\n    const _disposable = { dispose() { } };\n    Event.None = function () { return _disposable; };\n})(Event || (exports.Event = Event = {}));\nclass CallbackList {\n    add(callback, context = null, bucket) {\n        if (!this._callbacks) {\n            this._callbacks = [];\n            this._contexts = [];\n        }\n        this._callbacks.push(callback);\n        this._contexts.push(context);\n        if (Array.isArray(bucket)) {\n            bucket.push({ dispose: () => this.remove(callback, context) });\n        }\n    }\n    remove(callback, context = null) {\n        if (!this._callbacks) {\n            return;\n        }\n        let foundCallbackWithDifferentContext = false;\n        for (let i = 0, len = this._callbacks.length; i < len; i++) {\n            if (this._callbacks[i] === callback) {\n                if (this._contexts[i] === context) {\n                    // callback & context match => remove it\n                    this._callbacks.splice(i, 1);\n                    this._contexts.splice(i, 1);\n                    return;\n                }\n                else {\n                    foundCallbackWithDifferentContext = true;\n                }\n            }\n        }\n        if (foundCallbackWithDifferentContext) {\n            throw new Error('When adding a listener with a context, you should remove it with the same context');\n        }\n    }\n    invoke(...args) {\n        if (!this._callbacks) {\n            return [];\n        }\n        const ret = [], callbacks = this._callbacks.slice(0), contexts = this._contexts.slice(0);\n        for (let i = 0, len = callbacks.length; i < len; i++) {\n            try {\n                ret.push(callbacks[i].apply(contexts[i], args));\n            }\n            catch (e) {\n                // eslint-disable-next-line no-console\n                (0, ral_1.default)().console.error(e);\n            }\n        }\n        return ret;\n    }\n    isEmpty() {\n        return !this._callbacks || this._callbacks.length === 0;\n    }\n    dispose() {\n        this._callbacks = undefined;\n        this._contexts = undefined;\n    }\n}\nclass Emitter {\n    constructor(_options) {\n        this._options = _options;\n    }\n    /**\n     * For the public to allow to subscribe\n     * to events from this Emitter\n     */\n    get event() {\n        if (!this._event) {\n            this._event = (listener, thisArgs, disposables) => {\n                if (!this._callbacks) {\n                    this._callbacks = new CallbackList();\n                }\n                if (this._options && this._options.onFirstListenerAdd && this._callbacks.isEmpty()) {\n                    this._options.onFirstListenerAdd(this);\n                }\n                this._callbacks.add(listener, thisArgs);\n                const result = {\n                    dispose: () => {\n                        if (!this._callbacks) {\n                            // disposable is disposed after emitter is disposed.\n                            return;\n                        }\n                        this._callbacks.remove(listener, thisArgs);\n                        result.dispose = Emitter._noop;\n                        
if (this._options && this._options.onLastListenerRemove && this._callbacks.isEmpty()) {\n                            this._options.onLastListenerRemove(this);\n                        }\n                    }\n                };\n                if (Array.isArray(disposables)) {\n                    disposables.push(result);\n                }\n                return result;\n            };\n        }\n        return this._event;\n    }\n    /**\n     * To be kept private to fire an event to\n     * subscribers\n     */\n    fire(event) {\n        if (this._callbacks) {\n            this._callbacks.invoke.call(this._callbacks, event);\n        }\n    }\n    dispose() {\n        if (this._callbacks) {\n            this._callbacks.dispose();\n            this._callbacks = undefined;\n        }\n    }\n}\nexports.Emitter = Emitter;\nEmitter._noop = function () { };\n", "\"use strict\";\n/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CancellationTokenSource = exports.CancellationToken = void 0;\nconst ral_1 = require(\"./ral\");\nconst Is = require(\"./is\");\nconst events_1 = require(\"./events\");\nvar CancellationToken;\n(function (CancellationToken) {\n    CancellationToken.None = Object.freeze({\n        isCancellationRequested: false,\n        onCancellationRequested: events_1.Event.None\n    });\n    CancellationToken.Cancelled = Object.freeze({\n        isCancellationRequested: true,\n        onCancellationRequested: events_1.Event.None\n    });\n    function is(value) {\n        const candidate = value;\n        return candidate && (candidate === CancellationToken.None\n            || candidate === CancellationToken.Cancelled\n            || (Is.boolean(candidate.isCancellationRequested) && !!candidate.onCancellationRequested));\n    }\n    CancellationToken.is = is;\n})(CancellationToken || (exports.CancellationToken = CancellationToken = {}));\nconst shortcutEvent = Object.freeze(function (callback, context) {\n    const handle = (0, ral_1.default)().timer.setTimeout(callback.bind(context), 0);\n    return { dispose() { handle.dispose(); } };\n});\nclass MutableToken {\n    constructor() {\n        this._isCancelled = false;\n    }\n    cancel() {\n        if (!this._isCancelled) {\n            this._isCancelled = true;\n            if (this._emitter) {\n                this._emitter.fire(undefined);\n                this.dispose();\n            }\n        }\n    }\n    get isCancellationRequested() {\n        return this._isCancelled;\n    }\n    get onCancellationRequested() {\n        if (this._isCancelled) {\n            return shortcutEvent;\n        }\n        if (!this._emitter) {\n            this._emitter = new events_1.Emitter();\n        }\n        return this._emitter.event;\n    }\n    dispose() {\n        if (this._emitter) {\n            this._emitter.dispose();\n            this._emitter = undefined;\n        }\n    }\n}\nclass CancellationTokenSource {\n    get token() {\n        if (!this._token) {\n            // be lazy and create the token only when\n            // actually needed\n            this._token = new MutableToken();\n        }\n        return this._token;\n    }\n    cancel() 
{\n        if (!this._token) {\n            // save an object by returning the default\n            // cancelled token when cancellation happens\n            // before someone asks for the token\n            this._token = CancellationToken.Cancelled;\n        }\n        else {\n            this._token.cancel();\n        }\n    }\n    dispose() {\n        if (!this._token) {\n            // ensure to initialize with an empty token if we had none\n            this._token = CancellationToken.None;\n        }\n        else if (this._token instanceof MutableToken) {\n            // actually dispose\n            this._token.dispose();\n        }\n    }\n}\nexports.CancellationTokenSource = CancellationTokenSource;\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport * from './default-module.js';\nexport * from './dependency-injection.js';\nexport * from './service-registry.js';\nexport * from './services.js';\nexport * from './syntax-tree.js';\nexport * from './documentation/index.js';\nexport * from './languages/index.js';\nexport * from './parser/index.js';\nexport * from './references/index.js';\nexport * from './serializer/index.js';\nexport * from './utils/index.js';\nexport * from './validation/index.js';\nexport * from './workspace/index.js';\n\n// Export the Langium Grammar AST definitions in the `GrammarAST` namespace\nimport * as GrammarAST from './languages/generated/ast.js';\nimport type { Grammar } from './languages/generated/ast.js';\nexport { Grammar, GrammarAST };\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IToken } from '@chevrotain/types';\nimport type { Range } from 'vscode-languageserver-types';\nimport type { CstNode, CompositeCstNode, LeafCstNode } from '../syntax-tree.js';\nimport type { DocumentSegment } from '../workspace/documents.js';\nimport type { Stream, TreeStream } from './stream.js';\nimport { isCompositeCstNode, isLeafCstNode, isRootCstNode } from '../syntax-tree.js';\nimport { TreeStreamImpl } from './stream.js';\n\n/**\n * Create a stream of all CST nodes that are directly and indirectly contained in the given root node,\n * including the root node itself.\n */\nexport function streamCst(node: CstNode): TreeStream {\n    return new TreeStreamImpl(node, element => {\n        if (isCompositeCstNode(element)) {\n            return element.content;\n        } else {\n            return [];\n        }\n    }, { includeRoot: true });\n}\n\n/**\n * Create a stream of all leaf nodes that are directly and indirectly contained in the given root node.\n */\nexport function flattenCst(node: CstNode): Stream {\n    return streamCst(node).filter(isLeafCstNode);\n}\n\n/**\n * Determines whether the specified cst node is a child of the specified parent node.\n */\nexport function isChildNode(child: CstNode, parent: CstNode): boolean {\n    while (child.container) {\n        child = child.container;\n        if (child === parent) {\n            return true;\n        
}\n    }\n    return false;\n}\n\nexport function tokenToRange(token: IToken): Range {\n    // Chevrotain uses 1-based indices everywhere\n    // So we subtract 1 from every value to align with the LSP\n    return {\n        start: {\n            character: token.startColumn! - 1,\n            line: token.startLine! - 1\n        },\n        end: {\n            character: token.endColumn!, // endColumn uses the correct index\n            line: token.endLine! - 1\n        }\n    };\n}\n\nexport function toDocumentSegment(node: CstNode): DocumentSegment;\nexport function toDocumentSegment(node?: CstNode): DocumentSegment | undefined;\nexport function toDocumentSegment(node?: CstNode): DocumentSegment | undefined {\n    if (!node) {\n        return undefined;\n    }\n    const { offset, end, range } = node;\n    return {\n        range,\n        offset,\n        end,\n        length: end - offset\n    };\n}\n\nexport enum RangeComparison {\n    Before = 0,\n    After = 1,\n    OverlapFront = 2,\n    OverlapBack = 3,\n    Inside = 4\n}\n\nexport function compareRange(range: Range, to: Range): RangeComparison {\n    if (range.end.line < to.start.line || (range.end.line === to.start.line && range.end.character < range.start.character)) {\n        return RangeComparison.Before;\n    } else if (range.start.line > to.end.line || (range.start.line === to.end.line && range.start.character > to.end.character)) {\n        return RangeComparison.After;\n    }\n    const startInside = range.start.line > to.start.line || (range.start.line === to.start.line && range.start.character >= to.start.character);\n    const endInside = range.end.line < to.end.line || (range.end.line === to.end.line && range.end.character <= to.end.character);\n    if (startInside && endInside) {\n        return RangeComparison.Inside;\n    } else if (startInside) {\n        return RangeComparison.OverlapBack;\n    } else {\n        return RangeComparison.OverlapFront;\n    }\n}\n\nexport function inRange(range: Range, to: Range): boolean {\n    const comparison = compareRange(range, to);\n    return comparison > RangeComparison.After;\n}\n\n// The \\p{L} regex matches any unicode letter character, i.e. 
characters from non-english alphabets\n// Together with \\w it matches any kind of character which can commonly appear in IDs\nexport const DefaultNameRegexp = /^[\\w\\p{L}]$/u;\n\n/**\n * Performs `findLeafNodeAtOffset` with a minor difference: When encountering a character that matches the `nameRegexp` argument,\n * it will instead return the leaf node at the `offset - 1` position.\n *\n * For LSP services, users expect that the declaration of an element is available if the cursor is directly after the element.\n */\nexport function findDeclarationNodeAtOffset(cstNode: CstNode | undefined, offset: number, nameRegexp = DefaultNameRegexp): LeafCstNode | undefined {\n    if (cstNode) {\n        if (offset > 0) {\n            const localOffset = offset - cstNode.offset;\n            const textAtOffset = cstNode.text.charAt(localOffset);\n            if (!nameRegexp.test(textAtOffset)) {\n                offset--;\n            }\n        }\n        return findLeafNodeAtOffset(cstNode, offset);\n    }\n    return undefined;\n}\n\nexport function findCommentNode(cstNode: CstNode | undefined, commentNames: string[]): CstNode | undefined {\n    if (cstNode) {\n        const previous = getPreviousNode(cstNode, true);\n        if (previous && isCommentNode(previous, commentNames)) {\n            return previous;\n        }\n        if (isRootCstNode(cstNode)) {\n            // Go from the first non-hidden node through all nodes in reverse order\n            // We do this to find the comment node which directly precedes the root node\n            const endIndex = cstNode.content.findIndex(e => !e.hidden);\n            for (let i = endIndex - 1; i >= 0; i--) {\n                const child = cstNode.content[i];\n                if (isCommentNode(child, commentNames)) {\n                    return child;\n                }\n            }\n        }\n    }\n    return undefined;\n}\n\nexport function isCommentNode(cstNode: CstNode, commentNames: string[]): boolean {\n    return isLeafCstNode(cstNode) && commentNames.includes(cstNode.tokenType.name);\n}\n\n/**\n * Finds the leaf CST node at the specified 0-based string offset.\n * Note that the given offset will be within the range of the returned leaf node.\n *\n * If the offset does not point to a CST node (but just white space), this method will return `undefined`.\n *\n * @param node The CST node to search through.\n * @param offset The specified offset.\n * @returns The CST node at the specified offset.\n */\nexport function findLeafNodeAtOffset(node: CstNode, offset: number): LeafCstNode | undefined {\n    if (isLeafCstNode(node)) {\n        return node;\n    } else if (isCompositeCstNode(node)) {\n        const searchResult = binarySearch(node, offset, false);\n        if (searchResult) {\n            return findLeafNodeAtOffset(searchResult, offset);\n        }\n    }\n    return undefined;\n}\n\n/**\n * Finds the leaf CST node at the specified 0-based string offset.\n * If no CST node exists at the specified position, it will return the leaf node before it.\n *\n * If there is no leaf node before the specified offset, this method will return `undefined`.\n *\n * @param node The CST node to search through.\n * @param offset The specified offset.\n * @returns The CST node closest to the specified offset.\n */\nexport function findLeafNodeBeforeOffset(node: CstNode, offset: number): LeafCstNode | undefined {\n    if (isLeafCstNode(node)) {\n        return node;\n    } else if (isCompositeCstNode(node)) {\n        const searchResult = 
binarySearch(node, offset, true);\n        if (searchResult) {\n            return findLeafNodeBeforeOffset(searchResult, offset);\n        }\n    }\n    return undefined;\n}\n\nfunction binarySearch(node: CompositeCstNode, offset: number, closest: boolean): CstNode | undefined {\n    let left = 0;\n    let right = node.content.length - 1;\n    let closestNode: CstNode | undefined = undefined;\n\n    while (left <= right) {\n        const middle = Math.floor((left + right) / 2);\n        const middleNode = node.content[middle];\n\n        if (middleNode.offset <= offset && middleNode.end > offset) {\n            // Found an exact match\n            return middleNode;\n        }\n\n        if (middleNode.end <= offset) {\n            // Update the closest node (less than offset) and move to the right half\n            closestNode = closest ? middleNode : undefined;\n            left = middle + 1;\n        } else {\n            // Move to the left half\n            right = middle - 1;\n        }\n    }\n\n    return closestNode;\n}\n\nexport function getPreviousNode(node: CstNode, hidden = true): CstNode | undefined {\n    while (node.container) {\n        const parent = node.container;\n        let index = parent.content.indexOf(node);\n        while (index > 0) {\n            index--;\n            const previous = parent.content[index];\n            if (hidden || !previous.hidden) {\n                return previous;\n            }\n        }\n        node = parent;\n    }\n    return undefined;\n}\n\nexport function getNextNode(node: CstNode, hidden = true): CstNode | undefined {\n    while (node.container) {\n        const parent = node.container;\n        let index = parent.content.indexOf(node);\n        const last = parent.content.length - 1;\n        while (index < last) {\n            index++;\n            const next = parent.content[index];\n            if (hidden || !next.hidden) {\n                return next;\n            }\n        }\n        node = parent;\n    }\n    return undefined;\n}\n\nexport function getStartlineNode(node: CstNode): CstNode {\n    if (node.range.start.character === 0) {\n        return node;\n    }\n    const line = node.range.start.line;\n    let last = node;\n    let index: number | undefined;\n    while (node.container) {\n        const parent = node.container;\n        const selfIndex = index ?? 
parent.content.indexOf(node);\n        if (selfIndex === 0) {\n            node = parent;\n            index = undefined;\n        } else {\n            index = selfIndex - 1;\n            node = parent.content[index];\n        }\n        if (node.range.start.line !== line) {\n            break;\n        }\n        last = node;\n    }\n    return last;\n}\n\nexport function getInteriorNodes(start: CstNode, end: CstNode): CstNode[] {\n    const commonParent = getCommonParent(start, end);\n    if (!commonParent) {\n        return [];\n    }\n    return commonParent.parent.content.slice(commonParent.a + 1, commonParent.b);\n}\n\nfunction getCommonParent(a: CstNode, b: CstNode): CommonParent | undefined {\n    const aParents = getParentChain(a);\n    const bParents = getParentChain(b);\n    let current: CommonParent | undefined;\n    for (let i = 0; i < aParents.length && i < bParents.length; i++) {\n        const aParent = aParents[i];\n        const bParent = bParents[i];\n        if (aParent.parent === bParent.parent) {\n            current = {\n                parent: aParent.parent,\n                a: aParent.index,\n                b: bParent.index\n            };\n        } else {\n            break;\n        }\n    }\n    return current;\n}\n\ninterface CommonParent {\n    parent: CompositeCstNode\n    a: number\n    b: number\n}\n\nfunction getParentChain(node: CstNode): ParentLink[] {\n    const chain: ParentLink[] = [];\n    while (node.container) {\n        const parent = node.container;\n        const index = parent.content.indexOf(node);\n        chain.push({\n            parent,\n            index\n        });\n        node = parent;\n    }\n    return chain.reverse();\n}\n\ninterface ParentLink {\n    parent: CompositeCstNode\n    index: number\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { TokenType } from 'chevrotain';\nimport type { URI } from './utils/uri-utils.js';\nimport type { AbstractElement } from './languages/generated/ast.js';\nimport type { DocumentSegment, LangiumDocument } from './workspace/documents.js';\n\n/**\n * A node in the Abstract Syntax Tree (AST).\n */\nexport interface AstNode {\n    /** Every AST node has a type corresponding to what was specified in the grammar declaration. */\n    readonly $type: string;\n    /** The container node in the AST; every node except the root node has a container. */\n    readonly $container?: AstNode;\n    /** The property of the `$container` node that contains this node. This is either a direct reference or an array. */\n    readonly $containerProperty?: string;\n    /** In case `$containerProperty` is an array, the array index is stored here. */\n    readonly $containerIndex?: number;\n    /** The Concrete Syntax Tree (CST) node of the text range from which this node was parsed. */\n    readonly $cstNode?: CstNode;\n    /** The document containing the AST; only the root node has a direct reference to the document. 
*/\n    readonly $document?: LangiumDocument;\n}\n\nexport function isAstNode(obj: unknown): obj is AstNode {\n    return typeof obj === 'object' && obj !== null && typeof (obj as AstNode).$type === 'string';\n}\n\nexport interface GenericAstNode extends AstNode {\n    [key: string]: unknown\n}\n\ntype SpecificNodeProperties = keyof Omit;\n\n/**\n * The property names of a given AST node type.\n */\nexport type Properties = SpecificNodeProperties extends never ? string : SpecificNodeProperties\n\n/**\n * A cross-reference in the AST. Cross-references may or may not be successfully resolved.\n */\nexport interface Reference {\n    /**\n     * The target AST node of this reference. Accessing this property may trigger cross-reference\n     * resolution by the `Linker` in case it has not been done yet. If the reference cannot be resolved,\n     * the value is `undefined`.\n     */\n    readonly ref?: T;\n\n    /** If any problem occurred while resolving the reference, it is described by this property. */\n    readonly error?: LinkingError;\n    /** The CST node from which the reference was parsed */\n    readonly $refNode?: CstNode;\n    /** The actual text used to look up in the surrounding scope */\n    readonly $refText: string;\n    /** The node description for the AstNode returned by `ref`  */\n    readonly $nodeDescription?: AstNodeDescription;\n}\n\nexport function isReference(obj: unknown): obj is Reference {\n    return typeof obj === 'object' && obj !== null && typeof (obj as Reference).$refText === 'string';\n}\n\nexport type ResolvedReference = Reference & {\n    readonly ref: T;\n}\n\n/**\n * A description of an AST node is used when constructing scopes and looking up cross-reference targets.\n */\nexport interface AstNodeDescription {\n    /** The target node; should be present only for local references (linking to the same document). */\n    node?: AstNode;\n    /**\n     * The document segment that represents the range of the name of the AST node.\n     */\n    nameSegment?: DocumentSegment;\n    /**\n     * The document segment that represents the full range of the AST node.\n     */\n    selectionSegment?: DocumentSegment;\n    /** `$type` property value of the AST node */\n    type: string;\n    /** Name of the AST node; this is usually determined by the `NameProvider` service. */\n    name: string;\n    /** URI to the document containing the AST node */\n    documentUri: URI;\n    /** Navigation path inside the document */\n    path: string;\n}\n\nexport function isAstNodeDescription(obj: unknown): obj is AstNodeDescription {\n    return typeof obj === 'object' && obj !== null\n        && typeof (obj as AstNodeDescription).name === 'string'\n        && typeof (obj as AstNodeDescription).type === 'string'\n        && typeof (obj as AstNodeDescription).path === 'string';\n}\n\n/**\n * Information about a cross-reference. 
This is used when traversing references in an AST or to describe\n * unresolved references.\n */\nexport interface ReferenceInfo {\n    reference: Reference\n    container: AstNode\n    property: string\n    index?: number\n}\n\n/**\n * Used to collect information when the `Linker` service fails to resolve a cross-reference.\n */\nexport interface LinkingError extends ReferenceInfo {\n    message: string;\n    targetDescription?: AstNodeDescription;\n}\n\nexport function isLinkingError(obj: unknown): obj is LinkingError {\n    return typeof obj === 'object' && obj !== null\n        && isAstNode((obj as LinkingError).container)\n        && isReference((obj as LinkingError).reference)\n        && typeof (obj as LinkingError).message === 'string';\n}\n\n/**\n * Service used for generic access to the structure of the AST. This service is shared between\n * all involved languages, so it operates on the superset of types of these languages.\n */\nexport interface AstReflection {\n    getAllTypes(): string[]\n    getAllSubTypes(type: string): string[]\n    getReferenceType(refInfo: ReferenceInfo): string\n    getTypeMetaData(type: string): TypeMetaData\n    isInstance(node: unknown, type: string): boolean\n    isSubtype(subtype: string, supertype: string): boolean\n}\n\n/**\n * An abstract implementation of the {@link AstReflection} interface.\n * Serves to cache subtype computation results to improve performance throughout different parts of Langium.\n */\nexport abstract class AbstractAstReflection implements AstReflection {\n\n    protected subtypes: Record> = {};\n    protected allSubtypes: Record = {};\n\n    abstract getAllTypes(): string[];\n    abstract getReferenceType(refInfo: ReferenceInfo): string;\n    abstract getTypeMetaData(type: string): TypeMetaData;\n    protected abstract computeIsSubtype(subtype: string, supertype: string): boolean;\n\n    isInstance(node: unknown, type: string): boolean {\n        return isAstNode(node) && this.isSubtype(node.$type, type);\n    }\n\n    isSubtype(subtype: string, supertype: string): boolean {\n        if (subtype === supertype) {\n            return true;\n        }\n        let nested = this.subtypes[subtype];\n        if (!nested) {\n            nested = this.subtypes[subtype] = {};\n        }\n        const existing = nested[supertype];\n        if (existing !== undefined) {\n            return existing;\n        } else {\n            const result = this.computeIsSubtype(subtype, supertype);\n            nested[supertype] = result;\n            return result;\n        }\n    }\n\n    getAllSubTypes(type: string): string[] {\n        const existing = this.allSubtypes[type];\n        if (existing) {\n            return existing;\n        } else {\n            const allTypes = this.getAllTypes();\n            const types: string[] = [];\n            for (const possibleSubType of allTypes) {\n                if (this.isSubtype(possibleSubType, type)) {\n                    types.push(possibleSubType);\n                }\n            }\n            this.allSubtypes[type] = types;\n            return types;\n        }\n    }\n}\n\n/**\n * Represents runtime meta data about a meta model type.\n */\nexport interface TypeMetaData {\n    /** The name of this meta model type. Corresponds to the `AstNode.$type` value. */\n    name: string\n    /** A list of properties. They can contain default values for their respective property in the AST. 
*/\n    properties: TypeProperty[]\n}\n\n/**\n * Describes the meta data of a property of an AST node.\n *\n * The optional `defaultValue` indicates that the property is mandatory in the AST node.\n * For example, if an AST node contains an array, but no elements of this array have been parsed, we still expect an empty array instead of `undefined`.\n */\nexport interface TypeProperty {\n    name: string\n    defaultValue?: PropertyType\n}\n\n/**\n * Represents a default value for an AST property.\n */\nexport type PropertyType = number | string | boolean | PropertyType[];\n\n/**\n * A node in the Concrete Syntax Tree (CST).\n */\nexport interface CstNode extends DocumentSegment {\n    /** The container node in the CST */\n    readonly container?: CompositeCstNode;\n    /** @deprecated use `container` instead. */\n    readonly parent?: CompositeCstNode;\n    /** The actual text */\n    readonly text: string;\n    /** The root CST node */\n    readonly root: RootCstNode;\n    /** The grammar element from which this node was parsed */\n    readonly grammarSource: AbstractElement;\n    /** @deprecated use `grammarSource` instead. */\n    readonly feature: AbstractElement;\n    /** The AST node created from this CST node */\n    readonly astNode: AstNode;\n    /** @deprecated use `astNode` instead. */\n    readonly element: AstNode;\n    /** Whether the token is hidden, i.e. not explicitly part of the containing grammar rule */\n    readonly hidden: boolean;\n}\n\n/**\n * A composite CST node contains other nodes, but no directly associated token.\n */\nexport interface CompositeCstNode extends CstNode {\n    readonly content: CstNode[];\n    /** @deprecated use `content` instead. */\n    readonly children: CstNode[];\n}\n\nexport function isCompositeCstNode(node: unknown): node is CompositeCstNode {\n    return typeof node === 'object' && node !== null && Array.isArray((node as CompositeCstNode).content);\n}\n\n/**\n * A leaf CST node corresponds to a token in the input token stream.\n */\nexport interface LeafCstNode extends CstNode {\n    readonly tokenType: TokenType;\n}\n\nexport function isLeafCstNode(node: unknown): node is LeafCstNode {\n    return typeof node === 'object' && node !== null && typeof (node as LeafCstNode).tokenType === 'object';\n}\n\nexport interface RootCstNode extends CompositeCstNode {\n    readonly fullText: string\n}\n\nexport function isRootCstNode(node: unknown): node is RootCstNode {\n    return isCompositeCstNode(node) && typeof (node as RootCstNode).fullText === 'string';\n}\n\n/**\n * Returns a type to have only properties names (!) of a type T whose property value is of a certain type K.\n */\ntype ExtractKeysOfValueType = { [I in keyof T]: T[I] extends K ? I : never }[keyof T];\n\n/**\n * Returns the property names (!) 
of an AstNode that are cross-references.\n * Meant to be used during cross-reference resolution in combination with `assertUnreachable(context.property)`.\n */\nexport type CrossReferencesOfAstNodeType = (\n    ExtractKeysOfValueType\n    | ExtractKeysOfValueType|undefined>\n// eslint-disable-next-line @typescript-eslint/ban-types\n) & {};\n\n/**\n * Represents the enumeration-like type, that lists all AstNode types of your grammar.\n */\nexport type AstTypeList = Record;\n\n/**\n * Returns all types that contain cross-references, A is meant to be the interface `XXXAstType` fromm your generated `ast.ts` file.\n * Meant to be used during cross-reference resolution in combination with `assertUnreachable(context.container)`.\n */\nexport type AstNodeTypesWithCrossReferences> = {\n    [T in keyof A]: CrossReferencesOfAstNodeType extends never ? never : A[T]\n}[keyof A];\n\nexport type Mutable = {\n    -readonly [P in keyof T]: T[P]\n};\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/**\n * A stream is a read-only sequence of values. While the contents of an array can be accessed\n * both sequentially and randomly (via index), a stream allows only sequential access.\n *\n * The advantage of this is that a stream can be evaluated lazily, so it does not require\n * to store intermediate values. This can boost performance when a large sequence is\n * processed via filtering, mapping etc. and accessed at most once. However, lazy\n * evaluation means that all processing is repeated when you access the sequence multiple\n * times; in such a case, it may be better to store the resulting sequence into an array.\n */\nexport interface Stream extends Iterable {\n\n    /**\n     * Returns an iterator for this stream. This is the same as calling the `Symbol.iterator` function property.\n     */\n    iterator(): IterableIterator;\n\n    /**\n     * Determines whether this stream contains no elements.\n     */\n    isEmpty(): boolean;\n\n    /**\n     * Determines the number of elements in this stream.\n     */\n    count(): number;\n\n    /**\n     * Collects all elements of this stream into an array.\n     */\n    toArray(): T[];\n\n    /**\n     * Collects all elements of this stream into a Set.\n     */\n    toSet(): Set;\n\n    /**\n     * Collects all elements of this stream into a Map, applying the provided functions to determine keys and values.\n     *\n     * @param keyFn The function to derive map keys. If omitted, the stream elements are used as keys.\n     * @param valueFn The function to derive map values. 
If omitted, the stream elements are used as values.\n     */\n    toMap(keyFn?: (e: T) => K, valueFn?: (e: T) => V): Map;\n\n    /**\n     * Returns a string representation of a stream.\n     */\n    toString(): string;\n\n    /**\n     * Combines two streams by returning a new stream that yields all elements of this stream and the other stream.\n     *\n     * @param other Stream to be concatenated with this one.\n     */\n    concat(other: Iterable): Stream;\n\n    /**\n     * Adds all elements of the stream into a string, separated by the specified separator string.\n     *\n     * @param separator A string used to separate one element of the stream from the next in the resulting string.\n     *        If omitted, the steam elements are separated with a comma.\n     */\n    join(separator?: string): string\n\n    /**\n     * Returns the index of the first occurrence of a value in the stream, or -1 if it is not present.\n     *\n     * @param searchElement The value to locate in the array.\n     * @param fromIndex The stream index at which to begin the search. If fromIndex is omitted, the search\n     *        starts at index 0.\n     */\n    indexOf(searchElement: T, fromIndex?: number): number;\n\n    /**\n     * Determines whether all members of the stream satisfy the specified test.\n     *\n     * @param predicate This method calls the predicate function for each element in the stream until the\n     *        predicate returns a value which is coercible to the Boolean value `false`, or until the end\n     *        of the stream.\n     */\n    every(predicate: (value: T) => value is S): this is Stream;\n    every(predicate: (value: T) => unknown): boolean;\n\n    /**\n     * Determines whether any member of the stream satisfies the specified test.\n     *\n     * @param predicate This method calls the predicate function for each element in the stream until the\n     *        predicate returns a value which is coercible to the Boolean value `true`, or until the end\n     *        of the stream.\n     */\n    some(predicate: (value: T) => unknown): boolean;\n\n    /**\n     * Performs the specified action for each element in the stream.\n     *\n     * @param callbackfn Function called once for each element in the stream.\n     */\n    forEach(callbackfn: (value: T, index: number) => void): void;\n\n    /**\n     * Returns a stream that yields the results of calling the specified callback function on each element\n     * of the stream. 
The function is called when the resulting stream elements are actually accessed, so\n     * accessing the resulting stream multiple times means the function is also called multiple times for\n     * each element of the stream.\n     *\n     * @param callbackfn Lazily evaluated function mapping stream elements.\n     */\n    map(callbackfn: (value: T) => U): Stream;\n\n    /**\n     * Returns the elements of the stream that meet the condition specified in a callback function.\n     * The function is called when the resulting stream elements are actually accessed, so accessing the\n     * resulting stream multiple times means the function is also called multiple times for each element\n     * of the stream.\n     *\n     * @param predicate Lazily evaluated function checking a condition on stream elements.\n     */\n    filter(predicate: (value: T) => value is S): Stream;\n    filter(predicate: (value: T) => unknown): Stream;\n\n    /**\n     * Returns the elements of the stream that are _non-nullable_, which means they are neither `undefined`\n     * nor `null`.\n     */\n    nonNullable(): Stream>;\n\n    /**\n     * Calls the specified callback function for all elements in the stream. The return value of the\n     * callback function is the accumulated result, and is provided as an argument in the next call to\n     * the callback function.\n     *\n     * @param callbackfn This method calls the function once for each element in the stream, providing\n     *        the previous and current values of the reduction.\n     * @param initialValue If specified, `initialValue` is used as the initial value to start the\n     *        accumulation. The first call to the function provides this value as an argument instead\n     *        of a stream value.\n     */\n    reduce(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduce(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n\n    /**\n     * Calls the specified callback function for all elements in the stream, in descending order.\n     * The return value of the callback function is the accumulated result, and is provided as an\n     * argument in the next call to the callback function.\n     *\n     * @param callbackfn This method calls the function once for each element in the stream, providing\n     *        the previous and current values of the reduction.\n     * @param initialValue If specified, `initialValue` is used as the initial value to start the\n     *        accumulation. 
The first call to the function provides this value as an argument instead\n     *        of an array value.\n     */\n    reduceRight(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduceRight(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n\n    /**\n     * Returns the value of the first element in the stream that meets the condition, or `undefined`\n     * if there is no such element.\n     *\n     * @param predicate This method calls `predicate` once for each element of the stream, in ascending\n     *        order, until it finds one where `predicate` returns a value which is coercible to the\n     *        Boolean value `true`.\n     */\n    find(predicate: (value: T) => value is S): S | undefined;\n    find(predicate: (value: T) => unknown): T | undefined;\n\n    /**\n     * Returns the index of the first element in the stream that meets the condition, or `-1`\n     * if there is no such element.\n     *\n     * @param predicate This method calls `predicate` once for each element of the stream, in ascending\n     *        order, until it finds one where `predicate` returns a value which is coercible to the\n     *        Boolean value `true`.\n     */\n    findIndex(predicate: (value: T) => unknown): number;\n\n    /**\n     * Determines whether the stream includes a certain element, returning `true` or `false` as appropriate.\n     *\n     * @param searchElement The element to search for.\n     */\n    includes(searchElement: T): boolean;\n\n    /**\n     * Calls a defined callback function on each element of the stream and then flattens the result into\n     * a new stream. This is identical to a `map` followed by `flat` with depth 1.\n     *\n     * @param callbackfn Lazily evaluated function mapping stream elements.\n     */\n    flatMap(callbackfn: (value: T) => U | Iterable): Stream;\n\n    /**\n     * Returns a new stream with all sub-stream or sub-array elements concatenated into it recursively up\n     * to the specified depth.\n     *\n     * @param depth The maximum recursion depth. Defaults to 1.\n     */\n    flat(depth?: D): FlatStream;\n\n    /**\n     * Returns the first element in the stream, or `undefined` if the stream is empty.\n     */\n    head(): T | undefined;\n\n    /**\n     * Returns a stream that skips the first `skipCount` elements from this stream.\n     *\n     * @param skipCount The number of elements to skip. If this is larger than the number of elements in\n     *        the stream, an empty stream is returned. 
Defaults to 1.\n     */\n    tail(skipCount?: number): Stream;\n\n    /**\n     * Returns a stream consisting of the elements of this stream, truncated to be no longer than `maxSize`\n     * in length.\n     *\n     * @param maxSize The number of elements the stream should be limited to\n     */\n    limit(maxSize: number): Stream;\n\n    /**\n     * Returns a stream containing only the distinct elements from this stream.\n     * Equality is determined with the same rules as a standard `Set`.\n     *\n     * @param by A function returning the key used to check equality with a previous stream element.\n     *        If omitted, the stream elements themselves are used for comparison.\n     */\n    distinct(by?: (element: T) => Key): Stream;\n\n    /**\n     * Returns a stream that contains all elements that don't exist in the {@link other} iterable.\n     * Equality is determined with the same rules as a standard `Set`.\n     * @param other The elements that should be exluded from this stream.\n     * @param key A function returning the key used to check quality.\n     *        If omitted, the stream elements themselves are used for comparison.\n     */\n    exclude(other: Iterable, key?: (element: T) => Key): Stream;\n\n}\n\nexport type FlatStream = {\n    'done': Stream,\n    'recur': T extends Iterable\n        ? FlatStream>\n        : Stream\n}[Depth extends 0 ? 'done' : 'recur'];\n\nexport type MinusOne = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20][N];\n\n/**\n * The default implementation of `Stream` works with two input functions:\n *  - The first function creates the initial state of an iteration.\n *  - The second function gets the current state as argument and returns an `IteratorResult`.\n */\nexport class StreamImpl implements Stream {\n    protected readonly startFn: () => S;\n    protected readonly nextFn: (state: S) => IteratorResult;\n\n    constructor(startFn: () => S, nextFn: (state: S) => IteratorResult) {\n        this.startFn = startFn;\n        this.nextFn = nextFn;\n    }\n\n    iterator(): IterableIterator {\n        const iterator = {\n            state: this.startFn(),\n            next: () => this.nextFn(iterator.state),\n            [Symbol.iterator]: () => iterator\n        };\n        return iterator;\n    }\n\n    [Symbol.iterator](): Iterator {\n        return this.iterator();\n    }\n\n    isEmpty(): boolean {\n        const iterator = this.iterator();\n        return Boolean(iterator.next().done);\n    }\n\n    count(): number {\n        const iterator = this.iterator();\n        let count = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            count++;\n            next = iterator.next();\n        }\n        return count;\n    }\n\n    toArray(): T[] {\n        const result: T[] = [];\n        const iterator = this.iterator();\n        let next: IteratorResult;\n        do {\n            next = iterator.next();\n            if (next.value !== undefined) {\n                result.push(next.value);\n            }\n        } while (!next.done);\n        return result;\n    }\n\n    toSet(): Set {\n        return new Set(this);\n    }\n\n    toMap(keyFn?: (e: T) => K, valueFn?: (e: T) => V): Map {\n        const entryStream = this.map(element => <[K, V]>[\n            keyFn ? keyFn(element) : element,\n            valueFn ? 
valueFn(element) : element\n        ]);\n        return new Map(entryStream);\n    }\n\n    toString(): string {\n        return this.join();\n    }\n\n    concat(other: Iterable): Stream {\n        const iterator = other[Symbol.iterator]();\n        return new StreamImpl<{ first: S, firstDone: boolean }, T | T2>(\n            () => ({ first: this.startFn(), firstDone: false }),\n            state => {\n                let result: IteratorResult;\n                if (!state.firstDone) {\n                    do {\n                        result = this.nextFn(state.first);\n                        if (!result.done) {\n                            return result;\n                        }\n                    } while (!result.done);\n                    state.firstDone = true;\n                }\n                do {\n                    result = iterator.next();\n                    if (!result.done) {\n                        return result;\n                    }\n                } while (!result.done);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    join(separator = ','): string {\n        const iterator = this.iterator();\n        let value = '';\n        let result: IteratorResult;\n        let addSeparator = false;\n        do {\n            result = iterator.next();\n            if (!result.done) {\n                if (addSeparator) {\n                    value += separator;\n                }\n                value += toString(result.value);\n            }\n            addSeparator = true;\n        } while (!result.done);\n        return value;\n    }\n\n    indexOf(searchElement: T, fromIndex = 0): number {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            if (index >= fromIndex && next.value === searchElement) {\n                return index;\n            }\n            next = iterator.next();\n            index++;\n        }\n        return -1;\n    }\n\n    // In the following definition the '& this' part in the return type is important\n    // _and_ the order within 'Stream & this' is crucial!\n    // Otherwise Typescript would infer the type of 'this' as 'StreamImpl & Stream'\n    // (or ' & Stream') and usages like\n    // ```\n    //  const stream = new StreamImpl(...);\n    //  ... stream.every() & stream....\n    // ```\n    // cannot benefit from '', as Typescript would priorize the signatures\n    // of 'StreamImpl' (i.e. 
those of 'Stream') over those of 'Stream'.\n    // With the order of 'Stream & this' the signatures of 'Stream' get precedence.\n    every(predicate: (value: T) => value is U): this is Stream & this;\n    every(predicate: (value: T) => unknown): boolean;\n    every(predicate: (value: T) => unknown): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (!predicate(next.value)) {\n                return false;\n            }\n            next = iterator.next();\n        }\n        return true;\n    }\n\n    some(predicate: (value: T) => unknown): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return true;\n            }\n            next = iterator.next();\n        }\n        return false;\n    }\n\n    forEach(callbackfn: (value: T, index: number) => void): void {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            callbackfn(next.value, index);\n            next = iterator.next();\n            index++;\n        }\n    }\n\n    map(callbackfn: (value: T) => U): Stream {\n        return new StreamImpl(\n            this.startFn,\n            (state) => {\n                const { done, value } = this.nextFn(state);\n                if (done) {\n                    return DONE_RESULT;\n                } else {\n                    return { done: false, value: callbackfn(value) };\n                }\n            }\n        );\n    }\n\n    // for remarks on the return type definition refer to 'every(...)'\n    filter(predicate: (value: T) => value is U): Stream & this;\n    filter(predicate: (value: T) => unknown): Stream & this;\n    filter(predicate: (value: T) => unknown): Stream {\n        return new StreamImpl(\n            this.startFn,\n            state => {\n                let result: IteratorResult;\n                do {\n                    result = this.nextFn(state);\n                    if (!result.done && predicate(result.value)) {\n                        return result;\n                    }\n                } while (!result.done);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    nonNullable(): Stream> {\n        return this.filter(e => e !== undefined && e !== null) as Stream>;\n    }\n\n    reduce(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduce(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n    reduce(callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        const iterator = this.iterator();\n        let previousValue: U | T | undefined = initialValue;\n        let next = iterator.next();\n        while (!next.done) {\n            if (previousValue === undefined) {\n                previousValue = next.value;\n            } else {\n                previousValue = callbackfn(previousValue, next.value);\n            }\n            next = iterator.next();\n        }\n        return previousValue;\n    }\n\n    reduceRight(callbackfn: (previousValue: T, currentValue: T) => T): T | undefined;\n    reduceRight(callbackfn: (previousValue: U, currentValue: T) => U, initialValue: U): U;\n    reduceRight(callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        return 
this.recursiveReduce(this.iterator(), callbackfn, initialValue);\n    }\n\n    protected recursiveReduce(iterator: Iterator, callbackfn: (previousValue: U | T, currentValue: T) => U, initialValue?: U): U | T | undefined {\n        const next = iterator.next();\n        if (next.done) {\n            return initialValue;\n        }\n        const previousValue = this.recursiveReduce(iterator, callbackfn, initialValue);\n        if (previousValue === undefined) {\n            return next.value;\n        }\n        return callbackfn(previousValue, next.value);\n    }\n\n    find(predicate: (value: T) => value is S): S | undefined;\n    find(predicate: (value: T) => unknown): T | undefined;\n    find(predicate: (value: T) => unknown): T | undefined {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return next.value;\n            }\n            next = iterator.next();\n        }\n        return undefined;\n    }\n\n    findIndex(predicate: (value: T) => unknown): number {\n        const iterator = this.iterator();\n        let index = 0;\n        let next = iterator.next();\n        while (!next.done) {\n            if (predicate(next.value)) {\n                return index;\n            }\n            next = iterator.next();\n            index++;\n        }\n        return -1;\n    }\n\n    includes(searchElement: T): boolean {\n        const iterator = this.iterator();\n        let next = iterator.next();\n        while (!next.done) {\n            if (next.value === searchElement) {\n                return true;\n            }\n            next = iterator.next();\n        }\n        return false;\n    }\n\n    flatMap(callbackfn: (value: T) => U | Iterable): Stream {\n        type FlatMapState = { this: S, iterator?: Iterator }\n        return new StreamImpl(\n            () => ({ this: this.startFn() }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (next.done) {\n                            state.iterator = undefined;\n                        } else {\n                            return next;\n                        }\n                    }\n                    const { done, value } = this.nextFn(state.this);\n                    if (!done) {\n                        const mapped = callbackfn(value);\n                        if (isIterable(mapped)) {\n                            state.iterator = mapped[Symbol.iterator]();\n                        } else {\n                            return { done: false, value: mapped };\n                        }\n                    }\n                } while (state.iterator);\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    flat(depth?: D): FlatStream {\n        if (depth === undefined) {\n            depth = 1 as D;\n        }\n        if (depth <= 0) {\n            return this as unknown as FlatStream;\n        }\n        const stream = depth > 1 ? 
this.flat(depth - 1) as unknown as StreamImpl : this;\n        type FlatMapState = { this: S, iterator?: Iterator }\n        return new StreamImpl(\n            () => ({ this: stream.startFn() }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (next.done) {\n                            state.iterator = undefined;\n                        } else {\n                            return next;\n                        }\n                    }\n                    const { done, value } = stream.nextFn(state.this);\n                    if (!done) {\n                        if (isIterable(value)) {\n                            state.iterator = value[Symbol.iterator]() as Iterator;\n                        } else {\n                            return { done: false, value: value };\n                        }\n                    }\n                } while (state.iterator);\n                return DONE_RESULT;\n            }\n        ) as unknown as FlatStream;\n    }\n\n    head(): T | undefined {\n        const iterator = this.iterator();\n        const result = iterator.next();\n        if (result.done) {\n            return undefined;\n        }\n        return result.value;\n    }\n\n    tail(skipCount = 1): Stream {\n        return new StreamImpl(\n            () => {\n                const state = this.startFn();\n                for (let i = 0; i < skipCount; i++) {\n                    const next = this.nextFn(state);\n                    if (next.done) {\n                        return state;\n                    }\n                }\n                return state;\n            },\n            this.nextFn\n        );\n    }\n\n    limit(maxSize: number): Stream {\n        return new StreamImpl<{ size: number, state: S }, T>(\n            () => ({ size: 0, state: this.startFn() }),\n            state => {\n                state.size++;\n                if (state.size > maxSize) {\n                    return DONE_RESULT;\n                }\n                return this.nextFn(state.state);\n            }\n        );\n    }\n\n    distinct(by?: (element: T) => Key): Stream {\n        const set = new Set();\n        return this.filter(e => {\n            const value = by ? by(e) : e;\n            if (set.has(value)) {\n                return false;\n            } else {\n                set.add(value);\n                return true;\n            }\n        });\n    }\n\n    exclude(other: Iterable, key?: (element: T) => Key): Stream {\n        const otherKeySet = new Set();\n        for (const item of other) {\n            const value = key ? key(item) : item;\n            otherKeySet.add(value);\n        }\n        return this.filter(e => {\n            const ownKey = key ? 
key(e) : e;\n            return !otherKeySet.has(ownKey);\n        });\n    }\n}\n\nfunction toString(item: unknown): string {\n    if (typeof item === 'string') {\n        return item as string;\n    }\n    if (typeof item === 'undefined') {\n        return 'undefined';\n    }\n    // eslint-disable-next-line @typescript-eslint/no-explicit-any\n    if (typeof (item as any).toString === 'function') {\n        // eslint-disable-next-line @typescript-eslint/no-explicit-any\n        return (item as any).toString();\n    }\n    return Object.prototype.toString.call(item);\n}\n\nfunction isIterable(obj: unknown): obj is Iterable {\n    return !!obj && typeof (obj as Iterable)[Symbol.iterator] === 'function';\n}\n\n/**\n * An empty stream of any type.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport const EMPTY_STREAM: Stream = new StreamImpl(() => undefined, () => DONE_RESULT);\n\n/**\n * Use this `IteratorResult` when implementing a `StreamImpl` to indicate that there are no more elements in the stream.\n */\nexport const DONE_RESULT: IteratorReturnResult = Object.freeze({ done: true, value: undefined });\n\n/**\n * Create a stream from one or more iterables or array-likes.\n */\nexport function stream(...collections: Array | ArrayLike>): Stream {\n    if (collections.length === 1) {\n        const collection = collections[0];\n        if (collection instanceof StreamImpl) {\n            return collection as Stream;\n        }\n        if (isIterable(collection)) {\n            return new StreamImpl, T>(\n                () => collection[Symbol.iterator](),\n                (iterator) => iterator.next()\n            );\n        }\n        if (typeof collection.length === 'number') {\n            return new StreamImpl<{ index: number }, T>(\n                () => ({ index: 0 }),\n                (state) => {\n                    if (state.index < collection.length) {\n                        return { done: false, value: collection[state.index++] };\n                    } else {\n                        return DONE_RESULT;\n                    }\n                }\n            );\n        }\n    }\n    if (collections.length > 1) {\n        type State = { collIndex: number, iterator?: Iterator, array?: ArrayLike, arrIndex: number };\n        return new StreamImpl(\n            () => ({ collIndex: 0, arrIndex: 0 }),\n            (state) => {\n                do {\n                    if (state.iterator) {\n                        const next = state.iterator.next();\n                        if (!next.done) {\n                            return next;\n                        }\n                        state.iterator = undefined;\n                    }\n                    if (state.array) {\n                        if (state.arrIndex < state.array.length) {\n                            return { done: false, value: state.array[state.arrIndex++] };\n                        }\n                        state.array = undefined;\n                        state.arrIndex = 0;\n                    }\n                    if (state.collIndex < collections.length) {\n                        const collection = collections[state.collIndex++];\n                        if (isIterable(collection)) {\n                            state.iterator = collection[Symbol.iterator]();\n                        } else if (collection && typeof collection.length === 'number') {\n                            state.array = collection;\n                        }\n                    }\n                } 
while (state.iterator || state.array || state.collIndex < collections.length);\n                return DONE_RESULT;\n            }\n        );\n    }\n    return EMPTY_STREAM;\n}\n\n/**\n * A tree iterator adds the ability to prune the current iteration.\n */\nexport interface TreeIterator extends IterableIterator {\n    /**\n     * Skip the whole subtree below the last returned element. The iteration continues as if that\n     * element had no children.\n     */\n    prune(): void\n}\n\n/**\n * A tree stream is used to stream the elements of a tree, for example an AST or CST.\n */\nexport interface TreeStream extends Stream {\n    iterator(): TreeIterator\n}\n\n/**\n * The default implementation of `TreeStream` takes a root element and a function that computes the\n * children of its argument. Whether the root node included in the stream is controlled with the\n * `includeRoot` option, which defaults to `false`.\n */\nexport class TreeStreamImpl\n    extends StreamImpl<{ iterators: Array>, pruned: boolean }, T>\n    implements TreeStream {\n\n    constructor(root: T, children: (node: T) => Iterable, options?: { includeRoot?: boolean }) {\n        super(\n            () => ({\n                iterators: options?.includeRoot ? [[root][Symbol.iterator]()] : [children(root)[Symbol.iterator]()],\n                pruned: false\n            }),\n            state => {\n                if (state.pruned) {\n                    state.iterators.pop();\n                    state.pruned = false;\n                }\n                while (state.iterators.length > 0) {\n                    const iterator = state.iterators[state.iterators.length - 1];\n                    const next = iterator.next();\n                    if (next.done) {\n                        state.iterators.pop();\n                    } else {\n                        state.iterators.push(children(next.value)[Symbol.iterator]());\n                        return next;\n                    }\n                }\n                return DONE_RESULT;\n            }\n        );\n    }\n\n    override iterator(): TreeIterator {\n        const iterator = {\n            state: this.startFn(),\n            next: () => this.nextFn(iterator.state),\n            prune: () => {\n                iterator.state.pruned = true;\n            },\n            [Symbol.iterator]: () => iterator\n        };\n        return iterator;\n    }\n}\n\n/**\n * A set of utility functions that reduce a stream to a single value.\n */\nexport namespace Reduction {\n\n    /**\n     * Compute the sum of a number stream.\n     */\n    export function sum(stream: Stream): number {\n        return stream.reduce((a, b) => a + b, 0);\n    }\n\n    /**\n     * Compute the product of a number stream.\n     */\n    export function product(stream: Stream): number {\n        return stream.reduce((a, b) => a * b, 0);\n    }\n\n    /**\n     * Compute the minimum of a number stream. Returns `undefined` if the stream is empty.\n     */\n    export function min(stream: Stream): number | undefined {\n        return stream.reduce((a, b) => Math.min(a, b));\n    }\n\n    /**\n     * Compute the maximum of a number stream. 
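// Illustrative sketch, not part of the embedded sources: building a stream from several
// collections and walking a tree with pruning. Assumption: `stream` and `TreeStreamImpl`
// are re-exported by the 'langium' package.
import { stream, TreeStreamImpl } from 'langium';

console.log(stream([1, 2], new Set([3]), [4]).toArray()); // [1, 2, 3, 4]

interface TreeNode { name: string; children: TreeNode[] }
const root: TreeNode = {
    name: 'a',
    children: [{ name: 'b', children: [{ name: 'c', children: [] }] }]
};

const tree = new TreeStreamImpl(root, n => n.children, { includeRoot: true });
console.log(tree.map(n => n.name).toArray()); // ['a', 'b', 'c'] (depth-first)

// `prune()` skips the subtree below the element that was returned last:
const names: string[] = [];
const iterator = tree.iterator();
for (let next = iterator.next(); !next.done; next = iterator.next()) {
    names.push(next.value.name);
    if (next.value.name === 'b') {
        iterator.prune(); // 'c' is never visited
    }
}
console.log(names); // ['a', 'b']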
Returns `undefined` if the stream is empty.\n     */\n    export function max(stream: Stream): number | undefined {\n        return stream.reduce((a, b) => Math.max(a, b));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { assertUnreachable } from '../utils/errors.js';\nimport * as ast from '../languages/generated/ast.js';\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport { isCompositeCstNode } from '../syntax-tree.js';\nimport { getContainerOfType, streamAllContents } from './ast-utils.js';\nimport { streamCst } from './cst-utils.js';\nimport { escapeRegExp } from './regexp-utils.js';\n\n/**\n * Returns the entry rule of the given grammar, if any. If the grammar file does not contain an entry rule,\n * the result is `undefined`.\n */\nexport function getEntryRule(grammar: ast.Grammar): ast.ParserRule | undefined {\n    return grammar.rules.find(e => ast.isParserRule(e) && e.entry) as ast.ParserRule;\n}\n\n/**\n * Returns all hidden terminal rules of the given grammar, if any.\n */\nexport function getHiddenRules(grammar: ast.Grammar) {\n    return grammar.rules.filter((e): e is ast.TerminalRule => ast.isTerminalRule(e) && e.hidden);\n}\n\n/**\n * Returns all rules that can be reached from the topmost rules of the specified grammar (entry and hidden terminal rules).\n *\n * @param grammar The grammar that contains all rules\n * @param allTerminals Whether or not to include terminals that are referenced only by other terminals\n * @returns A list of referenced parser and terminal rules. 
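// Illustrative sketch, not part of the embedded sources: the Reduction helpers defined
// above. Assumption: `stream` and `Reduction` are re-exported by the 'langium' package.
import { stream, Reduction } from 'langium';

console.log(Reduction.sum(stream([3, 1, 4])));   // 8
console.log(Reduction.min(stream([3, 1, 4])));   // 1
console.log(Reduction.max(stream([3, 1, 4])));   // 4
console.log(Reduction.max(stream<number>([])));  // undefined for an empty stream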
If the grammar contains no entry rule,\n *      this function returns all rules of the specified grammar.\n */\nexport function getAllReachableRules(grammar: ast.Grammar, allTerminals: boolean): Set {\n    const ruleNames = new Set();\n    const entryRule = getEntryRule(grammar);\n    if (!entryRule) {\n        return new Set(grammar.rules);\n    }\n\n    const topMostRules = [entryRule as ast.AbstractRule].concat(getHiddenRules(grammar));\n    for (const rule of topMostRules) {\n        ruleDfs(rule, ruleNames, allTerminals);\n    }\n\n    const rules = new Set();\n    for (const rule of grammar.rules) {\n        if (ruleNames.has(rule.name) || (ast.isTerminalRule(rule) && rule.hidden)) {\n            rules.add(rule);\n        }\n    }\n    return rules;\n}\n\nfunction ruleDfs(rule: ast.AbstractRule, visitedSet: Set, allTerminals: boolean): void {\n    visitedSet.add(rule.name);\n    streamAllContents(rule).forEach(node => {\n        if (ast.isRuleCall(node) || (allTerminals && ast.isTerminalRuleCall(node))) {\n            const refRule = node.rule.ref;\n            if (refRule && !visitedSet.has(refRule.name)) {\n                ruleDfs(refRule, visitedSet, allTerminals);\n            }\n        }\n    });\n}\n\n/**\n * Determines the grammar expression used to parse a cross-reference (usually a reference to a terminal rule).\n * A cross-reference can declare this expression explicitly in the form `[Type : Terminal]`, but if `Terminal`\n * is omitted, this function attempts to infer it from the name of the referenced `Type` (using `findNameAssignment`).\n *\n * Returns the grammar expression used to parse the given cross-reference, or `undefined` if it is not declared\n * and cannot be inferred.\n */\nexport function getCrossReferenceTerminal(crossRef: ast.CrossReference): ast.AbstractElement | undefined {\n    if (crossRef.terminal) {\n        return crossRef.terminal;\n    } else if (crossRef.type.ref) {\n        const nameAssigment = findNameAssignment(crossRef.type.ref);\n        return nameAssigment?.terminal;\n    }\n    return undefined;\n}\n\n/**\n * Determines whether the given terminal rule represents a comment. This is true if the rule is marked\n * as `hidden` and it does not match white space. This means every hidden token (i.e. excluded from the AST)\n * that contains visible characters is considered a comment.\n */\nexport function isCommentTerminal(terminalRule: ast.TerminalRule): boolean {\n    return terminalRule.hidden && !terminalRegex(terminalRule).test(' ');\n}\n\n/**\n * Find all CST nodes within the given node that contribute to the specified property.\n *\n * @param node A CST node in which to look for property assignments. If this is undefined, the result is an empty array.\n * @param property A property name of the constructed AST node. If this is undefined, the result is an empty array.\n */\nexport function findNodesForProperty(node: CstNode | undefined, property: string | undefined): CstNode[] {\n    if (!node || !property) {\n        return [];\n    }\n    return findNodesForPropertyInternal(node, property, node.astNode, true);\n}\n\n/**\n * Find a single CST node within the given node that contributes to the specified property.\n *\n * @param node A CST node in which to look for property assignments. If this is undefined, the result is `undefined`.\n * @param property A property name of the constructed AST node. 
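// Illustrative sketch, not part of the embedded sources: querying a parsed grammar with
// the helpers defined above. Assumption: they are exposed through the `GrammarUtils` and
// `GrammarAST` namespace exports of the 'langium' package; `grammar` is a hypothetical,
// already-parsed grammar document.
import { GrammarUtils, GrammarAST } from 'langium';

declare const grammar: GrammarAST.Grammar;

const entry = GrammarUtils.getEntryRule(grammar);      // entry parser rule, or undefined
const hidden = GrammarUtils.getHiddenRules(grammar);   // hidden terminals, e.g. comments
const reachable = GrammarUtils.getAllReachableRules(grammar, true); // include terminal-only references
console.log(entry?.name, hidden.length, reachable.size);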
If this is undefined, the result is `undefined`.\n * @param index If no index is specified or the index is less than zero, the first found node is returned. If the\n *        specified index exceeds the number of assignments to the property, the last found node is returned. Otherwise,\n *        the node with the specified index is returned.\n */\nexport function findNodeForProperty(node: CstNode | undefined, property: string | undefined, index?: number): CstNode | undefined {\n    if (!node || !property) {\n        return undefined;\n    }\n    const nodes = findNodesForPropertyInternal(node, property, node.astNode, true);\n    if (nodes.length === 0) {\n        return undefined;\n    }\n    if (index !== undefined) {\n        index = Math.max(0, Math.min(index, nodes.length - 1));\n    } else {\n        index = 0;\n    }\n    return nodes[index];\n}\n\nfunction findNodesForPropertyInternal(node: CstNode, property: string, element: AstNode | undefined, first: boolean): CstNode[] {\n    if (!first) {\n        const nodeFeature = getContainerOfType(node.grammarSource, ast.isAssignment);\n        if (nodeFeature && nodeFeature.feature === property) {\n            return [node];\n        }\n    }\n    if (isCompositeCstNode(node) && node.astNode === element) {\n        return node.content.flatMap(e => findNodesForPropertyInternal(e, property, element, false));\n    }\n    return [];\n}\n\n/**\n * Find all CST nodes within the given node that correspond to the specified keyword.\n *\n * @param node A CST node in which to look for keywords. If this is undefined, the result is an empty array.\n * @param keyword A keyword as specified in the grammar.\n */\nexport function findNodesForKeyword(node: CstNode | undefined, keyword: string): CstNode[] {\n    if (!node) {\n        return [];\n    }\n    return findNodesForKeywordInternal(node, keyword, node?.astNode);\n}\n\n/**\n * Find a single CST node within the given node that corresponds to the specified keyword.\n *\n * @param node A CST node in which to look for keywords. If this is undefined, the result is `undefined`.\n * @param keyword A keyword as specified in the grammar.\n * @param index If no index is specified or the index is less than zero, the first found node is returned. If the\n *        specified index exceeds the number of keyword occurrences, the last found node is returned. 
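// Illustrative sketch, not part of the embedded sources: locating the CST nodes behind a
// property assignment, as documented above. Assumption: the helpers are exposed through
// the `GrammarUtils` namespace of the 'langium' package; `entity` is a hypothetical AST
// node whose parser rule assigns a `name` property.
import { GrammarUtils, type AstNode } from 'langium';

declare const entity: AstNode;

const allNameNodes = GrammarUtils.findNodesForProperty(entity.$cstNode, 'name');
const firstNameNode = GrammarUtils.findNodeForProperty(entity.$cstNode, 'name');
console.log(allNameNodes.length, firstNameNode?.text, firstNameNode?.range);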
Otherwise,\n *        the node with the specified index is returned.\n */\nexport function findNodeForKeyword(node: CstNode | undefined, keyword: string, index?: number): CstNode | undefined {\n    if (!node) {\n        return undefined;\n    }\n    const nodes = findNodesForKeywordInternal(node, keyword, node?.astNode);\n    if (nodes.length === 0) {\n        return undefined;\n    }\n    if (index !== undefined) {\n        index = Math.max(0, Math.min(index, nodes.length - 1));\n    } else {\n        index = 0;\n    }\n    return nodes[index];\n}\n\nexport function findNodesForKeywordInternal(node: CstNode, keyword: string, element: AstNode | undefined): CstNode[] {\n    if (node.astNode !== element) {\n        return [];\n    }\n    if (ast.isKeyword(node.grammarSource) && node.grammarSource.value === keyword) {\n        return [node];\n    }\n    const treeIterator = streamCst(node).iterator();\n    let result: IteratorResult;\n    const keywordNodes: CstNode[] = [];\n    do {\n        result = treeIterator.next();\n        if (!result.done) {\n            const childNode = result.value;\n            if (childNode.astNode === element) {\n                if (ast.isKeyword(childNode.grammarSource) && childNode.grammarSource.value === keyword) {\n                    keywordNodes.push(childNode);\n                }\n            } else {\n                treeIterator.prune();\n            }\n        }\n    } while (!result.done);\n    return keywordNodes;\n}\n\n/**\n * If the given CST node was parsed in the context of a property assignment, the respective `Assignment` grammar\n * node is returned. If no assignment is found, the result is `undefined`.\n *\n * @param cstNode A CST node for which to find a property assignment.\n */\nexport function findAssignment(cstNode: CstNode): ast.Assignment | undefined {\n    const astNode = cstNode.astNode;\n    // Only search until the ast node of the parent cst node is no longer the original ast node\n    // This would make us jump to a preceding rule call, which contains only unrelated assignments\n    while (astNode === cstNode.container?.astNode) {\n        const assignment = getContainerOfType(cstNode.grammarSource, ast.isAssignment);\n        if (assignment) {\n            return assignment;\n        }\n        cstNode = cstNode.container;\n    }\n    return undefined;\n}\n\n/**\n * Find an assignment to the `name` property for the given grammar type. This requires the `type` to be inferred\n * from a parser rule, and that rule must contain an assignment to the `name` property. In all other cases,\n * this function returns `undefined`.\n */\nexport function findNameAssignment(type: ast.AbstractType): ast.Assignment | undefined {\n    let startNode: AstNode = type;\n    if (ast.isInferredType(startNode)) {\n        // for inferred types, the location to start searching for the name-assignment is different\n        if (ast.isAction(startNode.$container)) {\n            // a type which is explicitly inferred by an action: investigate the sibbling of the Action node, i.e. 
start searching at the Action's parent\n            startNode = startNode.$container.$container!;\n        } else if (ast.isParserRule(startNode.$container)) {\n            // investigate the parser rule with the explicitly inferred type\n            startNode = startNode.$container;\n        } else {\n            assertUnreachable(startNode.$container);\n        }\n    }\n    return findNameAssignmentInternal(type, startNode, new Map());\n}\n\nfunction findNameAssignmentInternal(type: ast.AbstractType, startNode: AstNode, cache: Map): ast.Assignment | undefined {\n    // the cache is only required to prevent infinite loops\n    function go(node: AstNode, refType: ast.AbstractType): ast.Assignment | undefined {\n        let childAssignment: ast.Assignment | undefined = undefined;\n        const parentAssignment = getContainerOfType(node, ast.isAssignment);\n        // No parent assignment implies unassigned rule call\n        if (!parentAssignment) {\n            childAssignment = findNameAssignmentInternal(refType, refType, cache);\n        }\n        cache.set(type, childAssignment);\n        return childAssignment;\n    }\n\n    if (cache.has(type)) {\n        return cache.get(type);\n    }\n    cache.set(type, undefined);\n    for (const node of streamAllContents(startNode)) {\n        if (ast.isAssignment(node) && node.feature.toLowerCase() === 'name') {\n            cache.set(type, node);\n            return node;\n        } else if (ast.isRuleCall(node) && ast.isParserRule(node.rule.ref)) {\n            return go(node, node.rule.ref);\n        } else if (ast.isSimpleType(node) && node.typeRef?.ref) {\n            return go(node, node.typeRef.ref);\n        }\n    }\n    return undefined;\n}\n\nexport function getActionAtElement(element: ast.AbstractElement): ast.Action | undefined {\n    const parent = element.$container;\n    if (ast.isGroup(parent)) {\n        const elements = parent.elements;\n        const index = elements.indexOf(element);\n        for (let i = index - 1; i >= 0; i--) {\n            const item = elements[i];\n            if (ast.isAction(item)) {\n                return item;\n            } else {\n                const action = streamAllContents(elements[i]).find(ast.isAction);\n                if (action) {\n                    return action;\n                }\n            }\n        }\n    }\n    if (ast.isAbstractElement(parent)) {\n        return getActionAtElement(parent);\n    } else {\n        return undefined;\n    }\n}\n\nexport type Cardinality = '?' | '*' | '+' | undefined;\nexport type Operator = '=' | '+=' | '?=' | undefined;\n\nexport function isOptionalCardinality(cardinality?: Cardinality, element?: ast.AbstractElement): boolean {\n    return cardinality === '?' 
|| cardinality === '*' || (ast.isGroup(element) && Boolean(element.guardCondition));\n}\n\nexport function isArrayCardinality(cardinality?: Cardinality): boolean {\n    return cardinality === '*' || cardinality === '+';\n}\n\nexport function isArrayOperator(operator?: Operator): boolean {\n    return operator === '+=';\n}\n\n/**\n * Determines whether the given parser rule is a _data type rule_, meaning that it has a\n * primitive return type like `number`, `boolean`, etc.\n */\nexport function isDataTypeRule(rule: ast.ParserRule): boolean {\n    return isDataTypeRuleInternal(rule, new Set());\n}\n\nfunction isDataTypeRuleInternal(rule: ast.ParserRule, visited: Set): boolean {\n    if (visited.has(rule)) {\n        return true;\n    } else {\n        visited.add(rule);\n    }\n    for (const node of streamAllContents(rule)) {\n        if (ast.isRuleCall(node)) {\n            if (!node.rule.ref) {\n                // RuleCall to unresolved rule. Don't assume `rule` is a DataType rule.\n                return false;\n            }\n            if (ast.isParserRule(node.rule.ref) && !isDataTypeRuleInternal(node.rule.ref, visited)) {\n                return false;\n            }\n        } else if (ast.isAssignment(node)) {\n            return false;\n        } else if (ast.isAction(node)) {\n            return false;\n        }\n    }\n    return Boolean(rule.definition);\n}\n\nexport function isDataType(type: ast.Type): boolean {\n    return isDataTypeInternal(type.type, new Set());\n}\n\nfunction isDataTypeInternal(type: ast.TypeDefinition, visited: Set): boolean {\n    if (visited.has(type)) {\n        return true;\n    } else {\n        visited.add(type);\n    }\n    if (ast.isArrayType(type)) {\n        return false;\n    } else if (ast.isReferenceType(type)) {\n        return false;\n    } else if (ast.isUnionType(type)) {\n        return type.types.every(e => isDataTypeInternal(e, visited));\n    } else if (ast.isSimpleType(type)) {\n        if (type.primitiveType !== undefined) {\n            return true;\n        } else if (type.stringType !== undefined) {\n            return true;\n        } else if (type.typeRef !== undefined) {\n            const ref = type.typeRef.ref;\n            if (ast.isType(ref)) {\n                return isDataTypeInternal(ref.type, visited);\n            } else {\n                return false;\n            }\n        } else {\n            return false;\n        }\n    } else {\n        return false;\n    }\n}\n\nexport function getExplicitRuleType(rule: ast.ParserRule): string | undefined {\n    if (rule.inferredType) {\n        return rule.inferredType.name;\n    } else if (rule.dataType) {\n        return rule.dataType;\n    } else if (rule.returnType) {\n        const refType = rule.returnType.ref;\n        if(refType) {\n            // check if we need to check Action as return type\n            if (ast.isParserRule(refType)) {\n                return refType.name;\n            }  else if(ast.isInterface(refType) || ast.isType(refType)) {\n                return refType.name;\n            }\n        }\n    }\n    return undefined;\n}\n\nexport function getTypeName(type: ast.AbstractType | ast.Action): string {\n    if (ast.isParserRule(type)) {\n        return isDataTypeRule(type) ? type.name : getExplicitRuleType(type) ?? 
type.name;\n    } else if (ast.isInterface(type) || ast.isType(type) || ast.isReturnType(type)) {\n        return type.name;\n    } else if (ast.isAction(type)) {\n        const actionType = getActionType(type);\n        if (actionType) {\n            return actionType;\n        }\n    } else if (ast.isInferredType(type)) {\n        return type.name;\n    }\n    throw new Error('Cannot get name of Unknown Type');\n}\n\nexport function getActionType(action: ast.Action): string | undefined {\n    if (action.inferredType) {\n        return action.inferredType.name;\n    } else if (action.type?.ref) {\n        return getTypeName(action.type.ref);\n    }\n    return undefined; // not inferring and not referencing a valid type\n}\n\nexport function getRuleType(rule: ast.AbstractRule): string {\n    if (ast.isTerminalRule(rule)) {\n        return rule.type?.name ?? 'string';\n    } else {\n        return isDataTypeRule(rule) ? rule.name : getExplicitRuleType(rule) ?? rule.name;\n    }\n}\n\nexport function terminalRegex(terminalRule: ast.TerminalRule): RegExp {\n    const flags: Flags = {\n        s: false,\n        i: false,\n        u: false\n    };\n    const source = abstractElementToRegex(terminalRule.definition, flags);\n    const flagText = Object.entries(flags).filter(([, value]) => value).map(([name]) => name).join('');\n    return new RegExp(source, flagText);\n}\n\n// Using [\\s\\S]* allows to match everything, compared to . which doesn't match line terminators\nconst WILDCARD = /[\\s\\S]/.source;\n\ntype Flags = {\n    s: boolean;\n    i: boolean;\n    u: boolean;\n}\n\nfunction abstractElementToRegex(element: ast.AbstractElement, flags?: Flags): string {\n    if (ast.isTerminalAlternatives(element)) {\n        return terminalAlternativesToRegex(element);\n    } else if (ast.isTerminalGroup(element)) {\n        return terminalGroupToRegex(element);\n    } else if (ast.isCharacterRange(element)) {\n        return characterRangeToRegex(element);\n    } else if (ast.isTerminalRuleCall(element)) {\n        const rule = element.rule.ref;\n        if (!rule) {\n            throw new Error('Missing rule reference.');\n        }\n        return withCardinality(abstractElementToRegex(rule.definition), {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead\n        });\n    } else if (ast.isNegatedToken(element)) {\n        return negateTokenToRegex(element);\n    } else if (ast.isUntilToken(element)) {\n        return untilTokenToRegex(element);\n    } else if (ast.isRegexToken(element)) {\n        const lastSlash = element.regex.lastIndexOf('/');\n        const source = element.regex.substring(1, lastSlash);\n        const regexFlags = element.regex.substring(lastSlash + 1);\n        if (flags) {\n            flags.i = regexFlags.includes('i');\n            flags.s = regexFlags.includes('s');\n            flags.u = regexFlags.includes('u');\n        }\n        return withCardinality(source, {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead,\n            wrap: false\n        });\n    } else if (ast.isWildcard(element)) {\n        return withCardinality(WILDCARD, {\n            cardinality: element.cardinality,\n            lookahead: element.lookahead\n        });\n    } else {\n        throw new Error(`Invalid terminal element: ${element?.$type}`);\n    }\n}\n\nfunction terminalAlternativesToRegex(alternatives: ast.TerminalAlternatives): string {\n    return withCardinality(alternatives.elements.map(e => 
abstractElementToRegex(e)).join('|'), {\n        cardinality: alternatives.cardinality,\n        lookahead: alternatives.lookahead\n    });\n}\n\nfunction terminalGroupToRegex(group: ast.TerminalGroup): string {\n    return withCardinality(group.elements.map(e => abstractElementToRegex(e)).join(''), {\n        cardinality: group.cardinality,\n        lookahead: group.lookahead\n    });\n}\n\nfunction untilTokenToRegex(until: ast.UntilToken): string {\n    return withCardinality(`${WILDCARD}*?${abstractElementToRegex(until.terminal)}`, {\n        cardinality: until.cardinality,\n        lookahead: until.lookahead\n    });\n}\n\nfunction negateTokenToRegex(negate: ast.NegatedToken): string {\n    return withCardinality(`(?!${abstractElementToRegex(negate.terminal)})${WILDCARD}*?`, {\n        cardinality: negate.cardinality,\n        lookahead: negate.lookahead\n    });\n}\n\nfunction characterRangeToRegex(range: ast.CharacterRange): string {\n    if (range.right) {\n        return withCardinality(`[${keywordToRegex(range.left)}-${keywordToRegex(range.right)}]`, {\n            cardinality: range.cardinality,\n            lookahead: range.lookahead,\n            wrap: false\n        });\n    }\n    return withCardinality(keywordToRegex(range.left), {\n        cardinality: range.cardinality,\n        lookahead: range.lookahead,\n        wrap: false\n    });\n}\n\nfunction keywordToRegex(keyword: ast.Keyword): string {\n    return escapeRegExp(keyword.value);\n}\n\nfunction withCardinality(regex: string, options: {\n    cardinality?: string\n    wrap?: boolean\n    lookahead?: string\n}): string {\n    if (options.wrap !== false || options.lookahead) {\n        regex = `(${options.lookahead ?? ''}${regex})`;\n    }\n    if (options.cardinality) {\n        return `${regex}${options.cardinality}`;\n    }\n    return regex;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CstNode } from '../syntax-tree.js';\n\nexport class ErrorWithLocation extends Error {\n    constructor(node: CstNode | undefined, message: string) {\n        super(node ? `${message} at ${node.range.start.line}:${node.range.start.character}` : message);\n    }\n}\n\nexport function assertUnreachable(_: never): never {\n    throw new Error('Error! 
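// Illustrative sketch, not part of the embedded sources: turning a terminal rule into a
// JavaScript RegExp with the functions defined above. Assumption: `terminalRegex` and
// `isCommentTerminal` are exposed through the `GrammarUtils` namespace of the 'langium'
// package; `terminal` is a hypothetical hidden terminal rule from a parsed grammar.
import { GrammarUtils, GrammarAST } from 'langium';

declare const terminal: GrammarAST.TerminalRule; // e.g. a hidden SL_COMMENT terminal

const regex: RegExp = GrammarUtils.terminalRegex(terminal);
console.log(regex.test('// a comment'));
console.log(GrammarUtils.isCommentTerminal(terminal)); // true for hidden terminals that match visible characters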
The input value was not handled.');\n}\n", "/******************************************************************************\n * This file was generated by langium-cli 3.0.0.\n * DO NOT EDIT MANUALLY!\n ******************************************************************************/\n\n/* eslint-disable */\nimport type { AstNode, Reference, ReferenceInfo, TypeMetaData } from '../../syntax-tree.js';\nimport { AbstractAstReflection } from '../../syntax-tree.js';\n\nexport const LangiumGrammarTerminals = {\n    ID: /\\^?[_a-zA-Z][\\w_]*/,\n    STRING: /\"(\\\\.|[^\"\\\\])*\"|'(\\\\.|[^'\\\\])*'/,\n    NUMBER: /NaN|-?((\\d*\\.\\d+|\\d+)([Ee][+-]?\\d+)?|Infinity)/,\n    RegexLiteral: /\\/(?![*+?])(?:[^\\r\\n\\[/\\\\]|\\\\.|\\[(?:[^\\r\\n\\]\\\\]|\\\\.)*\\])+\\/[a-z]*/,\n    WS: /\\s+/,\n    ML_COMMENT: /\\/\\*[\\s\\S]*?\\*\\//,\n    SL_COMMENT: /\\/\\/[^\\n\\r]*/,\n};\n\nexport type AbstractRule = ParserRule | TerminalRule;\n\nexport const AbstractRule = 'AbstractRule';\n\nexport function isAbstractRule(item: unknown): item is AbstractRule {\n    return reflection.isInstance(item, AbstractRule);\n}\n\nexport type AbstractType = InferredType | Interface | ParserRule | Type;\n\nexport const AbstractType = 'AbstractType';\n\nexport function isAbstractType(item: unknown): item is AbstractType {\n    return reflection.isInstance(item, AbstractType);\n}\n\nexport type Condition = BooleanLiteral | Conjunction | Disjunction | Negation | ParameterReference;\n\nexport const Condition = 'Condition';\n\nexport function isCondition(item: unknown): item is Condition {\n    return reflection.isInstance(item, Condition);\n}\n\nexport type FeatureName = 'current' | 'entry' | 'extends' | 'false' | 'fragment' | 'grammar' | 'hidden' | 'import' | 'infer' | 'infers' | 'interface' | 'returns' | 'terminal' | 'true' | 'type' | 'with' | PrimitiveType | string;\n\nexport function isFeatureName(item: unknown): item is FeatureName {\n    return isPrimitiveType(item) || item === 'current' || item === 'entry' || item === 'extends' || item === 'false' || item === 'fragment' || item === 'grammar' || item === 'hidden' || item === 'import' || item === 'interface' || item === 'returns' || item === 'terminal' || item === 'true' || item === 'type' || item === 'infer' || item === 'infers' || item === 'with' || (typeof item === 'string' && (/\\^?[_a-zA-Z][\\w_]*/.test(item)));\n}\n\nexport type PrimitiveType = 'Date' | 'bigint' | 'boolean' | 'number' | 'string';\n\nexport function isPrimitiveType(item: unknown): item is PrimitiveType {\n    return item === 'string' || item === 'number' || item === 'boolean' || item === 'Date' || item === 'bigint';\n}\n\nexport type TypeDefinition = ArrayType | ReferenceType | SimpleType | UnionType;\n\nexport const TypeDefinition = 'TypeDefinition';\n\nexport function isTypeDefinition(item: unknown): item is TypeDefinition {\n    return reflection.isInstance(item, TypeDefinition);\n}\n\nexport type ValueLiteral = ArrayLiteral | BooleanLiteral | NumberLiteral | StringLiteral;\n\nexport const ValueLiteral = 'ValueLiteral';\n\nexport function isValueLiteral(item: unknown): item is ValueLiteral {\n    return reflection.isInstance(item, ValueLiteral);\n}\n\nexport interface AbstractElement extends AstNode {\n    readonly $type: 'AbstractElement' | 'Action' | 'Alternatives' | 'Assignment' | 'CharacterRange' | 'CrossReference' | 'EndOfFile' | 'Group' | 'Keyword' | 'NegatedToken' | 'RegexToken' | 'RuleCall' | 'TerminalAlternatives' | 'TerminalGroup' | 'TerminalRuleCall' | 'UnorderedGroup' | 'UntilToken' | 
'Wildcard';\n    cardinality?: '*' | '+' | '?';\n    lookahead?: '?!' | '?;\n}\n\nexport const ArrayLiteral = 'ArrayLiteral';\n\nexport function isArrayLiteral(item: unknown): item is ArrayLiteral {\n    return reflection.isInstance(item, ArrayLiteral);\n}\n\nexport interface ArrayType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'ArrayType';\n    elementType: TypeDefinition;\n}\n\nexport const ArrayType = 'ArrayType';\n\nexport function isArrayType(item: unknown): item is ArrayType {\n    return reflection.isInstance(item, ArrayType);\n}\n\nexport interface BooleanLiteral extends AstNode {\n    readonly $container: ArrayLiteral | Conjunction | Disjunction | Group | NamedArgument | Negation | TypeAttribute;\n    readonly $type: 'BooleanLiteral';\n    true: boolean;\n}\n\nexport const BooleanLiteral = 'BooleanLiteral';\n\nexport function isBooleanLiteral(item: unknown): item is BooleanLiteral {\n    return reflection.isInstance(item, BooleanLiteral);\n}\n\nexport interface Conjunction extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Conjunction';\n    left: Condition;\n    right: Condition;\n}\n\nexport const Conjunction = 'Conjunction';\n\nexport function isConjunction(item: unknown): item is Conjunction {\n    return reflection.isInstance(item, Conjunction);\n}\n\nexport interface Disjunction extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Disjunction';\n    left: Condition;\n    right: Condition;\n}\n\nexport const Disjunction = 'Disjunction';\n\nexport function isDisjunction(item: unknown): item is Disjunction {\n    return reflection.isInstance(item, Disjunction);\n}\n\nexport interface Grammar extends AstNode {\n    readonly $type: 'Grammar';\n    definesHiddenTokens: boolean;\n    hiddenTokens: Array>;\n    imports: Array;\n    interfaces: Array;\n    isDeclared: boolean;\n    name?: string;\n    rules: Array;\n    types: Array;\n    usedGrammars: Array>;\n}\n\nexport const Grammar = 'Grammar';\n\nexport function isGrammar(item: unknown): item is Grammar {\n    return reflection.isInstance(item, Grammar);\n}\n\nexport interface GrammarImport extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'GrammarImport';\n    path: string;\n}\n\nexport const GrammarImport = 'GrammarImport';\n\nexport function isGrammarImport(item: unknown): item is GrammarImport {\n    return reflection.isInstance(item, GrammarImport);\n}\n\nexport interface InferredType extends AstNode {\n    readonly $container: Action | ParserRule;\n    readonly $type: 'InferredType';\n    name: string;\n}\n\nexport const InferredType = 'InferredType';\n\nexport function isInferredType(item: unknown): item is InferredType {\n    return reflection.isInstance(item, InferredType);\n}\n\nexport interface Interface extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'Interface';\n    attributes: Array;\n    name: string;\n    superTypes: Array>;\n}\n\nexport const Interface = 'Interface';\n\nexport function isInterface(item: unknown): item is Interface {\n    return reflection.isInstance(item, Interface);\n}\n\nexport interface NamedArgument extends AstNode {\n    readonly $container: RuleCall;\n    readonly $type: 'NamedArgument';\n    calledByName: boolean;\n    parameter?: Reference;\n    value: Condition;\n}\n\nexport const NamedArgument = 
'NamedArgument';\n\nexport function isNamedArgument(item: unknown): item is NamedArgument {\n    return reflection.isInstance(item, NamedArgument);\n}\n\nexport interface Negation extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'Negation';\n    value: Condition;\n}\n\nexport const Negation = 'Negation';\n\nexport function isNegation(item: unknown): item is Negation {\n    return reflection.isInstance(item, Negation);\n}\n\nexport interface NumberLiteral extends AstNode {\n    readonly $container: ArrayLiteral | TypeAttribute;\n    readonly $type: 'NumberLiteral';\n    value: number;\n}\n\nexport const NumberLiteral = 'NumberLiteral';\n\nexport function isNumberLiteral(item: unknown): item is NumberLiteral {\n    return reflection.isInstance(item, NumberLiteral);\n}\n\nexport interface Parameter extends AstNode {\n    readonly $container: ParserRule;\n    readonly $type: 'Parameter';\n    name: string;\n}\n\nexport const Parameter = 'Parameter';\n\nexport function isParameter(item: unknown): item is Parameter {\n    return reflection.isInstance(item, Parameter);\n}\n\nexport interface ParameterReference extends AstNode {\n    readonly $container: Conjunction | Disjunction | Group | NamedArgument | Negation;\n    readonly $type: 'ParameterReference';\n    parameter: Reference;\n}\n\nexport const ParameterReference = 'ParameterReference';\n\nexport function isParameterReference(item: unknown): item is ParameterReference {\n    return reflection.isInstance(item, ParameterReference);\n}\n\nexport interface ParserRule extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'ParserRule';\n    dataType?: PrimitiveType;\n    definesHiddenTokens: boolean;\n    definition: AbstractElement;\n    entry: boolean;\n    fragment: boolean;\n    hiddenTokens: Array>;\n    inferredType?: InferredType;\n    name: string;\n    parameters: Array;\n    returnType?: Reference;\n    wildcard: boolean;\n}\n\nexport const ParserRule = 'ParserRule';\n\nexport function isParserRule(item: unknown): item is ParserRule {\n    return reflection.isInstance(item, ParserRule);\n}\n\nexport interface ReferenceType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'ReferenceType';\n    referenceType: TypeDefinition;\n}\n\nexport const ReferenceType = 'ReferenceType';\n\nexport function isReferenceType(item: unknown): item is ReferenceType {\n    return reflection.isInstance(item, ReferenceType);\n}\n\nexport interface ReturnType extends AstNode {\n    readonly $container: TerminalRule;\n    readonly $type: 'ReturnType';\n    name: PrimitiveType | string;\n}\n\nexport const ReturnType = 'ReturnType';\n\nexport function isReturnType(item: unknown): item is ReturnType {\n    return reflection.isInstance(item, ReturnType);\n}\n\nexport interface SimpleType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'SimpleType';\n    primitiveType?: PrimitiveType;\n    stringType?: string;\n    typeRef?: Reference;\n}\n\nexport const SimpleType = 'SimpleType';\n\nexport function isSimpleType(item: unknown): item is SimpleType {\n    return reflection.isInstance(item, SimpleType);\n}\n\nexport interface StringLiteral extends AstNode {\n    readonly $container: ArrayLiteral | TypeAttribute;\n    readonly $type: 'StringLiteral';\n    value: string;\n}\n\nexport const StringLiteral = 
'StringLiteral';\n\nexport function isStringLiteral(item: unknown): item is StringLiteral {\n    return reflection.isInstance(item, StringLiteral);\n}\n\nexport interface TerminalRule extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'TerminalRule';\n    definition: AbstractElement;\n    fragment: boolean;\n    hidden: boolean;\n    name: string;\n    type?: ReturnType;\n}\n\nexport const TerminalRule = 'TerminalRule';\n\nexport function isTerminalRule(item: unknown): item is TerminalRule {\n    return reflection.isInstance(item, TerminalRule);\n}\n\nexport interface Type extends AstNode {\n    readonly $container: Grammar;\n    readonly $type: 'Type';\n    name: string;\n    type: TypeDefinition;\n}\n\nexport const Type = 'Type';\n\nexport function isType(item: unknown): item is Type {\n    return reflection.isInstance(item, Type);\n}\n\nexport interface TypeAttribute extends AstNode {\n    readonly $container: Interface;\n    readonly $type: 'TypeAttribute';\n    defaultValue?: ValueLiteral;\n    isOptional: boolean;\n    name: FeatureName;\n    type: TypeDefinition;\n}\n\nexport const TypeAttribute = 'TypeAttribute';\n\nexport function isTypeAttribute(item: unknown): item is TypeAttribute {\n    return reflection.isInstance(item, TypeAttribute);\n}\n\nexport interface UnionType extends AstNode {\n    readonly $container: ArrayType | ReferenceType | Type | TypeAttribute | UnionType;\n    readonly $type: 'UnionType';\n    types: Array;\n}\n\nexport const UnionType = 'UnionType';\n\nexport function isUnionType(item: unknown): item is UnionType {\n    return reflection.isInstance(item, UnionType);\n}\n\nexport interface Action extends AbstractElement {\n    readonly $type: 'Action';\n    feature?: FeatureName;\n    inferredType?: InferredType;\n    operator?: '+=' | '=';\n    type?: Reference;\n}\n\nexport const Action = 'Action';\n\nexport function isAction(item: unknown): item is Action {\n    return reflection.isInstance(item, Action);\n}\n\nexport interface Alternatives extends AbstractElement {\n    readonly $type: 'Alternatives';\n    elements: Array;\n}\n\nexport const Alternatives = 'Alternatives';\n\nexport function isAlternatives(item: unknown): item is Alternatives {\n    return reflection.isInstance(item, Alternatives);\n}\n\nexport interface Assignment extends AbstractElement {\n    readonly $type: 'Assignment';\n    feature: FeatureName;\n    operator: '+=' | '=' | '?=';\n    terminal: AbstractElement;\n}\n\nexport const Assignment = 'Assignment';\n\nexport function isAssignment(item: unknown): item is Assignment {\n    return reflection.isInstance(item, Assignment);\n}\n\nexport interface CharacterRange extends AbstractElement {\n    readonly $type: 'CharacterRange';\n    left: Keyword;\n    right?: Keyword;\n}\n\nexport const CharacterRange = 'CharacterRange';\n\nexport function isCharacterRange(item: unknown): item is CharacterRange {\n    return reflection.isInstance(item, CharacterRange);\n}\n\nexport interface CrossReference extends AbstractElement {\n    readonly $type: 'CrossReference';\n    deprecatedSyntax: boolean;\n    terminal?: AbstractElement;\n    type: Reference;\n}\n\nexport const CrossReference = 'CrossReference';\n\nexport function isCrossReference(item: unknown): item is CrossReference {\n    return reflection.isInstance(item, CrossReference);\n}\n\nexport interface EndOfFile extends AbstractElement {\n    readonly $type: 'EndOfFile';\n}\n\nexport const EndOfFile = 'EndOfFile';\n\nexport function isEndOfFile(item: unknown): item is 
EndOfFile {\n    return reflection.isInstance(item, EndOfFile);\n}\n\nexport interface Group extends AbstractElement {\n    readonly $type: 'Group';\n    elements: Array;\n    guardCondition?: Condition;\n}\n\nexport const Group = 'Group';\n\nexport function isGroup(item: unknown): item is Group {\n    return reflection.isInstance(item, Group);\n}\n\nexport interface Keyword extends AbstractElement {\n    readonly $container: CharacterRange;\n    readonly $type: 'Keyword';\n    value: string;\n}\n\nexport const Keyword = 'Keyword';\n\nexport function isKeyword(item: unknown): item is Keyword {\n    return reflection.isInstance(item, Keyword);\n}\n\nexport interface NegatedToken extends AbstractElement {\n    readonly $type: 'NegatedToken';\n    terminal: AbstractElement;\n}\n\nexport const NegatedToken = 'NegatedToken';\n\nexport function isNegatedToken(item: unknown): item is NegatedToken {\n    return reflection.isInstance(item, NegatedToken);\n}\n\nexport interface RegexToken extends AbstractElement {\n    readonly $type: 'RegexToken';\n    regex: string;\n}\n\nexport const RegexToken = 'RegexToken';\n\nexport function isRegexToken(item: unknown): item is RegexToken {\n    return reflection.isInstance(item, RegexToken);\n}\n\nexport interface RuleCall extends AbstractElement {\n    readonly $type: 'RuleCall';\n    arguments: Array;\n    rule: Reference;\n}\n\nexport const RuleCall = 'RuleCall';\n\nexport function isRuleCall(item: unknown): item is RuleCall {\n    return reflection.isInstance(item, RuleCall);\n}\n\nexport interface TerminalAlternatives extends AbstractElement {\n    readonly $type: 'TerminalAlternatives';\n    elements: Array;\n}\n\nexport const TerminalAlternatives = 'TerminalAlternatives';\n\nexport function isTerminalAlternatives(item: unknown): item is TerminalAlternatives {\n    return reflection.isInstance(item, TerminalAlternatives);\n}\n\nexport interface TerminalGroup extends AbstractElement {\n    readonly $type: 'TerminalGroup';\n    elements: Array;\n}\n\nexport const TerminalGroup = 'TerminalGroup';\n\nexport function isTerminalGroup(item: unknown): item is TerminalGroup {\n    return reflection.isInstance(item, TerminalGroup);\n}\n\nexport interface TerminalRuleCall extends AbstractElement {\n    readonly $type: 'TerminalRuleCall';\n    rule: Reference;\n}\n\nexport const TerminalRuleCall = 'TerminalRuleCall';\n\nexport function isTerminalRuleCall(item: unknown): item is TerminalRuleCall {\n    return reflection.isInstance(item, TerminalRuleCall);\n}\n\nexport interface UnorderedGroup extends AbstractElement {\n    readonly $type: 'UnorderedGroup';\n    elements: Array;\n}\n\nexport const UnorderedGroup = 'UnorderedGroup';\n\nexport function isUnorderedGroup(item: unknown): item is UnorderedGroup {\n    return reflection.isInstance(item, UnorderedGroup);\n}\n\nexport interface UntilToken extends AbstractElement {\n    readonly $type: 'UntilToken';\n    terminal: AbstractElement;\n}\n\nexport const UntilToken = 'UntilToken';\n\nexport function isUntilToken(item: unknown): item is UntilToken {\n    return reflection.isInstance(item, UntilToken);\n}\n\nexport interface Wildcard extends AbstractElement {\n    readonly $type: 'Wildcard';\n}\n\nexport const Wildcard = 'Wildcard';\n\nexport function isWildcard(item: unknown): item is Wildcard {\n    return reflection.isInstance(item, Wildcard);\n}\n\nexport type LangiumGrammarAstType = {\n    AbstractElement: AbstractElement\n    AbstractRule: AbstractRule\n    AbstractType: AbstractType\n    Action: Action\n    
Alternatives: Alternatives\n    ArrayLiteral: ArrayLiteral\n    ArrayType: ArrayType\n    Assignment: Assignment\n    BooleanLiteral: BooleanLiteral\n    CharacterRange: CharacterRange\n    Condition: Condition\n    Conjunction: Conjunction\n    CrossReference: CrossReference\n    Disjunction: Disjunction\n    EndOfFile: EndOfFile\n    Grammar: Grammar\n    GrammarImport: GrammarImport\n    Group: Group\n    InferredType: InferredType\n    Interface: Interface\n    Keyword: Keyword\n    NamedArgument: NamedArgument\n    NegatedToken: NegatedToken\n    Negation: Negation\n    NumberLiteral: NumberLiteral\n    Parameter: Parameter\n    ParameterReference: ParameterReference\n    ParserRule: ParserRule\n    ReferenceType: ReferenceType\n    RegexToken: RegexToken\n    ReturnType: ReturnType\n    RuleCall: RuleCall\n    SimpleType: SimpleType\n    StringLiteral: StringLiteral\n    TerminalAlternatives: TerminalAlternatives\n    TerminalGroup: TerminalGroup\n    TerminalRule: TerminalRule\n    TerminalRuleCall: TerminalRuleCall\n    Type: Type\n    TypeAttribute: TypeAttribute\n    TypeDefinition: TypeDefinition\n    UnionType: UnionType\n    UnorderedGroup: UnorderedGroup\n    UntilToken: UntilToken\n    ValueLiteral: ValueLiteral\n    Wildcard: Wildcard\n}\n\nexport class LangiumGrammarAstReflection extends AbstractAstReflection {\n\n    getAllTypes(): string[] {\n        return ['AbstractElement', 'AbstractRule', 'AbstractType', 'Action', 'Alternatives', 'ArrayLiteral', 'ArrayType', 'Assignment', 'BooleanLiteral', 'CharacterRange', 'Condition', 'Conjunction', 'CrossReference', 'Disjunction', 'EndOfFile', 'Grammar', 'GrammarImport', 'Group', 'InferredType', 'Interface', 'Keyword', 'NamedArgument', 'NegatedToken', 'Negation', 'NumberLiteral', 'Parameter', 'ParameterReference', 'ParserRule', 'ReferenceType', 'RegexToken', 'ReturnType', 'RuleCall', 'SimpleType', 'StringLiteral', 'TerminalAlternatives', 'TerminalGroup', 'TerminalRule', 'TerminalRuleCall', 'Type', 'TypeAttribute', 'TypeDefinition', 'UnionType', 'UnorderedGroup', 'UntilToken', 'ValueLiteral', 'Wildcard'];\n    }\n\n    protected override computeIsSubtype(subtype: string, supertype: string): boolean {\n        switch (subtype) {\n            case Action:\n            case Alternatives:\n            case Assignment:\n            case CharacterRange:\n            case CrossReference:\n            case EndOfFile:\n            case Group:\n            case Keyword:\n            case NegatedToken:\n            case RegexToken:\n            case RuleCall:\n            case TerminalAlternatives:\n            case TerminalGroup:\n            case TerminalRuleCall:\n            case UnorderedGroup:\n            case UntilToken:\n            case Wildcard: {\n                return this.isSubtype(AbstractElement, supertype);\n            }\n            case ArrayLiteral:\n            case NumberLiteral:\n            case StringLiteral: {\n                return this.isSubtype(ValueLiteral, supertype);\n            }\n            case ArrayType:\n            case ReferenceType:\n            case SimpleType:\n            case UnionType: {\n                return this.isSubtype(TypeDefinition, supertype);\n            }\n            case BooleanLiteral: {\n                return this.isSubtype(Condition, supertype) || this.isSubtype(ValueLiteral, supertype);\n            }\n            case Conjunction:\n            case Disjunction:\n            case Negation:\n            case ParameterReference: {\n                return 
this.isSubtype(Condition, supertype);\n            }\n            case InferredType:\n            case Interface:\n            case Type: {\n                return this.isSubtype(AbstractType, supertype);\n            }\n            case ParserRule: {\n                return this.isSubtype(AbstractRule, supertype) || this.isSubtype(AbstractType, supertype);\n            }\n            case TerminalRule: {\n                return this.isSubtype(AbstractRule, supertype);\n            }\n            default: {\n                return false;\n            }\n        }\n    }\n\n    getReferenceType(refInfo: ReferenceInfo): string {\n        const referenceId = `${refInfo.container.$type}:${refInfo.property}`;\n        switch (referenceId) {\n            case 'Action:type':\n            case 'CrossReference:type':\n            case 'Interface:superTypes':\n            case 'ParserRule:returnType':\n            case 'SimpleType:typeRef': {\n                return AbstractType;\n            }\n            case 'Grammar:hiddenTokens':\n            case 'ParserRule:hiddenTokens':\n            case 'RuleCall:rule': {\n                return AbstractRule;\n            }\n            case 'Grammar:usedGrammars': {\n                return Grammar;\n            }\n            case 'NamedArgument:parameter':\n            case 'ParameterReference:parameter': {\n                return Parameter;\n            }\n            case 'TerminalRuleCall:rule': {\n                return TerminalRule;\n            }\n            default: {\n                throw new Error(`${referenceId} is not a valid reference id.`);\n            }\n        }\n    }\n\n    getTypeMetaData(type: string): TypeMetaData {\n        switch (type) {\n            case 'AbstractElement': {\n                return {\n                    name: 'AbstractElement',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'ArrayLiteral': {\n                return {\n                    name: 'ArrayLiteral',\n                    properties: [\n                        { name: 'elements', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'ArrayType': {\n                return {\n                    name: 'ArrayType',\n                    properties: [\n                        { name: 'elementType' }\n                    ]\n                };\n            }\n            case 'BooleanLiteral': {\n                return {\n                    name: 'BooleanLiteral',\n                    properties: [\n                        { name: 'true', defaultValue: false }\n                    ]\n                };\n            }\n            case 'Conjunction': {\n                return {\n                    name: 'Conjunction',\n                    properties: [\n                        { name: 'left' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'Disjunction': {\n                return {\n                    name: 'Disjunction',\n                    properties: [\n                        { name: 'left' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'Grammar': {\n                return {\n                    name: 'Grammar',\n                    properties: [\n                        { name: 'definesHiddenTokens', 
defaultValue: false },\n                        { name: 'hiddenTokens', defaultValue: [] },\n                        { name: 'imports', defaultValue: [] },\n                        { name: 'interfaces', defaultValue: [] },\n                        { name: 'isDeclared', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'rules', defaultValue: [] },\n                        { name: 'types', defaultValue: [] },\n                        { name: 'usedGrammars', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'GrammarImport': {\n                return {\n                    name: 'GrammarImport',\n                    properties: [\n                        { name: 'path' }\n                    ]\n                };\n            }\n            case 'InferredType': {\n                return {\n                    name: 'InferredType',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'Interface': {\n                return {\n                    name: 'Interface',\n                    properties: [\n                        { name: 'attributes', defaultValue: [] },\n                        { name: 'name' },\n                        { name: 'superTypes', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'NamedArgument': {\n                return {\n                    name: 'NamedArgument',\n                    properties: [\n                        { name: 'calledByName', defaultValue: false },\n                        { name: 'parameter' },\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'Negation': {\n                return {\n                    name: 'Negation',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'NumberLiteral': {\n                return {\n                    name: 'NumberLiteral',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'Parameter': {\n                return {\n                    name: 'Parameter',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'ParameterReference': {\n                return {\n                    name: 'ParameterReference',\n                    properties: [\n                        { name: 'parameter' }\n                    ]\n                };\n            }\n            case 'ParserRule': {\n                return {\n                    name: 'ParserRule',\n                    properties: [\n                        { name: 'dataType' },\n                        { name: 'definesHiddenTokens', defaultValue: false },\n                        { name: 'definition' },\n                        { name: 'entry', defaultValue: false },\n                        { name: 'fragment', defaultValue: false },\n                        { name: 'hiddenTokens', defaultValue: [] },\n                        { name: 'inferredType' },\n                        { name: 'name' },\n                        { name: 'parameters', defaultValue: [] },\n                        { name: 'returnType' },\n                        { name: 'wildcard', defaultValue: false 
}\n                    ]\n                };\n            }\n            case 'ReferenceType': {\n                return {\n                    name: 'ReferenceType',\n                    properties: [\n                        { name: 'referenceType' }\n                    ]\n                };\n            }\n            case 'ReturnType': {\n                return {\n                    name: 'ReturnType',\n                    properties: [\n                        { name: 'name' }\n                    ]\n                };\n            }\n            case 'SimpleType': {\n                return {\n                    name: 'SimpleType',\n                    properties: [\n                        { name: 'primitiveType' },\n                        { name: 'stringType' },\n                        { name: 'typeRef' }\n                    ]\n                };\n            }\n            case 'StringLiteral': {\n                return {\n                    name: 'StringLiteral',\n                    properties: [\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'TerminalRule': {\n                return {\n                    name: 'TerminalRule',\n                    properties: [\n                        { name: 'definition' },\n                        { name: 'fragment', defaultValue: false },\n                        { name: 'hidden', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'Type': {\n                return {\n                    name: 'Type',\n                    properties: [\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'TypeAttribute': {\n                return {\n                    name: 'TypeAttribute',\n                    properties: [\n                        { name: 'defaultValue' },\n                        { name: 'isOptional', defaultValue: false },\n                        { name: 'name' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'UnionType': {\n                return {\n                    name: 'UnionType',\n                    properties: [\n                        { name: 'types', defaultValue: [] }\n                    ]\n                };\n            }\n            case 'Action': {\n                return {\n                    name: 'Action',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'feature' },\n                        { name: 'inferredType' },\n                        { name: 'lookahead' },\n                        { name: 'operator' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'Alternatives': {\n                return {\n                    name: 'Alternatives',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Assignment': {\n                return {\n                    name: 'Assignment',\n                    properties: [\n                        { name: 'cardinality' },\n                
        { name: 'feature' },\n                        { name: 'lookahead' },\n                        { name: 'operator' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'CharacterRange': {\n                return {\n                    name: 'CharacterRange',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'left' },\n                        { name: 'lookahead' },\n                        { name: 'right' }\n                    ]\n                };\n            }\n            case 'CrossReference': {\n                return {\n                    name: 'CrossReference',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'deprecatedSyntax', defaultValue: false },\n                        { name: 'lookahead' },\n                        { name: 'terminal' },\n                        { name: 'type' }\n                    ]\n                };\n            }\n            case 'EndOfFile': {\n                return {\n                    name: 'EndOfFile',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Group': {\n                return {\n                    name: 'Group',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'guardCondition' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'Keyword': {\n                return {\n                    name: 'Keyword',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'value' }\n                    ]\n                };\n            }\n            case 'NegatedToken': {\n                return {\n                    name: 'NegatedToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'RegexToken': {\n                return {\n                    name: 'RegexToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'regex' }\n                    ]\n                };\n            }\n            case 'RuleCall': {\n                return {\n                    name: 'RuleCall',\n                    properties: [\n                        { name: 'arguments', defaultValue: [] },\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'rule' }\n                    ]\n                };\n            }\n            case 'TerminalAlternatives': {\n                return {\n                    name: 'TerminalAlternatives',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n         
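// ---------------------------------------------------------------------------
// Editor's aside (not part of the generated ast.ts captured above): a minimal
// usage sketch of the reflection API shown in this file. It assumes the
// exports above (isTerminalRule, TerminalRule, AbstractRule, Group,
// reflection) are importable from the generated module, and that `isSubtype`
// is publicly callable on the reflection instance, as the protected
// `computeIsSubtype` override suggests. `someNode` is hypothetical.
// ---------------------------------------------------------------------------
import type { AstNode } from 'langium';
// import { isTerminalRule, TerminalRule, AbstractRule, Group, reflection } from '<generated ast module>'; // assumed path

function describeNode(someNode: AstNode): void {
    // The generated guards narrow a plain AstNode to its concrete grammar type.
    if (isTerminalRule(someNode)) {
        console.log(`terminal rule ${someNode.name}, hidden: ${someNode.hidden}`);
    }
    // Subtype relations mirror the switch in computeIsSubtype above:
    console.log(reflection.isSubtype(TerminalRule, AbstractRule)); // true
    // Type metadata lists every property and, where defined, its default value:
    const meta = reflection.getTypeMetaData(Group);
    console.log(meta.properties.find(p => p.name === 'elements')?.defaultValue); // []
}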
   case 'TerminalGroup': {\n                return {\n                    name: 'TerminalGroup',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'TerminalRuleCall': {\n                return {\n                    name: 'TerminalRuleCall',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'rule' }\n                    ]\n                };\n            }\n            case 'UnorderedGroup': {\n                return {\n                    name: 'UnorderedGroup',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'elements', defaultValue: [] },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            case 'UntilToken': {\n                return {\n                    name: 'UntilToken',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' },\n                        { name: 'terminal' }\n                    ]\n                };\n            }\n            case 'Wildcard': {\n                return {\n                    name: 'Wildcard',\n                    properties: [\n                        { name: 'cardinality' },\n                        { name: 'lookahead' }\n                    ]\n                };\n            }\n            default: {\n                return {\n                    name: type,\n                    properties: []\n                };\n            }\n        }\n    }\n}\n\nexport const reflection = new LangiumGrammarAstReflection();\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Range } from 'vscode-languageserver-types';\nimport type { AstNode, AstReflection, CstNode, GenericAstNode, Mutable, PropertyType, Reference, ReferenceInfo } from '../syntax-tree.js';\nimport type { Stream, TreeStream } from './stream.js';\nimport type { LangiumDocument } from '../workspace/documents.js';\nimport { isAstNode, isReference } from '../syntax-tree.js';\nimport { DONE_RESULT, stream, StreamImpl, TreeStreamImpl } from './stream.js';\nimport { inRange } from './cst-utils.js';\n\n/**\n * Link the `$container` and other related properties of every AST node that is directly contained\n * in the given `node`.\n */\nexport function linkContentToContainer(node: AstNode): void {\n    for (const [name, value] of Object.entries(node)) {\n        if (!name.startsWith('$')) {\n            if (Array.isArray(value)) {\n                value.forEach((item, index) => {\n                    if (isAstNode(item)) {\n                        (item as Mutable).$container = node;\n                        (item as Mutable).$containerProperty = name;\n                        (item as Mutable).$containerIndex = index;\n                    }\n                });\n            } else if (isAstNode(value)) {\n                (value as Mutable).$container = node;\n 
               (value as Mutable).$containerProperty = name;\n            }\n        }\n    }\n}\n\n/**\n * Walk along the hierarchy of containers from the given AST node to the root and return the first\n * node that matches the type predicate. If the start node itself matches, it is returned.\n * If no container matches, `undefined` is returned.\n */\nexport function getContainerOfType(node: AstNode | undefined, typePredicate: (n: AstNode) => n is T): T | undefined {\n    let item = node;\n    while (item) {\n        if (typePredicate(item)) {\n            return item;\n        }\n        item = item.$container;\n    }\n    return undefined;\n}\n\n/**\n * Walk along the hierarchy of containers from the given AST node to the root and check for existence\n * of a container that matches the given predicate. The start node is included in the checks.\n */\nexport function hasContainerOfType(node: AstNode | undefined, predicate: (n: AstNode) => boolean): boolean {\n    let item = node;\n    while (item) {\n        if (predicate(item)) {\n            return true;\n        }\n        item = item.$container;\n    }\n    return false;\n}\n\n/**\n * Retrieve the document in which the given AST node is contained. A reference to the document is\n * usually held by the root node of the AST.\n *\n * @throws an error if the node is not contained in a document.\n */\nexport function getDocument(node: AstNode): LangiumDocument {\n    const rootNode = findRootNode(node);\n    const result = rootNode.$document;\n    if (!result) {\n        throw new Error('AST node has no document.');\n    }\n    return result as LangiumDocument;\n}\n\n/**\n * Returns the root node of the given AST node by following the `$container` references.\n */\nexport function findRootNode(node: AstNode): AstNode {\n    while (node.$container) {\n        node = node.$container;\n    }\n    return node;\n}\n\nexport interface AstStreamOptions {\n    /**\n     * Optional target range that the nodes in the stream need to intersect\n     */\n    range?: Range\n}\n\n/**\n * Create a stream of all AST nodes that are directly contained in the given node. 
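// ---------------------------------------------------------------------------
// Editor's aside (not part of ast-utils.ts above): a sketch of how the
// container-walking helpers just defined are typically used. `isGrammar` is
// assumed to be available alongside the other generated guards; `node` is a
// hypothetical AST node that already sits in a parsed Langium document.
// ---------------------------------------------------------------------------
import type { AstNode } from 'langium';

function containingGrammarName(node: AstNode): string | undefined {
    // Walks $container links upward until the predicate matches, or returns undefined.
    const grammar = getContainerOfType(node, isGrammar);
    return grammar?.name;
}

function documentUriOf(node: AstNode): string {
    // findRootNode follows $container to the root; getDocument then reads the
    // LangiumDocument attached to that root and throws if none is set.
    return getDocument(node).uri.toString();
}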
This includes\n * single-valued as well as multi-valued (array) properties.\n */\nexport function streamContents(node: AstNode, options?: AstStreamOptions): Stream {\n    if (!node) {\n        throw new Error('Node must be an AstNode.');\n    }\n    const range = options?.range;\n    type State = { keys: string[], keyIndex: number, arrayIndex: number };\n    return new StreamImpl(() => ({\n        keys: Object.keys(node),\n        keyIndex: 0,\n        arrayIndex: 0\n    }), state => {\n        while (state.keyIndex < state.keys.length) {\n            const property = state.keys[state.keyIndex];\n            if (!property.startsWith('$')) {\n                const value = (node as GenericAstNode)[property];\n                if (isAstNode(value)) {\n                    state.keyIndex++;\n                    if (isAstNodeInRange(value, range)) {\n                        return { done: false, value };\n                    }\n                } else if (Array.isArray(value)) {\n                    while (state.arrayIndex < value.length) {\n                        const index = state.arrayIndex++;\n                        const element = value[index];\n                        if (isAstNode(element) && isAstNodeInRange(element, range)) {\n                            return { done: false, value: element };\n                        }\n                    }\n                    state.arrayIndex = 0;\n                }\n            }\n            state.keyIndex++;\n        }\n        return DONE_RESULT;\n    });\n}\n\n/**\n * Create a stream of all AST nodes that are directly and indirectly contained in the given root node.\n * This does not include the root node itself.\n */\nexport function streamAllContents(root: AstNode, options?: AstStreamOptions): TreeStream {\n    if (!root) {\n        throw new Error('Root node must be an AstNode.');\n    }\n    return new TreeStreamImpl(root, node => streamContents(node, options));\n}\n\n/**\n * Create a stream of all AST nodes that are directly and indirectly contained in the given root node,\n * including the root node itself.\n */\nexport function streamAst(root: AstNode, options?: AstStreamOptions): TreeStream {\n    if (!root) {\n        throw new Error('Root node must be an AstNode.');\n    } else if (options?.range && !isAstNodeInRange(root, options.range)) {\n        // Return an empty stream if the root node isn't in range\n        return new TreeStreamImpl(root, () => []);\n    }\n    return new TreeStreamImpl(root, node => streamContents(node, options), { includeRoot: true });\n}\n\nfunction isAstNodeInRange(astNode: AstNode, range?: Range): boolean {\n    if (!range) {\n        return true;\n    }\n    const nodeRange = astNode.$cstNode?.range;\n    if (!nodeRange) {\n        return false;\n    }\n    return inRange(nodeRange, range);\n}\n\n/**\n * Create a stream of all cross-references that are held by the given AST node. 
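// ---------------------------------------------------------------------------
// Editor's aside (not part of the module above): a sketch of the streaming
// helpers defined here, assuming the stream type exposes `count()` and
// `toArray()` as in Langium's Stream API. `root` is a hypothetical AST root.
// ---------------------------------------------------------------------------
import type { AstNode } from 'langium';
import type { Range } from 'vscode-languageserver-types';

function countNodes(root: AstNode): number {
    // streamAst includes the root itself; streamAllContents would exclude it.
    return streamAst(root).count();
}

function nodesTouching(root: AstNode, range: Range): AstNode[] {
    // The optional range filters to nodes whose CST range intersects it.
    return streamAllContents(root, { range }).toArray();
}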
This includes\n * single-valued as well as multi-valued (array) properties.\n */\nexport function streamReferences(node: AstNode): Stream {\n    type State = { keys: string[], keyIndex: number, arrayIndex: number };\n    return new StreamImpl(() => ({\n        keys: Object.keys(node),\n        keyIndex: 0,\n        arrayIndex: 0\n    }), state => {\n        while (state.keyIndex < state.keys.length) {\n            const property = state.keys[state.keyIndex];\n            if (!property.startsWith('$')) {\n                const value = (node as GenericAstNode)[property];\n                if (isReference(value)) {\n                    state.keyIndex++;\n                    return { done: false, value: { reference: value, container: node, property } };\n                } else if (Array.isArray(value)) {\n                    while (state.arrayIndex < value.length) {\n                        const index = state.arrayIndex++;\n                        const element = value[index];\n                        if (isReference(element)) {\n                            return { done: false, value: { reference: element, container: node, property, index } };\n                        }\n                    }\n                    state.arrayIndex = 0;\n                }\n            }\n            state.keyIndex++;\n        }\n        return DONE_RESULT;\n    });\n}\n\n/**\n * Returns a Stream of references to the target node from the AstNode tree\n *\n * @param targetNode AstNode we are looking for\n * @param lookup AstNode where we search for references. If not provided, the root node of the document is used as the default value\n */\nexport function findLocalReferences(targetNode: AstNode, lookup = getDocument(targetNode).parseResult.value): Stream {\n    const refs: Reference[] = [];\n    streamAst(lookup).forEach(node => {\n        streamReferences(node).forEach(refInfo => {\n            if (refInfo.reference.ref === targetNode) {\n                refs.push(refInfo.reference);\n            }\n        });\n    });\n    return stream(refs);\n}\n\n/**\n * Assigns all mandatory AST properties to the specified node.\n *\n * @param reflection Reflection object used to gather mandatory properties for the node.\n * @param node Specified node is modified in place and properties are directly assigned.\n */\nexport function assignMandatoryProperties(reflection: AstReflection, node: AstNode): void {\n    const typeMetaData = reflection.getTypeMetaData(node.$type);\n    const genericNode = node as GenericAstNode;\n    for (const property of typeMetaData.properties) {\n        // Only set the value if the property is not already set and if it has a default value\n        if (property.defaultValue !== undefined && genericNode[property.name] === undefined) {\n            genericNode[property.name] = copyDefaultValue(property.defaultValue);\n        }\n    }\n}\n\nfunction copyDefaultValue(propertyType: PropertyType): PropertyType {\n    if (Array.isArray(propertyType)) {\n        return [...propertyType.map(copyDefaultValue)];\n    } else {\n        return propertyType;\n    }\n}\n\n/**\n * Creates a deep copy of the specified AST node.\n * The resulting copy will only contain semantically relevant information, such as the `$type` property and AST properties.\n *\n * References are copied without resolved cross reference. 
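// ---------------------------------------------------------------------------
// Editor's aside (not part of the module above): a sketch of the reference
// helpers just defined. `node` and `target` are hypothetical nodes from the
// same parsed Langium document.
// ---------------------------------------------------------------------------
import type { AstNode } from 'langium';

function referenceTexts(node: AstNode): string[] {
    // streamReferences yields ReferenceInfo objects: { reference, container, property, index? }.
    return streamReferences(node).map(info => info.reference.$refText).toArray();
}

function isReferencedLocally(target: AstNode): boolean {
    // With no lookup argument, the whole parsed document of `target` is searched.
    return !findLocalReferences(target).isEmpty();
}

// assignMandatoryProperties(reflection, node); // would fill e.g. `elements: []`
// for any unset property that has a defaultValue in getTypeMetaData.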
The specified function is used to rebuild them.\n */\nexport function copyAstNode(node: T, buildReference: (node: AstNode, property: string, refNode: CstNode | undefined, refText: string) => Reference): T {\n    const copy: GenericAstNode = { $type: node.$type };\n\n    for (const [name, value] of Object.entries(node)) {\n        if (!name.startsWith('$')) {\n            if (isAstNode(value)) {\n                copy[name] = copyAstNode(value, buildReference);\n            } else if (isReference(value)) {\n                copy[name] = buildReference(\n                    copy,\n                    name,\n                    value.$refNode,\n                    value.$refText\n                );\n            } else if (Array.isArray(value)) {\n                const copiedArray: unknown[] = [];\n                for (const element of value) {\n                    if (isAstNode(element)) {\n                        copiedArray.push(copyAstNode(element, buildReference));\n                    } else if (isReference(element)) {\n                        copiedArray.push(\n                            buildReference(\n                                copy,\n                                name,\n                                element.$refNode,\n                                element.$refText\n                            )\n                        );\n                    } else {\n                        copiedArray.push(element);\n                    }\n                }\n                copy[name] = copiedArray;\n            } else {\n                copy[name] = value;\n            }\n        }\n    }\n\n    linkContentToContainer(copy);\n    return copy as unknown as T;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Set, Group, Character, IRegExpAST } from '@chevrotain/regexp-to-ast';\nimport { RegExpParser, BaseRegExpVisitor } from '@chevrotain/regexp-to-ast';\n\nexport const NEWLINE_REGEXP = /\\r?\\n/gm;\n\nconst regexpParser = new RegExpParser();\n\n/**\n * This class is in charge of heuristically identifying start/end tokens of terminals.\n *\n * The way this works is by doing the following:\n * 1. Traverse the regular expression in the \"start state\"\n * 2. Add any encountered sets/single characters to the \"start regexp\"\n * 3. Once we encounter any variable-length content (i.e. with quantifiers such as +/?/*), we enter the \"end state\"\n * 4. In the end state, any sets/single characters are added to an \"end stack\".\n * 5. If we re-encounter any variable-length content we reset the end stack\n * 6. 
We continue visiting the regex until the end, reseting the end stack and rebuilding it as necessary\n *\n * After traversing a regular expression the `startRegexp/endRegexp` properties allow access to the stored start/end of the terminal\n */\nclass TerminalRegExpVisitor extends BaseRegExpVisitor {\n\n    private isStarting = true;\n    startRegexp: string;\n    private endRegexpStack: string[] = [];\n    multiline = false;\n    regex: string;\n\n    get endRegex(): string {\n        return this.endRegexpStack.join('');\n    }\n\n    reset(regex: string): void {\n        this.multiline = false;\n        this.regex = regex;\n        this.startRegexp = '';\n        this.isStarting = true;\n        this.endRegexpStack = [];\n    }\n\n    override visitGroup(node: Group) {\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        }\n    }\n\n    override visitCharacter(node: Character): void {\n        const char = String.fromCharCode(node.value);\n        if (!this.multiline && char === '\\n') {\n            this.multiline = true;\n        }\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        } else {\n            const escapedChar = escapeRegExp(char);\n            this.endRegexpStack.push(escapedChar);\n            if (this.isStarting) {\n                this.startRegexp += escapedChar;\n            }\n        }\n    }\n\n    override visitSet(node: Set): void {\n        if (!this.multiline) {\n            const set = this.regex.substring(node.loc.begin, node.loc.end);\n            const regex = new RegExp(set);\n            this.multiline = Boolean('\\n'.match(regex));\n        }\n        if (node.quantifier) {\n            this.isStarting = false;\n            this.endRegexpStack = [];\n        } else {\n            const set = this.regex.substring(node.loc.begin, node.loc.end);\n            this.endRegexpStack.push(set);\n            if (this.isStarting) {\n                this.startRegexp += set;\n            }\n        }\n    }\n\n    override visitChildren(node: IRegExpAST): void {\n        if (node.type === 'Group') {\n            // Ignore children of groups with quantifier (+/*/?)\n            // These groups are unrelated to start/end tokens of terminals\n            const group = node as Group;\n            if (group.quantifier) {\n                return;\n            }\n        }\n        super.visitChildren(node);\n    }\n}\n\nconst visitor = new TerminalRegExpVisitor();\n\nexport function getTerminalParts(regexp: RegExp | string): Array<{ start: string, end: string }> {\n    try {\n        if (typeof regexp !== 'string') {\n            regexp = regexp.source;\n        }\n        regexp = `/${regexp}/`;\n        const pattern = regexpParser.pattern(regexp);\n        const parts: Array<{ start: string, end: string }> = [];\n        for (const alternative of pattern.value.value) {\n            visitor.reset(regexp);\n            visitor.visit(alternative);\n            parts.push({\n                start: visitor.startRegexp,\n                end: visitor.endRegex\n            });\n        }\n        return parts;\n    } catch {\n        return [];\n    }\n}\n\nexport function isMultilineComment(regexp: RegExp | string): boolean {\n    try {\n        if (typeof regexp === 'string') {\n            regexp = new RegExp(regexp);\n        }\n        regexp = regexp.toString();\n        visitor.reset(regexp);\n        // Parsing the pattern might fail (since 
it's user code)\n        visitor.visit(regexpParser.pattern(regexp));\n        return visitor.multiline;\n    } catch {\n        return false;\n    }\n}\n\nexport function isWhitespace(value: RegExp | string): boolean {\n    const regexp = typeof value === 'string' ? new RegExp(value) : value;\n    return regexp.test(' ');\n}\n\nexport function escapeRegExp(value: string): string {\n    return value.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n\nexport function getCaseInsensitivePattern(keyword: string): string {\n    return Array.prototype.map.call(keyword, letter =>\n        /\\w/.test(letter) ? `[${letter.toLowerCase()}${letter.toUpperCase()}]` : escapeRegExp(letter)\n    ).join('');\n}\n\n/**\n * Determines whether the given input has a partial match with the specified regex.\n * @param regex The regex to partially match against\n * @param input The input string\n * @returns Whether any match exists.\n */\nexport function partialMatches(regex: RegExp | string, input: string): boolean {\n    const partial = partialRegExp(regex);\n    const match = input.match(partial);\n    return !!match && match[0].length > 0;\n}\n\n/**\n * Builds a partial regex from the input regex. A partial regex is able to match incomplete input strings. E.g.\n * a partial regex constructed from `/ab/` is able to match the string `a` without needing a following `b` character. However it won't match `b` alone.\n * @param regex The input regex to be converted.\n * @returns A partial regex constructed from the input regex.\n */\nexport function partialRegExp(regex: RegExp | string): RegExp {\n    if (typeof regex === 'string') {\n        regex = new RegExp(regex);\n    }\n    const re = regex, source = regex.source;\n    let i = 0;\n\n    function process() {\n        let result = '',\n            tmp;\n\n        function appendRaw(nbChars: number) {\n            result += source.substr(i, nbChars);\n            i += nbChars;\n        }\n\n        function appendOptional(nbChars: number) {\n            result += '(?:' + source.substr(i, nbChars) + '|$)';\n            i += nbChars;\n        }\n\n        while (i < source.length) {\n            switch (source[i]) {\n                case '\\\\':\n                    switch (source[i + 1]) {\n                        case 'c':\n                            appendOptional(3);\n                            break;\n                        case 'x':\n                            appendOptional(4);\n                            break;\n                        case 'u':\n                            if (re.unicode) {\n                                if (source[i + 2] === '{') {\n                                    appendOptional(source.indexOf('}', i) - i + 1);\n                                } else {\n                                    appendOptional(6);\n                                }\n                            } else {\n                                appendOptional(2);\n                            }\n                            break;\n                        case 'p':\n                        case 'P':\n                            if (re.unicode) {\n                                appendOptional(source.indexOf('}', i) - i + 1);\n                            } else {\n                                appendOptional(2);\n                            }\n                            break;\n                        case 'k':\n                            appendOptional(source.indexOf('>', i) - i + 1);\n                            break;\n                        default:\n     
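// ---------------------------------------------------------------------------
// Editor's aside (not part of regexp-utils.ts above): a sketch exercising the
// helpers defined in this file; the printed values are what the definitions
// above should produce, shown here only as illustration.
// ---------------------------------------------------------------------------
function demoRegexpUtils(): void {
    // escapeRegExp makes a literal string safe to embed in a pattern.
    console.log(escapeRegExp('a.b'));              // a\.b
    // Each word character of a keyword becomes a two-letter character class.
    console.log(getCaseInsensitivePattern('if'));  // [iI][fF]
    // partialMatches accepts prefixes of a would-be full match.
    console.log(partialMatches(/ab+c/, 'ab'));     // true
    console.log(partialMatches(/ab+c/, 'x'));      // false
    // getTerminalParts extracts fixed start/end tokens, e.g. for a block-comment terminal.
    console.log(getTerminalParts(/\/\*[\s\S]*?\*\//)); // e.g. [{ start: '/\\*', end: '\\*/' }]
}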
                       appendOptional(2);\n                            break;\n                    }\n                    break;\n\n                case '[':\n                    tmp = /\\[(?:\\\\.|.)*?\\]/g;\n                    tmp.lastIndex = i;\n                    tmp = tmp.exec(source) || [];\n                    appendOptional(tmp[0].length);\n                    break;\n\n                case '|':\n                case '^':\n                case '$':\n                case '*':\n                case '+':\n                case '?':\n                    appendRaw(1);\n                    break;\n                case '{':\n                    tmp = /\\{\\d+,?\\d*\\}/g;\n                    tmp.lastIndex = i;\n                    tmp = tmp.exec(source);\n                    if (tmp) {\n                        appendRaw(tmp[0].length);\n                    } else {\n                        appendOptional(1);\n                    }\n                    break;\n                case '(':\n                    if (source[i + 1] === '?') {\n                        switch (source[i + 2]) {\n                            case ':':\n                                result += '(?:';\n                                i += 3;\n                                result += process() + '|$)';\n                                break;\n                            case '=':\n                                result += '(?=';\n                                i += 3;\n                                result += process() + ')';\n                                break;\n                            case '!':\n                                tmp = i;\n                                i += 3;\n                                process();\n                                result += source.substr(tmp, i - tmp);\n                                break;\n                            case '<':\n                                switch (source[i + 3]) {\n                                    case '=':\n                                    case '!':\n                                        tmp = i;\n                                        i += 4;\n                                        process();\n                                        result += source.substr(tmp, i - tmp);\n                                        break;\n                                    default:\n                                        appendRaw(source.indexOf('>', i) - i + 1);\n                                        result += process() + '|$)';\n                                        break;\n                                }\n                                break;\n                        }\n                    } else {\n                        appendRaw(1);\n                        result += process() + '|$)';\n                    }\n                    break;\n                case ')':\n                    ++i;\n                    return result;\n                default:\n                    appendOptional(1);\n                    break;\n            }\n        }\n\n        return result;\n    }\n\n    return new RegExp(process(), regex.flags);\n}\n", "import type { Character, IRegExpAST, RegExpFlags } from \"../types\";\n\nexport function cc(char: string): number {\n  return char.charCodeAt(0);\n}\n\nexport function insertToSet(item: T | T[], set: T[]) {\n  if (Array.isArray(item)) {\n    item.forEach(function (subItem) {\n      set.push(subItem);\n    });\n  } else {\n    set.push(item);\n  }\n}\n\nexport function addFlag(\n  flagObj: RegExpFlags,\n  flagKey: 
keyof Omit,\n) {\n  if (flagObj[flagKey] === true) {\n    throw \"duplicate flag \" + flagKey;\n  }\n\n  const x: boolean = flagObj[flagKey];\n  flagObj[flagKey] = true;\n}\n\nexport function ASSERT_EXISTS(obj: any): obj is T {\n  // istanbul ignore next\n  if (obj === undefined) {\n    throw Error(\"Internal Error - Should never get here!\");\n  }\n  return true;\n}\n\n// istanbul ignore next\nexport function ASSERT_NEVER_REACH_HERE(): any {\n  throw Error(\"Internal Error - Should never get here!\");\n}\n\nexport function isCharacter(obj: { type: string }): obj is Character {\n  return obj[\"type\"] === \"Character\";\n}\n", "import { cc } from \"./utils.js\";\n\nexport const digitsCharCodes: number[] = [];\nfor (let i = cc(\"0\"); i <= cc(\"9\"); i++) {\n  digitsCharCodes.push(i);\n}\n\nexport const wordCharCodes: number[] = [cc(\"_\")].concat(digitsCharCodes);\nfor (let i = cc(\"a\"); i <= cc(\"z\"); i++) {\n  wordCharCodes.push(i);\n}\n\nfor (let i = cc(\"A\"); i <= cc(\"Z\"); i++) {\n  wordCharCodes.push(i);\n}\n\n// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp#character-classes\nexport const whitespaceCodes: number[] = [\n  cc(\" \"),\n  cc(\"\\f\"),\n  cc(\"\\n\"),\n  cc(\"\\r\"),\n  cc(\"\\t\"),\n  cc(\"\\v\"),\n  cc(\"\\t\"),\n  cc(\"\\u00a0\"),\n  cc(\"\\u1680\"),\n  cc(\"\\u2000\"),\n  cc(\"\\u2001\"),\n  cc(\"\\u2002\"),\n  cc(\"\\u2003\"),\n  cc(\"\\u2004\"),\n  cc(\"\\u2005\"),\n  cc(\"\\u2006\"),\n  cc(\"\\u2007\"),\n  cc(\"\\u2008\"),\n  cc(\"\\u2009\"),\n  cc(\"\\u200a\"),\n  cc(\"\\u2028\"),\n  cc(\"\\u2029\"),\n  cc(\"\\u202f\"),\n  cc(\"\\u205f\"),\n  cc(\"\\u3000\"),\n  cc(\"\\ufeff\"),\n];\n", "import type {\n  Alternative,\n  Assertion,\n  Atom,\n  Character,\n  Disjunction,\n  Group,\n  GroupBackReference,\n  Location,\n  Quantifier,\n  Range,\n  RegExpFlags,\n  RegExpPattern,\n  Set,\n  Term,\n} from \"../types\";\nimport {\n  addFlag,\n  ASSERT_EXISTS,\n  ASSERT_NEVER_REACH_HERE,\n  cc,\n  insertToSet,\n  isCharacter,\n} from \"./utils.js\";\nimport {\n  digitsCharCodes,\n  whitespaceCodes,\n  wordCharCodes,\n} from \"./character-classes.js\";\n\n// consts and utilities\nconst hexDigitPattern = /[0-9a-fA-F]/;\nconst decimalPattern = /[0-9]/;\nconst decimalPatternNoZero = /[1-9]/;\n\n// https://hackernoon.com/the-madness-of-parsing-real-world-javascript-regexps-d9ee336df983\n// https://www.ecma-international.org/ecma-262/8.0/index.html#prod-Pattern\nexport class RegExpParser {\n  protected idx: number = 0;\n  protected input: string = \"\";\n  protected groupIdx: number = 0;\n\n  protected saveState() {\n    return {\n      idx: this.idx,\n      input: this.input,\n      groupIdx: this.groupIdx,\n    };\n  }\n\n  protected restoreState(newState: {\n    idx: number;\n    input: string;\n    groupIdx: number;\n  }) {\n    this.idx = newState.idx;\n    this.input = newState.input;\n    this.groupIdx = newState.groupIdx;\n  }\n\n  public pattern(input: string): RegExpPattern {\n    // parser state\n    this.idx = 0;\n    this.input = input;\n    this.groupIdx = 0;\n\n    this.consumeChar(\"/\");\n    const value = this.disjunction();\n    this.consumeChar(\"/\");\n\n    const flags: RegExpFlags = {\n      type: \"Flags\",\n      loc: { begin: this.idx, end: input.length },\n      global: false,\n      ignoreCase: false,\n      multiLine: false,\n      unicode: false,\n      sticky: false,\n    };\n\n    while (this.isRegExpFlag()) {\n      switch (this.popChar()) {\n        case \"g\":\n          addFlag(flags, \"global\");\n 
         break;\n        case \"i\":\n          addFlag(flags, \"ignoreCase\");\n          break;\n        case \"m\":\n          addFlag(flags, \"multiLine\");\n          break;\n        case \"u\":\n          addFlag(flags, \"unicode\");\n          break;\n        case \"y\":\n          addFlag(flags, \"sticky\");\n          break;\n      }\n    }\n\n    if (this.idx !== this.input.length) {\n      throw Error(\"Redundant input: \" + this.input.substring(this.idx));\n    }\n    return {\n      type: \"Pattern\",\n      flags: flags,\n      value: value,\n      loc: this.loc(0),\n    };\n  }\n\n  protected disjunction(): Disjunction {\n    const alts = [];\n    const begin = this.idx;\n\n    alts.push(this.alternative());\n\n    while (this.peekChar() === \"|\") {\n      this.consumeChar(\"|\");\n      alts.push(this.alternative());\n    }\n\n    return { type: \"Disjunction\", value: alts, loc: this.loc(begin) };\n  }\n\n  protected alternative(): Alternative {\n    const terms = [];\n    const begin = this.idx;\n\n    while (this.isTerm()) {\n      terms.push(this.term());\n    }\n\n    return { type: \"Alternative\", value: terms, loc: this.loc(begin) };\n  }\n\n  protected term(): Term {\n    if (this.isAssertion()) {\n      return this.assertion();\n    } else {\n      return this.atom();\n    }\n  }\n\n  protected assertion(): Assertion {\n    const begin = this.idx;\n    switch (this.popChar()) {\n      case \"^\":\n        return {\n          type: \"StartAnchor\",\n          loc: this.loc(begin),\n        };\n      case \"$\":\n        return { type: \"EndAnchor\", loc: this.loc(begin) };\n      // '\\b' or '\\B'\n      case \"\\\\\":\n        switch (this.popChar()) {\n          case \"b\":\n            return {\n              type: \"WordBoundary\",\n              loc: this.loc(begin),\n            };\n          case \"B\":\n            return {\n              type: \"NonWordBoundary\",\n              loc: this.loc(begin),\n            };\n        }\n        // istanbul ignore next\n        throw Error(\"Invalid Assertion Escape\");\n      // '(?=' or '(?!'\n      case \"(\":\n        this.consumeChar(\"?\");\n\n        let type: \"Lookahead\" | \"NegativeLookahead\" | undefined;\n        switch (this.popChar()) {\n          case \"=\":\n            type = \"Lookahead\";\n            break;\n          case \"!\":\n            type = \"NegativeLookahead\";\n            break;\n        }\n        ASSERT_EXISTS(type);\n\n        const disjunction = this.disjunction();\n\n        this.consumeChar(\")\");\n\n        return {\n          type: type!,\n          value: disjunction,\n          loc: this.loc(begin),\n        };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected quantifier(\n    isBacktracking: boolean = false,\n  ): Quantifier | undefined {\n    let range: Partial | undefined = undefined;\n    const begin = this.idx;\n    switch (this.popChar()) {\n      case \"*\":\n        range = {\n          atLeast: 0,\n          atMost: Infinity,\n        };\n        break;\n      case \"+\":\n        range = {\n          atLeast: 1,\n          atMost: Infinity,\n        };\n        break;\n      case \"?\":\n        range = {\n          atLeast: 0,\n          atMost: 1,\n        };\n        break;\n      case \"{\":\n        const atLeast = this.integerIncludingZero();\n        switch (this.popChar()) {\n          case \"}\":\n            range = {\n              atLeast: atLeast,\n              atMost: atLeast,\n            };\n         
   break;\n          case \",\":\n            let atMost;\n            if (this.isDigit()) {\n              atMost = this.integerIncludingZero();\n              range = {\n                atLeast: atLeast,\n                atMost: atMost,\n              };\n            } else {\n              range = {\n                atLeast: atLeast,\n                atMost: Infinity,\n              };\n            }\n            this.consumeChar(\"}\");\n            break;\n        }\n        // throwing exceptions from \"ASSERT_EXISTS\" during backtracking\n        // causes severe performance degradations\n        if (isBacktracking === true && range === undefined) {\n          return undefined;\n        }\n        ASSERT_EXISTS(range);\n        break;\n    }\n\n    // throwing exceptions from \"ASSERT_EXISTS\" during backtracking\n    // causes severe performance degradations\n    if (isBacktracking === true && range === undefined) {\n      return undefined;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(range)) {\n      if (this.peekChar(0) === \"?\") {\n        this.consumeChar(\"?\");\n        range.greedy = false;\n      } else {\n        range.greedy = true;\n      }\n\n      range.type = \"Quantifier\";\n      range.loc = this.loc(begin);\n      return range as Quantifier;\n    }\n  }\n\n  protected atom(): Atom {\n    let atom: Omit | undefined;\n    const begin = this.idx;\n    switch (this.peekChar()) {\n      case \".\":\n        atom = this.dotAll();\n        break;\n      case \"\\\\\":\n        atom = this.atomEscape();\n        break;\n      case \"[\":\n        atom = this.characterClass();\n        break;\n      case \"(\":\n        atom = this.group();\n        break;\n    }\n\n    if (atom === undefined && this.isPatternCharacter()) {\n      atom = this.patternCharacter();\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(atom)) {\n      atom.loc = this.loc(begin);\n\n      if (this.isQuantifier()) {\n        atom.quantifier = this.quantifier();\n      }\n\n      return atom;\n    }\n\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected dotAll(): Omit {\n    this.consumeChar(\".\");\n    return {\n      type: \"Set\",\n      complement: true,\n      value: [cc(\"\\n\"), cc(\"\\r\"), cc(\"\\u2028\"), cc(\"\\u2029\")],\n    };\n  }\n\n  protected atomEscape(): Omit {\n    this.consumeChar(\"\\\\\");\n\n    switch (this.peekChar()) {\n      case \"1\":\n      case \"2\":\n      case \"3\":\n      case \"4\":\n      case \"5\":\n      case \"6\":\n      case \"7\":\n      case \"8\":\n      case \"9\":\n        return this.decimalEscapeAtom();\n      case \"d\":\n      case \"D\":\n      case \"s\":\n      case \"S\":\n      case \"w\":\n      case \"W\":\n        return this.characterClassEscape();\n      case \"f\":\n      case \"n\":\n      case \"r\":\n      case \"t\":\n      case \"v\":\n        return this.controlEscapeAtom();\n      case \"c\":\n        return this.controlLetterEscapeAtom();\n      case \"0\":\n        return this.nulCharacterAtom();\n      case \"x\":\n        return this.hexEscapeSequenceAtom();\n      case \"u\":\n        return this.regExpUnicodeEscapeSequenceAtom();\n      default:\n        return this.identityEscapeAtom();\n    }\n  }\n\n  protected decimalEscapeAtom(): Omit {\n    const value = this.positiveInteger();\n\n    return { type: \"GroupBackReference\", value: value };\n  }\n\n  protected characterClassEscape(): Omit {\n    let set: (number | Range)[] | undefined;\n    let complement = 
false;\n    switch (this.popChar()) {\n      case \"d\":\n        set = digitsCharCodes;\n        break;\n      case \"D\":\n        set = digitsCharCodes;\n        complement = true;\n        break;\n      case \"s\":\n        set = whitespaceCodes;\n        break;\n      case \"S\":\n        set = whitespaceCodes;\n        complement = true;\n        break;\n      case \"w\":\n        set = wordCharCodes;\n        break;\n      case \"W\":\n        set = wordCharCodes;\n        complement = true;\n        break;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(set)) {\n      return { type: \"Set\", value: set, complement: complement };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected controlEscapeAtom(): Omit {\n    let escapeCode;\n    switch (this.popChar()) {\n      case \"f\":\n        escapeCode = cc(\"\\f\");\n        break;\n      case \"n\":\n        escapeCode = cc(\"\\n\");\n        break;\n      case \"r\":\n        escapeCode = cc(\"\\r\");\n        break;\n      case \"t\":\n        escapeCode = cc(\"\\t\");\n        break;\n      case \"v\":\n        escapeCode = cc(\"\\v\");\n        break;\n    }\n\n    // istanbul ignore else\n    if (ASSERT_EXISTS(escapeCode)) {\n      return { type: \"Character\", value: escapeCode };\n    }\n    // istanbul ignore next\n    return ASSERT_NEVER_REACH_HERE();\n  }\n\n  protected controlLetterEscapeAtom(): Omit {\n    this.consumeChar(\"c\");\n    const letter = this.popChar();\n    if (/[a-zA-Z]/.test(letter) === false) {\n      throw Error(\"Invalid \");\n    }\n\n    const letterCode = letter.toUpperCase().charCodeAt(0) - 64;\n    return { type: \"Character\", value: letterCode };\n  }\n\n  protected nulCharacterAtom(): Omit {\n    // TODO implement '[lookahead \u2209 DecimalDigit]'\n    // TODO: for the deprecated octal escape sequence\n    this.consumeChar(\"0\");\n    return { type: \"Character\", value: cc(\"\\0\") };\n  }\n\n  protected hexEscapeSequenceAtom(): Omit {\n    this.consumeChar(\"x\");\n    return this.parseHexDigits(2);\n  }\n\n  protected regExpUnicodeEscapeSequenceAtom(): Omit {\n    this.consumeChar(\"u\");\n    return this.parseHexDigits(4);\n  }\n\n  protected identityEscapeAtom(): Omit {\n    // TODO: implement \"SourceCharacter but not UnicodeIDContinue\"\n    // // http://unicode.org/reports/tr31/#Specific_Character_Adjustments\n    const escapedChar = this.popChar();\n    return { type: \"Character\", value: cc(escapedChar) };\n  }\n\n  protected classPatternCharacterAtom(): Omit {\n    switch (this.peekChar()) {\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n      // istanbul ignore next\n      case \"\\\\\":\n      // istanbul ignore next\n      case \"]\":\n        throw Error(\"TBD\");\n      default:\n        const nextChar = this.popChar();\n        return { type: \"Character\", value: cc(nextChar) };\n    }\n  }\n\n  protected characterClass(): Omit {\n    const set: (number | Range)[] = [];\n    let complement = false;\n    this.consumeChar(\"[\");\n    if (this.peekChar(0) === \"^\") {\n      this.consumeChar(\"^\");\n      complement = true;\n    }\n\n    while (this.isClassAtom()) {\n      const from = this.classAtom();\n      const isFromSingleChar = from.type === \"Character\";\n      if (isCharacter(from) && this.isRangeDash()) {\n        this.consumeChar(\"-\");\n       
 const to = this.classAtom();\n        const isToSingleChar = to.type === \"Character\";\n\n        // a range can only be used when both sides are single characters\n        if (isCharacter(to)) {\n          if (to.value < from.value) {\n            throw Error(\"Range out of order in character class\");\n          }\n          set.push({ from: from.value, to: to.value });\n        } else {\n          // literal dash\n          insertToSet(from.value, set);\n          set.push(cc(\"-\"));\n          insertToSet(to.value, set);\n        }\n      } else {\n        insertToSet(from.value, set);\n      }\n    }\n\n    this.consumeChar(\"]\");\n\n    return { type: \"Set\", complement: complement, value: set };\n  }\n\n  protected classAtom(): Omit {\n    switch (this.peekChar()) {\n      // istanbul ignore next\n      case \"]\":\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n        throw Error(\"TBD\");\n      case \"\\\\\":\n        return this.classEscape();\n      default:\n        return this.classPatternCharacterAtom();\n    }\n  }\n\n  protected classEscape(): Omit {\n    this.consumeChar(\"\\\\\");\n    switch (this.peekChar()) {\n      // Matches a backspace.\n      // (Not to be confused with \\b word boundary outside characterClass)\n      case \"b\":\n        this.consumeChar(\"b\");\n        return { type: \"Character\", value: cc(\"\\u0008\") };\n      case \"d\":\n      case \"D\":\n      case \"s\":\n      case \"S\":\n      case \"w\":\n      case \"W\":\n        return this.characterClassEscape();\n      case \"f\":\n      case \"n\":\n      case \"r\":\n      case \"t\":\n      case \"v\":\n        return this.controlEscapeAtom();\n      case \"c\":\n        return this.controlLetterEscapeAtom();\n      case \"0\":\n        return this.nulCharacterAtom();\n      case \"x\":\n        return this.hexEscapeSequenceAtom();\n      case \"u\":\n        return this.regExpUnicodeEscapeSequenceAtom();\n      default:\n        return this.identityEscapeAtom();\n    }\n  }\n\n  protected group(): Omit {\n    let capturing = true;\n    this.consumeChar(\"(\");\n    switch (this.peekChar(0)) {\n      case \"?\":\n        this.consumeChar(\"?\");\n        this.consumeChar(\":\");\n        capturing = false;\n        break;\n      default:\n        this.groupIdx++;\n        break;\n    }\n    const value = this.disjunction();\n    this.consumeChar(\")\");\n\n    const groupAst: Omit = {\n      type: \"Group\",\n      capturing: capturing,\n      value: value,\n    };\n\n    if (capturing) {\n      groupAst[\"idx\"] = this.groupIdx;\n    }\n\n    return groupAst;\n  }\n\n  protected positiveInteger(): number {\n    let number = this.popChar();\n\n    // istanbul ignore next - can't ever get here due to previous lookahead checks\n    // still implementing this error checking in case this ever changes.\n    if (decimalPatternNoZero.test(number) === false) {\n      throw Error(\"Expecting a positive integer\");\n    }\n\n    while (decimalPattern.test(this.peekChar(0))) {\n      number += this.popChar();\n    }\n\n    return parseInt(number, 10);\n  }\n\n  protected integerIncludingZero(): number {\n    let number = this.popChar();\n    if (decimalPattern.test(number) === false) {\n      throw Error(\"Expecting an integer\");\n    }\n\n    while (decimalPattern.test(this.peekChar(0))) {\n      number += this.popChar();\n    
}\n\n    return parseInt(number, 10);\n  }\n\n  protected patternCharacter(): Omit {\n    const nextChar = this.popChar();\n    switch (nextChar) {\n      // istanbul ignore next\n      case \"\\n\":\n      // istanbul ignore next\n      case \"\\r\":\n      // istanbul ignore next\n      case \"\\u2028\":\n      // istanbul ignore next\n      case \"\\u2029\":\n      // istanbul ignore next\n      case \"^\":\n      // istanbul ignore next\n      case \"$\":\n      // istanbul ignore next\n      case \"\\\\\":\n      // istanbul ignore next\n      case \".\":\n      // istanbul ignore next\n      case \"*\":\n      // istanbul ignore next\n      case \"+\":\n      // istanbul ignore next\n      case \"?\":\n      // istanbul ignore next\n      case \"(\":\n      // istanbul ignore next\n      case \")\":\n      // istanbul ignore next\n      case \"[\":\n      // istanbul ignore next\n      case \"|\":\n        // istanbul ignore next\n        throw Error(\"TBD\");\n      default:\n        return { type: \"Character\", value: cc(nextChar) };\n    }\n  }\n  protected isRegExpFlag(): boolean {\n    switch (this.peekChar(0)) {\n      case \"g\":\n      case \"i\":\n      case \"m\":\n      case \"u\":\n      case \"y\":\n        return true;\n      default:\n        return false;\n    }\n  }\n\n  protected isRangeDash(): boolean {\n    return this.peekChar() === \"-\" && this.isClassAtom(1);\n  }\n\n  protected isDigit(): boolean {\n    return decimalPattern.test(this.peekChar(0));\n  }\n\n  protected isClassAtom(howMuch = 0): boolean {\n    switch (this.peekChar(howMuch)) {\n      case \"]\":\n      case \"\\n\":\n      case \"\\r\":\n      case \"\\u2028\":\n      case \"\\u2029\":\n        return false;\n      default:\n        return true;\n    }\n  }\n\n  protected isTerm() {\n    return this.isAtom() || this.isAssertion();\n  }\n\n  protected isAtom(): boolean {\n    if (this.isPatternCharacter()) {\n      return true;\n    }\n\n    switch (this.peekChar(0)) {\n      case \".\":\n      case \"\\\\\": // atomEscape\n      case \"[\": // characterClass\n      // TODO: isAtom must be called before isAssertion - disambiguate\n      case \"(\": // group\n        return true;\n      default:\n        return false;\n    }\n  }\n\n  protected isAssertion(): boolean {\n    switch (this.peekChar(0)) {\n      case \"^\":\n      case \"$\":\n        return true;\n      // '\\b' or '\\B'\n      case \"\\\\\":\n        switch (this.peekChar(1)) {\n          case \"b\":\n          case \"B\":\n            return true;\n          default:\n            return false;\n        }\n      // '(?=' or '(?!'\n      case \"(\":\n        return (\n          this.peekChar(1) === \"?\" &&\n          (this.peekChar(2) === \"=\" || this.peekChar(2) === \"!\")\n        );\n      default:\n        return false;\n    }\n  }\n\n  protected isQuantifier(): boolean {\n    const prevState = this.saveState();\n    try {\n      return this.quantifier(true) !== undefined;\n    } catch (e) {\n      return false;\n    } finally {\n      this.restoreState(prevState);\n    }\n  }\n\n  protected isPatternCharacter(): boolean {\n    switch (this.peekChar()) {\n      case \"^\":\n      case \"$\":\n      case \"\\\\\":\n      case \".\":\n      case \"*\":\n      case \"+\":\n      case \"?\":\n      case \"(\":\n      case \")\":\n      case \"[\":\n      case \"|\":\n      case \"/\":\n      case \"\\n\":\n      case \"\\r\":\n      case \"\\u2028\":\n      case \"\\u2029\":\n        return false;\n      default:\n        return 
true;\n    }\n  }\n\n  protected parseHexDigits(howMany: number): Omit {\n    let hexString = \"\";\n    for (let i = 0; i < howMany; i++) {\n      const hexChar = this.popChar();\n      if (hexDigitPattern.test(hexChar) === false) {\n        throw Error(\"Expecting a HexDecimal digits\");\n      }\n      hexString += hexChar;\n    }\n    const charCode = parseInt(hexString, 16);\n    return { type: \"Character\", value: charCode };\n  }\n\n  protected peekChar(howMuch = 0): string {\n    return this.input[this.idx + howMuch];\n  }\n\n  protected popChar(): string {\n    const nextChar = this.peekChar(0);\n    this.consumeChar(undefined);\n    return nextChar;\n  }\n\n  protected consumeChar(char: string | undefined): void {\n    if (char !== undefined && this.input[this.idx] !== char) {\n      throw Error(\n        \"Expected: '\" +\n          char +\n          \"' but found: '\" +\n          this.input[this.idx] +\n          \"' at offset: \" +\n          this.idx,\n      );\n    }\n\n    if (this.idx >= this.input.length) {\n      throw Error(\"Unexpected end of input\");\n    }\n    this.idx++;\n  }\n\n  protected loc(begin: number): Location {\n    return { begin: begin, end: this.idx };\n  }\n}\n", "import type {\n  Alternative,\n  Assertion,\n  Character,\n  Disjunction,\n  Group,\n  GroupBackReference,\n  IRegExpAST,\n  Quantifier,\n  RegExpAstPart,\n  RegExpFlags,\n  RegExpPattern,\n  Set,\n} from \"../types\";\n\nexport class BaseRegExpVisitor {\n  public visitChildren(node: IRegExpAST) {\n    for (const key in node) {\n      const child = (node as any)[key];\n      /* istanbul ignore else */\n      if (node.hasOwnProperty(key)) {\n        if (child.type !== undefined) {\n          this.visit(child);\n        } else if (Array.isArray(child)) {\n          child.forEach((subChild) => {\n            this.visit(subChild);\n          }, this);\n        }\n      }\n    }\n  }\n\n  public visit(node: RegExpAstPart): void {\n    switch (node.type) {\n      case \"Pattern\":\n        this.visitPattern(node);\n        break;\n      case \"Flags\":\n        this.visitFlags(node);\n        break;\n      case \"Disjunction\":\n        this.visitDisjunction(node);\n        break;\n      case \"Alternative\":\n        this.visitAlternative(node);\n        break;\n      case \"StartAnchor\":\n        this.visitStartAnchor(node);\n        break;\n      case \"EndAnchor\":\n        this.visitEndAnchor(node);\n        break;\n      case \"WordBoundary\":\n        this.visitWordBoundary(node);\n        break;\n      case \"NonWordBoundary\":\n        this.visitNonWordBoundary(node);\n        break;\n      case \"Lookahead\":\n        this.visitLookahead(node);\n        break;\n      case \"NegativeLookahead\":\n        this.visitNegativeLookahead(node);\n        break;\n      case \"Character\":\n        this.visitCharacter(node);\n        break;\n      case \"Set\":\n        this.visitSet(node);\n        break;\n      case \"Group\":\n        this.visitGroup(node);\n        break;\n      case \"GroupBackReference\":\n        this.visitGroupBackReference(node);\n        break;\n      case \"Quantifier\":\n        this.visitQuantifier(node);\n        break;\n    }\n\n    this.visitChildren(node);\n  }\n\n  public visitPattern(node: RegExpPattern): void {}\n\n  public visitFlags(node: RegExpFlags): void {}\n\n  public visitDisjunction(node: Disjunction): void {}\n\n  public visitAlternative(node: Alternative): void {}\n\n  // Assertion\n  public visitStartAnchor(node: Assertion): void {}\n\n  public 
visitEndAnchor(node: Assertion): void {}\n\n  public visitWordBoundary(node: Assertion): void {}\n\n  public visitNonWordBoundary(node: Assertion): void {}\n\n  public visitLookahead(node: Assertion): void {}\n\n  public visitNegativeLookahead(node: Assertion): void {}\n\n  // atoms\n  public visitCharacter(node: Character): void {}\n\n  public visitSet(node: Set): void {}\n\n  public visitGroup(node: Group): void {}\n\n  public visitGroupBackReference(node: GroupBackReference): void {}\n\n  public visitQuantifier(node: Quantifier): void {}\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { DefaultNameRegexp } from '../utils/cst-utils.js';\nimport { isCommentTerminal, terminalRegex } from '../utils/grammar-utils.js';\nimport { isMultilineComment } from '../utils/regexp-utils.js';\nimport { isTerminalRule } from './generated/ast.js';\n\nexport interface GrammarConfig {\n    /**\n     * Lists all rule names which are classified as multiline comment rules\n     */\n    multilineCommentRules: string[]\n    /**\n     * A regular expression which matches characters of names\n     */\n    nameRegexp: RegExp\n}\n\n/**\n * Create the default grammar configuration (used by `createDefaultModule`). This can be overridden in a\n * language-specific module.\n */\nexport function createGrammarConfig(services: LangiumCoreServices): GrammarConfig {\n    const rules: string[] = [];\n    const grammar = services.Grammar;\n    for (const rule of grammar.rules) {\n        if (isTerminalRule(rule) && isCommentTerminal(rule) && isMultilineComment(terminalRegex(rule))) {\n            rules.push(rule.name);\n        }\n    }\n    return {\n        multilineCommentRules: rules,\n        nameRegexp: DefaultNameRegexp\n    };\n}\n", "export function PRINT_ERROR(msg: string) {\n  /* istanbul ignore else - can't override global.console in node.js */\n  if (console && console.error) {\n    console.error(`Error: ${msg}`);\n  }\n}\n\nexport function PRINT_WARNING(msg: string) {\n  /* istanbul ignore else - can't override global.console in node.js*/\n  if (console && console.warn) {\n    // TODO: modify docs accordingly\n    console.warn(`Warning: ${msg}`);\n  }\n}\n", "export function timer(func: () => T): { time: number; value: T } {\n  const start = new Date().getTime();\n  const val = func();\n  const end = new Date().getTime();\n  const total = end - start;\n  return { time: total, value: val };\n}\n", "// based on: https://github.com/petkaantonov/bluebird/blob/b97c0d2d487e8c5076e8bd897e0dcd4622d31846/src/util.js#L201-L216\nexport function toFastProperties(toBecomeFast: any) {\n  function FakeConstructor() {}\n\n  // If our object is used as a constructor, it would receive\n  FakeConstructor.prototype = toBecomeFast;\n  const fakeInstance = new (FakeConstructor as any)();\n\n  function fakeAccess() {\n    return typeof fakeInstance.bar;\n  }\n\n  // help V8 understand this is a \"real\" prototype by actually using\n  // the fake instance.\n  fakeAccess();\n  fakeAccess();\n\n  // Always true condition to suppress the Firefox warning of unreachable\n  // code after a return statement.\n  if (1) return toBecomeFast;\n\n  // Eval prevents optimization of 
this method (even though this is dead code)\n  // - https://esbuild.github.io/content-types/#direct-eval\n  /* istanbul ignore next */\n  // tslint:disable-next-line\n  (0, eval)(toBecomeFast);\n}\n", "import { assign, forEach, isRegExp, isString, map, pickBy } from \"lodash-es\";\nimport type {\n  IGASTVisitor,\n  IProduction,\n  IProductionWithOccurrence,\n  ISerializedGast,\n  TokenType,\n} from \"@chevrotain/types\";\n\n// TODO: duplicated code to avoid extracting another sub-package -- how to avoid?\nfunction tokenLabel(tokType: TokenType): string {\n  if (hasTokenLabel(tokType)) {\n    return tokType.LABEL;\n  } else {\n    return tokType.name;\n  }\n}\n\n// TODO: duplicated code to avoid extracting another sub-package -- how to avoid?\nfunction hasTokenLabel(\n  obj: TokenType,\n): obj is TokenType & Pick, \"LABEL\"> {\n  return isString(obj.LABEL) && obj.LABEL !== \"\";\n}\n\nexport abstract class AbstractProduction\n  implements IProduction\n{\n  public get definition(): T[] {\n    return this._definition;\n  }\n  public set definition(value: T[]) {\n    this._definition = value;\n  }\n\n  constructor(protected _definition: T[]) {}\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n    forEach(this.definition, (prod) => {\n      prod.accept(visitor);\n    });\n  }\n}\n\nexport class NonTerminal\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public nonTerminalName!: string;\n  public label?: string;\n  public referencedRule!: Rule;\n  public idx: number = 1;\n\n  constructor(options: {\n    nonTerminalName: string;\n    label?: string;\n    referencedRule?: Rule;\n    idx?: number;\n  }) {\n    super([]);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n\n  set definition(definition: IProduction[]) {\n    // immutable\n  }\n\n  get definition(): IProduction[] {\n    if (this.referencedRule !== undefined) {\n      return this.referencedRule.definition;\n    }\n    return [];\n  }\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n    // don't visit children of a reference, we will get cyclic infinite loops if we do so\n  }\n}\n\nexport class Rule extends AbstractProduction {\n  public name!: string;\n  public orgText: string = \"\";\n\n  constructor(options: {\n    name: string;\n    definition: IProduction[];\n    orgText?: string;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Alternative extends AbstractProduction {\n  public ignoreAmbiguities: boolean = false;\n\n  constructor(options: {\n    definition: IProduction[];\n    ignoreAmbiguities?: boolean;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Option\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionMandatory\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n  
  assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionMandatoryWithSeparator\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    separator: TokenType;\n    idx?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Repetition\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    idx?: number;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class RepetitionWithSeparator\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public separator!: TokenType;\n  public idx: number = 1;\n  public maxLookahead?: number;\n\n  constructor(options: {\n    definition: IProduction[];\n    separator: TokenType;\n    idx?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Alternation\n  extends AbstractProduction\n  implements IProductionWithOccurrence\n{\n  public idx: number = 1;\n  public ignoreAmbiguities: boolean = false;\n  public hasPredicates: boolean = false;\n  public maxLookahead?: number;\n\n  public get definition(): Alternative[] {\n    return this._definition;\n  }\n  public set definition(value: Alternative[]) {\n    this._definition = value;\n  }\n\n  constructor(options: {\n    definition: Alternative[];\n    idx?: number;\n    ignoreAmbiguities?: boolean;\n    hasPredicates?: boolean;\n    maxLookahead?: number;\n  }) {\n    super(options.definition);\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n}\n\nexport class Terminal implements IProductionWithOccurrence {\n  public terminalType!: TokenType;\n  public label?: string;\n  public idx: number = 1;\n\n  constructor(options: {\n    terminalType: TokenType;\n    label?: string;\n    idx?: number;\n  }) {\n    assign(\n      this,\n      pickBy(options, (v) => v !== undefined),\n    );\n  }\n\n  accept(visitor: IGASTVisitor): void {\n    visitor.visit(this);\n  }\n}\n\nexport interface ISerializedBasic extends ISerializedGast {\n  type:\n    | \"Alternative\"\n    | \"Option\"\n    | \"RepetitionMandatory\"\n    | \"Repetition\"\n    | \"Alternation\";\n  idx?: number;\n}\n\nexport interface ISerializedGastRule extends ISerializedGast {\n  type: \"Rule\";\n  name: string;\n  orgText: string;\n}\n\nexport interface ISerializedNonTerminal extends ISerializedGast {\n  type: \"NonTerminal\";\n  name: string;\n  label?: string;\n  idx: number;\n}\n\nexport interface ISerializedTerminal extends ISerializedGast {\n  type: \"Terminal\";\n  name: string;\n  terminalLabel?: string;\n  label?: string;\n  pattern?: string;\n  idx: number;\n}\n\nexport interface ISerializedTerminalWithSeparator extends ISerializedGast {\n  type: \"RepetitionMandatoryWithSeparator\" | \"RepetitionWithSeparator\";\n  idx: number;\n  separator: ISerializedTerminal;\n}\n\nexport type ISerializedGastAny =\n  | ISerializedBasic\n  | ISerializedGastRule\n  | ISerializedNonTerminal\n  | 
ISerializedTerminal\n  | ISerializedTerminalWithSeparator;\n\nexport function serializeGrammar(topRules: Rule[]): ISerializedGast[] {\n  return map(topRules, serializeProduction);\n}\n\nexport function serializeProduction(node: IProduction): ISerializedGast {\n  function convertDefinition(definition: IProduction[]): ISerializedGast[] {\n    return map(definition, serializeProduction);\n  }\n  /* istanbul ignore else */\n  if (node instanceof NonTerminal) {\n    const serializedNonTerminal: ISerializedNonTerminal = {\n      type: \"NonTerminal\",\n      name: node.nonTerminalName,\n      idx: node.idx,\n    };\n\n    if (isString(node.label)) {\n      serializedNonTerminal.label = node.label;\n    }\n\n    return serializedNonTerminal;\n  } else if (node instanceof Alternative) {\n    return {\n      type: \"Alternative\",\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Option) {\n    return {\n      type: \"Option\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionMandatory) {\n    return {\n      type: \"RepetitionMandatory\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionMandatoryWithSeparator) {\n    return {\n      type: \"RepetitionMandatoryWithSeparator\",\n      idx: node.idx,\n      separator: (\n        serializeProduction(new Terminal({ terminalType: node.separator }))\n      ),\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof RepetitionWithSeparator) {\n    return {\n      type: \"RepetitionWithSeparator\",\n      idx: node.idx,\n      separator: (\n        serializeProduction(new Terminal({ terminalType: node.separator }))\n      ),\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Repetition) {\n    return {\n      type: \"Repetition\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Alternation) {\n    return {\n      type: \"Alternation\",\n      idx: node.idx,\n      definition: convertDefinition(node.definition),\n    };\n  } else if (node instanceof Terminal) {\n    const serializedTerminal = {\n      type: \"Terminal\",\n      name: node.terminalType.name,\n      label: tokenLabel(node.terminalType),\n      idx: node.idx,\n    };\n\n    if (isString(node.label)) {\n      serializedTerminal.terminalLabel = node.label;\n    }\n\n    const pattern = node.terminalType.PATTERN;\n    if (node.terminalType.PATTERN) {\n      serializedTerminal.pattern = isRegExp(pattern)\n        ? 
(pattern).source\n        : pattern;\n    }\n\n    return serializedTerminal;\n  } else if (node instanceof Rule) {\n    return {\n      type: \"Rule\",\n      name: node.name,\n      orgText: node.orgText,\n      definition: convertDefinition(node.definition),\n    };\n    /* c8 ignore next 3 */\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n", "import {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"./model.js\";\nimport type { IProduction } from \"@chevrotain/types\";\n\nexport abstract class GAstVisitor {\n  public visit(node: IProduction): any {\n    const nodeAny: any = node;\n    switch (nodeAny.constructor) {\n      case NonTerminal:\n        return this.visitNonTerminal(nodeAny);\n      case Alternative:\n        return this.visitAlternative(nodeAny);\n      case Option:\n        return this.visitOption(nodeAny);\n      case RepetitionMandatory:\n        return this.visitRepetitionMandatory(nodeAny);\n      case RepetitionMandatoryWithSeparator:\n        return this.visitRepetitionMandatoryWithSeparator(nodeAny);\n      case RepetitionWithSeparator:\n        return this.visitRepetitionWithSeparator(nodeAny);\n      case Repetition:\n        return this.visitRepetition(nodeAny);\n      case Alternation:\n        return this.visitAlternation(nodeAny);\n      case Terminal:\n        return this.visitTerminal(nodeAny);\n      case Rule:\n        return this.visitRule(nodeAny);\n      /* c8 ignore next 2 */\n      default:\n        throw Error(\"non exhaustive match\");\n    }\n  }\n\n  /* c8 ignore next */\n  public visitNonTerminal(node: NonTerminal): any {}\n\n  /* c8 ignore next */\n  public visitAlternative(node: Alternative): any {}\n\n  /* c8 ignore next */\n  public visitOption(node: Option): any {}\n\n  /* c8 ignore next */\n  public visitRepetition(node: Repetition): any {}\n\n  /* c8 ignore next */\n  public visitRepetitionMandatory(node: RepetitionMandatory): any {}\n\n  /* c8 ignore next 3 */\n  public visitRepetitionMandatoryWithSeparator(\n    node: RepetitionMandatoryWithSeparator,\n  ): any {}\n\n  /* c8 ignore next */\n  public visitRepetitionWithSeparator(node: RepetitionWithSeparator): any {}\n\n  /* c8 ignore next */\n  public visitAlternation(node: Alternation): any {}\n\n  /* c8 ignore next */\n  public visitTerminal(node: Terminal): any {}\n\n  /* c8 ignore next */\n  public visitRule(node: Rule): any {}\n}\n", "import { every, includes, some } from \"lodash-es\";\nimport {\n  AbstractProduction,\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"./model.js\";\nimport type { IProduction, IProductionWithOccurrence } from \"@chevrotain/types\";\n\nexport function isSequenceProd(\n  prod: IProduction,\n): prod is { definition: IProduction[] } & IProduction {\n  return (\n    prod instanceof Alternative ||\n    prod instanceof Option ||\n    prod instanceof Repetition ||\n    prod instanceof RepetitionMandatory ||\n    prod instanceof RepetitionMandatoryWithSeparator ||\n    prod instanceof RepetitionWithSeparator ||\n    prod instanceof Terminal ||\n    prod instanceof Rule\n  );\n}\n\nexport function isOptionalProd(\n  prod: IProduction,\n  alreadyVisited: NonTerminal[] = [],\n): boolean {\n  const isDirectlyOptional =\n    prod instanceof Option ||\n    prod 
instanceof Repetition ||\n    prod instanceof RepetitionWithSeparator;\n  if (isDirectlyOptional) {\n    return true;\n  }\n\n  // note that this can cause infinite loop if one optional empty TOP production has a cyclic dependency with another\n  // empty optional top rule\n  // may be indirectly optional ((A?B?C?) | (D?E?F?))\n  if (prod instanceof Alternation) {\n    // for OR its enough for just one of the alternatives to be optional\n    return some((prod).definition, (subProd: IProduction) => {\n      return isOptionalProd(subProd, alreadyVisited);\n    });\n  } else if (prod instanceof NonTerminal && includes(alreadyVisited, prod)) {\n    // avoiding stack overflow due to infinite recursion\n    return false;\n  } else if (prod instanceof AbstractProduction) {\n    if (prod instanceof NonTerminal) {\n      alreadyVisited.push(prod);\n    }\n    return every(\n      (prod).definition,\n      (subProd: IProduction) => {\n        return isOptionalProd(subProd, alreadyVisited);\n      },\n    );\n  } else {\n    return false;\n  }\n}\n\nexport function isBranchingProd(\n  prod: IProduction,\n): prod is { definition: IProduction[] } & IProduction {\n  return prod instanceof Alternation;\n}\n\nexport function getProductionDslName(prod: IProductionWithOccurrence): string {\n  /* istanbul ignore else */\n  if (prod instanceof NonTerminal) {\n    return \"SUBRULE\";\n  } else if (prod instanceof Option) {\n    return \"OPTION\";\n  } else if (prod instanceof Alternation) {\n    return \"OR\";\n  } else if (prod instanceof RepetitionMandatory) {\n    return \"AT_LEAST_ONE\";\n  } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n    return \"AT_LEAST_ONE_SEP\";\n  } else if (prod instanceof RepetitionWithSeparator) {\n    return \"MANY_SEP\";\n  } else if (prod instanceof Repetition) {\n    return \"MANY\";\n  } else if (prod instanceof Terminal) {\n    return \"CONSUME\";\n    /* c8 ignore next 3 */\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n", "import { drop, forEach } from \"lodash-es\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { IProduction } from \"@chevrotain/types\";\n\n/**\n *  A Grammar Walker that computes the \"remaining\" grammar \"after\" a productions in the grammar.\n */\nexport abstract class RestWalker {\n  walk(prod: { definition: IProduction[] }, prevRest: any[] = []): void {\n    forEach(prod.definition, (subProd: IProduction, index) => {\n      const currRest = drop(prod.definition, index + 1);\n      /* istanbul ignore else */\n      if (subProd instanceof NonTerminal) {\n        this.walkProdRef(subProd, currRest, prevRest);\n      } else if (subProd instanceof Terminal) {\n        this.walkTerminal(subProd, currRest, prevRest);\n      } else if (subProd instanceof Alternative) {\n        this.walkFlat(subProd, currRest, prevRest);\n      } else if (subProd instanceof Option) {\n        this.walkOption(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionMandatory) {\n        this.walkAtLeastOne(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionMandatoryWithSeparator) {\n        this.walkAtLeastOneSep(subProd, currRest, prevRest);\n      } else if (subProd instanceof RepetitionWithSeparator) {\n        this.walkManySep(subProd, currRest, prevRest);\n      } else if (subProd instanceof Repetition) {\n        
this.walkMany(subProd, currRest, prevRest);\n      } else if (subProd instanceof Alternation) {\n        this.walkOr(subProd, currRest, prevRest);\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    });\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {}\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {}\n\n  walkFlat(\n    flatProd: Alternative,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABCDEF => after the D the rest is EF\n    const fullOrRest = currRest.concat(prevRest);\n    this.walk(flatProd, fullOrRest);\n  }\n\n  walkOption(\n    optionProd: Option,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)?F => after the (DE)? the rest is F\n    const fullOrRest = currRest.concat(prevRest);\n    this.walk(optionProd, fullOrRest);\n  }\n\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)+F => after the (DE)+ the rest is (DE)?F\n    const fullAtLeastOneRest: IProduction[] = [\n      new Option({ definition: atLeastOneProd.definition }),\n    ].concat(currRest, prevRest);\n    this.walk(atLeastOneProd, fullAtLeastOneRest);\n  }\n\n  walkAtLeastOneSep(\n    atLeastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC DE(,DE)* F => after the (,DE)+ the rest is (,DE)?F\n    const fullAtLeastOneSepRest = restForRepetitionWithSeparator(\n      atLeastOneSepProd,\n      currRest,\n      prevRest,\n    );\n    this.walk(atLeastOneSepProd, fullAtLeastOneSepRest);\n  }\n\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(DE)*F => after the (DE)* the rest is (DE)?F\n    const fullManyRest: IProduction[] = [\n      new Option({ definition: manyProd.definition }),\n    ].concat(currRest, prevRest);\n    this.walk(manyProd, fullManyRest);\n  }\n\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC (DE(,DE)*)? 
F => after the (,DE)* the rest is (,DE)?F\n    const fullManySepRest = restForRepetitionWithSeparator(\n      manySepProd,\n      currRest,\n      prevRest,\n    );\n    this.walk(manySepProd, fullManySepRest);\n  }\n\n  walkOr(\n    orProd: Alternation,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // ABC(D|E|F)G => when finding the (D|E|F) the rest is G\n    const fullOrRest = currRest.concat(prevRest);\n    // walk all different alternatives\n    forEach(orProd.definition, (alt) => {\n      // wrapping each alternative in a single definition wrapper\n      // to avoid errors in computing the rest of that alternative in the invocation to computeInProdFollows\n      // (otherwise for OR([alt1,alt2]) alt2 will be considered in 'rest' of alt1\n      const prodWrapper = new Alternative({ definition: [alt] });\n      this.walk(prodWrapper, fullOrRest);\n    });\n  }\n}\n\nfunction restForRepetitionWithSeparator(\n  repSepProd: RepetitionWithSeparator,\n  currRest: IProduction[],\n  prevRest: IProduction[],\n) {\n  const repSepRest = [\n    new Option({\n      definition: [\n        new Terminal({ terminalType: repSepProd.separator }) as IProduction,\n      ].concat(repSepProd.definition),\n    }) as IProduction,\n  ];\n  const fullRepSepRest: IProduction[] = repSepRest.concat(currRest, prevRest);\n  return fullRepSepRest;\n}\n", "import { flatten, map, uniq } from \"lodash-es\";\nimport {\n  isBranchingProd,\n  isOptionalProd,\n  isSequenceProd,\n  NonTerminal,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { IProduction, TokenType } from \"@chevrotain/types\";\n\nexport function first(prod: IProduction): TokenType[] {\n  /* istanbul ignore else */\n  if (prod instanceof NonTerminal) {\n    // this could in theory cause infinite loops if\n    // (1) prod A refs prod B.\n    // (2) prod B refs prod A\n    // (3) AB can match the empty set\n    // in other words a cycle where everything is optional so the first will keep\n    // looking ahead for the next optional part and will never exit\n    // currently there is no safeguard for this unique edge case because\n    // (1) not sure a grammar in which this can happen is useful for anything (productive)\n    return first((prod).referencedRule);\n  } else if (prod instanceof Terminal) {\n    return firstForTerminal(prod);\n  } else if (isSequenceProd(prod)) {\n    return firstForSequence(prod);\n  } else if (isBranchingProd(prod)) {\n    return firstForBranching(prod);\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nexport function firstForSequence(prod: {\n  definition: IProduction[];\n}): TokenType[] {\n  let firstSet: TokenType[] = [];\n  const seq = prod.definition;\n  let nextSubProdIdx = 0;\n  let hasInnerProdsRemaining = seq.length > nextSubProdIdx;\n  let currSubProd;\n  // so we enter the loop at least once (if the definition is not empty\n  let isLastInnerProdOptional = true;\n  // scan a sequence until it's end or until we have found a NONE optional production in it\n  while (hasInnerProdsRemaining && isLastInnerProdOptional) {\n    currSubProd = seq[nextSubProdIdx];\n    isLastInnerProdOptional = isOptionalProd(currSubProd);\n    firstSet = firstSet.concat(first(currSubProd));\n    nextSubProdIdx = nextSubProdIdx + 1;\n    hasInnerProdsRemaining = seq.length > nextSubProdIdx;\n  }\n\n  return uniq(firstSet);\n}\n\nexport function firstForBranching(prod: {\n  definition: IProduction[];\n}): TokenType[] {\n  const allAlternativesFirsts: TokenType[][] = map(\n    prod.definition,\n    
(innerProd) => {\n      return first(innerProd);\n    },\n  );\n  return uniq(flatten(allAlternativesFirsts));\n}\n\nexport function firstForTerminal(terminal: Terminal): TokenType[] {\n  return [terminal.terminalType];\n}\n", "// TODO: can this be removed? where is it used?\nexport const IN = \"_~IN~_\";\n", "import { RestWalker } from \"./rest.js\";\nimport { first } from \"./first.js\";\nimport { assign, forEach } from \"lodash-es\";\nimport { IN } from \"../constants.js\";\nimport { Alternative, NonTerminal, Rule, Terminal } from \"@chevrotain/gast\";\nimport { IProduction, TokenType } from \"@chevrotain/types\";\n\n// This ResyncFollowsWalker computes all of the follows required for RESYNC\n// (skipping reference production).\nexport class ResyncFollowsWalker extends RestWalker {\n  public follows: Record = {};\n\n  constructor(private topProd: Rule) {\n    super();\n  }\n\n  startWalking(): Record {\n    this.walk(this.topProd);\n    return this.follows;\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // do nothing! just like in the public sector after 13:00\n  }\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    const followName =\n      buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.idx) +\n      this.topProd.name;\n    const fullRest: IProduction[] = currRest.concat(prevRest);\n    const restProd = new Alternative({ definition: fullRest });\n    const t_in_topProd_follows = first(restProd);\n    this.follows[followName] = t_in_topProd_follows;\n  }\n}\n\nexport function computeAllProdsFollows(\n  topProductions: Rule[],\n): Record {\n  const reSyncFollows = {};\n\n  forEach(topProductions, (topProd) => {\n    const currRefsFollow = new ResyncFollowsWalker(topProd).startWalking();\n    assign(reSyncFollows, currRefsFollow);\n  });\n  return reSyncFollows;\n}\n\nexport function buildBetweenProdsFollowPrefix(\n  inner: Rule,\n  occurenceInParent: number,\n): string {\n  return inner.name + occurenceInParent + IN;\n}\n\nexport function buildInProdFollowPrefix(terminal: Terminal): string {\n  const terminalName = terminal.terminalType.name;\n  return terminalName + terminal.idx + IN;\n}\n", "import {\n  Alternative,\n  Assertion,\n  Atom,\n  Disjunction,\n  RegExpParser,\n  RegExpPattern,\n} from \"@chevrotain/regexp-to-ast\";\n\nlet regExpAstCache: { [regex: string]: RegExpPattern } = {};\nconst regExpParser = new RegExpParser();\n\n// this should be moved to regexp-to-ast\nexport type ASTNode =\n  | RegExpPattern\n  | Disjunction\n  | Alternative\n  | Assertion\n  | Atom;\n\nexport function getRegExpAst(regExp: RegExp): RegExpPattern {\n  const regExpStr = regExp.toString();\n  if (regExpAstCache.hasOwnProperty(regExpStr)) {\n    return regExpAstCache[regExpStr];\n  } else {\n    const regExpAst = regExpParser.pattern(regExpStr);\n    regExpAstCache[regExpStr] = regExpAst;\n    return regExpAst;\n  }\n}\n\nexport function clearRegExpParserCache() {\n  regExpAstCache = {};\n}\n", "import {\n  Alternative,\n  Atom,\n  BaseRegExpVisitor,\n  Character,\n  Disjunction,\n  Group,\n  Set,\n} from \"@chevrotain/regexp-to-ast\";\nimport { every, find, forEach, includes, isArray, values } from \"lodash-es\";\nimport { PRINT_ERROR, PRINT_WARNING } from \"@chevrotain/utils\";\nimport { ASTNode, getRegExpAst } from \"./reg_exp_parser.js\";\nimport { charCodeToOptimizedIndex, minOptimizationVal } from \"./lexer.js\";\n\nconst 
complementErrorMessage =\n  \"Complement Sets are not supported for first char optimization\";\nexport const failedOptimizationPrefixMsg =\n  'Unable to use \"first char\" lexer optimizations:\\n';\n\nexport function getOptimizedStartCodesIndices(\n  regExp: RegExp,\n  ensureOptimizations = false,\n): number[] {\n  try {\n    const ast = getRegExpAst(regExp);\n    const firstChars = firstCharOptimizedIndices(\n      ast.value,\n      {},\n      ast.flags.ignoreCase,\n    );\n    return firstChars;\n  } catch (e) {\n    /* istanbul ignore next */\n    // Testing this relies on the regexp-to-ast library having a bug... */\n    // TODO: only the else branch needs to be ignored, try to fix with newer prettier / tsc\n    if (e.message === complementErrorMessage) {\n      if (ensureOptimizations) {\n        PRINT_WARNING(\n          `${failedOptimizationPrefixMsg}` +\n            `\\tUnable to optimize: < ${regExp.toString()} >\\n` +\n            \"\\tComplement Sets cannot be automatically optimized.\\n\" +\n            \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n            \"\\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#COMPLEMENT for details.\",\n        );\n      }\n    } else {\n      let msgSuffix = \"\";\n      if (ensureOptimizations) {\n        msgSuffix =\n          \"\\n\\tThis will disable the lexer's first char optimizations.\\n\" +\n          \"\\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#REGEXP_PARSING for details.\";\n      }\n      PRINT_ERROR(\n        `${failedOptimizationPrefixMsg}\\n` +\n          `\\tFailed parsing: < ${regExp.toString()} >\\n` +\n          `\\tUsing the @chevrotain/regexp-to-ast library\\n` +\n          \"\\tPlease open an issue at: https://github.com/chevrotain/chevrotain/issues\" +\n          msgSuffix,\n      );\n    }\n  }\n\n  return [];\n}\n\nexport function firstCharOptimizedIndices(\n  ast: ASTNode,\n  result: { [charCode: number]: number },\n  ignoreCase: boolean,\n): number[] {\n  switch (ast.type) {\n    case \"Disjunction\":\n      for (let i = 0; i < ast.value.length; i++) {\n        firstCharOptimizedIndices(ast.value[i], result, ignoreCase);\n      }\n      break;\n    case \"Alternative\":\n      const terms = ast.value;\n      for (let i = 0; i < terms.length; i++) {\n        const term = terms[i];\n\n        // skip terms that cannot effect the first char results\n        switch (term.type) {\n          case \"EndAnchor\":\n          // A group back reference cannot affect potential starting char.\n          // because if a back reference is the first production than automatically\n          // the group being referenced has had to come BEFORE so its codes have already been added\n          case \"GroupBackReference\":\n          // assertions do not affect potential starting codes\n          case \"Lookahead\":\n          case \"NegativeLookahead\":\n          case \"StartAnchor\":\n          case \"WordBoundary\":\n          case \"NonWordBoundary\":\n            continue;\n        }\n\n        const atom = term;\n        switch (atom.type) {\n          case \"Character\":\n            addOptimizedIdxToResult(atom.value, result, ignoreCase);\n            break;\n          case \"Set\":\n            if (atom.complement === true) {\n              throw Error(complementErrorMessage);\n            }\n            forEach(atom.value, (code) => {\n              if (typeof code === \"number\") {\n                addOptimizedIdxToResult(code, result, ignoreCase);\n          
    } else {\n                // range\n                const range = code as any;\n                // cannot optimize when ignoreCase is\n                if (ignoreCase === true) {\n                  for (\n                    let rangeCode = range.from;\n                    rangeCode <= range.to;\n                    rangeCode++\n                  ) {\n                    addOptimizedIdxToResult(rangeCode, result, ignoreCase);\n                  }\n                }\n                // Optimization (2 orders of magnitude less work for very large ranges)\n                else {\n                  // handle unoptimized values\n                  for (\n                    let rangeCode = range.from;\n                    rangeCode <= range.to && rangeCode < minOptimizationVal;\n                    rangeCode++\n                  ) {\n                    addOptimizedIdxToResult(rangeCode, result, ignoreCase);\n                  }\n\n                  // Less common charCode where we optimize for faster init time, by using larger \"buckets\"\n                  if (range.to >= minOptimizationVal) {\n                    const minUnOptVal =\n                      range.from >= minOptimizationVal\n                        ? range.from\n                        : minOptimizationVal;\n                    const maxUnOptVal = range.to;\n                    const minOptIdx = charCodeToOptimizedIndex(minUnOptVal);\n                    const maxOptIdx = charCodeToOptimizedIndex(maxUnOptVal);\n\n                    for (\n                      let currOptIdx = minOptIdx;\n                      currOptIdx <= maxOptIdx;\n                      currOptIdx++\n                    ) {\n                      result[currOptIdx] = currOptIdx;\n                    }\n                  }\n                }\n              }\n            });\n            break;\n          case \"Group\":\n            firstCharOptimizedIndices(atom.value, result, ignoreCase);\n            break;\n          /* istanbul ignore next */\n          default:\n            throw Error(\"Non Exhaustive Match\");\n        }\n\n        // reached a mandatory production, no more **start** codes can be found on this alternative\n        const isOptionalQuantifier =\n          atom.quantifier !== undefined && atom.quantifier.atLeast === 0;\n        if (\n          // A group may be optional due to empty contents /(?:)/\n          // or if everything inside it is optional /((a)?)/\n          (atom.type === \"Group\" && isWholeOptional(atom) === false) ||\n          // If this term is not a group it may only be optional if it has an optional quantifier\n          (atom.type !== \"Group\" && isOptionalQuantifier === false)\n        ) {\n          break;\n        }\n      }\n      break;\n    /* istanbul ignore next */\n    default:\n      throw Error(\"non exhaustive match!\");\n  }\n\n  // console.log(Object.keys(result).length)\n  return values(result);\n}\n\nfunction addOptimizedIdxToResult(\n  code: number,\n  result: { [charCode: number]: number },\n  ignoreCase: boolean,\n) {\n  const optimizedCharIdx = charCodeToOptimizedIndex(code);\n  result[optimizedCharIdx] = optimizedCharIdx;\n\n  if (ignoreCase === true) {\n    handleIgnoreCase(code, result);\n  }\n}\n\nfunction handleIgnoreCase(\n  code: number,\n  result: { [charCode: number]: number },\n) {\n  const char = String.fromCharCode(code);\n  const upperChar = char.toUpperCase();\n  /* istanbul ignore else */\n  if (upperChar !== char) {\n    const optimizedCharIdx = 
charCodeToOptimizedIndex(upperChar.charCodeAt(0));\n    result[optimizedCharIdx] = optimizedCharIdx;\n  } else {\n    const lowerChar = char.toLowerCase();\n    if (lowerChar !== char) {\n      const optimizedCharIdx = charCodeToOptimizedIndex(\n        lowerChar.charCodeAt(0),\n      );\n      result[optimizedCharIdx] = optimizedCharIdx;\n    }\n  }\n}\n\nfunction findCode(setNode: Set, targetCharCodes: number[]) {\n  return find(setNode.value, (codeOrRange) => {\n    if (typeof codeOrRange === \"number\") {\n      return includes(targetCharCodes, codeOrRange);\n    } else {\n      // range\n      const range = codeOrRange;\n      return (\n        find(\n          targetCharCodes,\n          (targetCode) => range.from <= targetCode && targetCode <= range.to,\n        ) !== undefined\n      );\n    }\n  });\n}\n\nfunction isWholeOptional(ast: any): boolean {\n  const quantifier = (ast as Atom).quantifier;\n  if (quantifier && quantifier.atLeast === 0) {\n    return true;\n  }\n\n  if (!ast.value) {\n    return false;\n  }\n\n  return isArray(ast.value)\n    ? every(ast.value, isWholeOptional)\n    : isWholeOptional(ast.value);\n}\n\nclass CharCodeFinder extends BaseRegExpVisitor {\n  found: boolean = false;\n\n  constructor(private targetCharCodes: number[]) {\n    super();\n  }\n\n  visitChildren(node: ASTNode) {\n    // No need to keep looking...\n    if (this.found === true) {\n      return;\n    }\n\n    // switch lookaheads as they do not actually consume any characters thus\n    // finding a charCode at lookahead context does not mean that regexp can actually contain it in a match.\n    switch (node.type) {\n      case \"Lookahead\":\n        this.visitLookahead(node);\n        return;\n      case \"NegativeLookahead\":\n        this.visitNegativeLookahead(node);\n        return;\n    }\n\n    super.visitChildren(node);\n  }\n\n  visitCharacter(node: Character) {\n    if (includes(this.targetCharCodes, node.value)) {\n      this.found = true;\n    }\n  }\n\n  visitSet(node: Set) {\n    if (node.complement) {\n      if (findCode(node, this.targetCharCodes) === undefined) {\n        this.found = true;\n      }\n    } else {\n      if (findCode(node, this.targetCharCodes) !== undefined) {\n        this.found = true;\n      }\n    }\n  }\n}\n\nexport function canMatchCharCode(\n  charCodes: number[],\n  pattern: RegExp | string,\n) {\n  if (pattern instanceof RegExp) {\n    const ast = getRegExpAst(pattern);\n    const charCodeFinder = new CharCodeFinder(charCodes);\n    charCodeFinder.visit(ast);\n    return charCodeFinder.found;\n  } else {\n    return (\n      find(pattern, (char) => {\n        return includes(charCodes, (char).charCodeAt(0));\n      }) !== undefined\n    );\n  }\n}\n", "import { BaseRegExpVisitor } from \"@chevrotain/regexp-to-ast\";\nimport {\n  IRegExpExec,\n  Lexer,\n  LexerDefinitionErrorType,\n} from \"./lexer_public.js\";\nimport {\n  compact,\n  defaults,\n  difference,\n  filter,\n  find,\n  first,\n  flatten,\n  forEach,\n  has,\n  includes,\n  indexOf,\n  isArray,\n  isEmpty,\n  isFunction,\n  isRegExp,\n  isString,\n  isUndefined,\n  keys,\n  map,\n  reduce,\n  reject,\n  values,\n} from \"lodash-es\";\nimport { PRINT_ERROR } from \"@chevrotain/utils\";\nimport {\n  canMatchCharCode,\n  failedOptimizationPrefixMsg,\n  getOptimizedStartCodesIndices,\n} from \"./reg_exp.js\";\nimport {\n  ILexerDefinitionError,\n  ILineTerminatorsTester,\n  IMultiModeLexerDefinition,\n  IToken,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { getRegExpAst } from 
\"./reg_exp_parser.js\";\n\nconst PATTERN = \"PATTERN\";\nexport const DEFAULT_MODE = \"defaultMode\";\nexport const MODES = \"modes\";\n\nexport interface IPatternConfig {\n  pattern: IRegExpExec | string;\n  longerAlt: number[] | undefined;\n  canLineTerminator: boolean;\n  isCustom: boolean;\n  short: number | false;\n  group: string | undefined | false;\n  push: string | undefined;\n  pop: boolean;\n  tokenType: TokenType;\n  tokenTypeIdx: number;\n}\n\nexport interface IAnalyzeResult {\n  patternIdxToConfig: IPatternConfig[];\n  charCodeToPatternIdxToConfig: { [charCode: number]: IPatternConfig[] };\n  emptyGroups: { [groupName: string]: IToken[] };\n  hasCustom: boolean;\n  canBeOptimized: boolean;\n}\n\nexport let SUPPORT_STICKY =\n  typeof (new RegExp(\"(?:)\")).sticky === \"boolean\";\n\nexport function disableSticky() {\n  SUPPORT_STICKY = false;\n}\n\nexport function enableSticky() {\n  SUPPORT_STICKY = true;\n}\n\nexport function analyzeTokenTypes(\n  tokenTypes: TokenType[],\n  options: {\n    positionTracking?: \"full\" | \"onlyStart\" | \"onlyOffset\";\n    ensureOptimizations?: boolean;\n    lineTerminatorCharacters?: (number | string)[];\n    // TODO: should `useSticky` be an argument here?\n    useSticky?: boolean;\n    safeMode?: boolean;\n    tracer?: (msg: string, action: () => void) => void;\n  },\n): IAnalyzeResult {\n  options = defaults(options, {\n    useSticky: SUPPORT_STICKY,\n    debug: false as boolean,\n    safeMode: false as boolean,\n    positionTracking: \"full\",\n    lineTerminatorCharacters: [\"\\r\", \"\\n\"],\n    tracer: (msg: string, action: Function) => action(),\n  });\n\n  const tracer = options.tracer!;\n\n  tracer(\"initCharCodeToOptimizedIndexMap\", () => {\n    initCharCodeToOptimizedIndexMap();\n  });\n\n  let onlyRelevantTypes: TokenType[];\n  tracer(\"Reject Lexer.NA\", () => {\n    onlyRelevantTypes = reject(tokenTypes, (currType) => {\n      return currType[PATTERN] === Lexer.NA;\n    });\n  });\n\n  let hasCustom = false;\n  let allTransformedPatterns: (IRegExpExec | string)[];\n  tracer(\"Transform Patterns\", () => {\n    hasCustom = false;\n    allTransformedPatterns = map(\n      onlyRelevantTypes,\n      (currType): IRegExpExec | string => {\n        const currPattern = currType[PATTERN];\n\n        /* istanbul ignore else */\n        if (isRegExp(currPattern)) {\n          const regExpSource = currPattern.source;\n          if (\n            regExpSource.length === 1 &&\n            // only these regExp meta characters which can appear in a length one regExp\n            regExpSource !== \"^\" &&\n            regExpSource !== \"$\" &&\n            regExpSource !== \".\" &&\n            !currPattern.ignoreCase\n          ) {\n            return regExpSource;\n          } else if (\n            regExpSource.length === 2 &&\n            regExpSource[0] === \"\\\\\" &&\n            // not a meta character\n            !includes(\n              [\n                \"d\",\n                \"D\",\n                \"s\",\n                \"S\",\n                \"t\",\n                \"r\",\n                \"n\",\n                \"t\",\n                \"0\",\n                \"c\",\n                \"b\",\n                \"B\",\n                \"f\",\n                \"v\",\n                \"w\",\n                \"W\",\n              ],\n              regExpSource[1],\n            )\n          ) {\n            // escaped meta Characters: /\\+/ /\\[/\n            // or redundant escaping: /\\a/\n            // without the escaping 
\"\\\"\n            return regExpSource[1];\n          } else {\n            return options.useSticky\n              ? addStickyFlag(currPattern)\n              : addStartOfInput(currPattern);\n          }\n        } else if (isFunction(currPattern)) {\n          hasCustom = true;\n          // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object\n          return { exec: currPattern };\n        } else if (typeof currPattern === \"object\") {\n          hasCustom = true;\n          // ICustomPattern\n          return currPattern;\n        } else if (typeof currPattern === \"string\") {\n          if (currPattern.length === 1) {\n            return currPattern;\n          } else {\n            const escapedRegExpString = currPattern.replace(\n              /[\\\\^$.*+?()[\\]{}|]/g,\n              \"\\\\$&\",\n            );\n            const wrappedRegExp = new RegExp(escapedRegExpString);\n            return options.useSticky\n              ? addStickyFlag(wrappedRegExp)\n              : addStartOfInput(wrappedRegExp);\n          }\n        } else {\n          throw Error(\"non exhaustive match\");\n        }\n      },\n    );\n  });\n\n  let patternIdxToType: number[];\n  let patternIdxToGroup: (string | undefined | false)[];\n  let patternIdxToLongerAltIdxArr: (number[] | undefined)[];\n  let patternIdxToPushMode: (string | undefined)[];\n  let patternIdxToPopMode: boolean[];\n  tracer(\"misc mapping\", () => {\n    patternIdxToType = map(\n      onlyRelevantTypes,\n      (currType) => currType.tokenTypeIdx!,\n    );\n\n    patternIdxToGroup = map(onlyRelevantTypes, (clazz: any) => {\n      const groupName = clazz.GROUP;\n      /* istanbul ignore next */\n      if (groupName === Lexer.SKIPPED) {\n        return undefined;\n      } else if (isString(groupName)) {\n        return groupName;\n      } else if (isUndefined(groupName)) {\n        return false;\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    });\n\n    patternIdxToLongerAltIdxArr = map(onlyRelevantTypes, (clazz: any) => {\n      const longerAltType = clazz.LONGER_ALT;\n\n      if (longerAltType) {\n        const longerAltIdxArr = isArray(longerAltType)\n          ? 
map(longerAltType, (type: any) => indexOf(onlyRelevantTypes, type))\n          : [indexOf(onlyRelevantTypes, longerAltType)];\n        return longerAltIdxArr;\n      }\n    });\n\n    patternIdxToPushMode = map(\n      onlyRelevantTypes,\n      (clazz: any) => clazz.PUSH_MODE,\n    );\n\n    patternIdxToPopMode = map(onlyRelevantTypes, (clazz: any) =>\n      has(clazz, \"POP_MODE\"),\n    );\n  });\n\n  let patternIdxToCanLineTerminator: boolean[];\n  tracer(\"Line Terminator Handling\", () => {\n    const lineTerminatorCharCodes = getCharCodes(\n      options.lineTerminatorCharacters!,\n    );\n    patternIdxToCanLineTerminator = map(onlyRelevantTypes, (tokType) => false);\n    if (options.positionTracking !== \"onlyOffset\") {\n      patternIdxToCanLineTerminator = map(onlyRelevantTypes, (tokType) => {\n        if (has(tokType, \"LINE_BREAKS\")) {\n          return !!tokType.LINE_BREAKS;\n        } else {\n          return (\n            checkLineBreaksIssues(tokType, lineTerminatorCharCodes) === false &&\n            canMatchCharCode(\n              lineTerminatorCharCodes,\n              tokType.PATTERN as RegExp | string,\n            )\n          );\n        }\n      });\n    }\n  });\n\n  let patternIdxToIsCustom: boolean[];\n  let patternIdxToShort: (number | false)[];\n  let emptyGroups!: { [groupName: string]: IToken[] };\n  let patternIdxToConfig!: IPatternConfig[];\n  tracer(\"Misc Mapping #2\", () => {\n    patternIdxToIsCustom = map(onlyRelevantTypes, isCustomPattern);\n    patternIdxToShort = map(allTransformedPatterns, isShortPattern);\n\n    emptyGroups = reduce(\n      onlyRelevantTypes,\n      (acc, clazz: any) => {\n        const groupName = clazz.GROUP;\n        if (isString(groupName) && !(groupName === Lexer.SKIPPED)) {\n          acc[groupName] = [];\n        }\n        return acc;\n      },\n      {} as { [groupName: string]: IToken[] },\n    );\n\n    patternIdxToConfig = map(\n      allTransformedPatterns,\n      (x, idx): IPatternConfig => {\n        return {\n          pattern: allTransformedPatterns[idx],\n          longerAlt: patternIdxToLongerAltIdxArr[idx],\n          canLineTerminator: patternIdxToCanLineTerminator[idx],\n          isCustom: patternIdxToIsCustom[idx],\n          short: patternIdxToShort[idx],\n          group: patternIdxToGroup[idx],\n          push: patternIdxToPushMode[idx],\n          pop: patternIdxToPopMode[idx],\n          tokenTypeIdx: patternIdxToType[idx],\n          tokenType: onlyRelevantTypes[idx],\n        };\n      },\n    );\n  });\n\n  let canBeOptimized = true;\n  let charCodeToPatternIdxToConfig: { [charCode: number]: IPatternConfig[] } =\n    [];\n\n  if (!options.safeMode) {\n    tracer(\"First Char Optimization\", () => {\n      charCodeToPatternIdxToConfig = reduce(\n        onlyRelevantTypes,\n        (result, currTokType, idx) => {\n          if (typeof currTokType.PATTERN === \"string\") {\n            const charCode = currTokType.PATTERN.charCodeAt(0);\n            const optimizedIdx = charCodeToOptimizedIndex(charCode);\n            addToMapOfArrays(result, optimizedIdx, patternIdxToConfig[idx]);\n          } else if (isArray(currTokType.START_CHARS_HINT)) {\n            let lastOptimizedIdx: number;\n            forEach(currTokType.START_CHARS_HINT, (charOrInt) => {\n              const charCode =\n                typeof charOrInt === \"string\"\n                  ? 
charOrInt.charCodeAt(0)\n                  : charOrInt;\n              const currOptimizedIdx = charCodeToOptimizedIndex(charCode);\n              // Avoid adding the config multiple times\n              /* istanbul ignore else */\n              // - Difficult to check this scenario effects as it is only a performance\n              //   optimization that does not change correctness\n              if (lastOptimizedIdx !== currOptimizedIdx) {\n                lastOptimizedIdx = currOptimizedIdx;\n                addToMapOfArrays(\n                  result,\n                  currOptimizedIdx,\n                  patternIdxToConfig[idx],\n                );\n              }\n            });\n          } else if (isRegExp(currTokType.PATTERN)) {\n            if (currTokType.PATTERN.unicode) {\n              canBeOptimized = false;\n              if (options.ensureOptimizations) {\n                PRINT_ERROR(\n                  `${failedOptimizationPrefixMsg}` +\n                    `\\tUnable to analyze < ${currTokType.PATTERN.toString()} > pattern.\\n` +\n                    \"\\tThe regexp unicode flag is not currently supported by the regexp-to-ast library.\\n\" +\n                    \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n                    \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNICODE_OPTIMIZE\",\n                );\n              }\n            } else {\n              const optimizedCodes = getOptimizedStartCodesIndices(\n                currTokType.PATTERN,\n                options.ensureOptimizations,\n              );\n              /* istanbul ignore if */\n              // start code will only be empty given an empty regExp or failure of regexp-to-ast library\n              // the first should be a different validation and the second cannot be tested.\n              if (isEmpty(optimizedCodes)) {\n                // we cannot understand what codes may start possible matches\n                // The optimization correctness requires knowing start codes for ALL patterns.\n                // Not actually sure this is an error, no debug message\n                canBeOptimized = false;\n              }\n              forEach(optimizedCodes, (code) => {\n                addToMapOfArrays(result, code, patternIdxToConfig[idx]);\n              });\n            }\n          } else {\n            if (options.ensureOptimizations) {\n              PRINT_ERROR(\n                `${failedOptimizationPrefixMsg}` +\n                  `\\tTokenType: <${currTokType.name}> is using a custom token pattern without providing  parameter.\\n` +\n                  \"\\tThis will disable the lexer's first char optimizations.\\n\" +\n                  \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE\",\n              );\n            }\n            canBeOptimized = false;\n          }\n\n          return result;\n        },\n        [] as { [charCode: number]: IPatternConfig[] },\n      );\n    });\n  }\n\n  return {\n    emptyGroups: emptyGroups,\n    patternIdxToConfig: patternIdxToConfig,\n    charCodeToPatternIdxToConfig: charCodeToPatternIdxToConfig,\n    hasCustom: hasCustom,\n    canBeOptimized: canBeOptimized,\n  };\n}\n\nexport function validatePatterns(\n  tokenTypes: TokenType[],\n  validModesNames: string[],\n): ILexerDefinitionError[] {\n  let errors: ILexerDefinitionError[] = [];\n\n  const missingResult = findMissingPatterns(tokenTypes);\n  errors = 
errors.concat(missingResult.errors);\n\n  const invalidResult = findInvalidPatterns(missingResult.valid);\n  const validTokenTypes = invalidResult.valid;\n  errors = errors.concat(invalidResult.errors);\n\n  errors = errors.concat(validateRegExpPattern(validTokenTypes));\n\n  errors = errors.concat(findInvalidGroupType(validTokenTypes));\n\n  errors = errors.concat(\n    findModesThatDoNotExist(validTokenTypes, validModesNames),\n  );\n\n  errors = errors.concat(findUnreachablePatterns(validTokenTypes));\n\n  return errors;\n}\n\nfunction validateRegExpPattern(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  let errors: ILexerDefinitionError[] = [];\n  const withRegExpPatterns = filter(tokenTypes, (currTokType) =>\n    isRegExp(currTokType[PATTERN]),\n  );\n\n  errors = errors.concat(findEndOfInputAnchor(withRegExpPatterns));\n\n  errors = errors.concat(findStartOfInputAnchor(withRegExpPatterns));\n\n  errors = errors.concat(findUnsupportedFlags(withRegExpPatterns));\n\n  errors = errors.concat(findDuplicatePatterns(withRegExpPatterns));\n\n  errors = errors.concat(findEmptyMatchRegExps(withRegExpPatterns));\n\n  return errors;\n}\n\nexport interface ILexerFilterResult {\n  errors: ILexerDefinitionError[];\n  valid: TokenType[];\n}\n\nexport function findMissingPatterns(\n  tokenTypes: TokenType[],\n): ILexerFilterResult {\n  const tokenTypesWithMissingPattern = filter(tokenTypes, (currType) => {\n    return !has(currType, PATTERN);\n  });\n\n  const errors = map(tokenTypesWithMissingPattern, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- missing static 'PATTERN' property\",\n      type: LexerDefinitionErrorType.MISSING_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  const valid = difference(tokenTypes, tokenTypesWithMissingPattern);\n  return { errors, valid };\n}\n\nexport function findInvalidPatterns(\n  tokenTypes: TokenType[],\n): ILexerFilterResult {\n  const tokenTypesWithInvalidPattern = filter(tokenTypes, (currType) => {\n    const pattern = currType[PATTERN];\n    return (\n      !isRegExp(pattern) &&\n      !isFunction(pattern) &&\n      !has(pattern, \"exec\") &&\n      !isString(pattern)\n    );\n  });\n\n  const errors = map(tokenTypesWithInvalidPattern, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' can only be a RegExp, a\" +\n        \" Function matching the {CustomPatternMatcherFunc} type or an Object matching the {ICustomPattern} interface.\",\n      type: LexerDefinitionErrorType.INVALID_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  const valid = difference(tokenTypes, tokenTypesWithInvalidPattern);\n  return { errors, valid };\n}\n\nconst end_of_input = /[^\\\\][$]/;\n\nexport function findEndOfInputAnchor(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  class EndAnchorFinder extends BaseRegExpVisitor {\n    found = false;\n\n    visitEndAnchor(node: unknown) {\n      this.found = true;\n    }\n  }\n\n  const invalidRegex = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN;\n\n    try {\n      const regexpAst = getRegExpAst(pattern as RegExp);\n      const endAnchorVisitor = new EndAnchorFinder();\n      endAnchorVisitor.visit(regexpAst);\n\n      return endAnchorVisitor.found;\n    } catch (e) {\n      // old behavior in case of runtime exceptions with regexp-to-ast.\n      /* istanbul ignore next - cannot ensure an error in regexp-to-ast*/\n   
   return end_of_input.test((pattern as RegExp).source);\n    }\n  });\n\n  const errors = map(invalidRegex, (currType) => {\n    return {\n      message:\n        \"Unexpected RegExp Anchor Error:\\n\" +\n        \"\\tToken Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' cannot contain end of input anchor '$'\\n\" +\n        \"\\tSee chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.EOI_ANCHOR_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findEmptyMatchRegExps(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const matchesEmptyString = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN as RegExp;\n    return pattern.test(\"\");\n  });\n\n  const errors = map(matchesEmptyString, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' must not match an empty string\",\n      type: LexerDefinitionErrorType.EMPTY_MATCH_PATTERN,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nconst start_of_input = /[^\\\\[][\\^]|^\\^/;\n\nexport function findStartOfInputAnchor(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  class StartAnchorFinder extends BaseRegExpVisitor {\n    found = false;\n\n    visitStartAnchor(node: unknown) {\n      this.found = true;\n    }\n  }\n\n  const invalidRegex = filter(tokenTypes, (currType) => {\n    const pattern = currType.PATTERN as RegExp;\n    try {\n      const regexpAst = getRegExpAst(pattern);\n      const startAnchorVisitor = new StartAnchorFinder();\n      startAnchorVisitor.visit(regexpAst);\n\n      return startAnchorVisitor.found;\n    } catch (e) {\n      // old behavior in case of runtime exceptions with regexp-to-ast.\n      /* istanbul ignore next - cannot ensure an error in regexp-to-ast*/\n      return start_of_input.test(pattern.source);\n    }\n  });\n\n  const errors = map(invalidRegex, (currType) => {\n    return {\n      message:\n        \"Unexpected RegExp Anchor Error:\\n\" +\n        \"\\tToken Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' cannot contain start of input anchor '^'\\n\" +\n        \"\\tSee https://chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.SOI_ANCHOR_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findUnsupportedFlags(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const invalidFlags = filter(tokenTypes, (currType) => {\n    const pattern = currType[PATTERN];\n    return pattern instanceof RegExp && (pattern.multiline || pattern.global);\n  });\n\n  const errors = map(invalidFlags, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'PATTERN' may NOT contain global('g') or multiline('m')\",\n      type: LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\n// This can only test for identical duplicate RegExps, not semantically equivalent ones.\nexport function findDuplicatePatterns(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const found: TokenType[] = [];\n  let identicalPatterns = map(tokenTypes, (outerType: any) => {\n    return reduce(\n      tokenTypes,\n      (result, innerType) => {\n        if (\n         
 outerType.PATTERN.source === (innerType.PATTERN as RegExp).source &&\n          !includes(found, innerType) &&\n          innerType.PATTERN !== Lexer.NA\n        ) {\n          // this avoids duplicates in the result, each Token Type may only appear in one \"set\"\n          // in essence we are creating Equivalence classes on equality relation.\n          found.push(innerType);\n          result.push(innerType);\n          return result;\n        }\n        return result;\n      },\n      [] as TokenType[],\n    );\n  });\n\n  identicalPatterns = compact(identicalPatterns);\n\n  const duplicatePatterns = filter(identicalPatterns, (currIdenticalSet) => {\n    return currIdenticalSet.length > 1;\n  });\n\n  const errors = map(duplicatePatterns, (setOfIdentical: any) => {\n    const tokenTypeNames = map(setOfIdentical, (currType: any) => {\n      return currType.name;\n    });\n\n    const dupPatternSrc = (first(setOfIdentical)).PATTERN;\n    return {\n      message:\n        `The same RegExp pattern ->${dupPatternSrc}<-` +\n        `has been used in all of the following Token Types: ${tokenTypeNames.join(\n          \", \",\n        )} <-`,\n      type: LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,\n      tokenTypes: setOfIdentical,\n    };\n  });\n\n  return errors;\n}\n\nexport function findInvalidGroupType(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const invalidTypes = filter(tokenTypes, (clazz: any) => {\n    if (!has(clazz, \"GROUP\")) {\n      return false;\n    }\n    const group = clazz.GROUP;\n\n    return group !== Lexer.SKIPPED && group !== Lexer.NA && !isString(group);\n  });\n\n  const errors = map(invalidTypes, (currType) => {\n    return {\n      message:\n        \"Token Type: ->\" +\n        currType.name +\n        \"<- static 'GROUP' can only be Lexer.SKIPPED/Lexer.NA/A String\",\n      type: LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND,\n      tokenTypes: [currType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findModesThatDoNotExist(\n  tokenTypes: TokenType[],\n  validModes: string[],\n): ILexerDefinitionError[] {\n  const invalidModes = filter(tokenTypes, (clazz: any) => {\n    return (\n      clazz.PUSH_MODE !== undefined && !includes(validModes, clazz.PUSH_MODE)\n    );\n  });\n\n  const errors = map(invalidModes, (tokType) => {\n    const msg =\n      `Token Type: ->${tokType.name}<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->${tokType.PUSH_MODE}<-` +\n      `which does not exist`;\n    return {\n      message: msg,\n      type: LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST,\n      tokenTypes: [tokType],\n    };\n  });\n\n  return errors;\n}\n\nexport function findUnreachablePatterns(\n  tokenTypes: TokenType[],\n): ILexerDefinitionError[] {\n  const errors: ILexerDefinitionError[] = [];\n\n  const canBeTested = reduce(\n    tokenTypes,\n    (result, tokType, idx) => {\n      const pattern = tokType.PATTERN;\n\n      if (pattern === Lexer.NA) {\n        return result;\n      }\n\n      // a more comprehensive validation for all forms of regExps would require\n      // deeper regExp analysis capabilities\n      if (isString(pattern)) {\n        result.push({ str: pattern, idx, tokenType: tokType });\n      } else if (isRegExp(pattern) && noMetaChar(pattern)) {\n        result.push({ str: pattern.source, idx, tokenType: tokType });\n      }\n      return result;\n    },\n    [] as { str: string; idx: number; tokenType: TokenType }[],\n  );\n\n  forEach(tokenTypes, (tokType, testIdx) => {\n    
forEach(canBeTested, ({ str, idx, tokenType }) => {\n      if (testIdx < idx && testTokenType(str, tokType.PATTERN)) {\n        const msg =\n          `Token: ->${tokenType.name}<- can never be matched.\\n` +\n          `Because it appears AFTER the Token Type ->${tokType.name}<-` +\n          `in the lexer's definition.\\n` +\n          `See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNREACHABLE`;\n        errors.push({\n          message: msg,\n          type: LexerDefinitionErrorType.UNREACHABLE_PATTERN,\n          tokenTypes: [tokType, tokenType],\n        });\n      }\n    });\n  });\n\n  return errors;\n}\n\nfunction testTokenType(str: string, pattern: any): boolean {\n  /* istanbul ignore else */\n  if (isRegExp(pattern)) {\n    const regExpArray = pattern.exec(str);\n    return regExpArray !== null && regExpArray.index === 0;\n  } else if (isFunction(pattern)) {\n    // maintain the API of custom patterns\n    return pattern(str, 0, [], {});\n  } else if (has(pattern, \"exec\")) {\n    // maintain the API of custom patterns\n    return pattern.exec(str, 0, [], {});\n  } else if (typeof pattern === \"string\") {\n    return pattern === str;\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nfunction noMetaChar(regExp: RegExp): boolean {\n  //https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp\n  const metaChars = [\n    \".\",\n    \"\\\\\",\n    \"[\",\n    \"]\",\n    \"|\",\n    \"^\",\n    \"$\",\n    \"(\",\n    \")\",\n    \"?\",\n    \"*\",\n    \"+\",\n    \"{\",\n  ];\n  return (\n    find(metaChars, (char) => regExp.source.indexOf(char) !== -1) === undefined\n  );\n}\n\nexport function addStartOfInput(pattern: RegExp): RegExp {\n  const flags = pattern.ignoreCase ? \"i\" : \"\";\n  // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.\n  // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)\n  return new RegExp(`^(?:${pattern.source})`, flags);\n}\n\nexport function addStickyFlag(pattern: RegExp): RegExp {\n  const flags = pattern.ignoreCase ? 
\"iy\" : \"y\";\n  // always wrapping in a none capturing group preceded by '^' to make sure matching can only work on start of input.\n  // duplicate/redundant start of input markers have no meaning (/^^^^A/ === /^A/)\n  return new RegExp(`${pattern.source}`, flags);\n}\n\nexport function performRuntimeChecks(\n  lexerDefinition: IMultiModeLexerDefinition,\n  trackLines: boolean,\n  lineTerminatorCharacters: (number | string)[],\n): ILexerDefinitionError[] {\n  const errors: ILexerDefinitionError[] = [];\n\n  // some run time checks to help the end users.\n  if (!has(lexerDefinition, DEFAULT_MODE)) {\n    errors.push({\n      message:\n        \"A MultiMode Lexer cannot be initialized without a <\" +\n        DEFAULT_MODE +\n        \"> property in its definition\\n\",\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE,\n    });\n  }\n  if (!has(lexerDefinition, MODES)) {\n    errors.push({\n      message:\n        \"A MultiMode Lexer cannot be initialized without a <\" +\n        MODES +\n        \"> property in its definition\\n\",\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY,\n    });\n  }\n\n  if (\n    has(lexerDefinition, MODES) &&\n    has(lexerDefinition, DEFAULT_MODE) &&\n    !has(lexerDefinition.modes, lexerDefinition.defaultMode)\n  ) {\n    errors.push({\n      message:\n        `A MultiMode Lexer cannot be initialized with a ${DEFAULT_MODE}: <${lexerDefinition.defaultMode}>` +\n        `which does not exist\\n`,\n      type: LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST,\n    });\n  }\n\n  if (has(lexerDefinition, MODES)) {\n    forEach(lexerDefinition.modes, (currModeValue, currModeName) => {\n      forEach(currModeValue, (currTokType, currIdx) => {\n        if (isUndefined(currTokType)) {\n          errors.push({\n            message:\n              `A Lexer cannot be initialized using an undefined Token Type. Mode:` +\n              `<${currModeName}> at index: <${currIdx}>\\n`,\n            type: LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED,\n          });\n        } else if (has(currTokType, \"LONGER_ALT\")) {\n          const longerAlt = isArray(currTokType.LONGER_ALT)\n            ? 
currTokType.LONGER_ALT\n            : [currTokType.LONGER_ALT];\n          forEach(longerAlt, (currLongerAlt) => {\n            if (\n              !isUndefined(currLongerAlt) &&\n              !includes(currModeValue, currLongerAlt)\n            ) {\n              errors.push({\n                message: `A MultiMode Lexer cannot be initialized with a longer_alt <${currLongerAlt.name}> on token <${currTokType.name}> outside of mode <${currModeName}>\\n`,\n                type: LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE,\n              });\n            }\n          });\n        }\n      });\n    });\n  }\n\n  return errors;\n}\n\nexport function performWarningRuntimeChecks(\n  lexerDefinition: IMultiModeLexerDefinition,\n  trackLines: boolean,\n  lineTerminatorCharacters: (number | string)[],\n): ILexerDefinitionError[] {\n  const warnings = [];\n  let hasAnyLineBreak = false;\n  const allTokenTypes = compact(flatten(values(lexerDefinition.modes)));\n\n  const concreteTokenTypes = reject(\n    allTokenTypes,\n    (currType) => currType[PATTERN] === Lexer.NA,\n  );\n  const terminatorCharCodes = getCharCodes(lineTerminatorCharacters);\n  if (trackLines) {\n    forEach(concreteTokenTypes, (tokType) => {\n      const currIssue = checkLineBreaksIssues(tokType, terminatorCharCodes);\n      if (currIssue !== false) {\n        const message = buildLineBreakIssueMessage(tokType, currIssue);\n        const warningDescriptor = {\n          message,\n          type: currIssue.issue,\n          tokenType: tokType,\n        };\n        warnings.push(warningDescriptor);\n      } else {\n        // we don't want to attempt to scan if the user explicitly specified the line_breaks option.\n        if (has(tokType, \"LINE_BREAKS\")) {\n          if (tokType.LINE_BREAKS === true) {\n            hasAnyLineBreak = true;\n          }\n        } else {\n          if (\n            canMatchCharCode(terminatorCharCodes, tokType.PATTERN as RegExp)\n          ) {\n            hasAnyLineBreak = true;\n          }\n        }\n      }\n    });\n  }\n\n  if (trackLines && !hasAnyLineBreak) {\n    warnings.push({\n      message:\n        \"Warning: No LINE_BREAKS Found.\\n\" +\n        \"\\tThis Lexer has been defined to track line and column information,\\n\" +\n        \"\\tBut none of the Token Types can be identified as matching a line terminator.\\n\" +\n        \"\\tSee https://chevrotain.io/docs/guide/resolving_lexer_errors.html#LINE_BREAKS \\n\" +\n        \"\\tfor details.\",\n      type: LexerDefinitionErrorType.NO_LINE_BREAKS_FLAGS,\n    });\n  }\n  return warnings;\n}\n\nexport function cloneEmptyGroups(emptyGroups: {\n  [groupName: string]: IToken;\n}): { [groupName: string]: IToken } {\n  const clonedResult: any = {};\n  const groupKeys = keys(emptyGroups);\n\n  forEach(groupKeys, (currKey) => {\n    const currGroupValue = emptyGroups[currKey];\n\n    /* istanbul ignore else */\n    if (isArray(currGroupValue)) {\n      clonedResult[currKey] = [];\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  });\n\n  return clonedResult;\n}\n\n// TODO: refactor to avoid duplication\nexport function isCustomPattern(tokenType: TokenType): boolean {\n  const pattern = tokenType.PATTERN;\n  /* istanbul ignore else */\n  if (isRegExp(pattern)) {\n    return false;\n  } else if (isFunction(pattern)) {\n    // CustomPatternMatcherFunc - custom patterns do not require any transformations, only wrapping in a RegExp Like object\n    return true;\n  } else if (has(pattern, 
\"exec\")) {\n    // ICustomPattern\n    return true;\n  } else if (isString(pattern)) {\n    return false;\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nexport function isShortPattern(pattern: any): number | false {\n  if (isString(pattern) && pattern.length === 1) {\n    return pattern.charCodeAt(0);\n  } else {\n    return false;\n  }\n}\n\n/**\n * Faster than using a RegExp for default newline detection during lexing.\n */\nexport const LineTerminatorOptimizedTester: ILineTerminatorsTester = {\n  // implements /\\n|\\r\\n?/g.test\n  test: function (text) {\n    const len = text.length;\n    for (let i = this.lastIndex; i < len; i++) {\n      const c = text.charCodeAt(i);\n      if (c === 10) {\n        this.lastIndex = i + 1;\n        return true;\n      } else if (c === 13) {\n        if (text.charCodeAt(i + 1) === 10) {\n          this.lastIndex = i + 2;\n        } else {\n          this.lastIndex = i + 1;\n        }\n        return true;\n      }\n    }\n    return false;\n  },\n\n  lastIndex: 0,\n};\n\nfunction checkLineBreaksIssues(\n  tokType: TokenType,\n  lineTerminatorCharCodes: number[],\n):\n  | {\n      issue:\n        | LexerDefinitionErrorType.IDENTIFY_TERMINATOR\n        | LexerDefinitionErrorType.CUSTOM_LINE_BREAK;\n      errMsg?: string;\n    }\n  | false {\n  if (has(tokType, \"LINE_BREAKS\")) {\n    // if the user explicitly declared the line_breaks option we will respect their choice\n    // and assume it is correct.\n    return false;\n  } else {\n    /* istanbul ignore else */\n    if (isRegExp(tokType.PATTERN)) {\n      try {\n        // TODO: why is the casting suddenly needed?\n        canMatchCharCode(lineTerminatorCharCodes, tokType.PATTERN as RegExp);\n      } catch (e) {\n        /* istanbul ignore next - to test this we would have to mock  to throw an error */\n        return {\n          issue: LexerDefinitionErrorType.IDENTIFY_TERMINATOR,\n          errMsg: (e as Error).message,\n        };\n      }\n      return false;\n    } else if (isString(tokType.PATTERN)) {\n      // string literal patterns can always be analyzed to detect line terminator usage\n      return false;\n    } else if (isCustomPattern(tokType)) {\n      // custom token types\n      return { issue: LexerDefinitionErrorType.CUSTOM_LINE_BREAK };\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  }\n}\n\nexport function buildLineBreakIssueMessage(\n  tokType: TokenType,\n  details: {\n    issue:\n      | LexerDefinitionErrorType.IDENTIFY_TERMINATOR\n      | LexerDefinitionErrorType.CUSTOM_LINE_BREAK;\n    errMsg?: string;\n  },\n): string {\n  /* istanbul ignore else */\n  if (details.issue === LexerDefinitionErrorType.IDENTIFY_TERMINATOR) {\n    return (\n      \"Warning: unable to identify line terminator usage in pattern.\\n\" +\n      `\\tThe problem is in the <${tokType.name}> Token Type\\n` +\n      `\\t Root cause: ${details.errMsg}.\\n` +\n      \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#IDENTIFY_TERMINATOR\"\n    );\n  } else if (details.issue === LexerDefinitionErrorType.CUSTOM_LINE_BREAK) {\n    return (\n      \"Warning: A Custom Token Pattern should specify the  option.\\n\" +\n      `\\tThe problem is in the <${tokType.name}> Token Type\\n` +\n      \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_LINE_BREAK\"\n    );\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n}\n\nfunction getCharCodes(charsOrCodes: (number | string)[]): number[] {\n  
const charCodes = map(charsOrCodes, (numOrString) => {\n    if (isString(numOrString)) {\n      return numOrString.charCodeAt(0);\n    } else {\n      return numOrString;\n    }\n  });\n\n  return charCodes;\n}\n\nfunction addToMapOfArrays(\n  map: Record,\n  key: number,\n  value: T,\n): void {\n  if (map[key] === undefined) {\n    map[key] = [value];\n  } else {\n    map[key].push(value);\n  }\n}\n\nexport const minOptimizationVal = 256;\n\n/**\n * We are mapping charCode above ASCI (256) into buckets each in the size of 256.\n * This is because ASCI are the most common start chars so each one of those will get its own\n * possible token configs vector.\n *\n * Tokens starting with charCodes \"above\" ASCI are uncommon, so we can \"afford\"\n * to place these into buckets of possible token configs, What we gain from\n * this is avoiding the case of creating an optimization 'charCodeToPatternIdxToConfig'\n * which would contain 10,000+ arrays of small size (e.g unicode Identifiers scenario).\n * Our 'charCodeToPatternIdxToConfig' max size will now be:\n * 256 + (2^16 / 2^8) - 1 === 511\n *\n * note the hack for fast division integer part extraction\n * See: https://stackoverflow.com/a/4228528\n */\nlet charCodeToOptimizedIdxMap: number[] = [];\nexport function charCodeToOptimizedIndex(charCode: number): number {\n  return charCode < minOptimizationVal\n    ? charCode\n    : charCodeToOptimizedIdxMap[charCode];\n}\n\n/**\n * This is a compromise between cold start / hot running performance\n * Creating this array takes ~3ms on a modern machine,\n * But if we perform the computation at runtime as needed the CSS Lexer benchmark\n * performance degrades by ~10%\n *\n * TODO: Perhaps it should be lazy initialized only if a charCode > 255 is used.\n */\nfunction initCharCodeToOptimizedIndexMap() {\n  if (isEmpty(charCodeToOptimizedIdxMap)) {\n    charCodeToOptimizedIdxMap = new Array(65536);\n    for (let i = 0; i < 65536; i++) {\n      charCodeToOptimizedIdxMap[i] = i > 255 ? 
255 + ~~(i / 255) : i;\n    }\n  }\n}\n", "import {\n  clone,\n  compact,\n  difference,\n  flatten,\n  forEach,\n  has,\n  includes,\n  isArray,\n  isEmpty,\n  map,\n} from \"lodash-es\";\nimport { IToken, TokenType } from \"@chevrotain/types\";\n\nexport function tokenStructuredMatcher(\n  tokInstance: IToken,\n  tokConstructor: TokenType,\n) {\n  const instanceType = tokInstance.tokenTypeIdx;\n  if (instanceType === tokConstructor.tokenTypeIdx) {\n    return true;\n  } else {\n    return (\n      tokConstructor.isParent === true &&\n      tokConstructor.categoryMatchesMap![instanceType] === true\n    );\n  }\n}\n\n// Optimized tokenMatcher in case our grammar does not use token categories\n// Being so tiny it is much more likely to be in-lined and this avoid the function call overhead\nexport function tokenStructuredMatcherNoCategories(\n  token: IToken,\n  tokType: TokenType,\n) {\n  return token.tokenTypeIdx === tokType.tokenTypeIdx;\n}\n\nexport let tokenShortNameIdx = 1;\nexport const tokenIdxToClass: { [tokenIdx: number]: TokenType } = {};\n\nexport function augmentTokenTypes(tokenTypes: TokenType[]): void {\n  // collect the parent Token Types as well.\n  const tokenTypesAndParents = expandCategories(tokenTypes);\n\n  // add required tokenType and categoryMatches properties\n  assignTokenDefaultProps(tokenTypesAndParents);\n\n  // fill up the categoryMatches\n  assignCategoriesMapProp(tokenTypesAndParents);\n  assignCategoriesTokensProp(tokenTypesAndParents);\n\n  forEach(tokenTypesAndParents, (tokType) => {\n    tokType.isParent = tokType.categoryMatches!.length > 0;\n  });\n}\n\nexport function expandCategories(tokenTypes: TokenType[]): TokenType[] {\n  let result = clone(tokenTypes);\n\n  let categories = tokenTypes;\n  let searching = true;\n  while (searching) {\n    categories = compact(\n      flatten(map(categories, (currTokType) => currTokType.CATEGORIES)),\n    );\n\n    const newCategories = difference(categories, result);\n\n    result = result.concat(newCategories);\n\n    if (isEmpty(newCategories)) {\n      searching = false;\n    } else {\n      categories = newCategories;\n    }\n  }\n  return result;\n}\n\nexport function assignTokenDefaultProps(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    if (!hasShortKeyProperty(currTokType)) {\n      tokenIdxToClass[tokenShortNameIdx] = currTokType;\n      (currTokType).tokenTypeIdx = tokenShortNameIdx++;\n    }\n\n    // CATEGORIES? 
: TokenType | TokenType[]\n    if (\n      hasCategoriesProperty(currTokType) &&\n      !isArray(currTokType.CATEGORIES)\n      // &&\n      // !isUndefined(currTokType.CATEGORIES.PATTERN)\n    ) {\n      currTokType.CATEGORIES = [currTokType.CATEGORIES as unknown as TokenType];\n    }\n\n    if (!hasCategoriesProperty(currTokType)) {\n      currTokType.CATEGORIES = [];\n    }\n\n    if (!hasExtendingTokensTypesProperty(currTokType)) {\n      currTokType.categoryMatches = [];\n    }\n\n    if (!hasExtendingTokensTypesMapProperty(currTokType)) {\n      currTokType.categoryMatchesMap = {};\n    }\n  });\n}\n\nexport function assignCategoriesTokensProp(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    // avoid duplications\n    currTokType.categoryMatches = [];\n    forEach(currTokType.categoryMatchesMap!, (val, key) => {\n      currTokType.categoryMatches!.push(\n        tokenIdxToClass[key as unknown as number].tokenTypeIdx!,\n      );\n    });\n  });\n}\n\nexport function assignCategoriesMapProp(tokenTypes: TokenType[]): void {\n  forEach(tokenTypes, (currTokType) => {\n    singleAssignCategoriesToksMap([], currTokType);\n  });\n}\n\nexport function singleAssignCategoriesToksMap(\n  path: TokenType[],\n  nextNode: TokenType,\n): void {\n  forEach(path, (pathNode) => {\n    nextNode.categoryMatchesMap![pathNode.tokenTypeIdx!] = true;\n  });\n\n  forEach(nextNode.CATEGORIES, (nextCategory) => {\n    const newPath = path.concat(nextNode);\n    // avoids infinite loops due to cyclic categories.\n    if (!includes(newPath, nextCategory)) {\n      singleAssignCategoriesToksMap(newPath, nextCategory);\n    }\n  });\n}\n\nexport function hasShortKeyProperty(tokType: TokenType): boolean {\n  return has(tokType, \"tokenTypeIdx\");\n}\n\nexport function hasCategoriesProperty(tokType: TokenType): boolean {\n  return has(tokType, \"CATEGORIES\");\n}\n\nexport function hasExtendingTokensTypesProperty(tokType: TokenType): boolean {\n  return has(tokType, \"categoryMatches\");\n}\n\nexport function hasExtendingTokensTypesMapProperty(\n  tokType: TokenType,\n): boolean {\n  return has(tokType, \"categoryMatchesMap\");\n}\n\nexport function isTokenType(tokType: TokenType): boolean {\n  return has(tokType, \"tokenTypeIdx\");\n}\n", "import { ILexerErrorMessageProvider, IToken } from \"@chevrotain/types\";\n\nexport const defaultLexerErrorProvider: ILexerErrorMessageProvider = {\n  buildUnableToPopLexerModeMessage(token: IToken): string {\n    return `Unable to pop Lexer Mode after encountering Token ->${token.image}<- The Mode Stack is empty`;\n  },\n\n  buildUnexpectedCharactersMessage(\n    fullText: string,\n    startOffset: number,\n    length: number,\n    line?: number,\n    column?: number,\n  ): string {\n    return (\n      `unexpected character: ->${fullText.charAt(\n        startOffset,\n      )}<- at offset: ${startOffset},` + ` skipped ${length} characters.`\n    );\n  },\n};\n", "import {\n  analyzeTokenTypes,\n  charCodeToOptimizedIndex,\n  cloneEmptyGroups,\n  DEFAULT_MODE,\n  IAnalyzeResult,\n  IPatternConfig,\n  LineTerminatorOptimizedTester,\n  performRuntimeChecks,\n  performWarningRuntimeChecks,\n  SUPPORT_STICKY,\n  validatePatterns,\n} from \"./lexer.js\";\nimport {\n  assign,\n  clone,\n  forEach,\n  identity,\n  isArray,\n  isEmpty,\n  isUndefined,\n  keys,\n  last,\n  map,\n  noop,\n  reduce,\n  reject,\n} from \"lodash-es\";\nimport { PRINT_WARNING, timer, toFastProperties } from \"@chevrotain/utils\";\nimport { augmentTokenTypes } from 
\"./tokens.js\";\nimport {\n  CustomPatternMatcherFunc,\n  CustomPatternMatcherReturn,\n  ILexerConfig,\n  ILexerDefinitionError,\n  ILexingError,\n  IMultiModeLexerDefinition,\n  IToken,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { defaultLexerErrorProvider } from \"./lexer_errors_public.js\";\nimport { clearRegExpParserCache } from \"./reg_exp_parser.js\";\n\nexport interface ILexingResult {\n  tokens: IToken[];\n  groups: { [groupName: string]: IToken[] };\n  errors: ILexingError[];\n}\n\nexport enum LexerDefinitionErrorType {\n  MISSING_PATTERN,\n  INVALID_PATTERN,\n  EOI_ANCHOR_FOUND,\n  UNSUPPORTED_FLAGS_FOUND,\n  DUPLICATE_PATTERNS_FOUND,\n  INVALID_GROUP_TYPE_FOUND,\n  PUSH_MODE_DOES_NOT_EXIST,\n  MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE,\n  MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY,\n  MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST,\n  LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED,\n  SOI_ANCHOR_FOUND,\n  EMPTY_MATCH_PATTERN,\n  NO_LINE_BREAKS_FLAGS,\n  UNREACHABLE_PATTERN,\n  IDENTIFY_TERMINATOR,\n  CUSTOM_LINE_BREAK,\n  MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE,\n}\n\nexport interface IRegExpExec {\n  exec: CustomPatternMatcherFunc;\n}\n\nconst DEFAULT_LEXER_CONFIG: Required = {\n  deferDefinitionErrorsHandling: false,\n  positionTracking: \"full\",\n  lineTerminatorsPattern: /\\n|\\r\\n?/g,\n  lineTerminatorCharacters: [\"\\n\", \"\\r\"],\n  ensureOptimizations: false,\n  safeMode: false,\n  errorMessageProvider: defaultLexerErrorProvider,\n  traceInitPerf: false,\n  skipValidations: false,\n  recoveryEnabled: true,\n};\n\nObject.freeze(DEFAULT_LEXER_CONFIG);\n\nexport class Lexer {\n  public static SKIPPED =\n    \"This marks a skipped Token pattern, this means each token identified by it will\" +\n    \"be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.\";\n\n  public static NA = /NOT_APPLICABLE/;\n  public lexerDefinitionErrors: ILexerDefinitionError[] = [];\n  public lexerDefinitionWarning: ILexerDefinitionError[] = [];\n\n  protected patternIdxToConfig: Record = {};\n  protected charCodeToPatternIdxToConfig: {\n    [modeName: string]: { [charCode: number]: IPatternConfig[] };\n  } = {};\n\n  protected modes: string[] = [];\n  protected defaultMode!: string;\n  protected emptyGroups: { [groupName: string]: IToken } = {};\n\n  private config: Required;\n  private trackStartLines: boolean = true;\n  private trackEndLines: boolean = true;\n  private hasCustom: boolean = false;\n  private canModeBeOptimized: Record = {};\n\n  private traceInitPerf!: boolean | number;\n  private traceInitMaxIdent!: number;\n  private traceInitIndent: number;\n\n  constructor(\n    protected lexerDefinition: TokenType[] | IMultiModeLexerDefinition,\n    config: ILexerConfig = DEFAULT_LEXER_CONFIG,\n  ) {\n    if (typeof config === \"boolean\") {\n      throw Error(\n        \"The second argument to the Lexer constructor is now an ILexerConfig Object.\\n\" +\n          \"a boolean 2nd argument is no longer supported\",\n      );\n    }\n\n    // todo: defaults func?\n    this.config = assign({}, DEFAULT_LEXER_CONFIG, config) as any;\n\n    const traceInitVal = this.config.traceInitPerf;\n    if (traceInitVal === true) {\n      this.traceInitMaxIdent = Infinity;\n      this.traceInitPerf = true;\n    } else if (typeof traceInitVal === \"number\") {\n      this.traceInitMaxIdent = traceInitVal;\n      this.traceInitPerf = true;\n    }\n    this.traceInitIndent = -1;\n\n    this.TRACE_INIT(\"Lexer Constructor\", () => {\n      let 
actualDefinition!: IMultiModeLexerDefinition;\n      let hasOnlySingleMode = true;\n      this.TRACE_INIT(\"Lexer Config handling\", () => {\n        if (\n          this.config.lineTerminatorsPattern ===\n          DEFAULT_LEXER_CONFIG.lineTerminatorsPattern\n        ) {\n          // optimized built-in implementation for the defaults definition of lineTerminators\n          this.config.lineTerminatorsPattern = LineTerminatorOptimizedTester;\n        } else {\n          if (\n            this.config.lineTerminatorCharacters ===\n            DEFAULT_LEXER_CONFIG.lineTerminatorCharacters\n          ) {\n            throw Error(\n              \"Error: Missing  property on the Lexer config.\\n\" +\n                \"\\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#MISSING_LINE_TERM_CHARS\",\n            );\n          }\n        }\n\n        if (config.safeMode && config.ensureOptimizations) {\n          throw Error(\n            '\"safeMode\" and \"ensureOptimizations\" flags are mutually exclusive.',\n          );\n        }\n\n        this.trackStartLines = /full|onlyStart/i.test(\n          this.config.positionTracking,\n        );\n        this.trackEndLines = /full/i.test(this.config.positionTracking);\n\n        // Convert SingleModeLexerDefinition into a IMultiModeLexerDefinition.\n        if (isArray(lexerDefinition)) {\n          actualDefinition = {\n            modes: { defaultMode: clone(lexerDefinition) },\n            defaultMode: DEFAULT_MODE,\n          };\n        } else {\n          // no conversion needed, input should already be a IMultiModeLexerDefinition\n          hasOnlySingleMode = false;\n          actualDefinition = clone(lexerDefinition);\n        }\n      });\n\n      if (this.config.skipValidations === false) {\n        this.TRACE_INIT(\"performRuntimeChecks\", () => {\n          this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat(\n            performRuntimeChecks(\n              actualDefinition,\n              this.trackStartLines,\n              this.config.lineTerminatorCharacters,\n            ),\n          );\n        });\n\n        this.TRACE_INIT(\"performWarningRuntimeChecks\", () => {\n          this.lexerDefinitionWarning = this.lexerDefinitionWarning.concat(\n            performWarningRuntimeChecks(\n              actualDefinition,\n              this.trackStartLines,\n              this.config.lineTerminatorCharacters,\n            ),\n          );\n        });\n      }\n\n      // for extra robustness to avoid throwing an none informative error message\n      actualDefinition.modes = actualDefinition.modes\n        ? 
actualDefinition.modes\n        : {};\n\n      // an error of undefined TokenTypes will be detected in \"performRuntimeChecks\" above.\n      // this transformation is to increase robustness in the case of partially invalid lexer definition.\n      forEach(actualDefinition.modes, (currModeValue, currModeName) => {\n        actualDefinition.modes[currModeName] = reject(\n          currModeValue,\n          (currTokType) => isUndefined(currTokType),\n        );\n      });\n\n      const allModeNames = keys(actualDefinition.modes);\n\n      forEach(\n        actualDefinition.modes,\n        (currModDef: TokenType[], currModName) => {\n          this.TRACE_INIT(`Mode: <${currModName}> processing`, () => {\n            this.modes.push(currModName);\n\n            if (this.config.skipValidations === false) {\n              this.TRACE_INIT(`validatePatterns`, () => {\n                this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat(\n                  validatePatterns(currModDef, allModeNames),\n                );\n              });\n            }\n\n            // If definition errors were encountered, the analysis phase may fail unexpectedly/\n            // Considering a lexer with definition errors may never be used, there is no point\n            // to performing the analysis anyhow...\n            if (isEmpty(this.lexerDefinitionErrors)) {\n              augmentTokenTypes(currModDef);\n\n              let currAnalyzeResult!: IAnalyzeResult;\n              this.TRACE_INIT(`analyzeTokenTypes`, () => {\n                currAnalyzeResult = analyzeTokenTypes(currModDef, {\n                  lineTerminatorCharacters:\n                    this.config.lineTerminatorCharacters,\n                  positionTracking: config.positionTracking,\n                  ensureOptimizations: config.ensureOptimizations,\n                  safeMode: config.safeMode,\n                  tracer: this.TRACE_INIT,\n                });\n              });\n\n              this.patternIdxToConfig[currModName] =\n                currAnalyzeResult.patternIdxToConfig;\n\n              this.charCodeToPatternIdxToConfig[currModName] =\n                currAnalyzeResult.charCodeToPatternIdxToConfig;\n\n              this.emptyGroups = assign(\n                {},\n                this.emptyGroups,\n                currAnalyzeResult.emptyGroups,\n              ) as any;\n\n              this.hasCustom = currAnalyzeResult.hasCustom || this.hasCustom;\n\n              this.canModeBeOptimized[currModName] =\n                currAnalyzeResult.canBeOptimized;\n            }\n          });\n        },\n      );\n\n      this.defaultMode = actualDefinition.defaultMode;\n\n      if (\n        !isEmpty(this.lexerDefinitionErrors) &&\n        !this.config.deferDefinitionErrorsHandling\n      ) {\n        const allErrMessages = map(this.lexerDefinitionErrors, (error) => {\n          return error.message;\n        });\n        const allErrMessagesString = allErrMessages.join(\n          \"-----------------------\\n\",\n        );\n        throw new Error(\n          \"Errors detected in definition of Lexer:\\n\" + allErrMessagesString,\n        );\n      }\n\n      // Only print warning if there are no errors, This will avoid pl\n      forEach(this.lexerDefinitionWarning, (warningDescriptor) => {\n        PRINT_WARNING(warningDescriptor.message);\n      });\n\n      this.TRACE_INIT(\"Choosing sub-methods implementations\", () => {\n        // Choose the relevant internal implementations for this specific parser.\n        // 
These implementations should be in-lined by the JavaScript engine\n        // to provide optimal performance in each scenario.\n        if (SUPPORT_STICKY) {\n          this.chopInput = identity;\n          this.match = this.matchWithTest;\n        } else {\n          this.updateLastIndex = noop;\n          this.match = this.matchWithExec;\n        }\n\n        if (hasOnlySingleMode) {\n          this.handleModes = noop;\n        }\n\n        if (this.trackStartLines === false) {\n          this.computeNewColumn = identity;\n        }\n\n        if (this.trackEndLines === false) {\n          this.updateTokenEndLineColumnLocation = noop;\n        }\n\n        if (/full/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createFullToken;\n        } else if (/onlyStart/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createStartOnlyToken;\n        } else if (/onlyOffset/i.test(this.config.positionTracking)) {\n          this.createTokenInstance = this.createOffsetOnlyToken;\n        } else {\n          throw Error(\n            `Invalid  config option: \"${this.config.positionTracking}\"`,\n          );\n        }\n\n        if (this.hasCustom) {\n          this.addToken = this.addTokenUsingPush;\n          this.handlePayload = this.handlePayloadWithCustom;\n        } else {\n          this.addToken = this.addTokenUsingMemberAccess;\n          this.handlePayload = this.handlePayloadNoCustom;\n        }\n      });\n\n      this.TRACE_INIT(\"Failed Optimization Warnings\", () => {\n        const unOptimizedModes = reduce(\n          this.canModeBeOptimized,\n          (cannotBeOptimized, canBeOptimized, modeName) => {\n            if (canBeOptimized === false) {\n              cannotBeOptimized.push(modeName);\n            }\n            return cannotBeOptimized;\n          },\n          [] as string[],\n        );\n\n        if (config.ensureOptimizations && !isEmpty(unOptimizedModes)) {\n          throw Error(\n            `Lexer Modes: < ${unOptimizedModes.join(\n              \", \",\n            )} > cannot be optimized.\\n` +\n              '\\t Disable the \"ensureOptimizations\" lexer config flag to silently ignore this and run the lexer in an un-optimized mode.\\n' +\n              \"\\t Or inspect the console log for details on how to resolve these issues.\",\n          );\n        }\n      });\n\n      this.TRACE_INIT(\"clearRegExpParserCache\", () => {\n        clearRegExpParserCache();\n      });\n\n      this.TRACE_INIT(\"toFastProperties\", () => {\n        toFastProperties(this);\n      });\n    });\n  }\n\n  public tokenize(\n    text: string,\n    initialMode: string = this.defaultMode,\n  ): ILexingResult {\n    if (!isEmpty(this.lexerDefinitionErrors)) {\n      const allErrMessages = map(this.lexerDefinitionErrors, (error) => {\n        return error.message;\n      });\n      const allErrMessagesString = allErrMessages.join(\n        \"-----------------------\\n\",\n      );\n      throw new Error(\n        \"Unable to Tokenize because Errors detected in definition of Lexer:\\n\" +\n          allErrMessagesString,\n      );\n    }\n\n    return this.tokenizeInternal(text, initialMode);\n  }\n\n  // There is quite a bit of duplication between this and \"tokenizeInternalLazy\"\n  // This is intentional due to performance considerations.\n  // this method also used quite a bit of `!` none null assertions because it is too optimized\n  // for `tsc` to always understand it is \"safe\"\n  private 
tokenizeInternal(text: string, initialMode: string): ILexingResult {\n    let i,\n      j,\n      k,\n      matchAltImage,\n      longerAlt,\n      matchedImage: string | null,\n      payload,\n      altPayload,\n      imageLength,\n      group,\n      tokType,\n      newToken: IToken,\n      errLength,\n      droppedChar,\n      msg,\n      match;\n    const orgText = text;\n    const orgLength = orgText.length;\n    let offset = 0;\n    let matchedTokensIndex = 0;\n    // initializing the tokensArray to the \"guessed\" size.\n    // guessing too little will still reduce the number of array re-sizes on pushes.\n    // guessing too large (Tested by guessing x4 too large) may cost a bit more of memory\n    // but would still have a faster runtime by avoiding (All but one) array resizing.\n    const guessedNumberOfTokens = this.hasCustom\n      ? 0 // will break custom token pattern APIs the matchedTokens array will contain undefined elements.\n      : Math.floor(text.length / 10);\n    const matchedTokens = new Array(guessedNumberOfTokens);\n    const errors: ILexingError[] = [];\n    let line = this.trackStartLines ? 1 : undefined;\n    let column = this.trackStartLines ? 1 : undefined;\n    const groups: any = cloneEmptyGroups(this.emptyGroups);\n    const trackLines = this.trackStartLines;\n    const lineTerminatorPattern = this.config.lineTerminatorsPattern;\n\n    let currModePatternsLength = 0;\n    let patternIdxToConfig: IPatternConfig[] = [];\n    let currCharCodeToPatternIdxToConfig: {\n      [charCode: number]: IPatternConfig[];\n    } = [];\n\n    const modeStack: string[] = [];\n\n    const emptyArray: IPatternConfig[] = [];\n    Object.freeze(emptyArray);\n    let getPossiblePatterns!: (charCode: number) => IPatternConfig[];\n\n    function getPossiblePatternsSlow() {\n      return patternIdxToConfig;\n    }\n\n    function getPossiblePatternsOptimized(charCode: number): IPatternConfig[] {\n      const optimizedCharIdx = charCodeToOptimizedIndex(charCode);\n      const possiblePatterns =\n        currCharCodeToPatternIdxToConfig[optimizedCharIdx];\n      if (possiblePatterns === undefined) {\n        return emptyArray;\n      } else {\n        return possiblePatterns;\n      }\n    }\n\n    const pop_mode = (popToken: IToken) => {\n      // TODO: perhaps avoid this error in the edge case there is no more input?\n      if (\n        modeStack.length === 1 &&\n        // if we have both a POP_MODE and a PUSH_MODE this is in-fact a \"transition\"\n        // So no error should occur.\n        popToken.tokenType.PUSH_MODE === undefined\n      ) {\n        // if we try to pop the last mode there lexer will no longer have ANY mode.\n        // thus the pop is ignored, an error will be created and the lexer will continue parsing in the previous mode.\n        const msg =\n          this.config.errorMessageProvider.buildUnableToPopLexerModeMessage(\n            popToken,\n          );\n\n        errors.push({\n          offset: popToken.startOffset,\n          line: popToken.startLine,\n          column: popToken.startColumn,\n          length: popToken.image.length,\n          message: msg,\n        });\n      } else {\n        modeStack.pop();\n        const newMode = last(modeStack)!;\n        patternIdxToConfig = this.patternIdxToConfig[newMode];\n        currCharCodeToPatternIdxToConfig =\n          this.charCodeToPatternIdxToConfig[newMode];\n        currModePatternsLength = patternIdxToConfig.length;\n        const modeCanBeOptimized =\n          
this.canModeBeOptimized[newMode] && this.config.safeMode === false;\n\n        if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {\n          getPossiblePatterns = getPossiblePatternsOptimized;\n        } else {\n          getPossiblePatterns = getPossiblePatternsSlow;\n        }\n      }\n    };\n\n    function push_mode(this: Lexer, newMode: string) {\n      modeStack.push(newMode);\n      currCharCodeToPatternIdxToConfig =\n        this.charCodeToPatternIdxToConfig[newMode];\n\n      patternIdxToConfig = this.patternIdxToConfig[newMode];\n      currModePatternsLength = patternIdxToConfig.length;\n\n      currModePatternsLength = patternIdxToConfig.length;\n      const modeCanBeOptimized =\n        this.canModeBeOptimized[newMode] && this.config.safeMode === false;\n\n      if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {\n        getPossiblePatterns = getPossiblePatternsOptimized;\n      } else {\n        getPossiblePatterns = getPossiblePatternsSlow;\n      }\n    }\n\n    // this pattern seems to avoid a V8 de-optimization, although that de-optimization does not\n    // seem to matter performance wise.\n    push_mode.call(this, initialMode);\n\n    let currConfig!: IPatternConfig;\n\n    const recoveryEnabled = this.config.recoveryEnabled;\n\n    while (offset < orgLength) {\n      matchedImage = null;\n\n      const nextCharCode = orgText.charCodeAt(offset);\n      const chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);\n      const chosenPatternsLength = chosenPatternIdxToConfig.length;\n\n      for (i = 0; i < chosenPatternsLength; i++) {\n        currConfig = chosenPatternIdxToConfig[i];\n        const currPattern = currConfig.pattern;\n        payload = null;\n\n        // manually in-lined because > 600 chars won't be in-lined in V8\n        const singleCharCode = currConfig.short;\n        if (singleCharCode !== false) {\n          if (nextCharCode === singleCharCode) {\n            // single character string\n            matchedImage = currPattern as string;\n          }\n        } else if (currConfig.isCustom === true) {\n          match = (currPattern as IRegExpExec).exec(\n            orgText,\n            offset,\n            matchedTokens,\n            groups,\n          );\n          if (match !== null) {\n            matchedImage = match[0];\n            if ((match as CustomPatternMatcherReturn).payload !== undefined) {\n              payload = (match as CustomPatternMatcherReturn).payload;\n            }\n          } else {\n            matchedImage = null;\n          }\n        } else {\n          this.updateLastIndex(currPattern as RegExp, offset);\n          matchedImage = this.match(currPattern as RegExp, text, offset);\n        }\n\n        if (matchedImage !== null) {\n          // even though this pattern matched we must try a another longer alternative.\n          // this can be used to prioritize keywords over identifiers\n          longerAlt = currConfig.longerAlt;\n          if (longerAlt !== undefined) {\n            // TODO: micro optimize, avoid extra prop access\n            // by saving/linking longerAlt on the original config?\n            const longerAltLength = longerAlt.length;\n            for (k = 0; k < longerAltLength; k++) {\n              const longerAltConfig = patternIdxToConfig[longerAlt[k]];\n              const longerAltPattern = longerAltConfig.pattern;\n              altPayload = null;\n\n              // single Char can never be a longer alt so no need to test it.\n              // manually in-lined 
because > 600 chars won't be in-lined in V8\n              if (longerAltConfig.isCustom === true) {\n                match = (longerAltPattern as IRegExpExec).exec(\n                  orgText,\n                  offset,\n                  matchedTokens,\n                  groups,\n                );\n                if (match !== null) {\n                  matchAltImage = match[0];\n                  if (\n                    (match as CustomPatternMatcherReturn).payload !== undefined\n                  ) {\n                    altPayload = (match as CustomPatternMatcherReturn).payload;\n                  }\n                } else {\n                  matchAltImage = null;\n                }\n              } else {\n                this.updateLastIndex(longerAltPattern as RegExp, offset);\n                matchAltImage = this.match(\n                  longerAltPattern as RegExp,\n                  text,\n                  offset,\n                );\n              }\n\n              if (matchAltImage && matchAltImage.length > matchedImage.length) {\n                matchedImage = matchAltImage;\n                payload = altPayload;\n                currConfig = longerAltConfig;\n                // Exit the loop early after matching one of the longer alternatives\n                // The first matched alternative takes precedence\n                break;\n              }\n            }\n          }\n          break;\n        }\n      }\n\n      // successful match\n      if (matchedImage !== null) {\n        imageLength = matchedImage.length;\n        group = currConfig.group;\n        if (group !== undefined) {\n          tokType = currConfig.tokenTypeIdx;\n          // TODO: \"offset + imageLength\" and the new column may be computed twice in case of \"full\" location information inside\n          // createFullToken method\n          newToken = this.createTokenInstance(\n            matchedImage,\n            offset,\n            tokType,\n            currConfig.tokenType,\n            line,\n            column,\n            imageLength,\n          );\n\n          this.handlePayload(newToken, payload);\n\n          // TODO: optimize NOOP in case there are no special groups?\n          if (group === false) {\n            matchedTokensIndex = this.addToken(\n              matchedTokens,\n              matchedTokensIndex,\n              newToken,\n            );\n          } else {\n            groups[group].push(newToken);\n          }\n        }\n        text = this.chopInput(text, imageLength);\n        offset = offset + imageLength;\n\n        // TODO: with newlines the column may be assigned twice\n        column = this.computeNewColumn(column!, imageLength);\n\n        if (trackLines === true && currConfig.canLineTerminator === true) {\n          let numOfLTsInMatch = 0;\n          let foundTerminator;\n          let lastLTEndOffset: number;\n          lineTerminatorPattern.lastIndex = 0;\n          do {\n            foundTerminator = lineTerminatorPattern.test(matchedImage);\n            if (foundTerminator === true) {\n              lastLTEndOffset = lineTerminatorPattern.lastIndex - 1;\n              numOfLTsInMatch++;\n            }\n          } while (foundTerminator === true);\n\n          if (numOfLTsInMatch !== 0) {\n            line = line! 
+ numOfLTsInMatch;\n            column = imageLength - lastLTEndOffset!;\n            this.updateTokenEndLineColumnLocation(\n              newToken!,\n              group!,\n              lastLTEndOffset!,\n              numOfLTsInMatch,\n              line,\n              column,\n              imageLength,\n            );\n          }\n        }\n        // will be NOOP if no modes present\n        this.handleModes(currConfig, pop_mode, push_mode, newToken!);\n      } else {\n        // error recovery, drop characters until we identify a valid token's start point\n        const errorStartOffset = offset;\n        const errorLine = line;\n        const errorColumn = column;\n        let foundResyncPoint = recoveryEnabled === false;\n\n        while (foundResyncPoint === false && offset < orgLength) {\n          // Identity Func (when sticky flag is enabled)\n          text = this.chopInput(text, 1);\n          offset++;\n          for (j = 0; j < currModePatternsLength; j++) {\n            const currConfig = patternIdxToConfig[j];\n            const currPattern = currConfig.pattern;\n\n            // manually in-lined because > 600 chars won't be in-lined in V8\n            const singleCharCode = currConfig.short;\n            if (singleCharCode !== false) {\n              if (orgText.charCodeAt(offset) === singleCharCode) {\n                // single character string\n                foundResyncPoint = true;\n              }\n            } else if (currConfig.isCustom === true) {\n              foundResyncPoint =\n                (currPattern as IRegExpExec).exec(\n                  orgText,\n                  offset,\n                  matchedTokens,\n                  groups,\n                ) !== null;\n            } else {\n              this.updateLastIndex(currPattern as RegExp, offset);\n              foundResyncPoint = (currPattern as RegExp).exec(text) !== null;\n            }\n\n            if (foundResyncPoint === true) {\n              break;\n            }\n          }\n        }\n\n        errLength = offset - errorStartOffset;\n        column = this.computeNewColumn(column!, errLength);\n        // at this point we either re-synced or reached the end of the input text\n        msg = this.config.errorMessageProvider.buildUnexpectedCharactersMessage(\n          orgText,\n          errorStartOffset,\n          errLength,\n          errorLine,\n          errorColumn,\n        );\n        errors.push({\n          offset: errorStartOffset,\n          line: errorLine,\n          column: errorColumn,\n          length: errLength,\n          message: msg,\n        });\n\n        if (recoveryEnabled === false) {\n          break;\n        }\n      }\n    }\n\n    // if we do have custom patterns which push directly into the\n    // TODO: custom tokens should not push directly??\n    if (!this.hasCustom) {\n      // if we guessed a too large size for the tokens array this will shrink it to the right size.\n      matchedTokens.length = matchedTokensIndex;\n    }\n\n    return {\n      tokens: matchedTokens,\n      groups: groups,\n      errors: errors,\n    };\n  }\n\n  private handleModes(\n    config: IPatternConfig,\n    pop_mode: (tok: IToken) => void,\n    push_mode: (this: Lexer, pushMode: string) => void,\n    newToken: IToken,\n  ) {\n    if (config.pop === true) {\n      // need to save the PUSH_MODE property as if the mode is popped\n      // patternIdxToPopMode is updated to reflect the new mode after popping the stack\n      const pushMode = config.push;\n      
pop_mode(newToken);\n      if (pushMode !== undefined) {\n        push_mode.call(this, pushMode);\n      }\n    } else if (config.push !== undefined) {\n      push_mode.call(this, config.push);\n    }\n  }\n\n  private chopInput(text: string, length: number): string {\n    return text.substring(length);\n  }\n\n  private updateLastIndex(regExp: RegExp, newLastIndex: number): void {\n    regExp.lastIndex = newLastIndex;\n  }\n\n  // TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler\n  private updateTokenEndLineColumnLocation(\n    newToken: IToken,\n    group: string | false,\n    lastLTIdx: number,\n    numOfLTsInMatch: number,\n    line: number,\n    column: number,\n    imageLength: number,\n  ): void {\n    let lastCharIsLT, fixForEndingInLT;\n    if (group !== undefined) {\n      // a none skipped multi line Token, need to update endLine/endColumn\n      lastCharIsLT = lastLTIdx === imageLength - 1;\n      fixForEndingInLT = lastCharIsLT ? -1 : 0;\n      if (!(numOfLTsInMatch === 1 && lastCharIsLT === true)) {\n        // if a token ends in a LT that last LT only affects the line numbering of following Tokens\n        newToken.endLine = line + fixForEndingInLT;\n        // the last LT in a token does not affect the endColumn either as the [columnStart ... columnEnd)\n        // inclusive to exclusive range.\n        newToken.endColumn = column - 1 + -fixForEndingInLT;\n      }\n      // else single LT in the last character of a token, no need to modify the endLine/EndColumn\n    }\n  }\n\n  private computeNewColumn(oldColumn: number, imageLength: number) {\n    return oldColumn + imageLength;\n  }\n\n  // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.\n  /* istanbul ignore next - place holder */\n  private createTokenInstance!: (...args: any[]) => IToken;\n\n  private createOffsetOnlyToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n  ) {\n    return {\n      image,\n      startOffset,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  private createStartOnlyToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n    startLine: number,\n    startColumn: number,\n  ) {\n    return {\n      image,\n      startOffset,\n      startLine,\n      startColumn,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  private createFullToken(\n    image: string,\n    startOffset: number,\n    tokenTypeIdx: number,\n    tokenType: TokenType,\n    startLine: number,\n    startColumn: number,\n    imageLength: number,\n  ): IToken {\n    return {\n      image,\n      startOffset,\n      endOffset: startOffset + imageLength - 1,\n      startLine,\n      endLine: startLine,\n      startColumn,\n      endColumn: startColumn + imageLength - 1,\n      tokenTypeIdx,\n      tokenType,\n    };\n  }\n\n  // Place holder, will be replaced by the correct variant according to the locationTracking option at runtime.\n  /* istanbul ignore next - place holder */\n  private addToken!: (\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ) => number;\n\n  private addTokenUsingPush(\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ): number {\n    tokenVector.push(tokenToAdd);\n    return index;\n  }\n\n  private addTokenUsingMemberAccess(\n    tokenVector: IToken[],\n    index: number,\n    tokenToAdd: IToken,\n  ): number {\n    tokenVector[index] = 
tokenToAdd;\n    index++;\n    return index;\n  }\n\n  // Place holder, will be replaced by the correct variant according to the hasCustom flag option at runtime.\n  private handlePayload: (token: IToken, payload: any) => void;\n\n  private handlePayloadNoCustom(token: IToken, payload: any): void {}\n\n  private handlePayloadWithCustom(token: IToken, payload: any): void {\n    if (payload !== null) {\n      token.payload = payload;\n    }\n  }\n\n  // place holder to be replaced with chosen alternative at runtime\n  private match!: (\n    pattern: RegExp,\n    text: string,\n    offset: number,\n  ) => string | null;\n\n  private matchWithTest(\n    pattern: RegExp,\n    text: string,\n    offset: number,\n  ): string | null {\n    const found = pattern.test(text);\n    if (found === true) {\n      return text.substring(offset, pattern.lastIndex);\n    }\n    return null;\n  }\n\n  private matchWithExec(pattern: RegExp, text: string): string | null {\n    const regExpArray = pattern.exec(text);\n    return regExpArray !== null ? regExpArray[0] : null;\n  }\n\n  // Duplicated from the parser's perf trace trait to allow future extraction\n  // of the lexer to a separate package.\n  TRACE_INIT = (phaseDesc: string, phaseImpl: () => T): T => {\n    // No need to optimize this using NOOP pattern because\n    // It is not called in a hot spot...\n    if (this.traceInitPerf === true) {\n      this.traceInitIndent++;\n      const indent = new Array(this.traceInitIndent + 1).join(\"\\t\");\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        console.log(`${indent}--> <${phaseDesc}>`);\n      }\n      const { time, value } = timer(phaseImpl);\n      /* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */\n      const traceMethod = time > 10 ? 
console.warn : console.log;\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);\n      }\n      this.traceInitIndent--;\n      return value;\n    } else {\n      return phaseImpl();\n    }\n  };\n}\n", "import { has, isString, isUndefined } from \"lodash-es\";\nimport { Lexer } from \"./lexer_public.js\";\nimport { augmentTokenTypes, tokenStructuredMatcher } from \"./tokens.js\";\nimport { IToken, ITokenConfig, TokenType } from \"@chevrotain/types\";\n\nexport function tokenLabel(tokType: TokenType): string {\n  if (hasTokenLabel(tokType)) {\n    return tokType.LABEL;\n  } else {\n    return tokType.name;\n  }\n}\n\nexport function tokenName(tokType: TokenType): string {\n  return tokType.name;\n}\n\nexport function hasTokenLabel(\n  obj: TokenType,\n): obj is TokenType & Pick, \"LABEL\"> {\n  return isString(obj.LABEL) && obj.LABEL !== \"\";\n}\n\nconst PARENT = \"parent\";\nconst CATEGORIES = \"categories\";\nconst LABEL = \"label\";\nconst GROUP = \"group\";\nconst PUSH_MODE = \"push_mode\";\nconst POP_MODE = \"pop_mode\";\nconst LONGER_ALT = \"longer_alt\";\nconst LINE_BREAKS = \"line_breaks\";\nconst START_CHARS_HINT = \"start_chars_hint\";\n\nexport function createToken(config: ITokenConfig): TokenType {\n  return createTokenInternal(config);\n}\n\nfunction createTokenInternal(config: ITokenConfig): TokenType {\n  const pattern = config.pattern;\n\n  const tokenType: TokenType = {};\n  tokenType.name = config.name;\n\n  if (!isUndefined(pattern)) {\n    tokenType.PATTERN = pattern;\n  }\n\n  if (has(config, PARENT)) {\n    throw (\n      \"The parent property is no longer supported.\\n\" +\n      \"See: https://github.com/chevrotain/chevrotain/issues/564#issuecomment-349062346 for details.\"\n    );\n  }\n\n  if (has(config, CATEGORIES)) {\n    // casting to ANY as this will be fixed inside `augmentTokenTypes``\n    tokenType.CATEGORIES = config[CATEGORIES];\n  }\n\n  augmentTokenTypes([tokenType]);\n\n  if (has(config, LABEL)) {\n    tokenType.LABEL = config[LABEL];\n  }\n\n  if (has(config, GROUP)) {\n    tokenType.GROUP = config[GROUP];\n  }\n\n  if (has(config, POP_MODE)) {\n    tokenType.POP_MODE = config[POP_MODE];\n  }\n\n  if (has(config, PUSH_MODE)) {\n    tokenType.PUSH_MODE = config[PUSH_MODE];\n  }\n\n  if (has(config, LONGER_ALT)) {\n    tokenType.LONGER_ALT = config[LONGER_ALT];\n  }\n\n  if (has(config, LINE_BREAKS)) {\n    tokenType.LINE_BREAKS = config[LINE_BREAKS];\n  }\n\n  if (has(config, START_CHARS_HINT)) {\n    tokenType.START_CHARS_HINT = config[START_CHARS_HINT];\n  }\n\n  return tokenType;\n}\n\nexport const EOF = createToken({ name: \"EOF\", pattern: Lexer.NA });\naugmentTokenTypes([EOF]);\n\nexport function createTokenInstance(\n  tokType: TokenType,\n  image: string,\n  startOffset: number,\n  endOffset: number,\n  startLine: number,\n  endLine: number,\n  startColumn: number,\n  endColumn: number,\n): IToken {\n  return {\n    image,\n    startOffset,\n    endOffset,\n    startLine,\n    endLine,\n    startColumn,\n    endColumn,\n    tokenTypeIdx: (tokType).tokenTypeIdx,\n    tokenType: tokType,\n  };\n}\n\nexport function tokenMatcher(token: IToken, tokType: TokenType): boolean {\n  return tokenStructuredMatcher(token, tokType);\n}\n", "import { hasTokenLabel, tokenLabel } from \"../scan/tokens_public.js\";\nimport { first, map, reduce } from \"lodash-es\";\nimport {\n  Alternation,\n  getProductionDslName,\n  NonTerminal,\n  Rule,\n  Terminal,\n} from 
\"@chevrotain/gast\";\nimport {\n  IParserErrorMessageProvider,\n  IProductionWithOccurrence,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IGrammarValidatorErrorMessageProvider,\n} from \"./grammar/types.js\";\n\nexport const defaultParserErrorProvider: IParserErrorMessageProvider = {\n  buildMismatchTokenMessage({ expected, actual, previous, ruleName }): string {\n    const hasLabel = hasTokenLabel(expected);\n    const expectedMsg = hasLabel\n      ? `--> ${tokenLabel(expected)} <--`\n      : `token of type --> ${expected.name} <--`;\n\n    const msg = `Expecting ${expectedMsg} but found --> '${actual.image}' <--`;\n\n    return msg;\n  },\n\n  buildNotAllInputParsedMessage({ firstRedundant, ruleName }): string {\n    return \"Redundant input, expecting EOF but found: \" + firstRedundant.image;\n  },\n\n  buildNoViableAltMessage({\n    expectedPathsPerAlt,\n    actual,\n    previous,\n    customUserDescription,\n    ruleName,\n  }): string {\n    const errPrefix = \"Expecting: \";\n    // TODO: issue: No Viable Alternative Error may have incomplete details. #502\n    const actualText = first(actual)!.image;\n    const errSuffix = \"\\nbut found: '\" + actualText + \"'\";\n\n    if (customUserDescription) {\n      return errPrefix + customUserDescription + errSuffix;\n    } else {\n      const allLookAheadPaths = reduce(\n        expectedPathsPerAlt,\n        (result, currAltPaths) => result.concat(currAltPaths),\n        [] as TokenType[][],\n      );\n      const nextValidTokenSequences = map(\n        allLookAheadPaths,\n        (currPath) =>\n          `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join(\n            \", \",\n          )}]`,\n      );\n      const nextValidSequenceItems = map(\n        nextValidTokenSequences,\n        (itemMsg, idx) => `  ${idx + 1}. ${itemMsg}`,\n      );\n      const calculatedDescription = `one of these possible Token sequences:\\n${nextValidSequenceItems.join(\n        \"\\n\",\n      )}`;\n\n      return errPrefix + calculatedDescription + errSuffix;\n    }\n  },\n\n  buildEarlyExitMessage({\n    expectedIterationPaths,\n    actual,\n    customUserDescription,\n    ruleName,\n  }): string {\n    const errPrefix = \"Expecting: \";\n    // TODO: issue: No Viable Alternative Error may have incomplete details. 
#502\n    const actualText = first(actual)!.image;\n    const errSuffix = \"\\nbut found: '\" + actualText + \"'\";\n\n    if (customUserDescription) {\n      return errPrefix + customUserDescription + errSuffix;\n    } else {\n      const nextValidTokenSequences = map(\n        expectedIterationPaths,\n        (currPath) =>\n          `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join(\n            \",\",\n          )}]`,\n      );\n      const calculatedDescription =\n        `expecting at least one iteration which starts with one of these possible Token sequences::\\n  ` +\n        `<${nextValidTokenSequences.join(\" ,\")}>`;\n\n      return errPrefix + calculatedDescription + errSuffix;\n    }\n  },\n};\n\nObject.freeze(defaultParserErrorProvider);\n\nexport const defaultGrammarResolverErrorProvider: IGrammarResolverErrorMessageProvider =\n  {\n    buildRuleNotFoundError(\n      topLevelRule: Rule,\n      undefinedRule: NonTerminal,\n    ): string {\n      const msg =\n        \"Invalid grammar, reference to a rule which is not defined: ->\" +\n        undefinedRule.nonTerminalName +\n        \"<-\\n\" +\n        \"inside top level rule: ->\" +\n        topLevelRule.name +\n        \"<-\";\n      return msg;\n    },\n  };\n\nexport const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessageProvider =\n  {\n    buildDuplicateFoundError(\n      topLevelRule: Rule,\n      duplicateProds: IProductionWithOccurrence[],\n    ): string {\n      function getExtraProductionArgument(\n        prod: IProductionWithOccurrence,\n      ): string {\n        if (prod instanceof Terminal) {\n          return prod.terminalType.name;\n        } else if (prod instanceof NonTerminal) {\n          return prod.nonTerminalName;\n        } else {\n          return \"\";\n        }\n      }\n\n      const topLevelName = topLevelRule.name;\n      const duplicateProd = first(duplicateProds)!;\n      const index = duplicateProd.idx;\n      const dslName = getProductionDslName(duplicateProd);\n      const extraArgument = getExtraProductionArgument(duplicateProd);\n\n      const hasExplicitIndex = index > 0;\n      let msg = `->${dslName}${hasExplicitIndex ? index : \"\"}<- ${\n        extraArgument ? `with argument: ->${extraArgument}<-` : \"\"\n      }\n                  appears more than once (${\n                    duplicateProds.length\n                  } times) in the top level rule: ->${topLevelName}<-.                  \n                  For further details see: https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES \n                  `;\n\n      // white space trimming time! 
better to trim afterwards as it allows to use WELL formatted multi line template strings...\n      msg = msg.replace(/[ \\t]+/g, \" \");\n      msg = msg.replace(/\\s\\s+/g, \"\\n\");\n\n      return msg;\n    },\n\n    buildNamespaceConflictError(rule: Rule): string {\n      const errMsg =\n        `Namespace conflict found in grammar.\\n` +\n        `The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: <${rule.name}>.\\n` +\n        `To resolve this make sure each Terminal and Non-Terminal names are unique\\n` +\n        `This is easy to accomplish by using the convention that Terminal names start with an uppercase letter\\n` +\n        `and Non-Terminal names start with a lower case letter.`;\n\n      return errMsg;\n    },\n\n    buildAlternationPrefixAmbiguityError(options: {\n      topLevelRule: Rule;\n      prefixPath: TokenType[];\n      ambiguityIndices: number[];\n      alternation: Alternation;\n    }): string {\n      const pathMsg = map(options.prefixPath, (currTok) =>\n        tokenLabel(currTok),\n      ).join(\", \");\n      const occurrence =\n        options.alternation.idx === 0 ? \"\" : options.alternation.idx;\n      const errMsg =\n        `Ambiguous alternatives: <${options.ambiguityIndices.join(\n          \" ,\",\n        )}> due to common lookahead prefix\\n` +\n        `in  inside <${options.topLevelRule.name}> Rule,\\n` +\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n` +\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX\\n` +\n        `For Further details.`;\n\n      return errMsg;\n    },\n\n    buildAlternationAmbiguityError(options: {\n      topLevelRule: Rule;\n      prefixPath: TokenType[];\n      ambiguityIndices: number[];\n      alternation: Alternation;\n    }): string {\n      const pathMsg = map(options.prefixPath, (currtok) =>\n        tokenLabel(currtok),\n      ).join(\", \");\n      const occurrence =\n        options.alternation.idx === 0 ? 
\"\" : options.alternation.idx;\n      let currMessage =\n        `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(\n          \" ,\",\n        )}> in ` +\n        ` inside <${options.topLevelRule.name}> Rule,\\n` +\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n`;\n\n      currMessage =\n        currMessage +\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\\n` +\n        `For Further details.`;\n      return currMessage;\n    },\n\n    buildEmptyRepetitionError(options: {\n      topLevelRule: Rule;\n      repetition: IProductionWithOccurrence;\n    }): string {\n      let dslName = getProductionDslName(options.repetition);\n      if (options.repetition.idx !== 0) {\n        dslName += options.repetition.idx;\n      }\n\n      const errMsg =\n        `The repetition <${dslName}> within Rule <${options.topLevelRule.name}> can never consume any tokens.\\n` +\n        `This could lead to an infinite loop.`;\n\n      return errMsg;\n    },\n\n    // TODO: remove - `errors_public` from nyc.config.js exclude\n    //       once this method is fully removed from this file\n    buildTokenNameError(options: {\n      tokenType: TokenType;\n      expectedPattern: RegExp;\n    }): string {\n      /* istanbul ignore next */\n      return \"deprecated\";\n    },\n\n    buildEmptyAlternationError(options: {\n      topLevelRule: Rule;\n      alternation: Alternation;\n      emptyChoiceIdx: number;\n    }): string {\n      const errMsg =\n        `Ambiguous empty alternative: <${options.emptyChoiceIdx + 1}>` +\n        ` in  inside <${options.topLevelRule.name}> Rule.\\n` +\n        `Only the last alternative may be an empty alternative.`;\n\n      return errMsg;\n    },\n\n    buildTooManyAlternativesError(options: {\n      topLevelRule: Rule;\n      alternation: Alternation;\n    }): string {\n      const errMsg =\n        `An Alternation cannot have more than 256 alternatives:\\n` +\n        ` inside <${\n          options.topLevelRule.name\n        }> Rule.\\n has ${\n          options.alternation.definition.length + 1\n        } alternatives.`;\n\n      return errMsg;\n    },\n\n    buildLeftRecursionError(options: {\n      topLevelRule: Rule;\n      leftRecursionPath: Rule[];\n    }): string {\n      const ruleName = options.topLevelRule.name;\n      const pathNames = map(\n        options.leftRecursionPath,\n        (currRule) => currRule.name,\n      );\n      const leftRecursivePath = `${ruleName} --> ${pathNames\n        .concat([ruleName])\n        .join(\" --> \")}`;\n      const errMsg =\n        `Left Recursion found in grammar.\\n` +\n        `rule: <${ruleName}> can be invoked from itself (directly or indirectly)\\n` +\n        `without consuming any Tokens. 
The grammar path that causes this is: \\n ${leftRecursivePath}\\n` +\n        ` To fix this refactor your grammar to remove the left recursion.\\n` +\n        `see: https://en.wikipedia.org/wiki/LL_parser#Left_factoring.`;\n\n      return errMsg;\n    },\n\n    // TODO: remove - `errors_public` from nyc.config.js exclude\n    //       once this method is fully removed from this file\n    buildInvalidRuleNameError(options: {\n      topLevelRule: Rule;\n      expectedPattern: RegExp;\n    }): string {\n      /* istanbul ignore next */\n      return \"deprecated\";\n    },\n\n    buildDuplicateRuleNameError(options: {\n      topLevelRule: Rule | string;\n      grammarName: string;\n    }): string {\n      let ruleName;\n      if (options.topLevelRule instanceof Rule) {\n        ruleName = options.topLevelRule.name;\n      } else {\n        ruleName = options.topLevelRule;\n      }\n\n      const errMsg = `Duplicate definition, rule: ->${ruleName}<- is already defined in the grammar: ->${options.grammarName}<-`;\n\n      return errMsg;\n    },\n  };\n", "import {\n  IParserUnresolvedRefDefinitionError,\n  ParserDefinitionErrorType,\n} from \"../parser/parser.js\";\nimport { forEach, values } from \"lodash-es\";\nimport { GAstVisitor, NonTerminal, Rule } from \"@chevrotain/gast\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IParserDefinitionError,\n} from \"./types.js\";\n\nexport function resolveGrammar(\n  topLevels: Record,\n  errMsgProvider: IGrammarResolverErrorMessageProvider,\n): IParserDefinitionError[] {\n  const refResolver = new GastRefResolverVisitor(topLevels, errMsgProvider);\n  refResolver.resolveRefs();\n  return refResolver.errors;\n}\n\nexport class GastRefResolverVisitor extends GAstVisitor {\n  public errors: IParserUnresolvedRefDefinitionError[] = [];\n  private currTopLevel: Rule;\n\n  constructor(\n    private nameToTopRule: Record,\n    private errMsgProvider: IGrammarResolverErrorMessageProvider,\n  ) {\n    super();\n  }\n\n  public resolveRefs(): void {\n    forEach(values(this.nameToTopRule), (prod) => {\n      this.currTopLevel = prod;\n      prod.accept(this);\n    });\n  }\n\n  public visitNonTerminal(node: NonTerminal): void {\n    const ref = this.nameToTopRule[node.nonTerminalName];\n\n    if (!ref) {\n      const msg = this.errMsgProvider.buildRuleNotFoundError(\n        this.currTopLevel,\n        node,\n      );\n      this.errors.push({\n        message: msg,\n        type: ParserDefinitionErrorType.UNRESOLVED_SUBRULE_REF,\n        ruleName: this.currTopLevel.name,\n        unresolvedRefName: node.nonTerminalName,\n      });\n    } else {\n      node.referencedRule = ref;\n    }\n  }\n}\n", "import {\n  clone,\n  drop,\n  dropRight,\n  first as _first,\n  forEach,\n  isEmpty,\n  last,\n} from \"lodash-es\";\nimport { first } from \"./first.js\";\nimport { RestWalker } from \"./rest.js\";\nimport { TokenMatcher } from \"../parser/parser.js\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport {\n  IGrammarPath,\n  IProduction,\n  ISyntacticContentAssistPath,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\n\nexport abstract class AbstractNextPossibleTokensWalker extends RestWalker {\n  protected possibleTokTypes: TokenType[] = [];\n  protected ruleStack: string[];\n  protected occurrenceStack: number[];\n\n  protected nextProductionName = 
\"\";\n  protected nextProductionOccurrence = 0;\n  protected found = false;\n  protected isAtEndOfPath = false;\n\n  constructor(\n    protected topProd: Rule,\n    protected path: IGrammarPath,\n  ) {\n    super();\n  }\n\n  startWalking(): TokenType[] {\n    this.found = false;\n\n    if (this.path.ruleStack[0] !== this.topProd.name) {\n      throw Error(\"The path does not start with the walker's top Rule!\");\n    }\n\n    // immutable for the win\n    this.ruleStack = clone(this.path.ruleStack).reverse(); // intelij bug requires assertion\n    this.occurrenceStack = clone(this.path.occurrenceStack).reverse(); // intelij bug requires assertion\n\n    // already verified that the first production is valid, we now seek the 2nd production\n    this.ruleStack.pop();\n    this.occurrenceStack.pop();\n\n    this.updateExpectedNext();\n    this.walk(this.topProd);\n\n    return this.possibleTokTypes;\n  }\n\n  walk(\n    prod: { definition: IProduction[] },\n    prevRest: IProduction[] = [],\n  ): void {\n    // stop scanning once we found the path\n    if (!this.found) {\n      super.walk(prod, prevRest);\n    }\n  }\n\n  walkProdRef(\n    refProd: NonTerminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    // found the next production, need to keep walking in it\n    if (\n      refProd.referencedRule.name === this.nextProductionName &&\n      refProd.idx === this.nextProductionOccurrence\n    ) {\n      const fullRest = currRest.concat(prevRest);\n      this.updateExpectedNext();\n      this.walk(refProd.referencedRule, fullRest);\n    }\n  }\n\n  updateExpectedNext(): void {\n    // need to consume the Terminal\n    if (isEmpty(this.ruleStack)) {\n      // must reset nextProductionXXX to avoid walking down another Top Level production while what we are\n      // really seeking is the last Terminal...\n      this.nextProductionName = \"\";\n      this.nextProductionOccurrence = 0;\n      this.isAtEndOfPath = true;\n    } else {\n      this.nextProductionName = this.ruleStack.pop()!;\n      this.nextProductionOccurrence = this.occurrenceStack.pop()!;\n    }\n  }\n}\n\nexport class NextAfterTokenWalker extends AbstractNextPossibleTokensWalker {\n  private nextTerminalName = \"\";\n  private nextTerminalOccurrence = 0;\n\n  constructor(\n    topProd: Rule,\n    protected path: ITokenGrammarPath,\n  ) {\n    super(topProd, path);\n    this.nextTerminalName = this.path.lastTok.name;\n    this.nextTerminalOccurrence = this.path.lastTokOccurrence;\n  }\n\n  walkTerminal(\n    terminal: Terminal,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      this.isAtEndOfPath &&\n      terminal.terminalType.name === this.nextTerminalName &&\n      terminal.idx === this.nextTerminalOccurrence &&\n      !this.found\n    ) {\n      const fullRest = currRest.concat(prevRest);\n      const restProd = new Alternative({ definition: fullRest });\n      this.possibleTokTypes = first(restProd);\n      this.found = true;\n    }\n  }\n}\n\nexport type AlternativesFirstTokens = TokenType[][];\n\nexport interface IFirstAfterRepetition {\n  token: TokenType | undefined;\n  occurrence: number | undefined;\n  isEndOfRule: boolean | undefined;\n}\n\n/**\n * This walker only \"walks\" a single \"TOP\" level in the Grammar Ast, this means\n * it never \"follows\" production refs\n */\nexport class AbstractNextTerminalAfterProductionWalker extends RestWalker {\n  protected result: IFirstAfterRepetition = {\n    token: undefined,\n    occurrence: undefined,\n  
  isEndOfRule: undefined,\n  };\n\n  constructor(\n    protected topRule: Rule,\n    protected occurrence: number,\n  ) {\n    super();\n  }\n\n  startWalking(): IFirstAfterRepetition {\n    this.walk(this.topRule);\n    return this.result;\n  }\n}\n\nexport class NextTerminalAfterManyWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (manyProd.idx === this.occurrence) {\n      const firstAfterMany = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterMany === undefined;\n      if (firstAfterMany instanceof Terminal) {\n        this.result.token = firstAfterMany.terminalType;\n        this.result.occurrence = firstAfterMany.idx;\n      }\n    } else {\n      super.walkMany(manyProd, currRest, prevRest);\n    }\n  }\n}\n\nexport class NextTerminalAfterManySepWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (manySepProd.idx === this.occurrence) {\n      const firstAfterManySep = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterManySep === undefined;\n      if (firstAfterManySep instanceof Terminal) {\n        this.result.token = firstAfterManySep.terminalType;\n        this.result.occurrence = firstAfterManySep.idx;\n      }\n    } else {\n      super.walkManySep(manySepProd, currRest, prevRest);\n    }\n  }\n}\n\nexport class NextTerminalAfterAtLeastOneWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (atLeastOneProd.idx === this.occurrence) {\n      const firstAfterAtLeastOne = _first(currRest.concat(prevRest));\n      this.result.isEndOfRule = firstAfterAtLeastOne === undefined;\n      if (firstAfterAtLeastOne instanceof Terminal) {\n        this.result.token = firstAfterAtLeastOne.terminalType;\n        this.result.occurrence = firstAfterAtLeastOne.idx;\n      }\n    } else {\n      super.walkAtLeastOne(atLeastOneProd, currRest, prevRest);\n    }\n  }\n}\n\n// TODO: reduce code duplication in the AfterWalkers\nexport class NextTerminalAfterAtLeastOneSepWalker extends AbstractNextTerminalAfterProductionWalker {\n  walkAtLeastOneSep(\n    atleastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (atleastOneSepProd.idx === this.occurrence) {\n      const firstAfterfirstAfterAtLeastOneSep = _first(\n        currRest.concat(prevRest),\n      );\n      this.result.isEndOfRule = firstAfterfirstAfterAtLeastOneSep === undefined;\n      if (firstAfterfirstAfterAtLeastOneSep instanceof Terminal) {\n        this.result.token = firstAfterfirstAfterAtLeastOneSep.terminalType;\n        this.result.occurrence = firstAfterfirstAfterAtLeastOneSep.idx;\n      }\n    } else {\n      super.walkAtLeastOneSep(atleastOneSepProd, currRest, prevRest);\n    }\n  }\n}\n\nexport interface PartialPathAndSuffixes {\n  partialPath: TokenType[];\n  suffixDef: IProduction[];\n}\n\nexport function possiblePathsFrom(\n  targetDef: IProduction[],\n  maxLength: number,\n  currPath: TokenType[] = [],\n): PartialPathAndSuffixes[] {\n  // avoid side effects\n  currPath = clone(currPath);\n  let result: PartialPathAndSuffixes[] = [];\n  let i = 0;\n\n  // TODO: avoid inner 
funcs\n  function remainingPathWith(nextDef: IProduction[]) {\n    return nextDef.concat(drop(targetDef, i + 1));\n  }\n\n  // TODO: avoid inner funcs\n  function getAlternativesForProd(definition: IProduction[]) {\n    const alternatives = possiblePathsFrom(\n      remainingPathWith(definition),\n      maxLength,\n      currPath,\n    );\n    return result.concat(alternatives);\n  }\n\n  /**\n   * Mandatory productions will halt the loop as the paths computed from their recursive calls will already contain the\n   * following (rest) of the targetDef.\n   *\n   * For optional productions (Option/Repetition/...) the loop will continue to represent the paths that do not include the\n   * the optional production.\n   */\n  while (currPath.length < maxLength && i < targetDef.length) {\n    const prod = targetDef[i];\n\n    /* istanbul ignore else */\n    if (prod instanceof Alternative) {\n      return getAlternativesForProd(prod.definition);\n    } else if (prod instanceof NonTerminal) {\n      return getAlternativesForProd(prod.definition);\n    } else if (prod instanceof Option) {\n      result = getAlternativesForProd(prod.definition);\n    } else if (prod instanceof RepetitionMandatory) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: prod.definition,\n        }),\n      ]);\n      return getAlternativesForProd(newDef);\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n      const newDef = [\n        new Alternative({ definition: prod.definition }),\n        new Repetition({\n          definition: [new Terminal({ terminalType: prod.separator })].concat(\n            prod.definition,\n          ),\n        }),\n      ];\n      return getAlternativesForProd(newDef);\n    } else if (prod instanceof RepetitionWithSeparator) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: [new Terminal({ terminalType: prod.separator })].concat(\n            prod.definition,\n          ),\n        }),\n      ]);\n      result = getAlternativesForProd(newDef);\n    } else if (prod instanceof Repetition) {\n      const newDef = prod.definition.concat([\n        new Repetition({\n          definition: prod.definition,\n        }),\n      ]);\n      result = getAlternativesForProd(newDef);\n    } else if (prod instanceof Alternation) {\n      forEach(prod.definition, (currAlt) => {\n        // TODO: this is a limited check for empty alternatives\n        //   It would prevent a common case of infinite loops during parser initialization.\n        //   However **in-directly** empty alternatives may still cause issues.\n        if (isEmpty(currAlt.definition) === false) {\n          result = getAlternativesForProd(currAlt.definition);\n        }\n      });\n      return result;\n    } else if (prod instanceof Terminal) {\n      currPath.push(prod.terminalType);\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n\n    i++;\n  }\n  result.push({\n    partialPath: currPath,\n    suffixDef: drop(targetDef, i),\n  });\n\n  return result;\n}\n\ninterface IPathToExamine {\n  idx: number;\n  def: IProduction[];\n  ruleStack: string[];\n  occurrenceStack: number[];\n}\n\nexport function nextPossibleTokensAfter(\n  initialDef: IProduction[],\n  tokenVector: IToken[],\n  tokMatcher: TokenMatcher,\n  maxLookAhead: number,\n): ISyntacticContentAssistPath[] {\n  const EXIT_NON_TERMINAL: any = \"EXIT_NONE_TERMINAL\";\n  // to avoid creating a new Array each time.\n  const EXIT_NON_TERMINAL_ARR = 
[EXIT_NON_TERMINAL];\n  const EXIT_ALTERNATIVE: any = \"EXIT_ALTERNATIVE\";\n  let foundCompletePath = false;\n\n  const tokenVectorLength = tokenVector.length;\n  const minimalAlternativesIndex = tokenVectorLength - maxLookAhead - 1;\n\n  const result: ISyntacticContentAssistPath[] = [];\n\n  const possiblePaths: IPathToExamine[] = [];\n  possiblePaths.push({\n    idx: -1,\n    def: initialDef,\n    ruleStack: [],\n    occurrenceStack: [],\n  });\n\n  while (!isEmpty(possiblePaths)) {\n    const currPath = possiblePaths.pop()!;\n\n    // skip alternatives if no more results can be found (assuming deterministic grammar with fixed lookahead)\n    if (currPath === EXIT_ALTERNATIVE) {\n      if (\n        foundCompletePath &&\n        last(possiblePaths)!.idx <= minimalAlternativesIndex\n      ) {\n        // remove irrelevant alternative\n        possiblePaths.pop();\n      }\n      continue;\n    }\n\n    const currDef = currPath.def;\n    const currIdx = currPath.idx;\n    const currRuleStack = currPath.ruleStack;\n    const currOccurrenceStack = currPath.occurrenceStack;\n\n    // For Example: an empty path could exist in a valid grammar in the case of an EMPTY_ALT\n    if (isEmpty(currDef)) {\n      continue;\n    }\n\n    const prod = currDef[0];\n    /* istanbul ignore else */\n    if (prod === EXIT_NON_TERMINAL) {\n      const nextPath = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: dropRight(currRuleStack),\n        occurrenceStack: dropRight(currOccurrenceStack),\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof Terminal) {\n      /* istanbul ignore else */\n      if (currIdx < tokenVectorLength - 1) {\n        const nextIdx = currIdx + 1;\n        const actualToken = tokenVector[nextIdx];\n        if (tokMatcher!(actualToken, prod.terminalType)) {\n          const nextPath = {\n            idx: nextIdx,\n            def: drop(currDef),\n            ruleStack: currRuleStack,\n            occurrenceStack: currOccurrenceStack,\n          };\n          possiblePaths.push(nextPath);\n        }\n        // end of the line\n      } else if (currIdx === tokenVectorLength - 1) {\n        // IGNORE ABOVE ELSE\n        result.push({\n          nextTokenType: prod.terminalType,\n          nextTokenOccurrence: prod.idx,\n          ruleStack: currRuleStack,\n          occurrenceStack: currOccurrenceStack,\n        });\n        foundCompletePath = true;\n      } else {\n        throw Error(\"non exhaustive match\");\n      }\n    } else if (prod instanceof NonTerminal) {\n      const newRuleStack = clone(currRuleStack);\n      newRuleStack.push(prod.nonTerminalName);\n\n      const newOccurrenceStack = clone(currOccurrenceStack);\n      newOccurrenceStack.push(prod.idx);\n\n      const nextPath = {\n        idx: currIdx,\n        def: prod.definition.concat(EXIT_NON_TERMINAL_ARR, drop(currDef)),\n        ruleStack: newRuleStack,\n        occurrenceStack: newOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof Option) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      const 
nextPathWith = {\n        idx: currIdx,\n        def: prod.definition.concat(drop(currDef)),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof RepetitionMandatory) {\n      // TODO:(THE NEW operators here take a while...) (convert once?)\n      const secondIteration = new Repetition({\n        definition: prod.definition,\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([secondIteration], drop(currDef));\n      const nextPath = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\n      // TODO:(THE NEW operators here take a while...) (convert once?)\n      const separatorGast = new Terminal({\n        terminalType: prod.separator,\n      });\n      const secondIteration = new Repetition({\n        definition: [separatorGast].concat(prod.definition),\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([secondIteration], drop(currDef));\n      const nextPath = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPath);\n    } else if (prod instanceof RepetitionWithSeparator) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      const separatorGast = new Terminal({\n        terminalType: prod.separator,\n      });\n      const nthRepetition = new Repetition({\n        definition: [separatorGast].concat(prod.definition),\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([nthRepetition], drop(currDef));\n      const nextPathWith = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof Repetition) {\n      // the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      const nextPathWithout = {\n        idx: currIdx,\n        def: drop(currDef),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWithout);\n      // required marker to avoid backtracking paths whose higher priority alternatives already matched\n      possiblePaths.push(EXIT_ALTERNATIVE);\n\n      // TODO: an empty repetition will cause infinite loops here, will the parser detect this in selfAnalysis?\n      const nthRepetition = new Repetition({\n        definition: prod.definition,\n        idx: prod.idx,\n      });\n      const nextDef = prod.definition.concat([nthRepetition], drop(currDef));\n      const nextPathWith = {\n        idx: currIdx,\n        def: nextDef,\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      };\n      possiblePaths.push(nextPathWith);\n    } else if (prod instanceof Alternation) {\n      
// the order of alternatives is meaningful, FILO (Last path will be traversed first).\n      for (let i = prod.definition.length - 1; i >= 0; i--) {\n        const currAlt: any = prod.definition[i];\n        const currAltPath = {\n          idx: currIdx,\n          def: currAlt.definition.concat(drop(currDef)),\n          ruleStack: currRuleStack,\n          occurrenceStack: currOccurrenceStack,\n        };\n        possiblePaths.push(currAltPath);\n        possiblePaths.push(EXIT_ALTERNATIVE);\n      }\n    } else if (prod instanceof Alternative) {\n      possiblePaths.push({\n        idx: currIdx,\n        def: prod.definition.concat(drop(currDef)),\n        ruleStack: currRuleStack,\n        occurrenceStack: currOccurrenceStack,\n      });\n    } else if (prod instanceof Rule) {\n      // last because we should only encounter at most a single one of these per invocation.\n      possiblePaths.push(\n        expandTopLevelRule(prod, currIdx, currRuleStack, currOccurrenceStack),\n      );\n    } else {\n      throw Error(\"non exhaustive match\");\n    }\n  }\n  return result;\n}\n\nfunction expandTopLevelRule(\n  topRule: Rule,\n  currIdx: number,\n  currRuleStack: string[],\n  currOccurrenceStack: number[],\n): IPathToExamine {\n  const newRuleStack = clone(currRuleStack);\n  newRuleStack.push(topRule.name);\n\n  const newCurrOccurrenceStack = clone(currOccurrenceStack);\n  // top rule is always assumed to have been called with occurrence index 1\n  newCurrOccurrenceStack.push(1);\n\n  return {\n    idx: currIdx,\n    def: topRule.definition,\n    ruleStack: newRuleStack,\n    occurrenceStack: newCurrOccurrenceStack,\n  };\n}\n", "import { every, flatten, forEach, has, isEmpty, map, reduce } from \"lodash-es\";\nimport { possiblePathsFrom } from \"./interpreter.js\";\nimport { RestWalker } from \"./rest.js\";\nimport { Predicate, TokenMatcher } from \"../parser/parser.js\";\nimport {\n  tokenStructuredMatcher,\n  tokenStructuredMatcherNoCategories,\n} from \"../../scan/tokens.js\";\nimport {\n  Alternation,\n  Alternative as AlternativeGAST,\n  GAstVisitor,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n} from \"@chevrotain/gast\";\nimport {\n  BaseParser,\n  IOrAlt,\n  IProduction,\n  IProductionWithOccurrence,\n  LookaheadProductionType,\n  LookaheadSequence,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\n\nexport enum PROD_TYPE {\n  OPTION,\n  REPETITION,\n  REPETITION_MANDATORY,\n  REPETITION_MANDATORY_WITH_SEPARATOR,\n  REPETITION_WITH_SEPARATOR,\n  ALTERNATION,\n}\n\nexport function getProdType(\n  prod: IProduction | LookaheadProductionType,\n): PROD_TYPE {\n  /* istanbul ignore else */\n  if (prod instanceof Option || prod === \"Option\") {\n    return PROD_TYPE.OPTION;\n  } else if (prod instanceof Repetition || prod === \"Repetition\") {\n    return PROD_TYPE.REPETITION;\n  } else if (\n    prod instanceof RepetitionMandatory ||\n    prod === \"RepetitionMandatory\"\n  ) {\n    return PROD_TYPE.REPETITION_MANDATORY;\n  } else if (\n    prod instanceof RepetitionMandatoryWithSeparator ||\n    prod === \"RepetitionMandatoryWithSeparator\"\n  ) {\n    return PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR;\n  } else if (\n    prod instanceof RepetitionWithSeparator ||\n    prod === \"RepetitionWithSeparator\"\n  ) {\n    return PROD_TYPE.REPETITION_WITH_SEPARATOR;\n  } else if (prod instanceof Alternation || prod === \"Alternation\") {\n    return PROD_TYPE.ALTERNATION;\n  } else {\n    throw Error(\"non 
exhaustive match\");\n  }\n}\n\nexport function getLookaheadPaths(options: {\n  occurrence: number;\n  rule: Rule;\n  prodType: LookaheadProductionType;\n  maxLookahead: number;\n}): LookaheadSequence[] {\n  const { occurrence, rule, prodType, maxLookahead } = options;\n  const type = getProdType(prodType);\n  if (type === PROD_TYPE.ALTERNATION) {\n    return getLookaheadPathsForOr(occurrence, rule, maxLookahead);\n  } else {\n    return getLookaheadPathsForOptionalProd(\n      occurrence,\n      rule,\n      type,\n      maxLookahead,\n    );\n  }\n}\n\nexport function buildLookaheadFuncForOr(\n  occurrence: number,\n  ruleGrammar: Rule,\n  maxLookahead: number,\n  hasPredicates: boolean,\n  dynamicTokensEnabled: boolean,\n  laFuncBuilder: Function,\n): (orAlts?: IOrAlt[]) => number | undefined {\n  const lookAheadPaths = getLookaheadPathsForOr(\n    occurrence,\n    ruleGrammar,\n    maxLookahead,\n  );\n\n  const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)\n    ? tokenStructuredMatcherNoCategories\n    : tokenStructuredMatcher;\n\n  return laFuncBuilder(\n    lookAheadPaths,\n    hasPredicates,\n    tokenMatcher,\n    dynamicTokensEnabled,\n  );\n}\n\n/**\n *  When dealing with an Optional production (OPTION/MANY/2nd iteration of AT_LEAST_ONE/...) we need to compare\n *  the lookahead \"inside\" the production and the lookahead immediately \"after\" it in the same top level rule (context free).\n *\n *  Example: given a production:\n *  ABC(DE)?DF\n *\n *  The optional '(DE)?' should only be entered if we see 'DE'. a single Token 'D' is not sufficient to distinguish between the two\n *  alternatives.\n *\n *  @returns A Lookahead function which will return true IFF the parser should parse the Optional production.\n */\nexport function buildLookaheadFuncForOptionalProd(\n  occurrence: number,\n  ruleGrammar: Rule,\n  k: number,\n  dynamicTokensEnabled: boolean,\n  prodType: PROD_TYPE,\n  lookaheadBuilder: (\n    lookAheadSequence: LookaheadSequence,\n    tokenMatcher: TokenMatcher,\n    dynamicTokensEnabled: boolean,\n  ) => () => boolean,\n): () => boolean {\n  const lookAheadPaths = getLookaheadPathsForOptionalProd(\n    occurrence,\n    ruleGrammar,\n    prodType,\n    k,\n  );\n\n  const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)\n    ? 
tokenStructuredMatcherNoCategories\n    : tokenStructuredMatcher;\n\n  return lookaheadBuilder(\n    lookAheadPaths[0],\n    tokenMatcher,\n    dynamicTokensEnabled,\n  );\n}\n\nexport type Alternative = TokenType[][];\n\nexport function buildAlternativesLookAheadFunc(\n  alts: LookaheadSequence[],\n  hasPredicates: boolean,\n  tokenMatcher: TokenMatcher,\n  dynamicTokensEnabled: boolean,\n): (orAlts: IOrAlt[]) => number | undefined {\n  const numOfAlts = alts.length;\n  const areAllOneTokenLookahead = every(alts, (currAlt) => {\n    return every(currAlt, (currPath) => {\n      return currPath.length === 1;\n    });\n  });\n\n  // This version takes into account the predicates as well.\n  if (hasPredicates) {\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (\n      this: BaseParser,\n      orAlts: IOrAlt[],\n    ): number | undefined {\n      // unfortunately the predicates must be extracted every single time\n      // as they cannot be cached due to references to parameters(vars) which are no longer valid.\n      // note that in the common case of no predicates, no cpu time will be wasted on this (see else block)\n      const predicates: (Predicate | undefined)[] = map(\n        orAlts,\n        (currAlt) => currAlt.GATE,\n      );\n\n      for (let t = 0; t < numOfAlts; t++) {\n        const currAlt = alts[t];\n        const currNumOfPaths = currAlt.length;\n\n        const currPredicate = predicates[t];\n        if (currPredicate !== undefined && currPredicate.call(this) === false) {\n          // if the predicate does not match there is no point in checking the paths\n          continue;\n        }\n        nextPath: for (let j = 0; j < currNumOfPaths; j++) {\n          const currPath = currAlt[j];\n          const currPathLength = currPath.length;\n          for (let i = 0; i < currPathLength; i++) {\n            const nextToken = this.LA(i + 1);\n            if (tokenMatcher(nextToken, currPath[i]) === false) {\n              // mismatch in current path\n              // try the next pth\n              continue nextPath;\n            }\n          }\n          // found a full path that matches.\n          // this will also work for an empty ALT as the loop will be skipped\n          return t;\n        }\n        // none of the paths for the current alternative matched\n        // try the next alternative\n      }\n      // none of the alternatives could be matched\n      return undefined;\n    };\n  } else if (areAllOneTokenLookahead && !dynamicTokensEnabled) {\n    // optimized (common) case of all the lookaheads paths requiring only\n    // a single token lookahead. These Optimizations cannot work if dynamically defined Tokens are used.\n    const singleTokenAlts = map(alts, (currAlt) => {\n      return flatten(currAlt);\n    });\n\n    const choiceToAlt = reduce(\n      singleTokenAlts,\n      (result, currAlt, idx) => {\n        forEach(currAlt, (currTokType) => {\n          if (!has(result, currTokType.tokenTypeIdx!)) {\n            result[currTokType.tokenTypeIdx!] 
= idx;\n          }\n          forEach(currTokType.categoryMatches!, (currExtendingType) => {\n            if (!has(result, currExtendingType)) {\n              result[currExtendingType] = idx;\n            }\n          });\n        });\n        return result;\n      },\n      {} as Record,\n    );\n\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (this: BaseParser): number {\n      const nextToken = this.LA(1);\n      return choiceToAlt[nextToken.tokenTypeIdx];\n    };\n  } else {\n    // optimized lookahead without needing to check the predicates at all.\n    // this causes code duplication which is intentional to improve performance.\n    /**\n     * @returns {number} - The chosen alternative index\n     */\n    return function (this: BaseParser): number | undefined {\n      for (let t = 0; t < numOfAlts; t++) {\n        const currAlt = alts[t];\n        const currNumOfPaths = currAlt.length;\n        nextPath: for (let j = 0; j < currNumOfPaths; j++) {\n          const currPath = currAlt[j];\n          const currPathLength = currPath.length;\n          for (let i = 0; i < currPathLength; i++) {\n            const nextToken = this.LA(i + 1);\n            if (tokenMatcher(nextToken, currPath[i]) === false) {\n              // mismatch in current path\n              // try the next pth\n              continue nextPath;\n            }\n          }\n          // found a full path that matches.\n          // this will also work for an empty ALT as the loop will be skipped\n          return t;\n        }\n        // none of the paths for the current alternative matched\n        // try the next alternative\n      }\n      // none of the alternatives could be matched\n      return undefined;\n    };\n  }\n}\n\nexport function buildSingleAlternativeLookaheadFunction(\n  alt: LookaheadSequence,\n  tokenMatcher: TokenMatcher,\n  dynamicTokensEnabled: boolean,\n): () => boolean {\n  const areAllOneTokenLookahead = every(alt, (currPath) => {\n    return currPath.length === 1;\n  });\n\n  const numOfPaths = alt.length;\n\n  // optimized (common) case of all the lookaheads paths requiring only\n  // a single token lookahead.\n  if (areAllOneTokenLookahead && !dynamicTokensEnabled) {\n    const singleTokensTypes = flatten(alt);\n\n    if (\n      singleTokensTypes.length === 1 &&\n      isEmpty((singleTokensTypes[0]).categoryMatches)\n    ) {\n      const expectedTokenType = singleTokensTypes[0];\n      const expectedTokenUniqueKey = (expectedTokenType).tokenTypeIdx;\n\n      return function (this: BaseParser): boolean {\n        return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey;\n      };\n    } else {\n      const choiceToAlt = reduce(\n        singleTokensTypes,\n        (result, currTokType, idx) => {\n          result[currTokType.tokenTypeIdx!] 
= true;\n          forEach(currTokType.categoryMatches!, (currExtendingType) => {\n            result[currExtendingType] = true;\n          });\n          return result;\n        },\n        [] as boolean[],\n      );\n\n      return function (this: BaseParser): boolean {\n        const nextToken = this.LA(1);\n        return choiceToAlt[nextToken.tokenTypeIdx] === true;\n      };\n    }\n  } else {\n    return function (this: BaseParser): boolean {\n      nextPath: for (let j = 0; j < numOfPaths; j++) {\n        const currPath = alt[j];\n        const currPathLength = currPath.length;\n        for (let i = 0; i < currPathLength; i++) {\n          const nextToken = this.LA(i + 1);\n          if (tokenMatcher(nextToken, currPath[i]) === false) {\n            // mismatch in current path\n            // try the next pth\n            continue nextPath;\n          }\n        }\n        // found a full path that matches.\n        return true;\n      }\n\n      // none of the paths matched\n      return false;\n    };\n  }\n}\n\nclass RestDefinitionFinderWalker extends RestWalker {\n  private restDef: IProduction[];\n\n  constructor(\n    private topProd: Rule,\n    private targetOccurrence: number,\n    private targetProdType: PROD_TYPE,\n  ) {\n    super();\n  }\n\n  startWalking(): IProduction[] {\n    this.walk(this.topProd);\n    return this.restDef;\n  }\n\n  private checkIsTarget(\n    node: IProductionWithOccurrence,\n    expectedProdType: PROD_TYPE,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): boolean {\n    if (\n      node.idx === this.targetOccurrence &&\n      this.targetProdType === expectedProdType\n    ) {\n      this.restDef = currRest.concat(prevRest);\n      return true;\n    }\n    // performance optimization, do not iterate over the entire Grammar ast after we have found the target\n    return false;\n  }\n\n  walkOption(\n    optionProd: Option,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (!this.checkIsTarget(optionProd, PROD_TYPE.OPTION, currRest, prevRest)) {\n      super.walkOption(optionProd, currRest, prevRest);\n    }\n  }\n\n  walkAtLeastOne(\n    atLeastOneProd: RepetitionMandatory,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        atLeastOneProd,\n        PROD_TYPE.REPETITION_MANDATORY,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(atLeastOneProd, currRest, prevRest);\n    }\n  }\n\n  walkAtLeastOneSep(\n    atLeastOneSepProd: RepetitionMandatoryWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        atLeastOneSepProd,\n        PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(atLeastOneSepProd, currRest, prevRest);\n    }\n  }\n\n  walkMany(\n    manyProd: Repetition,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(manyProd, PROD_TYPE.REPETITION, currRest, prevRest)\n    ) {\n      super.walkOption(manyProd, currRest, prevRest);\n    }\n  }\n\n  walkManySep(\n    manySepProd: RepetitionWithSeparator,\n    currRest: IProduction[],\n    prevRest: IProduction[],\n  ): void {\n    if (\n      !this.checkIsTarget(\n        manySepProd,\n        PROD_TYPE.REPETITION_WITH_SEPARATOR,\n        currRest,\n        prevRest,\n      )\n    ) {\n      super.walkOption(manySepProd, currRest, prevRest);\n    
}\n  }\n}\n\n/**\n * Returns the definition of a target production in a top level level rule.\n */\nclass InsideDefinitionFinderVisitor extends GAstVisitor {\n  public result: IProduction[] = [];\n\n  constructor(\n    private targetOccurrence: number,\n    private targetProdType: PROD_TYPE,\n    private targetRef?: any,\n  ) {\n    super();\n  }\n\n  private checkIsTarget(\n    node: { definition: IProduction[] } & IProductionWithOccurrence,\n    expectedProdName: PROD_TYPE,\n  ): void {\n    if (\n      node.idx === this.targetOccurrence &&\n      this.targetProdType === expectedProdName &&\n      (this.targetRef === undefined || node === this.targetRef)\n    ) {\n      this.result = node.definition;\n    }\n  }\n\n  public visitOption(node: Option): void {\n    this.checkIsTarget(node, PROD_TYPE.OPTION);\n  }\n\n  public visitRepetition(node: Repetition): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION);\n  }\n\n  public visitRepetitionMandatory(node: RepetitionMandatory): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    node: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);\n  }\n\n  public visitRepetitionWithSeparator(node: RepetitionWithSeparator): void {\n    this.checkIsTarget(node, PROD_TYPE.REPETITION_WITH_SEPARATOR);\n  }\n\n  public visitAlternation(node: Alternation): void {\n    this.checkIsTarget(node, PROD_TYPE.ALTERNATION);\n  }\n}\n\nfunction initializeArrayOfArrays(size: number): any[][] {\n  const result = new Array(size);\n  for (let i = 0; i < size; i++) {\n    result[i] = [];\n  }\n  return result;\n}\n\n/**\n * A sort of hash function between a Path in the grammar and a string.\n * Note that this returns multiple \"hashes\" to support the scenario of token categories.\n * -  A single path with categories may match multiple **actual** paths.\n */\nfunction pathToHashKeys(path: TokenType[]): string[] {\n  let keys = [\"\"];\n  for (let i = 0; i < path.length; i++) {\n    const tokType = path[i];\n    const longerKeys = [];\n    for (let j = 0; j < keys.length; j++) {\n      const currShorterKey = keys[j];\n      longerKeys.push(currShorterKey + \"_\" + tokType.tokenTypeIdx);\n      for (let t = 0; t < tokType.categoryMatches!.length; t++) {\n        const categoriesKeySuffix = \"_\" + tokType.categoryMatches![t];\n        longerKeys.push(currShorterKey + categoriesKeySuffix);\n      }\n    }\n    keys = longerKeys;\n  }\n  return keys;\n}\n\n/**\n * Imperative style due to being called from a hot spot\n */\nfunction isUniquePrefixHash(\n  altKnownPathsKeys: Record[],\n  searchPathKeys: string[],\n  idx: number,\n): boolean {\n  for (\n    let currAltIdx = 0;\n    currAltIdx < altKnownPathsKeys.length;\n    currAltIdx++\n  ) {\n    // We only want to test vs the other alternatives\n    if (currAltIdx === idx) {\n      continue;\n    }\n    const otherAltKnownPathsKeys = altKnownPathsKeys[currAltIdx];\n    for (let searchIdx = 0; searchIdx < searchPathKeys.length; searchIdx++) {\n      const searchKey = searchPathKeys[searchIdx];\n      if (otherAltKnownPathsKeys[searchKey] === true) {\n        return false;\n      }\n    }\n  }\n  // None of the SearchPathKeys were found in any of the other alternatives\n  return true;\n}\n\nexport function lookAheadSequenceFromAlternatives(\n  altsDefs: IProduction[],\n  k: number,\n): LookaheadSequence[] {\n  const partialAlts = map(altsDefs, (currAlt) =>\n    
possiblePathsFrom([currAlt], 1),\n  );\n  const finalResult = initializeArrayOfArrays(partialAlts.length);\n  const altsHashes = map(partialAlts, (currAltPaths) => {\n    const dict: { [key: string]: boolean } = {};\n    forEach(currAltPaths, (item) => {\n      const keys = pathToHashKeys(item.partialPath);\n      forEach(keys, (currKey) => {\n        dict[currKey] = true;\n      });\n    });\n    return dict;\n  });\n  let newData = partialAlts;\n\n  // maxLookahead loop\n  for (let pathLength = 1; pathLength <= k; pathLength++) {\n    const currDataset = newData;\n    newData = initializeArrayOfArrays(currDataset.length);\n\n    // alternatives loop\n    for (let altIdx = 0; altIdx < currDataset.length; altIdx++) {\n      const currAltPathsAndSuffixes = currDataset[altIdx];\n      // paths in current alternative loop\n      for (\n        let currPathIdx = 0;\n        currPathIdx < currAltPathsAndSuffixes.length;\n        currPathIdx++\n      ) {\n        const currPathPrefix = currAltPathsAndSuffixes[currPathIdx].partialPath;\n        const suffixDef = currAltPathsAndSuffixes[currPathIdx].suffixDef;\n        const prefixKeys = pathToHashKeys(currPathPrefix);\n        const isUnique = isUniquePrefixHash(altsHashes, prefixKeys, altIdx);\n        // End of the line for this path.\n        if (isUnique || isEmpty(suffixDef) || currPathPrefix.length === k) {\n          const currAltResult = finalResult[altIdx];\n          // TODO: Can we implement a containsPath using Maps/Dictionaries?\n          if (containsPath(currAltResult, currPathPrefix) === false) {\n            currAltResult.push(currPathPrefix);\n            // Update all new  keys for the current path.\n            for (let j = 0; j < prefixKeys.length; j++) {\n              const currKey = prefixKeys[j];\n              altsHashes[altIdx][currKey] = true;\n            }\n          }\n        }\n        // Expand longer paths\n        else {\n          const newPartialPathsAndSuffixes = possiblePathsFrom(\n            suffixDef,\n            pathLength + 1,\n            currPathPrefix,\n          );\n          newData[altIdx] = newData[altIdx].concat(newPartialPathsAndSuffixes);\n\n          // Update keys for new known paths\n          forEach(newPartialPathsAndSuffixes, (item) => {\n            const prefixKeys = pathToHashKeys(item.partialPath);\n            forEach(prefixKeys, (key) => {\n              altsHashes[altIdx][key] = true;\n            });\n          });\n        }\n      }\n    }\n  }\n\n  return finalResult;\n}\n\nexport function getLookaheadPathsForOr(\n  occurrence: number,\n  ruleGrammar: Rule,\n  k: number,\n  orProd?: Alternation,\n): LookaheadSequence[] {\n  const visitor = new InsideDefinitionFinderVisitor(\n    occurrence,\n    PROD_TYPE.ALTERNATION,\n    orProd,\n  );\n  ruleGrammar.accept(visitor);\n  return lookAheadSequenceFromAlternatives(visitor.result, k);\n}\n\nexport function getLookaheadPathsForOptionalProd(\n  occurrence: number,\n  ruleGrammar: Rule,\n  prodType: PROD_TYPE,\n  k: number,\n): LookaheadSequence[] {\n  const insideDefVisitor = new InsideDefinitionFinderVisitor(\n    occurrence,\n    prodType,\n  );\n  ruleGrammar.accept(insideDefVisitor);\n  const insideDef = insideDefVisitor.result;\n\n  const afterDefWalker = new RestDefinitionFinderWalker(\n    ruleGrammar,\n    occurrence,\n    prodType,\n  );\n  const afterDef = afterDefWalker.startWalking();\n\n  const insideFlat = new AlternativeGAST({ definition: insideDef });\n  const afterFlat = new AlternativeGAST({ definition: afterDef 
});\n\n  return lookAheadSequenceFromAlternatives([insideFlat, afterFlat], k);\n}\n\nexport function containsPath(\n  alternative: Alternative,\n  searchPath: TokenType[],\n): boolean {\n  compareOtherPath: for (let i = 0; i < alternative.length; i++) {\n    const otherPath = alternative[i];\n    if (otherPath.length !== searchPath.length) {\n      continue;\n    }\n    for (let j = 0; j < otherPath.length; j++) {\n      const searchTok = searchPath[j];\n      const otherTok = otherPath[j];\n\n      const matchingTokens =\n        searchTok === otherTok ||\n        otherTok.categoryMatchesMap![searchTok.tokenTypeIdx!] !== undefined;\n      if (matchingTokens === false) {\n        continue compareOtherPath;\n      }\n    }\n    return true;\n  }\n\n  return false;\n}\n\nexport function isStrictPrefixOfPath(\n  prefix: TokenType[],\n  other: TokenType[],\n): boolean {\n  return (\n    prefix.length < other.length &&\n    every(prefix, (tokType, idx) => {\n      const otherTokType = other[idx];\n      return (\n        tokType === otherTokType ||\n        otherTokType.categoryMatchesMap![tokType.tokenTypeIdx!]\n      );\n    })\n  );\n}\n\nexport function areTokenCategoriesNotUsed(\n  lookAheadPaths: LookaheadSequence[],\n): boolean {\n  return every(lookAheadPaths, (singleAltPaths) =>\n    every(singleAltPaths, (singlePath) =>\n      every(singlePath, (token) => isEmpty(token.categoryMatches!)),\n    ),\n  );\n}\n", "import {\n  clone,\n  compact,\n  difference,\n  drop,\n  dropRight,\n  filter,\n  first,\n  flatMap,\n  flatten,\n  forEach,\n  groupBy,\n  includes,\n  isEmpty,\n  map,\n  pickBy,\n  reduce,\n  reject,\n  values,\n} from \"lodash-es\";\nimport {\n  IParserAmbiguousAlternativesDefinitionError,\n  IParserDuplicatesDefinitionError,\n  IParserEmptyAlternativeDefinitionError,\n  ParserDefinitionErrorType,\n} from \"../parser/parser.js\";\nimport {\n  Alternation,\n  Alternative as AlternativeGAST,\n  GAstVisitor,\n  getProductionDslName,\n  isOptionalProd,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport {\n  Alternative,\n  containsPath,\n  getLookaheadPathsForOptionalProd,\n  getLookaheadPathsForOr,\n  getProdType,\n  isStrictPrefixOfPath,\n} from \"./lookahead.js\";\nimport { nextPossibleTokensAfter } from \"./interpreter.js\";\nimport {\n  ILookaheadStrategy,\n  IProduction,\n  IProductionWithOccurrence,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  IGrammarValidatorErrorMessageProvider,\n  IParserDefinitionError,\n} from \"./types.js\";\nimport { tokenStructuredMatcher } from \"../../scan/tokens.js\";\n\nexport function validateLookahead(options: {\n  lookaheadStrategy: ILookaheadStrategy;\n  rules: Rule[];\n  tokenTypes: TokenType[];\n  grammarName: string;\n}): IParserDefinitionError[] {\n  const lookaheadValidationErrorMessages = options.lookaheadStrategy.validate({\n    rules: options.rules,\n    tokenTypes: options.tokenTypes,\n    grammarName: options.grammarName,\n  });\n  return map(lookaheadValidationErrorMessages, (errorMessage) => ({\n    type: ParserDefinitionErrorType.CUSTOM_LOOKAHEAD_VALIDATION,\n    ...errorMessage,\n  }));\n}\n\nexport function validateGrammar(\n  topLevels: Rule[],\n  tokenTypes: TokenType[],\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n  grammarName: string,\n): IParserDefinitionError[] {\n  const duplicateErrors: IParserDefinitionError[] = flatMap(\n    topLevels,\n    
(currTopLevel) =>\n      validateDuplicateProductions(currTopLevel, errMsgProvider),\n  );\n\n  const termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace(\n    topLevels,\n    tokenTypes,\n    errMsgProvider,\n  );\n\n  const tooManyAltsErrors = flatMap(topLevels, (curRule) =>\n    validateTooManyAlts(curRule, errMsgProvider),\n  );\n\n  const duplicateRulesError = flatMap(topLevels, (curRule) =>\n    validateRuleDoesNotAlreadyExist(\n      curRule,\n      topLevels,\n      grammarName,\n      errMsgProvider,\n    ),\n  );\n\n  return duplicateErrors.concat(\n    termsNamespaceConflictErrors,\n    tooManyAltsErrors,\n    duplicateRulesError,\n  );\n}\n\nfunction validateDuplicateProductions(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDuplicatesDefinitionError[] {\n  const collectorVisitor = new OccurrenceValidationCollector();\n  topLevelRule.accept(collectorVisitor);\n  const allRuleProductions = collectorVisitor.allProductions;\n\n  const productionGroups = groupBy(\n    allRuleProductions,\n    identifyProductionForDuplicates,\n  );\n\n  const duplicates: any = pickBy(productionGroups, (currGroup) => {\n    return currGroup.length > 1;\n  });\n\n  const errors = map(values(duplicates), (currDuplicates: any) => {\n    const firstProd: any = first(currDuplicates);\n    const msg = errMsgProvider.buildDuplicateFoundError(\n      topLevelRule,\n      currDuplicates,\n    );\n    const dslName = getProductionDslName(firstProd);\n    const defError: IParserDuplicatesDefinitionError = {\n      message: msg,\n      type: ParserDefinitionErrorType.DUPLICATE_PRODUCTIONS,\n      ruleName: topLevelRule.name,\n      dslName: dslName,\n      occurrence: firstProd.idx,\n    };\n\n    const param = getExtraProductionArgument(firstProd);\n    if (param) {\n      defError.parameter = param;\n    }\n\n    return defError;\n  });\n  return errors;\n}\n\nexport function identifyProductionForDuplicates(\n  prod: IProductionWithOccurrence,\n): string {\n  return `${getProductionDslName(prod)}_#_${\n    prod.idx\n  }_#_${getExtraProductionArgument(prod)}`;\n}\n\nfunction getExtraProductionArgument(prod: IProductionWithOccurrence): string {\n  if (prod instanceof Terminal) {\n    return prod.terminalType.name;\n  } else if (prod instanceof NonTerminal) {\n    return prod.nonTerminalName;\n  } else {\n    return \"\";\n  }\n}\n\nexport class OccurrenceValidationCollector extends GAstVisitor {\n  public allProductions: IProductionWithOccurrence[] = [];\n\n  public visitNonTerminal(subrule: NonTerminal): void {\n    this.allProductions.push(subrule);\n  }\n\n  public visitOption(option: Option): void {\n    this.allProductions.push(option);\n  }\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.allProductions.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.allProductions.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.allProductions.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.allProductions.push(many);\n  }\n\n  public visitAlternation(or: Alternation): void {\n    this.allProductions.push(or);\n  }\n\n  public visitTerminal(terminal: Terminal): void {\n    this.allProductions.push(terminal);\n  }\n}\n\nexport function validateRuleDoesNotAlreadyExist(\n  rule: Rule,\n  allRules: Rule[],\n  className: 
string,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors = [];\n  const occurrences = reduce(\n    allRules,\n    (result, curRule) => {\n      if (curRule.name === rule.name) {\n        return result + 1;\n      }\n      return result;\n    },\n    0,\n  );\n  if (occurrences > 1) {\n    const errMsg = errMsgProvider.buildDuplicateRuleNameError({\n      topLevelRule: rule,\n      grammarName: className,\n    });\n    errors.push({\n      message: errMsg,\n      type: ParserDefinitionErrorType.DUPLICATE_RULE_NAME,\n      ruleName: rule.name,\n    });\n  }\n\n  return errors;\n}\n\n// TODO: is there anyway to get only the rule names of rules inherited from the super grammars?\n// This is not part of the IGrammarErrorProvider because the validation cannot be performed on\n// The grammar structure, only at runtime.\nexport function validateRuleIsOverridden(\n  ruleName: string,\n  definedRulesNames: string[],\n  className: string,\n): IParserDefinitionError[] {\n  const errors = [];\n  let errMsg;\n\n  if (!includes(definedRulesNames, ruleName)) {\n    errMsg =\n      `Invalid rule override, rule: ->${ruleName}<- cannot be overridden in the grammar: ->${className}<-` +\n      `as it is not defined in any of the super grammars `;\n    errors.push({\n      message: errMsg,\n      type: ParserDefinitionErrorType.INVALID_RULE_OVERRIDE,\n      ruleName: ruleName,\n    });\n  }\n\n  return errors;\n}\n\nexport function validateNoLeftRecursion(\n  topRule: Rule,\n  currRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n  path: Rule[] = [],\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n  const nextNonTerminals = getFirstNoneTerminal(currRule.definition);\n  if (isEmpty(nextNonTerminals)) {\n    return [];\n  } else {\n    const ruleName = topRule.name;\n    const foundLeftRecursion = includes(nextNonTerminals, topRule);\n    if (foundLeftRecursion) {\n      errors.push({\n        message: errMsgProvider.buildLeftRecursionError({\n          topLevelRule: topRule,\n          leftRecursionPath: path,\n        }),\n        type: ParserDefinitionErrorType.LEFT_RECURSION,\n        ruleName: ruleName,\n      });\n    }\n\n    // we are only looking for cyclic paths leading back to the specific topRule\n    // other cyclic paths are ignored, we still need this difference to avoid infinite loops...\n    const validNextSteps = difference(nextNonTerminals, path.concat([topRule]));\n    const errorsFromNextSteps = flatMap(validNextSteps, (currRefRule) => {\n      const newPath = clone(path);\n      newPath.push(currRefRule);\n      return validateNoLeftRecursion(\n        topRule,\n        currRefRule,\n        errMsgProvider,\n        newPath,\n      );\n    });\n\n    return errors.concat(errorsFromNextSteps);\n  }\n}\n\nexport function getFirstNoneTerminal(definition: IProduction[]): Rule[] {\n  let result: Rule[] = [];\n  if (isEmpty(definition)) {\n    return result;\n  }\n  const firstProd = first(definition);\n\n  /* istanbul ignore else */\n  if (firstProd instanceof NonTerminal) {\n    result.push(firstProd.referencedRule);\n  } else if (\n    firstProd instanceof AlternativeGAST ||\n    firstProd instanceof Option ||\n    firstProd instanceof RepetitionMandatory ||\n    firstProd instanceof RepetitionMandatoryWithSeparator ||\n    firstProd instanceof RepetitionWithSeparator ||\n    firstProd instanceof Repetition\n  ) {\n    result = result.concat(\n      
getFirstNoneTerminal(firstProd.definition),\n    );\n  } else if (firstProd instanceof Alternation) {\n    // each sub definition in alternation is a FLAT\n    result = flatten(\n      map(firstProd.definition, (currSubDef) =>\n        getFirstNoneTerminal((currSubDef).definition),\n      ),\n    );\n  } else if (firstProd instanceof Terminal) {\n    // nothing to see, move along\n  } else {\n    throw Error(\"non exhaustive match\");\n  }\n\n  const isFirstOptional = isOptionalProd(firstProd);\n  const hasMore = definition.length > 1;\n  if (isFirstOptional && hasMore) {\n    const rest = drop(definition);\n    return result.concat(getFirstNoneTerminal(rest));\n  } else {\n    return result;\n  }\n}\n\nclass OrCollector extends GAstVisitor {\n  public alternations: Alternation[] = [];\n\n  public visitAlternation(node: Alternation): void {\n    this.alternations.push(node);\n  }\n}\n\nexport function validateEmptyOrAlternative(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserEmptyAlternativeDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  const ors = orCollector.alternations;\n\n  const errors = flatMap(\n    ors,\n    (currOr) => {\n      const exceptLast = dropRight(currOr.definition);\n      return flatMap(exceptLast, (currAlternative, currAltIdx) => {\n        const possibleFirstInAlt = nextPossibleTokensAfter(\n          [currAlternative],\n          [],\n          tokenStructuredMatcher,\n          1,\n        );\n        if (isEmpty(possibleFirstInAlt)) {\n          return [\n            {\n              message: errMsgProvider.buildEmptyAlternationError({\n                topLevelRule: topLevelRule,\n                alternation: currOr,\n                emptyChoiceIdx: currAltIdx,\n              }),\n              type: ParserDefinitionErrorType.NONE_LAST_EMPTY_ALT,\n              ruleName: topLevelRule.name,\n              occurrence: currOr.idx,\n              alternative: currAltIdx + 1,\n            },\n          ];\n        } else {\n          return [];\n        }\n      });\n    },\n  );\n\n  return errors;\n}\n\nexport function validateAmbiguousAlternationAlternatives(\n  topLevelRule: Rule,\n  globalMaxLookahead: number,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  let ors = orCollector.alternations;\n\n  // New Handling of ignoring ambiguities\n  // - https://github.com/chevrotain/chevrotain/issues/869\n  ors = reject(ors, (currOr) => currOr.ignoreAmbiguities === true);\n\n  const errors = flatMap(ors, (currOr: Alternation) => {\n    const currOccurrence = currOr.idx;\n    const actualMaxLookahead = currOr.maxLookahead || globalMaxLookahead;\n    const alternatives = getLookaheadPathsForOr(\n      currOccurrence,\n      topLevelRule,\n      actualMaxLookahead,\n      currOr,\n    );\n    const altsAmbiguityErrors = checkAlternativesAmbiguities(\n      alternatives,\n      currOr,\n      topLevelRule,\n      errMsgProvider,\n    );\n    const altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities(\n      alternatives,\n      currOr,\n      topLevelRule,\n      errMsgProvider,\n    );\n\n    return altsAmbiguityErrors.concat(altsPrefixAmbiguityErrors);\n  });\n\n  return errors;\n}\n\nexport class RepetitionCollector extends GAstVisitor {\n  public allProductions: (IProductionWithOccurrence & {\n    maxLookahead?: number;\n  
})[] = [];\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.allProductions.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.allProductions.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.allProductions.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.allProductions.push(many);\n  }\n}\n\nexport function validateTooManyAlts(\n  topLevelRule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const orCollector = new OrCollector();\n  topLevelRule.accept(orCollector);\n  const ors = orCollector.alternations;\n\n  const errors = flatMap(ors, (currOr) => {\n    if (currOr.definition.length > 255) {\n      return [\n        {\n          message: errMsgProvider.buildTooManyAlternativesError({\n            topLevelRule: topLevelRule,\n            alternation: currOr,\n          }),\n          type: ParserDefinitionErrorType.TOO_MANY_ALTS,\n          ruleName: topLevelRule.name,\n          occurrence: currOr.idx,\n        },\n      ];\n    } else {\n      return [];\n    }\n  });\n\n  return errors;\n}\n\nexport function validateSomeNonEmptyLookaheadPath(\n  topLevelRules: Rule[],\n  maxLookahead: number,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n  forEach(topLevelRules, (currTopRule) => {\n    const collectorVisitor = new RepetitionCollector();\n    currTopRule.accept(collectorVisitor);\n    const allRuleProductions = collectorVisitor.allProductions;\n    forEach(allRuleProductions, (currProd) => {\n      const prodType = getProdType(currProd);\n      const actualMaxLookahead = currProd.maxLookahead || maxLookahead;\n      const currOccurrence = currProd.idx;\n      const paths = getLookaheadPathsForOptionalProd(\n        currOccurrence,\n        currTopRule,\n        prodType,\n        actualMaxLookahead,\n      );\n      const pathsInsideProduction = paths[0];\n      if (isEmpty(flatten(pathsInsideProduction))) {\n        const errMsg = errMsgProvider.buildEmptyRepetitionError({\n          topLevelRule: currTopRule,\n          repetition: currProd,\n        });\n        errors.push({\n          message: errMsg,\n          type: ParserDefinitionErrorType.NO_NON_EMPTY_LOOKAHEAD,\n          ruleName: currTopRule.name,\n        });\n      }\n    });\n  });\n\n  return errors;\n}\n\nexport interface IAmbiguityDescriptor {\n  alts: number[];\n  path: TokenType[];\n}\n\nfunction checkAlternativesAmbiguities(\n  alternatives: Alternative[],\n  alternation: Alternation,\n  rule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  const foundAmbiguousPaths: Alternative = [];\n  const identicalAmbiguities = reduce(\n    alternatives,\n    (result, currAlt, currAltIdx) => {\n      // ignore (skip) ambiguities with this alternative\n      if (alternation.definition[currAltIdx].ignoreAmbiguities === true) {\n        return result;\n      }\n\n      forEach(currAlt, (currPath) => {\n        const altsCurrPathAppearsIn = [currAltIdx];\n        forEach(alternatives, (currOtherAlt, currOtherAltIdx) => {\n          if (\n            currAltIdx !== currOtherAltIdx &&\n            containsPath(currOtherAlt, currPath) &&\n            // ignore (skip) ambiguities with 
this \"other\" alternative\n            alternation.definition[currOtherAltIdx].ignoreAmbiguities !== true\n          ) {\n            altsCurrPathAppearsIn.push(currOtherAltIdx);\n          }\n        });\n\n        if (\n          altsCurrPathAppearsIn.length > 1 &&\n          !containsPath(foundAmbiguousPaths, currPath)\n        ) {\n          foundAmbiguousPaths.push(currPath);\n          result.push({\n            alts: altsCurrPathAppearsIn,\n            path: currPath,\n          });\n        }\n      });\n      return result;\n    },\n    [] as { alts: number[]; path: TokenType[] }[],\n  );\n\n  const currErrors = map(identicalAmbiguities, (currAmbDescriptor) => {\n    const ambgIndices = map(\n      currAmbDescriptor.alts,\n      (currAltIdx) => currAltIdx + 1,\n    );\n\n    const currMessage = errMsgProvider.buildAlternationAmbiguityError({\n      topLevelRule: rule,\n      alternation: alternation,\n      ambiguityIndices: ambgIndices,\n      prefixPath: currAmbDescriptor.path,\n    });\n\n    return {\n      message: currMessage,\n      type: ParserDefinitionErrorType.AMBIGUOUS_ALTS,\n      ruleName: rule.name,\n      occurrence: alternation.idx,\n      alternatives: currAmbDescriptor.alts,\n    };\n  });\n\n  return currErrors;\n}\n\nexport function checkPrefixAlternativesAmbiguities(\n  alternatives: Alternative[],\n  alternation: Alternation,\n  rule: Rule,\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserAmbiguousAlternativesDefinitionError[] {\n  // flatten\n  const pathsAndIndices = reduce(\n    alternatives,\n    (result, currAlt, idx) => {\n      const currPathsAndIdx = map(currAlt, (currPath) => {\n        return { idx: idx, path: currPath };\n      });\n      return result.concat(currPathsAndIdx);\n    },\n    [] as { idx: number; path: TokenType[] }[],\n  );\n\n  const errors = compact(\n    flatMap(pathsAndIndices, (currPathAndIdx) => {\n      const alternativeGast = alternation.definition[currPathAndIdx.idx];\n      // ignore (skip) ambiguities with this alternative\n      if (alternativeGast.ignoreAmbiguities === true) {\n        return [];\n      }\n      const targetIdx = currPathAndIdx.idx;\n      const targetPath = currPathAndIdx.path;\n\n      const prefixAmbiguitiesPathsAndIndices = filter(\n        pathsAndIndices,\n        (searchPathAndIdx) => {\n          // prefix ambiguity can only be created from lower idx (higher priority) path\n          return (\n            // ignore (skip) ambiguities with this \"other\" alternative\n            alternation.definition[searchPathAndIdx.idx].ignoreAmbiguities !==\n              true &&\n            searchPathAndIdx.idx < targetIdx &&\n            // checking for strict prefix because identical lookaheads\n            // will be be detected using a different validation.\n            isStrictPrefixOfPath(searchPathAndIdx.path, targetPath)\n          );\n        },\n      );\n\n      const currPathPrefixErrors = map(\n        prefixAmbiguitiesPathsAndIndices,\n        (currAmbPathAndIdx): IParserAmbiguousAlternativesDefinitionError => {\n          const ambgIndices = [currAmbPathAndIdx.idx + 1, targetIdx + 1];\n          const occurrence = alternation.idx === 0 ? 
\"\" : alternation.idx;\n\n          const message = errMsgProvider.buildAlternationPrefixAmbiguityError({\n            topLevelRule: rule,\n            alternation: alternation,\n            ambiguityIndices: ambgIndices,\n            prefixPath: currAmbPathAndIdx.path,\n          });\n          return {\n            message: message,\n            type: ParserDefinitionErrorType.AMBIGUOUS_PREFIX_ALTS,\n            ruleName: rule.name,\n            occurrence: occurrence,\n            alternatives: ambgIndices,\n          };\n        },\n      );\n\n      return currPathPrefixErrors;\n    }),\n  );\n\n  return errors;\n}\n\nfunction checkTerminalAndNoneTerminalsNameSpace(\n  topLevels: Rule[],\n  tokenTypes: TokenType[],\n  errMsgProvider: IGrammarValidatorErrorMessageProvider,\n): IParserDefinitionError[] {\n  const errors: IParserDefinitionError[] = [];\n\n  const tokenNames = map(tokenTypes, (currToken) => currToken.name);\n\n  forEach(topLevels, (currRule) => {\n    const currRuleName = currRule.name;\n    if (includes(tokenNames, currRuleName)) {\n      const errMsg = errMsgProvider.buildNamespaceConflictError(currRule);\n\n      errors.push({\n        message: errMsg,\n        type: ParserDefinitionErrorType.CONFLICT_TOKENS_RULES_NAMESPACE,\n        ruleName: currRuleName,\n      });\n    }\n  });\n\n  return errors;\n}\n", "import { Rule } from \"@chevrotain/gast\";\nimport { defaults, forEach } from \"lodash-es\";\nimport { resolveGrammar as orgResolveGrammar } from \"../resolver.js\";\nimport { validateGrammar as orgValidateGrammar } from \"../checks.js\";\nimport {\n  defaultGrammarResolverErrorProvider,\n  defaultGrammarValidatorErrorProvider,\n} from \"../../errors_public.js\";\nimport { TokenType } from \"@chevrotain/types\";\nimport {\n  IGrammarResolverErrorMessageProvider,\n  IGrammarValidatorErrorMessageProvider,\n  IParserDefinitionError,\n} from \"../types.js\";\n\ntype ResolveGrammarOpts = {\n  rules: Rule[];\n  errMsgProvider?: IGrammarResolverErrorMessageProvider;\n};\nexport function resolveGrammar(\n  options: ResolveGrammarOpts,\n): IParserDefinitionError[] {\n  const actualOptions: Required = defaults(options, {\n    errMsgProvider: defaultGrammarResolverErrorProvider,\n  });\n\n  const topRulesTable: { [ruleName: string]: Rule } = {};\n  forEach(options.rules, (rule) => {\n    topRulesTable[rule.name] = rule;\n  });\n  return orgResolveGrammar(topRulesTable, actualOptions.errMsgProvider);\n}\n\nexport function validateGrammar(options: {\n  rules: Rule[];\n  tokenTypes: TokenType[];\n  grammarName: string;\n  errMsgProvider: IGrammarValidatorErrorMessageProvider;\n}): IParserDefinitionError[] {\n  options = defaults(options, {\n    errMsgProvider: defaultGrammarValidatorErrorProvider,\n  });\n\n  return orgValidateGrammar(\n    options.rules,\n    options.tokenTypes,\n    options.errMsgProvider,\n    options.grammarName,\n  );\n}\n", "import { includes } from \"lodash-es\";\nimport {\n  IRecognitionException,\n  IRecognizerContext,\n  IToken,\n} from \"@chevrotain/types\";\n\nconst MISMATCHED_TOKEN_EXCEPTION = \"MismatchedTokenException\";\nconst NO_VIABLE_ALT_EXCEPTION = \"NoViableAltException\";\nconst EARLY_EXIT_EXCEPTION = \"EarlyExitException\";\nconst NOT_ALL_INPUT_PARSED_EXCEPTION = \"NotAllInputParsedException\";\n\nconst RECOGNITION_EXCEPTION_NAMES = [\n  MISMATCHED_TOKEN_EXCEPTION,\n  NO_VIABLE_ALT_EXCEPTION,\n  EARLY_EXIT_EXCEPTION,\n  NOT_ALL_INPUT_PARSED_EXCEPTION,\n];\n\nObject.freeze(RECOGNITION_EXCEPTION_NAMES);\n\n// hacks to bypass no support for 
custom Errors in javascript/typescript\nexport function isRecognitionException(error: Error) {\n  // can't do instanceof on hacked custom js exceptions\n  return includes(RECOGNITION_EXCEPTION_NAMES, error.name);\n}\n\nabstract class RecognitionException\n  extends Error\n  implements IRecognitionException\n{\n  context: IRecognizerContext;\n  resyncedTokens: IToken[] = [];\n\n  protected constructor(\n    message: string,\n    public token: IToken,\n  ) {\n    super(message);\n\n    // fix prototype chain when typescript target is ES5\n    Object.setPrototypeOf(this, new.target.prototype);\n\n    /* istanbul ignore next - V8 workaround to remove constructor from stacktrace when typescript target is ES5 */\n    if (Error.captureStackTrace) {\n      Error.captureStackTrace(this, this.constructor);\n    }\n  }\n}\n\nexport class MismatchedTokenException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = MISMATCHED_TOKEN_EXCEPTION;\n  }\n}\n\nexport class NoViableAltException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = NO_VIABLE_ALT_EXCEPTION;\n  }\n}\n\nexport class NotAllInputParsedException extends RecognitionException {\n  constructor(message: string, token: IToken) {\n    super(message, token);\n    this.name = NOT_ALL_INPUT_PARSED_EXCEPTION;\n  }\n}\n\nexport class EarlyExitException extends RecognitionException {\n  constructor(\n    message: string,\n    token: IToken,\n    public previousToken: IToken,\n  ) {\n    super(message, token);\n    this.name = EARLY_EXIT_EXCEPTION;\n  }\n}\n", "import {\n  createTokenInstance,\n  EOF,\n  tokenMatcher,\n} from \"../../../scan/tokens_public.js\";\nimport {\n  AbstractNextTerminalAfterProductionWalker,\n  IFirstAfterRepetition,\n} from \"../../grammar/interpreter.js\";\nimport {\n  clone,\n  dropRight,\n  find,\n  flatten,\n  has,\n  includes,\n  isEmpty,\n  map,\n} from \"lodash-es\";\nimport {\n  IParserConfig,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { MismatchedTokenException } from \"../../exceptions_public.js\";\nimport { IN } from \"../../constants.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\nexport const EOF_FOLLOW_KEY: any = {};\n\nexport interface IFollowKey {\n  ruleName: string;\n  idxInCallingRule: number;\n  inRule: string;\n}\n\nexport const IN_RULE_RECOVERY_EXCEPTION = \"InRuleRecoveryException\";\n\nexport class InRuleRecoveryException extends Error {\n  constructor(message: string) {\n    super(message);\n    this.name = IN_RULE_RECOVERY_EXCEPTION;\n  }\n}\n\n/**\n * This trait is responsible for the error recovery and fault tolerant logic\n */\nexport class Recoverable {\n  recoveryEnabled: boolean;\n  firstAfterRepMap: Record;\n  resyncFollows: Record;\n\n  initRecoverable(config: IParserConfig) {\n    this.firstAfterRepMap = {};\n    this.resyncFollows = {};\n\n    this.recoveryEnabled = has(config, \"recoveryEnabled\")\n      ? 
(config.recoveryEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.recoveryEnabled;\n\n    // performance optimization, NOOP will be inlined which\n    // effectively means that this optional feature does not exist\n    // when not used.\n    if (this.recoveryEnabled) {\n      this.attemptInRepetitionRecovery = attemptInRepetitionRecovery;\n    }\n  }\n\n  public getTokenToInsert(tokType: TokenType): IToken {\n    const tokToInsert = createTokenInstance(\n      tokType,\n      \"\",\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n      NaN,\n    );\n    tokToInsert.isInsertedInRecovery = true;\n    return tokToInsert;\n  }\n\n  public canTokenTypeBeInsertedInRecovery(tokType: TokenType): boolean {\n    return true;\n  }\n\n  public canTokenTypeBeDeletedInRecovery(tokType: TokenType): boolean {\n    return true;\n  }\n\n  tryInRepetitionRecovery(\n    this: MixedInParser,\n    grammarRule: Function,\n    grammarRuleArgs: any[],\n    lookAheadFunc: () => boolean,\n    expectedTokType: TokenType,\n  ): void {\n    // TODO: can the resyncTokenType be cached?\n    const reSyncTokType = this.findReSyncTokenType();\n    const savedLexerState = this.exportLexerState();\n    const resyncedTokens: IToken[] = [];\n    let passedResyncPoint = false;\n\n    const nextTokenWithoutResync = this.LA(1);\n    let currToken = this.LA(1);\n\n    const generateErrorMessage = () => {\n      const previousToken = this.LA(0);\n      // we are preemptively re-syncing before an error has been detected, therefor we must reproduce\n      // the error that would have been thrown\n      const msg = this.errorMessageProvider.buildMismatchTokenMessage({\n        expected: expectedTokType,\n        actual: nextTokenWithoutResync,\n        previous: previousToken,\n        ruleName: this.getCurrRuleFullName(),\n      });\n      const error = new MismatchedTokenException(\n        msg,\n        nextTokenWithoutResync,\n        this.LA(0),\n      );\n      // the first token here will be the original cause of the error, this is not part of the resyncedTokens property.\n      error.resyncedTokens = dropRight(resyncedTokens);\n      this.SAVE_ERROR(error);\n    };\n\n    while (!passedResyncPoint) {\n      // re-synced to a point where we can safely exit the repetition/\n      if (this.tokenMatcher(currToken, expectedTokType)) {\n        generateErrorMessage();\n        return; // must return here to avoid reverting the inputIdx\n      } else if (lookAheadFunc.call(this)) {\n        // we skipped enough tokens so we can resync right back into another iteration of the repetition grammar rule\n        generateErrorMessage();\n        // recursive invocation in other to support multiple re-syncs in the same top level repetition grammar rule\n        grammarRule.apply(this, grammarRuleArgs);\n        return; // must return here to avoid reverting the inputIdx\n      } else if (this.tokenMatcher(currToken, reSyncTokType)) {\n        passedResyncPoint = true;\n      } else {\n        currToken = this.SKIP_TOKEN();\n        this.addToResyncTokens(currToken, resyncedTokens);\n      }\n    }\n\n    // we were unable to find a CLOSER point to resync inside the Repetition, reset the state.\n    // The parsing exception we were trying to prevent will happen in the NEXT parsing step. 
it may be handled by\n    // \"between rules\" resync recovery later in the flow.\n    this.importLexerState(savedLexerState);\n  }\n\n  shouldInRepetitionRecoveryBeTried(\n    this: MixedInParser,\n    expectTokAfterLastMatch: TokenType,\n    nextTokIdx: number,\n    notStuck: boolean | undefined,\n  ): boolean {\n    // Edge case of arriving from a MANY repetition which is stuck\n    // Attempting recovery in this case could cause an infinite loop\n    if (notStuck === false) {\n      return false;\n    }\n\n    // no need to recover, next token is what we expect...\n    if (this.tokenMatcher(this.LA(1), expectTokAfterLastMatch)) {\n      return false;\n    }\n\n    // error recovery is disabled during backtracking as it can make the parser ignore a valid grammar path\n    // and prefer some backtracking path that includes recovered errors.\n    if (this.isBackTracking()) {\n      return false;\n    }\n\n    // if we can perform inRule recovery (single token insertion or deletion) we always prefer that recovery algorithm\n    // because if it works, it makes the least amount of changes to the input stream (greedy algorithm)\n    //noinspection RedundantIfStatementJS\n    if (\n      this.canPerformInRuleRecovery(\n        expectTokAfterLastMatch,\n        this.getFollowsForInRuleRecovery(expectTokAfterLastMatch, nextTokIdx),\n      )\n    ) {\n      return false;\n    }\n\n    return true;\n  }\n\n  // Error Recovery functionality\n  getFollowsForInRuleRecovery(\n    this: MixedInParser,\n    tokType: TokenType,\n    tokIdxInRule: number,\n  ): TokenType[] {\n    const grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);\n    const follows = this.getNextPossibleTokenTypes(grammarPath);\n    return follows;\n  }\n\n  tryInRuleRecovery(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n    follows: TokenType[],\n  ): IToken {\n    if (this.canRecoverWithSingleTokenInsertion(expectedTokType, follows)) {\n      const tokToInsert = this.getTokenToInsert(expectedTokType);\n      return tokToInsert;\n    }\n\n    if (this.canRecoverWithSingleTokenDeletion(expectedTokType)) {\n      const nextTok = this.SKIP_TOKEN();\n      this.consumeToken();\n      return nextTok;\n    }\n\n    throw new InRuleRecoveryException(\"sad sad panda\");\n  }\n\n  canPerformInRuleRecovery(\n    this: MixedInParser,\n    expectedToken: TokenType,\n    follows: TokenType[],\n  ): boolean {\n    return (\n      this.canRecoverWithSingleTokenInsertion(expectedToken, follows) ||\n      this.canRecoverWithSingleTokenDeletion(expectedToken)\n    );\n  }\n\n  canRecoverWithSingleTokenInsertion(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n    follows: TokenType[],\n  ): boolean {\n    if (!this.canTokenTypeBeInsertedInRecovery(expectedTokType)) {\n      return false;\n    }\n\n    // must know the possible following tokens to perform single token insertion\n    if (isEmpty(follows)) {\n      return false;\n    }\n\n    const mismatchedTok = this.LA(1);\n    const isMisMatchedTokInFollows =\n      find(follows, (possibleFollowsTokType: TokenType) => {\n        return this.tokenMatcher(mismatchedTok, possibleFollowsTokType);\n      }) !== undefined;\n\n    return isMisMatchedTokInFollows;\n  }\n\n  canRecoverWithSingleTokenDeletion(\n    this: MixedInParser,\n    expectedTokType: TokenType,\n  ): boolean {\n    if (!this.canTokenTypeBeDeletedInRecovery(expectedTokType)) {\n      return false;\n    }\n\n    const isNextTokenWhatIsExpected = this.tokenMatcher(\n      this.LA(2),\n      
expectedTokType,\n    );\n    return isNextTokenWhatIsExpected;\n  }\n\n  isInCurrentRuleReSyncSet(\n    this: MixedInParser,\n    tokenTypeIdx: TokenType,\n  ): boolean {\n    const followKey = this.getCurrFollowKey();\n    const currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);\n    return includes(currentRuleReSyncSet, tokenTypeIdx);\n  }\n\n  findReSyncTokenType(this: MixedInParser): TokenType {\n    const allPossibleReSyncTokTypes = this.flattenFollowSet();\n    // this loop will always terminate as EOF is always in the follow stack and also always (virtually) in the input\n    let nextToken = this.LA(1);\n    let k = 2;\n    while (true) {\n      const foundMatch = find(allPossibleReSyncTokTypes, (resyncTokType) => {\n        const canMatch = tokenMatcher(nextToken, resyncTokType);\n        return canMatch;\n      });\n      if (foundMatch !== undefined) {\n        return foundMatch;\n      }\n      nextToken = this.LA(k);\n      k++;\n    }\n  }\n\n  getCurrFollowKey(this: MixedInParser): IFollowKey {\n    // the length is at least one as we always add the ruleName to the stack before invoking the rule.\n    if (this.RULE_STACK.length === 1) {\n      return EOF_FOLLOW_KEY;\n    }\n    const currRuleShortName = this.getLastExplicitRuleShortName();\n    const currRuleIdx = this.getLastExplicitRuleOccurrenceIndex();\n    const prevRuleShortName = this.getPreviousExplicitRuleShortName();\n\n    return {\n      ruleName: this.shortRuleNameToFullName(currRuleShortName),\n      idxInCallingRule: currRuleIdx,\n      inRule: this.shortRuleNameToFullName(prevRuleShortName),\n    };\n  }\n\n  buildFullFollowKeyStack(this: MixedInParser): IFollowKey[] {\n    const explicitRuleStack = this.RULE_STACK;\n    const explicitOccurrenceStack = this.RULE_OCCURRENCE_STACK;\n\n    return map(explicitRuleStack, (ruleName, idx) => {\n      if (idx === 0) {\n        return EOF_FOLLOW_KEY;\n      }\n      return {\n        ruleName: this.shortRuleNameToFullName(ruleName),\n        idxInCallingRule: explicitOccurrenceStack[idx],\n        inRule: this.shortRuleNameToFullName(explicitRuleStack[idx - 1]),\n      };\n    });\n  }\n\n  flattenFollowSet(this: MixedInParser): TokenType[] {\n    const followStack = map(this.buildFullFollowKeyStack(), (currKey) => {\n      return this.getFollowSetFromFollowKey(currKey);\n    });\n    return flatten(followStack);\n  }\n\n  getFollowSetFromFollowKey(\n    this: MixedInParser,\n    followKey: IFollowKey,\n  ): TokenType[] {\n    if (followKey === EOF_FOLLOW_KEY) {\n      return [EOF];\n    }\n\n    const followName =\n      followKey.ruleName + followKey.idxInCallingRule + IN + followKey.inRule;\n\n    return this.resyncFollows[followName];\n  }\n\n  // It does not make any sense to include a virtual EOF token in the list of resynced tokens\n  // as EOF does not really exist and thus does not contain any useful information (line/column numbers)\n  addToResyncTokens(\n    this: MixedInParser,\n    token: IToken,\n    resyncTokens: IToken[],\n  ): IToken[] {\n    if (!this.tokenMatcher(token, EOF)) {\n      resyncTokens.push(token);\n    }\n    return resyncTokens;\n  }\n\n  reSyncTo(this: MixedInParser, tokType: TokenType): IToken[] {\n    const resyncedTokens: IToken[] = [];\n    let nextTok = this.LA(1);\n    while (this.tokenMatcher(nextTok, tokType) === false) {\n      nextTok = this.SKIP_TOKEN();\n      this.addToResyncTokens(nextTok, resyncedTokens);\n    }\n    // the last token is not part of the error.\n    return dropRight(resyncedTokens);\n  
}\n\n  attemptInRepetitionRecovery(\n    this: MixedInParser,\n    prodFunc: Function,\n    args: any[],\n    lookaheadFunc: () => boolean,\n    dslMethodIdx: number,\n    prodOccurrence: number,\n    nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,\n    notStuck?: boolean,\n  ): void {\n    // by default this is a NO-OP\n    // The actual implementation is with the function(not method) below\n  }\n\n  getCurrentGrammarPath(\n    this: MixedInParser,\n    tokType: TokenType,\n    tokIdxInRule: number,\n  ): ITokenGrammarPath {\n    const pathRuleStack: string[] = this.getHumanReadableRuleStack();\n    const pathOccurrenceStack: number[] = clone(this.RULE_OCCURRENCE_STACK);\n    const grammarPath: any = {\n      ruleStack: pathRuleStack,\n      occurrenceStack: pathOccurrenceStack,\n      lastTok: tokType,\n      lastTokOccurrence: tokIdxInRule,\n    };\n\n    return grammarPath;\n  }\n  getHumanReadableRuleStack(this: MixedInParser): string[] {\n    return map(this.RULE_STACK, (currShortName) =>\n      this.shortRuleNameToFullName(currShortName),\n    );\n  }\n}\n\nexport function attemptInRepetitionRecovery(\n  this: MixedInParser,\n  prodFunc: Function,\n  args: any[],\n  lookaheadFunc: () => boolean,\n  dslMethodIdx: number,\n  prodOccurrence: number,\n  nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,\n  notStuck?: boolean,\n): void {\n  const key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);\n  let firstAfterRepInfo = this.firstAfterRepMap[key];\n  if (firstAfterRepInfo === undefined) {\n    const currRuleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[currRuleName];\n    const walker: AbstractNextTerminalAfterProductionWalker =\n      new nextToksWalker(ruleGrammar, prodOccurrence);\n    firstAfterRepInfo = walker.startWalking();\n    this.firstAfterRepMap[key] = firstAfterRepInfo;\n  }\n\n  let expectTokAfterLastMatch = firstAfterRepInfo.token;\n  let nextTokIdx = firstAfterRepInfo.occurrence;\n  const isEndOfRule = firstAfterRepInfo.isEndOfRule;\n\n  // special edge case of a TOP most repetition after which the input should END.\n  // this will force an attempt for inRule recovery in that scenario.\n  if (\n    this.RULE_STACK.length === 1 &&\n    isEndOfRule &&\n    expectTokAfterLastMatch === undefined\n  ) {\n    expectTokAfterLastMatch = EOF;\n    nextTokIdx = 1;\n  }\n\n  // We don't have anything to re-sync to...\n  // this condition was extracted from `shouldInRepetitionRecoveryBeTried` to act as a type-guard\n  if (expectTokAfterLastMatch === undefined || nextTokIdx === undefined) {\n    return;\n  }\n\n  if (\n    this.shouldInRepetitionRecoveryBeTried(\n      expectTokAfterLastMatch,\n      nextTokIdx,\n      notStuck,\n    )\n  ) {\n    // TODO: performance optimization: instead of passing the original args here, we modify\n    // the args param (or create a new one) and make sure the lookahead func is explicitly provided\n    // to avoid searching the cache for it once more.\n    this.tryInRepetitionRecovery(\n      prodFunc,\n      args,\n      lookaheadFunc,\n      expectTokAfterLastMatch,\n    );\n  }\n}\n", "// Lookahead keys are 32Bit integers in the form\n// TTTTTTTT-ZZZZZZZZZZZZ-YYYY-XXXXXXXX\n// XXXX -> Occurrence Index bitmap.\n// YYYY -> DSL Method Type bitmap.\n// ZZZZZZZZZZZZZZZ -> Rule short Index bitmap.\n// TTTTTTTTT -> alternation alternative index bitmap\n\nexport const BITS_FOR_METHOD_TYPE = 4;\nexport const BITS_FOR_OCCURRENCE_IDX = 8;\nexport const 
BITS_FOR_RULE_IDX = 12;\n// TODO: validation, this means that there may at most 2^8 --> 256 alternatives for an alternation.\nexport const BITS_FOR_ALT_IDX = 8;\n\n// short string used as part of mapping keys.\n// being short improves the performance when composing KEYS for maps out of these\n// The 5 - 8 bits (16 possible values, are reserved for the DSL method indices)\nexport const OR_IDX = 1 << BITS_FOR_OCCURRENCE_IDX;\nexport const OPTION_IDX = 2 << BITS_FOR_OCCURRENCE_IDX;\nexport const MANY_IDX = 3 << BITS_FOR_OCCURRENCE_IDX;\nexport const AT_LEAST_ONE_IDX = 4 << BITS_FOR_OCCURRENCE_IDX;\nexport const MANY_SEP_IDX = 5 << BITS_FOR_OCCURRENCE_IDX;\nexport const AT_LEAST_ONE_SEP_IDX = 6 << BITS_FOR_OCCURRENCE_IDX;\n\n// this actually returns a number, but it is always used as a string (object prop key)\nexport function getKeyForAutomaticLookahead(\n  ruleIdx: number,\n  dslMethodIdx: number,\n  occurrence: number,\n): number {\n  return occurrence | dslMethodIdx | ruleIdx;\n}\n\nconst BITS_START_FOR_ALT_IDX = 32 - BITS_FOR_ALT_IDX;\n", "import {\n  ILookaheadStrategy,\n  ILookaheadValidationError,\n  IOrAlt,\n  OptionalProductionType,\n  Rule,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { flatMap, isEmpty } from \"lodash-es\";\nimport { defaultGrammarValidatorErrorProvider } from \"../errors_public.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser/parser.js\";\nimport {\n  validateAmbiguousAlternationAlternatives,\n  validateEmptyOrAlternative,\n  validateNoLeftRecursion,\n  validateSomeNonEmptyLookaheadPath,\n} from \"./checks.js\";\nimport {\n  buildAlternativesLookAheadFunc,\n  buildLookaheadFuncForOptionalProd,\n  buildLookaheadFuncForOr,\n  buildSingleAlternativeLookaheadFunction,\n  getProdType,\n} from \"./lookahead.js\";\nimport { IParserDefinitionError } from \"./types.js\";\n\nexport class LLkLookaheadStrategy implements ILookaheadStrategy {\n  readonly maxLookahead: number;\n\n  constructor(options?: { maxLookahead?: number }) {\n    this.maxLookahead =\n      options?.maxLookahead ?? 
DEFAULT_PARSER_CONFIG.maxLookahead;\n  }\n\n  validate(options: {\n    rules: Rule[];\n    tokenTypes: TokenType[];\n    grammarName: string;\n  }): ILookaheadValidationError[] {\n    const leftRecursionErrors = this.validateNoLeftRecursion(options.rules);\n\n    if (isEmpty(leftRecursionErrors)) {\n      const emptyAltErrors = this.validateEmptyOrAlternatives(options.rules);\n      const ambiguousAltsErrors = this.validateAmbiguousAlternationAlternatives(\n        options.rules,\n        this.maxLookahead,\n      );\n      const emptyRepetitionErrors = this.validateSomeNonEmptyLookaheadPath(\n        options.rules,\n        this.maxLookahead,\n      );\n      const allErrors = [\n        ...leftRecursionErrors,\n        ...emptyAltErrors,\n        ...ambiguousAltsErrors,\n        ...emptyRepetitionErrors,\n      ];\n      return allErrors;\n    }\n    return leftRecursionErrors;\n  }\n\n  validateNoLeftRecursion(rules: Rule[]): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateNoLeftRecursion(\n        currTopRule,\n        currTopRule,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateEmptyOrAlternatives(rules: Rule[]): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateEmptyOrAlternative(\n        currTopRule,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateAmbiguousAlternationAlternatives(\n    rules: Rule[],\n    maxLookahead: number,\n  ): IParserDefinitionError[] {\n    return flatMap(rules, (currTopRule) =>\n      validateAmbiguousAlternationAlternatives(\n        currTopRule,\n        maxLookahead,\n        defaultGrammarValidatorErrorProvider,\n      ),\n    );\n  }\n\n  validateSomeNonEmptyLookaheadPath(\n    rules: Rule[],\n    maxLookahead: number,\n  ): IParserDefinitionError[] {\n    return validateSomeNonEmptyLookaheadPath(\n      rules,\n      maxLookahead,\n      defaultGrammarValidatorErrorProvider,\n    );\n  }\n\n  buildLookaheadForAlternation(options: {\n    prodOccurrence: number;\n    rule: Rule;\n    maxLookahead: number;\n    hasPredicates: boolean;\n    dynamicTokensEnabled: boolean;\n  }): (orAlts?: IOrAlt[] | undefined) => number | undefined {\n    return buildLookaheadFuncForOr(\n      options.prodOccurrence,\n      options.rule,\n      options.maxLookahead,\n      options.hasPredicates,\n      options.dynamicTokensEnabled,\n      buildAlternativesLookAheadFunc,\n    );\n  }\n\n  buildLookaheadForOptional(options: {\n    prodOccurrence: number;\n    prodType: OptionalProductionType;\n    rule: Rule;\n    maxLookahead: number;\n    dynamicTokensEnabled: boolean;\n  }): () => boolean {\n    return buildLookaheadFuncForOptionalProd(\n      options.prodOccurrence,\n      options.rule,\n      options.maxLookahead,\n      options.dynamicTokensEnabled,\n      getProdType(options.prodType),\n      buildSingleAlternativeLookaheadFunction,\n    );\n  }\n}\n", "import { forEach, has } from \"lodash-es\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\nimport {\n  ILookaheadStrategy,\n  IParserConfig,\n  OptionalProductionType,\n} from \"@chevrotain/types\";\nimport {\n  AT_LEAST_ONE_IDX,\n  AT_LEAST_ONE_SEP_IDX,\n  getKeyForAutomaticLookahead,\n  MANY_IDX,\n  MANY_SEP_IDX,\n  OPTION_IDX,\n  OR_IDX,\n} from \"../../grammar/keys.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  Alternation,\n  GAstVisitor,\n  getProductionDslName,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  
RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n} from \"@chevrotain/gast\";\nimport { LLkLookaheadStrategy } from \"../../grammar/llk_lookahead.js\";\n\n/**\n * Trait responsible for the lookahead related utilities and optimizations.\n */\nexport class LooksAhead {\n  maxLookahead: number;\n  lookAheadFuncsCache: any;\n  dynamicTokensEnabled: boolean;\n  lookaheadStrategy: ILookaheadStrategy;\n\n  initLooksAhead(config: IParserConfig) {\n    this.dynamicTokensEnabled = has(config, \"dynamicTokensEnabled\")\n      ? (config.dynamicTokensEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.dynamicTokensEnabled;\n\n    this.maxLookahead = has(config, \"maxLookahead\")\n      ? (config.maxLookahead as number) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.maxLookahead;\n\n    this.lookaheadStrategy = has(config, \"lookaheadStrategy\")\n      ? (config.lookaheadStrategy as ILookaheadStrategy) // assumes end user provides the correct config value/type\n      : new LLkLookaheadStrategy({ maxLookahead: this.maxLookahead });\n\n    this.lookAheadFuncsCache = new Map();\n  }\n\n  preComputeLookaheadFunctions(this: MixedInParser, rules: Rule[]): void {\n    forEach(rules, (currRule) => {\n      this.TRACE_INIT(`${currRule.name} Rule Lookahead`, () => {\n        const {\n          alternation,\n          repetition,\n          option,\n          repetitionMandatory,\n          repetitionMandatoryWithSeparator,\n          repetitionWithSeparator,\n        } = collectMethods(currRule);\n\n        forEach(alternation, (currProd) => {\n          const prodIdx = currProd.idx === 0 ? \"\" : currProd.idx;\n          this.TRACE_INIT(`${getProductionDslName(currProd)}${prodIdx}`, () => {\n            const laFunc = this.lookaheadStrategy.buildLookaheadForAlternation({\n              prodOccurrence: currProd.idx,\n              rule: currRule,\n              maxLookahead: currProd.maxLookahead || this.maxLookahead,\n              hasPredicates: currProd.hasPredicates,\n              dynamicTokensEnabled: this.dynamicTokensEnabled,\n            });\n\n            const key = getKeyForAutomaticLookahead(\n              this.fullRuleNameToShort[currRule.name],\n              OR_IDX,\n              currProd.idx,\n            );\n            this.setLaFuncCache(key, laFunc);\n          });\n        });\n\n        forEach(repetition, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            MANY_IDX,\n            \"Repetition\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(option, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            OPTION_IDX,\n            \"Option\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionMandatory, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            AT_LEAST_ONE_IDX,\n            \"RepetitionMandatory\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionMandatoryWithSeparator, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            AT_LEAST_ONE_SEP_IDX,\n            
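Editor's note: the `LooksAhead` trait above builds one lookahead closure per production at initialization and stores it in a `Map` keyed by the bit-packed key, so parse-time decisions reduce to a lookup plus one call. A reduced, stand-alone sketch of that precompute-and-cache shape (the key constants and the predicate body are stand-ins, not the real values):

```ts
type LookaheadFn = () => boolean;

const OPTION_IDX = 2 << 4;  // assumed DSL-method marker, pre-shifted past the occurrence bits
const ruleShort = 256 << 8; // assumed short name of the first allocated rule

const laCache = new Map<number, LookaheadFn>();

// Initialization time: compute the predicate once per production occurrence.
const key = ruleShort | OPTION_IDX | 1; // e.g. OPTION1 inside that rule
laCache.set(key, () => true /* stand-in for "the next k tokens match" */);

// Parse time: a single Map lookup decides whether to enter the production.
if (laCache.get(key)!()) {
  // enter the optional production
}
```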
\"RepetitionMandatoryWithSeparator\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n\n        forEach(repetitionWithSeparator, (currProd) => {\n          this.computeLookaheadFunc(\n            currRule,\n            currProd.idx,\n            MANY_SEP_IDX,\n            \"RepetitionWithSeparator\",\n            currProd.maxLookahead,\n            getProductionDslName(currProd),\n          );\n        });\n      });\n    });\n  }\n\n  computeLookaheadFunc(\n    this: MixedInParser,\n    rule: Rule,\n    prodOccurrence: number,\n    prodKey: number,\n    prodType: OptionalProductionType,\n    prodMaxLookahead: number | undefined,\n    dslMethodName: string,\n  ): void {\n    this.TRACE_INIT(\n      `${dslMethodName}${prodOccurrence === 0 ? \"\" : prodOccurrence}`,\n      () => {\n        const laFunc = this.lookaheadStrategy.buildLookaheadForOptional({\n          prodOccurrence,\n          rule,\n          maxLookahead: prodMaxLookahead || this.maxLookahead,\n          dynamicTokensEnabled: this.dynamicTokensEnabled,\n          prodType,\n        });\n        const key = getKeyForAutomaticLookahead(\n          this.fullRuleNameToShort[rule.name],\n          prodKey,\n          prodOccurrence,\n        );\n        this.setLaFuncCache(key, laFunc);\n      },\n    );\n  }\n\n  // this actually returns a number, but it is always used as a string (object prop key)\n  getKeyForAutomaticLookahead(\n    this: MixedInParser,\n    dslMethodIdx: number,\n    occurrence: number,\n  ): number {\n    const currRuleShortName: any = this.getLastExplicitRuleShortName();\n    return getKeyForAutomaticLookahead(\n      currRuleShortName,\n      dslMethodIdx,\n      occurrence,\n    );\n  }\n\n  getLaFuncFromCache(this: MixedInParser, key: number): Function {\n    return this.lookAheadFuncsCache.get(key);\n  }\n\n  /* istanbul ignore next */\n  setLaFuncCache(this: MixedInParser, key: number, value: Function): void {\n    this.lookAheadFuncsCache.set(key, value);\n  }\n}\n\nclass DslMethodsCollectorVisitor extends GAstVisitor {\n  public dslMethods: {\n    option: Option[];\n    alternation: Alternation[];\n    repetition: Repetition[];\n    repetitionWithSeparator: RepetitionWithSeparator[];\n    repetitionMandatory: RepetitionMandatory[];\n    repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator[];\n  } = {\n    option: [],\n    alternation: [],\n    repetition: [],\n    repetitionWithSeparator: [],\n    repetitionMandatory: [],\n    repetitionMandatoryWithSeparator: [],\n  };\n\n  reset() {\n    this.dslMethods = {\n      option: [],\n      alternation: [],\n      repetition: [],\n      repetitionWithSeparator: [],\n      repetitionMandatory: [],\n      repetitionMandatoryWithSeparator: [],\n    };\n  }\n\n  public visitOption(option: Option): void {\n    this.dslMethods.option.push(option);\n  }\n\n  public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void {\n    this.dslMethods.repetitionWithSeparator.push(manySep);\n  }\n\n  public visitRepetitionMandatory(atLeastOne: RepetitionMandatory): void {\n    this.dslMethods.repetitionMandatory.push(atLeastOne);\n  }\n\n  public visitRepetitionMandatoryWithSeparator(\n    atLeastOneSep: RepetitionMandatoryWithSeparator,\n  ): void {\n    this.dslMethods.repetitionMandatoryWithSeparator.push(atLeastOneSep);\n  }\n\n  public visitRepetition(many: Repetition): void {\n    this.dslMethods.repetition.push(many);\n  }\n\n  public visitAlternation(or: Alternation): void {\n   
 this.dslMethods.alternation.push(or);\n  }\n}\n\nconst collectorVisitor = new DslMethodsCollectorVisitor();\nexport function collectMethods(rule: Rule): {\n  option: Option[];\n  alternation: Alternation[];\n  repetition: Repetition[];\n  repetitionWithSeparator: RepetitionWithSeparator[];\n  repetitionMandatory: RepetitionMandatory[];\n  repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator[];\n} {\n  collectorVisitor.reset();\n  rule.accept(collectorVisitor);\n  const dslMethods = collectorVisitor.dslMethods;\n  // avoid uncleaned references\n  collectorVisitor.reset();\n  return dslMethods;\n}\n", "import { CstNode, CstNodeLocation, IToken } from \"@chevrotain/types\";\n\n/**\n * This nodeLocation tracking is not efficient and should only be used\n * when error recovery is enabled or the Token Vector contains virtual Tokens\n * (e.g, Python Indent/Outdent)\n * As it executes the calculation for every single terminal/nonTerminal\n * and does not rely on the fact the token vector is **sorted**\n */\nexport function setNodeLocationOnlyOffset(\n  currNodeLocation: CstNodeLocation,\n  newLocationInfo: Required>,\n): void {\n  // First (valid) update for this cst node\n  if (isNaN(currNodeLocation.startOffset) === true) {\n    // assumption1: Token location information is either NaN or a valid number\n    // assumption2: Token location information is fully valid if it exist\n    // (both start/end offsets exist and are numbers).\n    currNodeLocation.startOffset = newLocationInfo.startOffset;\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n  }\n  // Once the startOffset has been updated with a valid number it should never receive\n  // any farther updates as the Token vector is sorted.\n  // We still have to check this this condition for every new possible location info\n  // because with error recovery enabled we may encounter invalid tokens (NaN location props)\n  else if (currNodeLocation.endOffset! 
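Editor's note: `collectMethods` above reuses one module-level visitor instance and resets it both before visiting and after copying out the result, explicitly "to avoid uncleaned references". A generic, library-free sketch of that reuse-a-singleton-collector pattern (names are illustrative):

```ts
class Collector<T> {
  items: T[] = [];
  reset(): void {
    this.items = [];
  }
}

const collector = new Collector<string>();

function collect(produce: (sink: Collector<string>) => void): string[] {
  collector.reset();          // no state leaks in from a previous run
  produce(collector);
  const result = collector.items;
  collector.reset();          // the singleton drops its references to the result
  return result;
}

console.log(collect((sink) => sink.items.push("Option", "Alternation")));
// -> [ "Option", "Alternation" ]
```

The second reset reassigns a fresh array, so the returned array stays intact while the long-lived singleton no longer pins the visited objects in memory.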
< newLocationInfo.endOffset === true) {\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n  }\n}\n\n/**\n * This nodeLocation tracking is not efficient and should only be used\n * when error recovery is enabled or the Token Vector contains virtual Tokens\n * (e.g, Python Indent/Outdent)\n * As it executes the calculation for every single terminal/nonTerminal\n * and does not rely on the fact the token vector is **sorted**\n */\nexport function setNodeLocationFull(\n  currNodeLocation: CstNodeLocation,\n  newLocationInfo: CstNodeLocation,\n): void {\n  // First (valid) update for this cst node\n  if (isNaN(currNodeLocation.startOffset) === true) {\n    // assumption1: Token location information is either NaN or a valid number\n    // assumption2: Token location information is fully valid if it exist\n    // (all start/end props exist and are numbers).\n    currNodeLocation.startOffset = newLocationInfo.startOffset;\n    currNodeLocation.startColumn = newLocationInfo.startColumn;\n    currNodeLocation.startLine = newLocationInfo.startLine;\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n    currNodeLocation.endColumn = newLocationInfo.endColumn;\n    currNodeLocation.endLine = newLocationInfo.endLine;\n  }\n  // Once the start props has been updated with a valid number it should never receive\n  // any farther updates as the Token vector is sorted.\n  // We still have to check this this condition for every new possible location info\n  // because with error recovery enabled we may encounter invalid tokens (NaN location props)\n  else if (currNodeLocation.endOffset! < newLocationInfo.endOffset! === true) {\n    currNodeLocation.endOffset = newLocationInfo.endOffset;\n    currNodeLocation.endColumn = newLocationInfo.endColumn;\n    currNodeLocation.endLine = newLocationInfo.endLine;\n  }\n}\n\nexport function addTerminalToCst(\n  node: CstNode,\n  token: IToken,\n  tokenTypeName: string,\n): void {\n  if (node.children[tokenTypeName] === undefined) {\n    node.children[tokenTypeName] = [token];\n  } else {\n    node.children[tokenTypeName].push(token);\n  }\n}\n\nexport function addNoneTerminalToCst(\n  node: CstNode,\n  ruleName: string,\n  ruleResult: any,\n): void {\n  if (node.children[ruleName] === undefined) {\n    node.children[ruleName] = [ruleResult];\n  } else {\n    node.children[ruleName].push(ruleResult);\n  }\n}\n", "const NAME = \"name\";\n\nexport function defineNameProp(obj: {}, nameValue: string): void {\n  Object.defineProperty(obj, NAME, {\n    enumerable: false,\n    configurable: true,\n    writable: false,\n    value: nameValue,\n  });\n}\n", "import {\n  compact,\n  filter,\n  forEach,\n  isArray,\n  isEmpty,\n  isFunction,\n  isUndefined,\n  keys,\n  map,\n} from \"lodash-es\";\nimport { defineNameProp } from \"../../lang/lang_extensions.js\";\nimport { CstNode, ICstVisitor } from \"@chevrotain/types\";\n\nexport function defaultVisit(ctx: any, param: IN): void {\n  const childrenNames = keys(ctx);\n  const childrenNamesLength = childrenNames.length;\n  for (let i = 0; i < childrenNamesLength; i++) {\n    const currChildName = childrenNames[i];\n    const currChildArray = ctx[currChildName];\n    const currChildArrayLength = currChildArray.length;\n    for (let j = 0; j < currChildArrayLength; j++) {\n      const currChild: any = currChildArray[j];\n      // distinction between Tokens Children and CstNode children\n      if (currChild.tokenTypeIdx === undefined) {\n        this[currChild.name](currChild.children, param);\n      }\n    }\n  }\n 
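Editor's note: both location-tracking helpers above rely on `NaN` as an "unset" sentinel: the first token with valid offsets initializes the range, and later tokens only ever extend the end. A reduced stand-alone sketch of that update rule (the interface and function names are illustrative):

```ts
interface OffsetRange {
  startOffset: number;
  endOffset: number;
}

function extendRange(range: OffsetRange, tok: OffsetRange): void {
  if (Number.isNaN(range.startOffset)) {
    // First valid update: take both ends from the token.
    range.startOffset = tok.startOffset;
    range.endOffset = tok.endOffset;
  } else if (range.endOffset < tok.endOffset) {
    // Later updates only push the end forward (the token vector is sorted).
    range.endOffset = tok.endOffset;
  }
}

const loc: OffsetRange = { startOffset: NaN, endOffset: NaN };
extendRange(loc, { startOffset: 3, endOffset: 5 });
extendRange(loc, { startOffset: 7, endOffset: 9 });
console.log(loc); // { startOffset: 3, endOffset: 9 }
```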
 // defaultVisit does not support generic out param\n}\n\nexport function createBaseSemanticVisitorConstructor(\n  grammarName: string,\n  ruleNames: string[],\n): {\n  new (...args: any[]): ICstVisitor;\n} {\n  const derivedConstructor: any = function () {};\n\n  // can be overwritten according to:\n  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/\n  // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname\n  defineNameProp(derivedConstructor, grammarName + \"BaseSemantics\");\n\n  const semanticProto = {\n    visit: function (cstNode: CstNode | CstNode[], param: any) {\n      // enables writing more concise visitor methods when CstNode has only a single child\n      if (isArray(cstNode)) {\n        // A CST Node's children dictionary can never have empty arrays as values\n        // If a key is defined there will be at least one element in the corresponding value array.\n        cstNode = cstNode[0];\n      }\n\n      // enables passing optional CstNodes concisely.\n      if (isUndefined(cstNode)) {\n        return undefined;\n      }\n\n      return this[cstNode.name](cstNode.children, param);\n    },\n\n    validateVisitor: function () {\n      const semanticDefinitionErrors = validateVisitor(this, ruleNames);\n      if (!isEmpty(semanticDefinitionErrors)) {\n        const errorMessages = map(\n          semanticDefinitionErrors,\n          (currDefError) => currDefError.msg,\n        );\n        throw Error(\n          `Errors Detected in CST Visitor <${this.constructor.name}>:\\n\\t` +\n            `${errorMessages.join(\"\\n\\n\").replace(/\\n/g, \"\\n\\t\")}`,\n        );\n      }\n    },\n  };\n\n  derivedConstructor.prototype = semanticProto;\n  derivedConstructor.prototype.constructor = derivedConstructor;\n\n  derivedConstructor._RULE_NAMES = ruleNames;\n\n  return derivedConstructor;\n}\n\nexport function createBaseVisitorConstructorWithDefaults(\n  grammarName: string,\n  ruleNames: string[],\n  baseConstructor: Function,\n): {\n  new (...args: any[]): ICstVisitor;\n} {\n  const derivedConstructor: any = function () {};\n\n  // can be overwritten according to:\n  // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/\n  // name?redirectlocale=en-US&redirectslug=JavaScript%2FReference%2FGlobal_Objects%2FFunction%2Fname\n  defineNameProp(derivedConstructor, grammarName + \"BaseSemanticsWithDefaults\");\n\n  const withDefaultsProto = Object.create(baseConstructor.prototype);\n  forEach(ruleNames, (ruleName) => {\n    withDefaultsProto[ruleName] = defaultVisit;\n  });\n\n  derivedConstructor.prototype = withDefaultsProto;\n  derivedConstructor.prototype.constructor = derivedConstructor;\n\n  return derivedConstructor;\n}\n\nexport enum CstVisitorDefinitionError {\n  REDUNDANT_METHOD,\n  MISSING_METHOD,\n}\n\nexport interface IVisitorDefinitionError {\n  msg: string;\n  type: CstVisitorDefinitionError;\n  methodName: string;\n}\n\nexport function validateVisitor(\n  visitorInstance: ICstVisitor,\n  ruleNames: string[],\n): IVisitorDefinitionError[] {\n  const missingErrors = validateMissingCstMethods(visitorInstance, ruleNames);\n\n  return missingErrors;\n}\n\nexport function validateMissingCstMethods(\n  visitorInstance: ICstVisitor,\n  ruleNames: string[],\n): IVisitorDefinitionError[] {\n  const missingRuleNames = filter(ruleNames, (currRuleName) => {\n    return isFunction((visitorInstance as any)[currRuleName]) === false;\n  });\n\n  const errors: 
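Editor's note: the base-visitor constructors above are what `getBaseCstVisitorConstructor()` (defined in a later chunk of this file) hands to user code, and `validateVisitor()` runs the missing-method check defined just below. A small end-to-end sketch of that public API; the tokens, grammar, and visitor are illustrative.

```ts
import { createToken, CstParser, Lexer } from "chevrotain";

const Num = createToken({ name: "Num", pattern: /\d+/ });
const Plus = createToken({ name: "Plus", pattern: /\+/ });
const WS = createToken({ name: "WS", pattern: /\s+/, group: Lexer.SKIPPED });

class SumParser extends CstParser {
  constructor() {
    super([WS, Num, Plus]);
    this.performSelfAnalysis();
  }
  public sum = this.RULE("sum", () => {
    this.CONSUME(Num);
    this.MANY(() => {
      this.CONSUME(Plus);
      this.CONSUME1(Num);
    });
  });
}

const parser = new SumParser();
const BaseVisitor = parser.getBaseCstVisitorConstructor();

class SumVisitor extends BaseVisitor {
  constructor() {
    super();
    // Runs the missing-method validation above; throws if a grammar rule
    // has no corresponding visit method on this class.
    this.validateVisitor();
  }
  sum(ctx: any): number {
    // CST children are arrays keyed by token-type / sub-rule names.
    return ctx.Num.reduce((acc: number, tok: any) => acc + Number(tok.image), 0);
  }
}

const lexer = new Lexer([WS, Num, Plus]);
parser.input = lexer.tokenize("1 + 2 + 3").tokens;
const cst = parser.sum();
console.log(new SumVisitor().visit(cst)); // 6
```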
IVisitorDefinitionError[] = map(\n    missingRuleNames,\n    (currRuleName) => {\n      return {\n        msg: `Missing visitor method: <${currRuleName}> on ${(\n          visitorInstance.constructor.name\n        )} CST Visitor.`,\n        type: CstVisitorDefinitionError.MISSING_METHOD,\n        methodName: currRuleName,\n      };\n    },\n  );\n\n  return compact(errors);\n}\n", "import {\n  addNoneTerminalToCst,\n  addTerminalToCst,\n  setNodeLocationFull,\n  setNodeLocationOnlyOffset,\n} from \"../../cst/cst.js\";\nimport { has, isUndefined, keys, noop } from \"lodash-es\";\nimport {\n  createBaseSemanticVisitorConstructor,\n  createBaseVisitorConstructorWithDefaults,\n} from \"../../cst/cst_visitor.js\";\nimport {\n  CstNode,\n  CstNodeLocation,\n  ICstVisitor,\n  IParserConfig,\n  IToken,\n  nodeLocationTrackingOptions,\n} from \"@chevrotain/types\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * This trait is responsible for the CST building logic.\n */\nexport class TreeBuilder {\n  outputCst: boolean;\n  CST_STACK: CstNode[];\n  baseCstVisitorConstructor: Function;\n  baseCstVisitorWithDefaultsConstructor: Function;\n\n  // dynamically assigned Methods\n  setNodeLocationFromNode: (\n    nodeLocation: CstNodeLocation,\n    locationInformation: CstNodeLocation,\n  ) => void;\n  setNodeLocationFromToken: (\n    nodeLocation: CstNodeLocation,\n    locationInformation: CstNodeLocation,\n  ) => void;\n  cstPostRule: (this: MixedInParser, ruleCstNode: CstNode) => void;\n\n  setInitialNodeLocation: (cstNode: CstNode) => void;\n  nodeLocationTracking: nodeLocationTrackingOptions;\n\n  initTreeBuilder(this: MixedInParser, config: IParserConfig) {\n    this.CST_STACK = [];\n\n    // outputCst is no longer exposed/defined in the pubic API\n    this.outputCst = (config as any).outputCst;\n\n    this.nodeLocationTracking = has(config, \"nodeLocationTracking\")\n      ? 
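Editor's note: the `TreeBuilder` chunk beginning here dispatches on the `nodeLocationTracking` config value; per the regex checks it accepts "full", "onlyOffset", or "none" and throws for anything else. A minimal sketch of opting into offset-only tracking (tokens and rule are illustrative; "none", the library's documented default, skips location bookkeeping entirely):

```ts
import { createToken, CstParser } from "chevrotain";

const A = createToken({ name: "A", pattern: /a/ });

class LocatingParser extends CstParser {
  constructor() {
    // "onlyOffset" populates location.startOffset/endOffset on each CST node;
    // "full" additionally tracks start/end line and column.
    super([A], { nodeLocationTracking: "onlyOffset" });
    this.performSelfAnalysis();
  }

  public one = this.RULE("one", () => {
    this.CONSUME(A);
  });
}
```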
(config.nodeLocationTracking as nodeLocationTrackingOptions) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.nodeLocationTracking;\n\n    if (!this.outputCst) {\n      this.cstInvocationStateUpdate = noop;\n      this.cstFinallyStateUpdate = noop;\n      this.cstPostTerminal = noop;\n      this.cstPostNonTerminal = noop;\n      this.cstPostRule = noop;\n    } else {\n      if (/full/i.test(this.nodeLocationTracking)) {\n        if (this.recoveryEnabled) {\n          this.setNodeLocationFromToken = setNodeLocationFull;\n          this.setNodeLocationFromNode = setNodeLocationFull;\n          this.cstPostRule = noop;\n          this.setInitialNodeLocation = this.setInitialNodeLocationFullRecovery;\n        } else {\n          this.setNodeLocationFromToken = noop;\n          this.setNodeLocationFromNode = noop;\n          this.cstPostRule = this.cstPostRuleFull;\n          this.setInitialNodeLocation = this.setInitialNodeLocationFullRegular;\n        }\n      } else if (/onlyOffset/i.test(this.nodeLocationTracking)) {\n        if (this.recoveryEnabled) {\n          this.setNodeLocationFromToken = setNodeLocationOnlyOffset;\n          this.setNodeLocationFromNode = setNodeLocationOnlyOffset;\n          this.cstPostRule = noop;\n          this.setInitialNodeLocation =\n            this.setInitialNodeLocationOnlyOffsetRecovery;\n        } else {\n          this.setNodeLocationFromToken = noop;\n          this.setNodeLocationFromNode = noop;\n          this.cstPostRule = this.cstPostRuleOnlyOffset;\n          this.setInitialNodeLocation =\n            this.setInitialNodeLocationOnlyOffsetRegular;\n        }\n      } else if (/none/i.test(this.nodeLocationTracking)) {\n        this.setNodeLocationFromToken = noop;\n        this.setNodeLocationFromNode = noop;\n        this.cstPostRule = noop;\n        this.setInitialNodeLocation = noop;\n      } else {\n        throw Error(\n          `Invalid  config option: \"${config.nodeLocationTracking}\"`,\n        );\n      }\n    }\n  }\n\n  setInitialNodeLocationOnlyOffsetRecovery(\n    this: MixedInParser,\n    cstNode: any,\n  ): void {\n    cstNode.location = {\n      startOffset: NaN,\n      endOffset: NaN,\n    };\n  }\n\n  setInitialNodeLocationOnlyOffsetRegular(\n    this: MixedInParser,\n    cstNode: any,\n  ): void {\n    cstNode.location = {\n      // without error recovery the starting Location of a new CstNode is guaranteed\n      // To be the next Token's startOffset (for valid inputs).\n      // For invalid inputs there won't be any CSTOutput so this potential\n      // inaccuracy does not matter\n      startOffset: this.LA(1).startOffset,\n      endOffset: NaN,\n    };\n  }\n\n  setInitialNodeLocationFullRecovery(this: MixedInParser, cstNode: any): void {\n    cstNode.location = {\n      startOffset: NaN,\n      startLine: NaN,\n      startColumn: NaN,\n      endOffset: NaN,\n      endLine: NaN,\n      endColumn: NaN,\n    };\n  }\n\n  /**\n     *  @see setInitialNodeLocationOnlyOffsetRegular for explanation why this work\n\n     * @param cstNode\n     */\n  setInitialNodeLocationFullRegular(this: MixedInParser, cstNode: any): void {\n    const nextToken = this.LA(1);\n    cstNode.location = {\n      startOffset: nextToken.startOffset,\n      startLine: nextToken.startLine,\n      startColumn: nextToken.startColumn,\n      endOffset: NaN,\n      endLine: NaN,\n      endColumn: NaN,\n    };\n  }\n\n  cstInvocationStateUpdate(this: MixedInParser, fullRuleName: string): void {\n    const cstNode: 
CstNode = {\n      name: fullRuleName,\n      children: Object.create(null),\n    };\n\n    this.setInitialNodeLocation(cstNode);\n    this.CST_STACK.push(cstNode);\n  }\n\n  cstFinallyStateUpdate(this: MixedInParser): void {\n    this.CST_STACK.pop();\n  }\n\n  cstPostRuleFull(this: MixedInParser, ruleCstNode: CstNode): void {\n    // casts to `required` are safe because `cstPostRuleFull` should only be invoked when full location is enabled\n    const prevToken = this.LA(0) as Required;\n    const loc = ruleCstNode.location as Required;\n\n    // If this condition is true it means we consumed at least one Token\n    // In this CstNode.\n    if (loc.startOffset <= prevToken.startOffset === true) {\n      loc.endOffset = prevToken.endOffset;\n      loc.endLine = prevToken.endLine;\n      loc.endColumn = prevToken.endColumn;\n    }\n    // \"empty\" CstNode edge case\n    else {\n      loc.startOffset = NaN;\n      loc.startLine = NaN;\n      loc.startColumn = NaN;\n    }\n  }\n\n  cstPostRuleOnlyOffset(this: MixedInParser, ruleCstNode: CstNode): void {\n    const prevToken = this.LA(0);\n    // `location' is not null because `cstPostRuleOnlyOffset` will only be invoked when location tracking is enabled.\n    const loc = ruleCstNode.location!;\n\n    // If this condition is true it means we consumed at least one Token\n    // In this CstNode.\n    if (loc.startOffset <= prevToken.startOffset === true) {\n      loc.endOffset = prevToken.endOffset;\n    }\n    // \"empty\" CstNode edge case\n    else {\n      loc.startOffset = NaN;\n    }\n  }\n\n  cstPostTerminal(\n    this: MixedInParser,\n    key: string,\n    consumedToken: IToken,\n  ): void {\n    const rootCst = this.CST_STACK[this.CST_STACK.length - 1];\n    addTerminalToCst(rootCst, consumedToken, key);\n    // This is only used when **both** error recovery and CST Output are enabled.\n    this.setNodeLocationFromToken(rootCst.location!, consumedToken);\n  }\n\n  cstPostNonTerminal(\n    this: MixedInParser,\n    ruleCstResult: CstNode,\n    ruleName: string,\n  ): void {\n    const preCstNode = this.CST_STACK[this.CST_STACK.length - 1];\n    addNoneTerminalToCst(preCstNode, ruleName, ruleCstResult);\n    // This is only used when **both** error recovery and CST Output are enabled.\n    this.setNodeLocationFromNode(preCstNode.location!, ruleCstResult.location!);\n  }\n\n  getBaseCstVisitorConstructor(\n    this: MixedInParser,\n  ): {\n    new (...args: any[]): ICstVisitor;\n  } {\n    if (isUndefined(this.baseCstVisitorConstructor)) {\n      const newBaseCstVisitorConstructor = createBaseSemanticVisitorConstructor(\n        this.className,\n        keys(this.gastProductionsCache),\n      );\n      this.baseCstVisitorConstructor = newBaseCstVisitorConstructor;\n      return newBaseCstVisitorConstructor;\n    }\n\n    return this.baseCstVisitorConstructor;\n  }\n\n  getBaseCstVisitorConstructorWithDefaults(\n    this: MixedInParser,\n  ): {\n    new (...args: any[]): ICstVisitor;\n  } {\n    if (isUndefined(this.baseCstVisitorWithDefaultsConstructor)) {\n      const newConstructor = createBaseVisitorConstructorWithDefaults(\n        this.className,\n        keys(this.gastProductionsCache),\n        this.getBaseCstVisitorConstructor(),\n      );\n      this.baseCstVisitorWithDefaultsConstructor = newConstructor;\n      return newConstructor;\n    }\n\n    return this.baseCstVisitorWithDefaultsConstructor;\n  }\n\n  getLastExplicitRuleShortName(this: MixedInParser): number {\n    const ruleStack = this.RULE_STACK;\n    return 
ruleStack[ruleStack.length - 1];\n  }\n\n  getPreviousExplicitRuleShortName(this: MixedInParser): number {\n    const ruleStack = this.RULE_STACK;\n    return ruleStack[ruleStack.length - 2];\n  }\n\n  getLastExplicitRuleOccurrenceIndex(this: MixedInParser): number {\n    const occurrenceStack = this.RULE_OCCURRENCE_STACK;\n    return occurrenceStack[occurrenceStack.length - 1];\n  }\n}\n", "import { END_OF_FILE } from \"../parser.js\";\nimport { IToken } from \"@chevrotain/types\";\nimport { MixedInParser } from \"./parser_traits.js\";\n\n/**\n * Trait responsible abstracting over the interaction with Lexer output (Token vector).\n *\n * This could be generalized to support other kinds of lexers, e.g.\n * - Just in Time Lexing / Lexer-Less parsing.\n * - Streaming Lexer.\n */\nexport class LexerAdapter {\n  tokVector: IToken[];\n  tokVectorLength: number;\n  currIdx: number;\n\n  initLexerAdapter() {\n    this.tokVector = [];\n    this.tokVectorLength = 0;\n    this.currIdx = -1;\n  }\n\n  set input(newInput: IToken[]) {\n    // @ts-ignore - `this parameter` not supported in setters/getters\n    //   - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters\n    if (this.selfAnalysisDone !== true) {\n      throw Error(\n        `Missing  invocation at the end of the Parser's constructor.`,\n      );\n    }\n    // @ts-ignore - `this parameter` not supported in setters/getters\n    //   - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters\n    this.reset();\n    this.tokVector = newInput;\n    this.tokVectorLength = newInput.length;\n  }\n\n  get input(): IToken[] {\n    return this.tokVector;\n  }\n\n  // skips a token and returns the next token\n  SKIP_TOKEN(this: MixedInParser): IToken {\n    if (this.currIdx <= this.tokVector.length - 2) {\n      this.consumeToken();\n      return this.LA(1);\n    } else {\n      return END_OF_FILE;\n    }\n  }\n\n  // Lexer (accessing Token vector) related methods which can be overridden to implement lazy lexers\n  // or lexers dependent on parser context.\n  LA(this: MixedInParser, howMuch: number): IToken {\n    const soughtIdx = this.currIdx + howMuch;\n    if (soughtIdx < 0 || this.tokVectorLength <= soughtIdx) {\n      return END_OF_FILE;\n    } else {\n      return this.tokVector[soughtIdx];\n    }\n  }\n\n  consumeToken(this: MixedInParser) {\n    this.currIdx++;\n  }\n\n  exportLexerState(this: MixedInParser): number {\n    return this.currIdx;\n  }\n\n  importLexerState(this: MixedInParser, newState: number) {\n    this.currIdx = newState;\n  }\n\n  resetLexerState(this: MixedInParser): void {\n    this.currIdx = -1;\n  }\n\n  moveToTerminatedState(this: MixedInParser): void {\n    this.currIdx = this.tokVector.length - 1;\n  }\n\n  getLexerPosition(this: MixedInParser): number {\n    return this.exportLexerState();\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IRuleConfig,\n  ISerializedGast,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  SubruleMethodOpts,\n  TokenType,\n} from \"@chevrotain/types\";\nimport { includes, values } from \"lodash-es\";\nimport { isRecognitionException } from \"../../exceptions_public.js\";\nimport { DEFAULT_RULE_CONFIG, ParserDefinitionErrorType } from \"../parser.js\";\nimport { defaultGrammarValidatorErrorProvider } from \"../../errors_public.js\";\nimport { validateRuleIsOverridden } from \"../../grammar/checks.js\";\nimport { MixedInParser } from 
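Editor's note: the `LexerAdapter` above is the piece user code touches when it assigns a token vector to `parser.input`: the setter checks that self-analysis ran, resets parser state, and stores the vector that `LA(1)` then indexes into. A small sketch of that round trip; the tokens, grammar, and input are illustrative.

```ts
import { createToken, CstParser, Lexer } from "chevrotain";

const A = createToken({ name: "A", pattern: /a/ });
const WS = createToken({ name: "WS", pattern: /\s+/, group: Lexer.SKIPPED });
const lexer = new Lexer([WS, A]);

class AsParser extends CstParser {
  constructor() {
    super([WS, A]);
    this.performSelfAnalysis();
  }
  public as = this.RULE("as", () => {
    this.MANY(() => this.CONSUME(A));
  });
}

const parser = new AsParser();
const lexResult = lexer.tokenize("a a a");

// Goes through the `input` setter above: reset, then store the token vector.
parser.input = lexResult.tokens;
const cst = parser.as();
console.log(parser.errors.length); // 0 for valid input
```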
\"./parser_traits.js\";\nimport { Rule, serializeGrammar } from \"@chevrotain/gast\";\nimport { IParserDefinitionError } from \"../../grammar/types.js\";\nimport { ParserMethodInternal } from \"../types.js\";\n\n/**\n * This trait is responsible for implementing the public API\n * for defining Chevrotain parsers, i.e:\n * - CONSUME\n * - RULE\n * - OPTION\n * - ...\n */\nexport class RecognizerApi {\n  ACTION(this: MixedInParser, impl: () => T): T {\n    return impl.call(this);\n  }\n\n  consume(\n    this: MixedInParser,\n    idx: number,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, idx, options);\n  }\n\n  subrule(\n    this: MixedInParser,\n    idx: number,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, idx, options);\n  }\n\n  option(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, idx);\n  }\n\n  or(\n    this: MixedInParser,\n    idx: number,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): any {\n    return this.orInternal(altsOrOpts, idx);\n  }\n\n  many(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    return this.manyInternal(idx, actionORMethodDef);\n  }\n\n  atLeastOne(\n    this: MixedInParser,\n    idx: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    return this.atLeastOneInternal(idx, actionORMethodDef);\n  }\n\n  CONSUME(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 0, options);\n  }\n\n  CONSUME1(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 1, options);\n  }\n\n  CONSUME2(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 2, options);\n  }\n\n  CONSUME3(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 3, options);\n  }\n\n  CONSUME4(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 4, options);\n  }\n\n  CONSUME5(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 5, options);\n  }\n\n  CONSUME6(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 6, options);\n  }\n\n  CONSUME7(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 7, options);\n  }\n\n  CONSUME8(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 8, options);\n  }\n\n  CONSUME9(\n    this: MixedInParser,\n    tokType: TokenType,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    return this.consumeInternal(tokType, 9, options);\n  }\n\n  SUBRULE(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 0, 
options);\n  }\n\n  SUBRULE1(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 1, options);\n  }\n\n  SUBRULE2(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 2, options);\n  }\n\n  SUBRULE3(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 3, options);\n  }\n\n  SUBRULE4(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 4, options);\n  }\n\n  SUBRULE5(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 5, options);\n  }\n\n  SUBRULE6(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 6, options);\n  }\n\n  SUBRULE7(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 7, options);\n  }\n\n  SUBRULE8(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 8, options);\n  }\n\n  SUBRULE9(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    options?: SubruleMethodOpts,\n  ): R {\n    return this.subruleInternal(ruleToCall, 9, options);\n  }\n\n  OPTION(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 0);\n  }\n\n  OPTION1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 1);\n  }\n\n  OPTION2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 2);\n  }\n\n  OPTION3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 3);\n  }\n\n  OPTION4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 4);\n  }\n\n  OPTION5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 5);\n  }\n\n  OPTION6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 6);\n  }\n\n  OPTION7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 7);\n  }\n\n  OPTION8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 8);\n  }\n\n  OPTION9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): OUT | undefined {\n    return this.optionInternal(actionORMethodDef, 9);\n  }\n\n  OR(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | 
OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 0);\n  }\n\n  OR1(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 1);\n  }\n\n  OR2(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 2);\n  }\n\n  OR3(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 3);\n  }\n\n  OR4(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 4);\n  }\n\n  OR5(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 5);\n  }\n\n  OR6(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 6);\n  }\n\n  OR7(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 7);\n  }\n\n  OR8(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 8);\n  }\n\n  OR9(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n  ): T {\n    return this.orInternal(altsOrOpts, 9);\n  }\n\n  MANY(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(0, actionORMethodDef);\n  }\n\n  MANY1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(1, actionORMethodDef);\n  }\n\n  MANY2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(2, actionORMethodDef);\n  }\n\n  MANY3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(3, actionORMethodDef);\n  }\n\n  MANY4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(4, actionORMethodDef);\n  }\n\n  MANY5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(5, actionORMethodDef);\n  }\n\n  MANY6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(6, actionORMethodDef);\n  }\n\n  MANY7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(7, actionORMethodDef);\n  }\n\n  MANY8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(8, actionORMethodDef);\n  }\n\n  MANY9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    this.manyInternal(9, actionORMethodDef);\n  }\n\n  MANY_SEP(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(0, options);\n  }\n\n  MANY_SEP1(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(1, options);\n  }\n\n  MANY_SEP2(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(2, options);\n  }\n\n  MANY_SEP3(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(3, options);\n  }\n\n  MANY_SEP4(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(4, options);\n  }\n\n  MANY_SEP5(this: MixedInParser, options: ManySepMethodOpts): void {\n   
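Editor's note: `MANY_SEP` and its numbered variants above take an options object with `SEP` and `DEF`, parsing zero or more `DEF` occurrences separated by `SEP` (the separator tokens are consumed and recorded too, per the repetition logic later in this file). A minimal sketch with illustrative tokens:

```ts
import { createToken, CstParser, Lexer } from "chevrotain";

const Identifier = createToken({ name: "Identifier", pattern: /[a-zA-Z]\w*/ });
const Comma = createToken({ name: "Comma", pattern: /,/ });
const WS = createToken({ name: "WS", pattern: /\s+/, group: Lexer.SKIPPED });

class ListParser extends CstParser {
  constructor() {
    super([WS, Identifier, Comma]);
    this.performSelfAnalysis();
  }

  public idList = this.RULE("idList", () => {
    this.MANY_SEP({
      SEP: Comma,
      DEF: () => this.CONSUME(Identifier),
    });
  });
}
```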
 this.manySepFirstInternal(5, options);\n  }\n\n  MANY_SEP6(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(6, options);\n  }\n\n  MANY_SEP7(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(7, options);\n  }\n\n  MANY_SEP8(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(8, options);\n  }\n\n  MANY_SEP9(this: MixedInParser, options: ManySepMethodOpts): void {\n    this.manySepFirstInternal(9, options);\n  }\n\n  AT_LEAST_ONE(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(0, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE1(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    return this.atLeastOneInternal(1, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE2(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(2, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE3(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(3, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE4(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(4, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE5(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(5, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE6(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(6, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE7(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(7, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE8(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(8, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE9(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    this.atLeastOneInternal(9, actionORMethodDef);\n  }\n\n  AT_LEAST_ONE_SEP(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(0, options);\n  }\n\n  AT_LEAST_ONE_SEP1(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(1, options);\n  }\n\n  AT_LEAST_ONE_SEP2(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(2, options);\n  }\n\n  AT_LEAST_ONE_SEP3(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(3, options);\n  }\n\n  AT_LEAST_ONE_SEP4(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(4, options);\n  }\n\n  AT_LEAST_ONE_SEP5(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(5, options);\n  }\n\n  AT_LEAST_ONE_SEP6(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(6, options);\n  }\n\n  AT_LEAST_ONE_SEP7(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(7, 
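Editor's note: `AT_LEAST_ONE` requires at least one successful iteration, and the optional `ERR_MSG` shown being read later in this file customizes the early-exit error raised when the first iteration's lookahead fails. A compact sketch with an illustrative token:

```ts
import { createToken, CstParser } from "chevrotain";

const Flag = createToken({ name: "Flag", pattern: /--\w+/ });

class FlagsParser extends CstParser {
  constructor() {
    super([Flag]);
    this.performSelfAnalysis();
  }

  public flags = this.RULE("flags", () => {
    this.AT_LEAST_ONE({
      DEF: () => this.CONSUME(Flag),
      // Feeds the early-exit exception raised when zero iterations match.
      ERR_MSG: "at least one --flag",
    });
  });
}
```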
options);\n  }\n\n  AT_LEAST_ONE_SEP8(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(8, options);\n  }\n\n  AT_LEAST_ONE_SEP9(\n    this: MixedInParser,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    this.atLeastOneSepFirstInternal(9, options);\n  }\n\n  RULE(\n    this: MixedInParser,\n    name: string,\n    implementation: (...implArgs: any[]) => T,\n    config: IRuleConfig = DEFAULT_RULE_CONFIG,\n  ): (idxInCallingRule?: number, ...args: any[]) => T | any {\n    if (includes(this.definedRulesNames, name)) {\n      const errMsg =\n        defaultGrammarValidatorErrorProvider.buildDuplicateRuleNameError({\n          topLevelRule: name,\n          grammarName: this.className,\n        });\n\n      const error = {\n        message: errMsg,\n        type: ParserDefinitionErrorType.DUPLICATE_RULE_NAME,\n        ruleName: name,\n      };\n      this.definitionErrors.push(error);\n    }\n\n    this.definedRulesNames.push(name);\n\n    const ruleImplementation = this.defineRule(name, implementation, config);\n    (this as any)[name] = ruleImplementation;\n    return ruleImplementation;\n  }\n\n  OVERRIDE_RULE(\n    this: MixedInParser,\n    name: string,\n    impl: (...implArgs: any[]) => T,\n    config: IRuleConfig = DEFAULT_RULE_CONFIG,\n  ): (idxInCallingRule?: number, ...args: any[]) => T {\n    const ruleErrors: IParserDefinitionError[] = validateRuleIsOverridden(\n      name,\n      this.definedRulesNames,\n      this.className,\n    );\n    this.definitionErrors = this.definitionErrors.concat(ruleErrors);\n\n    const ruleImplementation = this.defineRule(name, impl, config);\n    (this as any)[name] = ruleImplementation;\n    return ruleImplementation;\n  }\n\n  BACKTRACK(\n    this: MixedInParser,\n    grammarRule: (...args: any[]) => T,\n    args?: any[],\n  ): () => boolean {\n    return function () {\n      // save org state\n      this.isBackTrackingStack.push(1);\n      const orgState = this.saveRecogState();\n      try {\n        grammarRule.apply(this, args);\n        // if no exception was thrown we have succeed parsing the rule.\n        return true;\n      } catch (e) {\n        if (isRecognitionException(e)) {\n          return false;\n        } else {\n          throw e;\n        }\n      } finally {\n        this.reloadRecogState(orgState);\n        this.isBackTrackingStack.pop();\n      }\n    };\n  }\n\n  // GAST export APIs\n  public getGAstProductions(this: MixedInParser): Record {\n    return this.gastProductionsCache;\n  }\n\n  public getSerializedGastProductions(this: MixedInParser): ISerializedGast[] {\n    return serializeGrammar(values(this.gastProductionsCache));\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IParserConfig,\n  IRuleConfig,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  ParserMethod,\n  SubruleMethodOpts,\n  TokenType,\n  TokenTypeDictionary,\n  TokenVocabulary,\n} from \"@chevrotain/types\";\nimport {\n  clone,\n  every,\n  flatten,\n  has,\n  isArray,\n  isEmpty,\n  isObject,\n  reduce,\n  uniq,\n  values,\n} from \"lodash-es\";\nimport {\n  AT_LEAST_ONE_IDX,\n  AT_LEAST_ONE_SEP_IDX,\n  BITS_FOR_METHOD_TYPE,\n  BITS_FOR_OCCURRENCE_IDX,\n  MANY_IDX,\n  MANY_SEP_IDX,\n  OPTION_IDX,\n  OR_IDX,\n} from \"../../grammar/keys.js\";\nimport {\n  isRecognitionException,\n  MismatchedTokenException,\n  NotAllInputParsedException,\n} from 
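Editor's note: `BACKTRACK` above wraps a grammar rule in a predicate that speculatively parses it, restores the saved parser state, and reports success or failure, which makes it usable as a `GATE` on an alternative that fixed lookahead cannot resolve. A sketch of that documented pattern; the tokens and rules are illustrative, and `IGNORE_AMBIGUITIES` is set because both alternatives share an unbounded `A*` prefix.

```ts
import { createToken, CstParser } from "chevrotain";

const A = createToken({ name: "A", pattern: /a/ });
const B = createToken({ name: "B", pattern: /b/ });
const C = createToken({ name: "C", pattern: /c/ });

class BtParser extends CstParser {
  constructor() {
    super([A, B, C]);
    this.performSelfAnalysis();
  }

  public start = this.RULE("start", () => {
    this.OR({
      IGNORE_AMBIGUITIES: true, // the shared A* prefix defeats LL(k) checks
      DEF: [
        {
          // Speculatively parse asThenB; on failure the saved state is reloaded
          // and the next alternative is tried.
          GATE: this.BACKTRACK(this.asThenB),
          ALT: () => this.SUBRULE(this.asThenB),
        },
        { ALT: () => this.SUBRULE(this.asThenC) },
      ],
    });
  });

  public asThenB = this.RULE("asThenB", () => {
    this.MANY(() => this.CONSUME(A));
    this.CONSUME(B);
  });

  public asThenC = this.RULE("asThenC", () => {
    this.MANY(() => this.CONSUME(A));
    this.CONSUME(C);
  });
}
```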
\"../../exceptions_public.js\";\nimport { PROD_TYPE } from \"../../grammar/lookahead.js\";\nimport {\n  AbstractNextTerminalAfterProductionWalker,\n  NextTerminalAfterAtLeastOneSepWalker,\n  NextTerminalAfterAtLeastOneWalker,\n  NextTerminalAfterManySepWalker,\n  NextTerminalAfterManyWalker,\n} from \"../../grammar/interpreter.js\";\nimport { DEFAULT_RULE_CONFIG, IParserState, TokenMatcher } from \"../parser.js\";\nimport { IN_RULE_RECOVERY_EXCEPTION } from \"./recoverable.js\";\nimport { EOF } from \"../../../scan/tokens_public.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  augmentTokenTypes,\n  isTokenType,\n  tokenStructuredMatcher,\n  tokenStructuredMatcherNoCategories,\n} from \"../../../scan/tokens.js\";\nimport { Rule } from \"@chevrotain/gast\";\nimport { ParserMethodInternal } from \"../types.js\";\n\n/**\n * This trait is responsible for the runtime parsing engine\n * Used by the official API (recognizer_api.ts)\n */\nexport class RecognizerEngine {\n  isBackTrackingStack: boolean[];\n  className: string;\n  RULE_STACK: number[];\n  RULE_OCCURRENCE_STACK: number[];\n  definedRulesNames: string[];\n  tokensMap: { [fqn: string]: TokenType };\n  gastProductionsCache: Record;\n  shortRuleNameToFull: Record;\n  fullRuleNameToShort: Record;\n  // The shortName Index must be coded \"after\" the first 8bits to enable building unique lookahead keys\n  ruleShortNameIdx: number;\n  tokenMatcher: TokenMatcher;\n  subruleIdx: number;\n\n  initRecognizerEngine(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfig,\n  ) {\n    this.className = this.constructor.name;\n    // TODO: would using an ES6 Map or plain object be faster (CST building scenario)\n    this.shortRuleNameToFull = {};\n    this.fullRuleNameToShort = {};\n    this.ruleShortNameIdx = 256;\n    this.tokenMatcher = tokenStructuredMatcherNoCategories;\n    this.subruleIdx = 0;\n\n    this.definedRulesNames = [];\n    this.tokensMap = {};\n    this.isBackTrackingStack = [];\n    this.RULE_STACK = [];\n    this.RULE_OCCURRENCE_STACK = [];\n    this.gastProductionsCache = {};\n\n    if (has(config, \"serializedGrammar\")) {\n      throw Error(\n        \"The Parser's configuration can no longer contain a  property.\\n\" +\n          \"\\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_6-0-0\\n\" +\n          \"\\tFor Further details.\",\n      );\n    }\n\n    if (isArray(tokenVocabulary)) {\n      // This only checks for Token vocabularies provided as arrays.\n      // That is good enough because the main objective is to detect users of pre-V4.0 APIs\n      // rather than all edge cases of empty Token vocabularies.\n      if (isEmpty(tokenVocabulary as any[])) {\n        throw Error(\n          \"A Token Vocabulary cannot be empty.\\n\" +\n            \"\\tNote that the first argument for the parser constructor\\n\" +\n            \"\\tis no longer a Token vector (since v4.0).\",\n        );\n      }\n\n      if (typeof (tokenVocabulary as any[])[0].startOffset === \"number\") {\n        throw Error(\n          \"The Parser constructor no longer accepts a token vector as the first argument.\\n\" +\n            \"\\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_4-0-0\\n\" +\n            \"\\tFor Further details.\",\n        );\n      }\n    }\n\n    if (isArray(tokenVocabulary)) {\n      this.tokensMap = reduce(\n        tokenVocabulary,\n        (acc, tokType: TokenType) => {\n          acc[tokType.name] = tokType;\n          return acc;\n        },\n        {} 
as { [tokenName: string]: TokenType },\n      );\n    } else if (\n      has(tokenVocabulary, \"modes\") &&\n      every(flatten(values((tokenVocabulary).modes)), isTokenType)\n    ) {\n      const allTokenTypes = flatten(values((tokenVocabulary).modes));\n      const uniqueTokens = uniq(allTokenTypes);\n      this.tokensMap = reduce(\n        uniqueTokens,\n        (acc, tokType: TokenType) => {\n          acc[tokType.name] = tokType;\n          return acc;\n        },\n        {} as { [tokenName: string]: TokenType },\n      );\n    } else if (isObject(tokenVocabulary)) {\n      this.tokensMap = clone(tokenVocabulary as TokenTypeDictionary);\n    } else {\n      throw new Error(\n        \" argument must be An Array of Token constructors,\" +\n          \" A dictionary of Token constructors or an IMultiModeLexerDefinition\",\n      );\n    }\n\n    // always add EOF to the tokenNames -> constructors map. it is useful to assure all the input has been\n    // parsed with a clear error message (\"expecting EOF but found ...\")\n    this.tokensMap[\"EOF\"] = EOF;\n\n    const allTokenTypes = has(tokenVocabulary, \"modes\")\n      ? flatten(values((tokenVocabulary).modes))\n      : values(tokenVocabulary);\n    const noTokenCategoriesUsed = every(allTokenTypes, (tokenConstructor) =>\n      isEmpty(tokenConstructor.categoryMatches),\n    );\n\n    this.tokenMatcher = noTokenCategoriesUsed\n      ? tokenStructuredMatcherNoCategories\n      : tokenStructuredMatcher;\n\n    // Because ES2015+ syntax should be supported for creating Token classes\n    // We cannot assume that the Token classes were created using the \"extendToken\" utilities\n    // Therefore we must augment the Token classes both on Lexer initialization and on Parser initialization\n    augmentTokenTypes(values(this.tokensMap));\n  }\n\n  defineRule(\n    this: MixedInParser,\n    ruleName: string,\n    impl: (...args: ARGS) => R,\n    config: IRuleConfig,\n  ): ParserMethodInternal {\n    if (this.selfAnalysisDone) {\n      throw Error(\n        `Grammar rule <${ruleName}> may not be defined after the 'performSelfAnalysis' method has been called'\\n` +\n          `Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.`,\n      );\n    }\n    const resyncEnabled: boolean = has(config, \"resyncEnabled\")\n      ? (config.resyncEnabled as boolean) // assumes end user provides the correct config value/type\n      : DEFAULT_RULE_CONFIG.resyncEnabled;\n    const recoveryValueFunc = has(config, \"recoveryValueFunc\")\n      ? 
(config.recoveryValueFunc as () => R) // assumes end user provides the correct config value/type\n      : DEFAULT_RULE_CONFIG.recoveryValueFunc;\n\n    // performance optimization: Use small integers as keys for the longer human readable \"full\" rule names.\n    // this greatly improves Map access time (as much as 8% for some performance benchmarks).\n    const shortName =\n      this.ruleShortNameIdx << (BITS_FOR_METHOD_TYPE + BITS_FOR_OCCURRENCE_IDX);\n\n    this.ruleShortNameIdx++;\n    this.shortRuleNameToFull[shortName] = ruleName;\n    this.fullRuleNameToShort[ruleName] = shortName;\n\n    let invokeRuleWithTry: ParserMethod;\n\n    // Micro optimization, only check the condition **once** on rule definition\n    // instead of **every single** rule invocation.\n    if (this.outputCst === true) {\n      invokeRuleWithTry = function invokeRuleWithTry(\n        this: MixedInParser,\n        ...args: ARGS\n      ): R {\n        try {\n          this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);\n          impl.apply(this, args);\n          const cst = this.CST_STACK[this.CST_STACK.length - 1];\n          this.cstPostRule(cst);\n          return cst as unknown as R;\n        } catch (e) {\n          return this.invokeRuleCatch(e, resyncEnabled, recoveryValueFunc) as R;\n        } finally {\n          this.ruleFinallyStateUpdate();\n        }\n      };\n    } else {\n      invokeRuleWithTry = function invokeRuleWithTryCst(\n        this: MixedInParser,\n        ...args: ARGS\n      ): R {\n        try {\n          this.ruleInvocationStateUpdate(shortName, ruleName, this.subruleIdx);\n          return impl.apply(this, args);\n        } catch (e) {\n          return this.invokeRuleCatch(e, resyncEnabled, recoveryValueFunc) as R;\n        } finally {\n          this.ruleFinallyStateUpdate();\n        }\n      };\n    }\n\n    const wrappedGrammarRule: ParserMethodInternal = Object.assign(\n      invokeRuleWithTry as any,\n      { ruleName, originalGrammarAction: impl },\n    );\n\n    return wrappedGrammarRule;\n  }\n\n  invokeRuleCatch(\n    this: MixedInParser,\n    e: Error,\n    resyncEnabledConfig: boolean,\n    recoveryValueFunc: Function,\n  ): unknown {\n    const isFirstInvokedRule = this.RULE_STACK.length === 1;\n    // note the reSync is always enabled for the first rule invocation, because we must always be able to\n    // reSync with EOF and just output some INVALID ParseTree\n    // during backtracking reSync recovery is disabled, otherwise we can't be certain the backtracking\n    // path is really the most valid one\n    const reSyncEnabled =\n      resyncEnabledConfig && !this.isBackTracking() && this.recoveryEnabled;\n\n    if (isRecognitionException(e)) {\n      const recogError: any = e;\n      if (reSyncEnabled) {\n        const reSyncTokType = this.findReSyncTokenType();\n        if (this.isInCurrentRuleReSyncSet(reSyncTokType)) {\n          recogError.resyncedTokens = this.reSyncTo(reSyncTokType);\n          if (this.outputCst) {\n            const partialCstResult: any =\n              this.CST_STACK[this.CST_STACK.length - 1];\n            partialCstResult.recoveredNode = true;\n            return partialCstResult;\n          } else {\n            return recoveryValueFunc(e);\n          }\n        } else {\n          if (this.outputCst) {\n            const partialCstResult: any =\n              this.CST_STACK[this.CST_STACK.length - 1];\n            partialCstResult.recoveredNode = true;\n            recogError.partialCstResult = 
partialCstResult;\n          }\n          // to be handled Further up the call stack\n          throw recogError;\n        }\n      } else if (isFirstInvokedRule) {\n        // otherwise a Redundant input error will be created as well and we cannot guarantee that this is indeed the case\n        this.moveToTerminatedState();\n        // the parser should never throw one of its own errors outside its flow.\n        // even if error recovery is disabled\n        return recoveryValueFunc(e);\n      } else {\n        // to be recovered Further up the call stack\n        throw recogError;\n      }\n    } else {\n      // some other Error type which we don't know how to handle (for example a built in JavaScript Error)\n      throw e;\n    }\n  }\n\n  // Implementation of parsing DSL\n  optionInternal(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n  ): OUT | undefined {\n    const key = this.getKeyForAutomaticLookahead(OPTION_IDX, occurrence);\n    return this.optionInternalLogic(actionORMethodDef, occurrence, key);\n  }\n\n  optionInternalLogic(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n    key: number,\n  ): OUT | undefined {\n    let lookAheadFunc = this.getLaFuncFromCache(key);\n    let action: GrammarAction;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookAheadFunc;\n        lookAheadFunc = () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    if (lookAheadFunc.call(this) === true) {\n      return action.call(this);\n    }\n    return undefined;\n  }\n\n  atLeastOneInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      AT_LEAST_ONE_IDX,\n      prodOccurrence,\n    );\n    return this.atLeastOneInternalLogic(\n      prodOccurrence,\n      actionORMethodDef,\n      laKey,\n    );\n  }\n\n  atLeastOneInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n    key: number,\n  ): void {\n    let lookAheadFunc = this.getLaFuncFromCache(key);\n    let action;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookAheadFunc;\n        lookAheadFunc = () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    if ((lookAheadFunc).call(this) === true) {\n      let notStuck = this.doSingleRepetition(action);\n      while (\n        (lookAheadFunc).call(this) === true &&\n        notStuck === true\n      ) {\n        notStuck = this.doSingleRepetition(action);\n      }\n    } else {\n      throw this.raiseEarlyExitException(\n        prodOccurrence,\n        PROD_TYPE.REPETITION_MANDATORY,\n        (>actionORMethodDef).ERR_MSG,\n      );\n    }\n\n    // note that while it may seem that this can cause an error because by using a recursive call to\n    // 
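Editor's note: `optionInternalLogic` above shows how a user-supplied `GATE` predicate is AND-ed with the cached lookahead function, so an optional branch is taken only when both the token lookahead and the semantic predicate succeed. A small sketch of supplying such a gate through the public `OPTION` options object; the token, rule, and the `allowExtras` flag are illustrative.

```ts
import { createToken, CstParser } from "chevrotain";

const Value = createToken({ name: "Value", pattern: /\w+/ });

class GateParser extends CstParser {
  // Toggled by embedding code; read by the GATE at parse time.
  private allowExtras = false;

  constructor() {
    super([Value]);
    this.performSelfAnalysis();
  }

  public entry = this.RULE("entry", () => {
    this.CONSUME(Value);
    this.OPTION({
      // Combined with the cached lookahead: the branch runs only if the next
      // token matches AND this predicate returns true.
      GATE: () => this.allowExtras,
      DEF: () => this.CONSUME1(Value),
    });
  });
}
```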
AT_LEAST_ONE we change the grammar to AT_LEAST_TWO, AT_LEAST_THREE ... , the possible recursive call\n    // from the tryInRepetitionRecovery(...) will only happen IFF there really are TWO/THREE/.... items.\n\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    this.attemptInRepetitionRecovery(\n      this.atLeastOneInternal,\n      [prodOccurrence, actionORMethodDef],\n      lookAheadFunc,\n      AT_LEAST_ONE_IDX,\n      prodOccurrence,\n      NextTerminalAfterAtLeastOneWalker,\n    );\n  }\n\n  atLeastOneSepFirstInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      AT_LEAST_ONE_SEP_IDX,\n      prodOccurrence,\n    );\n    this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, laKey);\n  }\n\n  atLeastOneSepFirstInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: AtLeastOneSepMethodOpts,\n    key: number,\n  ): void {\n    const action = options.DEF;\n    const separator = options.SEP;\n\n    const firstIterationLookaheadFunc = this.getLaFuncFromCache(key);\n\n    // 1st iteration\n    if (firstIterationLookaheadFunc.call(this) === true) {\n      (>action).call(this);\n\n      //  TODO: Optimization can move this function construction into \"attemptInRepetitionRecovery\"\n      //  because it is only needed in error recovery scenarios.\n      const separatorLookAheadFunc = () => {\n        return this.tokenMatcher(this.LA(1), separator);\n      };\n\n      // 2nd..nth iterations\n      while (this.tokenMatcher(this.LA(1), separator) === true) {\n        // note that this CONSUME will never enter recovery because\n        // the separatorLookAheadFunc checks that the separator really does exist.\n        this.CONSUME(separator);\n        // No need for checking infinite loop here due to consuming the separator.\n        (>action).call(this);\n      }\n\n      // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n      this.attemptInRepetitionRecovery(\n        this.repetitionSepSecondInternal,\n        [\n          prodOccurrence,\n          separator,\n          separatorLookAheadFunc,\n          action,\n          NextTerminalAfterAtLeastOneSepWalker,\n        ],\n        separatorLookAheadFunc,\n        AT_LEAST_ONE_SEP_IDX,\n        prodOccurrence,\n        NextTerminalAfterAtLeastOneSepWalker,\n      );\n    } else {\n      throw this.raiseEarlyExitException(\n        prodOccurrence,\n        PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,\n        options.ERR_MSG,\n      );\n    }\n  }\n\n  manyInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(MANY_IDX, prodOccurrence);\n    return this.manyInternalLogic(prodOccurrence, actionORMethodDef, laKey);\n  }\n\n  manyInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    key: number,\n  ) {\n    let lookaheadFunction = this.getLaFuncFromCache(key);\n    let action;\n    if (typeof actionORMethodDef !== \"function\") {\n      action = actionORMethodDef.DEF;\n      const predicate = actionORMethodDef.GATE;\n      // predicate present\n      if (predicate !== undefined) {\n        const orgLookaheadFunction = lookaheadFunction;\n        lookaheadFunction 
= () => {\n          return predicate.call(this) && orgLookaheadFunction.call(this);\n        };\n      }\n    } else {\n      action = actionORMethodDef;\n    }\n\n    let notStuck = true;\n    while (lookaheadFunction.call(this) === true && notStuck === true) {\n      notStuck = this.doSingleRepetition(action);\n    }\n\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    this.attemptInRepetitionRecovery(\n      this.manyInternal,\n      [prodOccurrence, actionORMethodDef],\n      lookaheadFunction,\n      MANY_IDX,\n      prodOccurrence,\n      NextTerminalAfterManyWalker,\n      // The notStuck parameter is only relevant when \"attemptInRepetitionRecovery\"\n      // is invoked from manyInternal, in the MANY_SEP case and AT_LEAST_ONE[_SEP]\n      // An infinite loop cannot occur as:\n      // - Either the lookahead is guaranteed to consume something (Single Token Separator)\n      // - AT_LEAST_ONE by definition is guaranteed to consume something (or error out).\n      notStuck,\n    );\n  }\n\n  manySepFirstInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: ManySepMethodOpts,\n  ): void {\n    const laKey = this.getKeyForAutomaticLookahead(\n      MANY_SEP_IDX,\n      prodOccurrence,\n    );\n    this.manySepFirstInternalLogic(prodOccurrence, options, laKey);\n  }\n\n  manySepFirstInternalLogic(\n    this: MixedInParser,\n    prodOccurrence: number,\n    options: ManySepMethodOpts,\n    key: number,\n  ): void {\n    const action = options.DEF;\n    const separator = options.SEP;\n    const firstIterationLaFunc = this.getLaFuncFromCache(key);\n\n    // 1st iteration\n    if (firstIterationLaFunc.call(this) === true) {\n      action.call(this);\n\n      const separatorLookAheadFunc = () => {\n        return this.tokenMatcher(this.LA(1), separator);\n      };\n      // 2nd..nth iterations\n      while (this.tokenMatcher(this.LA(1), separator) === true) {\n        // note that this CONSUME will never enter recovery because\n        // the separatorLookAheadFunc checks that the separator really does exist.\n        this.CONSUME(separator);\n        // No need for checking infinite loop here due to consuming the separator.\n        action.call(this);\n      }\n\n      // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n      this.attemptInRepetitionRecovery(\n        this.repetitionSepSecondInternal,\n        [\n          prodOccurrence,\n          separator,\n          separatorLookAheadFunc,\n          action,\n          NextTerminalAfterManySepWalker,\n        ],\n        separatorLookAheadFunc,\n        MANY_SEP_IDX,\n        prodOccurrence,\n        NextTerminalAfterManySepWalker,\n      );\n    }\n  }\n\n  repetitionSepSecondInternal(\n    this: MixedInParser,\n    prodOccurrence: number,\n    separator: TokenType,\n    separatorLookAheadFunc: () => boolean,\n    action: GrammarAction,\n    nextTerminalAfterWalker: typeof AbstractNextTerminalAfterProductionWalker,\n  ): void {\n    while (separatorLookAheadFunc()) {\n      // note that this CONSUME will never enter recovery because\n      // the separatorLookAheadFunc checks that the separator really does exist.\n      this.CONSUME(separator);\n      action.call(this);\n    }\n\n    // we can only arrive to this function after an error\n    // has occurred (hence the name 'second') so the following\n    // IF will always be entered, its possible to remove it...\n    // however 
it is kept to avoid confusion and be consistent.\n    // Performance optimization: \"attemptInRepetitionRecovery\" will be defined as NOOP unless recovery is enabled\n    /* istanbul ignore else */\n    this.attemptInRepetitionRecovery(\n      this.repetitionSepSecondInternal,\n      [\n        prodOccurrence,\n        separator,\n        separatorLookAheadFunc,\n        action,\n        nextTerminalAfterWalker,\n      ],\n      separatorLookAheadFunc,\n      AT_LEAST_ONE_SEP_IDX,\n      prodOccurrence,\n      nextTerminalAfterWalker,\n    );\n  }\n\n  doSingleRepetition(this: MixedInParser, action: Function): any {\n    const beforeIteration = this.getLexerPosition();\n    action.call(this);\n    const afterIteration = this.getLexerPosition();\n\n    // This boolean will indicate if this repetition progressed\n    // or if we are \"stuck\" (potential infinite loop in the repetition).\n    return afterIteration > beforeIteration;\n  }\n\n  orInternal(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n    occurrence: number,\n  ): T {\n    const laKey = this.getKeyForAutomaticLookahead(OR_IDX, occurrence);\n    const alts = isArray(altsOrOpts) ? altsOrOpts : altsOrOpts.DEF;\n\n    const laFunc = this.getLaFuncFromCache(laKey);\n    const altIdxToTake = laFunc.call(this, alts);\n    if (altIdxToTake !== undefined) {\n      const chosenAlternative: any = alts[altIdxToTake];\n      return chosenAlternative.ALT.call(this);\n    }\n    this.raiseNoAltException(\n      occurrence,\n      (altsOrOpts as OrMethodOpts).ERR_MSG,\n    );\n  }\n\n  ruleFinallyStateUpdate(this: MixedInParser): void {\n    this.RULE_STACK.pop();\n    this.RULE_OCCURRENCE_STACK.pop();\n\n    // NOOP when cst is disabled\n    this.cstFinallyStateUpdate();\n\n    if (this.RULE_STACK.length === 0 && this.isAtEndOfInput() === false) {\n      const firstRedundantTok = this.LA(1);\n      const errMsg = this.errorMessageProvider.buildNotAllInputParsedMessage({\n        firstRedundant: firstRedundantTok,\n        ruleName: this.getCurrRuleFullName(),\n      });\n      this.SAVE_ERROR(\n        new NotAllInputParsedException(errMsg, firstRedundantTok),\n      );\n    }\n  }\n\n  subruleInternal(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    idx: number,\n    options?: SubruleMethodOpts,\n  ): R {\n    let ruleResult;\n    try {\n      const args = options !== undefined ? options.ARGS : undefined;\n      this.subruleIdx = idx;\n      ruleResult = ruleToCall.apply(this, args);\n      this.cstPostNonTerminal(\n        ruleResult,\n        options !== undefined && options.LABEL !== undefined\n          ? options.LABEL\n          : ruleToCall.ruleName,\n      );\n      return ruleResult;\n    } catch (e) {\n      throw this.subruleInternalError(e, options, ruleToCall.ruleName);\n    }\n  }\n\n  subruleInternalError(\n    this: MixedInParser,\n    e: any,\n    options: SubruleMethodOpts | undefined,\n    ruleName: string,\n  ): void {\n    if (isRecognitionException(e) && e.partialCstResult !== undefined) {\n      this.cstPostNonTerminal(\n        e.partialCstResult,\n        options !== undefined && options.LABEL !== undefined\n          ? 
options.LABEL\n          : ruleName,\n      );\n\n      delete e.partialCstResult;\n    }\n    throw e;\n  }\n\n  consumeInternal(\n    this: MixedInParser,\n    tokType: TokenType,\n    idx: number,\n    options: ConsumeMethodOpts | undefined,\n  ): IToken {\n    let consumedToken!: IToken;\n    try {\n      const nextToken = this.LA(1);\n      if (this.tokenMatcher(nextToken, tokType) === true) {\n        this.consumeToken();\n        consumedToken = nextToken;\n      } else {\n        this.consumeInternalError(tokType, nextToken, options);\n      }\n    } catch (eFromConsumption) {\n      consumedToken = this.consumeInternalRecovery(\n        tokType,\n        idx,\n        eFromConsumption,\n      );\n    }\n\n    this.cstPostTerminal(\n      options !== undefined && options.LABEL !== undefined\n        ? options.LABEL\n        : tokType.name,\n      consumedToken,\n    );\n    return consumedToken;\n  }\n\n  consumeInternalError(\n    this: MixedInParser,\n    tokType: TokenType,\n    nextToken: IToken,\n    options: ConsumeMethodOpts | undefined,\n  ): void {\n    let msg;\n    const previousToken = this.LA(0);\n    if (options !== undefined && options.ERR_MSG) {\n      msg = options.ERR_MSG;\n    } else {\n      msg = this.errorMessageProvider.buildMismatchTokenMessage({\n        expected: tokType,\n        actual: nextToken,\n        previous: previousToken,\n        ruleName: this.getCurrRuleFullName(),\n      });\n    }\n    throw this.SAVE_ERROR(\n      new MismatchedTokenException(msg, nextToken, previousToken),\n    );\n  }\n\n  consumeInternalRecovery(\n    this: MixedInParser,\n    tokType: TokenType,\n    idx: number,\n    eFromConsumption: Error,\n  ): IToken {\n    // no recovery allowed during backtracking, otherwise backtracking may recover invalid syntax and accept it\n    // but the original syntax could have been parsed successfully without any backtracking + recovery\n    if (\n      this.recoveryEnabled &&\n      // TODO: more robust checking of the exception type. 
Perhaps Typescript extending expressions?\n      eFromConsumption.name === \"MismatchedTokenException\" &&\n      !this.isBackTracking()\n    ) {\n      const follows = this.getFollowsForInRuleRecovery(tokType, idx);\n      try {\n        return this.tryInRuleRecovery(tokType, follows);\n      } catch (eFromInRuleRecovery) {\n        if (eFromInRuleRecovery.name === IN_RULE_RECOVERY_EXCEPTION) {\n          // failed in RuleRecovery.\n          // throw the original error in order to trigger reSync error recovery\n          throw eFromConsumption;\n        } else {\n          throw eFromInRuleRecovery;\n        }\n      }\n    } else {\n      throw eFromConsumption;\n    }\n  }\n\n  saveRecogState(this: MixedInParser): IParserState {\n    // errors is a getter which will clone the errors array\n    const savedErrors = this.errors;\n    const savedRuleStack = clone(this.RULE_STACK);\n    return {\n      errors: savedErrors,\n      lexerState: this.exportLexerState(),\n      RULE_STACK: savedRuleStack,\n      CST_STACK: this.CST_STACK,\n    };\n  }\n\n  reloadRecogState(this: MixedInParser, newState: IParserState) {\n    this.errors = newState.errors;\n    this.importLexerState(newState.lexerState);\n    this.RULE_STACK = newState.RULE_STACK;\n  }\n\n  ruleInvocationStateUpdate(\n    this: MixedInParser,\n    shortName: number,\n    fullName: string,\n    idxInCallingRule: number,\n  ): void {\n    this.RULE_OCCURRENCE_STACK.push(idxInCallingRule);\n    this.RULE_STACK.push(shortName);\n    // NOOP when cst is disabled\n    this.cstInvocationStateUpdate(fullName);\n  }\n\n  isBackTracking(this: MixedInParser): boolean {\n    return this.isBackTrackingStack.length !== 0;\n  }\n\n  getCurrRuleFullName(this: MixedInParser): string {\n    const shortName = this.getLastExplicitRuleShortName();\n    return this.shortRuleNameToFull[shortName];\n  }\n\n  shortRuleNameToFullName(this: MixedInParser, shortName: number) {\n    return this.shortRuleNameToFull[shortName];\n  }\n\n  public isAtEndOfInput(this: MixedInParser): boolean {\n    return this.tokenMatcher(this.LA(1), EOF);\n  }\n\n  public reset(this: MixedInParser): void {\n    this.resetLexerState();\n    this.subruleIdx = 0;\n    this.isBackTrackingStack = [];\n    this.errors = [];\n    this.RULE_STACK = [];\n    // TODO: extract a specific reset for TreeBuilder trait\n    this.CST_STACK = [];\n    this.RULE_OCCURRENCE_STACK = [];\n  }\n}\n", "import {\n  IParserConfig,\n  IParserErrorMessageProvider,\n  IRecognitionException,\n} from \"@chevrotain/types\";\nimport {\n  EarlyExitException,\n  isRecognitionException,\n  NoViableAltException,\n} from \"../../exceptions_public.js\";\nimport { clone, has } from \"lodash-es\";\nimport {\n  getLookaheadPathsForOptionalProd,\n  getLookaheadPathsForOr,\n  PROD_TYPE,\n} from \"../../grammar/lookahead.js\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * Trait responsible for runtime parsing errors.\n */\nexport class ErrorHandler {\n  _errors: IRecognitionException[];\n  errorMessageProvider: IParserErrorMessageProvider;\n\n  initErrorHandler(config: IParserConfig) {\n    this._errors = [];\n    this.errorMessageProvider = has(config, \"errorMessageProvider\")\n      ? 
(config.errorMessageProvider as IParserErrorMessageProvider) // assumes end user provides the correct config value/type\n      : DEFAULT_PARSER_CONFIG.errorMessageProvider;\n  }\n\n  SAVE_ERROR(\n    this: MixedInParser,\n    error: IRecognitionException,\n  ): IRecognitionException {\n    if (isRecognitionException(error)) {\n      error.context = {\n        ruleStack: this.getHumanReadableRuleStack(),\n        ruleOccurrenceStack: clone(this.RULE_OCCURRENCE_STACK),\n      };\n      this._errors.push(error);\n      return error;\n    } else {\n      throw Error(\n        \"Trying to save an Error which is not a RecognitionException\",\n      );\n    }\n  }\n\n  get errors(): IRecognitionException[] {\n    return clone(this._errors);\n  }\n\n  set errors(newErrors: IRecognitionException[]) {\n    this._errors = newErrors;\n  }\n\n  // TODO: consider caching the error message computed information\n  raiseEarlyExitException(\n    this: MixedInParser,\n    occurrence: number,\n    prodType: PROD_TYPE,\n    userDefinedErrMsg: string | undefined,\n  ): never {\n    const ruleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[ruleName];\n    const lookAheadPathsPerAlternative = getLookaheadPathsForOptionalProd(\n      occurrence,\n      ruleGrammar,\n      prodType,\n      this.maxLookahead,\n    );\n    const insideProdPaths = lookAheadPathsPerAlternative[0];\n    const actualTokens = [];\n    for (let i = 1; i <= this.maxLookahead; i++) {\n      actualTokens.push(this.LA(i));\n    }\n    const msg = this.errorMessageProvider.buildEarlyExitMessage({\n      expectedIterationPaths: insideProdPaths,\n      actual: actualTokens,\n      previous: this.LA(0),\n      customUserDescription: userDefinedErrMsg,\n      ruleName: ruleName,\n    });\n\n    throw this.SAVE_ERROR(new EarlyExitException(msg, this.LA(1), this.LA(0)));\n  }\n\n  // TODO: consider caching the error message computed information\n  raiseNoAltException(\n    this: MixedInParser,\n    occurrence: number,\n    errMsgTypes: string | undefined,\n  ): never {\n    const ruleName = this.getCurrRuleFullName();\n    const ruleGrammar = this.getGAstProductions()[ruleName];\n    // TODO: getLookaheadPathsForOr can be slow for large enough maxLookahead and certain grammars, consider caching ?\n    const lookAheadPathsPerAlternative = getLookaheadPathsForOr(\n      occurrence,\n      ruleGrammar,\n      this.maxLookahead,\n    );\n\n    const actualTokens = [];\n    for (let i = 1; i <= this.maxLookahead; i++) {\n      actualTokens.push(this.LA(i));\n    }\n    const previousToken = this.LA(0);\n\n    const errMsg = this.errorMessageProvider.buildNoViableAltMessage({\n      expectedPathsPerAlt: lookAheadPathsPerAlternative,\n      actual: actualTokens,\n      previous: previousToken,\n      customUserDescription: errMsgTypes,\n      ruleName: this.getCurrRuleFullName(),\n    });\n\n    throw this.SAVE_ERROR(\n      new NoViableAltException(errMsg, this.LA(1), previousToken),\n    );\n  }\n}\n", "import {\n  ISyntacticContentAssistPath,\n  IToken,\n  ITokenGrammarPath,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  NextAfterTokenWalker,\n  nextPossibleTokensAfter,\n} from \"../../grammar/interpreter.js\";\nimport { first, isUndefined } from \"lodash-es\";\nimport { MixedInParser } from \"./parser_traits.js\";\n\nexport class ContentAssist {\n  initContentAssist() {}\n\n  public computeContentAssist(\n    this: MixedInParser,\n    startRuleName: string,\n    precedingInput: IToken[],\n  ): 
ISyntacticContentAssistPath[] {\n    const startRuleGast = this.gastProductionsCache[startRuleName];\n\n    if (isUndefined(startRuleGast)) {\n      throw Error(`Rule ->${startRuleName}<- does not exist in this grammar.`);\n    }\n\n    return nextPossibleTokensAfter(\n      [startRuleGast],\n      precedingInput,\n      this.tokenMatcher,\n      this.maxLookahead,\n    );\n  }\n\n  // TODO: should this be a member method or a utility? it does not have any state or usage of 'this'...\n  // TODO: should this be more explicitly part of the public API?\n  public getNextPossibleTokenTypes(\n    this: MixedInParser,\n    grammarPath: ITokenGrammarPath,\n  ): TokenType[] {\n    const topRuleName = first(grammarPath.ruleStack)!;\n    const gastProductions = this.getGAstProductions();\n    const topProduction = gastProductions[topRuleName];\n    const nextPossibleTokenTypes = new NextAfterTokenWalker(\n      topProduction,\n      grammarPath,\n    ).startWalking();\n    return nextPossibleTokenTypes;\n  }\n}\n", "import {\n  AtLeastOneSepMethodOpts,\n  ConsumeMethodOpts,\n  CstNode,\n  DSLMethodOpts,\n  DSLMethodOptsWithErr,\n  GrammarAction,\n  IOrAlt,\n  IParserConfig,\n  IProduction,\n  IToken,\n  ManySepMethodOpts,\n  OrMethodOpts,\n  SubruleMethodOpts,\n  TokenType,\n} from \"@chevrotain/types\";\nimport {\n  forEach,\n  has,\n  isArray,\n  isFunction,\n  last as peek,\n  some,\n} from \"lodash-es\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport {\n  Alternation,\n  Alternative,\n  NonTerminal,\n  Option,\n  Repetition,\n  RepetitionMandatory,\n  RepetitionMandatoryWithSeparator,\n  RepetitionWithSeparator,\n  Rule,\n  Terminal,\n} from \"@chevrotain/gast\";\nimport { Lexer } from \"../../../scan/lexer_public.js\";\nimport {\n  augmentTokenTypes,\n  hasShortKeyProperty,\n} from \"../../../scan/tokens.js\";\nimport {\n  createToken,\n  createTokenInstance,\n} from \"../../../scan/tokens_public.js\";\nimport { END_OF_FILE } from \"../parser.js\";\nimport { BITS_FOR_OCCURRENCE_IDX } from \"../../grammar/keys.js\";\nimport { ParserMethodInternal } from \"../types.js\";\n\ntype ProdWithDef = IProduction & { definition?: IProduction[] };\nconst RECORDING_NULL_OBJECT = {\n  description: \"This Object indicates the Parser is during Recording Phase\",\n};\nObject.freeze(RECORDING_NULL_OBJECT);\n\nconst HANDLE_SEPARATOR = true;\nconst MAX_METHOD_IDX = Math.pow(2, BITS_FOR_OCCURRENCE_IDX) - 1;\n\nconst RFT = createToken({ name: \"RECORDING_PHASE_TOKEN\", pattern: Lexer.NA });\naugmentTokenTypes([RFT]);\nconst RECORDING_PHASE_TOKEN = createTokenInstance(\n  RFT,\n  \"This IToken indicates the Parser is in Recording Phase\\n\\t\" +\n    \"\" +\n    \"See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details\",\n  // Using \"-1\" instead of NaN (as in EOF) because an actual number is less likely to\n  // cause errors if the output of LA or CONSUME would be (incorrectly) used during the recording phase.\n  -1,\n  -1,\n  -1,\n  -1,\n  -1,\n  -1,\n);\nObject.freeze(RECORDING_PHASE_TOKEN);\n\nconst RECORDING_PHASE_CSTNODE: CstNode = {\n  name:\n    \"This CSTNode indicates the Parser is in Recording Phase\\n\\t\" +\n    \"See: https://chevrotain.io/docs/guide/internals.html#grammar-recording for details\",\n  children: {},\n};\n\n/**\n * This trait handles the creation of the GAST structure for Chevrotain Grammars\n */\nexport class GastRecorder {\n  recordingProdStack: ProdWithDef[];\n  RECORDING_PHASE: boolean;\n\n  initGastRecorder(this: MixedInParser, config: 
IParserConfig): void {\n    this.recordingProdStack = [];\n    this.RECORDING_PHASE = false;\n  }\n\n  enableRecording(this: MixedInParser): void {\n    this.RECORDING_PHASE = true;\n\n    this.TRACE_INIT(\"Enable Recording\", () => {\n      /**\n       * Warning Dark Voodoo Magic upcoming!\n       * We are \"replacing\" the public parsing DSL methods API\n       * With **new** alternative implementations on the Parser **instance**\n       *\n       * So far this is the only way I've found to avoid performance regressions during parsing time.\n       * - Approx 30% performance regression was measured on Chrome 75 Canary when attempting to replace the \"internal\"\n       *   implementations directly instead.\n       */\n      for (let i = 0; i < 10; i++) {\n        const idx = i > 0 ? i : \"\";\n        this[`CONSUME${idx}` as \"CONSUME\"] = function (arg1, arg2) {\n          return this.consumeInternalRecord(arg1, i, arg2);\n        };\n        this[`SUBRULE${idx}` as \"SUBRULE\"] = function (arg1, arg2) {\n          return this.subruleInternalRecord(arg1, i, arg2) as any;\n        };\n        this[`OPTION${idx}` as \"OPTION\"] = function (arg1) {\n          return this.optionInternalRecord(arg1, i);\n        };\n        this[`OR${idx}` as \"OR\"] = function (arg1) {\n          return this.orInternalRecord(arg1, i);\n        };\n        this[`MANY${idx}` as \"MANY\"] = function (arg1) {\n          this.manyInternalRecord(i, arg1);\n        };\n        this[`MANY_SEP${idx}` as \"MANY_SEP\"] = function (arg1) {\n          this.manySepFirstInternalRecord(i, arg1);\n        };\n        this[`AT_LEAST_ONE${idx}` as \"AT_LEAST_ONE\"] = function (arg1) {\n          this.atLeastOneInternalRecord(i, arg1);\n        };\n        this[`AT_LEAST_ONE_SEP${idx}` as \"AT_LEAST_ONE_SEP\"] = function (arg1) {\n          this.atLeastOneSepFirstInternalRecord(i, arg1);\n        };\n      }\n\n      // DSL methods with the idx(suffix) as an argument\n      this[`consume`] = function (idx, arg1, arg2) {\n        return this.consumeInternalRecord(arg1, idx, arg2);\n      };\n      this[`subrule`] = function (idx, arg1, arg2) {\n        return this.subruleInternalRecord(arg1, idx, arg2) as any;\n      };\n      this[`option`] = function (idx, arg1) {\n        return this.optionInternalRecord(arg1, idx);\n      };\n      this[`or`] = function (idx, arg1) {\n        return this.orInternalRecord(arg1, idx);\n      };\n      this[`many`] = function (idx, arg1) {\n        this.manyInternalRecord(idx, arg1);\n      };\n      this[`atLeastOne`] = function (idx, arg1) {\n        this.atLeastOneInternalRecord(idx, arg1);\n      };\n\n      this.ACTION = this.ACTION_RECORD;\n      this.BACKTRACK = this.BACKTRACK_RECORD;\n      this.LA = this.LA_RECORD;\n    });\n  }\n\n  disableRecording(this: MixedInParser) {\n    this.RECORDING_PHASE = false;\n    // By deleting these **instance** properties, any future invocation\n    // will be deferred to the original methods on the **prototype** object\n    // This seems to get rid of any incorrect optimizations that V8 may\n    // do during the recording phase.\n    this.TRACE_INIT(\"Deleting Recording methods\", () => {\n      const that: any = this;\n\n      for (let i = 0; i < 10; i++) {\n        const idx = i > 0 ? 
i : \"\";\n        delete that[`CONSUME${idx}`];\n        delete that[`SUBRULE${idx}`];\n        delete that[`OPTION${idx}`];\n        delete that[`OR${idx}`];\n        delete that[`MANY${idx}`];\n        delete that[`MANY_SEP${idx}`];\n        delete that[`AT_LEAST_ONE${idx}`];\n        delete that[`AT_LEAST_ONE_SEP${idx}`];\n      }\n\n      delete that[`consume`];\n      delete that[`subrule`];\n      delete that[`option`];\n      delete that[`or`];\n      delete that[`many`];\n      delete that[`atLeastOne`];\n\n      delete that.ACTION;\n      delete that.BACKTRACK;\n      delete that.LA;\n    });\n  }\n\n  //   Parser methods are called inside an ACTION?\n  //   Maybe try/catch/finally on ACTIONS while disabling the recorders state changes?\n  // @ts-expect-error -- noop place holder\n  ACTION_RECORD(this: MixedInParser, impl: () => T): T {\n    // NO-OP during recording\n  }\n\n  // Executing backtracking logic will break our recording logic assumptions\n  BACKTRACK_RECORD(\n    grammarRule: (...args: any[]) => T,\n    args?: any[],\n  ): () => boolean {\n    return () => true;\n  }\n\n  // LA is part of the official API and may be used for custom lookahead logic\n  // by end users who may forget to wrap it in ACTION or inside a GATE\n  LA_RECORD(howMuch: number): IToken {\n    // We cannot use the RECORD_PHASE_TOKEN here because someone may depend\n    // On LA return EOF at the end of the input so an infinite loop may occur.\n    return END_OF_FILE;\n  }\n\n  topLevelRuleRecord(name: string, def: Function): Rule {\n    try {\n      const newTopLevelRule = new Rule({ definition: [], name: name });\n      newTopLevelRule.name = name;\n      this.recordingProdStack.push(newTopLevelRule);\n      def.call(this);\n      this.recordingProdStack.pop();\n      return newTopLevelRule;\n    } catch (originalError) {\n      if (originalError.KNOWN_RECORDER_ERROR !== true) {\n        try {\n          originalError.message =\n            originalError.message +\n            '\\n\\t This error was thrown during the \"grammar recording phase\" For more info see:\\n\\t' +\n            \"https://chevrotain.io/docs/guide/internals.html#grammar-recording\";\n        } catch (mutabilityError) {\n          // We may not be able to modify the original error object\n          throw originalError;\n        }\n      }\n      throw originalError;\n    }\n  }\n\n  // Implementation of parsing DSL\n  optionInternalRecord(\n    this: MixedInParser,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n    occurrence: number,\n  ): OUT {\n    return recordProd.call(this, Option, actionORMethodDef, occurrence);\n  }\n\n  atLeastOneInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOptsWithErr,\n  ): void {\n    recordProd.call(this, RepetitionMandatory, actionORMethodDef, occurrence);\n  }\n\n  atLeastOneSepFirstInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    options: AtLeastOneSepMethodOpts,\n  ): void {\n    recordProd.call(\n      this,\n      RepetitionMandatoryWithSeparator,\n      options,\n      occurrence,\n      HANDLE_SEPARATOR,\n    );\n  }\n\n  manyInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    actionORMethodDef: GrammarAction | DSLMethodOpts,\n  ): void {\n    recordProd.call(this, Repetition, actionORMethodDef, occurrence);\n  }\n\n  manySepFirstInternalRecord(\n    this: MixedInParser,\n    occurrence: number,\n    options: ManySepMethodOpts,\n  ): void {\n    recordProd.call(\n      
this,\n      RepetitionWithSeparator,\n      options,\n      occurrence,\n      HANDLE_SEPARATOR,\n    );\n  }\n\n  orInternalRecord(\n    this: MixedInParser,\n    altsOrOpts: IOrAlt[] | OrMethodOpts,\n    occurrence: number,\n  ): T {\n    return recordOrProd.call(this, altsOrOpts, occurrence);\n  }\n\n  subruleInternalRecord(\n    this: MixedInParser,\n    ruleToCall: ParserMethodInternal,\n    occurrence: number,\n    options?: SubruleMethodOpts,\n  ): R | CstNode {\n    assertMethodIdxIsValid(occurrence);\n    if (!ruleToCall || has(ruleToCall, \"ruleName\") === false) {\n      const error: any = new Error(\n        ` argument is invalid` +\n          ` expecting a Parser method reference but got: <${JSON.stringify(\n            ruleToCall,\n          )}>` +\n          `\\n inside top level rule: <${\n            (this.recordingProdStack[0]).name\n          }>`,\n      );\n      error.KNOWN_RECORDER_ERROR = true;\n      throw error;\n    }\n\n    const prevProd: any = peek(this.recordingProdStack);\n    const ruleName = ruleToCall.ruleName;\n    const newNoneTerminal = new NonTerminal({\n      idx: occurrence,\n      nonTerminalName: ruleName,\n      label: options?.LABEL,\n      // The resolving of the `referencedRule` property will be done once all the Rule's GASTs have been created\n      referencedRule: undefined,\n    });\n    prevProd.definition.push(newNoneTerminal);\n\n    return this.outputCst\n      ? RECORDING_PHASE_CSTNODE\n      : RECORDING_NULL_OBJECT;\n  }\n\n  consumeInternalRecord(\n    this: MixedInParser,\n    tokType: TokenType,\n    occurrence: number,\n    options?: ConsumeMethodOpts,\n  ): IToken {\n    assertMethodIdxIsValid(occurrence);\n    if (!hasShortKeyProperty(tokType)) {\n      const error: any = new Error(\n        ` argument is invalid` +\n          ` expecting a TokenType reference but got: <${JSON.stringify(\n            tokType,\n          )}>` +\n          `\\n inside top level rule: <${\n            (this.recordingProdStack[0]).name\n          }>`,\n      );\n      error.KNOWN_RECORDER_ERROR = true;\n      throw error;\n    }\n    const prevProd: any = peek(this.recordingProdStack);\n    const newNoneTerminal = new Terminal({\n      idx: occurrence,\n      terminalType: tokType,\n      label: options?.LABEL,\n    });\n    prevProd.definition.push(newNoneTerminal);\n\n    return RECORDING_PHASE_TOKEN;\n  }\n}\n\nfunction recordProd(\n  prodConstructor: any,\n  mainProdArg: any,\n  occurrence: number,\n  handleSep: boolean = false,\n): any {\n  assertMethodIdxIsValid(occurrence);\n  const prevProd: any = peek(this.recordingProdStack);\n  const grammarAction = isFunction(mainProdArg) ? mainProdArg : mainProdArg.DEF;\n\n  const newProd = new prodConstructor({ definition: [], idx: occurrence });\n  if (handleSep) {\n    newProd.separator = mainProdArg.SEP;\n  }\n  if (has(mainProdArg, \"MAX_LOOKAHEAD\")) {\n    newProd.maxLookahead = mainProdArg.MAX_LOOKAHEAD;\n  }\n\n  this.recordingProdStack.push(newProd);\n  grammarAction.call(this);\n  prevProd.definition.push(newProd);\n  this.recordingProdStack.pop();\n\n  return RECORDING_NULL_OBJECT;\n}\n\nfunction recordOrProd(mainProdArg: any, occurrence: number): any {\n  assertMethodIdxIsValid(occurrence);\n  const prevProd: any = peek(this.recordingProdStack);\n  // Only an array of alternatives\n  const hasOptions = isArray(mainProdArg) === false;\n  const alts: IOrAlt[] =\n    hasOptions === false ? 
mainProdArg : mainProdArg.DEF;\n\n  const newOrProd = new Alternation({\n    definition: [],\n    idx: occurrence,\n    ignoreAmbiguities: hasOptions && mainProdArg.IGNORE_AMBIGUITIES === true,\n  });\n  if (has(mainProdArg, \"MAX_LOOKAHEAD\")) {\n    newOrProd.maxLookahead = mainProdArg.MAX_LOOKAHEAD;\n  }\n\n  const hasPredicates = some(alts, (currAlt: any) => isFunction(currAlt.GATE));\n  newOrProd.hasPredicates = hasPredicates;\n\n  prevProd.definition.push(newOrProd);\n\n  forEach(alts, (currAlt) => {\n    const currAltFlat = new Alternative({ definition: [] });\n    newOrProd.definition.push(currAltFlat);\n    if (has(currAlt, \"IGNORE_AMBIGUITIES\")) {\n      currAltFlat.ignoreAmbiguities = currAlt.IGNORE_AMBIGUITIES as boolean; // assumes end user provides the correct config value/type\n    }\n    // **implicit** ignoreAmbiguities due to usage of gate\n    else if (has(currAlt, \"GATE\")) {\n      currAltFlat.ignoreAmbiguities = true;\n    }\n    this.recordingProdStack.push(currAltFlat);\n    currAlt.ALT.call(this);\n    this.recordingProdStack.pop();\n  });\n  return RECORDING_NULL_OBJECT;\n}\n\nfunction getIdxSuffix(idx: number): string {\n  return idx === 0 ? \"\" : `${idx}`;\n}\n\nfunction assertMethodIdxIsValid(idx: number): void {\n  if (idx < 0 || idx > MAX_METHOD_IDX) {\n    const error: any = new Error(\n      // The stack trace will contain all the needed details\n      `Invalid DSL Method idx value: <${idx}>\\n\\t` +\n        `Idx value must be a none negative value smaller than ${\n          MAX_METHOD_IDX + 1\n        }`,\n    );\n    error.KNOWN_RECORDER_ERROR = true;\n    throw error;\n  }\n}\n", "import { IParserConfig } from \"@chevrotain/types\";\nimport { has } from \"lodash-es\";\nimport { timer } from \"@chevrotain/utils\";\nimport { MixedInParser } from \"./parser_traits.js\";\nimport { DEFAULT_PARSER_CONFIG } from \"../parser.js\";\n\n/**\n * Trait responsible for runtime parsing errors.\n */\nexport class PerformanceTracer {\n  traceInitPerf: boolean | number;\n  traceInitMaxIdent: number;\n  traceInitIndent: number;\n\n  initPerformanceTracer(config: IParserConfig) {\n    if (has(config, \"traceInitPerf\")) {\n      const userTraceInitPerf = config.traceInitPerf;\n      const traceIsNumber = typeof userTraceInitPerf === \"number\";\n      this.traceInitMaxIdent = traceIsNumber\n        ? userTraceInitPerf\n        : Infinity;\n      this.traceInitPerf = traceIsNumber\n        ? userTraceInitPerf > 0\n        : (userTraceInitPerf as boolean); // assumes end user provides the correct config value/type\n    } else {\n      this.traceInitMaxIdent = 0;\n      this.traceInitPerf = DEFAULT_PARSER_CONFIG.traceInitPerf;\n    }\n\n    this.traceInitIndent = -1;\n  }\n\n  TRACE_INIT(this: MixedInParser, phaseDesc: string, phaseImpl: () => T): T {\n    // No need to optimize this using NOOP pattern because\n    // It is not called in a hot spot...\n    if (this.traceInitPerf === true) {\n      this.traceInitIndent++;\n      const indent = new Array(this.traceInitIndent + 1).join(\"\\t\");\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        console.log(`${indent}--> <${phaseDesc}>`);\n      }\n      const { time, value } = timer(phaseImpl);\n      /* istanbul ignore next - Difficult to reproduce specific performance behavior (>10ms) in tests */\n      const traceMethod = time > 10 ? 
console.warn : console.log;\n      if (this.traceInitIndent < this.traceInitMaxIdent) {\n        traceMethod(`${indent}<-- <${phaseDesc}> time: ${time}ms`);\n      }\n      this.traceInitIndent--;\n      return value;\n    } else {\n      return phaseImpl();\n    }\n  }\n}\n", "export function applyMixins(derivedCtor: any, baseCtors: any[]) {\n  baseCtors.forEach((baseCtor) => {\n    const baseProto = baseCtor.prototype;\n    Object.getOwnPropertyNames(baseProto).forEach((propName) => {\n      if (propName === \"constructor\") {\n        return;\n      }\n\n      const basePropDescriptor = Object.getOwnPropertyDescriptor(\n        baseProto,\n        propName,\n      );\n      // Handle Accessors\n      if (\n        basePropDescriptor &&\n        (basePropDescriptor.get || basePropDescriptor.set)\n      ) {\n        Object.defineProperty(\n          derivedCtor.prototype,\n          propName,\n          basePropDescriptor,\n        );\n      } else {\n        derivedCtor.prototype[propName] = baseCtor.prototype[propName];\n      }\n    });\n  });\n}\n", "import { clone, forEach, has, isEmpty, map, values } from \"lodash-es\";\nimport { toFastProperties } from \"@chevrotain/utils\";\nimport { computeAllProdsFollows } from \"../grammar/follow.js\";\nimport { createTokenInstance, EOF } from \"../../scan/tokens_public.js\";\nimport {\n  defaultGrammarValidatorErrorProvider,\n  defaultParserErrorProvider,\n} from \"../errors_public.js\";\nimport {\n  resolveGrammar,\n  validateGrammar,\n} from \"../grammar/gast/gast_resolver_public.js\";\nimport {\n  CstNode,\n  IParserConfig,\n  IRecognitionException,\n  IRuleConfig,\n  IToken,\n  TokenType,\n  TokenVocabulary,\n} from \"@chevrotain/types\";\nimport { Recoverable } from \"./traits/recoverable.js\";\nimport { LooksAhead } from \"./traits/looksahead.js\";\nimport { TreeBuilder } from \"./traits/tree_builder.js\";\nimport { LexerAdapter } from \"./traits/lexer_adapter.js\";\nimport { RecognizerApi } from \"./traits/recognizer_api.js\";\nimport { RecognizerEngine } from \"./traits/recognizer_engine.js\";\n\nimport { ErrorHandler } from \"./traits/error_handler.js\";\nimport { MixedInParser } from \"./traits/parser_traits.js\";\nimport { ContentAssist } from \"./traits/context_assist.js\";\nimport { GastRecorder } from \"./traits/gast_recorder.js\";\nimport { PerformanceTracer } from \"./traits/perf_tracer.js\";\nimport { applyMixins } from \"./utils/apply_mixins.js\";\nimport { IParserDefinitionError } from \"../grammar/types.js\";\nimport { Rule } from \"@chevrotain/gast\";\nimport { IParserConfigInternal, ParserMethodInternal } from \"./types.js\";\nimport { validateLookahead } from \"../grammar/checks.js\";\n\nexport const END_OF_FILE = createTokenInstance(\n  EOF,\n  \"\",\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n  NaN,\n);\nObject.freeze(END_OF_FILE);\n\nexport type TokenMatcher = (token: IToken, tokType: TokenType) => boolean;\n\nexport const DEFAULT_PARSER_CONFIG: Required<\n  Omit\n> = Object.freeze({\n  recoveryEnabled: false,\n  maxLookahead: 3,\n  dynamicTokensEnabled: false,\n  outputCst: true,\n  errorMessageProvider: defaultParserErrorProvider,\n  nodeLocationTracking: \"none\",\n  traceInitPerf: false,\n  skipValidations: false,\n});\n\nexport const DEFAULT_RULE_CONFIG: Required> = Object.freeze({\n  recoveryValueFunc: () => undefined,\n  resyncEnabled: true,\n});\n\nexport enum ParserDefinitionErrorType {\n  INVALID_RULE_NAME = 0,\n  DUPLICATE_RULE_NAME = 1,\n  INVALID_RULE_OVERRIDE = 2,\n  DUPLICATE_PRODUCTIONS = 3,\n  
UNRESOLVED_SUBRULE_REF = 4,\n  LEFT_RECURSION = 5,\n  NONE_LAST_EMPTY_ALT = 6,\n  AMBIGUOUS_ALTS = 7,\n  CONFLICT_TOKENS_RULES_NAMESPACE = 8,\n  INVALID_TOKEN_NAME = 9,\n  NO_NON_EMPTY_LOOKAHEAD = 10,\n  AMBIGUOUS_PREFIX_ALTS = 11,\n  TOO_MANY_ALTS = 12,\n  CUSTOM_LOOKAHEAD_VALIDATION = 13,\n}\n\nexport interface IParserDuplicatesDefinitionError\n  extends IParserDefinitionError {\n  dslName: string;\n  occurrence: number;\n  parameter?: string;\n}\n\nexport interface IParserEmptyAlternativeDefinitionError\n  extends IParserDefinitionError {\n  occurrence: number;\n  alternative: number;\n}\n\nexport interface IParserAmbiguousAlternativesDefinitionError\n  extends IParserDefinitionError {\n  occurrence: number | string;\n  alternatives: number[];\n}\n\nexport interface IParserUnresolvedRefDefinitionError\n  extends IParserDefinitionError {\n  unresolvedRefName: string;\n}\n\nexport interface IParserState {\n  errors: IRecognitionException[];\n  lexerState: any;\n  RULE_STACK: number[];\n  CST_STACK: CstNode[];\n}\n\nexport type Predicate = () => boolean;\n\nexport function EMPTY_ALT(): () => undefined;\nexport function EMPTY_ALT(value: T): () => T;\nexport function EMPTY_ALT(value: any = undefined) {\n  return function () {\n    return value;\n  };\n}\n\nexport class Parser {\n  // Set this flag to true if you don't want the Parser to throw error when problems in it's definition are detected.\n  // (normally during the parser's constructor).\n  // This is a design time flag, it will not affect the runtime error handling of the parser, just design time errors,\n  // for example: duplicate rule names, referencing an unresolved subrule, ect...\n  // This flag should not be enabled during normal usage, it is used in special situations, for example when\n  // needing to display the parser definition errors in some GUI(online playground).\n  static DEFER_DEFINITION_ERRORS_HANDLING: boolean = false;\n\n  /**\n   *  @deprecated use the **instance** method with the same name instead\n   */\n  static performSelfAnalysis(parserInstance: Parser): void {\n    throw Error(\n      \"The **static** `performSelfAnalysis` method has been deprecated.\" +\n        \"\\t\\nUse the **instance** method with the same name instead.\",\n    );\n  }\n\n  public performSelfAnalysis(this: MixedInParser): void {\n    this.TRACE_INIT(\"performSelfAnalysis\", () => {\n      let defErrorsMsgs;\n\n      this.selfAnalysisDone = true;\n      const className = this.className;\n\n      this.TRACE_INIT(\"toFastProps\", () => {\n        // Without this voodoo magic the parser would be x3-x4 slower\n        // It seems it is better to invoke `toFastProperties` **before**\n        // Any manipulations of the `this` object done during the recording phase.\n        toFastProperties(this);\n      });\n\n      this.TRACE_INIT(\"Grammar Recording\", () => {\n        try {\n          this.enableRecording();\n          // Building the GAST\n          forEach(this.definedRulesNames, (currRuleName) => {\n            const wrappedRule = (this as any)[\n              currRuleName\n            ] as ParserMethodInternal;\n            const originalGrammarAction = wrappedRule[\"originalGrammarAction\"];\n            let recordedRuleGast!: Rule;\n            this.TRACE_INIT(`${currRuleName} Rule`, () => {\n              recordedRuleGast = this.topLevelRuleRecord(\n                currRuleName,\n                originalGrammarAction,\n              );\n            });\n            this.gastProductionsCache[currRuleName] = recordedRuleGast;\n      
    });\n        } finally {\n          this.disableRecording();\n        }\n      });\n\n      let resolverErrors: IParserDefinitionError[] = [];\n      this.TRACE_INIT(\"Grammar Resolving\", () => {\n        resolverErrors = resolveGrammar({\n          rules: values(this.gastProductionsCache),\n        });\n        this.definitionErrors = this.definitionErrors.concat(resolverErrors);\n      });\n\n      this.TRACE_INIT(\"Grammar Validations\", () => {\n        // only perform additional grammar validations IFF no resolving errors have occurred.\n        // as unresolved grammar may lead to unhandled runtime exceptions in the follow up validations.\n        if (isEmpty(resolverErrors) && this.skipValidations === false) {\n          const validationErrors = validateGrammar({\n            rules: values(this.gastProductionsCache),\n            tokenTypes: values(this.tokensMap),\n            errMsgProvider: defaultGrammarValidatorErrorProvider,\n            grammarName: className,\n          });\n          const lookaheadValidationErrors = validateLookahead({\n            lookaheadStrategy: this.lookaheadStrategy,\n            rules: values(this.gastProductionsCache),\n            tokenTypes: values(this.tokensMap),\n            grammarName: className,\n          });\n          this.definitionErrors = this.definitionErrors.concat(\n            validationErrors,\n            lookaheadValidationErrors,\n          );\n        }\n      });\n\n      // this analysis may fail if the grammar is not perfectly valid\n      if (isEmpty(this.definitionErrors)) {\n        // The results of these computations are not needed unless error recovery is enabled.\n        if (this.recoveryEnabled) {\n          this.TRACE_INIT(\"computeAllProdsFollows\", () => {\n            const allFollows = computeAllProdsFollows(\n              values(this.gastProductionsCache),\n            );\n            this.resyncFollows = allFollows;\n          });\n        }\n\n        this.TRACE_INIT(\"ComputeLookaheadFunctions\", () => {\n          this.lookaheadStrategy.initialize?.({\n            rules: values(this.gastProductionsCache),\n          });\n          this.preComputeLookaheadFunctions(values(this.gastProductionsCache));\n        });\n      }\n\n      if (\n        !Parser.DEFER_DEFINITION_ERRORS_HANDLING &&\n        !isEmpty(this.definitionErrors)\n      ) {\n        defErrorsMsgs = map(\n          this.definitionErrors,\n          (defError) => defError.message,\n        );\n        throw new Error(\n          `Parser Definition Errors detected:\\n ${defErrorsMsgs.join(\n            \"\\n-------------------------------\\n\",\n          )}`,\n        );\n      }\n    });\n  }\n\n  definitionErrors: IParserDefinitionError[] = [];\n  selfAnalysisDone = false;\n  protected skipValidations: boolean;\n\n  constructor(tokenVocabulary: TokenVocabulary, config: IParserConfig) {\n    const that: MixedInParser = this as any;\n    that.initErrorHandler(config);\n    that.initLexerAdapter();\n    that.initLooksAhead(config);\n    that.initRecognizerEngine(tokenVocabulary, config);\n    that.initRecoverable(config);\n    that.initTreeBuilder(config);\n    that.initContentAssist();\n    that.initGastRecorder(config);\n    that.initPerformanceTracer(config);\n\n    if (has(config, \"ignoredIssues\")) {\n      throw new Error(\n        \"The  IParserConfig property has been deprecated.\\n\\t\" +\n          \"Please use the  flag on the relevant DSL method instead.\\n\\t\" +\n          \"See: 
https://chevrotain.io/docs/guide/resolving_grammar_errors.html#IGNORING_AMBIGUITIES\\n\\t\" +\n          \"For further details.\",\n      );\n    }\n\n    this.skipValidations = has(config, \"skipValidations\")\n      ? (config.skipValidations as boolean) // casting assumes the end user passing the correct type\n      : DEFAULT_PARSER_CONFIG.skipValidations;\n  }\n}\n\napplyMixins(Parser, [\n  Recoverable,\n  LooksAhead,\n  TreeBuilder,\n  LexerAdapter,\n  RecognizerEngine,\n  RecognizerApi,\n  ErrorHandler,\n  ContentAssist,\n  GastRecorder,\n  PerformanceTracer,\n]);\n\nexport class CstParser extends Parser {\n  constructor(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfigInternal = DEFAULT_PARSER_CONFIG,\n  ) {\n    const configClone = clone(config);\n    configClone.outputCst = true;\n    super(tokenVocabulary, configClone);\n  }\n}\n\nexport class EmbeddedActionsParser extends Parser {\n  constructor(\n    tokenVocabulary: TokenVocabulary,\n    config: IParserConfigInternal = DEFAULT_PARSER_CONFIG,\n  ) {\n    const configClone = clone(config);\n    configClone.outputCst = false;\n    super(tokenVocabulary, configClone);\n  }\n}\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport map from \"lodash-es/map.js\"\r\nimport filter from \"lodash-es/filter.js\"\r\nimport {\r\n    IProduction,\r\n    IProductionWithOccurrence,\r\n    TokenType,\r\n    Alternation,\r\n    NonTerminal,\r\n    Rule,\r\n    Option,\r\n    RepetitionMandatory,\r\n    Repetition,\r\n    Terminal,\r\n    Alternative,\r\n    RepetitionWithSeparator,\r\n    RepetitionMandatoryWithSeparator,\r\n    LookaheadProductionType\r\n} from \"chevrotain\"\r\n\r\nexport function buildATNKey(rule: Rule, type: LookaheadProductionType, occurrence: number): string {\r\n    return `${rule.name}_${type}_${occurrence}`;\r\n}\r\n\r\nexport interface ATN {\r\n    decisionMap: Record\r\n    states: ATNState[]\r\n    decisionStates: DecisionState[]\r\n    ruleToStartState: Map\r\n    ruleToStopState: Map\r\n}\r\n\r\nexport const ATN_INVALID_TYPE = 0\r\nexport const ATN_BASIC = 1\r\nexport const ATN_RULE_START = 2\r\nexport const ATN_PLUS_BLOCK_START = 4\r\nexport const ATN_STAR_BLOCK_START = 5\r\n// Currently unused as the ATN is not used for lexing\r\nexport const ATN_TOKEN_START = 6\r\nexport const ATN_RULE_STOP = 7\r\nexport const ATN_BLOCK_END = 8\r\nexport const ATN_STAR_LOOP_BACK = 9\r\nexport const ATN_STAR_LOOP_ENTRY = 10\r\nexport const ATN_PLUS_LOOP_BACK = 11\r\nexport const ATN_LOOP_END = 12\r\n\r\nexport type ATNState =\r\n    | BasicState\r\n    | BasicBlockStartState\r\n    | PlusBlockStartState\r\n    | PlusLoopbackState\r\n    | StarBlockStartState\r\n    | StarLoopbackState\r\n    | StarLoopEntryState\r\n    | BlockEndState\r\n    | RuleStartState\r\n    | RuleStopState\r\n    | LoopEndState\r\n\r\nexport interface ATNBaseState {\r\n    atn: ATN\r\n    production: IProductionWithOccurrence\r\n    stateNumber: number\r\n    rule: Rule\r\n    epsilonOnlyTransitions: boolean\r\n    transitions: Transition[]\r\n    nextTokenWithinRule: number[]\r\n}\r\n\r\nexport interface BasicState extends ATNBaseState {\r\n    type: typeof ATN_BASIC\r\n}\r\n\r\nexport interface BlockStartState extends 
DecisionState {\r\n    end: BlockEndState\r\n}\r\n\r\nexport interface BasicBlockStartState extends BlockStartState {\r\n    type: typeof ATN_BASIC\r\n}\r\n\r\nexport interface PlusBlockStartState extends BlockStartState {\r\n    loopback: PlusLoopbackState\r\n    type: typeof ATN_PLUS_BLOCK_START\r\n}\r\n\r\nexport interface PlusLoopbackState extends DecisionState {\r\n    type: typeof ATN_PLUS_LOOP_BACK\r\n}\r\n\r\nexport interface StarBlockStartState extends BlockStartState {\r\n    type: typeof ATN_STAR_BLOCK_START\r\n}\r\n\r\nexport interface StarLoopbackState extends ATNBaseState {\r\n    type: typeof ATN_STAR_LOOP_BACK\r\n}\r\n\r\nexport interface StarLoopEntryState extends DecisionState {\r\n    loopback: StarLoopbackState\r\n    type: typeof ATN_STAR_LOOP_ENTRY\r\n}\r\n\r\nexport interface BlockEndState extends ATNBaseState {\r\n    start: BlockStartState\r\n    type: typeof ATN_BLOCK_END\r\n}\r\n\r\nexport interface DecisionState extends ATNBaseState {\r\n    decision: number\r\n}\r\n\r\nexport interface LoopEndState extends ATNBaseState {\r\n    loopback: ATNState\r\n    type: typeof ATN_LOOP_END\r\n}\r\n\r\nexport interface RuleStartState extends ATNBaseState {\r\n    stop: RuleStopState\r\n    type: typeof ATN_RULE_START\r\n}\r\n\r\nexport interface RuleStopState extends ATNBaseState {\r\n    type: typeof ATN_RULE_STOP\r\n}\r\n\r\nexport interface Transition {\r\n    target: ATNState\r\n    isEpsilon(): boolean\r\n}\r\n\r\nexport abstract class AbstractTransition implements Transition {\r\n    target: ATNState\r\n\r\n    constructor(target: ATNState) {\r\n        this.target = target\r\n    }\r\n\r\n    isEpsilon() {\r\n        return false\r\n    }\r\n}\r\n\r\nexport class AtomTransition extends AbstractTransition {\r\n    tokenType: TokenType\r\n\r\n    constructor(target: ATNState, tokenType: TokenType) {\r\n        super(target)\r\n        this.tokenType = tokenType\r\n    }\r\n}\r\n\r\nexport class EpsilonTransition extends AbstractTransition {\r\n    constructor(target: ATNState) {\r\n        super(target)\r\n    }\r\n\r\n    isEpsilon() {\r\n        return true\r\n    }\r\n}\r\n\r\nexport class RuleTransition extends AbstractTransition {\r\n    rule: Rule\r\n    followState: ATNState\r\n\r\n    constructor(ruleStart: RuleStartState, rule: Rule, followState: ATNState) {\r\n        super(ruleStart)\r\n        this.rule = rule\r\n        this.followState = followState\r\n    }\r\n\r\n    isEpsilon() {\r\n        return true\r\n    }\r\n}\r\n\r\ninterface ATNHandle {\r\n    left: ATNState\r\n    right: ATNState\r\n}\r\n\r\nexport function createATN(rules: Rule[]): ATN {\r\n    const atn: ATN = {\r\n        decisionMap: {},\r\n        decisionStates: [],\r\n        ruleToStartState: new Map(),\r\n        ruleToStopState: new Map(),\r\n        states: []\r\n    }\r\n    createRuleStartAndStopATNStates(atn, rules)\r\n    const ruleLength = rules.length\r\n    for (let i = 0; i < ruleLength; i++) {\r\n        const rule = rules[i]\r\n        const ruleBlock = block(atn, rule, rule)\r\n        if (ruleBlock === undefined) {\r\n            continue\r\n        }\r\n        buildRuleHandle(atn, rule, ruleBlock)\r\n    }\r\n    return atn\r\n}\r\n\r\nfunction createRuleStartAndStopATNStates(atn: ATN, rules: Rule[]): void {\r\n    const ruleLength = rules.length\r\n    for (let i = 0; i < ruleLength; i++) {\r\n        const rule = rules[i]\r\n        const start = newState(atn, rule, undefined, {\r\n            type: ATN_RULE_START\r\n        })\r\n        const stop = newState(atn, 
rule, undefined, {\r\n            type: ATN_RULE_STOP\r\n        })\r\n        start.stop = stop\r\n        atn.ruleToStartState.set(rule, start)\r\n        atn.ruleToStopState.set(rule, stop)\r\n    }\r\n}\r\n\r\nfunction atom(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    production: IProduction\r\n): ATNHandle | undefined {\r\n    if (production instanceof Terminal) {\r\n        return tokenRef(atn, rule, production.terminalType, production)\r\n    } else if (production instanceof NonTerminal) {\r\n        return ruleRef(atn, rule, production)\r\n    } else if (production instanceof Alternation) {\r\n        return alternation(atn, rule, production)\r\n    } else if (production instanceof Option) {\r\n        return option(atn, rule, production)\r\n    } else if (production instanceof Repetition) {\r\n        return repetition(atn, rule, production)\r\n    } else if (production instanceof RepetitionWithSeparator) {\r\n        return repetitionSep(atn, rule, production)\r\n    } else if (production instanceof RepetitionMandatory) {\r\n        return repetitionMandatory(atn, rule, production)\r\n    } else if (production instanceof RepetitionMandatoryWithSeparator) {\r\n        return repetitionMandatorySep(atn, rule, production)\r\n    } else {\r\n        return block(atn, rule, production as Alternative)\r\n    }\r\n}\r\n\r\nfunction repetition(atn: ATN, rule: Rule, repetition: Repetition): ATNHandle {\r\n    const starState = newState(atn, rule, repetition, {\r\n        type: ATN_STAR_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, starState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        starState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    return star(atn, rule, repetition, handle)\r\n}\r\n\r\nfunction repetitionSep(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionWithSeparator\r\n): ATNHandle {\r\n    const starState = newState(atn, rule, repetition, {\r\n        type: ATN_STAR_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, starState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        starState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    const sep = tokenRef(atn, rule, repetition.separator, repetition)\r\n    return star(atn, rule, repetition, handle, sep)\r\n}\r\n\r\nfunction repetitionMandatory(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionMandatory\r\n): ATNHandle {\r\n    const plusState = newState(atn, rule, repetition, {\r\n        type: ATN_PLUS_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, plusState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        plusState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    return plus(atn, rule, repetition, handle)\r\n}\r\n\r\nfunction repetitionMandatorySep(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    repetition: RepetitionMandatoryWithSeparator\r\n): ATNHandle {\r\n    const plusState = newState(atn, rule, repetition, {\r\n        type: ATN_PLUS_BLOCK_START\r\n    })\r\n    defineDecisionState(atn, plusState)\r\n    const handle = makeAlts(\r\n        atn,\r\n        rule,\r\n        plusState,\r\n        repetition,\r\n        block(atn, rule, repetition)\r\n    )\r\n    const sep = tokenRef(atn, rule, repetition.separator, repetition)\r\n    return plus(atn, rule, repetition, handle, sep)\r\n}\r\n\r\nfunction alternation(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    alternation: Alternation\r\n): ATNHandle 
{\r\n    const start = newState(atn, rule, alternation, {\r\n        type: ATN_BASIC\r\n    })\r\n    defineDecisionState(atn, start)\r\n    const alts = map(alternation.definition, (e) => atom(atn, rule, e))\r\n    const handle = makeAlts(atn, rule, start, alternation, ...alts)\r\n    return handle\r\n}\r\n\r\nfunction option(atn: ATN, rule: Rule, option: Option): ATNHandle {\r\n    const start = newState(atn, rule, option, {\r\n        type: ATN_BASIC\r\n    })\r\n    defineDecisionState(atn, start)\r\n    const handle = makeAlts(atn, rule, start, option, block(atn, rule, option))\r\n    return optional(atn, rule, option, handle)\r\n}\r\n\r\nfunction block(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    block: { definition: IProduction[] }\r\n): ATNHandle | undefined {\r\n    const handles = filter(\r\n        map(block.definition, (e) => atom(atn, rule, e)),\r\n        (e) => e !== undefined\r\n    ) as ATNHandle[]\r\n    if (handles.length === 1) {\r\n        return handles[0]\r\n    } else if (handles.length === 0) {\r\n        return undefined\r\n    } else {\r\n        return makeBlock(atn, handles)\r\n    }\r\n}\r\n\r\nfunction plus(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    plus: IProductionWithOccurrence,\r\n    handle: ATNHandle,\r\n    sep?: ATNHandle\r\n): ATNHandle {\r\n    const blkStart = handle.left as PlusBlockStartState\r\n    const blkEnd = handle.right\r\n\r\n    const loop = newState(atn, rule, plus, {\r\n        type: ATN_PLUS_LOOP_BACK\r\n    })\r\n    defineDecisionState(atn, loop)\r\n    const end = newState(atn, rule, plus, {\r\n        type: ATN_LOOP_END\r\n    })\r\n    blkStart.loopback = loop\r\n    end.loopback = loop\r\n    atn.decisionMap[buildATNKey(rule, sep ? 'RepetitionMandatoryWithSeparator' : 'RepetitionMandatory', plus.idx)] = loop;\r\n    epsilon(blkEnd, loop) // block can see loop back\r\n\r\n    // Depending on whether we have a separator we put the exit transition at index 1 or 0\r\n    // This influences the chosen option in the lookahead DFA\r\n    if (sep === undefined) {\r\n        epsilon(loop, blkStart) // loop back to start\r\n        epsilon(loop, end) // exit\r\n    } else {\r\n        epsilon(loop, end) // exit\r\n        // loop back to start with separator\r\n        epsilon(loop, sep.left)\r\n        epsilon(sep.right, blkStart)\r\n    }\r\n\r\n    return {\r\n        left: blkStart,\r\n        right: end\r\n    }\r\n}\r\n\r\nfunction star(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    star: IProductionWithOccurrence,\r\n    handle: ATNHandle,\r\n    sep?: ATNHandle\r\n): ATNHandle {\r\n    const start = handle.left\r\n    const end = handle.right\r\n\r\n    const entry = newState(atn, rule, star, {\r\n        type: ATN_STAR_LOOP_ENTRY\r\n    })\r\n    defineDecisionState(atn, entry)\r\n    const loopEnd = newState(atn, rule, star, {\r\n        type: ATN_LOOP_END\r\n    })\r\n    const loop = newState(atn, rule, star, {\r\n        type: ATN_STAR_LOOP_BACK\r\n    })\r\n    entry.loopback = loop\r\n    loopEnd.loopback = loop\r\n\r\n    epsilon(entry, start) // loop enter edge (alt 2)\r\n    epsilon(entry, loopEnd) // bypass loop edge (alt 1)\r\n    epsilon(end, loop) // block end hits loop back\r\n\r\n    if (sep !== undefined) {\r\n        epsilon(loop, loopEnd) // end loop\r\n        // loop back to start of handle using separator\r\n        epsilon(loop, sep.left)\r\n        epsilon(sep.right, start)\r\n    } else {\r\n        epsilon(loop, entry) // loop back to entry/exit decision\r\n    }\r\n\r\n    atn.decisionMap[buildATNKey(rule, 
sep ? 'RepetitionWithSeparator' : 'Repetition', star.idx)] = entry;\r\n    return {\r\n        left: entry,\r\n        right: loopEnd\r\n    }\r\n}\r\n\r\nfunction optional(atn: ATN, rule: Rule, optional: Option, handle: ATNHandle): ATNHandle {\r\n    const start = handle.left as DecisionState\r\n    const end = handle.right\r\n\r\n    epsilon(start, end)\r\n\r\n    atn.decisionMap[buildATNKey(rule, 'Option', optional.idx)] = start;\r\n    return handle\r\n}\r\n\r\nfunction defineDecisionState(atn: ATN, state: DecisionState): number {\r\n    atn.decisionStates.push(state)\r\n    state.decision = atn.decisionStates.length - 1\r\n    return state.decision\r\n}\r\n\r\nfunction makeAlts(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    start: BlockStartState,\r\n    production: IProductionWithOccurrence,\r\n    ...alts: (ATNHandle | undefined)[]\r\n): ATNHandle {\r\n    const end = newState(atn, rule, production, {\r\n        type: ATN_BLOCK_END,\r\n        start\r\n    })\r\n    start.end = end\r\n    for (const alt of alts) {\r\n        if (alt !== undefined) {\r\n            // hook alts up to decision block\r\n            epsilon(start, alt.left)\r\n            epsilon(alt.right, end)\r\n        } else {\r\n            epsilon(start, end)\r\n        }\r\n    }\r\n\r\n    const handle: ATNHandle = {\r\n        left: start as ATNState,\r\n        right: end\r\n    }\r\n    atn.decisionMap[buildATNKey(rule, getProdType(production), production.idx)] = start\r\n    return handle\r\n}\r\n\r\nfunction getProdType(production: IProduction): LookaheadProductionType {\r\n    if (production instanceof Alternation) {\r\n        return 'Alternation';\r\n    } else if (production instanceof Option) {\r\n        return 'Option';\r\n    } else if (production instanceof Repetition) {\r\n        return 'Repetition';\r\n    } else if (production instanceof RepetitionWithSeparator) {\r\n        return 'RepetitionWithSeparator';\r\n    } else if (production instanceof RepetitionMandatory) {\r\n        return 'RepetitionMandatory';\r\n    } else if (production instanceof RepetitionMandatoryWithSeparator) {\r\n        return 'RepetitionMandatoryWithSeparator';\r\n    } else {\r\n        throw new Error('Invalid production type encountered');\r\n    }\r\n}\r\n\r\nfunction makeBlock(atn: ATN, alts: ATNHandle[]): ATNHandle {\r\n    const altsLength = alts.length\r\n    for (let i = 0; i < altsLength - 1; i++) {\r\n        const handle = alts[i]\r\n        let transition: Transition | undefined\r\n        if (handle.left.transitions.length === 1) {\r\n            transition = handle.left.transitions[0]\r\n        }\r\n        const isRuleTransition = transition instanceof RuleTransition\r\n        const ruleTransition = transition as RuleTransition\r\n        const next = alts[i + 1].left\r\n        if (\r\n            handle.left.type === ATN_BASIC &&\r\n            handle.right.type === ATN_BASIC &&\r\n            transition !== undefined &&\r\n            ((isRuleTransition && ruleTransition.followState === handle.right) ||\r\n                transition.target === handle.right)\r\n        ) {\r\n            // we can avoid epsilon edge to next element\r\n            if (isRuleTransition) {\r\n                ruleTransition.followState = next\r\n            } else {\r\n                transition.target = next\r\n            }\r\n            removeState(atn, handle.right) // we skipped over this state\r\n        } else {\r\n            // need epsilon if previous block's right end node is complex\r\n            
epsilon(handle.right, next)\r\n        }\r\n    }\r\n\r\n    const first = alts[0]\r\n    const last = alts[altsLength - 1]\r\n    return {\r\n        left: first.left,\r\n        right: last.right\r\n    }\r\n}\r\n\r\nfunction tokenRef(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    tokenType: TokenType,\r\n    production: IProductionWithOccurrence\r\n): ATNHandle {\r\n    const left = newState(atn, rule, production, {\r\n        type: ATN_BASIC\r\n    })\r\n    const right = newState(atn, rule, production, {\r\n        type: ATN_BASIC\r\n    })\r\n    addTransition(left, new AtomTransition(right, tokenType))\r\n    return {\r\n        left,\r\n        right\r\n    }\r\n}\r\n\r\nfunction ruleRef(\r\n    atn: ATN,\r\n    currentRule: Rule,\r\n    nonTerminal: NonTerminal\r\n): ATNHandle {\r\n    const rule = nonTerminal.referencedRule\r\n    const start = atn.ruleToStartState.get(rule)!\r\n    const left = newState(atn, currentRule, nonTerminal, {\r\n        type: ATN_BASIC\r\n    })\r\n    const right = newState(atn, currentRule, nonTerminal, {\r\n        type: ATN_BASIC\r\n    })\r\n\r\n    const call = new RuleTransition(start, rule, right)\r\n    addTransition(left, call)\r\n\r\n    return {\r\n        left,\r\n        right\r\n    }\r\n}\r\n\r\nfunction buildRuleHandle(atn: ATN, rule: Rule, block: ATNHandle): ATNHandle {\r\n    const start = atn.ruleToStartState.get(rule)!\r\n    epsilon(start, block.left)\r\n    const stop = atn.ruleToStopState.get(rule)!\r\n    epsilon(block.right, stop)\r\n    const handle: ATNHandle = {\r\n        left: start,\r\n        right: stop\r\n    }\r\n    return handle\r\n}\r\n\r\nfunction epsilon(a: ATNBaseState, b: ATNBaseState): void {\r\n    const transition = new EpsilonTransition(b as ATNState)\r\n    addTransition(a, transition)\r\n}\r\n\r\nfunction newState(\r\n    atn: ATN,\r\n    rule: Rule,\r\n    production: IProductionWithOccurrence | undefined,\r\n    partial: Partial\r\n): T {\r\n    const t: T = {\r\n        atn,\r\n        production,\r\n        epsilonOnlyTransitions: false,\r\n        rule,\r\n        transitions: [],\r\n        nextTokenWithinRule: [],\r\n        stateNumber: atn.states.length,\r\n        ...partial\r\n    } as unknown as T\r\n    atn.states.push(t)\r\n    return t\r\n}\r\n\r\nfunction addTransition(state: ATNBaseState, transition: Transition) {\r\n    // A single ATN state can only contain epsilon transitions or non-epsilon transitions\r\n    // Because they are never mixed, only setting the property for the first transition is fine\r\n    if (state.transitions.length === 0) {\r\n        state.epsilonOnlyTransitions = transition.isEpsilon()\r\n    }\r\n    state.transitions.push(transition)\r\n}\r\n\r\nfunction removeState(atn: ATN, state: ATNState): void {\r\n    atn.states.splice(atn.states.indexOf(state), 1)\r\n}\r\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport map from \"lodash-es/map.js\"\r\nimport { ATNState, DecisionState } from \"./atn.js\"\r\n\r\nexport interface DFA {\r\n  start?: DFAState\r\n  states: Record\r\n  decision: number\r\n  atnStartState: DecisionState\r\n}\r\n\r\nexport interface DFAState {\r\n  configs: ATNConfigSet\r\n  edges: Record\r\n  isAcceptState: boolean\r\n  prediction: 
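The embedded atn.ts above builds the ATN by wiring states together with epsilon and atom transitions; its addTransition helper notes that a state only ever carries epsilon transitions or non-epsilon transitions, never a mix. The following standalone sketch models that invariant and the optional-bypass wiring in miniature; every name in it (MiniState, epsilonEdge, etc.) is invented for illustration and is not part of the embedded sources.

```ts
// Minimal, self-contained model of the ATN wiring shown above (illustrative only).

interface MiniTransition {
  target: MiniState;
  isEpsilon: boolean;
}

interface MiniState {
  stateNumber: number;
  // Mirrors the invariant noted in addTransition: a state carries either
  // only epsilon transitions or only non-epsilon transitions, never a mix.
  epsilonOnlyTransitions: boolean;
  transitions: MiniTransition[];
}

function newMiniState(states: MiniState[]): MiniState {
  const s: MiniState = {
    stateNumber: states.length,
    epsilonOnlyTransitions: false,
    transitions: []
  };
  states.push(s);
  return s;
}

function addMiniTransition(state: MiniState, transition: MiniTransition): void {
  // Only the first transition decides the flag, as in the original addTransition.
  if (state.transitions.length === 0) {
    state.epsilonOnlyTransitions = transition.isEpsilon;
  }
  state.transitions.push(transition);
}

function epsilonEdge(from: MiniState, to: MiniState): void {
  addMiniTransition(from, { target: to, isEpsilon: true });
}

// Wire an "optional" fragment: the decision state can either enter the block
// or bypass it entirely via an epsilon edge, mirroring optional() above.
const states: MiniState[] = [];
const start = newMiniState(states);
const blockEntry = newMiniState(states);
const end = newMiniState(states);

epsilonEdge(start, blockEntry); // take the optional block
epsilonEdge(start, end);        // skip it
epsilonEdge(blockEntry, end);   // (block body elided in this sketch)

console.log(states.map((s) => `${s.stateNumber}: ${s.transitions.length} transition(s)`));
```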
number\r\n}\r\n\r\nexport const DFA_ERROR = {} as DFAState\r\n\r\nexport interface ATNConfig {\r\n  state: ATNState\r\n  alt: number\r\n  stack: ATNState[]\r\n}\r\n\r\nexport class ATNConfigSet {\r\n  private map: Record = {}\r\n  private configs: ATNConfig[] = []\r\n\r\n  uniqueAlt: number | undefined\r\n\r\n  get size(): number {\r\n    return this.configs.length\r\n  }\r\n\r\n  finalize(): void {\r\n    // Empties the map to free up memory\r\n    this.map = {}\r\n  }\r\n\r\n  add(config: ATNConfig): void {\r\n    const key = getATNConfigKey(config)\r\n    // Only add configs which don't exist in our map already\r\n    // While this does not influence the actual algorithm, adding them anyway would massively increase memory consumption\r\n    if (!(key in this.map)) {\r\n      this.map[key] = this.configs.length\r\n      this.configs.push(config)\r\n    }\r\n  }\r\n\r\n  get elements(): readonly ATNConfig[] {\r\n    return this.configs\r\n  }\r\n\r\n  get alts(): number[] {\r\n    return map(this.configs, (e) => e.alt)\r\n  }\r\n\r\n  get key(): string {\r\n    let value = \"\"\r\n    for (const k in this.map) {\r\n      value += k + \":\"\r\n    }\r\n    return value\r\n  }\r\n}\r\n\r\nexport function getATNConfigKey(config: ATNConfig, alt = true) {\r\n  return `${alt ? `a${config.alt}` : \"\"}s${\r\n    config.state.stateNumber\r\n  }:${config.stack.map((e) => e.stateNumber.toString()).join(\"_\")}`\r\n}\r\n", "/******************************************************************************\r\n * Copyright 2022 TypeFox GmbH\r\n * This program and the accompanying materials are made available under the\r\n * terms of the MIT License, which is available in the project root.\r\n ******************************************************************************/\r\n\r\nimport {\r\n    IToken,\r\n    TokenType,\r\n    tokenMatcher,\r\n    tokenLabel,\r\n    Rule,\r\n    IProductionWithOccurrence,\r\n    NonTerminal,\r\n    Alternation,\r\n    Option,\r\n    RepetitionMandatory,\r\n    RepetitionMandatoryWithSeparator,\r\n    RepetitionWithSeparator,\r\n    Repetition,\r\n    Terminal,\r\n    BaseParser,\r\n    LLkLookaheadStrategy,\r\n    ILookaheadValidationError,\r\n    IOrAlt,\r\n    getLookaheadPaths,\r\n    OptionalProductionType\r\n} from \"chevrotain\";\r\nimport {\r\n    ATN,\r\n    ATNState,\r\n    ATN_RULE_STOP,\r\n    AtomTransition,\r\n    buildATNKey,\r\n    createATN,\r\n    DecisionState,\r\n    EpsilonTransition,\r\n    RuleTransition,\r\n    Transition\r\n} from \"./atn.js\";\r\nimport {\r\n    ATNConfig,\r\n    ATNConfigSet,\r\n    DFA,\r\n    DFAState,\r\n    DFA_ERROR,\r\n    getATNConfigKey\r\n} from \"./dfa.js\";\r\nimport min from \"lodash-es/min.js\";\r\nimport flatMap from \"lodash-es/flatMap.js\";\r\nimport uniqBy from \"lodash-es/uniqBy.js\";\r\nimport map from \"lodash-es/map.js\";\r\nimport flatten from \"lodash-es/flatten.js\";\r\nimport forEach from \"lodash-es/forEach.js\";\r\nimport isEmpty from \"lodash-es/isEmpty.js\";\r\nimport reduce from \"lodash-es/reduce.js\";\r\n\r\ntype DFACache = (predicateSet: PredicateSet) => DFA\r\n\r\nexport type AmbiguityReport = (message: string) => void;\r\n\r\nfunction createDFACache(startState: DecisionState, decision: number): DFACache {\r\n    const map: Record = {}\r\n    return (predicateSet) => {\r\n        const key = predicateSet.toString()\r\n        let existing = map[key]\r\n        if (existing !== undefined) {\r\n            return existing\r\n        } else {\r\n            existing = {\r\n                
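The ATNConfigSet in the embedded dfa.ts deduplicates configurations by a string key (alternative, state number, call stack) so that repeated additions do not blow up memory. Here is a compact re-implementation of that idea under invented names (SketchConfigSet, sketchConfigKey); it is a sketch of the technique, not the library's class.

```ts
// Illustrative re-implementation of the config-set deduplication idea from dfa.ts above.

interface SketchConfig {
  stateNumber: number;
  alt: number;
  stack: number[]; // rule-invocation stack, by state number
}

function sketchConfigKey(c: SketchConfig, includeAlt = true): string {
  // Same shape as getATNConfigKey: alternative, state, then the call stack.
  return `${includeAlt ? `a${c.alt}` : ''}s${c.stateNumber}:${c.stack.join('_')}`;
}

class SketchConfigSet {
  private index: Record<string, number> = {};
  private configs: SketchConfig[] = [];

  add(config: SketchConfig): void {
    const key = sketchConfigKey(config);
    // Duplicate configs would not change the prediction, only the memory footprint,
    // so they are dropped, just like in ATNConfigSet.add.
    if (!(key in this.index)) {
      this.index[key] = this.configs.length;
      this.configs.push(config);
    }
  }

  get size(): number {
    return this.configs.length;
  }
}

const set = new SketchConfigSet();
set.add({ stateNumber: 3, alt: 0, stack: [1] });
set.add({ stateNumber: 3, alt: 0, stack: [1] }); // ignored duplicate
set.add({ stateNumber: 3, alt: 1, stack: [1] }); // different alternative, kept
console.log(set.size); // 2
```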
atnStartState: startState,\r\n                decision,\r\n                states: {}\r\n            }\r\n            map[key] = existing\r\n            return existing\r\n        }\r\n    }\r\n}\r\n\r\nclass PredicateSet {\r\n    private predicates: boolean[] = []\r\n\r\n    is(index: number): boolean {\r\n        return index >= this.predicates.length || this.predicates[index]\r\n    }\r\n\r\n    set(index: number, value: boolean) {\r\n        this.predicates[index] = value\r\n    }\r\n\r\n    toString(): string {\r\n        let value = \"\"\r\n        const size = this.predicates.length\r\n        for (let i = 0; i < size; i++) {\r\n            value += this.predicates[i] === true ? \"1\" : \"0\"\r\n        }\r\n        return value\r\n    }\r\n}\r\n\r\ninterface AdaptivePredictError {\r\n    tokenPath: IToken[]\r\n    possibleTokenTypes: TokenType[]\r\n    actualToken: IToken\r\n}\r\n\r\nconst EMPTY_PREDICATES = new PredicateSet()\r\n\r\nexport interface LLStarLookaheadOptions {\r\n    logging?: AmbiguityReport\r\n}\r\n\r\nexport class LLStarLookaheadStrategy extends LLkLookaheadStrategy {\r\n\r\n    private atn: ATN;\r\n    private dfas: DFACache[];\r\n    private logging: AmbiguityReport;\r\n\r\n    constructor(options?: LLStarLookaheadOptions) {\r\n        super();\r\n        this.logging = options?.logging ?? ((message) => console.log(message));\r\n    }\r\n\r\n    override initialize(options: { rules: Rule[] }): void {\r\n        this.atn = createATN(options.rules);\r\n        this.dfas = initATNSimulator(this.atn);\r\n    }\r\n\r\n    override validateAmbiguousAlternationAlternatives(): ILookaheadValidationError[] {\r\n        return [];\r\n    }\r\n\r\n    override validateEmptyOrAlternatives(): ILookaheadValidationError[] {\r\n        return [];\r\n    }\r\n\r\n    override buildLookaheadForAlternation(options: {\r\n        prodOccurrence: number;\r\n        rule: Rule;\r\n        maxLookahead: number;\r\n        hasPredicates: boolean;\r\n        dynamicTokensEnabled: boolean\r\n    }): (this: BaseParser, orAlts?: IOrAlt[] | undefined) => number | undefined {\r\n        const { prodOccurrence, rule, hasPredicates, dynamicTokensEnabled } = options;\r\n        const dfas = this.dfas;\r\n        const logging = this.logging;\r\n        const key = buildATNKey(rule, 'Alternation', prodOccurrence);\r\n        const decisionState = this.atn.decisionMap[key];\r\n        const decisionIndex = decisionState.decision;\r\n        const partialAlts: (TokenType | undefined)[][] = map(\r\n            getLookaheadPaths({\r\n                maxLookahead: 1,\r\n                occurrence: prodOccurrence,\r\n                prodType: \"Alternation\",\r\n                rule: rule\r\n            }),\r\n            (currAlt) => map(currAlt, (path) => path[0])\r\n        )\r\n\r\n        if (isLL1Sequence(partialAlts, false) && !dynamicTokensEnabled) {\r\n            const choiceToAlt = reduce(\r\n                partialAlts,\r\n                (result, currAlt, idx) => {\r\n                    forEach(currAlt, (currTokType) => {\r\n                        if (currTokType) {\r\n                            result[currTokType.tokenTypeIdx!] 
= idx\r\n                            forEach(currTokType.categoryMatches!, (currExtendingType) => {\r\n                                result[currExtendingType] = idx\r\n                            })\r\n                        }\r\n                    })\r\n                    return result\r\n                },\r\n                {} as Record\r\n            )\r\n\r\n            if (hasPredicates) {\r\n                return function (this: BaseParser, orAlts) {\r\n                    const nextToken = this.LA(1)\r\n                    const prediction: number | undefined = choiceToAlt[nextToken.tokenTypeIdx]\r\n                    if (orAlts !== undefined && prediction !== undefined) {\r\n                        const gate = orAlts[prediction]?.GATE\r\n                        if (gate !== undefined && gate.call(this) === false) {\r\n                            return undefined;\r\n                        }\r\n                    }\r\n                    return prediction\r\n                }\r\n            } else {\r\n                return function (this: BaseParser): number | undefined {\r\n                    const nextToken = this.LA(1)\r\n                    return choiceToAlt[nextToken.tokenTypeIdx];\r\n                }\r\n            }\r\n        } else if (hasPredicates) {\r\n            return function (this: BaseParser, orAlts) {\r\n                const predicates = new PredicateSet()\r\n                const length = orAlts === undefined ? 0 : orAlts.length\r\n                for (let i = 0; i < length; i++) {\r\n                    const gate = orAlts?.[i].GATE\r\n                    predicates.set(i, gate === undefined || gate.call(this))\r\n                }\r\n                const result = adaptivePredict.call(this, dfas, decisionIndex, predicates, logging);\r\n                return typeof result === 'number' ? result : undefined;\r\n            }\r\n        } else {\r\n            return function (this: BaseParser) {\r\n                const result = adaptivePredict.call(this, dfas, decisionIndex, EMPTY_PREDICATES, logging);\r\n                return typeof result === 'number' ? 
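When the alternation is LL(1) and dynamic tokens are disabled, buildLookaheadForAlternation above collapses prediction into a single table lookup: every possible first token (and its matching categories) maps to the index of its alternative. The sketch below reproduces just that table-building step with invented names (buildChoiceToAlt, SketchTokenType); it assumes nothing beyond what the embedded code shows.

```ts
// Sketch of the LL(1) fast path used above when isLL1Sequence() holds:
// each alternative's possible first tokens map to that alternative's index,
// so prediction is a single lookup on the next token.

interface SketchTokenType {
  tokenTypeIdx: number;
  categoryMatches: number[]; // token categories that also match this type
}

function buildChoiceToAlt(
  firstTokensPerAlt: SketchTokenType[][]
): Record<number, number> {
  const choiceToAlt: Record<number, number> = {};
  firstTokensPerAlt.forEach((tokens, altIdx) => {
    for (const tok of tokens) {
      choiceToAlt[tok.tokenTypeIdx] = altIdx;
      for (const category of tok.categoryMatches) {
        choiceToAlt[category] = altIdx;
      }
    }
  });
  return choiceToAlt;
}

// Two alternatives: one starting with token #10, one with token #11 (category #20).
const table = buildChoiceToAlt([
  [{ tokenTypeIdx: 10, categoryMatches: [] }],
  [{ tokenTypeIdx: 11, categoryMatches: [20] }]
]);

function predict(nextTokenTypeIdx: number): number | undefined {
  return table[nextTokenTypeIdx]; // undefined means: no alternative applies
}

console.log(predict(10), predict(20), predict(99)); // 0 1 undefined
```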
result : undefined;\r\n            }\r\n        }\r\n    }\r\n\r\n    override buildLookaheadForOptional(options: {\r\n        prodOccurrence: number;\r\n        prodType: OptionalProductionType;\r\n        rule: Rule;\r\n        maxLookahead: number;\r\n        dynamicTokensEnabled: boolean\r\n    }): (this: BaseParser) => boolean {\r\n        const { prodOccurrence, rule, prodType, dynamicTokensEnabled } = options;\r\n        const dfas = this.dfas;\r\n        const logging = this.logging;\r\n        const key = buildATNKey(rule, prodType, prodOccurrence);\r\n        const decisionState = this.atn.decisionMap[key];\r\n        const decisionIndex = decisionState.decision;\r\n        const alts = map(\r\n            getLookaheadPaths({\r\n                maxLookahead: 1,\r\n                occurrence: prodOccurrence,\r\n                prodType,\r\n                rule\r\n            }),\r\n            (e) => {\r\n              return map(e, (g) => g[0])\r\n            }\r\n          )\r\n        \r\n          if (isLL1Sequence(alts) && alts[0][0] && !dynamicTokensEnabled) {\r\n            const alt = alts[0]\r\n            const singleTokensTypes = flatten(alt)\r\n        \r\n            if (\r\n              singleTokensTypes.length === 1 &&\r\n              isEmpty(singleTokensTypes[0].categoryMatches)\r\n            ) {\r\n              const expectedTokenType = singleTokensTypes[0]\r\n              const expectedTokenUniqueKey = expectedTokenType.tokenTypeIdx\r\n        \r\n              return function (this: BaseParser): boolean {\r\n                return this.LA(1).tokenTypeIdx === expectedTokenUniqueKey\r\n              }\r\n            } else {\r\n              const choiceToAlt = reduce(\r\n                singleTokensTypes,\r\n                (result, currTokType) => {\r\n                  if (currTokType !== undefined) {\r\n                    result[currTokType.tokenTypeIdx!] = true\r\n                    forEach(currTokType.categoryMatches, (currExtendingType) => {\r\n                      result[currExtendingType] = true\r\n                    })\r\n                  }\r\n                  return result\r\n                },\r\n                {} as Record\r\n              )\r\n        \r\n              return function (this: BaseParser): boolean {\r\n                const nextToken = this.LA(1)\r\n                return choiceToAlt[nextToken.tokenTypeIdx] === true\r\n              }\r\n            }\r\n          }\r\n          return function (this: BaseParser) {\r\n            const result = adaptivePredict.call(this, dfas, decisionIndex, EMPTY_PREDICATES, logging)\r\n              return typeof result === \"object\" ? 
false : result === 0;\r\n          }\r\n    }\r\n\r\n}\r\n\r\nfunction isLL1Sequence(sequences: (TokenType | undefined)[][], allowEmpty = true): boolean {\r\n    const fullSet = new Set()\r\n\r\n    for (const alt of sequences) {\r\n        const altSet = new Set()\r\n        for (const tokType of alt) {\r\n            if (tokType === undefined) {\r\n                if (allowEmpty) {\r\n                    // Epsilon production encountered\r\n                    break\r\n                } else {\r\n                    return false;\r\n                }\r\n            }\r\n            const indices = [tokType.tokenTypeIdx!].concat(tokType.categoryMatches!)\r\n            for (const index of indices) {\r\n                if (fullSet.has(index)) {\r\n                    if (!altSet.has(index)) {\r\n                        return false\r\n                    }\r\n                } else {\r\n                    fullSet.add(index)\r\n                    altSet.add(index)\r\n                }\r\n            }\r\n        }\r\n    }\r\n    return true\r\n}\r\n\r\nfunction initATNSimulator(atn: ATN): DFACache[] {\r\n    const decisionLength = atn.decisionStates.length\r\n    const decisionToDFA: DFACache[] = Array(decisionLength)\r\n    for (let i = 0; i < decisionLength; i++) {\r\n        decisionToDFA[i] = createDFACache(atn.decisionStates[i], i)\r\n    }\r\n    return decisionToDFA;\r\n}\r\n\r\nfunction adaptivePredict(\r\n    this: BaseParser,\r\n    dfaCaches: DFACache[],\r\n    decision: number,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): number | AdaptivePredictError {\r\n    const dfa = dfaCaches[decision](predicateSet)\r\n    let start = dfa.start\r\n    if (start === undefined) {\r\n        const closure = computeStartState(dfa.atnStartState as ATNState)\r\n        start = addDFAState(dfa, newDFAState(closure))\r\n        dfa.start = start\r\n    }\r\n\r\n    const alt = performLookahead.apply(this, [dfa, start, predicateSet, logging])\r\n    return alt\r\n}\r\n\r\nfunction performLookahead(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    s0: DFAState,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): number | AdaptivePredictError {\r\n    let previousD = s0\r\n\r\n    let i = 1\r\n    const path: IToken[] = []\r\n    let t = this.LA(i++)\r\n\r\n    while (true) {\r\n        let d = getExistingTargetState(previousD, t)\r\n        if (d === undefined) {\r\n            d = computeLookaheadTarget.apply(this, [dfa, previousD, t, i, predicateSet, logging])\r\n        }\r\n\r\n        if (d === DFA_ERROR) {\r\n            return buildAdaptivePredictError(path, previousD, t)\r\n        }\r\n\r\n        if (d.isAcceptState === true) {\r\n            return d.prediction\r\n        }\r\n\r\n        previousD = d\r\n        path.push(t)\r\n        t = this.LA(i++)\r\n    }\r\n}\r\n\r\nfunction computeLookaheadTarget(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    previousD: DFAState,\r\n    token: IToken,\r\n    lookahead: number,\r\n    predicateSet: PredicateSet,\r\n    logging: AmbiguityReport\r\n): DFAState {\r\n    const reach = computeReachSet(previousD.configs, token, predicateSet)\r\n    if (reach.size === 0) {\r\n        addDFAEdge(dfa, previousD, token, DFA_ERROR)\r\n        return DFA_ERROR\r\n    }\r\n\r\n    let newState = newDFAState(reach)\r\n    const predictedAlt = getUniqueAlt(reach, predicateSet)\r\n\r\n    if (predictedAlt !== undefined) {\r\n        newState.isAcceptState = true\r\n        newState.prediction = 
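The core of adaptivePredict/performLookahead above is a DFA walk over successive lookahead tokens: follow the edge for LA(i) until an accept state yields the alternative, or a dead end yields an error. The condensed model below captures only that loop; all types and the walkLookahead function are invented for the sketch, and the on-demand DFA extension done by computeLookaheadTarget is deliberately left out.

```ts
// Condensed model of the performLookahead() loop above.

interface SketchDFAState {
  edges: Record<number, SketchDFAState>;
  isAcceptState: boolean;
  prediction: number;
}

type SketchPredictResult = { alt: number } | { error: true; at: number };

function walkLookahead(
  start: SketchDFAState,
  lookahead: (i: number) => number // returns tokenTypeIdx of the i-th lookahead token
): SketchPredictResult {
  let current = start;
  let i = 1;
  while (true) {
    const tokenTypeIdx = lookahead(i);
    const next: SketchDFAState | undefined = current.edges[tokenTypeIdx];
    if (next === undefined) {
      // In the real code this is where computeLookaheadTarget() would extend the
      // DFA on demand; the sketch just reports a dead end.
      return { error: true, at: i };
    }
    if (next.isAcceptState) {
      return { alt: next.prediction };
    }
    current = next;
    i++;
  }
}

// Tiny hand-built DFA: token 1 then token 2 selects alternative 0.
const accept: SketchDFAState = { edges: {}, isAcceptState: true, prediction: 0 };
const mid: SketchDFAState = { edges: { 2: accept }, isAcceptState: false, prediction: -1 };
const s0: SketchDFAState = { edges: { 1: mid }, isAcceptState: false, prediction: -1 };

const tokens = [1, 2, 3];
console.log(walkLookahead(s0, (i) => tokens[i - 1])); // { alt: 0 }
```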
predictedAlt\r\n        newState.configs.uniqueAlt = predictedAlt\r\n    } else if (hasConflictTerminatingPrediction(reach)) {\r\n        const prediction = min(reach.alts)!\r\n        newState.isAcceptState = true\r\n        newState.prediction = prediction\r\n        newState.configs.uniqueAlt = prediction\r\n        reportLookaheadAmbiguity.apply(this, [dfa, lookahead, reach.alts, logging])\r\n    }\r\n\r\n    newState = addDFAEdge(dfa, previousD, token, newState)\r\n    return newState\r\n}\r\n\r\nfunction reportLookaheadAmbiguity(\r\n    this: BaseParser,\r\n    dfa: DFA,\r\n    lookahead: number,\r\n    ambiguityIndices: number[],\r\n    logging: AmbiguityReport\r\n) {\r\n    const prefixPath: TokenType[] = []\r\n    for (let i = 1; i <= lookahead; i++) {\r\n        prefixPath.push(this.LA(i).tokenType)\r\n    }\r\n    const atnState = dfa.atnStartState\r\n    const topLevelRule = atnState.rule\r\n    const production = atnState.production\r\n    const message = buildAmbiguityError({\r\n        topLevelRule,\r\n        ambiguityIndices,\r\n        production,\r\n        prefixPath\r\n    })\r\n    logging(message)\r\n}\r\n\r\nfunction buildAmbiguityError(options: {\r\n    topLevelRule: Rule\r\n    prefixPath: TokenType[]\r\n    ambiguityIndices: number[]\r\n    production: IProductionWithOccurrence\r\n}): string {\r\n    const pathMsg = map(options.prefixPath, (currtok) =>\r\n        tokenLabel(currtok)\r\n    ).join(\", \")\r\n    const occurrence =\r\n        options.production.idx === 0 ? \"\" : options.production.idx\r\n    let currMessage =\r\n        `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(\r\n            \", \"\r\n        )}> in <${getProductionDslName(options.production)}${occurrence}>` +\r\n        ` inside <${options.topLevelRule.name}> Rule,\\n` +\r\n        `<${pathMsg}> may appears as a prefix path in all these alternatives.\\n`\r\n\r\n    currMessage =\r\n        currMessage +\r\n        `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES\\n` +\r\n        `For Further details.`\r\n    return currMessage\r\n}\r\n\r\nfunction getProductionDslName(prod: IProductionWithOccurrence): string {\r\n    if (prod instanceof NonTerminal) {\r\n        return \"SUBRULE\"\r\n    } else if (prod instanceof Option) {\r\n        return \"OPTION\"\r\n    } else if (prod instanceof Alternation) {\r\n        return \"OR\"\r\n    } else if (prod instanceof RepetitionMandatory) {\r\n        return \"AT_LEAST_ONE\"\r\n    } else if (prod instanceof RepetitionMandatoryWithSeparator) {\r\n        return \"AT_LEAST_ONE_SEP\"\r\n    } else if (prod instanceof RepetitionWithSeparator) {\r\n        return \"MANY_SEP\"\r\n    } else if (prod instanceof Repetition) {\r\n        return \"MANY\"\r\n    } else if (prod instanceof Terminal) {\r\n        return \"CONSUME\"\r\n    } else {\r\n        throw Error(\"non exhaustive match\")\r\n    }\r\n}\r\n\r\nfunction buildAdaptivePredictError(\r\n    path: IToken[],\r\n    previous: DFAState,\r\n    current: IToken\r\n): AdaptivePredictError {\r\n    const nextTransitions = flatMap(\r\n        previous.configs.elements,\r\n        (e) => e.state.transitions\r\n    )\r\n    const nextTokenTypes = uniqBy(\r\n        nextTransitions\r\n            .filter((e): e is AtomTransition => e instanceof AtomTransition)\r\n            .map((e) => e.tokenType),\r\n        (e) => e.tokenTypeIdx\r\n    )\r\n    return {\r\n        actualToken: current,\r\n        possibleTokenTypes: nextTokenTypes,\r\n     
   tokenPath: path\r\n    }\r\n}\r\n\r\nfunction getExistingTargetState(\r\n    state: DFAState,\r\n    token: IToken\r\n): DFAState | undefined {\r\n    return state.edges[token.tokenTypeIdx]\r\n}\r\n\r\nfunction computeReachSet(\r\n    configs: ATNConfigSet,\r\n    token: IToken,\r\n    predicateSet: PredicateSet\r\n): ATNConfigSet {\r\n    const intermediate = new ATNConfigSet()\r\n    const skippedStopStates: ATNConfig[] = []\r\n\r\n    for (const c of configs.elements) {\r\n        if (predicateSet.is(c.alt) === false) {\r\n            continue\r\n        }\r\n        if (c.state.type === ATN_RULE_STOP) {\r\n            skippedStopStates.push(c)\r\n            continue\r\n        }\r\n        const transitionLength = c.state.transitions.length\r\n        for (let i = 0; i < transitionLength; i++) {\r\n            const transition = c.state.transitions[i]\r\n            const target = getReachableTarget(transition, token)\r\n            if (target !== undefined) {\r\n                intermediate.add({\r\n                    state: target,\r\n                    alt: c.alt,\r\n                    stack: c.stack\r\n                })\r\n            }\r\n        }\r\n    }\r\n\r\n    let reach: ATNConfigSet | undefined\r\n\r\n    if (skippedStopStates.length === 0 && intermediate.size === 1) {\r\n        reach = intermediate\r\n    }\r\n\r\n    if (reach === undefined) {\r\n        reach = new ATNConfigSet()\r\n        for (const c of intermediate.elements) {\r\n            closure(c, reach)\r\n        }\r\n    }\r\n\r\n    if (skippedStopStates.length > 0 && !hasConfigInRuleStopState(reach)) {\r\n        for (const c of skippedStopStates) {\r\n            reach.add(c)\r\n        }\r\n    }\r\n\r\n    return reach\r\n}\r\n\r\nfunction getReachableTarget(\r\n    transition: Transition,\r\n    token: IToken\r\n): ATNState | undefined {\r\n    if (\r\n        transition instanceof AtomTransition &&\r\n        tokenMatcher(token, transition.tokenType)\r\n    ) {\r\n        return transition.target\r\n    }\r\n    return undefined\r\n}\r\n\r\nfunction getUniqueAlt(\r\n    configs: ATNConfigSet,\r\n    predicateSet: PredicateSet\r\n): number | undefined {\r\n    let alt: number | undefined\r\n    for (const c of configs.elements) {\r\n        if (predicateSet.is(c.alt) === true) {\r\n            if (alt === undefined) {\r\n                alt = c.alt\r\n            } else if (alt !== c.alt) {\r\n                return undefined\r\n            }\r\n        }\r\n    }\r\n    return alt\r\n}\r\n\r\nfunction newDFAState(closure: ATNConfigSet): DFAState {\r\n    return {\r\n        configs: closure,\r\n        edges: {},\r\n        isAcceptState: false,\r\n        prediction: -1\r\n    }\r\n}\r\n\r\nfunction addDFAEdge(\r\n    dfa: DFA,\r\n    from: DFAState,\r\n    token: IToken,\r\n    to: DFAState\r\n): DFAState {\r\n    to = addDFAState(dfa, to)\r\n    from.edges[token.tokenTypeIdx] = to\r\n    return to\r\n}\r\n\r\nfunction addDFAState(dfa: DFA, state: DFAState): DFAState {\r\n    if (state === DFA_ERROR) {\r\n        return state\r\n    }\r\n    // Repetitions have the same config set\r\n    // Therefore, storing the key of the config in a map allows us to create a loop in our DFA\r\n    const mapKey = state.configs.key\r\n    const existing = dfa.states[mapKey]\r\n    if (existing !== undefined) {\r\n        return existing\r\n    }\r\n    state.configs.finalize()\r\n    dfa.states[mapKey] = state\r\n    return state\r\n}\r\n\r\nfunction computeStartState(atnState: ATNState): ATNConfigSet 
{\r\n    const configs = new ATNConfigSet()\r\n\r\n    const numberOfTransitions = atnState.transitions.length\r\n    for (let i = 0; i < numberOfTransitions; i++) {\r\n        const target = atnState.transitions[i].target\r\n        const config: ATNConfig = {\r\n            state: target,\r\n            alt: i,\r\n            stack: []\r\n        }\r\n        closure(config, configs)\r\n    }\r\n\r\n    return configs\r\n}\r\n\r\nfunction closure(config: ATNConfig, configs: ATNConfigSet): void {\r\n    const p = config.state\r\n\r\n    if (p.type === ATN_RULE_STOP) {\r\n        if (config.stack.length > 0) {\r\n            const atnStack = [...config.stack]\r\n            const followState = atnStack.pop()!\r\n            const followConfig: ATNConfig = {\r\n                state: followState,\r\n                alt: config.alt,\r\n                stack: atnStack\r\n            }\r\n            closure(followConfig, configs)\r\n        } else {\r\n            // Dipping into outer context, simply add the config\r\n            // This will stop computation once every config is at the rule stop state\r\n            configs.add(config)\r\n        }\r\n        return\r\n    }\r\n\r\n    if (!p.epsilonOnlyTransitions) {\r\n        configs.add(config)\r\n    }\r\n\r\n    const transitionLength = p.transitions.length\r\n    for (let i = 0; i < transitionLength; i++) {\r\n        const transition = p.transitions[i]\r\n        const c = getEpsilonTarget(config, transition)\r\n\r\n        if (c !== undefined) {\r\n            closure(c, configs)\r\n        }\r\n    }\r\n}\r\n\r\nfunction getEpsilonTarget(\r\n    config: ATNConfig,\r\n    transition: Transition\r\n): ATNConfig | undefined {\r\n    if (transition instanceof EpsilonTransition) {\r\n        return {\r\n            state: transition.target,\r\n            alt: config.alt,\r\n            stack: config.stack\r\n        }\r\n    } else if (transition instanceof RuleTransition) {\r\n        const stack = [...config.stack, transition.followState]\r\n        return {\r\n            state: transition.target,\r\n            alt: config.alt,\r\n            stack\r\n        }\r\n    }\r\n    return undefined\r\n}\r\n\r\nfunction hasConfigInRuleStopState(configs: ATNConfigSet): boolean {\r\n    for (const c of configs.elements) {\r\n        if (c.state.type === ATN_RULE_STOP) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n\r\nfunction allConfigsInRuleStopStates(configs: ATNConfigSet): boolean {\r\n    for (const c of configs.elements) {\r\n        if (c.state.type !== ATN_RULE_STOP) {\r\n            return false\r\n        }\r\n    }\r\n    return true\r\n}\r\n\r\nfunction hasConflictTerminatingPrediction(configs: ATNConfigSet): boolean {\r\n    if (allConfigsInRuleStopStates(configs)) {\r\n        return true\r\n    }\r\n    const altSets = getConflictingAltSets(configs.elements)\r\n    const heuristic =\r\n        hasConflictingAltSet(altSets) && !hasStateAssociatedWithOneAlt(altSets)\r\n    return heuristic\r\n}\r\n\r\nfunction getConflictingAltSets(\r\n    configs: readonly ATNConfig[]\r\n): Map> {\r\n    const configToAlts = new Map>()\r\n    for (const c of configs) {\r\n        const key = getATNConfigKey(c, false)\r\n        let alts = configToAlts.get(key)\r\n        if (alts === undefined) {\r\n            alts = {}\r\n            configToAlts.set(key, alts)\r\n        }\r\n        alts[c.alt] = true\r\n    }\r\n    return configToAlts\r\n}\r\n\r\nfunction hasConflictingAltSet(\r\n    altSets: Map>\r\n): 
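The closure() routine above expands a configuration by following epsilon edges with an unchanged call stack, pushing the follow state when a rule transition enters a sub-rule, and popping the stack at a rule-stop state. The miniature version below illustrates that control flow under invented names (sketchClosure, SketchATNState) and assumes an acyclic epsilon graph; atom edges are deliberately not followed, since token consumption belongs to the reach computation.

```ts
// Miniature version of the closure() computation above (illustrative only).

type SketchEdge =
  | { kind: 'epsilon'; target: SketchATNState }
  | { kind: 'rule'; target: SketchATNState; followState: SketchATNState }
  | { kind: 'atom'; target: SketchATNState; tokenTypeIdx: number };

interface SketchATNState {
  name: string;
  isRuleStop: boolean;
  edges: SketchEdge[];
}

interface SketchClosureConfig {
  state: SketchATNState;
  stack: SketchATNState[];
}

function sketchClosure(config: SketchClosureConfig, out: SketchClosureConfig[]): void {
  const { state, stack } = config;

  if (state.isRuleStop) {
    if (stack.length > 0) {
      // Return from the sub-rule: continue at the follow state with a popped stack.
      sketchClosure({ state: stack[stack.length - 1], stack: stack.slice(0, -1) }, out);
    } else {
      out.push(config); // no caller on the stack: keep the config as-is
    }
    return;
  }

  // A state that can consume a token is significant for the reach computation.
  if (state.edges.some((e) => e.kind === 'atom')) {
    out.push(config);
  }

  for (const edge of state.edges) {
    if (edge.kind === 'epsilon') {
      sketchClosure({ state: edge.target, stack }, out);
    } else if (edge.kind === 'rule') {
      // Entering a sub-rule pushes the state to resume at afterwards.
      sketchClosure({ state: edge.target, stack: [...stack, edge.followState] }, out);
    }
    // 'atom' edges are not followed here; token consumption happens elsewhere.
  }
}

const stop: SketchATNState = { name: 'R_stop', isRuleStop: true, edges: [] };
const match: SketchATNState = { name: 'R_match', isRuleStop: false, edges: [] };
match.edges.push({ kind: 'atom', target: stop, tokenTypeIdx: 1 });
const entry: SketchATNState = { name: 'entry', isRuleStop: false, edges: [{ kind: 'epsilon', target: match }] };

const result: SketchClosureConfig[] = [];
sketchClosure({ state: entry, stack: [] }, result);
console.log(result.map((c) => c.state.name)); // [ 'R_match' ]
```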
boolean {\r\n    for (const value of Array.from(altSets.values())) {\r\n        if (Object.keys(value).length > 1) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n\r\nfunction hasStateAssociatedWithOneAlt(\r\n    altSets: Map>\r\n): boolean {\r\n    for (const value of Array.from(altSets.values())) {\r\n        if (Object.keys(value).length === 1) {\r\n            return true\r\n        }\r\n    }\r\n    return false\r\n}\r\n", "/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\n'use strict';\nexport var DocumentUri;\n(function (DocumentUri) {\n    function is(value) {\n        return typeof value === 'string';\n    }\n    DocumentUri.is = is;\n})(DocumentUri || (DocumentUri = {}));\nexport var URI;\n(function (URI) {\n    function is(value) {\n        return typeof value === 'string';\n    }\n    URI.is = is;\n})(URI || (URI = {}));\nexport var integer;\n(function (integer) {\n    integer.MIN_VALUE = -2147483648;\n    integer.MAX_VALUE = 2147483647;\n    function is(value) {\n        return typeof value === 'number' && integer.MIN_VALUE <= value && value <= integer.MAX_VALUE;\n    }\n    integer.is = is;\n})(integer || (integer = {}));\nexport var uinteger;\n(function (uinteger) {\n    uinteger.MIN_VALUE = 0;\n    uinteger.MAX_VALUE = 2147483647;\n    function is(value) {\n        return typeof value === 'number' && uinteger.MIN_VALUE <= value && value <= uinteger.MAX_VALUE;\n    }\n    uinteger.is = is;\n})(uinteger || (uinteger = {}));\n/**\n * The Position namespace provides helper functions to work with\n * {@link Position} literals.\n */\nexport var Position;\n(function (Position) {\n    /**\n     * Creates a new Position literal from the given line and character.\n     * @param line The position's line.\n     * @param character The position's character.\n     */\n    function create(line, character) {\n        if (line === Number.MAX_VALUE) {\n            line = uinteger.MAX_VALUE;\n        }\n        if (character === Number.MAX_VALUE) {\n            character = uinteger.MAX_VALUE;\n        }\n        return { line, character };\n    }\n    Position.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Position} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Is.uinteger(candidate.line) && Is.uinteger(candidate.character);\n    }\n    Position.is = is;\n})(Position || (Position = {}));\n/**\n * The Range namespace provides helper functions to work with\n * {@link Range} literals.\n */\nexport var Range;\n(function (Range) {\n    function create(one, two, three, four) {\n        if (Is.uinteger(one) && Is.uinteger(two) && Is.uinteger(three) && Is.uinteger(four)) {\n            return { start: Position.create(one, two), end: Position.create(three, four) };\n        }\n        else if (Position.is(one) && Position.is(two)) {\n            return { start: one, end: two };\n        }\n        else {\n            throw new Error(`Range#create called with invalid arguments[${one}, ${two}, ${three}, ${four}]`);\n        }\n    }\n    Range.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Range} interface.\n     
*/\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Position.is(candidate.start) && Position.is(candidate.end);\n    }\n    Range.is = is;\n})(Range || (Range = {}));\n/**\n * The Location namespace provides helper functions to work with\n * {@link Location} literals.\n */\nexport var Location;\n(function (Location) {\n    /**\n     * Creates a Location literal.\n     * @param uri The location's uri.\n     * @param range The location's range.\n     */\n    function create(uri, range) {\n        return { uri, range };\n    }\n    Location.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Location} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && (Is.string(candidate.uri) || Is.undefined(candidate.uri));\n    }\n    Location.is = is;\n})(Location || (Location = {}));\n/**\n * The LocationLink namespace provides helper functions to work with\n * {@link LocationLink} literals.\n */\nexport var LocationLink;\n(function (LocationLink) {\n    /**\n     * Creates a LocationLink literal.\n     * @param targetUri The definition's uri.\n     * @param targetRange The full range of the definition.\n     * @param targetSelectionRange The span of the symbol definition at the target.\n     * @param originSelectionRange The span of the symbol being defined in the originating source file.\n     */\n    function create(targetUri, targetRange, targetSelectionRange, originSelectionRange) {\n        return { targetUri, targetRange, targetSelectionRange, originSelectionRange };\n    }\n    LocationLink.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link LocationLink} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.targetRange) && Is.string(candidate.targetUri)\n            && Range.is(candidate.targetSelectionRange)\n            && (Range.is(candidate.originSelectionRange) || Is.undefined(candidate.originSelectionRange));\n    }\n    LocationLink.is = is;\n})(LocationLink || (LocationLink = {}));\n/**\n * The Color namespace provides helper functions to work with\n * {@link Color} literals.\n */\nexport var Color;\n(function (Color) {\n    /**\n     * Creates a new Color literal.\n     */\n    function create(red, green, blue, alpha) {\n        return {\n            red,\n            green,\n            blue,\n            alpha,\n        };\n    }\n    Color.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Color} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.numberRange(candidate.red, 0, 1)\n            && Is.numberRange(candidate.green, 0, 1)\n            && Is.numberRange(candidate.blue, 0, 1)\n            && Is.numberRange(candidate.alpha, 0, 1);\n    }\n    Color.is = is;\n})(Color || (Color = {}));\n/**\n * The ColorInformation namespace provides helper functions to work with\n * {@link ColorInformation} literals.\n */\nexport var ColorInformation;\n(function (ColorInformation) {\n    /**\n     * Creates a new ColorInformation literal.\n     */\n    function create(range, color) {\n        return {\n            range,\n            color,\n        };\n    }\n    ColorInformation.create = create;\n    /**\n     * Checks whether the given literal conforms to 
the {@link ColorInformation} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && Color.is(candidate.color);\n    }\n    ColorInformation.is = is;\n})(ColorInformation || (ColorInformation = {}));\n/**\n * The Color namespace provides helper functions to work with\n * {@link ColorPresentation} literals.\n */\nexport var ColorPresentation;\n(function (ColorPresentation) {\n    /**\n     * Creates a new ColorInformation literal.\n     */\n    function create(label, textEdit, additionalTextEdits) {\n        return {\n            label,\n            textEdit,\n            additionalTextEdits,\n        };\n    }\n    ColorPresentation.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link ColorInformation} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.label)\n            && (Is.undefined(candidate.textEdit) || TextEdit.is(candidate))\n            && (Is.undefined(candidate.additionalTextEdits) || Is.typedArray(candidate.additionalTextEdits, TextEdit.is));\n    }\n    ColorPresentation.is = is;\n})(ColorPresentation || (ColorPresentation = {}));\n/**\n * A set of predefined range kinds.\n */\nexport var FoldingRangeKind;\n(function (FoldingRangeKind) {\n    /**\n     * Folding range for a comment\n     */\n    FoldingRangeKind.Comment = 'comment';\n    /**\n     * Folding range for an import or include\n     */\n    FoldingRangeKind.Imports = 'imports';\n    /**\n     * Folding range for a region (e.g. `#region`)\n     */\n    FoldingRangeKind.Region = 'region';\n})(FoldingRangeKind || (FoldingRangeKind = {}));\n/**\n * The folding range namespace provides helper functions to work with\n * {@link FoldingRange} literals.\n */\nexport var FoldingRange;\n(function (FoldingRange) {\n    /**\n     * Creates a new FoldingRange literal.\n     */\n    function create(startLine, endLine, startCharacter, endCharacter, kind, collapsedText) {\n        const result = {\n            startLine,\n            endLine\n        };\n        if (Is.defined(startCharacter)) {\n            result.startCharacter = startCharacter;\n        }\n        if (Is.defined(endCharacter)) {\n            result.endCharacter = endCharacter;\n        }\n        if (Is.defined(kind)) {\n            result.kind = kind;\n        }\n        if (Is.defined(collapsedText)) {\n            result.collapsedText = collapsedText;\n        }\n        return result;\n    }\n    FoldingRange.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link FoldingRange} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.uinteger(candidate.startLine) && Is.uinteger(candidate.startLine)\n            && (Is.undefined(candidate.startCharacter) || Is.uinteger(candidate.startCharacter))\n            && (Is.undefined(candidate.endCharacter) || Is.uinteger(candidate.endCharacter))\n            && (Is.undefined(candidate.kind) || Is.string(candidate.kind));\n    }\n    FoldingRange.is = is;\n})(FoldingRange || (FoldingRange = {}));\n/**\n * The DiagnosticRelatedInformation namespace provides helper functions to work with\n * {@link DiagnosticRelatedInformation} literals.\n */\nexport var DiagnosticRelatedInformation;\n(function (DiagnosticRelatedInformation) {\n    /**\n     * Creates a new DiagnosticRelatedInformation 
literal.\n     */\n    function create(location, message) {\n        return {\n            location,\n            message\n        };\n    }\n    DiagnosticRelatedInformation.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DiagnosticRelatedInformation} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Location.is(candidate.location) && Is.string(candidate.message);\n    }\n    DiagnosticRelatedInformation.is = is;\n})(DiagnosticRelatedInformation || (DiagnosticRelatedInformation = {}));\n/**\n * The diagnostic's severity.\n */\nexport var DiagnosticSeverity;\n(function (DiagnosticSeverity) {\n    /**\n     * Reports an error.\n     */\n    DiagnosticSeverity.Error = 1;\n    /**\n     * Reports a warning.\n     */\n    DiagnosticSeverity.Warning = 2;\n    /**\n     * Reports an information.\n     */\n    DiagnosticSeverity.Information = 3;\n    /**\n     * Reports a hint.\n     */\n    DiagnosticSeverity.Hint = 4;\n})(DiagnosticSeverity || (DiagnosticSeverity = {}));\n/**\n * The diagnostic tags.\n *\n * @since 3.15.0\n */\nexport var DiagnosticTag;\n(function (DiagnosticTag) {\n    /**\n     * Unused or unnecessary code.\n     *\n     * Clients are allowed to render diagnostics with this tag faded out instead of having\n     * an error squiggle.\n     */\n    DiagnosticTag.Unnecessary = 1;\n    /**\n     * Deprecated or obsolete code.\n     *\n     * Clients are allowed to rendered diagnostics with this tag strike through.\n     */\n    DiagnosticTag.Deprecated = 2;\n})(DiagnosticTag || (DiagnosticTag = {}));\n/**\n * The CodeDescription namespace provides functions to deal with descriptions for diagnostic codes.\n *\n * @since 3.16.0\n */\nexport var CodeDescription;\n(function (CodeDescription) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.href);\n    }\n    CodeDescription.is = is;\n})(CodeDescription || (CodeDescription = {}));\n/**\n * The Diagnostic namespace provides helper functions to work with\n * {@link Diagnostic} literals.\n */\nexport var Diagnostic;\n(function (Diagnostic) {\n    /**\n     * Creates a new Diagnostic literal.\n     */\n    function create(range, message, severity, code, source, relatedInformation) {\n        let result = { range, message };\n        if (Is.defined(severity)) {\n            result.severity = severity;\n        }\n        if (Is.defined(code)) {\n            result.code = code;\n        }\n        if (Is.defined(source)) {\n            result.source = source;\n        }\n        if (Is.defined(relatedInformation)) {\n            result.relatedInformation = relatedInformation;\n        }\n        return result;\n    }\n    Diagnostic.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Diagnostic} interface.\n     */\n    function is(value) {\n        var _a;\n        let candidate = value;\n        return Is.defined(candidate)\n            && Range.is(candidate.range)\n            && Is.string(candidate.message)\n            && (Is.number(candidate.severity) || Is.undefined(candidate.severity))\n            && (Is.integer(candidate.code) || Is.string(candidate.code) || Is.undefined(candidate.code))\n            && (Is.undefined(candidate.codeDescription) || (Is.string((_a = candidate.codeDescription) === null || _a === void 0 ? 
void 0 : _a.href)))\n            && (Is.string(candidate.source) || Is.undefined(candidate.source))\n            && (Is.undefined(candidate.relatedInformation) || Is.typedArray(candidate.relatedInformation, DiagnosticRelatedInformation.is));\n    }\n    Diagnostic.is = is;\n})(Diagnostic || (Diagnostic = {}));\n/**\n * The Command namespace provides helper functions to work with\n * {@link Command} literals.\n */\nexport var Command;\n(function (Command) {\n    /**\n     * Creates a new Command literal.\n     */\n    function create(title, command, ...args) {\n        let result = { title, command };\n        if (Is.defined(args) && args.length > 0) {\n            result.arguments = args;\n        }\n        return result;\n    }\n    Command.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link Command} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.title) && Is.string(candidate.command);\n    }\n    Command.is = is;\n})(Command || (Command = {}));\n/**\n * The TextEdit namespace provides helper function to create replace,\n * insert and delete edits more easily.\n */\nexport var TextEdit;\n(function (TextEdit) {\n    /**\n     * Creates a replace text edit.\n     * @param range The range of text to be replaced.\n     * @param newText The new text.\n     */\n    function replace(range, newText) {\n        return { range, newText };\n    }\n    TextEdit.replace = replace;\n    /**\n     * Creates an insert text edit.\n     * @param position The position to insert the text at.\n     * @param newText The text to be inserted.\n     */\n    function insert(position, newText) {\n        return { range: { start: position, end: position }, newText };\n    }\n    TextEdit.insert = insert;\n    /**\n     * Creates a delete text edit.\n     * @param range The range of text to be deleted.\n     */\n    function del(range) {\n        return { range, newText: '' };\n    }\n    TextEdit.del = del;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate)\n            && Is.string(candidate.newText)\n            && Range.is(candidate.range);\n    }\n    TextEdit.is = is;\n})(TextEdit || (TextEdit = {}));\nexport var ChangeAnnotation;\n(function (ChangeAnnotation) {\n    function create(label, needsConfirmation, description) {\n        const result = { label };\n        if (needsConfirmation !== undefined) {\n            result.needsConfirmation = needsConfirmation;\n        }\n        if (description !== undefined) {\n            result.description = description;\n        }\n        return result;\n    }\n    ChangeAnnotation.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Is.string(candidate.label) &&\n            (Is.boolean(candidate.needsConfirmation) || candidate.needsConfirmation === undefined) &&\n            (Is.string(candidate.description) || candidate.description === undefined);\n    }\n    ChangeAnnotation.is = is;\n})(ChangeAnnotation || (ChangeAnnotation = {}));\nexport var ChangeAnnotationIdentifier;\n(function (ChangeAnnotationIdentifier) {\n    function is(value) {\n        const candidate = value;\n        return Is.string(candidate);\n    }\n    ChangeAnnotationIdentifier.is = is;\n})(ChangeAnnotationIdentifier || (ChangeAnnotationIdentifier = {}));\nexport var AnnotatedTextEdit;\n(function (AnnotatedTextEdit) {\n    /**\n     * Creates an 
annotated replace text edit.\n     *\n     * @param range The range of text to be replaced.\n     * @param newText The new text.\n     * @param annotation The annotation.\n     */\n    function replace(range, newText, annotation) {\n        return { range, newText, annotationId: annotation };\n    }\n    AnnotatedTextEdit.replace = replace;\n    /**\n     * Creates an annotated insert text edit.\n     *\n     * @param position The position to insert the text at.\n     * @param newText The text to be inserted.\n     * @param annotation The annotation.\n     */\n    function insert(position, newText, annotation) {\n        return { range: { start: position, end: position }, newText, annotationId: annotation };\n    }\n    AnnotatedTextEdit.insert = insert;\n    /**\n     * Creates an annotated delete text edit.\n     *\n     * @param range The range of text to be deleted.\n     * @param annotation The annotation.\n     */\n    function del(range, annotation) {\n        return { range, newText: '', annotationId: annotation };\n    }\n    AnnotatedTextEdit.del = del;\n    function is(value) {\n        const candidate = value;\n        return TextEdit.is(candidate) && (ChangeAnnotation.is(candidate.annotationId) || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    AnnotatedTextEdit.is = is;\n})(AnnotatedTextEdit || (AnnotatedTextEdit = {}));\n/**\n * The TextDocumentEdit namespace provides helper function to create\n * an edit that manipulates a text document.\n */\nexport var TextDocumentEdit;\n(function (TextDocumentEdit) {\n    /**\n     * Creates a new `TextDocumentEdit`\n     */\n    function create(textDocument, edits) {\n        return { textDocument, edits };\n    }\n    TextDocumentEdit.create = create;\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate)\n            && OptionalVersionedTextDocumentIdentifier.is(candidate.textDocument)\n            && Array.isArray(candidate.edits);\n    }\n    TextDocumentEdit.is = is;\n})(TextDocumentEdit || (TextDocumentEdit = {}));\nexport var CreateFile;\n(function (CreateFile) {\n    function create(uri, options, annotation) {\n        let result = {\n            kind: 'create',\n            uri\n        };\n        if (options !== undefined && (options.overwrite !== undefined || options.ignoreIfExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) {\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    CreateFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'create' && Is.string(candidate.uri) && (candidate.options === undefined ||\n            ((candidate.options.overwrite === undefined || Is.boolean(candidate.options.overwrite)) && (candidate.options.ignoreIfExists === undefined || Is.boolean(candidate.options.ignoreIfExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    CreateFile.is = is;\n})(CreateFile || (CreateFile = {}));\nexport var RenameFile;\n(function (RenameFile) {\n    function create(oldUri, newUri, options, annotation) {\n        let result = {\n            kind: 'rename',\n            oldUri,\n            newUri\n        };\n        if (options !== undefined && (options.overwrite !== undefined || options.ignoreIfExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) 
{\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    RenameFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'rename' && Is.string(candidate.oldUri) && Is.string(candidate.newUri) && (candidate.options === undefined ||\n            ((candidate.options.overwrite === undefined || Is.boolean(candidate.options.overwrite)) && (candidate.options.ignoreIfExists === undefined || Is.boolean(candidate.options.ignoreIfExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    RenameFile.is = is;\n})(RenameFile || (RenameFile = {}));\nexport var DeleteFile;\n(function (DeleteFile) {\n    function create(uri, options, annotation) {\n        let result = {\n            kind: 'delete',\n            uri\n        };\n        if (options !== undefined && (options.recursive !== undefined || options.ignoreIfNotExists !== undefined)) {\n            result.options = options;\n        }\n        if (annotation !== undefined) {\n            result.annotationId = annotation;\n        }\n        return result;\n    }\n    DeleteFile.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && candidate.kind === 'delete' && Is.string(candidate.uri) && (candidate.options === undefined ||\n            ((candidate.options.recursive === undefined || Is.boolean(candidate.options.recursive)) && (candidate.options.ignoreIfNotExists === undefined || Is.boolean(candidate.options.ignoreIfNotExists)))) && (candidate.annotationId === undefined || ChangeAnnotationIdentifier.is(candidate.annotationId));\n    }\n    DeleteFile.is = is;\n})(DeleteFile || (DeleteFile = {}));\nexport var WorkspaceEdit;\n(function (WorkspaceEdit) {\n    function is(value) {\n        let candidate = value;\n        return candidate &&\n            (candidate.changes !== undefined || candidate.documentChanges !== undefined) &&\n            (candidate.documentChanges === undefined || candidate.documentChanges.every((change) => {\n                if (Is.string(change.kind)) {\n                    return CreateFile.is(change) || RenameFile.is(change) || DeleteFile.is(change);\n                }\n                else {\n                    return TextDocumentEdit.is(change);\n                }\n            }));\n    }\n    WorkspaceEdit.is = is;\n})(WorkspaceEdit || (WorkspaceEdit = {}));\nclass TextEditChangeImpl {\n    constructor(edits, changeAnnotations) {\n        this.edits = edits;\n        this.changeAnnotations = changeAnnotations;\n    }\n    insert(position, newText, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.insert(position, newText);\n        }\n        else if (ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.insert(position, newText, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.insert(position, newText, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    replace(range, newText, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.replace(range, newText);\n        }\n        else if 
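/* Illustrative usage sketch (not part of the bundled sources): CreateFile, RenameFile and
 * DeleteFile produce the resource-operation literals that a WorkspaceEdit with
 * `documentChanges` carries alongside TextDocumentEdits. URIs are made-up example values.
 *
 *   import { CreateFile, DeleteFile, RenameFile, WorkspaceEdit } from 'vscode-languageserver-types';
 *
 *   const edit: WorkspaceEdit = {
 *       documentChanges: [
 *           CreateFile.create('file:///tmp/new.ts', { ignoreIfExists: true }),
 *           RenameFile.create('file:///tmp/old.ts', 'file:///tmp/renamed.ts', { overwrite: false }),
 *           DeleteFile.create('file:///tmp/unused.ts', { ignoreIfNotExists: true })
 *       ]
 *   };
 *   WorkspaceEdit.is(edit); // => true
 */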
(ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.replace(range, newText, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.replace(range, newText, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    delete(range, annotation) {\n        let edit;\n        let id;\n        if (annotation === undefined) {\n            edit = TextEdit.del(range);\n        }\n        else if (ChangeAnnotationIdentifier.is(annotation)) {\n            id = annotation;\n            edit = AnnotatedTextEdit.del(range, annotation);\n        }\n        else {\n            this.assertChangeAnnotations(this.changeAnnotations);\n            id = this.changeAnnotations.manage(annotation);\n            edit = AnnotatedTextEdit.del(range, id);\n        }\n        this.edits.push(edit);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    add(edit) {\n        this.edits.push(edit);\n    }\n    all() {\n        return this.edits;\n    }\n    clear() {\n        this.edits.splice(0, this.edits.length);\n    }\n    assertChangeAnnotations(value) {\n        if (value === undefined) {\n            throw new Error(`Text edit change is not configured to manage change annotations.`);\n        }\n    }\n}\n/**\n * A helper class\n */\nclass ChangeAnnotations {\n    constructor(annotations) {\n        this._annotations = annotations === undefined ? Object.create(null) : annotations;\n        this._counter = 0;\n        this._size = 0;\n    }\n    all() {\n        return this._annotations;\n    }\n    get size() {\n        return this._size;\n    }\n    manage(idOrAnnotation, annotation) {\n        let id;\n        if (ChangeAnnotationIdentifier.is(idOrAnnotation)) {\n            id = idOrAnnotation;\n        }\n        else {\n            id = this.nextId();\n            annotation = idOrAnnotation;\n        }\n        if (this._annotations[id] !== undefined) {\n            throw new Error(`Id ${id} is already in use.`);\n        }\n        if (annotation === undefined) {\n            throw new Error(`No annotation provided for id ${id}`);\n        }\n        this._annotations[id] = annotation;\n        this._size++;\n        return id;\n    }\n    nextId() {\n        this._counter++;\n        return this._counter.toString();\n    }\n}\n/**\n * A workspace change helps constructing changes to a workspace.\n */\nexport class WorkspaceChange {\n    constructor(workspaceEdit) {\n        this._textEditChanges = Object.create(null);\n        if (workspaceEdit !== undefined) {\n            this._workspaceEdit = workspaceEdit;\n            if (workspaceEdit.documentChanges) {\n                this._changeAnnotations = new ChangeAnnotations(workspaceEdit.changeAnnotations);\n                workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n                workspaceEdit.documentChanges.forEach((change) => {\n                    if (TextDocumentEdit.is(change)) {\n                        const textEditChange = new TextEditChangeImpl(change.edits, this._changeAnnotations);\n                        this._textEditChanges[change.textDocument.uri] = textEditChange;\n                    }\n                });\n            }\n            else if (workspaceEdit.changes) {\n                
Object.keys(workspaceEdit.changes).forEach((key) => {\n                    const textEditChange = new TextEditChangeImpl(workspaceEdit.changes[key]);\n                    this._textEditChanges[key] = textEditChange;\n                });\n            }\n        }\n        else {\n            this._workspaceEdit = {};\n        }\n    }\n    /**\n     * Returns the underlying {@link WorkspaceEdit} literal\n     * use to be returned from a workspace edit operation like rename.\n     */\n    get edit() {\n        this.initDocumentChanges();\n        if (this._changeAnnotations !== undefined) {\n            if (this._changeAnnotations.size === 0) {\n                this._workspaceEdit.changeAnnotations = undefined;\n            }\n            else {\n                this._workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n            }\n        }\n        return this._workspaceEdit;\n    }\n    getTextEditChange(key) {\n        if (OptionalVersionedTextDocumentIdentifier.is(key)) {\n            this.initDocumentChanges();\n            if (this._workspaceEdit.documentChanges === undefined) {\n                throw new Error('Workspace edit is not configured for document changes.');\n            }\n            const textDocument = { uri: key.uri, version: key.version };\n            let result = this._textEditChanges[textDocument.uri];\n            if (!result) {\n                const edits = [];\n                const textDocumentEdit = {\n                    textDocument,\n                    edits\n                };\n                this._workspaceEdit.documentChanges.push(textDocumentEdit);\n                result = new TextEditChangeImpl(edits, this._changeAnnotations);\n                this._textEditChanges[textDocument.uri] = result;\n            }\n            return result;\n        }\n        else {\n            this.initChanges();\n            if (this._workspaceEdit.changes === undefined) {\n                throw new Error('Workspace edit is not configured for normal text edit changes.');\n            }\n            let result = this._textEditChanges[key];\n            if (!result) {\n                let edits = [];\n                this._workspaceEdit.changes[key] = edits;\n                result = new TextEditChangeImpl(edits);\n                this._textEditChanges[key] = result;\n            }\n            return result;\n        }\n    }\n    initDocumentChanges() {\n        if (this._workspaceEdit.documentChanges === undefined && this._workspaceEdit.changes === undefined) {\n            this._changeAnnotations = new ChangeAnnotations();\n            this._workspaceEdit.documentChanges = [];\n            this._workspaceEdit.changeAnnotations = this._changeAnnotations.all();\n        }\n    }\n    initChanges() {\n        if (this._workspaceEdit.documentChanges === undefined && this._workspaceEdit.changes === undefined) {\n            this._workspaceEdit.changes = Object.create(null);\n        }\n    }\n    createFile(uri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if 
(annotation === undefined) {\n            operation = CreateFile.create(uri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? annotation : this._changeAnnotations.manage(annotation);\n            operation = CreateFile.create(uri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    renameFile(oldUri, newUri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if (annotation === undefined) {\n            operation = RenameFile.create(oldUri, newUri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? annotation : this._changeAnnotations.manage(annotation);\n            operation = RenameFile.create(oldUri, newUri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n    deleteFile(uri, optionsOrAnnotation, options) {\n        this.initDocumentChanges();\n        if (this._workspaceEdit.documentChanges === undefined) {\n            throw new Error('Workspace edit is not configured for document changes.');\n        }\n        let annotation;\n        if (ChangeAnnotation.is(optionsOrAnnotation) || ChangeAnnotationIdentifier.is(optionsOrAnnotation)) {\n            annotation = optionsOrAnnotation;\n        }\n        else {\n            options = optionsOrAnnotation;\n        }\n        let operation;\n        let id;\n        if (annotation === undefined) {\n            operation = DeleteFile.create(uri, options);\n        }\n        else {\n            id = ChangeAnnotationIdentifier.is(annotation) ? 
annotation : this._changeAnnotations.manage(annotation);\n            operation = DeleteFile.create(uri, options, id);\n        }\n        this._workspaceEdit.documentChanges.push(operation);\n        if (id !== undefined) {\n            return id;\n        }\n    }\n}\n/**\n * The TextDocumentIdentifier namespace provides helper functions to work with\n * {@link TextDocumentIdentifier} literals.\n */\nexport var TextDocumentIdentifier;\n(function (TextDocumentIdentifier) {\n    /**\n     * Creates a new TextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     */\n    function create(uri) {\n        return { uri };\n    }\n    TextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link TextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri);\n    }\n    TextDocumentIdentifier.is = is;\n})(TextDocumentIdentifier || (TextDocumentIdentifier = {}));\n/**\n * The VersionedTextDocumentIdentifier namespace provides helper functions to work with\n * {@link VersionedTextDocumentIdentifier} literals.\n */\nexport var VersionedTextDocumentIdentifier;\n(function (VersionedTextDocumentIdentifier) {\n    /**\n     * Creates a new VersionedTextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     * @param version The document's version.\n     */\n    function create(uri, version) {\n        return { uri, version };\n    }\n    VersionedTextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link VersionedTextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && Is.integer(candidate.version);\n    }\n    VersionedTextDocumentIdentifier.is = is;\n})(VersionedTextDocumentIdentifier || (VersionedTextDocumentIdentifier = {}));\n/**\n * The OptionalVersionedTextDocumentIdentifier namespace provides helper functions to work with\n * {@link OptionalVersionedTextDocumentIdentifier} literals.\n */\nexport var OptionalVersionedTextDocumentIdentifier;\n(function (OptionalVersionedTextDocumentIdentifier) {\n    /**\n     * Creates a new OptionalVersionedTextDocumentIdentifier literal.\n     * @param uri The document's uri.\n     * @param version The document's version.\n     */\n    function create(uri, version) {\n        return { uri, version };\n    }\n    OptionalVersionedTextDocumentIdentifier.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link OptionalVersionedTextDocumentIdentifier} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && (candidate.version === null || Is.integer(candidate.version));\n    }\n    OptionalVersionedTextDocumentIdentifier.is = is;\n})(OptionalVersionedTextDocumentIdentifier || (OptionalVersionedTextDocumentIdentifier = {}));\n/**\n * The TextDocumentItem namespace provides helper functions to work with\n * {@link TextDocumentItem} literals.\n */\nexport var TextDocumentItem;\n(function (TextDocumentItem) {\n    /**\n     * Creates a new TextDocumentItem literal.\n     * @param uri The document's uri.\n     * @param languageId The document's language identifier.\n     * @param version The document's version number.\n     * @param text The document's text.\n     */\n    function create(uri, 
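/* Illustrative usage sketch (not part of the bundled sources): WorkspaceChange (defined above)
 * is the convenience builder for such edits. URIs, positions and options are made-up example values.
 *
 *   import {
 *       OptionalVersionedTextDocumentIdentifier, Position, Range, WorkspaceChange
 *   } from 'vscode-languageserver-types';
 *
 *   const change = new WorkspaceChange();
 *   const textChange = change.getTextEditChange(
 *       OptionalVersionedTextDocumentIdentifier.create('file:///tmp/example.ts', null));
 *   textChange.insert(Position.create(0, 0), 'export const answer = 42;');
 *   textChange.replace(Range.create(3, 0, 3, 5), 'answer');
 *   change.createFile('file:///tmp/extra.ts', { ignoreIfExists: true });
 *   const workspaceEdit = change.edit; // the WorkspaceEdit literal to return from e.g. a rename request
 */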
languageId, version, text) {\n        return { uri, languageId, version, text };\n    }\n    TextDocumentItem.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link TextDocumentItem} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && Is.string(candidate.languageId) && Is.integer(candidate.version) && Is.string(candidate.text);\n    }\n    TextDocumentItem.is = is;\n})(TextDocumentItem || (TextDocumentItem = {}));\n/**\n * Describes the content type that a client supports in various\n * result literals like `Hover`, `ParameterInfo` or `CompletionItem`.\n *\n * Please note that `MarkupKinds` must not start with a `$`. This kinds\n * are reserved for internal usage.\n */\nexport var MarkupKind;\n(function (MarkupKind) {\n    /**\n     * Plain text is supported as a content format\n     */\n    MarkupKind.PlainText = 'plaintext';\n    /**\n     * Markdown is supported as a content format\n     */\n    MarkupKind.Markdown = 'markdown';\n    /**\n     * Checks whether the given value is a value of the {@link MarkupKind} type.\n     */\n    function is(value) {\n        const candidate = value;\n        return candidate === MarkupKind.PlainText || candidate === MarkupKind.Markdown;\n    }\n    MarkupKind.is = is;\n})(MarkupKind || (MarkupKind = {}));\nexport var MarkupContent;\n(function (MarkupContent) {\n    /**\n     * Checks whether the given value conforms to the {@link MarkupContent} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(value) && MarkupKind.is(candidate.kind) && Is.string(candidate.value);\n    }\n    MarkupContent.is = is;\n})(MarkupContent || (MarkupContent = {}));\n/**\n * The kind of a completion entry.\n */\nexport var CompletionItemKind;\n(function (CompletionItemKind) {\n    CompletionItemKind.Text = 1;\n    CompletionItemKind.Method = 2;\n    CompletionItemKind.Function = 3;\n    CompletionItemKind.Constructor = 4;\n    CompletionItemKind.Field = 5;\n    CompletionItemKind.Variable = 6;\n    CompletionItemKind.Class = 7;\n    CompletionItemKind.Interface = 8;\n    CompletionItemKind.Module = 9;\n    CompletionItemKind.Property = 10;\n    CompletionItemKind.Unit = 11;\n    CompletionItemKind.Value = 12;\n    CompletionItemKind.Enum = 13;\n    CompletionItemKind.Keyword = 14;\n    CompletionItemKind.Snippet = 15;\n    CompletionItemKind.Color = 16;\n    CompletionItemKind.File = 17;\n    CompletionItemKind.Reference = 18;\n    CompletionItemKind.Folder = 19;\n    CompletionItemKind.EnumMember = 20;\n    CompletionItemKind.Constant = 21;\n    CompletionItemKind.Struct = 22;\n    CompletionItemKind.Event = 23;\n    CompletionItemKind.Operator = 24;\n    CompletionItemKind.TypeParameter = 25;\n})(CompletionItemKind || (CompletionItemKind = {}));\n/**\n * Defines whether the insert text in a completion item should be interpreted as\n * plain text or a snippet.\n */\nexport var InsertTextFormat;\n(function (InsertTextFormat) {\n    /**\n     * The primary text to be inserted is treated as a plain string.\n     */\n    InsertTextFormat.PlainText = 1;\n    /**\n     * The primary text to be inserted is treated as a snippet.\n     *\n     * A snippet can define tab stops and placeholders with `$1`, `$2`\n     * and `${3:foo}`. `$0` defines the final tab stop, it defaults to\n     * the end of the snippet. 
Placeholders with equal identifiers are linked,\n     * that is typing in one will update others too.\n     *\n     * See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax\n     */\n    InsertTextFormat.Snippet = 2;\n})(InsertTextFormat || (InsertTextFormat = {}));\n/**\n * Completion item tags are extra annotations that tweak the rendering of a completion\n * item.\n *\n * @since 3.15.0\n */\nexport var CompletionItemTag;\n(function (CompletionItemTag) {\n    /**\n     * Render a completion as obsolete, usually using a strike-out.\n     */\n    CompletionItemTag.Deprecated = 1;\n})(CompletionItemTag || (CompletionItemTag = {}));\n/**\n * The InsertReplaceEdit namespace provides functions to deal with insert / replace edits.\n *\n * @since 3.16.0\n */\nexport var InsertReplaceEdit;\n(function (InsertReplaceEdit) {\n    /**\n     * Creates a new insert / replace edit\n     */\n    function create(newText, insert, replace) {\n        return { newText, insert, replace };\n    }\n    InsertReplaceEdit.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link InsertReplaceEdit} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return candidate && Is.string(candidate.newText) && Range.is(candidate.insert) && Range.is(candidate.replace);\n    }\n    InsertReplaceEdit.is = is;\n})(InsertReplaceEdit || (InsertReplaceEdit = {}));\n/**\n * How whitespace and indentation is handled during completion\n * item insertion.\n *\n * @since 3.16.0\n */\nexport var InsertTextMode;\n(function (InsertTextMode) {\n    /**\n     * The insertion or replace strings is taken as it is. If the\n     * value is multi line the lines below the cursor will be\n     * inserted using the indentation defined in the string value.\n     * The client will not apply any kind of adjustments to the\n     * string.\n     */\n    InsertTextMode.asIs = 1;\n    /**\n     * The editor adjusts leading whitespace of new lines so that\n     * they match the indentation up to the cursor of the line for\n     * which the item is accepted.\n     *\n     * Consider a line like this: <2tabs><3tabs>foo. 
Accepting a\n     * multi line completion item is indented using 2 tabs and all\n     * following lines inserted will be indented using 2 tabs as well.\n     */\n    InsertTextMode.adjustIndentation = 2;\n})(InsertTextMode || (InsertTextMode = {}));\nexport var CompletionItemLabelDetails;\n(function (CompletionItemLabelDetails) {\n    function is(value) {\n        const candidate = value;\n        return candidate && (Is.string(candidate.detail) || candidate.detail === undefined) &&\n            (Is.string(candidate.description) || candidate.description === undefined);\n    }\n    CompletionItemLabelDetails.is = is;\n})(CompletionItemLabelDetails || (CompletionItemLabelDetails = {}));\n/**\n * The CompletionItem namespace provides functions to deal with\n * completion items.\n */\nexport var CompletionItem;\n(function (CompletionItem) {\n    /**\n     * Create a completion item and seed it with a label.\n     * @param label The completion item's label\n     */\n    function create(label) {\n        return { label };\n    }\n    CompletionItem.create = create;\n})(CompletionItem || (CompletionItem = {}));\n/**\n * The CompletionList namespace provides functions to deal with\n * completion lists.\n */\nexport var CompletionList;\n(function (CompletionList) {\n    /**\n     * Creates a new completion list.\n     *\n     * @param items The completion items.\n     * @param isIncomplete The list is not complete.\n     */\n    function create(items, isIncomplete) {\n        return { items: items ? items : [], isIncomplete: !!isIncomplete };\n    }\n    CompletionList.create = create;\n})(CompletionList || (CompletionList = {}));\nexport var MarkedString;\n(function (MarkedString) {\n    /**\n     * Creates a marked string from plain text.\n     *\n     * @param plainText The plain text.\n     */\n    function fromPlainText(plainText) {\n        return plainText.replace(/[\\\\`*_{}[\\]()#+\\-.!]/g, '\\\\$&'); // escape markdown syntax tokens: http://daringfireball.net/projects/markdown/syntax#backslash\n    }\n    MarkedString.fromPlainText = fromPlainText;\n    /**\n     * Checks whether the given value conforms to the {@link MarkedString} type.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.string(candidate) || (Is.objectLiteral(candidate) && Is.string(candidate.language) && Is.string(candidate.value));\n    }\n    MarkedString.is = is;\n})(MarkedString || (MarkedString = {}));\nexport var Hover;\n(function (Hover) {\n    /**\n     * Checks whether the given value conforms to the {@link Hover} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return !!candidate && Is.objectLiteral(candidate) && (MarkupContent.is(candidate.contents) ||\n            MarkedString.is(candidate.contents) ||\n            Is.typedArray(candidate.contents, MarkedString.is)) && (value.range === undefined || Range.is(value.range));\n    }\n    Hover.is = is;\n})(Hover || (Hover = {}));\n/**\n * The ParameterInformation namespace provides helper functions to work with\n * {@link ParameterInformation} literals.\n */\nexport var ParameterInformation;\n(function (ParameterInformation) {\n    /**\n     * Creates a new parameter information literal.\n     *\n     * @param label A label string.\n     * @param documentation A doc string.\n     */\n    function create(label, documentation) {\n        return documentation ? 
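/* Illustrative usage sketch (not part of the bundled sources): building a snippet-style
 * completion item with the helpers and constants defined above. The label and snippet
 * text are made-up example values.
 *
 *   import {
 *       CompletionItem, CompletionItemKind, CompletionList, InsertTextFormat, MarkupKind
 *   } from 'vscode-languageserver-types';
 *
 *   const item = CompletionItem.create('log');
 *   item.kind = CompletionItemKind.Snippet;
 *   item.insertText = 'console.log(${1:value});$0';
 *   item.insertTextFormat = InsertTextFormat.Snippet;
 *   item.documentation = { kind: MarkupKind.Markdown, value: 'Insert a console.log call.' };
 *   const list = CompletionList.create([item], false); // false = the list is complete
 */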
{ label, documentation } : { label };\n    }\n    ParameterInformation.create = create;\n})(ParameterInformation || (ParameterInformation = {}));\n/**\n * The SignatureInformation namespace provides helper functions to work with\n * {@link SignatureInformation} literals.\n */\nexport var SignatureInformation;\n(function (SignatureInformation) {\n    function create(label, documentation, ...parameters) {\n        let result = { label };\n        if (Is.defined(documentation)) {\n            result.documentation = documentation;\n        }\n        if (Is.defined(parameters)) {\n            result.parameters = parameters;\n        }\n        else {\n            result.parameters = [];\n        }\n        return result;\n    }\n    SignatureInformation.create = create;\n})(SignatureInformation || (SignatureInformation = {}));\n/**\n * A document highlight kind.\n */\nexport var DocumentHighlightKind;\n(function (DocumentHighlightKind) {\n    /**\n     * A textual occurrence.\n     */\n    DocumentHighlightKind.Text = 1;\n    /**\n     * Read-access of a symbol, like reading a variable.\n     */\n    DocumentHighlightKind.Read = 2;\n    /**\n     * Write-access of a symbol, like writing to a variable.\n     */\n    DocumentHighlightKind.Write = 3;\n})(DocumentHighlightKind || (DocumentHighlightKind = {}));\n/**\n * DocumentHighlight namespace to provide helper functions to work with\n * {@link DocumentHighlight} literals.\n */\nexport var DocumentHighlight;\n(function (DocumentHighlight) {\n    /**\n     * Create a DocumentHighlight object.\n     * @param range The range the highlight applies to.\n     * @param kind The highlight kind\n     */\n    function create(range, kind) {\n        let result = { range };\n        if (Is.number(kind)) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    DocumentHighlight.create = create;\n})(DocumentHighlight || (DocumentHighlight = {}));\n/**\n * A symbol kind.\n */\nexport var SymbolKind;\n(function (SymbolKind) {\n    SymbolKind.File = 1;\n    SymbolKind.Module = 2;\n    SymbolKind.Namespace = 3;\n    SymbolKind.Package = 4;\n    SymbolKind.Class = 5;\n    SymbolKind.Method = 6;\n    SymbolKind.Property = 7;\n    SymbolKind.Field = 8;\n    SymbolKind.Constructor = 9;\n    SymbolKind.Enum = 10;\n    SymbolKind.Interface = 11;\n    SymbolKind.Function = 12;\n    SymbolKind.Variable = 13;\n    SymbolKind.Constant = 14;\n    SymbolKind.String = 15;\n    SymbolKind.Number = 16;\n    SymbolKind.Boolean = 17;\n    SymbolKind.Array = 18;\n    SymbolKind.Object = 19;\n    SymbolKind.Key = 20;\n    SymbolKind.Null = 21;\n    SymbolKind.EnumMember = 22;\n    SymbolKind.Struct = 23;\n    SymbolKind.Event = 24;\n    SymbolKind.Operator = 25;\n    SymbolKind.TypeParameter = 26;\n})(SymbolKind || (SymbolKind = {}));\n/**\n * Symbol tags are extra annotations that tweak the rendering of a symbol.\n *\n * @since 3.16\n */\nexport var SymbolTag;\n(function (SymbolTag) {\n    /**\n     * Render a symbol as obsolete, usually using a strike-out.\n     */\n    SymbolTag.Deprecated = 1;\n})(SymbolTag || (SymbolTag = {}));\nexport var SymbolInformation;\n(function (SymbolInformation) {\n    /**\n     * Creates a new symbol information literal.\n     *\n     * @param name The name of the symbol.\n     * @param kind The kind of the symbol.\n     * @param range The range of the location of the symbol.\n     * @param uri The resource of the location of symbol.\n     * @param containerName The name of the symbol containing the symbol.\n     */\n    
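/* Illustrative usage sketch (not part of the bundled sources): a Hover literal accepted by
 * Hover.is above, and a SignatureInformation built with the create helpers. Labels, ranges
 * and documentation strings are made-up example values.
 *
 *   import {
 *       Hover, MarkupKind, ParameterInformation, Range, SignatureInformation
 *   } from 'vscode-languageserver-types';
 *
 *   const hover: Hover = {
 *       contents: { kind: MarkupKind.PlainText, value: 'add(a: number, b: number): number' },
 *       range: Range.create(4, 2, 4, 5)
 *   };
 *   const signature = SignatureInformation.create(
 *       'add(a: number, b: number): number', 'Adds two numbers.',
 *       ParameterInformation.create('a'), ParameterInformation.create('b'));
 */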
function create(name, kind, range, uri, containerName) {\n        let result = {\n            name,\n            kind,\n            location: { uri, range }\n        };\n        if (containerName) {\n            result.containerName = containerName;\n        }\n        return result;\n    }\n    SymbolInformation.create = create;\n})(SymbolInformation || (SymbolInformation = {}));\nexport var WorkspaceSymbol;\n(function (WorkspaceSymbol) {\n    /**\n     * Create a new workspace symbol.\n     *\n     * @param name The name of the symbol.\n     * @param kind The kind of the symbol.\n     * @param uri The resource of the location of the symbol.\n     * @param range An options range of the location.\n     * @returns A WorkspaceSymbol.\n     */\n    function create(name, kind, uri, range) {\n        return range !== undefined\n            ? { name, kind, location: { uri, range } }\n            : { name, kind, location: { uri } };\n    }\n    WorkspaceSymbol.create = create;\n})(WorkspaceSymbol || (WorkspaceSymbol = {}));\nexport var DocumentSymbol;\n(function (DocumentSymbol) {\n    /**\n     * Creates a new symbol information literal.\n     *\n     * @param name The name of the symbol.\n     * @param detail The detail of the symbol.\n     * @param kind The kind of the symbol.\n     * @param range The range of the symbol.\n     * @param selectionRange The selectionRange of the symbol.\n     * @param children Children of the symbol.\n     */\n    function create(name, detail, kind, range, selectionRange, children) {\n        let result = {\n            name,\n            detail,\n            kind,\n            range,\n            selectionRange\n        };\n        if (children !== undefined) {\n            result.children = children;\n        }\n        return result;\n    }\n    DocumentSymbol.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DocumentSymbol} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return candidate &&\n            Is.string(candidate.name) && Is.number(candidate.kind) &&\n            Range.is(candidate.range) && Range.is(candidate.selectionRange) &&\n            (candidate.detail === undefined || Is.string(candidate.detail)) &&\n            (candidate.deprecated === undefined || Is.boolean(candidate.deprecated)) &&\n            (candidate.children === undefined || Array.isArray(candidate.children)) &&\n            (candidate.tags === undefined || Array.isArray(candidate.tags));\n    }\n    DocumentSymbol.is = is;\n})(DocumentSymbol || (DocumentSymbol = {}));\n/**\n * A set of predefined code action kinds\n */\nexport var CodeActionKind;\n(function (CodeActionKind) {\n    /**\n     * Empty kind.\n     */\n    CodeActionKind.Empty = '';\n    /**\n     * Base kind for quickfix actions: 'quickfix'\n     */\n    CodeActionKind.QuickFix = 'quickfix';\n    /**\n     * Base kind for refactoring actions: 'refactor'\n     */\n    CodeActionKind.Refactor = 'refactor';\n    /**\n     * Base kind for refactoring extraction actions: 'refactor.extract'\n     *\n     * Example extract actions:\n     *\n     * - Extract method\n     * - Extract function\n     * - Extract variable\n     * - Extract interface from class\n     * - ...\n     */\n    CodeActionKind.RefactorExtract = 'refactor.extract';\n    /**\n     * Base kind for refactoring inline actions: 'refactor.inline'\n     *\n     * Example inline actions:\n     *\n     * - Inline function\n     * - Inline variable\n     * - Inline constant\n     * - 
...\n     */\n    CodeActionKind.RefactorInline = 'refactor.inline';\n    /**\n     * Base kind for refactoring rewrite actions: 'refactor.rewrite'\n     *\n     * Example rewrite actions:\n     *\n     * - Convert JavaScript function to class\n     * - Add or remove parameter\n     * - Encapsulate field\n     * - Make method static\n     * - Move method to base class\n     * - ...\n     */\n    CodeActionKind.RefactorRewrite = 'refactor.rewrite';\n    /**\n     * Base kind for source actions: `source`\n     *\n     * Source code actions apply to the entire file.\n     */\n    CodeActionKind.Source = 'source';\n    /**\n     * Base kind for an organize imports source action: `source.organizeImports`\n     */\n    CodeActionKind.SourceOrganizeImports = 'source.organizeImports';\n    /**\n     * Base kind for auto-fix source actions: `source.fixAll`.\n     *\n     * Fix all actions automatically fix errors that have a clear fix that do not require user input.\n     * They should not suppress errors or perform unsafe fixes such as generating new types or classes.\n     *\n     * @since 3.15.0\n     */\n    CodeActionKind.SourceFixAll = 'source.fixAll';\n})(CodeActionKind || (CodeActionKind = {}));\n/**\n * The reason why code actions were requested.\n *\n * @since 3.17.0\n */\nexport var CodeActionTriggerKind;\n(function (CodeActionTriggerKind) {\n    /**\n     * Code actions were explicitly requested by the user or by an extension.\n     */\n    CodeActionTriggerKind.Invoked = 1;\n    /**\n     * Code actions were requested automatically.\n     *\n     * This typically happens when current selection in a file changes, but can\n     * also be triggered when file content changes.\n     */\n    CodeActionTriggerKind.Automatic = 2;\n})(CodeActionTriggerKind || (CodeActionTriggerKind = {}));\n/**\n * The CodeActionContext namespace provides helper functions to work with\n * {@link CodeActionContext} literals.\n */\nexport var CodeActionContext;\n(function (CodeActionContext) {\n    /**\n     * Creates a new CodeActionContext literal.\n     */\n    function create(diagnostics, only, triggerKind) {\n        let result = { diagnostics };\n        if (only !== undefined && only !== null) {\n            result.only = only;\n        }\n        if (triggerKind !== undefined && triggerKind !== null) {\n            result.triggerKind = triggerKind;\n        }\n        return result;\n    }\n    CodeActionContext.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link CodeActionContext} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.typedArray(candidate.diagnostics, Diagnostic.is)\n            && (candidate.only === undefined || Is.typedArray(candidate.only, Is.string))\n            && (candidate.triggerKind === undefined || candidate.triggerKind === CodeActionTriggerKind.Invoked || candidate.triggerKind === CodeActionTriggerKind.Automatic);\n    }\n    CodeActionContext.is = is;\n})(CodeActionContext || (CodeActionContext = {}));\nexport var CodeAction;\n(function (CodeAction) {\n    function create(title, kindOrCommandOrEdit, kind) {\n        let result = { title };\n        let checkKind = true;\n        if (typeof kindOrCommandOrEdit === 'string') {\n            checkKind = false;\n            result.kind = kindOrCommandOrEdit;\n        }\n        else if (Command.is(kindOrCommandOrEdit)) {\n            result.command = kindOrCommandOrEdit;\n        }\n        else {\n            result.edit = 
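/* Illustrative usage sketch (not part of the bundled sources): a DocumentSymbol built with the
 * helper above, plus a CodeActionContext limited to quick fixes. Names, ranges and the empty
 * diagnostics array are made-up example values.
 *
 *   import {
 *       CodeActionContext, CodeActionKind, CodeActionTriggerKind, DocumentSymbol, Range, SymbolKind
 *   } from 'vscode-languageserver-types';
 *
 *   const symbol = DocumentSymbol.create(
 *       'parse', 'parse(text: string): Model', SymbolKind.Function,
 *       Range.create(10, 0, 42, 1),    // full range of the declaration
 *       Range.create(10, 9, 10, 14));  // selection range (the identifier)
 *   const context = CodeActionContext.create([], [CodeActionKind.QuickFix], CodeActionTriggerKind.Invoked);
 */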
kindOrCommandOrEdit;\n        }\n        if (checkKind && kind !== undefined) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    CodeAction.create = create;\n    function is(value) {\n        let candidate = value;\n        return candidate && Is.string(candidate.title) &&\n            (candidate.diagnostics === undefined || Is.typedArray(candidate.diagnostics, Diagnostic.is)) &&\n            (candidate.kind === undefined || Is.string(candidate.kind)) &&\n            (candidate.edit !== undefined || candidate.command !== undefined) &&\n            (candidate.command === undefined || Command.is(candidate.command)) &&\n            (candidate.isPreferred === undefined || Is.boolean(candidate.isPreferred)) &&\n            (candidate.edit === undefined || WorkspaceEdit.is(candidate.edit));\n    }\n    CodeAction.is = is;\n})(CodeAction || (CodeAction = {}));\n/**\n * The CodeLens namespace provides helper functions to work with\n * {@link CodeLens} literals.\n */\nexport var CodeLens;\n(function (CodeLens) {\n    /**\n     * Creates a new CodeLens literal.\n     */\n    function create(range, data) {\n        let result = { range };\n        if (Is.defined(data)) {\n            result.data = data;\n        }\n        return result;\n    }\n    CodeLens.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link CodeLens} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Range.is(candidate.range) && (Is.undefined(candidate.command) || Command.is(candidate.command));\n    }\n    CodeLens.is = is;\n})(CodeLens || (CodeLens = {}));\n/**\n * The FormattingOptions namespace provides helper functions to work with\n * {@link FormattingOptions} literals.\n */\nexport var FormattingOptions;\n(function (FormattingOptions) {\n    /**\n     * Creates a new FormattingOptions literal.\n     */\n    function create(tabSize, insertSpaces) {\n        return { tabSize, insertSpaces };\n    }\n    FormattingOptions.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link FormattingOptions} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.uinteger(candidate.tabSize) && Is.boolean(candidate.insertSpaces);\n    }\n    FormattingOptions.is = is;\n})(FormattingOptions || (FormattingOptions = {}));\n/**\n * The DocumentLink namespace provides helper functions to work with\n * {@link DocumentLink} literals.\n */\nexport var DocumentLink;\n(function (DocumentLink) {\n    /**\n     * Creates a new DocumentLink literal.\n     */\n    function create(range, target, data) {\n        return { range, target, data };\n    }\n    DocumentLink.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link DocumentLink} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Range.is(candidate.range) && (Is.undefined(candidate.target) || Is.string(candidate.target));\n    }\n    DocumentLink.is = is;\n})(DocumentLink || (DocumentLink = {}));\n/**\n * The SelectionRange namespace provides helper function to work with\n * SelectionRange literals.\n */\nexport var SelectionRange;\n(function (SelectionRange) {\n    /**\n     * Creates a new SelectionRange\n     * @param range the range.\n     * @param parent an optional parent.\n     */\n    function create(range, parent) {\n        return { range, parent };\n    
}\n    SelectionRange.create = create;\n    function is(value) {\n        let candidate = value;\n        return Is.objectLiteral(candidate) && Range.is(candidate.range) && (candidate.parent === undefined || SelectionRange.is(candidate.parent));\n    }\n    SelectionRange.is = is;\n})(SelectionRange || (SelectionRange = {}));\n/**\n * A set of predefined token types. This set is not fixed\n * an clients can specify additional token types via the\n * corresponding client capabilities.\n *\n * @since 3.16.0\n */\nexport var SemanticTokenTypes;\n(function (SemanticTokenTypes) {\n    SemanticTokenTypes[\"namespace\"] = \"namespace\";\n    /**\n     * Represents a generic type. Acts as a fallback for types which can't be mapped to\n     * a specific type like class or enum.\n     */\n    SemanticTokenTypes[\"type\"] = \"type\";\n    SemanticTokenTypes[\"class\"] = \"class\";\n    SemanticTokenTypes[\"enum\"] = \"enum\";\n    SemanticTokenTypes[\"interface\"] = \"interface\";\n    SemanticTokenTypes[\"struct\"] = \"struct\";\n    SemanticTokenTypes[\"typeParameter\"] = \"typeParameter\";\n    SemanticTokenTypes[\"parameter\"] = \"parameter\";\n    SemanticTokenTypes[\"variable\"] = \"variable\";\n    SemanticTokenTypes[\"property\"] = \"property\";\n    SemanticTokenTypes[\"enumMember\"] = \"enumMember\";\n    SemanticTokenTypes[\"event\"] = \"event\";\n    SemanticTokenTypes[\"function\"] = \"function\";\n    SemanticTokenTypes[\"method\"] = \"method\";\n    SemanticTokenTypes[\"macro\"] = \"macro\";\n    SemanticTokenTypes[\"keyword\"] = \"keyword\";\n    SemanticTokenTypes[\"modifier\"] = \"modifier\";\n    SemanticTokenTypes[\"comment\"] = \"comment\";\n    SemanticTokenTypes[\"string\"] = \"string\";\n    SemanticTokenTypes[\"number\"] = \"number\";\n    SemanticTokenTypes[\"regexp\"] = \"regexp\";\n    SemanticTokenTypes[\"operator\"] = \"operator\";\n    /**\n     * @since 3.17.0\n     */\n    SemanticTokenTypes[\"decorator\"] = \"decorator\";\n})(SemanticTokenTypes || (SemanticTokenTypes = {}));\n/**\n * A set of predefined token modifiers. 
This set is not fixed\n * an clients can specify additional token types via the\n * corresponding client capabilities.\n *\n * @since 3.16.0\n */\nexport var SemanticTokenModifiers;\n(function (SemanticTokenModifiers) {\n    SemanticTokenModifiers[\"declaration\"] = \"declaration\";\n    SemanticTokenModifiers[\"definition\"] = \"definition\";\n    SemanticTokenModifiers[\"readonly\"] = \"readonly\";\n    SemanticTokenModifiers[\"static\"] = \"static\";\n    SemanticTokenModifiers[\"deprecated\"] = \"deprecated\";\n    SemanticTokenModifiers[\"abstract\"] = \"abstract\";\n    SemanticTokenModifiers[\"async\"] = \"async\";\n    SemanticTokenModifiers[\"modification\"] = \"modification\";\n    SemanticTokenModifiers[\"documentation\"] = \"documentation\";\n    SemanticTokenModifiers[\"defaultLibrary\"] = \"defaultLibrary\";\n})(SemanticTokenModifiers || (SemanticTokenModifiers = {}));\n/**\n * @since 3.16.0\n */\nexport var SemanticTokens;\n(function (SemanticTokens) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && (candidate.resultId === undefined || typeof candidate.resultId === 'string') &&\n            Array.isArray(candidate.data) && (candidate.data.length === 0 || typeof candidate.data[0] === 'number');\n    }\n    SemanticTokens.is = is;\n})(SemanticTokens || (SemanticTokens = {}));\n/**\n * The InlineValueText namespace provides functions to deal with InlineValueTexts.\n *\n * @since 3.17.0\n */\nexport var InlineValueText;\n(function (InlineValueText) {\n    /**\n     * Creates a new InlineValueText literal.\n     */\n    function create(range, text) {\n        return { range, text };\n    }\n    InlineValueText.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range) && Is.string(candidate.text);\n    }\n    InlineValueText.is = is;\n})(InlineValueText || (InlineValueText = {}));\n/**\n * The InlineValueVariableLookup namespace provides functions to deal with InlineValueVariableLookups.\n *\n * @since 3.17.0\n */\nexport var InlineValueVariableLookup;\n(function (InlineValueVariableLookup) {\n    /**\n     * Creates a new InlineValueText literal.\n     */\n    function create(range, variableName, caseSensitiveLookup) {\n        return { range, variableName, caseSensitiveLookup };\n    }\n    InlineValueVariableLookup.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range) && Is.boolean(candidate.caseSensitiveLookup)\n            && (Is.string(candidate.variableName) || candidate.variableName === undefined);\n    }\n    InlineValueVariableLookup.is = is;\n})(InlineValueVariableLookup || (InlineValueVariableLookup = {}));\n/**\n * The InlineValueEvaluatableExpression namespace provides functions to deal with InlineValueEvaluatableExpression.\n *\n * @since 3.17.0\n */\nexport var InlineValueEvaluatableExpression;\n(function (InlineValueEvaluatableExpression) {\n    /**\n     * Creates a new InlineValueEvaluatableExpression literal.\n     */\n    function create(range, expression) {\n        return { range, expression };\n    }\n    InlineValueEvaluatableExpression.create = create;\n    function is(value) {\n        const candidate = value;\n        return candidate !== undefined && candidate !== null && Range.is(candidate.range)\n            && (Is.string(candidate.expression) || candidate.expression === 
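/* Illustrative usage sketch (not part of the bundled sources): SelectionRange values (created
 * with the helper above) form a parent chain that an editor walks when expanding the selection.
 * The ranges are made-up example values.
 *
 *   import { Range, SelectionRange } from 'vscode-languageserver-types';
 *
 *   const statement = SelectionRange.create(Range.create(2, 0, 2, 20));
 *   const word = SelectionRange.create(Range.create(2, 4, 2, 9), statement);
 *   SelectionRange.is(word); // => true; the parent chain is validated recursively
 */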
undefined);\n    }\n    InlineValueEvaluatableExpression.is = is;\n})(InlineValueEvaluatableExpression || (InlineValueEvaluatableExpression = {}));\n/**\n * The InlineValueContext namespace provides helper functions to work with\n * {@link InlineValueContext} literals.\n *\n * @since 3.17.0\n */\nexport var InlineValueContext;\n(function (InlineValueContext) {\n    /**\n     * Creates a new InlineValueContext literal.\n     */\n    function create(frameId, stoppedLocation) {\n        return { frameId, stoppedLocation };\n    }\n    InlineValueContext.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link InlineValueContext} interface.\n     */\n    function is(value) {\n        const candidate = value;\n        return Is.defined(candidate) && Range.is(value.stoppedLocation);\n    }\n    InlineValueContext.is = is;\n})(InlineValueContext || (InlineValueContext = {}));\n/**\n * Inlay hint kinds.\n *\n * @since 3.17.0\n */\nexport var InlayHintKind;\n(function (InlayHintKind) {\n    /**\n     * An inlay hint that for a type annotation.\n     */\n    InlayHintKind.Type = 1;\n    /**\n     * An inlay hint that is for a parameter.\n     */\n    InlayHintKind.Parameter = 2;\n    function is(value) {\n        return value === 1 || value === 2;\n    }\n    InlayHintKind.is = is;\n})(InlayHintKind || (InlayHintKind = {}));\nexport var InlayHintLabelPart;\n(function (InlayHintLabelPart) {\n    function create(value) {\n        return { value };\n    }\n    InlayHintLabelPart.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate)\n            && (candidate.tooltip === undefined || Is.string(candidate.tooltip) || MarkupContent.is(candidate.tooltip))\n            && (candidate.location === undefined || Location.is(candidate.location))\n            && (candidate.command === undefined || Command.is(candidate.command));\n    }\n    InlayHintLabelPart.is = is;\n})(InlayHintLabelPart || (InlayHintLabelPart = {}));\nexport var InlayHint;\n(function (InlayHint) {\n    function create(position, label, kind) {\n        const result = { position, label };\n        if (kind !== undefined) {\n            result.kind = kind;\n        }\n        return result;\n    }\n    InlayHint.create = create;\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && Position.is(candidate.position)\n            && (Is.string(candidate.label) || Is.typedArray(candidate.label, InlayHintLabelPart.is))\n            && (candidate.kind === undefined || InlayHintKind.is(candidate.kind))\n            && (candidate.textEdits === undefined) || Is.typedArray(candidate.textEdits, TextEdit.is)\n            && (candidate.tooltip === undefined || Is.string(candidate.tooltip) || MarkupContent.is(candidate.tooltip))\n            && (candidate.paddingLeft === undefined || Is.boolean(candidate.paddingLeft))\n            && (candidate.paddingRight === undefined || Is.boolean(candidate.paddingRight));\n    }\n    InlayHint.is = is;\n})(InlayHint || (InlayHint = {}));\nexport var StringValue;\n(function (StringValue) {\n    function createSnippet(value) {\n        return { kind: 'snippet', value };\n    }\n    StringValue.createSnippet = createSnippet;\n})(StringValue || (StringValue = {}));\nexport var InlineCompletionItem;\n(function (InlineCompletionItem) {\n    function create(insertText, filterText, range, command) {\n        return { insertText, filterText, range, command };\n    }\n    
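/* Illustrative usage sketch (not part of the bundled sources): an inlay type hint assembled from
 * the InlayHint helpers above. The position and label parts are made-up example values.
 *
 *   import { InlayHint, InlayHintKind, InlayHintLabelPart, Position } from 'vscode-languageserver-types';
 *
 *   const hint = InlayHint.create(
 *       Position.create(3, 18),
 *       [InlayHintLabelPart.create(': '), InlayHintLabelPart.create('number')],
 *       InlayHintKind.Type);
 *   hint.paddingLeft = true;
 */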
InlineCompletionItem.create = create;\n})(InlineCompletionItem || (InlineCompletionItem = {}));\nexport var InlineCompletionList;\n(function (InlineCompletionList) {\n    function create(items) {\n        return { items };\n    }\n    InlineCompletionList.create = create;\n})(InlineCompletionList || (InlineCompletionList = {}));\n/**\n * Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered.\n *\n * @since 3.18.0\n * @proposed\n */\nexport var InlineCompletionTriggerKind;\n(function (InlineCompletionTriggerKind) {\n    /**\n     * Completion was triggered explicitly by a user gesture.\n     */\n    InlineCompletionTriggerKind.Invoked = 0;\n    /**\n     * Completion was triggered automatically while editing.\n     */\n    InlineCompletionTriggerKind.Automatic = 1;\n})(InlineCompletionTriggerKind || (InlineCompletionTriggerKind = {}));\nexport var SelectedCompletionInfo;\n(function (SelectedCompletionInfo) {\n    function create(range, text) {\n        return { range, text };\n    }\n    SelectedCompletionInfo.create = create;\n})(SelectedCompletionInfo || (SelectedCompletionInfo = {}));\nexport var InlineCompletionContext;\n(function (InlineCompletionContext) {\n    function create(triggerKind, selectedCompletionInfo) {\n        return { triggerKind, selectedCompletionInfo };\n    }\n    InlineCompletionContext.create = create;\n})(InlineCompletionContext || (InlineCompletionContext = {}));\nexport var WorkspaceFolder;\n(function (WorkspaceFolder) {\n    function is(value) {\n        const candidate = value;\n        return Is.objectLiteral(candidate) && URI.is(candidate.uri) && Is.string(candidate.name);\n    }\n    WorkspaceFolder.is = is;\n})(WorkspaceFolder || (WorkspaceFolder = {}));\nexport const EOL = ['\\n', '\\r\\n', '\\r'];\n/**\n * @deprecated Use the text document from the new vscode-languageserver-textdocument package.\n */\nexport var TextDocument;\n(function (TextDocument) {\n    /**\n     * Creates a new ITextDocument literal from the given uri and content.\n     * @param uri The document's uri.\n     * @param languageId The document's language Id.\n     * @param version The document's version.\n     * @param content The document's content.\n     */\n    function create(uri, languageId, version, content) {\n        return new FullTextDocument(uri, languageId, version, content);\n    }\n    TextDocument.create = create;\n    /**\n     * Checks whether the given literal conforms to the {@link ITextDocument} interface.\n     */\n    function is(value) {\n        let candidate = value;\n        return Is.defined(candidate) && Is.string(candidate.uri) && (Is.undefined(candidate.languageId) || Is.string(candidate.languageId)) && Is.uinteger(candidate.lineCount)\n            && Is.func(candidate.getText) && Is.func(candidate.positionAt) && Is.func(candidate.offsetAt) ? 
true : false;\n    }\n    TextDocument.is = is;\n    function applyEdits(document, edits) {\n        let text = document.getText();\n        let sortedEdits = mergeSort(edits, (a, b) => {\n            let diff = a.range.start.line - b.range.start.line;\n            if (diff === 0) {\n                return a.range.start.character - b.range.start.character;\n            }\n            return diff;\n        });\n        let lastModifiedOffset = text.length;\n        for (let i = sortedEdits.length - 1; i >= 0; i--) {\n            let e = sortedEdits[i];\n            let startOffset = document.offsetAt(e.range.start);\n            let endOffset = document.offsetAt(e.range.end);\n            if (endOffset <= lastModifiedOffset) {\n                text = text.substring(0, startOffset) + e.newText + text.substring(endOffset, text.length);\n            }\n            else {\n                throw new Error('Overlapping edit');\n            }\n            lastModifiedOffset = startOffset;\n        }\n        return text;\n    }\n    TextDocument.applyEdits = applyEdits;\n    function mergeSort(data, compare) {\n        if (data.length <= 1) {\n            // sorted\n            return data;\n        }\n        const p = (data.length / 2) | 0;\n        const left = data.slice(0, p);\n        const right = data.slice(p);\n        mergeSort(left, compare);\n        mergeSort(right, compare);\n        let leftIdx = 0;\n        let rightIdx = 0;\n        let i = 0;\n        while (leftIdx < left.length && rightIdx < right.length) {\n            let ret = compare(left[leftIdx], right[rightIdx]);\n            if (ret <= 0) {\n                // smaller_equal -> take left to preserve order\n                data[i++] = left[leftIdx++];\n            }\n            else {\n                // greater -> take right\n                data[i++] = right[rightIdx++];\n            }\n        }\n        while (leftIdx < left.length) {\n            data[i++] = left[leftIdx++];\n        }\n        while (rightIdx < right.length) {\n            data[i++] = right[rightIdx++];\n        }\n        return data;\n    }\n})(TextDocument || (TextDocument = {}));\n/**\n * @deprecated Use the text document from the new vscode-languageserver-textdocument package.\n */\nclass FullTextDocument {\n    constructor(uri, languageId, version, content) {\n        this._uri = uri;\n        this._languageId = languageId;\n        this._version = version;\n        this._content = content;\n        this._lineOffsets = undefined;\n    }\n    get uri() {\n        return this._uri;\n    }\n    get languageId() {\n        return this._languageId;\n    }\n    get version() {\n        return this._version;\n    }\n    getText(range) {\n        if (range) {\n            let start = this.offsetAt(range.start);\n            let end = this.offsetAt(range.end);\n            return this._content.substring(start, end);\n        }\n        return this._content;\n    }\n    update(event, version) {\n        this._content = event.text;\n        this._version = version;\n        this._lineOffsets = undefined;\n    }\n    getLineOffsets() {\n        if (this._lineOffsets === undefined) {\n            let lineOffsets = [];\n            let text = this._content;\n            let isLineStart = true;\n            for (let i = 0; i < text.length; i++) {\n                if (isLineStart) {\n                    lineOffsets.push(i);\n                    isLineStart = false;\n                }\n                let ch = text.charAt(i);\n                isLineStart = 
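/* Illustrative usage sketch (not part of the bundled sources): the deprecated TextDocument
 * namespace above can still apply non-overlapping edits to an in-memory document. The content
 * and edit below are made-up example values.
 *
 *   import { Range, TextDocument, TextEdit } from 'vscode-languageserver-types';
 *
 *   const doc = TextDocument.create('file:///tmp/example.txt', 'plaintext', 1, 'hello world');
 *   const updated = TextDocument.applyEdits(doc, [TextEdit.replace(Range.create(0, 0, 0, 5), 'goodbye')]);
 *   // updated === 'goodbye world'; overlapping edits would throw 'Overlapping edit'
 */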
(ch === '\\r' || ch === '\\n');\n                if (ch === '\\r' && i + 1 < text.length && text.charAt(i + 1) === '\\n') {\n                    i++;\n                }\n            }\n            if (isLineStart && text.length > 0) {\n                lineOffsets.push(text.length);\n            }\n            this._lineOffsets = lineOffsets;\n        }\n        return this._lineOffsets;\n    }\n    positionAt(offset) {\n        offset = Math.max(Math.min(offset, this._content.length), 0);\n        let lineOffsets = this.getLineOffsets();\n        let low = 0, high = lineOffsets.length;\n        if (high === 0) {\n            return Position.create(0, offset);\n        }\n        while (low < high) {\n            let mid = Math.floor((low + high) / 2);\n            if (lineOffsets[mid] > offset) {\n                high = mid;\n            }\n            else {\n                low = mid + 1;\n            }\n        }\n        // low is the least x for which the line offset is larger than the current offset\n        // or array.length if no line offset is larger than the current offset\n        let line = low - 1;\n        return Position.create(line, offset - lineOffsets[line]);\n    }\n    offsetAt(position) {\n        let lineOffsets = this.getLineOffsets();\n        if (position.line >= lineOffsets.length) {\n            return this._content.length;\n        }\n        else if (position.line < 0) {\n            return 0;\n        }\n        let lineOffset = lineOffsets[position.line];\n        let nextLineOffset = (position.line + 1 < lineOffsets.length) ? lineOffsets[position.line + 1] : this._content.length;\n        return Math.max(Math.min(lineOffset + position.character, nextLineOffset), lineOffset);\n    }\n    get lineCount() {\n        return this.getLineOffsets().length;\n    }\n}\nvar Is;\n(function (Is) {\n    const toString = Object.prototype.toString;\n    function defined(value) {\n        return typeof value !== 'undefined';\n    }\n    Is.defined = defined;\n    function undefined(value) {\n        return typeof value === 'undefined';\n    }\n    Is.undefined = undefined;\n    function boolean(value) {\n        return value === true || value === false;\n    }\n    Is.boolean = boolean;\n    function string(value) {\n        return toString.call(value) === '[object String]';\n    }\n    Is.string = string;\n    function number(value) {\n        return toString.call(value) === '[object Number]';\n    }\n    Is.number = number;\n    function numberRange(value, min, max) {\n        return toString.call(value) === '[object Number]' && min <= value && value <= max;\n    }\n    Is.numberRange = numberRange;\n    function integer(value) {\n        return toString.call(value) === '[object Number]' && -2147483648 <= value && value <= 2147483647;\n    }\n    Is.integer = integer;\n    function uinteger(value) {\n        return toString.call(value) === '[object Number]' && 0 <= value && value <= 2147483647;\n    }\n    Is.uinteger = uinteger;\n    function func(value) {\n        return toString.call(value) === '[object Function]';\n    }\n    Is.func = func;\n    function objectLiteral(value) {\n        // Strictly speaking class instances pass this check as well. Since the LSP\n        // doesn't use classes we ignore this for now. 
If we do we need to add something\n        // like this: `Object.getPrototypeOf(Object.getPrototypeOf(x)) === null`\n        return value !== null && typeof value === 'object';\n    }\n    Is.objectLiteral = objectLiteral;\n    function typedArray(value, check) {\n        return Array.isArray(value) && value.every(check);\n    }\n    Is.typedArray = typedArray;\n})(Is || (Is = {}));\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IToken, TokenType } from 'chevrotain';\nimport type { Range } from 'vscode-languageserver-types';\nimport type { AbstractElement } from '../languages/generated/ast.js';\nimport type { AstNode, CompositeCstNode, CstNode, LeafCstNode, RootCstNode } from '../syntax-tree.js';\nimport { Position } from 'vscode-languageserver-types';\nimport { isCompositeCstNode } from '../syntax-tree.js';\nimport { tokenToRange } from '../utils/cst-utils.js';\n\nexport class CstNodeBuilder {\n\n    private rootNode!: RootCstNodeImpl;\n    private nodeStack: CompositeCstNodeImpl[] = [];\n\n    private get current(): CompositeCstNodeImpl {\n        return this.nodeStack[this.nodeStack.length - 1];\n    }\n\n    buildRootNode(input: string): RootCstNode {\n        this.rootNode = new RootCstNodeImpl(input);\n        this.rootNode.root = this.rootNode;\n        this.nodeStack = [this.rootNode];\n        return this.rootNode;\n    }\n\n    buildCompositeNode(feature: AbstractElement): CompositeCstNode {\n        const compositeNode = new CompositeCstNodeImpl();\n        compositeNode.grammarSource = feature;\n        compositeNode.root = this.rootNode;\n        this.current.content.push(compositeNode);\n        this.nodeStack.push(compositeNode);\n        return compositeNode;\n    }\n\n    buildLeafNode(token: IToken, feature: AbstractElement): LeafCstNode {\n        const leafNode = new LeafCstNodeImpl(token.startOffset, token.image.length, tokenToRange(token), token.tokenType, false);\n        leafNode.grammarSource = feature;\n        leafNode.root = this.rootNode;\n        this.current.content.push(leafNode);\n        return leafNode;\n    }\n\n    removeNode(node: CstNode): void {\n        const parent = node.container;\n        if (parent) {\n            const index = parent.content.indexOf(node);\n            if (index >= 0) {\n                parent.content.splice(index, 1);\n            }\n        }\n    }\n\n    construct(item: { $type: string | symbol | undefined, $cstNode: CstNode }): void {\n        const current: CstNode = this.current;\n        // The specified item could be a datatype ($type is symbol) or a fragment ($type is undefined)\n        // Only if the $type is a string, we actually assign the element\n        if (typeof item.$type === 'string') {\n            this.current.astNode = item;\n        }\n        item.$cstNode = current;\n        const node = this.nodeStack.pop();\n        // Empty composite nodes are not valid\n        // Simply remove the node from the tree\n        if (node?.content.length === 0) {\n            this.removeNode(node);\n        }\n    }\n\n    addHiddenTokens(hiddenTokens: IToken[]): void {\n        for (const token of hiddenTokens) {\n            const hiddenNode = new LeafCstNodeImpl(token.startOffset, token.image.length, 
tokenToRange(token), token.tokenType, true);\n            hiddenNode.root = this.rootNode;\n            this.addHiddenToken(this.rootNode, hiddenNode);\n        }\n    }\n\n    private addHiddenToken(node: CompositeCstNode, token: LeafCstNode): void {\n        const { offset: tokenStart, end: tokenEnd } = token;\n\n        for (let i = 0; i < node.content.length; i++) {\n            const child = node.content[i];\n            const { offset: childStart, end: childEnd } = child;\n            if (isCompositeCstNode(child) && tokenStart > childStart && tokenEnd < childEnd) {\n                this.addHiddenToken(child, token);\n                return;\n            } else if (tokenEnd <= childStart) {\n                node.content.splice(i, 0, token);\n                return;\n            }\n        }\n\n        // We know that we haven't found a suited position for the token\n        // So we simply add it to the end of the current node\n        node.content.push(token);\n    }\n}\n\nexport abstract class AbstractCstNode implements CstNode {\n    abstract get offset(): number;\n    abstract get length(): number;\n    abstract get end(): number;\n    abstract get range(): Range;\n\n    container?: CompositeCstNode;\n    grammarSource: AbstractElement;\n    root: RootCstNode;\n    private _astNode?: AstNode;\n\n    /** @deprecated use `container` instead. */\n    get parent(): CompositeCstNode | undefined {\n        return this.container;\n    }\n\n    /** @deprecated use `grammarSource` instead. */\n    get feature(): AbstractElement {\n        return this.grammarSource;\n    }\n\n    get hidden(): boolean {\n        return false;\n    }\n\n    get astNode(): AstNode {\n        const node = typeof this._astNode?.$type === 'string' ? this._astNode : this.container?.astNode;\n        if (!node) {\n            throw new Error('This node has no associated AST element');\n        }\n        return node;\n    }\n\n    set astNode(value: AstNode) {\n        this._astNode = value;\n    }\n\n    /** @deprecated use `astNode` instead. */\n    get element(): AstNode {\n        return this.astNode;\n    }\n\n    get text(): string {\n        return this.root.fullText.substring(this.offset, this.end);\n    }\n}\n\nexport class LeafCstNodeImpl extends AbstractCstNode implements LeafCstNode {\n    get offset(): number {\n        return this._offset;\n    }\n\n    get length(): number {\n        return this._length;\n    }\n\n    get end(): number {\n        return this._offset + this._length;\n    }\n\n    override get hidden(): boolean {\n        return this._hidden;\n    }\n\n    get tokenType(): TokenType {\n        return this._tokenType;\n    }\n\n    get range(): Range {\n        return this._range;\n    }\n\n    private _hidden: boolean;\n    private _offset: number;\n    private _length: number;\n    private _range: Range;\n    private _tokenType: TokenType;\n\n    constructor(offset: number, length: number, range: Range, tokenType: TokenType, hidden = false) {\n        super();\n        this._hidden = hidden;\n        this._offset = offset;\n        this._tokenType = tokenType;\n        this._length = length;\n        this._range = range;\n    }\n}\n\nexport class CompositeCstNodeImpl extends AbstractCstNode implements CompositeCstNode {\n    readonly content: CstNode[] = new CstNodeContainer(this);\n    private _rangeCache?: Range;\n\n    /** @deprecated use `content` instead. 
*/\n    get children(): CstNode[] {\n        return this.content;\n    }\n\n    get offset(): number {\n        return this.firstNonHiddenNode?.offset ?? 0;\n    }\n\n    get length(): number {\n        return this.end - this.offset;\n    }\n\n    get end(): number {\n        return this.lastNonHiddenNode?.end ?? 0;\n    }\n\n    get range(): Range {\n        const firstNode = this.firstNonHiddenNode;\n        const lastNode = this.lastNonHiddenNode;\n        if (firstNode && lastNode) {\n            if (this._rangeCache === undefined) {\n                const { range: firstRange } = firstNode;\n                const { range: lastRange } = lastNode;\n                this._rangeCache = { start: firstRange.start, end: lastRange.end.line < firstRange.start.line ? firstRange.start : lastRange.end };\n            }\n            return this._rangeCache;\n        } else {\n            return { start: Position.create(0, 0), end: Position.create(0, 0) };\n        }\n    }\n\n    private get firstNonHiddenNode(): CstNode | undefined {\n        for (const child of this.content) {\n            if (!child.hidden) {\n                return child;\n            }\n        }\n        return this.content[0];\n    }\n\n    private get lastNonHiddenNode(): CstNode | undefined {\n        for (let i = this.content.length - 1; i >= 0; i--) {\n            const child = this.content[i];\n            if (!child.hidden) {\n                return child;\n            }\n        }\n        return this.content[this.content.length - 1];\n    }\n}\n\nclass CstNodeContainer extends Array {\n    readonly parent: CompositeCstNode;\n\n    constructor(parent: CompositeCstNode) {\n        super();\n        this.parent = parent;\n        Object.setPrototypeOf(this, CstNodeContainer.prototype);\n    }\n\n    override push(...items: CstNode[]): number {\n        this.addParents(items);\n        return super.push(...items);\n    }\n\n    override unshift(...items: CstNode[]): number {\n        this.addParents(items);\n        return super.unshift(...items);\n    }\n\n    override splice(start: number, count: number, ...items: CstNode[]): CstNode[] {\n        this.addParents(items);\n        return super.splice(start, count, ...items);\n    }\n\n    private addParents(items: CstNode[]): void {\n        for (const item of items) {\n            (item).container = this.parent;\n        }\n    }\n}\n\nexport class RootCstNodeImpl extends CompositeCstNodeImpl implements RootCstNode {\n    private _text = '';\n\n    override get text(): string {\n        return this._text.substring(this.offset, this.end);\n    }\n\n    get fullText(): string {\n        return this._text;\n    }\n\n    constructor(input?: string) {\n        super();\n        this._text = input ?? 
'';\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\nimport type { DSLMethodOpts, ILexingError, IOrAlt, IParserErrorMessageProvider, IRecognitionException, IToken, TokenType, TokenVocabulary } from 'chevrotain';\nimport type { AbstractElement, Action, Assignment, ParserRule } from '../languages/generated/ast.js';\nimport type { Linker } from '../references/linker.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstReflection, CompositeCstNode, CstNode } from '../syntax-tree.js';\nimport type { Lexer } from './lexer.js';\nimport type { IParserConfig } from './parser-config.js';\nimport type { ValueConverter } from './value-converter.js';\nimport { defaultParserErrorProvider, EmbeddedActionsParser, LLkLookaheadStrategy } from 'chevrotain';\nimport { LLStarLookaheadStrategy } from 'chevrotain-allstar';\nimport { isAssignment, isCrossReference, isKeyword } from '../languages/generated/ast.js';\nimport { getTypeName, isDataTypeRule } from '../utils/grammar-utils.js';\nimport { assignMandatoryProperties, getContainerOfType, linkContentToContainer } from '../utils/ast-utils.js';\nimport { CstNodeBuilder } from './cst-node-builder.js';\n\nexport type ParseResult = {\n    value: T,\n    parserErrors: IRecognitionException[],\n    lexerErrors: ILexingError[]\n}\n\nexport const DatatypeSymbol = Symbol('Datatype');\n\ninterface DataTypeNode {\n    $cstNode: CompositeCstNode\n    /** Instead of a string, this node is uniquely identified by the `Datatype` symbol */\n    $type: symbol\n    /** Used as a storage for all parsed terminals, keywords and sub-datatype rules */\n    value: string\n}\n\nfunction isDataTypeNode(node: { $type: string | symbol | undefined }): node is DataTypeNode {\n    return node.$type === DatatypeSymbol;\n}\n\ntype RuleResult = (args: Args) => any;\n\ntype Args = Record;\n\ntype RuleImpl = (args: Args) => any;\n\ninterface AssignmentElement {\n    assignment?: Assignment\n    isCrossRef: boolean\n}\n\nexport interface BaseParser {\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult;\n    alternatives(idx: number, choices: Array>): void;\n    optional(idx: number, callback: DSLMethodOpts): void;\n    many(idx: number, callback: DSLMethodOpts): void;\n    atLeastOne(idx: number, callback: DSLMethodOpts): void;\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void;\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void;\n    action($type: string, action: Action): void;\n    construct(): unknown;\n    isRecording(): boolean;\n    get unorderedGroups(): Map;\n    getRuleStack(): number[];\n}\n\nconst ruleSuffix = '\\u200B';\nconst withRuleSuffix = (name: string): string => name.endsWith(ruleSuffix) ? 
name : name + ruleSuffix;\n\nexport abstract class AbstractLangiumParser implements BaseParser {\n\n    protected readonly lexer: Lexer;\n    protected readonly wrapper: ChevrotainWrapper;\n    protected _unorderedGroups: Map = new Map();\n\n    constructor(services: LangiumCoreServices) {\n        this.lexer = services.parser.Lexer;\n        const tokens = this.lexer.definition;\n        this.wrapper = new ChevrotainWrapper(tokens, {\n            ...services.parser.ParserConfig,\n            errorMessageProvider: services.parser.ParserErrorMessageProvider\n        });\n    }\n\n    alternatives(idx: number, choices: Array>): void {\n        this.wrapper.wrapOr(idx, choices);\n    }\n\n    optional(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapOption(idx, callback);\n    }\n\n    many(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapMany(idx, callback);\n    }\n\n    atLeastOne(idx: number, callback: DSLMethodOpts): void {\n        this.wrapper.wrapAtLeastOne(idx, callback);\n    }\n\n    abstract rule(rule: ParserRule, impl: RuleImpl): RuleResult;\n    abstract consume(idx: number, tokenType: TokenType, feature: AbstractElement): void;\n    abstract subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void;\n    abstract action($type: string, action: Action): void;\n    abstract construct(): unknown;\n\n    isRecording(): boolean {\n        return this.wrapper.IS_RECORDING;\n    }\n\n    get unorderedGroups(): Map {\n        return this._unorderedGroups;\n    }\n\n    getRuleStack(): number[] {\n        return (this.wrapper as any).RULE_STACK;\n    }\n\n    finalize(): void {\n        this.wrapper.wrapSelfAnalysis();\n    }\n}\n\nexport class LangiumParser extends AbstractLangiumParser {\n    private readonly linker: Linker;\n    private readonly converter: ValueConverter;\n    private readonly astReflection: AstReflection;\n    private readonly nodeBuilder = new CstNodeBuilder();\n    private stack: any[] = [];\n    private mainRule!: RuleResult;\n    private assignmentMap = new Map();\n\n    private get current(): any {\n        return this.stack[this.stack.length - 1];\n    }\n\n    constructor(services: LangiumCoreServices) {\n        super(services);\n        this.linker = services.references.Linker;\n        this.converter = services.parser.ValueConverter;\n        this.astReflection = services.shared.AstReflection;\n    }\n\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult {\n        const type = rule.fragment ? undefined : isDataTypeRule(rule) ? 
DatatypeSymbol : getTypeName(rule);\n        const ruleMethod = this.wrapper.DEFINE_RULE(withRuleSuffix(rule.name), this.startImplementation(type, impl).bind(this));\n        if (rule.entry) {\n            this.mainRule = ruleMethod;\n        }\n        return ruleMethod;\n    }\n\n    parse(input: string): ParseResult {\n        this.nodeBuilder.buildRootNode(input);\n        const lexerResult = this.lexer.tokenize(input);\n        this.wrapper.input = lexerResult.tokens;\n        const result = this.mainRule.call(this.wrapper, {});\n        this.nodeBuilder.addHiddenTokens(lexerResult.hidden);\n        this.unorderedGroups.clear();\n        return {\n            value: result,\n            lexerErrors: lexerResult.errors,\n            parserErrors: this.wrapper.errors\n        };\n    }\n\n    private startImplementation($type: string | symbol | undefined, implementation: RuleImpl): RuleImpl {\n        return (args) => {\n            if (!this.isRecording()) {\n                const node: any = { $type };\n                this.stack.push(node);\n                if ($type === DatatypeSymbol) {\n                    node.value = '';\n                }\n            }\n            let result: unknown;\n            try {\n                result = implementation(args);\n            } catch (err) {\n                result = undefined;\n            }\n            if (!this.isRecording() && result === undefined) {\n                result = this.construct();\n            }\n            return result;\n        };\n    }\n\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void {\n        const token = this.wrapper.wrapConsume(idx, tokenType);\n        if (!this.isRecording() && this.isValidToken(token)) {\n            const leafNode = this.nodeBuilder.buildLeafNode(token, feature);\n            const { assignment, isCrossRef } = this.getAssignment(feature);\n            const current = this.current;\n            if (assignment) {\n                const convertedValue = isKeyword(feature) ? token.image : this.converter.convert(token.image, leafNode);\n                this.assign(assignment.operator, assignment.feature, convertedValue, leafNode, isCrossRef);\n            } else if (isDataTypeNode(current)) {\n                let text = token.image;\n                if (!isKeyword(feature)) {\n                    text = this.converter.convert(text, leafNode).toString();\n                }\n                current.value += text;\n            }\n        }\n    }\n\n    /**\n     * Most consumed parser tokens are valid. However there are two cases in which they are not valid:\n     *\n     * 1. They were inserted during error recovery by the parser. These tokens don't really exist and should not be further processed\n     * 2. They contain invalid token ranges. 
This might include the special EOF token, or other tokens produced by invalid token builders.\n     */\n    private isValidToken(token: IToken): boolean {\n        return !token.isInsertedInRecovery && !isNaN(token.startOffset) && typeof token.endOffset === 'number' && !isNaN(token.endOffset);\n    }\n\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void {\n        let cstNode: CompositeCstNode | undefined;\n        if (!this.isRecording()) {\n            cstNode = this.nodeBuilder.buildCompositeNode(feature);\n        }\n        const subruleResult = this.wrapper.wrapSubrule(idx, rule, args) as any;\n        if (!this.isRecording() && cstNode && cstNode.length > 0) {\n            this.performSubruleAssignment(subruleResult, feature, cstNode);\n        }\n    }\n\n    private performSubruleAssignment(result: any, feature: AbstractElement, cstNode: CompositeCstNode): void {\n        const { assignment, isCrossRef } = this.getAssignment(feature);\n        if (assignment) {\n            this.assign(assignment.operator, assignment.feature, result, cstNode, isCrossRef);\n        } else if (!assignment) {\n            // If we call a subrule without an assignment we either:\n            // 1. append the result of the subrule (data type rule)\n            // 2. override the current object with the newly parsed object\n            // If the current element is an AST node and the result of the subrule\n            // is a data type rule, we can safely discard the results.\n            const current = this.current;\n            if (isDataTypeNode(current)) {\n                current.value += result.toString();\n            } else if (typeof result === 'object' && result) {\n                const resultKind = result.$type;\n                const object = this.assignWithoutOverride(result, current);\n                if (resultKind) {\n                    object.$type = resultKind;\n                }\n                const newItem = object;\n                this.stack.pop();\n                this.stack.push(newItem);\n            }\n        }\n    }\n\n    action($type: string, action: Action): void {\n        if (!this.isRecording()) {\n            let last = this.current;\n            // This branch is used for left recursive grammar rules.\n            // Those don't call `construct` before another action.\n            // Therefore, we need to call it here.\n            if (!last.$cstNode && action.feature && action.operator) {\n                last = this.construct(false);\n                const feature = last.$cstNode.feature;\n                this.nodeBuilder.buildCompositeNode(feature);\n            }\n            const newItem = { $type };\n            this.stack.pop();\n            this.stack.push(newItem);\n            if (action.feature && action.operator) {\n                this.assign(action.operator, action.feature, last, last.$cstNode, false);\n            }\n        }\n    }\n\n    construct(pop = true): unknown {\n        if (this.isRecording()) {\n            return undefined;\n        }\n        const obj = this.current;\n        linkContentToContainer(obj);\n        this.nodeBuilder.construct(obj);\n        if (pop) {\n            this.stack.pop();\n        }\n        if (isDataTypeNode(obj)) {\n            return this.converter.convert(obj.value, obj.$cstNode);\n        } else {\n            assignMandatoryProperties(this.astReflection, obj);\n        }\n        return obj;\n    }\n\n    private getAssignment(feature: AbstractElement): 
AssignmentElement {\n        if (!this.assignmentMap.has(feature)) {\n            const assignment = getContainerOfType(feature, isAssignment);\n            this.assignmentMap.set(feature, {\n                assignment: assignment,\n                isCrossRef: assignment ? isCrossReference(assignment.terminal) : false\n            });\n        }\n        return this.assignmentMap.get(feature)!;\n    }\n\n    private assign(operator: string, feature: string, value: unknown, cstNode: CstNode, isCrossRef: boolean): void {\n        const obj = this.current;\n        let item: unknown;\n        if (isCrossRef && typeof value === 'string') {\n            item = this.linker.buildReference(obj, feature, cstNode, value);\n        } else {\n            item = value;\n        }\n        switch (operator) {\n            case '=': {\n                obj[feature] = item;\n                break;\n            }\n            case '?=': {\n                obj[feature] = true;\n                break;\n            }\n            case '+=': {\n                if (!Array.isArray(obj[feature])) {\n                    obj[feature] = [];\n                }\n                obj[feature].push(item);\n            }\n        }\n    }\n\n    private assignWithoutOverride(target: any, source: any): any {\n        for (const [name, existingValue] of Object.entries(source)) {\n            const newValue = target[name];\n            if (newValue === undefined) {\n                target[name] = existingValue;\n            } else if (Array.isArray(newValue) && Array.isArray(existingValue)) {\n                existingValue.push(...newValue);\n                target[name] = existingValue;\n            }\n        }\n        return target;\n    }\n\n    get definitionErrors(): IParserDefinitionError[] {\n        return this.wrapper.definitionErrors;\n    }\n}\n\nexport interface IParserDefinitionError {\n    message: string\n    type: number\n    ruleName?: string\n}\n\nexport abstract class AbstractParserErrorMessageProvider implements IParserErrorMessageProvider {\n\n    buildMismatchTokenMessage(options: {\n        expected: TokenType\n        actual: IToken\n        previous: IToken\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildMismatchTokenMessage(options);\n    }\n\n    buildNotAllInputParsedMessage(options: {\n        firstRedundant: IToken\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildNotAllInputParsedMessage(options);\n    }\n\n    buildNoViableAltMessage(options: {\n        expectedPathsPerAlt: TokenType[][][]\n        actual: IToken[]\n        previous: IToken\n        customUserDescription: string\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildNoViableAltMessage(options);\n    }\n\n    buildEarlyExitMessage(options: {\n        expectedIterationPaths: TokenType[][]\n        actual: IToken[]\n        previous: IToken\n        customUserDescription: string\n        ruleName: string\n    }): string {\n        return defaultParserErrorProvider.buildEarlyExitMessage(options);\n    }\n\n}\n\nexport class LangiumParserErrorMessageProvider extends AbstractParserErrorMessageProvider {\n\n    override buildMismatchTokenMessage({ expected, actual }: {\n        expected: TokenType\n        actual: IToken\n        previous: IToken\n        ruleName: string\n    }): string {\n        const expectedMsg = expected.LABEL\n            ? 
'`' + expected.LABEL + '`'\n            : expected.name.endsWith(':KW')\n                ? `keyword '${expected.name.substring(0, expected.name.length - 3)}'`\n                : `token of type '${expected.name}'`;\n        return `Expecting ${expectedMsg} but found \\`${actual.image}\\`.`;\n    }\n\n    override buildNotAllInputParsedMessage({ firstRedundant }: {\n        firstRedundant: IToken\n        ruleName: string\n    }): string {\n        return `Expecting end of file but found \\`${firstRedundant.image}\\`.`;\n    }\n}\n\nexport interface CompletionParserResult {\n    tokens: IToken[]\n    elementStack: AbstractElement[]\n    tokenIndex: number\n}\n\nexport class LangiumCompletionParser extends AbstractLangiumParser {\n    private mainRule!: RuleResult;\n    private tokens: IToken[] = [];\n\n    private elementStack: AbstractElement[] = [];\n    private lastElementStack: AbstractElement[] = [];\n    private nextTokenIndex = 0;\n    private stackSize = 0;\n\n    action(): void {\n        // NOOP\n    }\n\n    construct(): unknown {\n        // NOOP\n        return undefined;\n    }\n\n    parse(input: string): CompletionParserResult {\n        this.resetState();\n        const tokens = this.lexer.tokenize(input);\n        this.tokens = tokens.tokens;\n        this.wrapper.input = [...this.tokens];\n        this.mainRule.call(this.wrapper, {});\n        this.unorderedGroups.clear();\n        return {\n            tokens: this.tokens,\n            elementStack: [...this.lastElementStack],\n            tokenIndex: this.nextTokenIndex\n        };\n    }\n\n    rule(rule: ParserRule, impl: RuleImpl): RuleResult {\n        const ruleMethod = this.wrapper.DEFINE_RULE(withRuleSuffix(rule.name), this.startImplementation(impl).bind(this));\n        if (rule.entry) {\n            this.mainRule = ruleMethod;\n        }\n        return ruleMethod;\n    }\n\n    private resetState(): void {\n        this.elementStack = [];\n        this.lastElementStack = [];\n        this.nextTokenIndex = 0;\n        this.stackSize = 0;\n    }\n\n    private startImplementation(implementation: RuleImpl): RuleImpl {\n        return (args) => {\n            const size = this.keepStackSize();\n            try {\n                implementation(args);\n            } finally {\n                this.resetStackSize(size);\n            }\n        };\n    }\n\n    private removeUnexpectedElements(): void {\n        this.elementStack.splice(this.stackSize);\n    }\n\n    keepStackSize(): number {\n        const size = this.elementStack.length;\n        this.stackSize = size;\n        return size;\n    }\n\n    resetStackSize(size: number): void {\n        this.removeUnexpectedElements();\n        this.stackSize = size;\n    }\n\n    consume(idx: number, tokenType: TokenType, feature: AbstractElement): void {\n        this.wrapper.wrapConsume(idx, tokenType);\n        if (!this.isRecording()) {\n            this.lastElementStack = [...this.elementStack, feature];\n            this.nextTokenIndex = this.currIdx + 1;\n        }\n    }\n\n    subrule(idx: number, rule: RuleResult, feature: AbstractElement, args: Args): void {\n        this.before(feature);\n        this.wrapper.wrapSubrule(idx, rule, args);\n        this.after(feature);\n    }\n\n    before(element: AbstractElement): void {\n        if (!this.isRecording()) {\n            this.elementStack.push(element);\n        }\n    }\n\n    after(element: AbstractElement): void {\n        if (!this.isRecording()) {\n            const index = 
this.elementStack.lastIndexOf(element);\n            if (index >= 0) {\n                this.elementStack.splice(index);\n            }\n        }\n    }\n\n    get currIdx(): number {\n        return (this.wrapper as any).currIdx;\n    }\n}\n\nconst defaultConfig: IParserConfig = {\n    recoveryEnabled: true,\n    nodeLocationTracking: 'full',\n    skipValidations: true,\n    errorMessageProvider: new LangiumParserErrorMessageProvider()\n};\n\n/**\n * This class wraps the embedded actions parser of chevrotain and exposes protected methods.\n * This way, we can build the `LangiumParser` as a composition.\n */\nclass ChevrotainWrapper extends EmbeddedActionsParser {\n\n    // This array is set in the base implementation of Chevrotain.\n    definitionErrors: IParserDefinitionError[];\n\n    constructor(tokens: TokenVocabulary, config?: IParserConfig) {\n        const useDefaultLookahead = config && 'maxLookahead' in config;\n        super(tokens, {\n            ...defaultConfig,\n            lookaheadStrategy: useDefaultLookahead\n                ? new LLkLookaheadStrategy({ maxLookahead: config.maxLookahead })\n                : new LLStarLookaheadStrategy(),\n            ...config,\n        });\n    }\n\n    get IS_RECORDING(): boolean {\n        return this.RECORDING_PHASE;\n    }\n\n    DEFINE_RULE(name: string, impl: RuleImpl): RuleResult {\n        return this.RULE(name, impl);\n    }\n\n    wrapSelfAnalysis(): void {\n        this.performSelfAnalysis();\n    }\n\n    wrapConsume(idx: number, tokenType: TokenType): IToken {\n        return this.consume(idx, tokenType);\n    }\n\n    wrapSubrule(idx: number, rule: RuleResult, args: Args): unknown {\n        return this.subrule(idx, rule, {\n            ARGS: [args]\n        });\n    }\n\n    wrapOr(idx: number, choices: Array>): void {\n        this.or(idx, choices);\n    }\n\n    wrapOption(idx: number, callback: DSLMethodOpts): void {\n        this.option(idx, callback);\n    }\n\n    wrapMany(idx: number, callback: DSLMethodOpts): void {\n        this.many(idx, callback);\n    }\n\n    wrapAtLeastOne(idx: number, callback: DSLMethodOpts): void {\n        this.atLeastOne(idx, callback);\n    }\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { IOrAlt, TokenType, TokenTypeDictionary } from 'chevrotain';\nimport type { AbstractElement, Action, Alternatives, Condition, CrossReference, Grammar, Group, Keyword, NamedArgument, ParserRule, RuleCall, UnorderedGroup } from '../languages/generated/ast.js';\nimport type { BaseParser } from './langium-parser.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { Cardinality } from '../utils/grammar-utils.js';\nimport { EMPTY_ALT, EOF } from 'chevrotain';\nimport { isAction, isAlternatives, isEndOfFile, isAssignment, isConjunction, isCrossReference, isDisjunction, isGroup, isKeyword, isNegation, isParameterReference, isParserRule, isRuleCall, isTerminalRule, isUnorderedGroup, isBooleanLiteral } from '../languages/generated/ast.js';\nimport { assertUnreachable, ErrorWithLocation } from '../utils/errors.js';\nimport { stream } from '../utils/stream.js';\nimport { findNameAssignment, getAllReachableRules, getTypeName } from '../utils/grammar-utils.js';\n\ntype RuleContext = 
{\n    optional: number,\n    consume: number,\n    subrule: number,\n    many: number,\n    or: number\n} & ParserContext;\n\ntype ParserContext = {\n    parser: BaseParser\n    tokens: TokenTypeDictionary\n    rules: Map\n    ruleNames: Map\n}\n\ntype Rule = (args: Args) => unknown;\n\ntype Args = Record;\n\ntype Predicate = (args: Args) => boolean;\n\ntype Method = (args: Args) => void;\n\nexport function createParser(grammar: Grammar, parser: T, tokens: TokenTypeDictionary): T {\n    const rules = new Map();\n    const parserContext: ParserContext = {\n        parser,\n        tokens,\n        rules,\n        ruleNames: new Map()\n    };\n    buildRules(parserContext, grammar);\n    return parser;\n}\n\nfunction buildRules(parserContext: ParserContext, grammar: Grammar): void {\n    const reachable = getAllReachableRules(grammar, false);\n    const parserRules = stream(grammar.rules).filter(isParserRule).filter(rule => reachable.has(rule));\n    for (const rule of parserRules) {\n        const ctx: RuleContext = {\n            ...parserContext,\n            consume: 1,\n            optional: 1,\n            subrule: 1,\n            many: 1,\n            or: 1\n        };\n        ctx.rules.set(\n            rule.name,\n            parserContext.parser.rule(rule, buildElement(ctx, rule.definition))\n        );\n    }\n}\n\nfunction buildElement(ctx: RuleContext, element: AbstractElement, ignoreGuard = false): Method {\n    let method: Method;\n    if (isKeyword(element)) {\n        method = buildKeyword(ctx, element);\n    } else if (isAction(element)) {\n        method = buildAction(ctx, element);\n    } else if (isAssignment(element)) {\n        method = buildElement(ctx, element.terminal);\n    } else if (isCrossReference(element)) {\n        method = buildCrossReference(ctx, element);\n    } else if (isRuleCall(element)) {\n        method = buildRuleCall(ctx, element);\n    } else if (isAlternatives(element)) {\n        method = buildAlternatives(ctx, element);\n    } else if (isUnorderedGroup(element)) {\n        method = buildUnorderedGroup(ctx, element);\n    } else if (isGroup(element)) {\n        method = buildGroup(ctx, element);\n    } else if(isEndOfFile(element)) {\n        const idx = ctx.consume++;\n        method = () => ctx.parser.consume(idx, EOF, element);\n    } else {\n        throw new ErrorWithLocation(element.$cstNode, `Unexpected element type: ${element.$type}`);\n    }\n    return wrap(ctx, ignoreGuard ? undefined : getGuardCondition(element), method, element.cardinality);\n}\n\nfunction buildAction(ctx: RuleContext, action: Action): Method {\n    const actionType = getTypeName(action);\n    return () => ctx.parser.action(actionType, action);\n}\n\nfunction buildRuleCall(ctx: RuleContext, ruleCall: RuleCall): Method {\n    const rule = ruleCall.rule.ref;\n    if (isParserRule(rule)) {\n        const idx = ctx.subrule++;\n        const predicate = ruleCall.arguments.length > 0 ? 
buildRuleCallPredicate(rule, ruleCall.arguments) : () => ({});\n        return (args) => ctx.parser.subrule(idx, getRule(ctx, rule), ruleCall, predicate(args));\n    } else if (isTerminalRule(rule)) {\n        const idx = ctx.consume++;\n        const method = getToken(ctx, rule.name);\n        return () => ctx.parser.consume(idx, method, ruleCall);\n    } else if (!rule) {\n        throw new ErrorWithLocation(ruleCall.$cstNode, `Undefined rule type: ${ruleCall.$type}`);\n    } else {\n        assertUnreachable(rule);\n    }\n}\n\nfunction buildRuleCallPredicate(rule: ParserRule, namedArgs: NamedArgument[]): (args: Args) => Args {\n    const predicates = namedArgs.map(e => buildPredicate(e.value));\n    return (args) => {\n        const ruleArgs: Args = {};\n        for (let i = 0; i < predicates.length; i++) {\n            const ruleTarget = rule.parameters[i];\n            const predicate = predicates[i];\n            ruleArgs[ruleTarget.name] = predicate(args);\n        }\n        return ruleArgs;\n    };\n}\n\ninterface PredicatedMethod {\n    ALT: Method,\n    GATE?: Predicate\n}\n\nfunction buildPredicate(condition: Condition): Predicate {\n    if (isDisjunction(condition)) {\n        const left = buildPredicate(condition.left);\n        const right = buildPredicate(condition.right);\n        return (args) => (left(args) || right(args));\n    } else if (isConjunction(condition)) {\n        const left = buildPredicate(condition.left);\n        const right = buildPredicate(condition.right);\n        return (args) => (left(args) && right(args));\n    } else if (isNegation(condition)) {\n        const value = buildPredicate(condition.value);\n        return (args) => !value(args);\n    } else if (isParameterReference(condition)) {\n        const name = condition.parameter.ref!.name;\n        return (args) => args !== undefined && args[name] === true;\n    } else if (isBooleanLiteral(condition)) {\n        const value = Boolean(condition.true);\n        return () => value;\n    }\n    assertUnreachable(condition);\n}\n\nfunction buildAlternatives(ctx: RuleContext, alternatives: Alternatives): Method {\n    if (alternatives.elements.length === 1) {\n        return buildElement(ctx, alternatives.elements[0]);\n    } else {\n        const methods: PredicatedMethod[] = [];\n\n        for (const element of alternatives.elements) {\n            const predicatedMethod: PredicatedMethod = {\n                // Since we handle the guard condition in the alternative already\n                // We can ignore the group guard condition inside\n                ALT: buildElement(ctx, element, true)\n            };\n            const guard = getGuardCondition(element);\n            if (guard) {\n                predicatedMethod.GATE = buildPredicate(guard);\n            }\n            methods.push(predicatedMethod);\n        }\n\n        const idx = ctx.or++;\n        return (args) => ctx.parser.alternatives(idx, methods.map(method => {\n            const alt: IOrAlt = {\n                ALT: () => method.ALT(args)\n            };\n            const gate = method.GATE;\n            if (gate) {\n                alt.GATE = () => gate(args);\n            }\n            return alt;\n        }));\n    }\n}\n\nfunction buildUnorderedGroup(ctx: RuleContext, group: UnorderedGroup): Method {\n    if (group.elements.length === 1) {\n        return buildElement(ctx, group.elements[0]);\n    }\n    const methods: PredicatedMethod[] = [];\n\n    for (const element of group.elements) {\n        const predicatedMethod: 
PredicatedMethod = {\n            // Since we handle the guard condition in the alternative already\n            // We can ignore the group guard condition inside\n            ALT: buildElement(ctx, element, true)\n        };\n        const guard = getGuardCondition(element);\n        if (guard) {\n            predicatedMethod.GATE = buildPredicate(guard);\n        }\n        methods.push(predicatedMethod);\n    }\n\n    const orIdx = ctx.or++;\n\n    const idFunc = (groupIdx: number, lParser: BaseParser) => {\n        const stackId = lParser.getRuleStack().join('-');\n        return `uGroup_${groupIdx}_${stackId}`;\n    };\n    const alternatives: Method = (args) => ctx.parser.alternatives(orIdx, methods.map((method, idx) => {\n        const alt: IOrAlt = { ALT: () => true };\n        const parser = ctx.parser;\n        alt.ALT = () => {\n            method.ALT(args);\n            if (!parser.isRecording()) {\n                const key = idFunc(orIdx, parser);\n                if (!parser.unorderedGroups.get(key)) {\n                    // init after clear state\n                    parser.unorderedGroups.set(key, []);\n                }\n                const groupState = parser.unorderedGroups.get(key)!;\n                if (typeof groupState?.[idx] === 'undefined') {\n                    // Not accessed yet\n                    groupState[idx] = true;\n                }\n            }\n        };\n        const gate = method.GATE;\n        if (gate) {\n            alt.GATE = () => gate(args);\n        } else {\n            alt.GATE = () => {\n                const trackedAlternatives = parser.unorderedGroups.get(idFunc(orIdx, parser));\n                const allow = !trackedAlternatives?.[idx];\n                return allow;\n            };\n        }\n        return alt;\n    }));\n    const wrapped = wrap(ctx, getGuardCondition(group), alternatives, '*');\n    return (args) => {\n        wrapped(args);\n        if (!ctx.parser.isRecording()) {\n            ctx.parser.unorderedGroups.delete(idFunc(orIdx, ctx.parser));\n        }\n    };\n}\n\nfunction buildGroup(ctx: RuleContext, group: Group): Method {\n    const methods = group.elements.map(e => buildElement(ctx, e));\n    return (args) => methods.forEach(method => method(args));\n}\n\nfunction getGuardCondition(element: AbstractElement): Condition | undefined {\n    if (isGroup(element)) {\n        return element.guardCondition;\n    }\n    return undefined;\n}\n\nfunction buildCrossReference(ctx: RuleContext, crossRef: CrossReference, terminal = crossRef.terminal): Method {\n    if (!terminal) {\n        if (!crossRef.type.ref) {\n            throw new Error('Could not resolve reference to type: ' + crossRef.type.$refText);\n        }\n        const assignment = findNameAssignment(crossRef.type.ref);\n        const assignTerminal = assignment?.terminal;\n        if (!assignTerminal) {\n            throw new Error('Could not find name assignment for type: ' + getTypeName(crossRef.type.ref));\n        }\n        return buildCrossReference(ctx, crossRef, assignTerminal);\n    } else if (isRuleCall(terminal) && isParserRule(terminal.rule.ref)) {\n        const idx = ctx.subrule++;\n        return (args) => ctx.parser.subrule(idx, getRule(ctx, terminal.rule.ref as ParserRule), crossRef, args);\n    } else if (isRuleCall(terminal) && isTerminalRule(terminal.rule.ref)) {\n        const idx = ctx.consume++;\n        const terminalRule = getToken(ctx, terminal.rule.ref.name);\n        return () => ctx.parser.consume(idx, terminalRule, 
crossRef);\n    } else if (isKeyword(terminal)) {\n        const idx = ctx.consume++;\n        const keyword = getToken(ctx, terminal.value);\n        return () => ctx.parser.consume(idx, keyword, crossRef);\n    }\n    else {\n        throw new Error('Could not build cross reference parser');\n    }\n}\n\nfunction buildKeyword(ctx: RuleContext, keyword: Keyword): Method {\n    const idx = ctx.consume++;\n    const token = ctx.tokens[keyword.value];\n    if (!token) {\n        throw new Error('Could not find token for keyword: ' + keyword.value);\n    }\n    return () => ctx.parser.consume(idx, token, keyword);\n}\n\nfunction wrap(ctx: RuleContext, guard: Condition | undefined, method: Method, cardinality: Cardinality): Method {\n    const gate = guard && buildPredicate(guard);\n\n    if (!cardinality) {\n        if (gate) {\n            const idx = ctx.or++;\n            return (args) => ctx.parser.alternatives(idx, [\n                {\n                    ALT: () => method(args),\n                    GATE: () => gate(args)\n                },\n                {\n                    ALT: EMPTY_ALT(),\n                    GATE: () => !gate(args)\n                }\n            ]);\n        } else {\n            return method;\n        }\n    }\n\n    if (cardinality === '*') {\n        const idx = ctx.many++;\n        return (args) => ctx.parser.many(idx, {\n            DEF: () => method(args),\n            GATE: gate ? () => gate(args) : undefined\n        });\n    } else if (cardinality === '+') {\n        const idx = ctx.many++;\n        if (gate) {\n            const orIdx = ctx.or++;\n            // In the case of a guard condition for the `+` group\n            // We combine it with an empty alternative\n            // If the condition returns true, it needs to parse at least a single iteration\n            // If its false, it is not allowed to parse anything\n            return (args) => ctx.parser.alternatives(orIdx, [\n                {\n                    ALT: () => ctx.parser.atLeastOne(idx, {\n                        DEF: () => method(args)\n                    }),\n                    GATE: () => gate(args)\n                },\n                {\n                    ALT: EMPTY_ALT(),\n                    GATE: () => !gate(args)\n                }\n            ]);\n        } else {\n            return (args) => ctx.parser.atLeastOne(idx, {\n                DEF: () => method(args),\n            });\n        }\n    } else if (cardinality === '?') {\n        const idx = ctx.optional++;\n        return (args) => ctx.parser.optional(idx, {\n            DEF: () => method(args),\n            GATE: gate ? 
() => gate(args) : undefined\n        });\n    } else {\n        assertUnreachable(cardinality);\n    }\n}\n\nfunction getRule(ctx: ParserContext, element: ParserRule | AbstractElement): Rule {\n    const name = getRuleName(ctx, element);\n    const rule = ctx.rules.get(name);\n    if (!rule) throw new Error(`Rule \"${name}\" not found.\"`);\n    return rule;\n}\n\nfunction getRuleName(ctx: ParserContext, element: ParserRule | AbstractElement): string {\n    if (isParserRule(element)) {\n        return element.name;\n    } else if (ctx.ruleNames.has(element)) {\n        return ctx.ruleNames.get(element)!;\n    } else {\n        let item: AstNode = element;\n        let parent: AstNode = item.$container!;\n        let ruleName: string = element.$type;\n        while (!isParserRule(parent)) {\n            if (isGroup(parent) || isAlternatives(parent) || isUnorderedGroup(parent)) {\n                const index = parent.elements.indexOf(item as AbstractElement);\n                ruleName = index.toString() + ':' + ruleName;\n            }\n            item = parent;\n            parent = parent.$container!;\n        }\n        const rule = parent as ParserRule;\n        ruleName = rule.name + ':' + ruleName;\n        ctx.ruleNames.set(element, ruleName);\n        return ruleName;\n    }\n}\n\nfunction getToken(ctx: ParserContext, name: string): TokenType {\n    const token = ctx.tokens[name];\n    if (!token) throw new Error(`Token \"${name}\" not found.\"`);\n    return token;\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { LangiumCompletionParser } from './langium-parser.js';\nimport { createParser } from './parser-builder-base.js';\n\nexport function createCompletionParser(services: LangiumCoreServices): LangiumCompletionParser {\n    const grammar = services.Grammar;\n    const lexer = services.parser.Lexer;\n    const parser = new LangiumCompletionParser(services);\n    createParser(grammar, parser, lexer.definition);\n    parser.finalize();\n    return parser;\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport { LangiumParser } from './langium-parser.js';\nimport { createParser } from './parser-builder-base.js';\n\n/**\n * Create and finalize a Langium parser. The parser rules are derived from the grammar, which is\n * available at `services.Grammar`.\n */\nexport function createLangiumParser(services: LangiumCoreServices): LangiumParser {\n    const parser = prepareLangiumParser(services);\n    parser.finalize();\n    return parser;\n}\n\n/**\n * Create a Langium parser without finalizing it. 
This is used to extract more detailed error\n * information when the parser is initially validated.\n */\nexport function prepareLangiumParser(services: LangiumCoreServices): LangiumParser {\n    const grammar = services.Grammar;\n    const lexer = services.parser.Lexer;\n    const parser = new LangiumParser(services);\n    return createParser(grammar, parser, lexer.definition);\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CustomPatternMatcherFunc, TokenPattern, TokenType, TokenVocabulary } from 'chevrotain';\nimport type { AbstractRule, Grammar, Keyword, TerminalRule } from '../languages/generated/ast.js';\nimport type { Stream } from '../utils/stream.js';\nimport { Lexer } from 'chevrotain';\nimport { isKeyword, isParserRule, isTerminalRule } from '../languages/generated/ast.js';\nimport { streamAllContents } from '../utils/ast-utils.js';\nimport { getAllReachableRules, terminalRegex } from '../utils/grammar-utils.js';\nimport { getCaseInsensitivePattern, isWhitespace, partialMatches } from '../utils/regexp-utils.js';\nimport { stream } from '../utils/stream.js';\n\nexport interface TokenBuilderOptions {\n    caseInsensitive?: boolean\n}\n\nexport interface TokenBuilder {\n    buildTokens(grammar: Grammar, options?: TokenBuilderOptions): TokenVocabulary;\n}\n\nexport class DefaultTokenBuilder implements TokenBuilder {\n\n    buildTokens(grammar: Grammar, options?: TokenBuilderOptions): TokenVocabulary {\n        const reachableRules = stream(getAllReachableRules(grammar, false));\n        const terminalTokens: TokenType[] = this.buildTerminalTokens(reachableRules);\n        const tokens: TokenType[] = this.buildKeywordTokens(reachableRules, terminalTokens, options);\n\n        terminalTokens.forEach(terminalToken => {\n            const pattern = terminalToken.PATTERN;\n            if (typeof pattern === 'object' && pattern && 'test' in pattern && isWhitespace(pattern)) {\n                tokens.unshift(terminalToken);\n            } else {\n                tokens.push(terminalToken);\n            }\n        });\n        // We don't need to add the EOF token explicitly.\n        // It is automatically available at the end of the token stream.\n        return tokens;\n    }\n\n    protected buildTerminalTokens(rules: Stream): TokenType[] {\n        return rules.filter(isTerminalRule).filter(e => !e.fragment)\n            .map(terminal => this.buildTerminalToken(terminal)).toArray();\n    }\n\n    protected buildTerminalToken(terminal: TerminalRule): TokenType {\n        const regex = terminalRegex(terminal);\n        const pattern = this.requiresCustomPattern(regex) ? this.regexPatternFunction(regex) : regex;\n        const tokenType: TokenType = {\n            name: terminal.name,\n            PATTERN: pattern,\n            LINE_BREAKS: true\n        };\n        if (terminal.hidden) {\n            // Only skip tokens that are able to accept whitespace\n            tokenType.GROUP = isWhitespace(regex) ? 
Lexer.SKIPPED : 'hidden';\n        }\n        return tokenType;\n    }\n\n    protected requiresCustomPattern(regex: RegExp): boolean {\n        if (regex.flags.includes('u')) {\n            // Unicode regexes are not supported by Chevrotain.\n            return true;\n        } else if (regex.source.includes('?<=') || regex.source.includes('? {\n            stickyRegex.lastIndex = offset;\n            const execResult = stickyRegex.exec(text);\n            return execResult;\n        };\n    }\n\n    protected buildKeywordTokens(rules: Stream, terminalTokens: TokenType[], options?: TokenBuilderOptions): TokenType[] {\n        return rules\n            // We filter by parser rules, since keywords in terminal rules get transformed into regex and are not actual tokens\n            .filter(isParserRule)\n            .flatMap(rule => streamAllContents(rule).filter(isKeyword))\n            .distinct(e => e.value).toArray()\n            // Sort keywords by descending length\n            .sort((a, b) => b.value.length - a.value.length)\n            .map(keyword => this.buildKeywordToken(keyword, terminalTokens, Boolean(options?.caseInsensitive)));\n    }\n\n    protected buildKeywordToken(keyword: Keyword, terminalTokens: TokenType[], caseInsensitive: boolean): TokenType {\n        return {\n            name: keyword.value,\n            PATTERN: this.buildKeywordPattern(keyword, caseInsensitive),\n            LONGER_ALT: this.findLongerAlt(keyword, terminalTokens)\n        };\n    }\n\n    protected buildKeywordPattern(keyword: Keyword, caseInsensitive: boolean): TokenPattern {\n        return caseInsensitive ?\n            new RegExp(getCaseInsensitivePattern(keyword.value)) :\n            keyword.value;\n    }\n\n    protected findLongerAlt(keyword: Keyword, terminalTokens: TokenType[]): TokenType[] {\n        return terminalTokens.reduce((longerAlts: TokenType[], token) => {\n            const pattern = token?.PATTERN as RegExp;\n            if (pattern?.source && partialMatches('^' + pattern.source + '$', keyword.value)) {\n                longerAlts.push(token);\n            }\n            return longerAlts;\n        }, []);\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AbstractElement, AbstractRule } from '../languages/generated/ast.js';\nimport type { CstNode } from '../syntax-tree.js';\nimport { isCrossReference, isRuleCall } from '../languages/generated/ast.js';\nimport { getCrossReferenceTerminal, getRuleType } from '../utils/grammar-utils.js';\n\n/**\n * Language-specific service for converting string values from the source text format into a value to be held in the AST.\n */\nexport interface ValueConverter {\n    /**\n     * Converts a string value from the source text format into a value to be held in the AST.\n     */\n    convert(input: string, cstNode: CstNode): ValueType;\n}\n\nexport type ValueType = string | number | boolean | bigint | Date;\n\nexport class DefaultValueConverter implements ValueConverter {\n\n    convert(input: string, cstNode: CstNode): ValueType {\n        let feature: AbstractElement | undefined = cstNode.grammarSource;\n        if (isCrossReference(feature)) {\n            feature = getCrossReferenceTerminal(feature);\n        }\n      
  if (isRuleCall(feature)) {\n            const rule = feature.rule.ref;\n            if (!rule) {\n                throw new Error('This cst node was not parsed by a rule.');\n            }\n            return this.runConverter(rule, input, cstNode);\n        }\n        return input;\n    }\n\n    // eslint-disable-next-line @typescript-eslint/no-unused-vars\n    protected runConverter(rule: AbstractRule, input: string, cstNode: CstNode): ValueType {\n        switch (rule.name.toUpperCase()) {\n            case 'INT': return ValueConverter.convertInt(input);\n            case 'STRING': return ValueConverter.convertString(input);\n            case 'ID': return ValueConverter.convertID(input);\n        }\n        switch (getRuleType(rule)?.toLowerCase()) {\n            case 'number': return ValueConverter.convertNumber(input);\n            case 'boolean': return ValueConverter.convertBoolean(input);\n            case 'bigint': return ValueConverter.convertBigint(input);\n            case 'date': return ValueConverter.convertDate(input);\n            default: return input;\n        }\n    }\n}\n\nexport namespace ValueConverter {\n\n    export function convertString(input: string): string {\n        let result = '';\n        for (let i = 1; i < input.length - 1; i++) {\n            const c = input.charAt(i);\n            if (c === '\\\\') {\n                const c1 = input.charAt(++i);\n                result += convertEscapeCharacter(c1);\n            } else {\n                result += c;\n            }\n        }\n        return result;\n    }\n\n    function convertEscapeCharacter(char: string): string {\n        switch (char) {\n            case 'b': return '\\b';\n            case 'f': return '\\f';\n            case 'n': return '\\n';\n            case 'r': return '\\r';\n            case 't': return '\\t';\n            case 'v': return '\\v';\n            case '0': return '\\0';\n            default: return char;\n        }\n    }\n\n    export function convertID(input: string): string {\n        if (input.charAt(0) === '^') {\n            return input.substring(1);\n        } else {\n            return input;\n        }\n    }\n\n    export function convertInt(input: string): number {\n        return parseInt(input);\n    }\n\n    export function convertBigint(input: string): bigint {\n        return BigInt(input);\n    }\n\n    export function convertDate(input: string): Date {\n        return new Date(input);\n    }\n\n    export function convertNumber(input: string): number {\n        return Number(input);\n    }\n\n    export function convertBoolean(input: string): boolean {\n        return input.toLowerCase() === 'true';\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n// eslint-disable-next-line no-restricted-imports\nexport * from 'vscode-jsonrpc/lib/common/cancellation.js';\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken, CancellationTokenSource, 
type AbstractCancellationTokenSource } from '../utils/cancellation.js';\n\nexport type MaybePromise = T | Promise\n\n/**\n * Delays the execution of the current code to the next tick of the event loop.\n * Don't call this method directly in a tight loop to prevent too many promises from being created.\n */\nexport function delayNextTick(): Promise {\n    return new Promise(resolve => {\n        // In case we are running in a non-node environment, `setImmediate` isn't available.\n        // Using `setTimeout` of the browser API accomplishes the same result.\n        if (typeof setImmediate === 'undefined') {\n            setTimeout(resolve, 0);\n        } else {\n            setImmediate(resolve);\n        }\n    });\n}\n\nlet lastTick = 0;\nlet globalInterruptionPeriod = 10;\n\n/**\n * Reset the global interruption period and create a cancellation token source.\n */\nexport function startCancelableOperation(): AbstractCancellationTokenSource {\n    lastTick = Date.now();\n    return new CancellationTokenSource();\n}\n\n/**\n * Change the period duration for `interruptAndCheck` to the given number of milliseconds.\n * The default value is 10ms.\n */\nexport function setInterruptionPeriod(period: number): void {\n    globalInterruptionPeriod = period;\n}\n\n/**\n * This symbol may be thrown in an asynchronous context by any Langium service that receives\n * a `CancellationToken`. This means that the promise returned by such a service is rejected with\n * this symbol as rejection reason.\n */\nexport const OperationCancelled = Symbol('OperationCancelled');\n\n/**\n * Use this in a `catch` block to check whether the thrown object indicates that the operation\n * has been cancelled.\n */\nexport function isOperationCancelled(err: unknown): err is typeof OperationCancelled {\n    return err === OperationCancelled;\n}\n\n/**\n * This function does two things:\n *  1. Check the elapsed time since the last call to this function or to `startCancelableOperation`. If the predefined\n *     period (configured with `setInterruptionPeriod`) is exceeded, execution is delayed with `delayNextTick`.\n *  2. 
If the predefined period is not met yet or execution is resumed after an interruption, the given cancellation\n *     token is checked, and if cancellation is requested, `OperationCanceled` is thrown.\n *\n * All services in Langium that receive a `CancellationToken` may potentially call this function, so the\n * `CancellationToken` must be caught (with an `async` try-catch block or a `catch` callback attached to\n * the promise) to avoid that event being exposed as an error.\n */\nexport async function interruptAndCheck(token: CancellationToken): Promise {\n    if (token === CancellationToken.None) {\n        // Early exit in case cancellation was disabled by the caller\n        return;\n    }\n    const current = Date.now();\n    if (current - lastTick >= globalInterruptionPeriod) {\n        lastTick = current;\n        await delayNextTick();\n    }\n    if (token.isCancellationRequested) {\n        throw OperationCancelled;\n    }\n}\n\n/**\n * Simple implementation of the deferred pattern.\n * An object that exposes a promise and functions to resolve and reject it.\n */\nexport class Deferred {\n    resolve: (value: T) => this;\n    reject: (err?: unknown) => this;\n\n    promise = new Promise((resolve, reject) => {\n        this.resolve = (arg) => {\n            resolve(arg);\n            return this;\n        };\n        this.reject = (err) => {\n            reject(err);\n            return this;\n        };\n    });\n}\n", "/* --------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n * ------------------------------------------------------------------------------------------ */\n'use strict';\nclass FullTextDocument {\n    constructor(uri, languageId, version, content) {\n        this._uri = uri;\n        this._languageId = languageId;\n        this._version = version;\n        this._content = content;\n        this._lineOffsets = undefined;\n    }\n    get uri() {\n        return this._uri;\n    }\n    get languageId() {\n        return this._languageId;\n    }\n    get version() {\n        return this._version;\n    }\n    getText(range) {\n        if (range) {\n            const start = this.offsetAt(range.start);\n            const end = this.offsetAt(range.end);\n            return this._content.substring(start, end);\n        }\n        return this._content;\n    }\n    update(changes, version) {\n        for (const change of changes) {\n            if (FullTextDocument.isIncremental(change)) {\n                // makes sure start is before end\n                const range = getWellformedRange(change.range);\n                // update content\n                const startOffset = this.offsetAt(range.start);\n                const endOffset = this.offsetAt(range.end);\n                this._content = this._content.substring(0, startOffset) + change.text + this._content.substring(endOffset, this._content.length);\n                // update the offsets\n                const startLine = Math.max(range.start.line, 0);\n                const endLine = Math.max(range.end.line, 0);\n                let lineOffsets = this._lineOffsets;\n                const addedLineOffsets = computeLineOffsets(change.text, false, startOffset);\n                if (endLine - startLine === addedLineOffsets.length) {\n                    for (let i = 0, len = addedLineOffsets.length; i < len; i++) {\n           
             lineOffsets[i + startLine + 1] = addedLineOffsets[i];\n                    }\n                }\n                else {\n                    if (addedLineOffsets.length < 10000) {\n                        lineOffsets.splice(startLine + 1, endLine - startLine, ...addedLineOffsets);\n                    }\n                    else { // avoid too many arguments for splice\n                        this._lineOffsets = lineOffsets = lineOffsets.slice(0, startLine + 1).concat(addedLineOffsets, lineOffsets.slice(endLine + 1));\n                    }\n                }\n                const diff = change.text.length - (endOffset - startOffset);\n                if (diff !== 0) {\n                    for (let i = startLine + 1 + addedLineOffsets.length, len = lineOffsets.length; i < len; i++) {\n                        lineOffsets[i] = lineOffsets[i] + diff;\n                    }\n                }\n            }\n            else if (FullTextDocument.isFull(change)) {\n                this._content = change.text;\n                this._lineOffsets = undefined;\n            }\n            else {\n                throw new Error('Unknown change event received');\n            }\n        }\n        this._version = version;\n    }\n    getLineOffsets() {\n        if (this._lineOffsets === undefined) {\n            this._lineOffsets = computeLineOffsets(this._content, true);\n        }\n        return this._lineOffsets;\n    }\n    positionAt(offset) {\n        offset = Math.max(Math.min(offset, this._content.length), 0);\n        const lineOffsets = this.getLineOffsets();\n        let low = 0, high = lineOffsets.length;\n        if (high === 0) {\n            return { line: 0, character: offset };\n        }\n        while (low < high) {\n            const mid = Math.floor((low + high) / 2);\n            if (lineOffsets[mid] > offset) {\n                high = mid;\n            }\n            else {\n                low = mid + 1;\n            }\n        }\n        // low is the least x for which the line offset is larger than the current offset\n        // or array.length if no line offset is larger than the current offset\n        const line = low - 1;\n        offset = this.ensureBeforeEOL(offset, lineOffsets[line]);\n        return { line, character: offset - lineOffsets[line] };\n    }\n    offsetAt(position) {\n        const lineOffsets = this.getLineOffsets();\n        if (position.line >= lineOffsets.length) {\n            return this._content.length;\n        }\n        else if (position.line < 0) {\n            return 0;\n        }\n        const lineOffset = lineOffsets[position.line];\n        if (position.character <= 0) {\n            return lineOffset;\n        }\n        const nextLineOffset = (position.line + 1 < lineOffsets.length) ? 
lineOffsets[position.line + 1] : this._content.length;\n        const offset = Math.min(lineOffset + position.character, nextLineOffset);\n        return this.ensureBeforeEOL(offset, lineOffset);\n    }\n    ensureBeforeEOL(offset, lineOffset) {\n        while (offset > lineOffset && isEOL(this._content.charCodeAt(offset - 1))) {\n            offset--;\n        }\n        return offset;\n    }\n    get lineCount() {\n        return this.getLineOffsets().length;\n    }\n    static isIncremental(event) {\n        const candidate = event;\n        return candidate !== undefined && candidate !== null &&\n            typeof candidate.text === 'string' && candidate.range !== undefined &&\n            (candidate.rangeLength === undefined || typeof candidate.rangeLength === 'number');\n    }\n    static isFull(event) {\n        const candidate = event;\n        return candidate !== undefined && candidate !== null &&\n            typeof candidate.text === 'string' && candidate.range === undefined && candidate.rangeLength === undefined;\n    }\n}\nexport var TextDocument;\n(function (TextDocument) {\n    /**\n     * Creates a new text document.\n     *\n     * @param uri The document's uri.\n     * @param languageId  The document's language Id.\n     * @param version The document's initial version number.\n     * @param content The document's content.\n     */\n    function create(uri, languageId, version, content) {\n        return new FullTextDocument(uri, languageId, version, content);\n    }\n    TextDocument.create = create;\n    /**\n     * Updates a TextDocument by modifying its content.\n     *\n     * @param document the document to update. Only documents created by TextDocument.create are valid inputs.\n     * @param changes the changes to apply to the document.\n     * @param version the changes version for the document.\n     * @returns The updated TextDocument. 
Note: That's the same document instance passed in as first parameter.\n     *\n     */\n    function update(document, changes, version) {\n        if (document instanceof FullTextDocument) {\n            document.update(changes, version);\n            return document;\n        }\n        else {\n            throw new Error('TextDocument.update: document must be created by TextDocument.create');\n        }\n    }\n    TextDocument.update = update;\n    function applyEdits(document, edits) {\n        const text = document.getText();\n        const sortedEdits = mergeSort(edits.map(getWellformedEdit), (a, b) => {\n            const diff = a.range.start.line - b.range.start.line;\n            if (diff === 0) {\n                return a.range.start.character - b.range.start.character;\n            }\n            return diff;\n        });\n        let lastModifiedOffset = 0;\n        const spans = [];\n        for (const e of sortedEdits) {\n            const startOffset = document.offsetAt(e.range.start);\n            if (startOffset < lastModifiedOffset) {\n                throw new Error('Overlapping edit');\n            }\n            else if (startOffset > lastModifiedOffset) {\n                spans.push(text.substring(lastModifiedOffset, startOffset));\n            }\n            if (e.newText.length) {\n                spans.push(e.newText);\n            }\n            lastModifiedOffset = document.offsetAt(e.range.end);\n        }\n        spans.push(text.substr(lastModifiedOffset));\n        return spans.join('');\n    }\n    TextDocument.applyEdits = applyEdits;\n})(TextDocument || (TextDocument = {}));\nfunction mergeSort(data, compare) {\n    if (data.length <= 1) {\n        // sorted\n        return data;\n    }\n    const p = (data.length / 2) | 0;\n    const left = data.slice(0, p);\n    const right = data.slice(p);\n    mergeSort(left, compare);\n    mergeSort(right, compare);\n    let leftIdx = 0;\n    let rightIdx = 0;\n    let i = 0;\n    while (leftIdx < left.length && rightIdx < right.length) {\n        const ret = compare(left[leftIdx], right[rightIdx]);\n        if (ret <= 0) {\n            // smaller_equal -> take left to preserve order\n            data[i++] = left[leftIdx++];\n        }\n        else {\n            // greater -> take right\n            data[i++] = right[rightIdx++];\n        }\n    }\n    while (leftIdx < left.length) {\n        data[i++] = left[leftIdx++];\n    }\n    while (rightIdx < right.length) {\n        data[i++] = right[rightIdx++];\n    }\n    return data;\n}\nfunction computeLineOffsets(text, isAtLineStart, textOffset = 0) {\n    const result = isAtLineStart ? 
[textOffset] : [];\n    for (let i = 0; i < text.length; i++) {\n        const ch = text.charCodeAt(i);\n        if (isEOL(ch)) {\n            if (ch === 13 /* CharCode.CarriageReturn */ && i + 1 < text.length && text.charCodeAt(i + 1) === 10 /* CharCode.LineFeed */) {\n                i++;\n            }\n            result.push(textOffset + i + 1);\n        }\n    }\n    return result;\n}\nfunction isEOL(char) {\n    return char === 13 /* CharCode.CarriageReturn */ || char === 10 /* CharCode.LineFeed */;\n}\nfunction getWellformedRange(range) {\n    const start = range.start;\n    const end = range.end;\n    if (start.line > end.line || (start.line === end.line && start.character > end.character)) {\n        return { start: end, end: start };\n    }\n    return range;\n}\nfunction getWellformedEdit(textEdit) {\n    const range = getWellformedRange(textEdit.range);\n    if (range !== textEdit.range) {\n        return { newText: textEdit.newText, range };\n    }\n    return textEdit;\n}\n", "// 'path' module extracted from Node.js v8.11.1 (only the posix part)\n// transplited with Babel\n\n// Copyright Joyent, Inc. and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n'use strict';\n\nfunction assertPath(path) {\n  if (typeof path !== 'string') {\n    throw new TypeError('Path must be a string. Received ' + JSON.stringify(path));\n  }\n}\n\n// Resolves . and .. 
elements in a path with directory names\nfunction normalizeStringPosix(path, allowAboveRoot) {\n  var res = '';\n  var lastSegmentLength = 0;\n  var lastSlash = -1;\n  var dots = 0;\n  var code;\n  for (var i = 0; i <= path.length; ++i) {\n    if (i < path.length)\n      code = path.charCodeAt(i);\n    else if (code === 47 /*/*/)\n      break;\n    else\n      code = 47 /*/*/;\n    if (code === 47 /*/*/) {\n      if (lastSlash === i - 1 || dots === 1) {\n        // NOOP\n      } else if (lastSlash !== i - 1 && dots === 2) {\n        if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 /*.*/ || res.charCodeAt(res.length - 2) !== 46 /*.*/) {\n          if (res.length > 2) {\n            var lastSlashIndex = res.lastIndexOf('/');\n            if (lastSlashIndex !== res.length - 1) {\n              if (lastSlashIndex === -1) {\n                res = '';\n                lastSegmentLength = 0;\n              } else {\n                res = res.slice(0, lastSlashIndex);\n                lastSegmentLength = res.length - 1 - res.lastIndexOf('/');\n              }\n              lastSlash = i;\n              dots = 0;\n              continue;\n            }\n          } else if (res.length === 2 || res.length === 1) {\n            res = '';\n            lastSegmentLength = 0;\n            lastSlash = i;\n            dots = 0;\n            continue;\n          }\n        }\n        if (allowAboveRoot) {\n          if (res.length > 0)\n            res += '/..';\n          else\n            res = '..';\n          lastSegmentLength = 2;\n        }\n      } else {\n        if (res.length > 0)\n          res += '/' + path.slice(lastSlash + 1, i);\n        else\n          res = path.slice(lastSlash + 1, i);\n        lastSegmentLength = i - lastSlash - 1;\n      }\n      lastSlash = i;\n      dots = 0;\n    } else if (code === 46 /*.*/ && dots !== -1) {\n      ++dots;\n    } else {\n      dots = -1;\n    }\n  }\n  return res;\n}\n\nfunction _format(sep, pathObject) {\n  var dir = pathObject.dir || pathObject.root;\n  var base = pathObject.base || (pathObject.name || '') + (pathObject.ext || '');\n  if (!dir) {\n    return base;\n  }\n  if (dir === pathObject.root) {\n    return dir + base;\n  }\n  return dir + sep + base;\n}\n\nvar posix = {\n  // path.resolve([from ...], to)\n  resolve: function resolve() {\n    var resolvedPath = '';\n    var resolvedAbsolute = false;\n    var cwd;\n\n    for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {\n      var path;\n      if (i >= 0)\n        path = arguments[i];\n      else {\n        if (cwd === undefined)\n          cwd = process.cwd();\n        path = cwd;\n      }\n\n      assertPath(path);\n\n      // Skip empty entries\n      if (path.length === 0) {\n        continue;\n      }\n\n      resolvedPath = path + '/' + resolvedPath;\n      resolvedAbsolute = path.charCodeAt(0) === 47 /*/*/;\n    }\n\n    // At this point the path should be resolved to a full absolute path, but\n    // handle relative paths to be safe (might happen when process.cwd() fails)\n\n    // Normalize the path\n    resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute);\n\n    if (resolvedAbsolute) {\n      if (resolvedPath.length > 0)\n        return '/' + resolvedPath;\n      else\n        return '/';\n    } else if (resolvedPath.length > 0) {\n      return resolvedPath;\n    } else {\n      return '.';\n    }\n  },\n\n  normalize: function normalize(path) {\n    assertPath(path);\n\n    if (path.length === 0) return 
'.';\n\n    var isAbsolute = path.charCodeAt(0) === 47 /*/*/;\n    var trailingSeparator = path.charCodeAt(path.length - 1) === 47 /*/*/;\n\n    // Normalize the path\n    path = normalizeStringPosix(path, !isAbsolute);\n\n    if (path.length === 0 && !isAbsolute) path = '.';\n    if (path.length > 0 && trailingSeparator) path += '/';\n\n    if (isAbsolute) return '/' + path;\n    return path;\n  },\n\n  isAbsolute: function isAbsolute(path) {\n    assertPath(path);\n    return path.length > 0 && path.charCodeAt(0) === 47 /*/*/;\n  },\n\n  join: function join() {\n    if (arguments.length === 0)\n      return '.';\n    var joined;\n    for (var i = 0; i < arguments.length; ++i) {\n      var arg = arguments[i];\n      assertPath(arg);\n      if (arg.length > 0) {\n        if (joined === undefined)\n          joined = arg;\n        else\n          joined += '/' + arg;\n      }\n    }\n    if (joined === undefined)\n      return '.';\n    return posix.normalize(joined);\n  },\n\n  relative: function relative(from, to) {\n    assertPath(from);\n    assertPath(to);\n\n    if (from === to) return '';\n\n    from = posix.resolve(from);\n    to = posix.resolve(to);\n\n    if (from === to) return '';\n\n    // Trim any leading backslashes\n    var fromStart = 1;\n    for (; fromStart < from.length; ++fromStart) {\n      if (from.charCodeAt(fromStart) !== 47 /*/*/)\n        break;\n    }\n    var fromEnd = from.length;\n    var fromLen = fromEnd - fromStart;\n\n    // Trim any leading backslashes\n    var toStart = 1;\n    for (; toStart < to.length; ++toStart) {\n      if (to.charCodeAt(toStart) !== 47 /*/*/)\n        break;\n    }\n    var toEnd = to.length;\n    var toLen = toEnd - toStart;\n\n    // Compare paths to find the longest common path from root\n    var length = fromLen < toLen ? 
fromLen : toLen;\n    var lastCommonSep = -1;\n    var i = 0;\n    for (; i <= length; ++i) {\n      if (i === length) {\n        if (toLen > length) {\n          if (to.charCodeAt(toStart + i) === 47 /*/*/) {\n            // We get here if `from` is the exact base path for `to`.\n            // For example: from='/foo/bar'; to='/foo/bar/baz'\n            return to.slice(toStart + i + 1);\n          } else if (i === 0) {\n            // We get here if `from` is the root\n            // For example: from='/'; to='/foo'\n            return to.slice(toStart + i);\n          }\n        } else if (fromLen > length) {\n          if (from.charCodeAt(fromStart + i) === 47 /*/*/) {\n            // We get here if `to` is the exact base path for `from`.\n            // For example: from='/foo/bar/baz'; to='/foo/bar'\n            lastCommonSep = i;\n          } else if (i === 0) {\n            // We get here if `to` is the root.\n            // For example: from='/foo'; to='/'\n            lastCommonSep = 0;\n          }\n        }\n        break;\n      }\n      var fromCode = from.charCodeAt(fromStart + i);\n      var toCode = to.charCodeAt(toStart + i);\n      if (fromCode !== toCode)\n        break;\n      else if (fromCode === 47 /*/*/)\n        lastCommonSep = i;\n    }\n\n    var out = '';\n    // Generate the relative path based on the path difference between `to`\n    // and `from`\n    for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) {\n      if (i === fromEnd || from.charCodeAt(i) === 47 /*/*/) {\n        if (out.length === 0)\n          out += '..';\n        else\n          out += '/..';\n      }\n    }\n\n    // Lastly, append the rest of the destination (`to`) path that comes after\n    // the common path parts\n    if (out.length > 0)\n      return out + to.slice(toStart + lastCommonSep);\n    else {\n      toStart += lastCommonSep;\n      if (to.charCodeAt(toStart) === 47 /*/*/)\n        ++toStart;\n      return to.slice(toStart);\n    }\n  },\n\n  _makeLong: function _makeLong(path) {\n    return path;\n  },\n\n  dirname: function dirname(path) {\n    assertPath(path);\n    if (path.length === 0) return '.';\n    var code = path.charCodeAt(0);\n    var hasRoot = code === 47 /*/*/;\n    var end = -1;\n    var matchedSlash = true;\n    for (var i = path.length - 1; i >= 1; --i) {\n      code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          if (!matchedSlash) {\n            end = i;\n            break;\n          }\n        } else {\n        // We saw the first non-path separator\n        matchedSlash = false;\n      }\n    }\n\n    if (end === -1) return hasRoot ? 
'/' : '.';\n    if (hasRoot && end === 1) return '//';\n    return path.slice(0, end);\n  },\n\n  basename: function basename(path, ext) {\n    if (ext !== undefined && typeof ext !== 'string') throw new TypeError('\"ext\" argument must be a string');\n    assertPath(path);\n\n    var start = 0;\n    var end = -1;\n    var matchedSlash = true;\n    var i;\n\n    if (ext !== undefined && ext.length > 0 && ext.length <= path.length) {\n      if (ext.length === path.length && ext === path) return '';\n      var extIdx = ext.length - 1;\n      var firstNonSlashEnd = -1;\n      for (i = path.length - 1; i >= 0; --i) {\n        var code = path.charCodeAt(i);\n        if (code === 47 /*/*/) {\n            // If we reached a path separator that was not part of a set of path\n            // separators at the end of the string, stop now\n            if (!matchedSlash) {\n              start = i + 1;\n              break;\n            }\n          } else {\n          if (firstNonSlashEnd === -1) {\n            // We saw the first non-path separator, remember this index in case\n            // we need it if the extension ends up not matching\n            matchedSlash = false;\n            firstNonSlashEnd = i + 1;\n          }\n          if (extIdx >= 0) {\n            // Try to match the explicit extension\n            if (code === ext.charCodeAt(extIdx)) {\n              if (--extIdx === -1) {\n                // We matched the extension, so mark this as the end of our path\n                // component\n                end = i;\n              }\n            } else {\n              // Extension does not match, so our result is the entire path\n              // component\n              extIdx = -1;\n              end = firstNonSlashEnd;\n            }\n          }\n        }\n      }\n\n      if (start === end) end = firstNonSlashEnd;else if (end === -1) end = path.length;\n      return path.slice(start, end);\n    } else {\n      for (i = path.length - 1; i >= 0; --i) {\n        if (path.charCodeAt(i) === 47 /*/*/) {\n            // If we reached a path separator that was not part of a set of path\n            // separators at the end of the string, stop now\n            if (!matchedSlash) {\n              start = i + 1;\n              break;\n            }\n          } else if (end === -1) {\n          // We saw the first non-path separator, mark this as the end of our\n          // path component\n          matchedSlash = false;\n          end = i + 1;\n        }\n      }\n\n      if (end === -1) return '';\n      return path.slice(start, end);\n    }\n  },\n\n  extname: function extname(path) {\n    assertPath(path);\n    var startDot = -1;\n    var startPart = 0;\n    var end = -1;\n    var matchedSlash = true;\n    // Track the state of characters (if any) we see before our first dot and\n    // after any path separator we find\n    var preDotState = 0;\n    for (var i = path.length - 1; i >= 0; --i) {\n      var code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          // If we reached a path separator that was not part of a set of path\n          // separators at the end of the string, stop now\n          if (!matchedSlash) {\n            startPart = i + 1;\n            break;\n          }\n          continue;\n        }\n      if (end === -1) {\n        // We saw the first non-path separator, mark this as the end of our\n        // extension\n        matchedSlash = false;\n        end = i + 1;\n      }\n      if (code === 46 /*.*/) {\n          // If this is our first dot, mark 
it as the start of our extension\n          if (startDot === -1)\n            startDot = i;\n          else if (preDotState !== 1)\n            preDotState = 1;\n      } else if (startDot !== -1) {\n        // We saw a non-dot and non-path separator before our dot, so we should\n        // have a good chance at having a non-empty extension\n        preDotState = -1;\n      }\n    }\n\n    if (startDot === -1 || end === -1 ||\n        // We saw a non-dot character immediately before the dot\n        preDotState === 0 ||\n        // The (right-most) trimmed path component is exactly '..'\n        preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n      return '';\n    }\n    return path.slice(startDot, end);\n  },\n\n  format: function format(pathObject) {\n    if (pathObject === null || typeof pathObject !== 'object') {\n      throw new TypeError('The \"pathObject\" argument must be of type Object. Received type ' + typeof pathObject);\n    }\n    return _format('/', pathObject);\n  },\n\n  parse: function parse(path) {\n    assertPath(path);\n\n    var ret = { root: '', dir: '', base: '', ext: '', name: '' };\n    if (path.length === 0) return ret;\n    var code = path.charCodeAt(0);\n    var isAbsolute = code === 47 /*/*/;\n    var start;\n    if (isAbsolute) {\n      ret.root = '/';\n      start = 1;\n    } else {\n      start = 0;\n    }\n    var startDot = -1;\n    var startPart = 0;\n    var end = -1;\n    var matchedSlash = true;\n    var i = path.length - 1;\n\n    // Track the state of characters (if any) we see before our first dot and\n    // after any path separator we find\n    var preDotState = 0;\n\n    // Get non-dir info\n    for (; i >= start; --i) {\n      code = path.charCodeAt(i);\n      if (code === 47 /*/*/) {\n          // If we reached a path separator that was not part of a set of path\n          // separators at the end of the string, stop now\n          if (!matchedSlash) {\n            startPart = i + 1;\n            break;\n          }\n          continue;\n        }\n      if (end === -1) {\n        // We saw the first non-path separator, mark this as the end of our\n        // extension\n        matchedSlash = false;\n        end = i + 1;\n      }\n      if (code === 46 /*.*/) {\n          // If this is our first dot, mark it as the start of our extension\n          if (startDot === -1) startDot = i;else if (preDotState !== 1) preDotState = 1;\n        } else if (startDot !== -1) {\n        // We saw a non-dot and non-path separator before our dot, so we should\n        // have a good chance at having a non-empty extension\n        preDotState = -1;\n      }\n    }\n\n    if (startDot === -1 || end === -1 ||\n    // We saw a non-dot character immediately before the dot\n    preDotState === 0 ||\n    // The (right-most) trimmed path component is exactly '..'\n    preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {\n      if (end !== -1) {\n        if (startPart === 0 && isAbsolute) ret.base = ret.name = path.slice(1, end);else ret.base = ret.name = path.slice(startPart, end);\n      }\n    } else {\n      if (startPart === 0 && isAbsolute) {\n        ret.name = path.slice(1, startDot);\n        ret.base = path.slice(1, end);\n      } else {\n        ret.name = path.slice(startPart, startDot);\n        ret.base = path.slice(startPart, end);\n      }\n      ret.ext = path.slice(startDot, end);\n    }\n\n    if (startPart > 0) ret.dir = path.slice(0, startPart - 1);else if (isAbsolute) ret.dir = '/';\n\n    return 
ret;\n  },\n\n  sep: '/',\n  delimiter: ':',\n  win32: null,\n  posix: null\n};\n\nposix.posix = posix;\n\nmodule.exports = posix;\n", "// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n", "// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};", "__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))", "// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n'use strict';\n\n// !!!!!\n// SEE https://github.com/microsoft/vscode/blob/master/src/vs/base/common/platform.ts\n// !!!!!\n\ndeclare const process: { platform: 'win32' };\ndeclare const navigator: { userAgent: string };\n\nexport let isWindows: boolean;\n\nif (typeof process === 'object') {\n\tisWindows = process.platform === 'win32';\n} else if (typeof navigator === 'object') {\n\tlet userAgent = navigator.userAgent;\n\tisWindows = userAgent.indexOf('Windows') >= 0;\n}\n", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. 
See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n'use strict';\n\nimport { CharCode } from './charCode'\nimport { isWindows } from './platform';\n\nconst _schemePattern = /^\\w[\\w\\d+.-]*$/;\nconst _singleSlashStart = /^\\//;\nconst _doubleSlashStart = /^\\/\\//;\n\nfunction _validateUri(ret: URI, _strict?: boolean): void {\n\n\t// scheme, must be set\n\tif (!ret.scheme && _strict) {\n\t\tthrow new Error(`[UriError]: Scheme is missing: {scheme: \"\", authority: \"${ret.authority}\", path: \"${ret.path}\", query: \"${ret.query}\", fragment: \"${ret.fragment}\"}`);\n\t}\n\n\t// scheme, https://tools.ietf.org/html/rfc3986#section-3.1\n\t// ALPHA *( ALPHA / DIGIT / \"+\" / \"-\" / \".\" )\n\tif (ret.scheme && !_schemePattern.test(ret.scheme)) {\n\t\tthrow new Error('[UriError]: Scheme contains illegal characters.');\n\t}\n\n\t// path, http://tools.ietf.org/html/rfc3986#section-3.3\n\t// If a URI contains an authority component, then the path component\n\t// must either be empty or begin with a slash (\"/\") character.  If a URI\n\t// does not contain an authority component, then the path cannot begin\n\t// with two slash characters (\"//\").\n\tif (ret.path) {\n\t\tif (ret.authority) {\n\t\t\tif (!_singleSlashStart.test(ret.path)) {\n\t\t\t\tthrow new Error('[UriError]: If a URI contains an authority component, then the path component must either be empty or begin with a slash (\"/\") character');\n\t\t\t}\n\t\t} else {\n\t\t\tif (_doubleSlashStart.test(ret.path)) {\n\t\t\t\tthrow new Error('[UriError]: If a URI does not contain an authority component, then the path cannot begin with two slash characters (\"//\")');\n\t\t\t}\n\t\t}\n\t}\n}\n\n// for a while we allowed uris *without* schemes and this is the migration\n// for them, e.g. an uri without scheme and without strict-mode warns and falls\n// back to the file-scheme. that should cause the least carnage and still be a\n// clear warning\nfunction _schemeFix(scheme: string, _strict: boolean): string {\n\tif (!scheme && !_strict) {\n\t\treturn 'file';\n\t}\n\treturn scheme;\n}\n\n// implements a bit of https://tools.ietf.org/html/rfc3986#section-5\nfunction _referenceResolution(scheme: string, path: string): string {\n\n\t// the slash-character is our 'default base' as we don't\n\t// support constructing URIs relative to other URIs. 
This\n\t// also means that we alter and potentially break paths.\n\t// see https://tools.ietf.org/html/rfc3986#section-5.1.4\n\tswitch (scheme) {\n\t\tcase 'https':\n\t\tcase 'http':\n\t\tcase 'file':\n\t\t\tif (!path) {\n\t\t\t\tpath = _slash;\n\t\t\t} else if (path[0] !== _slash) {\n\t\t\t\tpath = _slash + path;\n\t\t\t}\n\t\t\tbreak;\n\t}\n\treturn path;\n}\n\nconst _empty = '';\nconst _slash = '/';\nconst _regexp = /^(([^:/?#]+?):)?(\\/\\/([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?/;\n\n/**\n * Uniform Resource Identifier (URI) http://tools.ietf.org/html/rfc3986.\n * This class is a simple parser which creates the basic component parts\n * (http://tools.ietf.org/html/rfc3986#section-3) with minimal validation\n * and encoding.\n *\n * ```txt\n *       foo://example.com:8042/over/there?name=ferret#nose\n *       \\_/   \\______________/\\_________/ \\_________/ \\__/\n *        |           |            |            |        |\n *     scheme     authority       path        query   fragment\n *        |   _____________________|__\n *       / \\ /                        \\\n *       urn:example:animal:ferret:nose\n * ```\n */\nexport class URI implements UriComponents {\n\n\tstatic isUri(thing: any): thing is URI {\n\t\tif (thing instanceof URI) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!thing) {\n\t\t\treturn false;\n\t\t}\n\t\treturn typeof (thing).authority === 'string'\n\t\t\t&& typeof (thing).fragment === 'string'\n\t\t\t&& typeof (thing).path === 'string'\n\t\t\t&& typeof (thing).query === 'string'\n\t\t\t&& typeof (thing).scheme === 'string'\n\t\t\t&& typeof (thing).fsPath === 'string'\n\t\t\t&& typeof (thing).with === 'function'\n\t\t\t&& typeof (thing).toString === 'function';\n\t}\n\n\t/**\n\t * scheme is the 'http' part of 'http://www.example.com/some/path?query#fragment'.\n\t * The part before the first colon.\n\t */\n\treadonly scheme: string;\n\n\t/**\n\t * authority is the 'www.example.com' part of 'http://www.example.com/some/path?query#fragment'.\n\t * The part between the first double slashes and the next slash.\n\t */\n\treadonly authority: string;\n\n\t/**\n\t * path is the '/some/path' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly path: string;\n\n\t/**\n\t * query is the 'query' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly query: string;\n\n\t/**\n\t * fragment is the 'fragment' part of 'http://www.example.com/some/path?query#fragment'.\n\t */\n\treadonly fragment: string;\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(scheme: string, authority?: string, path?: string, query?: string, fragment?: string, _strict?: boolean);\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(components: UriComponents);\n\n\t/**\n\t * @internal\n\t */\n\tprotected constructor(schemeOrData: string | UriComponents, authority?: string, path?: string, query?: string, fragment?: string, _strict: boolean = false) {\n\n\t\tif (typeof schemeOrData === 'object') {\n\t\t\tthis.scheme = schemeOrData.scheme || _empty;\n\t\t\tthis.authority = schemeOrData.authority || _empty;\n\t\t\tthis.path = schemeOrData.path || _empty;\n\t\t\tthis.query = schemeOrData.query || _empty;\n\t\t\tthis.fragment = schemeOrData.fragment || _empty;\n\t\t\t// no validation because it's this URI\n\t\t\t// that creates uri components.\n\t\t\t// _validateUri(this);\n\t\t} else {\n\t\t\tthis.scheme = _schemeFix(schemeOrData, _strict);\n\t\t\tthis.authority = authority || _empty;\n\t\t\tthis.path = _referenceResolution(this.scheme, path || 
_empty);\n\t\t\tthis.query = query || _empty;\n\t\t\tthis.fragment = fragment || _empty;\n\n\t\t\t_validateUri(this, _strict);\n\t\t}\n\t}\n\n\t// ---- filesystem path -----------------------\n\n\t/**\n\t * Returns a string representing the corresponding file system path of this URI.\n\t * Will handle UNC paths, normalizes windows drive letters to lower-case, and uses the\n\t * platform specific path separator.\n\t *\n\t * * Will *not* validate the path for invalid characters and semantics.\n\t * * Will *not* look at the scheme of this URI.\n\t * * The result shall *not* be used for display purposes but for accessing a file on disk.\n\t *\n\t *\n\t * The *difference* to `URI#path` is the use of the platform specific separator and the handling\n\t * of UNC paths. See the below sample of a file-uri with an authority (UNC path).\n\t *\n\t * ```ts\n\t\tconst u = URI.parse('file://server/c$/folder/file.txt')\n\t\tu.authority === 'server'\n\t\tu.path === '/shares/c$/file.txt'\n\t\tu.fsPath === '\\\\server\\c$\\folder\\file.txt'\n\t```\n\t *\n\t * Using `URI#path` to read a file (using fs-apis) would not be enough because parts of the path,\n\t * namely the server name, would be missing. Therefore `URI#fsPath` exists - it's sugar to ease working\n\t * with URIs that represent files on disk (`file` scheme).\n\t */\n\tget fsPath(): string {\n\t\t// if (this.scheme !== 'file') {\n\t\t// \tconsole.warn(`[UriError] calling fsPath with scheme ${this.scheme}`);\n\t\t// }\n\t\treturn uriToFsPath(this, false);\n\t}\n\n\t// ---- modify to new -------------------------\n\n\twith(change: { scheme?: string; authority?: string | null; path?: string | null; query?: string | null; fragment?: string | null }): URI {\n\n\t\tif (!change) {\n\t\t\treturn this;\n\t\t}\n\n\t\tlet { scheme, authority, path, query, fragment } = change;\n\t\tif (scheme === undefined) {\n\t\t\tscheme = this.scheme;\n\t\t} else if (scheme === null) {\n\t\t\tscheme = _empty;\n\t\t}\n\t\tif (authority === undefined) {\n\t\t\tauthority = this.authority;\n\t\t} else if (authority === null) {\n\t\t\tauthority = _empty;\n\t\t}\n\t\tif (path === undefined) {\n\t\t\tpath = this.path;\n\t\t} else if (path === null) {\n\t\t\tpath = _empty;\n\t\t}\n\t\tif (query === undefined) {\n\t\t\tquery = this.query;\n\t\t} else if (query === null) {\n\t\t\tquery = _empty;\n\t\t}\n\t\tif (fragment === undefined) {\n\t\t\tfragment = this.fragment;\n\t\t} else if (fragment === null) {\n\t\t\tfragment = _empty;\n\t\t}\n\n\t\tif (scheme === this.scheme\n\t\t\t&& authority === this.authority\n\t\t\t&& path === this.path\n\t\t\t&& query === this.query\n\t\t\t&& fragment === this.fragment) {\n\n\t\t\treturn this;\n\t\t}\n\n\t\treturn new Uri(scheme, authority, path, query, fragment);\n\t}\n\n\t// ---- parse & validate ------------------------\n\n\t/**\n\t * Creates a new URI from a string, e.g. `http://www.example.com/some/path`,\n\t * `file:///usr/home`, or `scheme:with/path`.\n\t *\n\t * @param value A string which represents an URI (see `URI#toString`).\n\t */\n\tstatic parse(value: string, _strict: boolean = false): URI {\n\t\tconst match = _regexp.exec(value);\n\t\tif (!match) {\n\t\t\treturn new Uri(_empty, _empty, _empty, _empty, _empty);\n\t\t}\n\t\treturn new Uri(\n\t\t\tmatch[2] || _empty,\n\t\t\tpercentDecode(match[4] || _empty),\n\t\t\tpercentDecode(match[5] || _empty),\n\t\t\tpercentDecode(match[7] || _empty),\n\t\t\tpercentDecode(match[9] || _empty),\n\t\t\t_strict\n\t\t);\n\t}\n\n\t/**\n\t * Creates a new URI from a file system path, e.g. 
`c:\\my\\files`,\n\t * `/usr/home`, or `\\\\server\\share\\some\\path`.\n\t *\n\t * The *difference* between `URI#parse` and `URI#file` is that the latter treats the argument\n\t * as path, not as stringified-uri. E.g. `URI.file(path)` is **not the same as**\n\t * `URI.parse('file://' + path)` because the path might contain characters that are\n\t * interpreted (# and ?). See the following sample:\n\t * ```ts\n\tconst good = URI.file('/coding/c#/project1');\n\tgood.scheme === 'file';\n\tgood.path === '/coding/c#/project1';\n\tgood.fragment === '';\n\tconst bad = URI.parse('file://' + '/coding/c#/project1');\n\tbad.scheme === 'file';\n\tbad.path === '/coding/c'; // path is now broken\n\tbad.fragment === '/project1';\n\t```\n\t *\n\t * @param path A file system path (see `URI#fsPath`)\n\t */\n\tstatic file(path: string): URI {\n\n\t\tlet authority = _empty;\n\n\t\t// normalize to fwd-slashes on windows,\n\t\t// on other systems bwd-slashes are valid\n\t\t// filename character, eg /f\\oo/ba\\r.txt\n\t\tif (isWindows) {\n\t\t\tpath = path.replace(/\\\\/g, _slash);\n\t\t}\n\n\t\t// check for authority as used in UNC shares\n\t\t// or use the path as given\n\t\tif (path[0] === _slash && path[1] === _slash) {\n\t\t\tconst idx = path.indexOf(_slash, 2);\n\t\t\tif (idx === -1) {\n\t\t\t\tauthority = path.substring(2);\n\t\t\t\tpath = _slash;\n\t\t\t} else {\n\t\t\t\tauthority = path.substring(2, idx);\n\t\t\t\tpath = path.substring(idx) || _slash;\n\t\t\t}\n\t\t}\n\n\t\treturn new Uri('file', authority, path, _empty, _empty);\n\t}\n\n\tstatic from(components: { scheme: string; authority?: string; path?: string; query?: string; fragment?: string }): URI {\n\t\tconst result = new Uri(\n\t\t\tcomponents.scheme,\n\t\t\tcomponents.authority,\n\t\t\tcomponents.path,\n\t\t\tcomponents.query,\n\t\t\tcomponents.fragment,\n\t\t);\n\t\t_validateUri(result, true);\n\t\treturn result;\n\t}\n\n\t// ---- printing/externalize ---------------------------\n\n\t/**\n\t * Creates a string representation for this URI. It's guaranteed that calling\n\t * `URI.parse` with the result of this function creates an URI which is equal\n\t * to this URI.\n\t *\n\t * * The result shall *not* be used for display purposes but for externalization or transport.\n\t * * The result will be encoded using the percentage encoding and encoding happens mostly\n\t * ignore the scheme-specific encoding rules.\n\t *\n\t * @param skipEncoding Do not encode the result, default is `false`\n\t */\n\ttoString(skipEncoding: boolean = false): string {\n\t\treturn _asFormatted(this, skipEncoding);\n\t}\n\n\ttoJSON(): UriComponents {\n\t\treturn this;\n\t}\n\n\tstatic revive(data: UriComponents | URI): URI;\n\tstatic revive(data: UriComponents | URI | undefined): URI | undefined;\n\tstatic revive(data: UriComponents | URI | null): URI | null;\n\tstatic revive(data: UriComponents | URI | undefined | null): URI | undefined | null;\n\tstatic revive(data: UriComponents | URI | undefined | null): URI | undefined | null {\n\t\tif (!data) {\n\t\t\treturn data;\n\t\t} else if (data instanceof URI) {\n\t\t\treturn data;\n\t\t} else {\n\t\t\tconst result = new Uri(data);\n\t\t\tresult._formatted = (data).external;\n\t\t\tresult._fsPath = (data)._sep === _pathSepMarker ? 
(data).fsPath : null;\n\t\t\treturn result;\n\t\t}\n\t}\n}\n\nexport interface UriComponents {\n\tscheme: string;\n\tauthority: string;\n\tpath: string;\n\tquery: string;\n\tfragment: string;\n}\n\ninterface UriState extends UriComponents {\n\t$mid: number;\n\texternal: string;\n\tfsPath: string;\n\t_sep: 1 | undefined;\n}\n\nconst _pathSepMarker = isWindows ? 1 : undefined;\n\n// This class exists so that URI is compatible with vscode.Uri (API).\nclass Uri extends URI {\n\n\t_formatted: string | null = null;\n\t_fsPath: string | null = null;\n\n\toverride get fsPath(): string {\n\t\tif (!this._fsPath) {\n\t\t\tthis._fsPath = uriToFsPath(this, false);\n\t\t}\n\t\treturn this._fsPath;\n\t}\n\n\toverride toString(skipEncoding: boolean = false): string {\n\t\tif (!skipEncoding) {\n\t\t\tif (!this._formatted) {\n\t\t\t\tthis._formatted = _asFormatted(this, false);\n\t\t\t}\n\t\t\treturn this._formatted;\n\t\t} else {\n\t\t\t// we don't cache that\n\t\t\treturn _asFormatted(this, true);\n\t\t}\n\t}\n\n\toverride toJSON(): UriComponents {\n\t\tconst res = {\n\t\t\t$mid: 1\n\t\t};\n\t\t// cached state\n\t\tif (this._fsPath) {\n\t\t\tres.fsPath = this._fsPath;\n\t\t\tres._sep = _pathSepMarker;\n\t\t}\n\t\tif (this._formatted) {\n\t\t\tres.external = this._formatted;\n\t\t}\n\t\t// uri components\n\t\tif (this.path) {\n\t\t\tres.path = this.path;\n\t\t}\n\t\tif (this.scheme) {\n\t\t\tres.scheme = this.scheme;\n\t\t}\n\t\tif (this.authority) {\n\t\t\tres.authority = this.authority;\n\t\t}\n\t\tif (this.query) {\n\t\t\tres.query = this.query;\n\t\t}\n\t\tif (this.fragment) {\n\t\t\tres.fragment = this.fragment;\n\t\t}\n\t\treturn res;\n\t}\n}\n\n// reserved characters: https://tools.ietf.org/html/rfc3986#section-2.2\nconst encodeTable: { [ch: number]: string } = {\n\t[CharCode.Colon]: '%3A', // gen-delims\n\t[CharCode.Slash]: '%2F',\n\t[CharCode.QuestionMark]: '%3F',\n\t[CharCode.Hash]: '%23',\n\t[CharCode.OpenSquareBracket]: '%5B',\n\t[CharCode.CloseSquareBracket]: '%5D',\n\t[CharCode.AtSign]: '%40',\n\n\t[CharCode.ExclamationMark]: '%21', // sub-delims\n\t[CharCode.DollarSign]: '%24',\n\t[CharCode.Ampersand]: '%26',\n\t[CharCode.SingleQuote]: '%27',\n\t[CharCode.OpenParen]: '%28',\n\t[CharCode.CloseParen]: '%29',\n\t[CharCode.Asterisk]: '%2A',\n\t[CharCode.Plus]: '%2B',\n\t[CharCode.Comma]: '%2C',\n\t[CharCode.Semicolon]: '%3B',\n\t[CharCode.Equals]: '%3D',\n\n\t[CharCode.Space]: '%20',\n};\n\nfunction encodeURIComponentFast(uriComponent: string, isPath: boolean, isAuthority: boolean): string {\n\tlet res: string | undefined = undefined;\n\tlet nativeEncodePos = -1;\n\n\tfor (let pos = 0; pos < uriComponent.length; pos++) {\n\t\tconst code = uriComponent.charCodeAt(pos);\n\n\t\t// unreserved characters: https://tools.ietf.org/html/rfc3986#section-2.3\n\t\tif (\n\t\t\t(code >= CharCode.a && code <= CharCode.z)\n\t\t\t|| (code >= CharCode.A && code <= CharCode.Z)\n\t\t\t|| (code >= CharCode.Digit0 && code <= CharCode.Digit9)\n\t\t\t|| code === CharCode.Dash\n\t\t\t|| code === CharCode.Period\n\t\t\t|| code === CharCode.Underline\n\t\t\t|| code === CharCode.Tilde\n\t\t\t|| (isPath && code === CharCode.Slash)\n\t\t\t|| (isAuthority && code === CharCode.OpenSquareBracket)\n\t\t\t|| (isAuthority && code === CharCode.CloseSquareBracket)\n\t\t\t|| (isAuthority && code === CharCode.Colon)\n\t\t) {\n\t\t\t// check if we are delaying native encode\n\t\t\tif (nativeEncodePos !== -1) {\n\t\t\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos, pos));\n\t\t\t\tnativeEncodePos = 
-1;\n\t\t\t}\n\t\t\t// check if we write into a new string (by default we try to return the param)\n\t\t\tif (res !== undefined) {\n\t\t\t\tres += uriComponent.charAt(pos);\n\t\t\t}\n\n\t\t} else {\n\t\t\t// encoding needed, we need to allocate a new string\n\t\t\tif (res === undefined) {\n\t\t\t\tres = uriComponent.substr(0, pos);\n\t\t\t}\n\n\t\t\t// check with default table first\n\t\t\tconst escaped = encodeTable[code];\n\t\t\tif (escaped !== undefined) {\n\n\t\t\t\t// check if we are delaying native encode\n\t\t\t\tif (nativeEncodePos !== -1) {\n\t\t\t\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos, pos));\n\t\t\t\t\tnativeEncodePos = -1;\n\t\t\t\t}\n\n\t\t\t\t// append escaped variant to result\n\t\t\t\tres += escaped;\n\n\t\t\t} else if (nativeEncodePos === -1) {\n\t\t\t\t// use native encode only when needed\n\t\t\t\tnativeEncodePos = pos;\n\t\t\t}\n\t\t}\n\t}\n\n\tif (nativeEncodePos !== -1) {\n\t\tres += encodeURIComponent(uriComponent.substring(nativeEncodePos));\n\t}\n\n\treturn res !== undefined ? res : uriComponent;\n}\n\nfunction encodeURIComponentMinimal(path: string): string {\n\tlet res: string | undefined = undefined;\n\tfor (let pos = 0; pos < path.length; pos++) {\n\t\tconst code = path.charCodeAt(pos);\n\t\tif (code === CharCode.Hash || code === CharCode.QuestionMark) {\n\t\t\tif (res === undefined) {\n\t\t\t\tres = path.substr(0, pos);\n\t\t\t}\n\t\t\tres += encodeTable[code];\n\t\t} else {\n\t\t\tif (res !== undefined) {\n\t\t\t\tres += path[pos];\n\t\t\t}\n\t\t}\n\t}\n\treturn res !== undefined ? res : path;\n}\n\n/**\n * Compute `fsPath` for the given uri\n */\nexport function uriToFsPath(uri: URI, keepDriveLetterCasing: boolean): string {\n\n\tlet value: string;\n\tif (uri.authority && uri.path.length > 1 && uri.scheme === 'file') {\n\t\t// unc path: file://shares/c$/far/boo\n\t\tvalue = `//${uri.authority}${uri.path}`;\n\t} else if (\n\t\turi.path.charCodeAt(0) === CharCode.Slash\n\t\t&& (uri.path.charCodeAt(1) >= CharCode.A && uri.path.charCodeAt(1) <= CharCode.Z || uri.path.charCodeAt(1) >= CharCode.a && uri.path.charCodeAt(1) <= CharCode.z)\n\t\t&& uri.path.charCodeAt(2) === CharCode.Colon\n\t) {\n\t\tif (!keepDriveLetterCasing) {\n\t\t\t// windows drive letter: file:///c:/far/boo\n\t\t\tvalue = uri.path[1].toLowerCase() + uri.path.substr(2);\n\t\t} else {\n\t\t\tvalue = uri.path.substr(1);\n\t\t}\n\t} else {\n\t\t// other path\n\t\tvalue = uri.path;\n\t}\n\tif (isWindows) {\n\t\tvalue = value.replace(/\\//g, '\\\\');\n\t}\n\treturn value;\n}\n\n/**\n * Create the external version of a uri\n */\nfunction _asFormatted(uri: URI, skipEncoding: boolean): string {\n\n\tconst encoder = !skipEncoding\n\t\t? 
encodeURIComponentFast\n\t\t: encodeURIComponentMinimal;\n\n\tlet res = '';\n\tlet { scheme, authority, path, query, fragment } = uri;\n\tif (scheme) {\n\t\tres += scheme;\n\t\tres += ':';\n\t}\n\tif (authority || scheme === 'file') {\n\t\tres += _slash;\n\t\tres += _slash;\n\t}\n\tif (authority) {\n\t\tlet idx = authority.indexOf('@');\n\t\tif (idx !== -1) {\n\t\t\t// @\n\t\t\tconst userinfo = authority.substr(0, idx);\n\t\t\tauthority = authority.substr(idx + 1);\n\t\t\tidx = userinfo.lastIndexOf(':');\n\t\t\tif (idx === -1) {\n\t\t\t\tres += encoder(userinfo, false, false);\n\t\t\t} else {\n\t\t\t\t// :@\n\t\t\t\tres += encoder(userinfo.substr(0, idx), false, false);\n\t\t\t\tres += ':';\n\t\t\t\tres += encoder(userinfo.substr(idx + 1), false, true);\n\t\t\t}\n\t\t\tres += '@';\n\t\t}\n\t\tauthority = authority.toLowerCase();\n\t\tidx = authority.lastIndexOf(':');\n\t\tif (idx === -1) {\n\t\t\tres += encoder(authority, false, true);\n\t\t} else {\n\t\t\t// :\n\t\t\tres += encoder(authority.substr(0, idx), false, true);\n\t\t\tres += authority.substr(idx);\n\t\t}\n\t}\n\tif (path) {\n\t\t// lower-case windows drive letters in /C:/fff or C:/fff\n\t\tif (path.length >= 3 && path.charCodeAt(0) === CharCode.Slash && path.charCodeAt(2) === CharCode.Colon) {\n\t\t\tconst code = path.charCodeAt(1);\n\t\t\tif (code >= CharCode.A && code <= CharCode.Z) {\n\t\t\t\tpath = `/${String.fromCharCode(code + 32)}:${path.substr(3)}`; // \"/c:\".length === 3\n\t\t\t}\n\t\t} else if (path.length >= 2 && path.charCodeAt(1) === CharCode.Colon) {\n\t\t\tconst code = path.charCodeAt(0);\n\t\t\tif (code >= CharCode.A && code <= CharCode.Z) {\n\t\t\t\tpath = `${String.fromCharCode(code + 32)}:${path.substr(2)}`; // \"/c:\".length === 3\n\t\t\t}\n\t\t}\n\t\t// encode the rest of the path\n\t\tres += encoder(path, true, false);\n\t}\n\tif (query) {\n\t\tres += '?';\n\t\tres += encoder(query, false, false);\n\t}\n\tif (fragment) {\n\t\tres += '#';\n\t\tres += !skipEncoding ? encodeURIComponentFast(fragment, false, false) : fragment;\n\t}\n\treturn res;\n}\n\n// --- decode\n\nfunction decodeURIComponentGraceful(str: string): string {\n\ttry {\n\t\treturn decodeURIComponent(str);\n\t} catch {\n\t\tif (str.length > 3) {\n\t\t\treturn str.substr(0, 3) + decodeURIComponentGraceful(str.substr(3));\n\t\t} else {\n\t\t\treturn str;\n\t\t}\n\t}\n}\n\nconst _rEncodedAsHex = /(%[0-9A-Za-z][0-9A-Za-z])+/g;\n\nfunction percentDecode(str: string): string {\n\tif (!str.match(_rEncodedAsHex)) {\n\t\treturn str;\n\t}\n\treturn str.replace(_rEncodedAsHex, (match) => decodeURIComponentGraceful(match));\n}\n\n/**\n * Mapped-type that replaces all occurrences of URI with UriComponents\n */\nexport type UriDto = { [K in keyof T]: T[K] extends URI\n\t? UriComponents\n\t: UriDto };\n", "/*---------------------------------------------------------------------------------------------\n *  Copyright (c) Microsoft Corporation. All rights reserved.\n *  Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n\n'use strict';\n\nimport { CharCode } from './charCode';\nimport { URI } from './uri';\nimport * as nodePath from 'path';\n\nconst posixPath = nodePath.posix || nodePath;\nconst slash = '/';\n\nexport namespace Utils {\n\n    /**\n     * Joins one or more input paths to the path of URI. \n     * '/' is used as the directory separation character. \n     * \n     * The resolved path will be normalized. 
That means:\n     *  - all '..' and '.' segments are resolved.\n     *  - multiple, sequential occurences of '/' are replaced by a single instance of '/'.\n     *  - trailing separators are preserved.\n     * \n     * @param uri The input URI.\n     * @param paths The paths to be joined with the path of URI.\n     * @returns A URI with the joined path. All other properties of the URI (scheme, authority, query, fragments, ...) will be taken from the input URI.\n     */\n    export function joinPath(uri: URI, ...paths: string[]): URI {\n        return uri.with({ path: posixPath.join(uri.path, ...paths) });\n    }\n\n\n    /**\n     * Resolves one or more paths against the path of a URI. \n     * '/' is used as the directory separation character. \n     * \n     * The resolved path will be normalized. That means:\n     *  - all '..' and '.' segments are resolved. \n     *  - multiple, sequential occurences of '/' are replaced by a single instance of '/'.\n     *  - trailing separators are removed.\n     * \n     * @param uri The input URI.\n     * @param paths The paths to resolve against the path of URI.\n     * @returns A URI with the resolved path. All other properties of the URI (scheme, authority, query, fragments, ...) will be taken from the input URI.\n     */\n    export function resolvePath(uri: URI, ...paths: string[]): URI {\n        let path = uri.path; \n        let slashAdded = false;\n        if (path[0] !== slash) {\n            path = slash + path; // make the path abstract: for posixPath.resolve the first segments has to be absolute or cwd is used.\n            slashAdded = true;\n        }\n        let resolvedPath = posixPath.resolve(path, ...paths);\n        if (slashAdded && resolvedPath[0] === slash && !uri.authority) {\n            resolvedPath = resolvedPath.substring(1);\n        }\n        return uri.with({ path: resolvedPath });\n    }\n\n    /**\n     * Returns a URI where the path is the directory name of the input uri, similar to the Unix dirname command. \n     * In the path, '/' is recognized as the directory separation character. Trailing directory separators are ignored.\n     * The orignal URI is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The last segment of the URIs path.\n     */\n    export function dirname(uri: URI): URI {\n        if (uri.path.length === 0 || uri.path === slash) {\n            return uri;\n        }\n        let path = posixPath.dirname(uri.path);\n        if (path.length === 1 && path.charCodeAt(0) === CharCode.Period) {\n            path = '';\n        }\n        return uri.with({ path });\n    }\n\n    /**\n     * Returns the last segment of the path of a URI, similar to the Unix basename command. \n     * In the path, '/' is recognized as the directory separation character. Trailing directory separators are ignored.\n     * The empty string is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The base name of the URIs path.\n     */\n    export function basename(uri: URI): string {\n        return posixPath.basename(uri.path);\n    }\n\n    /**\n     * Returns the extension name of the path of a URI, similar to the Unix extname command. \n     * In the path, '/' is recognized as the directory separation character. 
Trailing directory separators are ignored.\n     * The empty string is returned if the URIs path is empty or does not contain any path segments.\n     * \n     * @param uri The input URI.\n     * @return The extension name of the URIs path.\n     */\n    export function extname(uri: URI): string {\n        return posixPath.extname(uri.path);\n    }\n}", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { URI, Utils } from 'vscode-uri';\n\nexport { URI };\n\nexport namespace UriUtils {\n\n    export const basename = Utils.basename;\n    export const dirname = Utils.dirname;\n    export const extname = Utils.extname;\n    export const joinPath = Utils.joinPath;\n    export const resolvePath = Utils.resolvePath;\n\n    export function equals(a?: URI | string, b?: URI | string): boolean {\n        return a?.toString() === b?.toString();\n    }\n\n    export function relative(from: URI | string, to: URI | string): string {\n        const fromPath = typeof from === 'string' ? from : from.path;\n        const toPath = typeof to === 'string' ? to : to.path;\n        const fromParts = fromPath.split('/').filter(e => e.length > 0);\n        const toParts = toPath.split('/').filter(e => e.length > 0);\n        let i = 0;\n        for (; i < fromParts.length; i++) {\n            if (fromParts[i] !== toParts[i]) {\n                break;\n            }\n        }\n        const backPart = '../'.repeat(fromParts.length - i);\n        const toPart = toParts.slice(i).join('/');\n        return backPart + toPart;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/**\n * Re-export 'TextDocument' from 'vscode-languageserver-textdocument' for convenience,\n *  including both type _and_ symbol (namespace), as we here and there also refer to the symbol,\n *  the overhead is very small, just a few kilobytes.\n * Everything else of that package (at the time contributing) is also defined\n *  in 'vscode-languageserver-protocol' or 'vscode-languageserver-types'.\n */\nexport { TextDocument } from 'vscode-languageserver-textdocument';\n\nimport type { Diagnostic, Range } from 'vscode-languageserver-types';\nimport type { FileSystemProvider } from './file-system-provider.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, Mutable, Reference } from '../syntax-tree.js';\nimport type { MultiMap } from '../utils/collections.js';\nimport type { Stream } from '../utils/stream.js';\nimport { TextDocument } from './documents.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { stream } from '../utils/stream.js';\nimport { URI } from '../utils/uri-utils.js';\n\n/**\n * A Langium document holds the parse result (AST and CST) and any additional state that is derived\n * from the AST, e.g. 
the result of scope precomputation.\n */\nexport interface LangiumDocument {\n    /** The Uniform Resource Identifier (URI) of the document */\n    readonly uri: URI;\n    /** The text document used to convert between offsets and positions */\n    readonly textDocument: TextDocument;\n    /** The current state of the document */\n    state: DocumentState;\n    /** The parse result holds the Abstract Syntax Tree (AST) and potentially also parser / lexer errors */\n    parseResult: ParseResult;\n    /** Result of the scope precomputation phase */\n    precomputedScopes?: PrecomputedScopes;\n    /** An array of all cross-references found in the AST while linking */\n    references: Reference[];\n    /** Result of the validation phase */\n    diagnostics?: Diagnostic[]\n}\n\n/**\n * A document is subject to several phases that are run in predefined order. Any state value implies that\n * smaller state values are finished as well.\n */\nexport enum DocumentState {\n    /**\n     * The text content has changed and needs to be parsed again. The AST held by this outdated\n     * document instance is no longer valid.\n     */\n    Changed = 0,\n    /**\n     * An AST has been created from the text content. The document structure can be traversed,\n     * but cross-references cannot be resolved yet. If necessary, the structure can be manipulated\n     * at this stage as a preprocessing step.\n     */\n    Parsed = 1,\n    /**\n     * The `IndexManager` service has processed AST nodes of this document. This means the\n     * exported symbols are available in the global scope and can be resolved from other documents.\n     */\n    IndexedContent = 2,\n    /**\n     * The `ScopeComputation` service has processed this document. This means the local symbols\n     * are stored in a MultiMap so they can be looked up by the `ScopeProvider` service.\n     * Once a document has reached this state, you may follow every reference - it will lazily\n     * resolve its `ref` property and yield either the target AST node or `undefined` in case\n     * the target is not in scope.\n     */\n    ComputedScopes = 3,\n    /**\n     * The `Linker` service has processed this document. All outgoing references have been\n     * resolved or marked as erroneous.\n     */\n    Linked = 4,\n    /**\n     * The `IndexManager` service has processed AST node references of this document. This is\n     * necessary to determine which documents are affected by a change in one of the workspace\n     * documents.\n     */\n    IndexedReferences = 5,\n    /**\n     * The `DocumentValidator` service has processed this document. 
The language server listens\n     * to the results of this phase and sends diagnostics to the client.\n     */\n    Validated = 6\n}\n\n/**\n * Result of the scope precomputation phase (`ScopeComputation` service).\n * It maps every AST node to the set of symbols that are visible in the subtree of that node.\n */\nexport type PrecomputedScopes = MultiMap\n\nexport interface DocumentSegment {\n    readonly range: Range\n    readonly offset: number\n    readonly length: number\n    readonly end: number\n}\n\n/**\n * Surrogate definition of the `TextDocuments` interface from the `vscode-languageserver` package.\n * No implementation object is expected to be offered by `LangiumCoreServices`, but only by `LangiumLSPServices`.\n */\nexport type TextDocumentProvider = {\n    get(uri: string): TextDocument | undefined\n}\n\n/**\n * Shared service for creating `LangiumDocument` instances.\n *\n * Register a custom implementation if special (additional) behavior is required for your language(s).\n * Note: If you specialize {@link fromString} or {@link fromTextDocument} you probably might want to\n * specialize {@link update}, too!\n */\nexport interface LangiumDocumentFactory {\n    /**\n     * Create a Langium document from a `TextDocument` (usually associated with a file).\n     */\n    fromTextDocument(textDocument: TextDocument, uri?: URI): LangiumDocument;\n    /**\n     * Create a Langium document from a `TextDocument` asynchronously. This action can be cancelled if a cancellable parser implementation has been provided.\n     */\n    fromTextDocument(textDocument: TextDocument, uri: URI | undefined, cancellationToken: CancellationToken): Promise>;\n\n    /**\n     * Create an Langium document from an in-memory string.\n     */\n    fromString(text: string, uri: URI): LangiumDocument;\n    /**\n     * Create a Langium document from an in-memory string asynchronously. This action can be cancelled if a cancellable parser implementation has been provided.\n     */\n    fromString(text: string, uri: URI, cancellationToken: CancellationToken): Promise>;\n\n    /**\n     * Create an Langium document from a model that has been constructed in memory.\n     */\n    fromModel(model: T, uri: URI): LangiumDocument;\n\n    /**\n     * Create an Langium document from a specified `URI`. 
The factory will use the `FileSystemAccess` service to read the file.\n     */\n    fromUri(uri: URI, cancellationToken?: CancellationToken): Promise>;\n\n    /**\n     * Update the given document after changes in the corresponding textual representation.\n     * Method is called by the document builder after it has been requested to build an existing\n     * document and the document's state is {@link DocumentState.Changed}.\n     * The text parsing is expected to be done the same way as in {@link fromTextDocument}\n     * and {@link fromString}.\n     */\n    update(document: LangiumDocument, cancellationToken: CancellationToken): Promise>\n}\n\nexport class DefaultLangiumDocumentFactory implements LangiumDocumentFactory {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly textDocuments?: TextDocumentProvider;\n    protected readonly fileSystemProvider: FileSystemProvider;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n        this.textDocuments = services.workspace.TextDocuments;\n        this.fileSystemProvider = services.workspace.FileSystemProvider;\n    }\n\n    async fromUri(uri: URI, cancellationToken = CancellationToken.None): Promise> {\n        const content = await this.fileSystemProvider.readFile(uri);\n        return this.createAsync(uri, content, cancellationToken);\n    }\n\n    fromTextDocument(textDocument: TextDocument, uri?: URI): LangiumDocument;\n    fromTextDocument(textDocument: TextDocument, uri: URI | undefined, cancellationToken: CancellationToken): Promise>;\n    fromTextDocument(textDocument: TextDocument, uri?: URI, cancellationToken?: CancellationToken): LangiumDocument | Promise> {\n        uri = uri ?? URI.parse(textDocument.uri);\n        if (cancellationToken) {\n            return this.createAsync(uri, textDocument, cancellationToken);\n        } else {\n            return this.create(uri, textDocument);\n        }\n    }\n\n    fromString(text: string, uri: URI): LangiumDocument;\n    fromString(text: string, uri: URI, cancellationToken: CancellationToken): Promise>;\n    fromString(text: string, uri: URI, cancellationToken?: CancellationToken): LangiumDocument | Promise> {\n        if (cancellationToken) {\n            return this.createAsync(uri, text, cancellationToken);\n        } else {\n            return this.create(uri, text);\n        }\n    }\n\n    fromModel(model: T, uri: URI): LangiumDocument {\n        return this.create(uri, { $model: model });\n    }\n\n    protected create(uri: URI, content: string | TextDocument | { $model: T }): LangiumDocument {\n        if (typeof content === 'string') {\n            const parseResult = this.parse(uri, content);\n            return this.createLangiumDocument(parseResult, uri, undefined, content);\n\n        } else if ('$model' in content) {\n            const parseResult = { value: content.$model, parserErrors: [], lexerErrors: [] };\n            return this.createLangiumDocument(parseResult, uri);\n\n        } else {\n            const parseResult = this.parse(uri, content.getText());\n            return this.createLangiumDocument(parseResult, uri, content);\n        }\n    }\n\n    protected async createAsync(uri: URI, content: string | TextDocument, cancelToken: CancellationToken): Promise> {\n        if (typeof content === 'string') {\n            const parseResult = await this.parseAsync(uri, content, cancelToken);\n            return this.createLangiumDocument(parseResult, uri, undefined, 
content);\n        } else {\n            const parseResult = await this.parseAsync(uri, content.getText(), cancelToken);\n            return this.createLangiumDocument(parseResult, uri, content);\n        }\n    }\n\n    /**\n     * Create a LangiumDocument from a given parse result.\n     *\n     * A TextDocument is created on demand if it is not provided as argument here. Usually this\n     * should not be necessary because the main purpose of the TextDocument is to convert between\n     * text ranges and offsets, which is done solely in LSP request handling.\n     *\n     * With the introduction of {@link update} below this method is supposed to be mainly called\n     * during workspace initialization and on addition/recognition of new files, while changes in\n     * existing documents are processed via {@link update}.\n     */\n    protected createLangiumDocument(parseResult: ParseResult, uri: URI, textDocument?: TextDocument, text?: string): LangiumDocument {\n        let document: LangiumDocument;\n        if (textDocument) {\n            document = {\n                parseResult,\n                uri,\n                state: DocumentState.Parsed,\n                references: [],\n                textDocument\n            };\n        } else {\n            const textDocumentGetter = this.createTextDocumentGetter(uri, text);\n            document = {\n                parseResult,\n                uri,\n                state: DocumentState.Parsed,\n                references: [],\n                get textDocument() {\n                    return textDocumentGetter();\n                }\n            };\n        }\n        (parseResult.value as Mutable).$document = document;\n        return document;\n    }\n\n    async update(document: Mutable>, cancellationToken: CancellationToken): Promise> {\n        // The CST full text property contains the original text that was used to create the AST.\n        const oldText = document.parseResult.value.$cstNode?.root.fullText;\n        const textDocument = this.textDocuments?.get(document.uri.toString());\n        const text = textDocument ? 
textDocument.getText() : await this.fileSystemProvider.readFile(document.uri);\n\n        if (textDocument) {\n            Object.defineProperty(\n                document,\n                'textDocument',\n                {\n                    value: textDocument\n                }\n            );\n        } else {\n            const textDocumentGetter = this.createTextDocumentGetter(document.uri, text);\n            Object.defineProperty(\n                document,\n                'textDocument',\n                {\n                    get: textDocumentGetter\n                }\n            );\n        }\n\n        // Some of these documents can be pretty large, so parsing them again can be quite expensive.\n        // Therefore, we only parse if the text has actually changed.\n        if (oldText !== text) {\n            document.parseResult = await this.parseAsync(document.uri, text, cancellationToken);\n            (document.parseResult.value as Mutable).$document = document;\n        }\n        document.state = DocumentState.Parsed;\n        return document;\n    }\n\n    protected parse(uri: URI, text: string): ParseResult {\n        const services = this.serviceRegistry.getServices(uri);\n        return services.parser.LangiumParser.parse(text);\n    }\n\n    protected parseAsync(uri: URI, text: string, cancellationToken: CancellationToken): Promise> {\n        const services = this.serviceRegistry.getServices(uri);\n        return services.parser.AsyncParser.parse(text, cancellationToken);\n    }\n\n    protected createTextDocumentGetter(uri: URI, text?: string): () => TextDocument {\n        const serviceRegistry = this.serviceRegistry;\n        let textDoc: TextDocument | undefined = undefined;\n        return () => {\n            return textDoc ??= TextDocument.create(\n                uri.toString(), serviceRegistry.getServices(uri).LanguageMetaData.languageId, 0, text ?? ''\n            );\n        };\n    }\n}\n\n/**\n * Shared service for managing Langium documents.\n */\nexport interface LangiumDocuments {\n\n    /**\n     * A stream of all documents managed under this service.\n     */\n    readonly all: Stream\n\n    /**\n     * Manage a new document under this service.\n     * @throws an error if a document with the same URI is already present.\n     */\n    addDocument(document: LangiumDocument): void;\n\n    /**\n     * Retrieve the document with the given URI, if present. Otherwise returns `undefined`.\n     */\n    getDocument(uri: URI): LangiumDocument | undefined;\n\n    /**\n     * Retrieve the document with the given URI. 
If not present, a new one will be created using the file system access.\n     * The new document will be added to the list of documents managed under this service.\n     */\n    getOrCreateDocument(uri: URI, cancellationToken?: CancellationToken): Promise;\n\n    /**\n     * Creates a new document with the given URI and text content.\n     * The new document is automatically added to this service and can be retrieved using {@link getDocument}.\n     *\n     * @throws an error if a document with the same URI is already present.\n     */\n    createDocument(uri: URI, text: string): LangiumDocument;\n\n    /**\n     * Creates a new document with the given URI and text content asynchronously.\n     * The process can be interrupted with a cancellation token.\n     * The new document is automatically added to this service and can be retrieved using {@link getDocument}.\n     *\n     * @throws an error if a document with the same URI is already present.\n     */\n    createDocument(uri: URI, text: string, cancellationToken: CancellationToken): Promise;\n\n    /**\n     * Returns `true` if a document with the given URI is managed under this service.\n     */\n    hasDocument(uri: URI): boolean;\n\n    /**\n     * Flag the document with the given URI as `Changed`, if present, meaning that its content\n     * is no longer valid. The content (parseResult) stays untouched, while internal data may\n     * be dropped to reduce memory footprint.\n     *\n     * @returns the affected {@link LangiumDocument} if existing for convenience\n     */\n    invalidateDocument(uri: URI): LangiumDocument | undefined;\n\n    /**\n     * Remove the document with the given URI, if present, and mark it as `Changed`, meaning\n     * that its content is no longer valid. The next call to `getOrCreateDocument` with the same\n     * URI will create a new document instance.\n     *\n     * @returns the affected {@link LangiumDocument} if existing for convenience\n     */\n    deleteDocument(uri: URI): LangiumDocument | undefined;\n}\n\nexport class DefaultLangiumDocuments implements LangiumDocuments {\n\n    protected readonly langiumDocumentFactory: LangiumDocumentFactory;\n\n    protected readonly documentMap: Map = new Map();\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.langiumDocumentFactory = services.workspace.LangiumDocumentFactory;\n    }\n\n    get all(): Stream {\n        return stream(this.documentMap.values());\n    }\n\n    addDocument(document: LangiumDocument): void {\n        const uriString = document.uri.toString();\n        if (this.documentMap.has(uriString)) {\n            throw new Error(`A document with the URI '${uriString}' is already present.`);\n        }\n        this.documentMap.set(uriString, document);\n    }\n\n    getDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        return this.documentMap.get(uriString);\n    }\n\n    async getOrCreateDocument(uri: URI, cancellationToken?: CancellationToken): Promise {\n        let document = this.getDocument(uri);\n        if (document) {\n            return document;\n        }\n        document = await this.langiumDocumentFactory.fromUri(uri, cancellationToken);\n        this.addDocument(document);\n        return document;\n    }\n\n    createDocument(uri: URI, text: string): LangiumDocument;\n    createDocument(uri: URI, text: string, cancellationToken: CancellationToken): Promise;\n    createDocument(uri: URI, text: string, cancellationToken?: CancellationToken): 
LangiumDocument | Promise {\n        if (cancellationToken) {\n            return this.langiumDocumentFactory.fromString(text, uri, cancellationToken).then(document => {\n                this.addDocument(document);\n                return document;\n            });\n        } else {\n            const document = this.langiumDocumentFactory.fromString(text, uri);\n            this.addDocument(document);\n            return document;\n        }\n    }\n\n    hasDocument(uri: URI): boolean {\n        return this.documentMap.has(uri.toString());\n    }\n\n    invalidateDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        const langiumDoc = this.documentMap.get(uriString);\n        if (langiumDoc) {\n            langiumDoc.state = DocumentState.Changed;\n            langiumDoc.precomputedScopes = undefined;\n            langiumDoc.references = [];\n            langiumDoc.diagnostics = undefined;\n        }\n        return langiumDoc;\n    }\n\n    deleteDocument(uri: URI): LangiumDocument | undefined {\n        const uriString = uri.toString();\n        const langiumDoc = this.documentMap.get(uriString);\n        if (langiumDoc) {\n            langiumDoc.state = DocumentState.Changed;\n            this.documentMap.delete(uriString);\n        }\n        return langiumDoc;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection, CstNode, LinkingError, Reference, ReferenceInfo } from '../syntax-tree.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { LangiumDocument, LangiumDocuments } from '../workspace/documents.js';\nimport type { ScopeProvider } from './scope-provider.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { isAstNode, isAstNodeDescription, isLinkingError } from '../syntax-tree.js';\nimport { getDocument, streamAst, streamReferences } from '../utils/ast-utils.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\nimport { DocumentState } from '../workspace/documents.js';\n\n/**\n * Language-specific service for resolving cross-references in the AST.\n */\nexport interface Linker {\n\n    /**\n     * Links all cross-references within the specified document. The default implementation loads only target\n     * elements from documents that are present in the `LangiumDocuments` service. 
The linked references are\n     * stored in the document's `references` property.\n     *\n     * @param document A LangiumDocument that shall be linked.\n     * @param cancelToken A token for cancelling the operation.\n     */\n    link(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Unlinks all references within the specified document and removes them from the list of `references`.\n     *\n     * @param document A LangiumDocument that shall be unlinked.\n     */\n    unlink(document: LangiumDocument): void;\n\n    /**\n     * Determines a candidate AST node description for linking the given reference.\n     *\n     * @param node The AST node containing the reference.\n     * @param refId The reference identifier used to build a scope.\n     * @param reference The actual reference to resolve.\n     */\n    getCandidate(refInfo: ReferenceInfo): AstNodeDescription | LinkingError;\n\n    /**\n     * Creates a cross reference node being aware of its containing AstNode, the corresponding CstNode,\n     * the cross reference text denoting the target AstNode being already extracted of the document text,\n     * as well as the unique cross reference identifier.\n     *\n     * Default behavior:\n     *  - The returned Reference's 'ref' property pointing to the target AstNode is populated lazily on its\n     *    first visit.\n     *  - If the target AstNode cannot be resolved on the first visit, an error indicator will be installed\n     *    and further resolution attempts will *not* be performed.\n     *\n     * @param node The containing AST node\n     * @param refNode The corresponding CST node\n     * @param refId The cross reference identifier like ':'\n     * @param refText The cross reference text denoting the target AstNode\n     * @returns the desired Reference node, whose behavior wrt. 
resolving the cross reference is implementation specific.\n     */\n    buildReference(node: AstNode, property: string, refNode: CstNode | undefined, refText: string): Reference;\n\n}\n\ninterface DefaultReference extends Reference {\n    _ref?: AstNode | LinkingError;\n    _nodeDescription?: AstNodeDescription;\n}\n\nexport class DefaultLinker implements Linker {\n    protected readonly reflection: AstReflection;\n    protected readonly scopeProvider: ScopeProvider;\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly langiumDocuments: () => LangiumDocuments;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n        this.langiumDocuments = () => services.shared.workspace.LangiumDocuments;\n        this.scopeProvider = services.references.ScopeProvider;\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    async link(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        for (const node of streamAst(document.parseResult.value)) {\n            await interruptAndCheck(cancelToken);\n            streamReferences(node).forEach(ref => this.doLink(ref, document));\n        }\n    }\n\n    protected doLink(refInfo: ReferenceInfo, document: LangiumDocument): void {\n        const ref = refInfo.reference as DefaultReference;\n        // The reference may already have been resolved lazily by accessing its `ref` property.\n        if (ref._ref === undefined) {\n            try {\n                const description = this.getCandidate(refInfo);\n                if (isLinkingError(description)) {\n                    ref._ref = description;\n                } else {\n                    ref._nodeDescription = description;\n                    if (this.langiumDocuments().hasDocument(description.documentUri)) {\n                        // The target document is already loaded\n                        const linkedNode = this.loadAstNode(description);\n                        ref._ref = linkedNode ?? this.createLinkingError(refInfo, description);\n                    }\n                }\n            } catch (err) {\n                ref._ref = {\n                    ...refInfo,\n                    message: `An error occurred while resolving reference to '${ref.$refText}': ${err}`\n                };\n            }\n        }\n        // Add the reference to the document's array of references\n        document.references.push(ref);\n    }\n\n    unlink(document: LangiumDocument): void {\n        for (const ref of document.references) {\n            delete (ref as DefaultReference)._ref;\n            delete (ref as DefaultReference)._nodeDescription;\n        }\n        document.references = [];\n    }\n\n    getCandidate(refInfo: ReferenceInfo): AstNodeDescription | LinkingError {\n        const scope = this.scopeProvider.getScope(refInfo);\n        const description = scope.getElement(refInfo.reference.$refText);\n        return description ?? 
this.createLinkingError(refInfo);\n    }\n\n    buildReference(node: AstNode, property: string, refNode: CstNode | undefined, refText: string): Reference {\n        // See behavior description in doc of Linker, update that on changes in here.\n        // eslint-disable-next-line @typescript-eslint/no-this-alias\n        const linker = this;\n        const reference: DefaultReference = {\n            $refNode: refNode,\n            $refText: refText,\n\n            get ref() {\n                if (isAstNode(this._ref)) {\n                    // Most frequent case: the target is already resolved.\n                    return this._ref;\n                } else if (isAstNodeDescription(this._nodeDescription)) {\n                    // A candidate has been found before, but it is not loaded yet.\n                    const linkedNode = linker.loadAstNode(this._nodeDescription);\n                    this._ref = linkedNode ??\n                        linker.createLinkingError({ reference, container: node, property }, this._nodeDescription);\n                } else if (this._ref === undefined) {\n                    // The reference has not been linked yet, so do that now.\n                    const refData = linker.getLinkedNode({ reference, container: node, property });\n                    if (refData.error && getDocument(node).state < DocumentState.ComputedScopes) {\n                        // Document scope is not ready, don't set `this._ref` so linker can retry later.\n                        return undefined;\n                    }\n                    this._ref = refData.node ?? refData.error;\n                    this._nodeDescription = refData.descr;\n                }\n                return isAstNode(this._ref) ? this._ref : undefined;\n            },\n            get $nodeDescription() {\n                return this._nodeDescription;\n            },\n            get error() {\n                return isLinkingError(this._ref) ? 
this._ref : undefined;\n            }\n        };\n        return reference;\n    }\n\n    protected getLinkedNode(refInfo: ReferenceInfo): { node?: AstNode, descr?: AstNodeDescription, error?: LinkingError } {\n        try {\n            const description = this.getCandidate(refInfo);\n            if (isLinkingError(description)) {\n                return { error: description };\n            }\n            const linkedNode = this.loadAstNode(description);\n            if (linkedNode) {\n                return { node: linkedNode, descr: description };\n            }\n            else {\n                return {\n                    descr: description,\n                    error:\n                        this.createLinkingError(refInfo, description)\n                };\n            }\n        } catch (err) {\n            return {\n                error: {\n                    ...refInfo,\n                    message: `An error occurred while resolving reference to '${refInfo.reference.$refText}': ${err}`\n                }\n            };\n        }\n    }\n\n    protected loadAstNode(nodeDescription: AstNodeDescription): AstNode | undefined {\n        if (nodeDescription.node) {\n            return nodeDescription.node;\n        }\n        const doc = this.langiumDocuments().getDocument(nodeDescription.documentUri);\n        if (!doc) {\n            return undefined;\n        }\n        return this.astNodeLocator.getAstNode(doc.parseResult.value, nodeDescription.path);\n    }\n\n    protected createLinkingError(refInfo: ReferenceInfo, targetDescription?: AstNodeDescription): LinkingError {\n        // Check whether the document is sufficiently processed by the DocumentBuilder. If not, this is a hint for a bug\n        // in the language implementation.\n        const document = getDocument(refInfo.container);\n        if (document.state < DocumentState.ComputedScopes) {\n            console.warn(`Attempted reference resolution before document reached ComputedScopes state (${document.uri}).`);\n        }\n        const referenceType = this.reflection.getReferenceType(refInfo);\n        return {\n            ...refInfo,\n            message: `Could not resolve reference to ${referenceType} named '${refInfo.reference.$refText}'.`,\n            targetDescription\n        };\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport { findNodeForProperty } from '../utils/grammar-utils.js';\n\nexport interface NamedAstNode extends AstNode {\n    name: string;\n}\n\nexport function isNamed(node: AstNode): node is NamedAstNode {\n    return typeof (node as NamedAstNode).name === 'string';\n}\n\n/**\n * Utility service for retrieving the `name` of an `AstNode` or the `CstNode` containing a `name`.\n */\nexport interface NameProvider {\n    /**\n     * Returns the `name` of a given AstNode.\n     * @param node Specified `AstNode` whose name node shall be retrieved.\n     */\n    getName(node: AstNode): string | undefined;\n    /**\n     * Returns the `CstNode` which contains the parsed value of the `name` assignment.\n     * @param node Specified `AstNode` whose name node shall be retrieved.\n     */\n    getNameNode(node: AstNode): 
CstNode | undefined;\n}\n\nexport class DefaultNameProvider implements NameProvider {\n    getName(node: AstNode): string | undefined {\n        if (isNamed(node)) {\n            return node.name;\n        }\n        return undefined;\n    }\n\n    getNameNode(node: AstNode): CstNode | undefined {\n        return findNodeForProperty(node.$cstNode, 'name');\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode, GenericAstNode } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { ReferenceDescription } from '../workspace/ast-descriptions.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { NameProvider } from './name-provider.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { findAssignment } from '../utils/grammar-utils.js';\nimport { isReference } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { isChildNode, toDocumentSegment } from '../utils/cst-utils.js';\nimport { stream } from '../utils/stream.js';\nimport { UriUtils } from '../utils/uri-utils.js';\n\n/**\n * Language-specific service for finding references and declaration of a given `CstNode`.\n */\nexport interface References {\n\n    /**\n     * If the CstNode is a reference node the target CstNode will be returned.\n     * If the CstNode is a significant node of the CstNode this CstNode will be returned.\n     *\n     * @param sourceCstNode CstNode that points to a AstNode\n     */\n    findDeclaration(sourceCstNode: CstNode): AstNode | undefined;\n\n    /**\n     * If the CstNode is a reference node the target CstNode will be returned.\n     * If the CstNode is a significant node of the CstNode this CstNode will be returned.\n     *\n     * @param sourceCstNode CstNode that points to a AstNode\n     */\n    findDeclarationNode(sourceCstNode: CstNode): CstNode | undefined;\n\n    /**\n     * Finds all references to the target node as references (local references) or reference descriptions.\n     *\n     * @param targetNode Specified target node whose references should be returned\n     */\n    findReferences(targetNode: AstNode, options: FindReferencesOptions): Stream;\n}\n\nexport interface FindReferencesOptions {\n    /**\n     * @deprecated Since v1.2.0. 
Please use `documentUri` instead.\n     */\n    onlyLocal?: boolean;\n    /**\n     * When set, the `findReferences` method will only return references/declarations from the specified document.\n     */\n    documentUri?: URI;\n    /**\n     * Whether the returned list of references should include the declaration.\n     */\n    includeDeclaration?: boolean;\n}\n\nexport class DefaultReferences implements References {\n    protected readonly nameProvider: NameProvider;\n    protected readonly index: IndexManager;\n    protected readonly nodeLocator: AstNodeLocator;\n\n    constructor(services: LangiumCoreServices) {\n        this.nameProvider = services.references.NameProvider;\n        this.index = services.shared.workspace.IndexManager;\n        this.nodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    findDeclaration(sourceCstNode: CstNode): AstNode | undefined {\n        if (sourceCstNode) {\n            const assignment = findAssignment(sourceCstNode);\n            const nodeElem = sourceCstNode.astNode;\n            if (assignment && nodeElem) {\n                const reference = (nodeElem as GenericAstNode)[assignment.feature];\n\n                if (isReference(reference)) {\n                    return reference.ref;\n                } else if (Array.isArray(reference)) {\n                    for (const ref of reference) {\n                        if (isReference(ref) && ref.$refNode\n                            && ref.$refNode.offset <= sourceCstNode.offset\n                            && ref.$refNode.end >= sourceCstNode.end) {\n                            return ref.ref;\n                        }\n                    }\n                }\n            }\n            if (nodeElem) {\n                const nameNode = this.nameProvider.getNameNode(nodeElem);\n                // Only return the targeted node in case the targeted cst node is the name node or part of it\n                if (nameNode && (nameNode === sourceCstNode || isChildNode(sourceCstNode, nameNode))) {\n                    return nodeElem;\n                }\n            }\n        }\n        return undefined;\n    }\n\n    findDeclarationNode(sourceCstNode: CstNode): CstNode | undefined {\n        const astNode = this.findDeclaration(sourceCstNode);\n        if (astNode?.$cstNode) {\n            const targetNode = this.nameProvider.getNameNode(astNode);\n            return targetNode ?? 
astNode.$cstNode;\n        }\n        return undefined;\n    }\n\n    findReferences(targetNode: AstNode, options: FindReferencesOptions): Stream {\n        const refs: ReferenceDescription[] = [];\n        if (options.includeDeclaration) {\n            const ref = this.getReferenceToSelf(targetNode);\n            if (ref) {\n                refs.push(ref);\n            }\n        }\n        let indexReferences = this.index.findAllReferences(targetNode, this.nodeLocator.getAstNodePath(targetNode));\n        if (options.documentUri) {\n            indexReferences = indexReferences.filter(ref => UriUtils.equals(ref.sourceUri, options.documentUri));\n        }\n        refs.push(...indexReferences);\n        return stream(refs);\n    }\n\n    protected getReferenceToSelf(targetNode: AstNode): ReferenceDescription | undefined {\n        const nameNode = this.nameProvider.getNameNode(targetNode);\n        if (nameNode) {\n            const doc = getDocument(targetNode);\n            const path = this.nodeLocator.getAstNodePath(targetNode);\n            return {\n                sourceUri: doc.uri,\n                sourcePath: path,\n                targetUri: doc.uri,\n                targetPath: path,\n                segment: toDocumentSegment(nameNode),\n                local: true\n            };\n        }\n        return undefined;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Stream } from './stream.js';\nimport { Reduction, stream } from './stream.js';\n\n/**\n * A multimap is a variation of a Map that has potentially multiple values for every key.\n */\nexport class MultiMap {\n\n    private map = new Map();\n\n    constructor()\n    constructor(elements: Array<[K, V]>)\n    constructor(elements?: Array<[K, V]>) {\n        if (elements) {\n            for (const [key, value] of elements) {\n                this.add(key, value);\n            }\n        }\n    }\n\n    /**\n     * The total number of values in the multimap.\n     */\n    get size(): number {\n        return Reduction.sum(stream(this.map.values()).map(a => a.length));\n    }\n\n    /**\n     * Clear all entries in the multimap.\n     */\n    clear(): void {\n        this.map.clear();\n    }\n\n    /**\n     * Operates differently depending on whether a `value` is given:\n     *  * With a value, this method deletes the specific key / value pair from the multimap.\n     *  * Without a value, all values associated with the given key are deleted.\n     *\n     * @returns `true` if a value existed and has been removed, or `false` if the specified\n     *     key / value does not exist.\n     */\n    delete(key: K, value?: V): boolean {\n        if (value === undefined) {\n            return this.map.delete(key);\n        } else {\n            const values = this.map.get(key);\n            if (values) {\n                const index = values.indexOf(value);\n                if (index >= 0) {\n                    if (values.length === 1) {\n                        this.map.delete(key);\n                    } else {\n                        values.splice(index, 1);\n                    }\n                    return true;\n                }\n            }\n            return false;\n        }\n    }\n\n    
/**\n     * Returns an array of all values associated with the given key. If no value exists,\n     * an empty array is returned.\n     *\n     * _Note:_ The returned array is assumed not to be modified. Use the `set` method to add a\n     * value and `delete` to remove a value from the multimap.\n     */\n    get(key: K): readonly V[] {\n        return this.map.get(key) ?? [];\n    }\n\n    /**\n     * Operates differently depending on whether a `value` is given:\n     *  * With a value, this method returns `true` if the specific key / value pair is present in the multimap.\n     *  * Without a value, this method returns `true` if the given key is present in the multimap.\n     */\n    has(key: K, value?: V): boolean {\n        if (value === undefined) {\n            return this.map.has(key);\n        } else {\n            const values = this.map.get(key);\n            if (values) {\n                return values.indexOf(value) >= 0;\n            }\n            return false;\n        }\n    }\n\n    /**\n     * Add the given key / value pair to the multimap.\n     */\n    add(key: K, value: V): this {\n        if (this.map.has(key)) {\n            this.map.get(key)!.push(value);\n        } else {\n            this.map.set(key, [value]);\n        }\n        return this;\n    }\n\n    /**\n     * Add the given set of key / value pairs to the multimap.\n     */\n    addAll(key: K, values: Iterable): this {\n        if (this.map.has(key)) {\n            this.map.get(key)!.push(...values);\n        } else {\n            this.map.set(key, Array.from(values));\n        }\n        return this;\n    }\n\n    /**\n     * Invokes the given callback function for every key / value pair in the multimap.\n     */\n    forEach(callbackfn: (value: V, key: K, map: this) => void): void {\n        this.map.forEach((array, key) =>\n            array.forEach(value => callbackfn(value, key, this))\n        );\n    }\n\n    /**\n     * Returns an iterator of key, value pairs for every entry in the map.\n     */\n    [Symbol.iterator](): Iterator<[K, V]> {\n        return this.entries().iterator();\n    }\n\n    /**\n     * Returns a stream of key, value pairs for every entry in the map.\n     */\n    entries(): Stream<[K, V]> {\n        return stream(this.map.entries())\n            .flatMap(([key, array]) => array.map(value => [key, value] as [K, V]));\n    }\n\n    /**\n     * Returns a stream of keys in the map.\n     */\n    keys(): Stream {\n        return stream(this.map.keys());\n    }\n\n    /**\n     * Returns a stream of values in the map.\n     */\n    values(): Stream {\n        return stream(this.map.values()).flat();\n    }\n\n    /**\n     * Returns a stream of key, value set pairs for every key in the map.\n     */\n    entriesGroupedByKey(): Stream<[K, V[]]> {\n        return stream(this.map.entries());\n    }\n\n}\n\nexport class BiMap {\n\n    private map = new Map();\n    private inverse = new Map();\n\n    get size(): number {\n        return this.map.size;\n    }\n\n    constructor()\n    constructor(elements: Array<[K, V]>)\n    constructor(elements?: Array<[K, V]>) {\n        if (elements) {\n            for (const [key, value] of elements) {\n                this.set(key, value);\n            }\n        }\n    }\n\n    clear(): void {\n        this.map.clear();\n        this.inverse.clear();\n    }\n\n    set(key: K, value: V): this {\n        this.map.set(key, value);\n        this.inverse.set(value, key);\n        return this;\n    }\n\n    get(key: K): V | undefined {\n        return 
this.map.get(key);\n    }\n\n    getKey(value: V): K | undefined {\n        return this.inverse.get(value);\n    }\n\n    delete(key: K): boolean {\n        const value = this.map.get(key);\n        if (value !== undefined) {\n            this.map.delete(key);\n            this.inverse.delete(value);\n            return true;\n        }\n        return false;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription } from '../syntax-tree.js';\nimport type { AstNodeDescriptionProvider } from '../workspace/ast-descriptions.js';\nimport type { LangiumDocument, PrecomputedScopes } from '../workspace/documents.js';\nimport type { NameProvider } from './name-provider.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { streamAllContents, streamContents } from '../utils/ast-utils.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\n\n/**\n * Language-specific service for precomputing global and local scopes. The service methods are executed\n * as the first and second phase in the `DocumentBuilder`.\n */\nexport interface ScopeComputation {\n\n    /**\n     * Creates descriptions of all AST nodes that shall be exported into the _global_ scope from the given\n     * document. These descriptions are gathered by the `IndexManager` and stored in the global index so\n     * they can be referenced from other documents.\n     *\n     * _Note:_ You should not resolve any cross-references in this service method. Cross-reference resolution\n     * depends on the scope computation phase to be completed (`computeScope` method), which runs after the\n     * initial indexing where this method is used.\n     *\n     * @param document The document from which to gather exported AST nodes.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    computeExports(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Precomputes the _local_ scopes for a document, which are necessary for the default way of\n     * resolving references to symbols in the same document. The result is a multimap assigning a\n     * set of AST node descriptions to every level of the AST. These data are used by the `ScopeProvider`\n     * service to determine which target nodes are visible in the context of a specific cross-reference.\n     *\n     * _Note:_ You should not resolve any cross-references in this service method. Cross-reference\n     * resolution depends on the scope computation phase to be completed.\n     *\n     * @param document The document in which to compute scopes.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    computeLocalScopes(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n}\n\n/**\n * The default scope computation creates and collectes descriptions of the AST nodes to be exported into the\n * _global_ scope from the given document. 
By default those are the document's root AST node and its directly\n * contained child nodes.\n *\n * Besides, it gathers all AST nodes that have a name (according to the `NameProvider` service) and includes them\n * in the local scope of their particular container nodes. As a result, for every cross-reference in the AST,\n * target elements from the same level (siblings) and further up towards the root (parents and siblings of parents)\n * are visible. Elements being nested inside lower levels (children, children of siblings and parents' siblings)\n * are _invisible_ by default, but that can be changed by customizing this service.\n */\nexport class DefaultScopeComputation implements ScopeComputation {\n\n    protected readonly nameProvider: NameProvider;\n    protected readonly descriptions: AstNodeDescriptionProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.nameProvider = services.references.NameProvider;\n        this.descriptions = services.workspace.AstNodeDescriptionProvider;\n    }\n\n    async computeExports(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        return this.computeExportsForNode(document.parseResult.value, document, undefined, cancelToken);\n    }\n\n    /**\n     * Creates {@link AstNodeDescription AstNodeDescriptions} for the given {@link AstNode parentNode} and its children.\n     * The list of children to be considered is determined by the function parameter {@link children}.\n     * By default only the direct children of {@link parentNode} are visited, nested nodes are not exported.\n     *\n     * @param parentNode AST node to be exported, i.e., of which an {@link AstNodeDescription} shall be added to the returned list.\n     * @param document The document containing the AST node to be exported.\n     * @param children A function called with {@link parentNode} as single argument and returning an {@link Iterable} supplying the children to be visited, which must be directly or transitively contained in {@link parentNode}.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution.\n     * @returns A list of {@link AstNodeDescription AstNodeDescriptions} to be published to index.\n     */\n    async computeExportsForNode(parentNode: AstNode, document: LangiumDocument, children: (root: AstNode) => Iterable = streamContents, cancelToken: CancellationToken = CancellationToken.None): Promise {\n        const exports: AstNodeDescription[] = [];\n\n        this.exportNode(parentNode, exports, document);\n        for (const node of children(parentNode)) {\n            await interruptAndCheck(cancelToken);\n            this.exportNode(node, exports, document);\n        }\n        return exports;\n    }\n\n    /**\n     * Add a single node to the list of exports if it has a name. Override this method to change how\n     * symbols are exported, e.g. 
by modifying their exported name.\n     */\n    protected exportNode(node: AstNode, exports: AstNodeDescription[], document: LangiumDocument): void {\n        const name = this.nameProvider.getName(node);\n        if (name) {\n            exports.push(this.descriptions.createDescription(node, name, document));\n        }\n    }\n\n    async computeLocalScopes(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const rootNode = document.parseResult.value;\n        const scopes = new MultiMap();\n        // Here we navigate the full AST - local scopes shall be available in the whole document\n        for (const node of streamAllContents(rootNode)) {\n            await interruptAndCheck(cancelToken);\n            this.processNode(node, document, scopes);\n        }\n        return scopes;\n    }\n\n    /**\n     * Process a single node during scopes computation. The default implementation makes the node visible\n     * in the subtree of its container (if the node has a name). Override this method to change this,\n     * e.g. by increasing the visibility to a higher level in the AST.\n     */\n    protected processNode(node: AstNode, document: LangiumDocument, scopes: PrecomputedScopes): void {\n        const container = node.$container;\n        if (container) {\n            const name = this.nameProvider.getName(node);\n            if (name) {\n                scopes.add(container, this.descriptions.createDescription(node, name, document));\n            }\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNodeDescription } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport { EMPTY_STREAM, stream } from '../utils/stream.js';\n\n/**\n * A scope describes what target elements are visible from a specific cross-reference context.\n */\nexport interface Scope {\n\n    /**\n     * Find a target element matching the given name. If no element is found, `undefined` is returned.\n     * If multiple matching elements are present, the selection of the returned element should be done\n     * according to the semantics of your language. Usually it is the element that is most closely defined.\n     *\n     * @param name Name of the cross-reference target as it appears in the source text.\n     */\n    getElement(name: string): AstNodeDescription | undefined;\n\n    /**\n     * Create a stream of all elements in the scope. This is used to compute completion proposals to be\n     * shown in the editor.\n     */\n    getAllElements(): Stream;\n\n}\n\nexport interface ScopeOptions {\n    caseInsensitive?: boolean;\n}\n\n/**\n * The default scope implementation is based on a `Stream`. 
It has an optional _outer scope_ describing\n * the next level of elements, which are queried when a target element is not found in the stream provided\n * to this scope.\n */\nexport class StreamScope implements Scope {\n    readonly elements: Stream;\n    readonly outerScope?: Scope;\n    readonly caseInsensitive: boolean;\n\n    constructor(elements: Stream, outerScope?: Scope, options?: ScopeOptions) {\n        this.elements = elements;\n        this.outerScope = outerScope;\n        this.caseInsensitive = options?.caseInsensitive ?? false;\n    }\n\n    getAllElements(): Stream {\n        if (this.outerScope) {\n            return this.elements.concat(this.outerScope.getAllElements());\n        } else {\n            return this.elements;\n        }\n    }\n\n    getElement(name: string): AstNodeDescription | undefined {\n        const local = this.caseInsensitive\n            ? this.elements.find(e => e.name.toLowerCase() === name.toLowerCase())\n            : this.elements.find(e => e.name === name);\n        if (local) {\n            return local;\n        }\n        if (this.outerScope) {\n            return this.outerScope.getElement(name);\n        }\n        return undefined;\n    }\n}\n\nexport class MapScope implements Scope {\n    readonly elements: Map;\n    readonly outerScope?: Scope;\n    readonly caseInsensitive: boolean;\n\n    constructor(elements: Iterable, outerScope?: Scope, options?: ScopeOptions) {\n        this.elements = new Map();\n        this.caseInsensitive = options?.caseInsensitive ?? false;\n        for (const element of elements) {\n            const name = this.caseInsensitive\n                ? element.name.toLowerCase()\n                : element.name;\n            this.elements.set(name, element);\n        }\n        this.outerScope = outerScope;\n    }\n\n    getElement(name: string): AstNodeDescription | undefined {\n        const localName = this.caseInsensitive ? 
name.toLowerCase() : name;\n        const local = this.elements.get(localName);\n        if (local) {\n            return local;\n        }\n        if (this.outerScope) {\n            return this.outerScope.getElement(name);\n        }\n        return undefined;\n    }\n\n    getAllElements(): Stream {\n        let elementStream = stream(this.elements.values());\n        if (this.outerScope) {\n            elementStream = elementStream.concat(this.outerScope.getAllElements());\n        }\n        return elementStream;\n    }\n\n}\n\nexport const EMPTY_SCOPE: Scope = {\n    getElement(): undefined {\n        return undefined;\n    },\n    getAllElements(): Stream {\n        return EMPTY_STREAM;\n    }\n};\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { Disposable } from './disposable.js';\nimport type { URI } from './uri-utils.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\n\nexport abstract class DisposableCache implements Disposable {\n\n    protected toDispose: Disposable[] = [];\n    protected isDisposed = false;\n\n    onDispose(disposable: Disposable): void {\n        this.toDispose.push(disposable);\n    }\n\n    dispose(): void {\n        this.throwIfDisposed();\n        this.clear();\n        this.isDisposed = true;\n        this.toDispose.forEach(disposable => disposable.dispose());\n    }\n\n    protected throwIfDisposed(): void {\n        if (this.isDisposed) {\n            throw new Error('This cache has already been disposed');\n        }\n    }\n\n    abstract clear(): void;\n}\n\nexport class SimpleCache extends DisposableCache {\n    protected readonly cache = new Map();\n\n    has(key: K): boolean {\n        this.throwIfDisposed();\n        return this.cache.has(key);\n    }\n\n    set(key: K, value: V): void {\n        this.throwIfDisposed();\n        this.cache.set(key, value);\n    }\n\n    get(key: K): V | undefined;\n    get(key: K, provider: () => V): V;\n    get(key: K, provider?: () => V): V | undefined {\n        this.throwIfDisposed();\n        if (this.cache.has(key)) {\n            return this.cache.get(key);\n        } else if (provider) {\n            const value = provider();\n            this.cache.set(key, value);\n            return value;\n        } else {\n            return undefined;\n        }\n    }\n\n    delete(key: K): boolean {\n        this.throwIfDisposed();\n        return this.cache.delete(key);\n    }\n\n    clear(): void {\n        this.throwIfDisposed();\n        this.cache.clear();\n    }\n}\n\nexport class ContextCache extends DisposableCache {\n\n    private readonly cache = new Map>();\n    private readonly converter: (input: Context) => ContextKey | Context;\n\n    constructor(converter?: (input: Context) => ContextKey) {\n        super();\n        this.converter = converter ?? 
(value => value);\n    }\n\n    has(contextKey: Context, key: Key): boolean {\n        this.throwIfDisposed();\n        return this.cacheForContext(contextKey).has(key);\n    }\n\n    set(contextKey: Context, key: Key, value: Value): void {\n        this.throwIfDisposed();\n        this.cacheForContext(contextKey).set(key, value);\n    }\n\n    get(contextKey: Context, key: Key): Value | undefined;\n    get(contextKey: Context, key: Key, provider: () => Value): Value;\n    get(contextKey: Context, key: Key, provider?: () => Value): Value | undefined {\n        this.throwIfDisposed();\n        const contextCache = this.cacheForContext(contextKey);\n        if (contextCache.has(key)) {\n            return contextCache.get(key);\n        } else if (provider) {\n            const value = provider();\n            contextCache.set(key, value);\n            return value;\n        } else {\n            return undefined;\n        }\n    }\n\n    delete(contextKey: Context, key: Key): boolean {\n        this.throwIfDisposed();\n        return this.cacheForContext(contextKey).delete(key);\n    }\n\n    clear(): void;\n    clear(contextKey: Context): void;\n    clear(contextKey?: Context): void {\n        this.throwIfDisposed();\n        if (contextKey) {\n            const mapKey = this.converter(contextKey);\n            this.cache.delete(mapKey);\n        } else {\n            this.cache.clear();\n        }\n    }\n\n    protected cacheForContext(contextKey: Context): Map {\n        const mapKey = this.converter(contextKey);\n        let documentCache = this.cache.get(mapKey);\n        if (!documentCache) {\n            documentCache = new Map();\n            this.cache.set(mapKey, documentCache);\n        }\n        return documentCache;\n    }\n}\n\n/**\n * Every key/value pair in this cache is scoped to a document.\n * If this document is changed or deleted, all associated key/value pairs are deleted.\n */\nexport class DocumentCache extends ContextCache {\n    constructor(sharedServices: LangiumSharedCoreServices) {\n        super(uri => uri.toString());\n        this.onDispose(sharedServices.workspace.DocumentBuilder.onUpdate((changed, deleted) => {\n            const allUris = changed.concat(deleted);\n            for (const uri of allUris) {\n                this.clear(uri);\n            }\n        }));\n    }\n}\n\n/**\n * Every key/value pair in this cache is scoped to the whole workspace.\n * If any document in the workspace changes, the whole cache is evicted.\n */\nexport class WorkspaceCache extends SimpleCache {\n    constructor(sharedServices: LangiumSharedCoreServices) {\n        super();\n        this.onDispose(sharedServices.workspace.DocumentBuilder.onUpdate(() => {\n            this.clear();\n        }));\n    }\n}\n", "/******************************************************************************\n * Copyright 2021-2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection, ReferenceInfo } from '../syntax-tree.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { AstNodeDescriptionProvider } from '../workspace/ast-descriptions.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { NameProvider } from 
'./name-provider.js';\nimport type { Scope, ScopeOptions} from './scope.js';\nimport { MapScope, StreamScope } from './scope.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { stream } from '../utils/stream.js';\nimport { WorkspaceCache } from '../utils/caching.js';\n\n/**\n * Language-specific service for determining the scope of target elements visible in a specific cross-reference context.\n */\nexport interface ScopeProvider {\n\n    /**\n     * Return a scope describing what elements are visible for the given AST node and cross-reference\n     * identifier.\n     *\n     * @param context Information about the reference for which a scope is requested.\n     */\n    getScope(context: ReferenceInfo): Scope;\n\n}\n\nexport class DefaultScopeProvider implements ScopeProvider {\n\n    protected readonly reflection: AstReflection;\n    protected readonly nameProvider: NameProvider;\n    protected readonly descriptions: AstNodeDescriptionProvider;\n    protected readonly indexManager: IndexManager;\n\n    protected readonly globalScopeCache: WorkspaceCache;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n        this.nameProvider = services.references.NameProvider;\n        this.descriptions = services.workspace.AstNodeDescriptionProvider;\n        this.indexManager = services.shared.workspace.IndexManager;\n        this.globalScopeCache = new WorkspaceCache(services.shared);\n    }\n\n    getScope(context: ReferenceInfo): Scope {\n        const scopes: Array> = [];\n        const referenceType = this.reflection.getReferenceType(context);\n\n        const precomputed = getDocument(context.container).precomputedScopes;\n        if (precomputed) {\n            let currentNode: AstNode | undefined = context.container;\n            do {\n                const allDescriptions = precomputed.get(currentNode);\n                if (allDescriptions.length > 0) {\n                    scopes.push(stream(allDescriptions).filter(\n                        desc => this.reflection.isSubtype(desc.type, referenceType)));\n                }\n                currentNode = currentNode.$container;\n            } while (currentNode);\n        }\n\n        let result: Scope = this.getGlobalScope(referenceType, context);\n        for (let i = scopes.length - 1; i >= 0; i--) {\n            result = this.createScope(scopes[i], result);\n        }\n        return result;\n    }\n\n    /**\n     * Create a scope for the given collection of AST node descriptions.\n     */\n    protected createScope(elements: Iterable, outerScope?: Scope, options?: ScopeOptions): Scope {\n        return new StreamScope(stream(elements), outerScope, options);\n    }\n\n    /**\n     * Create a scope for the given collection of AST nodes, which need to be transformed into respective\n     * descriptions first. 
This is done using the `NameProvider` and `AstNodeDescriptionProvider` services.\n     */\n    protected createScopeForNodes(elements: Iterable, outerScope?: Scope, options?: ScopeOptions): Scope {\n        const s = stream(elements).map(e => {\n            const name = this.nameProvider.getName(e);\n            if (name) {\n                return this.descriptions.createDescription(e, name);\n            }\n            return undefined;\n        }).nonNullable();\n        return new StreamScope(s, outerScope, options);\n    }\n\n    /**\n     * Create a global scope filtered for the given reference type.\n     */\n    protected getGlobalScope(referenceType: string, _context: ReferenceInfo): Scope {\n        return this.globalScopeCache.get(referenceType, () => new MapScope(this.indexManager.allElements(referenceType)));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { URI } from 'vscode-uri';\nimport type { CommentProvider } from '../documentation/comment-provider.js';\nimport type { NameProvider } from '../references/name-provider.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode, GenericAstNode, Mutable, Reference } from '../syntax-tree.js';\nimport { isAstNode, isReference } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { findNodesForProperty } from '../utils/grammar-utils.js';\nimport type { AstNodeLocator } from '../workspace/ast-node-locator.js';\nimport type { DocumentSegment, LangiumDocument, LangiumDocuments } from '../workspace/documents.js';\n\nexport interface JsonSerializeOptions {\n    /** The space parameter for `JSON.stringify`, controlling whether and how to pretty-print the output. */\n    space?: string | number;\n    /** Whether to include the `$refText` property for references (the name used to identify the target node). */\n    refText?: boolean;\n    /** Whether to include the `$sourceText` property, which holds the full source text from which an AST node was parsed. */\n    sourceText?: boolean;\n    /** Whether to include the `$textRegion` property, which holds information to trace AST node properties to their respective source text regions. */\n    textRegions?: boolean;\n    /** Whether to include the `$comment` property, which holds comments according to the CommentProvider service. */\n    comments?: boolean;\n    /** The replacer parameter for `JSON.stringify`; the default replacer given as parameter should be used to apply basic replacements. */\n    replacer?: (key: string, value: unknown, defaultReplacer: (key: string, value: unknown) => unknown) => unknown\n    /** Used to convert and serialize URIs when the target of a cross-reference is in a different document. */\n    uriConverter?: (uri: URI, reference: Reference) => string\n}\n\nexport interface JsonDeserializeOptions {\n    /** Used to parse and convert URIs when the target of a cross-reference is in a different document. 
*/\n    uriConverter?: (uri: string) => URI\n}\n\n/**\n * {@link AstNode}s that may carry information on their definition area within the DSL text.\n */\nexport interface AstNodeWithTextRegion extends AstNode {\n    $sourceText?: string;\n    $textRegion?: AstNodeRegionWithAssignments;\n}\n\n/**\n * {@link AstNode}s that may carry a semantically relevant comment.\n */\nexport interface AstNodeWithComment extends AstNode {\n    $comment?: string;\n}\n\nexport function isAstNodeWithComment(node: AstNode): node is AstNodeWithComment {\n    return typeof (node as AstNodeWithComment).$comment === 'string';\n}\n\n/**\n * A {@DocumentSegment} representing the definition area of an AstNode within the DSL text.\n * Usually contains text region information on all assigned property values of the AstNode,\n * and may contain the defining file's URI as string.\n */\nexport interface AstNodeRegionWithAssignments extends DocumentSegment {\n    /**\n     * A record containing an entry for each assigned property of the AstNode.\n     * The key is equal to the property name and the value is an array of the property values'\n     * text regions, regardless of whether the property is a single value or list property.\n     */\n    assignments?: Record;\n    /**\n     * The AstNode defining file's URI as string\n     */\n    documentURI?: string;\n}\n\n/**\n * Utility service for transforming an `AstNode` into a JSON string and vice versa.\n */\nexport interface JsonSerializer {\n    /**\n     * Serialize an `AstNode` into a JSON `string`.\n     * @param node The `AstNode` to be serialized.\n     * @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read.\n     */\n    serialize(node: AstNode, options?: JsonSerializeOptions): string;\n    /**\n     * Deserialize (parse) a JSON `string` into an `AstNode`.\n     */\n    deserialize(content: string, options?: JsonDeserializeOptions): T;\n}\n\n/**\n * A cross-reference in the serialized JSON representation of an AstNode.\n */\ninterface IntermediateReference {\n    /** URI pointing to the target element. This is either `#${path}` if the target is in the same document, or `${documentURI}#${path}` otherwise. */\n    $ref?: string\n    /** The actual text used to look up the reference target in the surrounding scope. */\n    $refText?: string\n    /** If any problem occurred while resolving the reference, it is described by this property. */\n    $error?: string\n}\n\nfunction isIntermediateReference(obj: unknown): obj is IntermediateReference {\n    return typeof obj === 'object' && !!obj && ('$ref' in obj || '$error' in obj);\n}\n\nexport class DefaultJsonSerializer implements JsonSerializer {\n\n    /** The set of AstNode properties to be ignored by the serializer. */\n    ignoreProperties = new Set(['$container', '$containerProperty', '$containerIndex', '$document', '$cstNode']);\n\n    /** The document that is currently processed by the serializer; this is used by the replacer function.  
*/\n    protected currentDocument: LangiumDocument | undefined;\n\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly nameProvider: NameProvider;\n    protected readonly commentProvider: CommentProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.langiumDocuments = services.shared.workspace.LangiumDocuments;\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n        this.nameProvider = services.references.NameProvider;\n        this.commentProvider = services.documentation.CommentProvider;\n    }\n\n    serialize(node: AstNode, options: JsonSerializeOptions = {}): string {\n        const specificReplacer = options?.replacer;\n        const defaultReplacer = (key: string, value: unknown) => this.replacer(key, value, options);\n        const replacer = specificReplacer ? (key: string, value: unknown) => specificReplacer(key, value, defaultReplacer) : defaultReplacer;\n\n        try {\n            this.currentDocument = getDocument(node);\n            return JSON.stringify(node, replacer, options?.space);\n        } finally {\n            this.currentDocument = undefined;\n        }\n    }\n\n    deserialize(content: string, options: JsonDeserializeOptions = {}): T {\n        const root = JSON.parse(content);\n        this.linkNode(root, root, options);\n        return root;\n    }\n\n    protected replacer(key: string, value: unknown, { refText, sourceText, textRegions, comments, uriConverter }: JsonSerializeOptions): unknown {\n        if (this.ignoreProperties.has(key)) {\n            return undefined;\n        } else if (isReference(value)) {\n            const refValue = value.ref;\n            const $refText = refText ? value.$refText : undefined;\n            if (refValue) {\n                const targetDocument = getDocument(refValue);\n                let targetUri = '';\n                if (this.currentDocument && this.currentDocument !== targetDocument) {\n                    if (uriConverter) {\n                        targetUri = uriConverter(targetDocument.uri, value);\n                    } else {\n                        targetUri = targetDocument.uri.toString();\n                    }\n                }\n                const targetPath = this.astNodeLocator.getAstNodePath(refValue);\n                return {\n                    $ref: `${targetUri}#${targetPath}`,\n                    $refText\n                } satisfies IntermediateReference;\n            } else {\n                return {\n                    $error: value.error?.message ?? 
'Could not resolve reference',\n                    $refText\n                } satisfies IntermediateReference;\n            }\n        } else if (isAstNode(value)) {\n            let astNode: AstNodeWithTextRegion | undefined = undefined;\n            if (textRegions) {\n                astNode = this.addAstNodeRegionWithAssignmentsTo({ ...value });\n                if ((!key || value.$document) && astNode?.$textRegion) {\n                    // The document URI is added to the root node of the resulting JSON tree\n                    astNode.$textRegion.documentURI = this.currentDocument?.uri.toString();\n                }\n            }\n            if (sourceText && !key) {\n                astNode ??= { ...value };\n                astNode.$sourceText = value.$cstNode?.text;\n            }\n            if (comments) {\n                astNode ??= { ...value };\n                const comment = this.commentProvider.getComment(value);\n                if (comment) {\n                    (astNode as AstNodeWithComment).$comment = comment.replace(/\\r/g, '');\n                }\n            }\n            return astNode ?? value;\n        } else {\n            return value;\n        }\n    }\n\n    protected addAstNodeRegionWithAssignmentsTo(node: AstNodeWithTextRegion) {\n        const createDocumentSegment: (cstNode: CstNode) => AstNodeRegionWithAssignments = cstNode => {\n            offset: cstNode.offset,\n            end: cstNode.end,\n            length: cstNode.length,\n            range: cstNode.range,\n        };\n\n        if (node.$cstNode) {\n            const textRegion = node.$textRegion = createDocumentSegment(node.$cstNode);\n            const assignments: Record = textRegion.assignments = {};\n\n            Object.keys(node).filter(key => !key.startsWith('$')).forEach(key => {\n                const propertyAssignments = findNodesForProperty(node.$cstNode, key).map(createDocumentSegment);\n                if (propertyAssignments.length !== 0) {\n                    assignments[key] = propertyAssignments;\n                }\n            });\n\n            return node;\n        }\n        return undefined;\n    }\n\n    protected linkNode(node: GenericAstNode, root: AstNode, options: JsonDeserializeOptions, container?: AstNode, containerProperty?: string, containerIndex?: number) {\n        for (const [propertyName, item] of Object.entries(node)) {\n            if (Array.isArray(item)) {\n                for (let index = 0; index < item.length; index++) {\n                    const element = item[index];\n                    if (isIntermediateReference(element)) {\n                        item[index] = this.reviveReference(node, propertyName, root, element, options);\n                    } else if (isAstNode(element)) {\n                        this.linkNode(element as GenericAstNode, root, options, node, propertyName, index);\n                    }\n                }\n            } else if (isIntermediateReference(item)) {\n                node[propertyName] = this.reviveReference(node, propertyName, root, item, options);\n            } else if (isAstNode(item)) {\n                this.linkNode(item as GenericAstNode, root, options, node, propertyName);\n            }\n        }\n        const mutable = node as Mutable;\n        mutable.$container = container;\n        mutable.$containerProperty = containerProperty;\n        mutable.$containerIndex = containerIndex;\n    }\n\n    protected reviveReference(container: AstNode, property: string, root: AstNode, reference: 
IntermediateReference, options: JsonDeserializeOptions): Reference | undefined {\n        let refText = reference.$refText;\n        let error = reference.$error;\n        if (reference.$ref) {\n            const ref = this.getRefNode(root, reference.$ref, options.uriConverter);\n            if (isAstNode(ref)) {\n                if (!refText) {\n                    refText = this.nameProvider.getName(ref);\n                }\n                return {\n                    $refText: refText ?? '',\n                    ref\n                };\n            } else {\n                error = ref;\n            }\n        }\n        if (error) {\n            const ref: Mutable = {\n                $refText: refText ?? ''\n            };\n            ref.error = {\n                container,\n                property,\n                message: error,\n                reference: ref\n            };\n            return ref;\n        } else {\n            return undefined;\n        }\n    }\n\n    protected getRefNode(root: AstNode, uri: string, uriConverter?: (uri: string) => URI): AstNode | string {\n        try {\n            const fragmentIndex = uri.indexOf('#');\n            if (fragmentIndex === 0) {\n                const node = this.astNodeLocator.getAstNode(root, uri.substring(1));\n                if (!node) {\n                    return 'Could not resolve path: ' + uri;\n                }\n                return node;\n            }\n            if (fragmentIndex < 0) {\n                const documentUri = uriConverter ? uriConverter(uri) : URI.parse(uri);\n                const document = this.langiumDocuments.getDocument(documentUri);\n                if (!document) {\n                    return 'Could not find document for URI: ' + uri;\n                }\n                return document.parseResult.value;\n            }\n            const documentUri = uriConverter ? uriConverter(uri.substring(0, fragmentIndex)) : URI.parse(uri.substring(0, fragmentIndex));\n            const document = this.langiumDocuments.getDocument(documentUri);\n            if (!document) {\n                return 'Could not find document for URI: ' + uri;\n            }\n            if (fragmentIndex === uri.length - 1) {\n                return document.parseResult.value;\n            }\n            const node = this.astNodeLocator.getAstNode(document.parseResult.value, uri.substring(fragmentIndex + 1));\n            if (!node) {\n                return 'Could not resolve URI: ' + uri;\n            }\n            return node;\n        } catch (err) {\n            return String(err);\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from './services.js';\nimport { UriUtils, type URI } from './utils/uri-utils.js';\n\n/**\n * The service registry provides access to the language-specific {@link LangiumCoreServices} optionally including LSP-related services.\n * These are resolved via the URI of a text document.\n */\nexport interface ServiceRegistry {\n\n    /**\n     * Register a language via its injected services.\n     */\n    register(language: LangiumCoreServices): void;\n\n    /**\n     * Retrieve the language-specific services for the given URI. 
In case only one language is\n     * registered, it may be used regardless of the URI format.\n     */\n    getServices(uri: URI): LangiumCoreServices;\n\n    /**\n     * The full set of registered language services.\n     */\n    readonly all: readonly LangiumCoreServices[];\n}\n\n/**\n * Generic registry for Langium services, but capable of being used with extending service sets as well (such as the lsp-complete LangiumCoreServices set)\n */\nexport class DefaultServiceRegistry implements ServiceRegistry {\n\n    protected singleton?: LangiumCoreServices;\n    protected map?: Record;\n\n    register(language: LangiumCoreServices): void {\n        if (!this.singleton && !this.map) {\n            // This is the first language to be registered; store it as singleton.\n            this.singleton = language;\n            return;\n        }\n        if (!this.map) {\n            this.map = {};\n            if (this.singleton) {\n                // Move the previous singleton instance to the new map.\n                for (const ext of this.singleton.LanguageMetaData.fileExtensions) {\n                    this.map[ext] = this.singleton;\n                }\n                this.singleton = undefined;\n            }\n        }\n        // Store the language services in the map.\n        for (const ext of language.LanguageMetaData.fileExtensions) {\n            if (this.map[ext] !== undefined && this.map[ext] !== language) {\n                console.warn(`The file extension ${ext} is used by multiple languages. It is now assigned to '${language.LanguageMetaData.languageId}'.`);\n            }\n            this.map[ext] = language;\n        }\n    }\n\n    getServices(uri: URI): LangiumCoreServices {\n        if (this.singleton !== undefined) {\n            return this.singleton;\n        }\n        if (this.map === undefined) {\n            throw new Error('The service registry is empty. 
Use `register` to register the services of a language.');\n        }\n        const ext = UriUtils.extname(uri);\n        const services = this.map[ext];\n        if (!services) {\n            throw new Error(`The service registry contains no services for the extension '${ext}'.`);\n        }\n        return services;\n    }\n\n    get all(): readonly LangiumCoreServices[] {\n        if (this.singleton !== undefined) {\n            return [this.singleton];\n        }\n        if (this.map !== undefined) {\n            return Object.values(this.map);\n        }\n        return [];\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CodeDescription, DiagnosticRelatedInformation, DiagnosticTag, integer, Range } from 'vscode-languageserver-types';\nimport type { CancellationToken } from '../utils/cancellation.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstReflection, Properties } from '../syntax-tree.js';\nimport type { MaybePromise } from '../utils/promise-utils.js';\nimport type { Stream } from '../utils/stream.js';\nimport type { DocumentSegment } from '../workspace/documents.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { isOperationCancelled } from '../utils/promise-utils.js';\nimport { stream } from '../utils/stream.js';\n\nexport type DiagnosticInfo> = {\n    /** The AST node to which the diagnostic is attached. */\n    node: N;\n    /** If a property name is given, the diagnostic is restricted to the corresponding text region. */\n    property?: P;\n    /** If the value of a keyword is given, the diagnostic will appear at its corresponding text region */\n    keyword?: string;\n    /** In case of a multi-value property (array), an index can be given to select a specific element. */\n    index?: number;\n    /** If you want to create a diagnostic independent to any property, use the range property. */\n    range?: Range;\n    /** The diagnostic's code, which usually appear in the user interface. */\n    code?: integer | string;\n    /** An optional property to describe the error code. */\n    codeDescription?: CodeDescription;\n    /** Additional metadata about the diagnostic. */\n    tags?: DiagnosticTag[];\n    /** An array of related diagnostic information, e.g. when symbol-names within a scope collide all definitions can be marked via this property. */\n    relatedInformation?: DiagnosticRelatedInformation[];\n    /** A data entry field that is preserved between a `textDocument/publishDiagnostics` notification and `textDocument/codeAction` request. */\n    data?: unknown;\n}\n\n/**\n * Shape of information commonly used in the `data` field of diagnostics.\n */\nexport interface DiagnosticData {\n    /** Diagnostic code for identifying which code action to apply. This code is _not_ shown in the user interface. */\n    code: string\n    /** Specifies where to apply the code action in the form of a `DocumentSegment`. */\n    actionSegment?: DocumentSegment\n    /** Specifies where to apply the code action in the form of a `Range`. */\n    actionRange?: Range\n}\n\n/**\n * Create DiagnosticData for a given diagnostic code. 
The result can be put into the `data` field of a DiagnosticInfo.\n */\nexport function diagnosticData(code: string): DiagnosticData {\n    return { code };\n}\n\nexport type ValidationAcceptor = (severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo) => void\n\nexport type ValidationCheck = (node: T, accept: ValidationAcceptor, cancelToken: CancellationToken) => MaybePromise;\n\n/**\n * A utility type for associating non-primitive AST types to corresponding validation checks. For example:\n *\n * ```ts\n *   const checks: ValidationChecks = {\n *       State: validator.checkStateNameStartsWithCapital\n *    };\n * ```\n *\n * If an AST type does not extend AstNode, e.g. if it describes a union of string literals, that type's name must not occur as a key in objects of type `ValidationCheck<...>`.\n *\n * @param T a type definition mapping language specific type names (keys) to the corresponding types (values)\n */\nexport type ValidationChecks = {\n    [K in keyof T]?: T[K] extends AstNode ? ValidationCheck | Array> : never\n} & {\n    AstNode?: ValidationCheck | Array>;\n}\n\n/**\n * `fast` checks can be executed after every document change (i.e. as the user is typing). If a check\n * is too slow it can delay the response to document changes, yielding bad user experience. By marking\n * it as `slow`, it will be skipped for normal as-you-type validation. Then it's up to you when to\n * schedule these long-running checks: after the fast checks are done, or after saving a document,\n * or with an explicit command, etc.\n *\n * `built-in` checks are errors produced by the lexer, the parser, or the linker. They cannot be used\n * for custom validation checks.\n */\nexport type ValidationCategory = 'fast' | 'slow' | 'built-in'\n\nexport namespace ValidationCategory {\n    export const all: readonly ValidationCategory[] = ['fast', 'slow', 'built-in'];\n}\n\ntype ValidationCheckEntry = {\n    check: ValidationCheck\n    category: ValidationCategory\n}\n\n/**\n * Manages a set of `ValidationCheck`s to be applied when documents are validated.\n */\nexport class ValidationRegistry {\n    private readonly entries = new MultiMap();\n    private readonly reflection: AstReflection;\n\n    constructor(services: LangiumCoreServices) {\n        this.reflection = services.shared.AstReflection;\n    }\n\n    /**\n     * Register a set of validation checks. Each value in the record can be either a single validation check (i.e. 
a function)\n     * or an array of validation checks.\n     *\n     * @param checksRecord Set of validation checks to register.\n     * @param category Optional category for the validation checks (defaults to `'fast'`).\n     * @param thisObj Optional object to be used as `this` when calling the validation check functions.\n     */\n    register(checksRecord: ValidationChecks, thisObj: ThisParameterType = this, category: ValidationCategory = 'fast'): void {\n        if (category === 'built-in') {\n            throw new Error(\"The 'built-in' category is reserved for lexer, parser, and linker errors.\");\n        }\n        for (const [type, ch] of Object.entries(checksRecord)) {\n            const callbacks = ch as ValidationCheck | ValidationCheck[];\n            if (Array.isArray(callbacks)) {\n                for (const check of callbacks) {\n                    const entry: ValidationCheckEntry = {\n                        check: this.wrapValidationException(check, thisObj),\n                        category\n                    };\n                    this.addEntry(type, entry);\n                }\n            } else if (typeof callbacks === 'function') {\n                const entry: ValidationCheckEntry = {\n                    check: this.wrapValidationException(callbacks, thisObj),\n                    category\n                };\n                this.addEntry(type, entry);\n            }\n        }\n    }\n\n    protected wrapValidationException(check: ValidationCheck, thisObj: unknown): ValidationCheck {\n        return async (node, accept, cancelToken) => {\n            try {\n                await check.call(thisObj, node, accept, cancelToken);\n            } catch (err) {\n                if (isOperationCancelled(err)) {\n                    throw err;\n                }\n                console.error('An error occurred during validation:', err);\n                const message = err instanceof Error ? 
err.message : String(err);\n                if (err instanceof Error && err.stack) {\n                    console.error(err.stack);\n                }\n                accept('error', 'An error occurred during validation: ' + message, { node });\n            }\n        };\n    }\n\n    protected addEntry(type: string, entry: ValidationCheckEntry): void {\n        if (type === 'AstNode') {\n            this.entries.add('AstNode', entry);\n            return;\n        }\n        for (const subtype of this.reflection.getAllSubTypes(type)) {\n            this.entries.add(subtype, entry);\n        }\n    }\n\n    getChecks(type: string, categories?: ValidationCategory[]): Stream {\n        let checks = stream(this.entries.get(type))\n            .concat(this.entries.get('AstNode'));\n        if (categories) {\n            checks = checks.filter(entry => categories.includes(entry.category));\n        }\n        return checks.map(entry => entry.check);\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { MismatchedTokenException } from 'chevrotain';\nimport type { DiagnosticSeverity, Position, Range, Diagnostic } from 'vscode-languageserver-types';\nimport type { LanguageMetaData } from '../languages/language-meta-data.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, CstNode } from '../syntax-tree.js';\nimport type { LangiumDocument } from '../workspace/documents.js';\nimport type { DiagnosticData, DiagnosticInfo, ValidationAcceptor, ValidationCategory, ValidationRegistry } from './validation-registry.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { findNodeForKeyword, findNodeForProperty } from '../utils/grammar-utils.js';\nimport { streamAst } from '../utils/ast-utils.js';\nimport { tokenToRange } from '../utils/cst-utils.js';\nimport { interruptAndCheck, isOperationCancelled } from '../utils/promise-utils.js';\nimport { diagnosticData } from './validation-registry.js';\n\nexport interface ValidationOptions {\n    /**\n     * If this is set, only the checks associated with these categories are executed; otherwise\n     * all checks are executed. The default category if not specified to the registry is `'fast'`.\n     */\n    categories?: ValidationCategory[];\n    /** If true, no further diagnostics are reported if there are lexing errors. */\n    stopAfterLexingErrors?: boolean\n    /** If true, no further diagnostics are reported if there are parsing errors. */\n    stopAfterParsingErrors?: boolean\n    /** If true, no further diagnostics are reported if there are linking errors. 
*/\n    stopAfterLinkingErrors?: boolean\n}\n\n/**\n * Language-specific service for validating `LangiumDocument`s.\n */\nexport interface DocumentValidator {\n    /**\n     * Validates the whole specified document.\n     *\n     * @param document specified document to validate\n     * @param options options to control the validation process\n     * @param cancelToken allows to cancel the current operation\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    validateDocument(document: LangiumDocument, options?: ValidationOptions, cancelToken?: CancellationToken): Promise;\n}\n\nexport class DefaultDocumentValidator implements DocumentValidator {\n\n    protected readonly validationRegistry: ValidationRegistry;\n    protected readonly metadata: LanguageMetaData;\n\n    constructor(services: LangiumCoreServices) {\n        this.validationRegistry = services.validation.ValidationRegistry;\n        this.metadata = services.LanguageMetaData;\n    }\n\n    async validateDocument(document: LangiumDocument, options: ValidationOptions = {}, cancelToken = CancellationToken.None): Promise {\n        const parseResult = document.parseResult;\n        const diagnostics: Diagnostic[] = [];\n\n        await interruptAndCheck(cancelToken);\n\n        if (!options.categories || options.categories.includes('built-in')) {\n            this.processLexingErrors(parseResult, diagnostics, options);\n            if (options.stopAfterLexingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.LexingError)) {\n                return diagnostics;\n            }\n\n            this.processParsingErrors(parseResult, diagnostics, options);\n            if (options.stopAfterParsingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.ParsingError)) {\n                return diagnostics;\n            }\n\n            this.processLinkingErrors(document, diagnostics, options);\n            if (options.stopAfterLinkingErrors && diagnostics.some(d => d.data?.code === DocumentValidator.LinkingError)) {\n                return diagnostics;\n            }\n        }\n\n        // Process custom validations\n        try {\n            diagnostics.push(...await this.validateAst(parseResult.value, options, cancelToken));\n        } catch (err) {\n            if (isOperationCancelled(err)) {\n                throw err;\n            }\n            console.error('An error occurred during validation:', err);\n        }\n\n        await interruptAndCheck(cancelToken);\n\n        return diagnostics;\n    }\n\n    protected processLexingErrors(parseResult: ParseResult, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const lexerError of parseResult.lexerErrors) {\n            const diagnostic: Diagnostic = {\n                severity: toDiagnosticSeverity('error'),\n                range: {\n                    start: {\n                        line: lexerError.line! - 1,\n                        character: lexerError.column! - 1\n                    },\n                    end: {\n                        line: lexerError.line! - 1,\n                        character: lexerError.column! 
+ lexerError.length - 1\n                    }\n                },\n                message: lexerError.message,\n                data: diagnosticData(DocumentValidator.LexingError),\n                source: this.getSource()\n            };\n            diagnostics.push(diagnostic);\n        }\n    }\n\n    protected processParsingErrors(parseResult: ParseResult, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const parserError of parseResult.parserErrors) {\n            let range: Range | undefined = undefined;\n            // We can run into the chevrotain error recovery here\n            // The token contained in the parser error might be automatically inserted\n            // In this case every position value will be `NaN`\n            if (isNaN(parserError.token.startOffset)) {\n                // Some special parser error types contain a `previousToken`\n                // We can simply append our diagnostic to that token\n                if ('previousToken' in parserError) {\n                    const token = (parserError as MismatchedTokenException).previousToken;\n                    if (!isNaN(token.startOffset)) {\n                        const position: Position = { line: token.endLine! - 1, character: token.endColumn! };\n                        range = { start: position, end: position};\n                    } else {\n                        // No valid prev token. Might be empty document or containing only hidden tokens.\n                        // Point to document start\n                        const position: Position = { line: 0, character: 0 };\n                        range = { start: position, end: position};\n                    }\n                }\n            } else {\n                range = tokenToRange(parserError.token);\n            }\n            if (range) {\n                const diagnostic: Diagnostic = {\n                    severity: toDiagnosticSeverity('error'),\n                    range,\n                    message: parserError.message,\n                    data: diagnosticData(DocumentValidator.ParsingError),\n                    source: this.getSource()\n                };\n                diagnostics.push(diagnostic);\n            }\n        }\n    }\n\n    protected processLinkingErrors(document: LangiumDocument, diagnostics: Diagnostic[], _options: ValidationOptions): void {\n        for (const reference of document.references) {\n            const linkingError = reference.error;\n            if (linkingError) {\n                const info: DiagnosticInfo = {\n                    node: linkingError.container,\n                    property: linkingError.property,\n                    index: linkingError.index,\n                    data: {\n                        code: DocumentValidator.LinkingError,\n                        containerType: linkingError.container.$type,\n                        property: linkingError.property,\n                        refText: linkingError.reference.$refText\n                    } satisfies LinkingErrorData\n                };\n                diagnostics.push(this.toDiagnostic('error', linkingError.message, info));\n            }\n        }\n    }\n\n    protected async validateAst(rootNode: AstNode, options: ValidationOptions, cancelToken = CancellationToken.None): Promise {\n        const validationItems: Diagnostic[] = [];\n        const acceptor: ValidationAcceptor = (severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo) => {\n            
validationItems.push(this.toDiagnostic(severity, message, info));\n        };\n\n        await Promise.all(streamAst(rootNode).map(async node => {\n            await interruptAndCheck(cancelToken);\n            const checks = this.validationRegistry.getChecks(node.$type, options.categories);\n            for (const check of checks) {\n                await check(node, acceptor, cancelToken);\n            }\n        }));\n        return validationItems;\n    }\n\n    protected toDiagnostic(severity: 'error' | 'warning' | 'info' | 'hint', message: string, info: DiagnosticInfo): Diagnostic {\n        return {\n            message,\n            range: getDiagnosticRange(info),\n            severity: toDiagnosticSeverity(severity),\n            code: info.code,\n            codeDescription: info.codeDescription,\n            tags: info.tags,\n            relatedInformation: info.relatedInformation,\n            data: info.data,\n            source: this.getSource()\n        };\n    }\n\n    protected getSource(): string | undefined {\n        return this.metadata.languageId;\n    }\n}\n\nexport function getDiagnosticRange(info: DiagnosticInfo): Range {\n    if (info.range) {\n        return info.range;\n    }\n    let cstNode: CstNode | undefined;\n    if (typeof info.property === 'string') {\n        cstNode = findNodeForProperty(info.node.$cstNode, info.property, info.index);\n    } else if (typeof info.keyword === 'string') {\n        cstNode = findNodeForKeyword(info.node.$cstNode, info.keyword, info.index);\n    }\n    cstNode ??= info.node.$cstNode;\n    if (!cstNode) {\n        return {\n            start: { line: 0, character: 0 },\n            end: { line: 0, character: 0 }\n        };\n    }\n    return cstNode.range;\n}\n\nexport function toDiagnosticSeverity(severity: 'error' | 'warning' | 'info' | 'hint'): DiagnosticSeverity {\n    switch (severity) {\n        case 'error':\n            return 1; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Error\n        case 'warning':\n            return 2; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Warning\n        case 'info':\n            return 3; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Information\n        case 'hint':\n            return 4; // according to vscode-languageserver-types/lib/esm/main.js#DiagnosticSeverity.Hint\n        default:\n            throw new Error('Invalid diagnostic severity: ' + severity);\n    }\n}\n\nexport namespace DocumentValidator {\n    export const LexingError = 'lexing-error';\n    export const ParsingError = 'parsing-error';\n    export const LinkingError = 'linking-error';\n}\n\nexport interface LinkingErrorData extends DiagnosticData {\n    containerType: string\n    property: string\n    refText: string\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { URI } from '../utils/uri-utils.js';\nimport type { NameProvider } from '../references/name-provider.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, ReferenceInfo } from '../syntax-tree.js';\nimport type { AstNodeLocator } from './ast-node-locator.js';\nimport type { 
DocumentSegment, LangiumDocument } from './documents.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { isLinkingError } from '../syntax-tree.js';\nimport { getDocument, streamAst, streamReferences } from '../utils/ast-utils.js';\nimport { toDocumentSegment } from '../utils/cst-utils.js';\nimport { interruptAndCheck } from '../utils/promise-utils.js';\nimport { UriUtils } from '../utils/uri-utils.js';\n\n/**\n * Language-specific service for creating descriptions of AST nodes to be used for cross-reference resolutions.\n */\nexport interface AstNodeDescriptionProvider {\n\n    /**\n     * Create a description for the given AST node. This service method is typically used while indexing\n     * the contents of a document and during scope computation.\n     *\n     * @param node An AST node.\n     * @param name The name to be used to refer to the AST node. By default, this is determined by the\n     *     `NameProvider` service, but alternative names may be provided according to the semantics\n     *     of your language.\n     * @param document The document containing the AST node. If omitted, it is taken from the root AST node.\n     */\n    createDescription(node: AstNode, name: string | undefined, document?: LangiumDocument): AstNodeDescription;\n\n}\n\nexport class DefaultAstNodeDescriptionProvider implements AstNodeDescriptionProvider {\n\n    protected readonly astNodeLocator: AstNodeLocator;\n    protected readonly nameProvider: NameProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.astNodeLocator = services.workspace.AstNodeLocator;\n        this.nameProvider = services.references.NameProvider;\n    }\n\n    createDescription(node: AstNode, name: string | undefined, document: LangiumDocument = getDocument(node)): AstNodeDescription {\n        name ??= this.nameProvider.getName(node);\n        const path = this.astNodeLocator.getAstNodePath(node);\n        if (!name) {\n            throw new Error(`Node at path ${path} has no name.`);\n        }\n        let nameNodeSegment: DocumentSegment | undefined;\n        const nameSegmentGetter = () => nameNodeSegment ??= toDocumentSegment(this.nameProvider.getNameNode(node) ?? node.$cstNode);\n        return {\n            node,\n            name,\n            get nameSegment() {\n                return nameSegmentGetter();\n            },\n            selectionSegment: toDocumentSegment(node.$cstNode),\n            type: node.$type,\n            documentUri: document.uri,\n            path\n        };\n    }\n\n}\n\n/**\n * Describes a cross-reference within a document or between two documents.\n */\nexport interface ReferenceDescription {\n    /** URI of the document that holds a reference */\n    sourceUri: URI\n    /** Path to AstNode that holds a reference */\n    sourcePath: string\n    /** Target document uri */\n    targetUri: URI\n    /** Path to the target AstNode inside the document */\n    targetPath: string\n    /** Segment of the reference text. */\n    segment: DocumentSegment\n    /** Marks a local reference i.e. a cross reference inside a document.   */\n    local?: boolean\n}\n\n/**\n * Language-specific service to create descriptions of all cross-references in a document. These are used by the `IndexManager`\n * to determine which documents are affected and should be rebuilt when a document is changed.\n */\nexport interface ReferenceDescriptionProvider {\n    /**\n     * Create descriptions of all cross-references found in the given document. 
These descriptions are\n     * gathered by the `IndexManager` and stored in the global index so they can be considered when\n     * a document change is reported by the client.\n     *\n     * @param document The document in which to gather cross-references.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    createDescriptions(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n}\n\nexport class DefaultReferenceDescriptionProvider implements ReferenceDescriptionProvider {\n\n    protected readonly nodeLocator: AstNodeLocator;\n\n    constructor(services: LangiumCoreServices) {\n        this.nodeLocator = services.workspace.AstNodeLocator;\n    }\n\n    async createDescriptions(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const descr: ReferenceDescription[] = [];\n        const rootNode = document.parseResult.value;\n        for (const astNode of streamAst(rootNode)) {\n            await interruptAndCheck(cancelToken);\n            streamReferences(astNode).filter(refInfo => !isLinkingError(refInfo)).forEach(refInfo => {\n                // TODO: Consider logging a warning or throw an exception when DocumentState is < than Linked\n                const description = this.createDescription(refInfo);\n                if (description) {\n                    descr.push(description);\n                }\n            });\n        }\n        return descr;\n    }\n\n    protected createDescription(refInfo: ReferenceInfo): ReferenceDescription | undefined {\n        const targetNodeDescr = refInfo.reference.$nodeDescription;\n        const refCstNode = refInfo.reference.$refNode;\n        if (!targetNodeDescr || !refCstNode) {\n            return undefined;\n        }\n        const docUri = getDocument(refInfo.container).uri;\n        return {\n            sourceUri: docUri,\n            sourcePath: this.nodeLocator.getAstNodePath(refInfo.container),\n            targetUri: targetNodeDescr.documentUri,\n            targetPath: targetNodeDescr.path,\n            segment: toDocumentSegment(refCstNode),\n            local: UriUtils.equals(targetNodeDescr.documentUri, docUri)\n        };\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { AstNode } from '../syntax-tree.js';\n\n/**\n * Language-specific service for locating an `AstNode` in a document.\n */\nexport interface AstNodeLocator {\n\n    /**\n     * Creates a path represented by a `string` that identifies an `AstNode` inside its document.\n     * It must be possible to retrieve exactly the same `AstNode` from the document using this path.\n     *\n     * @param node The `AstNode` for which to create the path.\n     * @returns a path represented by a `string` that identifies `node` inside its document.\n     * @see AstNodeLocator.getAstNode\n     */\n    getAstNodePath(node: AstNode): string;\n\n    /**\n     * Locates an `AstNode` inside another node by following the given path.\n     *\n     * @param node Parent element.\n     * @param path Describes how to locate the `AstNode` inside the given `node`.\n     * @returns The `AstNode` located under the given 
path, or `undefined` if the path cannot be resolved.\n     * @see AstNodeLocator.getAstNodePath\n     */\n    getAstNode(node: AstNode, path: string): T | undefined;\n\n}\n\nexport class DefaultAstNodeLocator implements AstNodeLocator {\n    protected segmentSeparator = '/';\n    protected indexSeparator = '@';\n\n    getAstNodePath(node: AstNode): string {\n        if (node.$container) {\n            const containerPath = this.getAstNodePath(node.$container);\n            const newSegment = this.getPathSegment(node);\n            const nodePath = containerPath + this.segmentSeparator + newSegment;\n            return nodePath;\n        }\n        return '';\n    }\n\n    protected getPathSegment({ $containerProperty, $containerIndex }: AstNode): string {\n        if (!$containerProperty) {\n            throw new Error(\"Missing '$containerProperty' in AST node.\");\n        }\n        if ($containerIndex !== undefined) {\n            return $containerProperty + this.indexSeparator + $containerIndex;\n        }\n        return $containerProperty;\n    }\n\n    getAstNode(node: AstNode, path: string): T | undefined {\n        const segments = path.split(this.segmentSeparator);\n        return segments.reduce((previousValue, currentValue) => {\n            if (!previousValue || currentValue.length === 0) {\n                return previousValue;\n            }\n            const propertyIndex = currentValue.indexOf(this.indexSeparator);\n            if (propertyIndex > 0) {\n                const property = currentValue.substring(0, propertyIndex);\n                const arrayIndex = parseInt(currentValue.substring(propertyIndex + 1));\n                const array = (previousValue as unknown as Record)[property];\n                return array?.[arrayIndex];\n            }\n            return (previousValue as unknown as Record)[currentValue];\n        }, node) as T;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ConfigurationItem, DidChangeConfigurationParams, DidChangeConfigurationRegistrationOptions, InitializeParams, InitializedParams } from 'vscode-languageserver-protocol';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport { Deferred } from '../utils/promise-utils.js';\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\nexport interface ConfigurationProvider {\n\n    /**\n     * A promise that resolves when the configuration provider is ready to be used.\n     */\n    readonly ready: Promise;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialize` request.\n     */\n    initialize(params: InitializeParams): void;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialized` notification.\n     */\n    initialized(params: ConfigurationInitializedParams): Promise;\n\n    /**\n     * Returns a configuration value stored for the given language.\n     *\n     * @param language The language id\n     * @param configuration Configuration name\n     */\n    getConfiguration(language: string, configuration: string): Promise;\n\n    /**\n  
   *  Updates the cached configurations using the `change` notification parameters.\n     *\n     * @param change The parameters of a change configuration notification.\n     * `settings` property of the change object could be expressed as `Record>`\n     */\n    updateConfiguration(change: DidChangeConfigurationParams): void;\n}\n\nexport interface ConfigurationInitializedParams extends InitializedParams {\n    register?: (params: DidChangeConfigurationRegistrationOptions) => void,\n    fetchConfiguration?: (configuration: ConfigurationItem[]) => Promise\n}\n\n/**\n * Base configuration provider for building up other configuration providers\n */\nexport class DefaultConfigurationProvider implements ConfigurationProvider {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly _ready = new Deferred();\n    protected settings: Record> = {};\n    protected workspaceConfig = false;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n    }\n\n    get ready(): Promise {\n        return this._ready.promise;\n    }\n\n    initialize(params: InitializeParams): void {\n        this.workspaceConfig = params.capabilities.workspace?.configuration ?? false;\n    }\n\n    async initialized(params: ConfigurationInitializedParams): Promise {\n        if (this.workspaceConfig) {\n            if (params.register) {\n                // params.register(...) is a function to be provided by the calling language server for the sake of\n                //  decoupling this implementation from the concrete LSP implementations, specifically the LSP Connection\n\n                const languages = this.serviceRegistry.all;\n                params.register({\n                    // Listen to configuration changes for all languages\n                    section: languages.map(lang => this.toSectionName(lang.LanguageMetaData.languageId))\n                });\n            }\n\n            if (params.fetchConfiguration) {\n                // params.fetchConfiguration(...) 
is a function to be provided by the calling language server for the sake of\n                //  decoupling this implementation from the concrete LSP implementations, specifically the LSP Connection\n                const configToUpdate = this.serviceRegistry.all.map(lang => {\n                    // Fetch the configuration changes for all languages\n                    section: this.toSectionName(lang.LanguageMetaData.languageId)\n                });\n\n                // get workspace configurations (default scope URI)\n                const configs = await params.fetchConfiguration(configToUpdate);\n                configToUpdate.forEach((conf, idx) => {\n                    this.updateSectionConfiguration(conf.section!, configs[idx]);\n                });\n            }\n        }\n        this._ready.resolve();\n    }\n\n    /**\n     *  Updates the cached configurations using the `change` notification parameters.\n     *\n     * @param change The parameters of a change configuration notification.\n     * `settings` property of the change object could be expressed as `Record>`\n     */\n    updateConfiguration(change: DidChangeConfigurationParams): void {\n        if (!change.settings) {\n            return;\n        }\n        Object.keys(change.settings).forEach(section => {\n            this.updateSectionConfiguration(section, change.settings[section]);\n        });\n    }\n\n    protected updateSectionConfiguration(section: string, configuration: any): void {\n        this.settings[section] = configuration;\n    }\n\n    /**\n    * Returns a configuration value stored for the given language.\n    *\n    * @param language The language id\n    * @param configuration Configuration name\n    */\n    async getConfiguration(language: string, configuration: string): Promise {\n        await this.ready;\n\n        const sectionName = this.toSectionName(language);\n        if (this.settings[sectionName]) {\n            return this.settings[sectionName][configuration];\n        }\n    }\n\n    protected toSectionName(languageId: string): string {\n        return `${languageId}`;\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport interface Disposable {\n    /**\n     * Dispose this object.\n     */\n    dispose(): void;\n}\n\nexport interface AsyncDisposable {\n    /**\n     * Dispose this object.\n     */\n    dispose(): Promise;\n}\n\nexport namespace Disposable {\n    export function create(callback: () => Promise): AsyncDisposable;\n    export function create(callback: () => void): Disposable;\n    export function create(callback: () => void | Promise): Disposable | AsyncDisposable {\n        return {\n            dispose: async () => await callback()\n        };\n    }\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { Disposable } from '../utils/disposable.js';\nimport type { ServiceRegistry } from 
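A short sketch of reading and updating settings through the configuration provider shown above. The `shared` object is assumed to be a LangiumSharedCoreServices instance exposing the provider as `shared.workspace.ConfigurationProvider`; the language id and setting names are placeholders.

    declare const shared: LangiumSharedCoreServices;                // assumed to exist
    const configProvider = shared.workspace.ConfigurationProvider;  // service path assumed

    // Wait for `initialized` to finish, then read `<languageId>.formatting`.
    await configProvider.ready;
    const formatting = await configProvider.getConfiguration('my-language', 'formatting');

    // Feed a change notification into the cache; sections are keyed by language id.
    configProvider.updateConfiguration({
        settings: { 'my-language': { formatting: { insertSpaces: true } } }
    });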
'../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { MaybePromise } from '../utils/promise-utils.js';\nimport type { Deferred } from '../utils/promise-utils.js';\nimport type { ValidationOptions } from '../validation/document-validator.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { LangiumDocument, LangiumDocuments, LangiumDocumentFactory } from './documents.js';\nimport { MultiMap } from '../utils/collections.js';\nimport { OperationCancelled, interruptAndCheck } from '../utils/promise-utils.js';\nimport { stream } from '../utils/stream.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { ValidationCategory } from '../validation/validation-registry.js';\nimport { DocumentState } from './documents.js';\n\nexport interface BuildOptions {\n    /**\n     * Control the validation phase with this option:\n     *  - `true` enables all validation checks and forces revalidating the documents\n     *  - `false` or `undefined` disables all validation checks\n     *  - An object runs only the necessary validation checks; the `categories` property restricts this to a specific subset\n     */\n    validation?: boolean | ValidationOptions\n}\n\nexport interface DocumentBuildState {\n    /** Whether a document has completed its last build process. */\n    completed: boolean\n    /** The options used for the last build process. */\n    options: BuildOptions\n    /** Additional information about the last build result. */\n    result?: {\n        validationChecks?: ValidationCategory[]\n    }\n}\n\n/**\n * Shared-service for building and updating `LangiumDocument`s.\n */\nexport interface DocumentBuilder {\n\n    /** The options used for rebuilding documents after an update. */\n    updateBuildOptions: BuildOptions;\n\n    /**\n     * Execute all necessary build steps for the given documents.\n     *\n     * @param documents Set of documents to be built.\n     * @param options Options for the document builder.\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    build(documents: Array>, options?: BuildOptions, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * This method is called when a document change is detected. It updates the state of all\n     * affected documents, including those with references to the changed ones, so they are rebuilt.\n     *\n     * @param changed URIs of changed or created documents\n     * @param deleted URIs of deleted documents\n     * @param cancelToken allows to cancel the current operation\n     * @throws `OperationCancelled` if cancellation is detected during execution\n     */\n    update(changed: URI[], deleted: URI[], cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Notify the given callback when a document update was triggered, but before any document\n     * is rebuilt. Listeners to this event should not perform any long-running task.\n     */\n    onUpdate(callback: DocumentUpdateListener): Disposable;\n\n    /**\n     * Notify the given callback when a set of documents has been built reaching a desired target state.\n     */\n    onBuildPhase(targetState: DocumentState, callback: DocumentBuildListener): Disposable;\n\n    /**\n     * Wait until the workspace has reached the specified state for all documents.\n     *\n     * @param state The desired state. 
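The `validation` build option documented above accepts either a boolean or a ValidationOptions object. A minimal sketch, assuming `builder` and `documents` come from an existing Langium setup and the relevant types are imported from 'langium':

    declare const builder: DocumentBuilder;        // assumed to exist
    declare const documents: LangiumDocument[];    // assumed to exist

    // Run all phases, but restrict the validation phase to the 'fast' category.
    const options: BuildOptions = {
        validation: { categories: ['fast'] }
    };
    await builder.build(documents, options);

    // Passing `true` instead forces a full re-validation, even of already validated documents.
    await builder.build(documents, { validation: true });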
The promise won't resolve until all documents have reached this state\n     * @param cancelToken Optionally allows to cancel the wait operation, disposing any listeners in the process\n     * @throws `OperationCancelled` if cancellation has been requested before the state has been reached\n     */\n    waitUntil(state: DocumentState, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Wait until the document specified by the {@link uri} has reached the specified state.\n     *\n     * @param state The desired state. The promise won't resolve until the document has reached this state.\n     * @param uri The specified URI that points to the document. If the URI does not exist, the promise will resolve once the workspace has reached the specified state.\n     * @param cancelToken Optionally allows to cancel the wait operation, disposing any listeners in the process.\n     * @return The URI of the document that has reached the desired state, or `undefined` if the document does not exist.\n     * @throws `OperationCancelled` if cancellation has been requested before the state has been reached\n     */\n    waitUntil(state: DocumentState, uri?: URI, cancelToken?: CancellationToken): Promise;\n}\n\nexport type DocumentUpdateListener = (changed: URI[], deleted: URI[]) => void | Promise\nexport type DocumentBuildListener = (built: LangiumDocument[], cancelToken: CancellationToken) => void | Promise\nexport class DefaultDocumentBuilder implements DocumentBuilder {\n\n    updateBuildOptions: BuildOptions = {\n        // Default: run only the built-in validation checks and those in the _fast_ category (includes those without category)\n        validation: {\n            categories: ['built-in', 'fast']\n        }\n    };\n\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly langiumDocumentFactory: LangiumDocumentFactory;\n    protected readonly indexManager: IndexManager;\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly updateListeners: DocumentUpdateListener[] = [];\n    protected readonly buildPhaseListeners = new MultiMap();\n    protected readonly buildState = new Map();\n    protected readonly documentBuildWaiters = new Map>();\n    protected currentState = DocumentState.Changed;\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.langiumDocuments = services.workspace.LangiumDocuments;\n        this.langiumDocumentFactory = services.workspace.LangiumDocumentFactory;\n        this.indexManager = services.workspace.IndexManager;\n        this.serviceRegistry = services.ServiceRegistry;\n    }\n\n    async build(documents: Array>, options: BuildOptions = {}, cancelToken = CancellationToken.None): Promise {\n        for (const document of documents) {\n            const key = document.uri.toString();\n            if (document.state === DocumentState.Validated) {\n                if (typeof options.validation === 'boolean' && options.validation) {\n                    // Force re-running all validation checks\n                    document.state = DocumentState.IndexedReferences;\n                    document.diagnostics = undefined;\n                    this.buildState.delete(key);\n                } else if (typeof options.validation === 'object') {\n                    const buildState = this.buildState.get(key);\n                    const previousCategories = buildState?.result?.validationChecks;\n                    if (previousCategories) {\n                        // Validation with explicit options 
was requested for a document that has already been partly validated.\n                        // In this case, we need to merge the previous validation categories with the new ones.\n                        const newCategories = options.validation.categories ?? ValidationCategory.all as ValidationCategory[];\n                        const categories = newCategories.filter(c => !previousCategories.includes(c));\n                        if (categories.length > 0) {\n                            this.buildState.set(key, {\n                                completed: false,\n                                options: {\n                                    validation: {\n                                        ...options.validation,\n                                        categories\n                                    }\n                                },\n                                result: buildState.result\n                            });\n                            document.state = DocumentState.IndexedReferences;\n                        }\n                    }\n                }\n            } else {\n                // Default: forget any previous build options\n                this.buildState.delete(key);\n            }\n        }\n        this.currentState = DocumentState.Changed;\n        await this.emitUpdate(documents.map(e => e.uri), []);\n        await this.buildDocuments(documents, options, cancelToken);\n    }\n\n    async update(changed: URI[], deleted: URI[], cancelToken = CancellationToken.None): Promise {\n        this.currentState = DocumentState.Changed;\n        // Remove all metadata of documents that are reported as deleted\n        for (const deletedUri of deleted) {\n            this.langiumDocuments.deleteDocument(deletedUri);\n            this.buildState.delete(deletedUri.toString());\n            this.indexManager.remove(deletedUri);\n        }\n        // Set the state of all changed documents to `Changed` so they are completely rebuilt\n        for (const changedUri of changed) {\n            const invalidated = this.langiumDocuments.invalidateDocument(changedUri);\n            if (!invalidated) {\n                // We create an unparsed, invalid document.\n                // This will be parsed as soon as we reach the first document builder phase.\n                // This allows to cancel the parsing process later in case we need it.\n                const newDocument = this.langiumDocumentFactory.fromModel({ $type: 'INVALID' }, changedUri);\n                newDocument.state = DocumentState.Changed;\n                this.langiumDocuments.addDocument(newDocument);\n            }\n            this.buildState.delete(changedUri.toString());\n        }\n        // Set the state of all documents that should be relinked to `ComputedScopes` (if not already lower)\n        const allChangedUris = stream(changed).concat(deleted).map(uri => uri.toString()).toSet();\n        this.langiumDocuments.all\n            .filter(doc => !allChangedUris.has(doc.uri.toString()) && this.shouldRelink(doc, allChangedUris))\n            .forEach(doc => {\n                const linker = this.serviceRegistry.getServices(doc.uri).references.Linker;\n                linker.unlink(doc);\n                doc.state = Math.min(doc.state, DocumentState.ComputedScopes);\n                doc.diagnostics = undefined;\n            });\n        // Notify listeners of the update\n        await this.emitUpdate(changed, deleted);\n        // Only allow interrupting the execution after all state 
changes are done\n        await interruptAndCheck(cancelToken);\n\n        // Collect all documents that we should rebuild\n        const rebuildDocuments = this.langiumDocuments.all\n            .filter(doc =>\n                // This includes those that were reported as changed and those that we selected for relinking\n                doc.state < DocumentState.Linked\n                // This includes those for which a previous build has been cancelled\n                || !this.buildState.get(doc.uri.toString())?.completed\n            )\n            .toArray();\n        await this.buildDocuments(rebuildDocuments, this.updateBuildOptions, cancelToken);\n    }\n\n    protected async emitUpdate(changed: URI[], deleted: URI[]): Promise {\n        await Promise.all(this.updateListeners.map(listener => listener(changed, deleted)));\n    }\n\n    /**\n     * Check whether the given document should be relinked after changes were found in the given URIs.\n     */\n    protected shouldRelink(document: LangiumDocument, changedUris: Set): boolean {\n        // Relink documents with linking errors -- maybe those references can be resolved now\n        if (document.references.some(ref => ref.error !== undefined)) {\n            return true;\n        }\n        // Check whether the document is affected by any of the changed URIs\n        return this.indexManager.isAffected(document, changedUris);\n    }\n\n    onUpdate(callback: DocumentUpdateListener): Disposable {\n        this.updateListeners.push(callback);\n        return Disposable.create(() => {\n            const index = this.updateListeners.indexOf(callback);\n            if (index >= 0) {\n                this.updateListeners.splice(index, 1);\n            }\n        });\n    }\n\n    /**\n     * Build the given documents by stepping through all build phases. If a document's state indicates\n     * that a certain build phase is already done, the phase is skipped for that document.\n     */\n    protected async buildDocuments(documents: LangiumDocument[], options: BuildOptions, cancelToken: CancellationToken): Promise {\n        this.prepareBuild(documents, options);\n        // 0. Parse content\n        await this.runCancelable(documents, DocumentState.Parsed, cancelToken, doc =>\n            this.langiumDocumentFactory.update(doc, cancelToken)\n        );\n        // 1. Index content\n        await this.runCancelable(documents, DocumentState.IndexedContent, cancelToken, doc =>\n            this.indexManager.updateContent(doc, cancelToken)\n        );\n        // 2. Compute scopes\n        await this.runCancelable(documents, DocumentState.ComputedScopes, cancelToken, async doc => {\n            const scopeComputation = this.serviceRegistry.getServices(doc.uri).references.ScopeComputation;\n            doc.precomputedScopes = await scopeComputation.computeLocalScopes(doc, cancelToken);\n        });\n        // 3. Linking\n        await this.runCancelable(documents, DocumentState.Linked, cancelToken, doc => {\n            const linker = this.serviceRegistry.getServices(doc.uri).references.Linker;\n            return linker.link(doc, cancelToken);\n        });\n        // 4. Index references\n        await this.runCancelable(documents, DocumentState.IndexedReferences, cancelToken, doc =>\n            this.indexManager.updateReferences(doc, cancelToken)\n        );\n        // 5. 
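A sketch of the update flow above: register an update listener, then report changed and deleted URIs. The `builder` instance and the file URIs are placeholders, and `URI` is assumed to be imported from langium's URI utilities.

    declare const builder: DocumentBuilder;     // assumed to exist

    // Called before any rebuild starts; dispose() unregisters the listener again.
    const subscription = builder.onUpdate((changed, deleted) => {
        console.log(`changed: ${changed.length}, deleted: ${deleted.length}`);
    });

    // Affected documents are invalidated and rebuilt with `updateBuildOptions`.
    await builder.update(
        [URI.parse('file:///workspace/a.mydsl')],   // changed (placeholder URI)
        [URI.parse('file:///workspace/b.mydsl')]    // deleted (placeholder URI)
    );

    subscription.dispose();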
Validation\n        const toBeValidated = documents.filter(doc => this.shouldValidate(doc));\n        await this.runCancelable(toBeValidated, DocumentState.Validated, cancelToken, doc =>\n            this.validate(doc, cancelToken)\n        );\n\n        // If we've made it to this point without being cancelled, we can mark the build state as completed.\n        for (const doc of documents) {\n            const state = this.buildState.get(doc.uri.toString());\n            if (state) {\n                state.completed = true;\n            }\n        }\n    }\n\n    protected prepareBuild(documents: LangiumDocument[], options: BuildOptions): void {\n        for (const doc of documents) {\n            const key = doc.uri.toString();\n            const state = this.buildState.get(key);\n            // If the document has no previous build state, we set it. If it has one, but it's already marked\n            // as completed, we overwrite it. If the previous build was not completed, we keep its state\n            // and continue where it was cancelled.\n            if (!state || state.completed) {\n                this.buildState.set(key, {\n                    completed: false,\n                    options,\n                    result: state?.result\n                });\n            }\n        }\n    }\n\n    protected async runCancelable(documents: LangiumDocument[], targetState: DocumentState, cancelToken: CancellationToken,\n        callback: (document: LangiumDocument) => MaybePromise): Promise {\n        const filtered = documents.filter(e => e.state < targetState);\n        for (const document of filtered) {\n            await interruptAndCheck(cancelToken);\n            await callback(document);\n            document.state = targetState;\n        }\n        await this.notifyBuildPhase(filtered, targetState, cancelToken);\n        this.currentState = targetState;\n    }\n\n    onBuildPhase(targetState: DocumentState, callback: DocumentBuildListener): Disposable {\n        this.buildPhaseListeners.add(targetState, callback);\n        return Disposable.create(() => {\n            this.buildPhaseListeners.delete(targetState, callback);\n        });\n    }\n\n    waitUntil(state: DocumentState, cancelToken?: CancellationToken): Promise;\n    waitUntil(state: DocumentState, uri?: URI, cancelToken?: CancellationToken): Promise;\n    waitUntil(state: DocumentState, uriOrToken?: URI | CancellationToken, cancelToken?: CancellationToken): Promise {\n        let uri: URI | undefined = undefined;\n        if (uriOrToken && 'path' in uriOrToken) {\n            uri = uriOrToken;\n        } else {\n            cancelToken = uriOrToken;\n        }\n        cancelToken ??= CancellationToken.None;\n        if (uri) {\n            const document = this.langiumDocuments.getDocument(uri);\n            if (document && document.state > state) {\n                return Promise.resolve(uri);\n            }\n        }\n        if (this.currentState >= state) {\n            return Promise.resolve(undefined);\n        } else if (cancelToken.isCancellationRequested) {\n            return Promise.reject(OperationCancelled);\n        }\n        return new Promise((resolve, reject) => {\n            const buildDisposable = this.onBuildPhase(state, () => {\n                buildDisposable.dispose();\n                cancelDisposable.dispose();\n                if (uri) {\n                    const document = this.langiumDocuments.getDocument(uri);\n                    resolve(document?.uri);\n                } else {\n   
                 resolve(undefined);\n                }\n            });\n            const cancelDisposable = cancelToken!.onCancellationRequested(() => {\n                buildDisposable.dispose();\n                cancelDisposable.dispose();\n                reject(OperationCancelled);\n            });\n        });\n    }\n\n    protected async notifyBuildPhase(documents: LangiumDocument[], state: DocumentState, cancelToken: CancellationToken): Promise {\n        if (documents.length === 0) {\n            // Don't notify when no document has been processed\n            return;\n        }\n        const listeners = this.buildPhaseListeners.get(state);\n        for (const listener of listeners) {\n            await interruptAndCheck(cancelToken);\n            await listener(documents, cancelToken);\n        }\n    }\n\n    /**\n     * Determine whether the given document should be validated during a build. The default\n     * implementation checks the `validation` property of the build options. If it's set to `true`\n     * or a `ValidationOptions` object, the document is included in the validation phase.\n     */\n    protected shouldValidate(document: LangiumDocument): boolean {\n        return Boolean(this.getBuildOptions(document).validation);\n    }\n\n    /**\n     * Run validation checks on the given document and store the resulting diagnostics in the document.\n     * If the document already contains diagnostics, the new ones are added to the list.\n     */\n    protected async validate(document: LangiumDocument, cancelToken: CancellationToken): Promise {\n        const validator = this.serviceRegistry.getServices(document.uri).validation.DocumentValidator;\n        const validationSetting = this.getBuildOptions(document).validation;\n        const options = typeof validationSetting === 'object' ? validationSetting : undefined;\n        const diagnostics = await validator.validateDocument(document, options, cancelToken);\n        if (document.diagnostics) {\n            document.diagnostics.push(...diagnostics);\n        } else {\n            document.diagnostics = diagnostics;\n        }\n\n        // Store information about the executed validation in the build state\n        const state = this.buildState.get(document.uri.toString());\n        if (state) {\n            state.result ??= {};\n            const newCategories = options?.categories ?? ValidationCategory.all;\n            if (state.result.validationChecks) {\n                state.result.validationChecks.push(...newCategories);\n            } else {\n                state.result.validationChecks = [...newCategories];\n            }\n        }\n    }\n\n    protected getBuildOptions(document: LangiumDocument): BuildOptions {\n        return this.buildState.get(document.uri.toString())?.options ?? 
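A sketch of the two notification mechanisms implemented above, assuming an existing `builder`, a cancellation token, and imports of `DocumentState` and `CancellationToken` from 'langium':

    declare const builder: DocumentBuilder;           // assumed to exist
    declare const cancelToken: CancellationToken;     // assumed to exist

    // Notified whenever a batch of documents reaches the validation phase.
    const disposable = builder.onBuildPhase(DocumentState.Validated, (built, _token) => {
        console.log(`${built.length} documents validated`);
    });

    // Resolves once the workspace reaches the state (immediately if it already has),
    // and rejects with `OperationCancelled` if the token is cancelled first.
    await builder.waitUntil(DocumentState.IndexedReferences, cancelToken);

    disposable.dispose();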
{};\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription, AstReflection } from '../syntax-tree.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { ContextCache } from '../utils/caching.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport type { Stream } from '../utils/stream.js';\nimport { stream } from '../utils/stream.js';\nimport type { URI } from '../utils/uri-utils.js';\nimport { UriUtils } from '../utils/uri-utils.js';\nimport type { ReferenceDescription } from './ast-descriptions.js';\nimport type { LangiumDocument, LangiumDocuments } from './documents.js';\n\n/**\n * The index manager is responsible for keeping metadata about symbols and cross-references\n * in the workspace. It is used to look up symbols in the global scope, mostly during linking\n * and completion. This service is shared between all languages of a language server.\n */\nexport interface IndexManager {\n\n    /**\n     * Removes the specified document URI from the index.\n     * Necessary when documents are deleted and not referenceable anymore.\n     *\n     * @param uri The URI of the document for which index data shall be removed\n     */\n    remove(uri: URI): void;\n\n    /**\n     * Updates the information about the exportable content of a document inside the index.\n     *\n     * @param document Document to be updated\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    updateContent(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Updates the information about the cross-references of a document inside the index.\n     *\n     * @param document Document to be updated\n     * @param cancelToken Indicates when to cancel the current operation.\n     * @throws `OperationCanceled` if a user action occurs during execution\n     */\n    updateReferences(document: LangiumDocument, cancelToken?: CancellationToken): Promise;\n\n    /**\n     * Determine whether the given document could be affected by changes of the documents\n     * identified by the given URIs (second parameter). 
The document is typically regarded as\n     * affected if it contains a reference to any of the changed files.\n     *\n     * @param document Document to check whether it's affected\n     * @param changedUris URIs of the changed documents\n     */\n    isAffected(document: LangiumDocument, changedUris: Set): boolean;\n\n    /**\n     * Compute a list of all exported elements, optionally filtered using a type identifier and document URIs.\n     *\n     * @param nodeType The type to filter with, or `undefined` to return descriptions of all types.\n     * @param uris If specified, only returns elements from the given URIs.\n     * @returns a `Stream` containing all globally visible nodes (of a given type).\n     */\n    allElements(nodeType?: string, uris?: Set): Stream;\n\n    /**\n     * Returns all known references that are pointing to the given `targetNode`.\n     *\n     * @param targetNode the `AstNode` to look up references for\n     * @param astNodePath the path that points to the `targetNode` inside the document. See also `AstNodeLocator`\n     *\n     * @returns a `Stream` of references that are targeting the `targetNode`\n     */\n    findAllReferences(targetNode: AstNode, astNodePath: string): Stream;\n\n}\n\nexport class DefaultIndexManager implements IndexManager {\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly documents: LangiumDocuments;\n    protected readonly astReflection: AstReflection;\n\n    /**\n     * The symbol index stores all `AstNodeDescription` items exported by a document.\n     * The key used in this map is the string representation of the specific document URI.\n     */\n    protected readonly symbolIndex = new Map();\n    /**\n     * This is a cache for the `allElements()` method.\n     * It caches the descriptions from `symbolIndex` grouped by types.\n     */\n    protected readonly symbolByTypeIndex = new ContextCache();\n    /**\n     * This index keeps track of all `ReferenceDescription` items exported by a document.\n     * This is used to compute which elements are affected by a document change\n     * and for finding references to an AST node.\n     */\n    protected readonly referenceIndex = new Map();\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.documents = services.workspace.LangiumDocuments;\n        this.serviceRegistry = services.ServiceRegistry;\n        this.astReflection = services.AstReflection;\n    }\n\n    findAllReferences(targetNode: AstNode, astNodePath: string): Stream {\n        const targetDocUri = getDocument(targetNode).uri;\n        const result: ReferenceDescription[] = [];\n        this.referenceIndex.forEach(docRefs => {\n            docRefs.forEach(refDescr => {\n                if (UriUtils.equals(refDescr.targetUri, targetDocUri) && refDescr.targetPath === astNodePath) {\n                    result.push(refDescr);\n                }\n            });\n        });\n        return stream(result);\n    }\n\n    allElements(nodeType?: string, uris?: Set): Stream {\n        let documentUris = stream(this.symbolIndex.keys());\n        if (uris) {\n            documentUris = documentUris.filter(uri => !uris || uris.has(uri));\n        }\n        return documentUris\n            .map(uri => this.getFileDescriptions(uri, nodeType))\n            .flat();\n    }\n\n    protected getFileDescriptions(uri: string, nodeType?: string): AstNodeDescription[] {\n        if (!nodeType) {\n            return this.symbolIndex.get(uri) ?? 
[];\n        }\n        const descriptions = this.symbolByTypeIndex.get(uri, nodeType, () => {\n            const allFileDescriptions = this.symbolIndex.get(uri) ?? [];\n            return allFileDescriptions.filter(e => this.astReflection.isSubtype(e.type, nodeType));\n        });\n        return descriptions;\n    }\n\n    remove(uri: URI): void {\n        const uriString = uri.toString();\n        this.symbolIndex.delete(uriString);\n        this.symbolByTypeIndex.clear(uriString);\n        this.referenceIndex.delete(uriString);\n    }\n\n    async updateContent(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const services = this.serviceRegistry.getServices(document.uri);\n        const exports = await services.references.ScopeComputation.computeExports(document, cancelToken);\n        const uri = document.uri.toString();\n        this.symbolIndex.set(uri, exports);\n        this.symbolByTypeIndex.clear(uri);\n    }\n\n    async updateReferences(document: LangiumDocument, cancelToken = CancellationToken.None): Promise {\n        const services = this.serviceRegistry.getServices(document.uri);\n        const indexData = await services.workspace.ReferenceDescriptionProvider.createDescriptions(document, cancelToken);\n        this.referenceIndex.set(document.uri.toString(), indexData);\n    }\n\n    isAffected(document: LangiumDocument, changedUris: Set): boolean {\n        const references = this.referenceIndex.get(document.uri.toString());\n        if (!references) {\n            return false;\n        }\n        return references.some(ref => !ref.local && changedUris.has(ref.targetUri.toString()));\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { InitializeParams, InitializedParams } from 'vscode-languageserver-protocol';\nimport type { WorkspaceFolder } from 'vscode-languageserver-types';\nimport type { ServiceRegistry } from '../service-registry.js';\nimport type { LangiumSharedCoreServices } from '../services.js';\nimport { CancellationToken } from '../utils/cancellation.js';\nimport { Deferred, interruptAndCheck } from '../utils/promise-utils.js';\nimport { URI, UriUtils } from '../utils/uri-utils.js';\nimport type { BuildOptions, DocumentBuilder } from './document-builder.js';\nimport type { LangiumDocument, LangiumDocuments } from './documents.js';\nimport type { FileSystemNode, FileSystemProvider } from './file-system-provider.js';\nimport type { WorkspaceLock } from './workspace-lock.js';\n\n// export type WorkspaceFolder from 'vscode-languageserver-types' for convenience,\n//  is supposed to avoid confusion as 'WorkspaceFolder' might accidentally be imported via 'vscode-languageclient'\nexport type { WorkspaceFolder };\n\n/**\n * The workspace manager is responsible for finding source files in the workspace.\n * This service is shared between all languages of a language server.\n */\nexport interface WorkspaceManager {\n\n    /** The options used for the initial workspace build. 
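Typical queries against the index manager above, sketched with placeholder values: the type name 'Person', the target node, and the AST path are all hypothetical.

    declare const shared: LangiumSharedCoreServices;   // assumed to exist
    declare const personNode: AstNode;                 // hypothetical target node

    const indexManager = shared.workspace.IndexManager;

    // All exported descriptions of a given AST type across the whole workspace.
    const persons = indexManager.allElements('Person').toArray();

    // All known references pointing at the node identified by its document-local path.
    const refs = indexManager.findAllReferences(personNode, '/persons@2').toArray();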
*/\n    initialBuildOptions: BuildOptions | undefined;\n\n    /**\n     * A promise that resolves when the workspace manager is ready to be used.\n     * Use this to ensure that the workspace manager has finished its initialization.\n     */\n    readonly ready: Promise;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialize` request.\n     */\n    initialize(params: InitializeParams): void;\n\n    /**\n     * When used in a language server context, this method is called when the server receives\n     * the `initialized` notification.\n     */\n    initialized(params: InitializedParams): Promise;\n\n    /**\n     * Does the initial indexing of workspace folders.\n     * Collects information about exported and referenced AstNodes in\n     * each language file and stores it locally.\n     *\n     * @param folders The set of workspace folders to be indexed.\n     */\n    initializeWorkspace(folders: WorkspaceFolder[], cancelToken?: CancellationToken): Promise;\n\n}\n\nexport class DefaultWorkspaceManager implements WorkspaceManager {\n\n    initialBuildOptions: BuildOptions = {};\n\n    protected readonly serviceRegistry: ServiceRegistry;\n    protected readonly langiumDocuments: LangiumDocuments;\n    protected readonly documentBuilder: DocumentBuilder;\n    protected readonly fileSystemProvider: FileSystemProvider;\n    protected readonly mutex: WorkspaceLock;\n    protected readonly _ready = new Deferred();\n    protected folders?: WorkspaceFolder[];\n\n    constructor(services: LangiumSharedCoreServices) {\n        this.serviceRegistry = services.ServiceRegistry;\n        this.langiumDocuments = services.workspace.LangiumDocuments;\n        this.documentBuilder = services.workspace.DocumentBuilder;\n        this.fileSystemProvider = services.workspace.FileSystemProvider;\n        this.mutex = services.workspace.WorkspaceLock;\n    }\n\n    get ready(): Promise {\n        return this._ready.promise;\n    }\n\n    initialize(params: InitializeParams): void {\n        this.folders = params.workspaceFolders ?? undefined;\n    }\n\n    initialized(_params: InitializedParams): Promise {\n        // Initialize the workspace even if there are no workspace folders\n        // We still want to load additional documents (language library or similar) during initialization\n        return this.mutex.write(token => this.initializeWorkspace(this.folders ?? 
[], token));\n    }\n\n    async initializeWorkspace(folders: WorkspaceFolder[], cancelToken = CancellationToken.None): Promise {\n        const documents = await this.performStartup(folders);\n        // Only after creating all documents do we check whether we need to cancel the initialization\n        // The document builder will later pick up on all unprocessed documents\n        await interruptAndCheck(cancelToken);\n        await this.documentBuilder.build(documents, this.initialBuildOptions, cancelToken);\n    }\n\n    /**\n     * Performs the uninterruptable startup sequence of the workspace manager.\n     * This methods loads all documents in the workspace and other documents and returns them.\n     */\n    protected async performStartup(folders: WorkspaceFolder[]): Promise {\n        const fileExtensions = this.serviceRegistry.all.flatMap(e => e.LanguageMetaData.fileExtensions);\n        const documents: LangiumDocument[] = [];\n        const collector = (document: LangiumDocument) => {\n            documents.push(document);\n            if (!this.langiumDocuments.hasDocument(document.uri)) {\n                this.langiumDocuments.addDocument(document);\n            }\n        };\n        // Even though we don't await the initialization of the workspace manager,\n        // we can still assume that all library documents and file documents are loaded by the time we start building documents.\n        // The mutex prevents anything from performing a workspace build until we check the cancellation token\n        await this.loadAdditionalDocuments(folders, collector);\n        await Promise.all(\n            folders.map(wf => [wf, this.getRootFolder(wf)] as [WorkspaceFolder, URI])\n                .map(async entry => this.traverseFolder(...entry, fileExtensions, collector))\n        );\n        this._ready.resolve();\n        return documents;\n    }\n\n    /**\n     * Load all additional documents that shall be visible in the context of the given workspace\n     * folders and add them to the collector. This can be used to include built-in libraries of\n     * your language, which can be either loaded from provided files or constructed in memory.\n     */\n    protected loadAdditionalDocuments(_folders: WorkspaceFolder[], _collector: (document: LangiumDocument) => void): Promise {\n        return Promise.resolve();\n    }\n\n    /**\n     * Determine the root folder of the source documents in the given workspace folder.\n     * The default implementation returns the URI of the workspace folder, but you can override\n     * this to return a subfolder like `src` instead.\n     */\n    protected getRootFolder(workspaceFolder: WorkspaceFolder): URI {\n        return URI.parse(workspaceFolder.uri);\n    }\n\n    /**\n     * Traverse the file system folder identified by the given URI and its subfolders. 
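The protected hooks documented above are the intended customization points. A minimal sketch of a subclass that scans a `src` subfolder instead of the workspace root; `UriUtils.joinPath` is assumed to be available with this signature.

    // Assumes DefaultWorkspaceManager, WorkspaceFolder, URI and UriUtils are importable from 'langium'.
    class SrcFolderWorkspaceManager extends DefaultWorkspaceManager {

        // Index `<workspaceFolder>/src` rather than the workspace folder itself.
        protected override getRootFolder(workspaceFolder: WorkspaceFolder): URI {
            return UriUtils.joinPath(URI.parse(workspaceFolder.uri), 'src');
        }
    }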
All\n     * contained files that match the file extensions are added to the collector.\n     */\n    protected async traverseFolder(workspaceFolder: WorkspaceFolder, folderPath: URI, fileExtensions: string[], collector: (document: LangiumDocument) => void): Promise {\n        const content = await this.fileSystemProvider.readDirectory(folderPath);\n        await Promise.all(content.map(async entry => {\n            if (this.includeEntry(workspaceFolder, entry, fileExtensions)) {\n                if (entry.isDirectory) {\n                    await this.traverseFolder(workspaceFolder, entry.uri, fileExtensions, collector);\n                } else if (entry.isFile) {\n                    const document = await this.langiumDocuments.getOrCreateDocument(entry.uri);\n                    collector(document);\n                }\n            }\n        }));\n    }\n\n    /**\n     * Determine whether the given folder entry shall be included while indexing the workspace.\n     */\n    protected includeEntry(_workspaceFolder: WorkspaceFolder, entry: FileSystemNode, fileExtensions: string[]): boolean {\n        const name = UriUtils.basename(entry.uri);\n        if (name.startsWith('.')) {\n            return false;\n        }\n        if (entry.isDirectory) {\n            return name !== 'node_modules' && name !== 'out';\n        } else if (entry.isFile) {\n            const extname = UriUtils.extname(entry.uri);\n            return fileExtensions.includes(extname);\n        }\n        return false;\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { ILexingError, IMultiModeLexerDefinition, IToken, TokenType, TokenTypeDictionary, TokenVocabulary } from 'chevrotain';\nimport type { LangiumCoreServices } from '../services.js';\nimport { Lexer as ChevrotainLexer } from 'chevrotain';\n\nexport interface LexerResult {\n    /**\n     * A list of all tokens that were lexed from the input.\n     *\n     * Note that Langium requires the optional properties\n     * `startLine`, `startColumn`, `endOffset`, `endLine` and `endColumn` to be set on each token.\n     */\n    tokens: IToken[];\n    /**\n     * Contains hidden tokens, usually comments.\n     */\n    hidden: IToken[];\n    errors: ILexingError[];\n}\n\nexport interface Lexer {\n    readonly definition: TokenTypeDictionary;\n    tokenize(text: string): LexerResult;\n}\n\nexport class DefaultLexer implements Lexer {\n\n    protected chevrotainLexer: ChevrotainLexer;\n    protected tokenTypes: TokenTypeDictionary;\n\n    constructor(services: LangiumCoreServices) {\n        const tokens = services.parser.TokenBuilder.buildTokens(services.Grammar, {\n            caseInsensitive: services.LanguageMetaData.caseInsensitive\n        });\n        this.tokenTypes = this.toTokenTypeDictionary(tokens);\n        const lexerTokens = isTokenTypeDictionary(tokens) ? 
Object.values(tokens) : tokens;\n        this.chevrotainLexer = new ChevrotainLexer(lexerTokens, {\n            positionTracking: 'full'\n        });\n    }\n\n    get definition(): TokenTypeDictionary {\n        return this.tokenTypes;\n    }\n\n    tokenize(text: string): LexerResult {\n        const chevrotainResult = this.chevrotainLexer.tokenize(text);\n        return {\n            tokens: chevrotainResult.tokens,\n            errors: chevrotainResult.errors,\n            hidden: chevrotainResult.groups.hidden ?? []\n        };\n    }\n\n    protected toTokenTypeDictionary(buildTokens: TokenVocabulary): TokenTypeDictionary {\n        if (isTokenTypeDictionary(buildTokens)) return buildTokens;\n        const tokens = isIMultiModeLexerDefinition(buildTokens) ? Object.values(buildTokens.modes).flat() : buildTokens;\n        const res: TokenTypeDictionary = {};\n        tokens.forEach(token => res[token.name] = token);\n        return res;\n    }\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is TokenType array\n */\nexport function isTokenTypeArray(tokenVocabulary: TokenVocabulary): tokenVocabulary is TokenType[] {\n    return Array.isArray(tokenVocabulary) && (tokenVocabulary.length === 0 || 'name' in tokenVocabulary[0]);\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is IMultiModeLexerDefinition\n */\nexport function isIMultiModeLexerDefinition(tokenVocabulary: TokenVocabulary): tokenVocabulary is IMultiModeLexerDefinition {\n    return tokenVocabulary && 'modes' in tokenVocabulary && 'defaultMode' in tokenVocabulary;\n}\n\n/**\n * Returns a check whether the given TokenVocabulary is TokenTypeDictionary\n */\nexport function isTokenTypeDictionary(tokenVocabulary: TokenVocabulary): tokenVocabulary is TokenTypeDictionary {\n    return !isTokenTypeArray(tokenVocabulary) && !isIMultiModeLexerDefinition(tokenVocabulary);\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { Position, Range } from 'vscode-languageserver-types';\nimport type { CstNode } from '../syntax-tree.js';\nimport { NEWLINE_REGEXP, escapeRegExp } from '../utils/regexp-utils.js';\nimport { URI } from '../utils/uri-utils.js';\n\nexport interface JSDocComment extends JSDocValue {\n    readonly elements: JSDocElement[]\n    getTag(name: string): JSDocTag | undefined\n    getTags(name: string): JSDocTag[]\n}\n\nexport type JSDocElement = JSDocParagraph | JSDocTag;\n\nexport type JSDocInline = JSDocTag | JSDocLine;\n\nexport interface JSDocValue {\n    /**\n     * Represents the range that this JSDoc element occupies.\n     * If the JSDoc was parsed from a `CstNode`, the range will represent the location in the source document.\n     */\n    readonly range: Range\n    /**\n     * Renders this JSDoc element to a plain text representation.\n     */\n    toString(): string\n    /**\n     * Renders this JSDoc element to a markdown representation.\n     *\n     * @param options Rendering options to customize the markdown result.\n     */\n    toMarkdown(options?: JSDocRenderOptions): string\n}\n\nexport interface JSDocParagraph extends JSDocValue {\n    readonly inlines: JSDocInline[]\n}\n\nexport interface JSDocLine extends JSDocValue {\n    readonly text: string\n}\n\nexport interface JSDocTag 
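A sketch of tokenizing raw input with the lexer service above. The `services` object is assumed to be a per-language LangiumCoreServices instance exposing the lexer as `services.parser.Lexer`, and the sample input is arbitrary.

    declare const services: LangiumCoreServices;   // assumed to exist
    const lexer = services.parser.Lexer;           // service path assumed

    const result = lexer.tokenize('person Alice');
    if (result.errors.length > 0) {
        console.warn('lexing errors:', result.errors);
    }
    for (const token of result.tokens) {
        console.log(token.tokenType.name, token.image);
    }
    // Hidden tokens (typically comments) are reported separately.
    console.log(`${result.hidden.length} hidden tokens`);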
extends JSDocValue {\n    readonly name: string\n    readonly content: JSDocParagraph\n    readonly inline: boolean\n}\n\nexport interface JSDocParseOptions {\n    /**\n     * The start symbol of your comment format. Defaults to `/**`.\n     */\n    readonly start?: RegExp | string\n    /**\n     * The symbol that start a line of your comment format. Defaults to `*`.\n     */\n    readonly line?: RegExp | string\n    /**\n     * The end symbol of your comment format. Defaults to `*\\/`.\n     */\n    readonly end?: RegExp | string\n}\n\nexport interface JSDocRenderOptions {\n    /**\n     * Determines the style for rendering tags. Defaults to `italic`.\n     */\n    tag?: 'plain' | 'italic' | 'bold' | 'bold-italic'\n    /**\n     * Determines the default for rendering `@link` tags. Defaults to `plain`.\n     */\n    link?: 'code' | 'plain'\n    /**\n     * Custom tag rendering function.\n     * Return a markdown formatted tag or `undefined` to fall back to the default rendering.\n     */\n    renderTag?(tag: JSDocTag): string | undefined\n    /**\n     * Custom link rendering function. Accepts a link target and a display value for the link.\n     * Return a markdown formatted link with the format `[$display]($link)` or `undefined` if the link is not a valid target.\n     */\n    renderLink?(link: string, display: string): string | undefined\n}\n\n/**\n * Parses a JSDoc from a `CstNode` containing a comment.\n *\n * @param node A `CstNode` from a parsed Langium document.\n * @param options Parsing options specialized to your language. See {@link JSDocParseOptions}.\n */\nexport function parseJSDoc(node: CstNode, options?: JSDocParseOptions): JSDocComment;\n/**\n * Parses a JSDoc from a string comment.\n *\n * @param content A string containing the source of the JSDoc comment.\n * @param start The start position the comment occupies in the source document.\n * @param options Parsing options specialized to your language. 
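A sketch of customizing markdown rendering through the options above; the tag name that is filtered out is arbitrary.

    const renderOptions: JSDocRenderOptions = {
        tag: 'bold',
        link: 'code',
        // Render `@internal` tags as empty text; return undefined to fall back
        // to the default rendering for every other tag.
        renderTag: tag => tag.name === 'internal' ? '' : undefined
    };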
See {@link JSDocParseOptions}.\n */\nexport function parseJSDoc(content: string, start?: Position, options?: JSDocParseOptions): JSDocComment;\nexport function parseJSDoc(node: CstNode | string, start?: Position | JSDocParseOptions, options?: JSDocParseOptions): JSDocComment {\n    let opts: JSDocParseOptions | undefined;\n    let position: Position | undefined;\n    if (typeof node === 'string') {\n        position = start as Position | undefined;\n        opts = options as JSDocParseOptions | undefined;\n    } else {\n        position = node.range.start;\n        opts = start as JSDocParseOptions | undefined;\n    }\n    if (!position) {\n        position = Position.create(0, 0);\n    }\n\n    const lines = getLines(node);\n    const normalizedOptions = normalizeOptions(opts);\n\n    const tokens = tokenize({\n        lines,\n        position,\n        options: normalizedOptions\n    });\n\n    return parseJSDocComment({\n        index: 0,\n        tokens,\n        position\n    });\n}\n\nexport function isJSDoc(node: CstNode | string, options?: JSDocParseOptions): boolean {\n    const normalizedOptions = normalizeOptions(options);\n    const lines = getLines(node);\n    if (lines.length === 0) {\n        return false;\n    }\n\n    const first = lines[0];\n    const last = lines[lines.length - 1];\n    const firstRegex = normalizedOptions.start;\n    const lastRegex = normalizedOptions.end;\n\n    return Boolean(firstRegex?.exec(first)) && Boolean(lastRegex?.exec(last));\n}\n\nfunction getLines(node: CstNode | string): string[] {\n    let content = '';\n    if (typeof node === 'string') {\n        content = node;\n    } else {\n        content = node.text;\n    }\n    const lines = content.split(NEWLINE_REGEXP);\n    return lines;\n}\n\n// Tokenization\n\ninterface JSDocToken {\n    type: 'text' | 'tag' | 'inline-tag' | 'break'\n    content: string\n    range: Range\n}\n\nconst tagRegex = /\\s*(@([\\p{L}][\\p{L}\\p{N}]*)?)/uy;\nconst inlineTagRegex = /\\{(@[\\p{L}][\\p{L}\\p{N}]*)(\\s*)([^\\r\\n}]+)?\\}/gu;\n\nfunction tokenize(context: TokenizationContext): JSDocToken[] {\n    const tokens: JSDocToken[] = [];\n    let currentLine = context.position.line;\n    let currentCharacter = context.position.character;\n    for (let i = 0; i < context.lines.length; i++) {\n        const first = i === 0;\n        const last = i === context.lines.length - 1;\n        let line = context.lines[i];\n        let index = 0;\n\n        if (first && context.options.start) {\n            const match = context.options.start?.exec(line);\n            if (match) {\n                index = match.index + match[0].length;\n            }\n        } else {\n            const match = context.options.line?.exec(line);\n            if (match) {\n                index = match.index + match[0].length;\n            }\n        }\n        if (last) {\n            const match = context.options.end?.exec(line);\n            if (match) {\n                line = line.substring(0, match.index);\n            }\n        }\n\n        line = line.substring(0, lastCharacter(line));\n        const whitespaceEnd = skipWhitespace(line, index);\n\n        if (whitespaceEnd >= line.length) {\n            // Only create a break token when we already have previous tokens\n            if (tokens.length > 0) {\n                const position = Position.create(currentLine, currentCharacter);\n                tokens.push({\n                    type: 'break',\n                    content: '',\n                    range: Range.create(position, 
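A sketch of the string overload of `parseJSDoc` together with `isJSDoc`, both shown above; the functions are assumed to be importable from the langium entry point and the comment text is arbitrary.

    const comment = '/**\n * Creates a person.\n * @param name The display name.\n */';

    if (isJSDoc(comment)) {
        const doc = parseJSDoc(comment);
        const paramTag = doc.getTag('param');
        console.log(paramTag?.content.toString());   // e.g. "name The display name."
        console.log(doc.toString());                 // plain-text rendering of the whole comment
    }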
position)\n                });\n            }\n        } else {\n            tagRegex.lastIndex = index;\n            const tagMatch = tagRegex.exec(line);\n            if (tagMatch) {\n                const fullMatch = tagMatch[0];\n                const value = tagMatch[1];\n                const start = Position.create(currentLine, currentCharacter + index);\n                const end = Position.create(currentLine, currentCharacter + index + fullMatch.length);\n                tokens.push({\n                    type: 'tag',\n                    content: value,\n                    range: Range.create(start, end)\n                });\n                index += fullMatch.length;\n                index = skipWhitespace(line, index);\n            }\n\n            if (index < line.length) {\n                const rest = line.substring(index);\n                const inlineTagMatches = Array.from(rest.matchAll(inlineTagRegex));\n                tokens.push(...buildInlineTokens(inlineTagMatches, rest, currentLine, currentCharacter + index));\n            }\n        }\n\n        currentLine++;\n        currentCharacter = 0;\n    }\n\n    // Remove last break token if there is one\n    if (tokens.length > 0 && tokens[tokens.length - 1].type === 'break') {\n        return tokens.slice(0, -1);\n    }\n\n    return tokens;\n}\n\nfunction buildInlineTokens(tags: RegExpMatchArray[], line: string, lineIndex: number, characterIndex: number): JSDocToken[] {\n    const tokens: JSDocToken[] = [];\n\n    if (tags.length === 0) {\n        const start = Position.create(lineIndex, characterIndex);\n        const end = Position.create(lineIndex, characterIndex + line.length);\n        tokens.push({\n            type: 'text',\n            content: line,\n            range: Range.create(start, end)\n        });\n    } else {\n        let lastIndex = 0;\n        for (const match of tags) {\n            const matchIndex = match.index!;\n            const startContent = line.substring(lastIndex, matchIndex);\n            if (startContent.length > 0) {\n                tokens.push({\n                    type: 'text',\n                    content: line.substring(lastIndex, matchIndex),\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + characterIndex),\n                        Position.create(lineIndex, matchIndex + characterIndex)\n                    )\n                });\n            }\n            let offset = startContent.length + 1;\n            const tagName = match[1];\n            tokens.push({\n                type: 'inline-tag',\n                content: tagName,\n                range: Range.create(\n                    Position.create(lineIndex, lastIndex + offset + characterIndex),\n                    Position.create(lineIndex, lastIndex + offset + tagName.length + characterIndex)\n                )\n            });\n            offset += tagName.length;\n            if (match.length === 4) {\n                offset += match[2].length;\n                const value = match[3];\n                tokens.push({\n                    type: 'text',\n                    content: value,\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + offset + characterIndex),\n                        Position.create(lineIndex, lastIndex + offset + value.length + characterIndex)\n                    )\n                });\n            } else {\n                tokens.push({\n                    type: 'text',\n           
         content: '',\n                    range: Range.create(\n                        Position.create(lineIndex, lastIndex + offset + characterIndex),\n                        Position.create(lineIndex, lastIndex + offset + characterIndex)\n                    )\n                });\n            }\n            lastIndex = matchIndex + match[0].length;\n        }\n        const endContent = line.substring(lastIndex);\n        if (endContent.length > 0) {\n            tokens.push({\n                type: 'text',\n                content: endContent,\n                range: Range.create(\n                    Position.create(lineIndex, lastIndex + characterIndex),\n                    Position.create(lineIndex, lastIndex + characterIndex + endContent.length)\n                )\n            });\n        }\n    }\n\n    return tokens;\n}\n\nconst nonWhitespaceRegex = /\\S/;\nconst whitespaceEndRegex = /\\s*$/;\n\nfunction skipWhitespace(line: string, index: number): number {\n    const match = line.substring(index).match(nonWhitespaceRegex);\n    if (match) {\n        return index + match.index!;\n    } else {\n        return line.length;\n    }\n}\n\nfunction lastCharacter(line: string): number | undefined {\n    const match = line.match(whitespaceEndRegex);\n    if (match && typeof match.index === 'number') {\n        return match.index;\n    }\n    return undefined;\n}\n\n// Parsing\n\nfunction parseJSDocComment(context: ParseContext): JSDocComment {\n    const startPosition: Position = Position.create(context.position.line, context.position.character);\n    if (context.tokens.length === 0) {\n        return new JSDocCommentImpl([], Range.create(startPosition, startPosition));\n    }\n    const elements: JSDocElement[] = [];\n    while (context.index < context.tokens.length) {\n        const element = parseJSDocElement(context, elements[elements.length - 1]);\n        if (element) {\n            elements.push(element);\n        }\n    }\n    const start = elements[0]?.range.start ?? startPosition;\n    const end = elements[elements.length - 1]?.range.end ?? 
startPosition;\n    return new JSDocCommentImpl(elements, Range.create(start, end));\n}\n\nfunction parseJSDocElement(context: ParseContext, last?: JSDocElement): JSDocElement | undefined {\n    const next = context.tokens[context.index];\n    if (next.type === 'tag') {\n        return parseJSDocTag(context, false);\n    } else if (next.type === 'text' || next.type === 'inline-tag') {\n        return parseJSDocText(context);\n    } else {\n        appendEmptyLine(next, last);\n        context.index++;\n        return undefined;\n    }\n}\n\nfunction appendEmptyLine(token: JSDocToken, element?: JSDocElement): void {\n    if (element) {\n        const line = new JSDocLineImpl('', token.range);\n        if ('inlines' in element) {\n            element.inlines.push(line);\n        } else {\n            element.content.inlines.push(line);\n        }\n    }\n}\n\nfunction parseJSDocText(context: ParseContext): JSDocParagraph {\n    let token = context.tokens[context.index];\n    const firstToken = token;\n    let lastToken = token;\n    const lines: JSDocInline[] = [];\n    while (token && token.type !== 'break' && token.type !== 'tag') {\n        lines.push(parseJSDocInline(context));\n        lastToken = token;\n        token = context.tokens[context.index];\n    }\n    return new JSDocTextImpl(lines, Range.create(firstToken.range.start, lastToken.range.end));\n}\n\nfunction parseJSDocInline(context: ParseContext): JSDocInline {\n    const token = context.tokens[context.index];\n    if (token.type === 'inline-tag') {\n        return parseJSDocTag(context, true);\n    } else {\n        return parseJSDocLine(context);\n    }\n}\n\nfunction parseJSDocTag(context: ParseContext, inline: boolean): JSDocTag {\n    const tagToken = context.tokens[context.index++];\n    const name = tagToken.content.substring(1);\n    const nextToken = context.tokens[context.index];\n    if (nextToken?.type === 'text') {\n        if (inline) {\n            const docLine = parseJSDocLine(context);\n            return new JSDocTagImpl(\n                name,\n                new JSDocTextImpl([docLine], docLine.range),\n                inline,\n                Range.create(tagToken.range.start, docLine.range.end)\n            );\n        } else {\n            const textDoc = parseJSDocText(context);\n            return new JSDocTagImpl(\n                name,\n                textDoc,\n                inline,\n                Range.create(tagToken.range.start, textDoc.range.end)\n            );\n        }\n    } else {\n        const range = tagToken.range;\n        return new JSDocTagImpl(name, new JSDocTextImpl([], range), inline, range);\n    }\n}\n\nfunction parseJSDocLine(context: ParseContext): JSDocLine {\n    const token = context.tokens[context.index++];\n    return new JSDocLineImpl(token.content, token.range);\n}\n\ninterface NormalizedOptions {\n    start?: RegExp\n    end?: RegExp\n    line?: RegExp\n}\n\ninterface TokenizationContext {\n    position: Position\n    lines: string[]\n    options: NormalizedOptions\n}\n\ninterface ParseContext {\n    position: Position\n    tokens: JSDocToken[]\n    index: number\n}\n\nfunction normalizeOptions(options?: JSDocParseOptions): NormalizedOptions {\n    if (!options) {\n        return normalizeOptions({\n            start: '/**',\n            end: '*/',\n            line: '*'\n        });\n    }\n    const { start, end, line } = options;\n    return {\n        start: normalizeOption(start, true),\n        end: normalizeOption(end, false),\n        line: 
normalizeOption(line, true)\n    };\n}\n\nfunction normalizeOption(option: RegExp | string | undefined, start: boolean): RegExp | undefined {\n    if (typeof option === 'string' || typeof option === 'object') {\n        const escaped = typeof option === 'string' ? escapeRegExp(option) : option.source;\n        if (start) {\n            return new RegExp(`^\\\\s*${escaped}`);\n        } else {\n            return new RegExp(`\\\\s*${escaped}\\\\s*$`);\n        }\n    } else {\n        return option;\n    }\n}\n\nclass JSDocCommentImpl implements JSDocComment {\n\n    readonly elements: JSDocElement[];\n    readonly range: Range;\n\n    constructor(elements: JSDocElement[], range: Range) {\n        this.elements = elements;\n        this.range = range;\n    }\n\n    getTag(name: string): JSDocTag | undefined {\n        return this.getAllTags().find(e => e.name === name);\n    }\n\n    getTags(name: string): JSDocTag[] {\n        return this.getAllTags().filter(e => e.name === name);\n    }\n\n    private getAllTags(): JSDocTag[] {\n        return this.elements.filter((e): e is JSDocTag => 'name' in e);\n    }\n\n    toString(): string {\n        let value = '';\n        for (const element of this.elements) {\n            if (value.length === 0) {\n                value = element.toString();\n            } else {\n                const text = element.toString();\n                value += fillNewlines(value) + text;\n            }\n        }\n        return value.trim();\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        let value = '';\n        for (const element of this.elements) {\n            if (value.length === 0) {\n                value = element.toMarkdown(options);\n            } else {\n                const text = element.toMarkdown(options);\n                value += fillNewlines(value) + text;\n            }\n        }\n        return value.trim();\n    }\n}\n\nclass JSDocTagImpl implements JSDocTag {\n    name: string;\n    content: JSDocParagraph;\n    range: Range;\n    inline: boolean;\n\n    constructor(name: string, content: JSDocParagraph, inline: boolean, range: Range) {\n        this.name = name;\n        this.content = content;\n        this.inline = inline;\n        this.range = range;\n    }\n\n    toString(): string {\n        let text = `@${this.name}`;\n        const content = this.content.toString();\n        if (this.content.inlines.length === 1) {\n            text = `${text} ${content}`;\n        } else if (this.content.inlines.length > 1) {\n            text = `${text}\\n${content}`;\n        }\n        if (this.inline) {\n            // Inline tags are surrounded by curly braces\n            return `{${text}}`;\n        } else {\n            return text;\n        }\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        return options?.renderTag?.(this) ?? this.toMarkdownDefault(options);\n    }\n\n    private toMarkdownDefault(options?: JSDocRenderOptions): string {\n        const content = this.content.toMarkdown(options);\n        if (this.inline) {\n            const rendered = renderInlineTag(this.name, content, options ?? 
{});\n            if (typeof rendered === 'string') {\n                return rendered;\n            }\n        }\n        let marker = '';\n        if (options?.tag === 'italic' || options?.tag === undefined) {\n            marker = '*';\n        } else if (options?.tag === 'bold') {\n            marker = '**';\n        } else if (options?.tag === 'bold-italic') {\n            marker = '***';\n        }\n        let text = `${marker}@${this.name}${marker}`;\n        if (this.content.inlines.length === 1) {\n            text = `${text} \u2014 ${content}`;\n        } else if (this.content.inlines.length > 1) {\n            text = `${text}\\n${content}`;\n        }\n        if (this.inline) {\n            // Inline tags are surrounded by curly braces\n            return `{${text}}`;\n        } else {\n            return text;\n        }\n    }\n}\n\nfunction renderInlineTag(tag: string, content: string, options: JSDocRenderOptions): string | undefined {\n    if (tag === 'linkplain' || tag === 'linkcode' || tag === 'link') {\n        const index = content.indexOf(' ');\n        let display = content;\n        if (index > 0) {\n            const displayStart = skipWhitespace(content, index);\n            display = content.substring(displayStart);\n            content = content.substring(0, index);\n        }\n        if (tag === 'linkcode' || (tag === 'link' && options.link === 'code')) {\n            // Surround the display value in a markdown inline code block\n            display = `\\`${display}\\``;\n        }\n        const renderedLink = options.renderLink?.(content, display) ?? renderLinkDefault(content, display);\n        return renderedLink;\n    }\n    return undefined;\n}\n\nfunction renderLinkDefault(content: string, display: string): string {\n    try {\n        URI.parse(content, true);\n        return `[${display}](${content})`;\n    } catch {\n        return content;\n    }\n}\n\nclass JSDocTextImpl implements JSDocParagraph {\n    inlines: JSDocInline[];\n    range: Range;\n\n    constructor(lines: JSDocInline[], range: Range) {\n        this.inlines = lines;\n        this.range = range;\n    }\n\n    toString(): string {\n        let text = '';\n        for (let i = 0; i < this.inlines.length; i++) {\n            const inline = this.inlines[i];\n            const next = this.inlines[i + 1];\n            text += inline.toString();\n            if (next && next.range.start.line > inline.range.start.line) {\n                text += '\\n';\n            }\n        }\n        return text;\n    }\n\n    toMarkdown(options?: JSDocRenderOptions): string {\n        let text = '';\n        for (let i = 0; i < this.inlines.length; i++) {\n            const inline = this.inlines[i];\n            const next = this.inlines[i + 1];\n            text += inline.toMarkdown(options);\n            if (next && next.range.start.line > inline.range.start.line) {\n                text += '\\n';\n            }\n        }\n        return text;\n    }\n}\n\nclass JSDocLineImpl implements JSDocLine {\n    text: string;\n    range: Range;\n\n    constructor(text: string, range: Range) {\n        this.text = text;\n        this.range = range;\n    }\n\n    toString(): string {\n        return this.text;\n    }\n    toMarkdown(): string {\n        return this.text;\n    }\n\n}\n\nfunction fillNewlines(text: string): string {\n    if (text.endsWith('\\n')) {\n        return '\\n';\n    } else {\n        return '\\n\\n';\n    }\n}\n", 
"/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode, AstNodeDescription } from '../syntax-tree.js';\nimport type { IndexManager } from '../workspace/index-manager.js';\nimport type { CommentProvider } from './comment-provider.js';\nimport type { JSDocTag } from './jsdoc.js';\nimport { getDocument } from '../utils/ast-utils.js';\nimport { isJSDoc, parseJSDoc } from './jsdoc.js';\n\n/**\n * Provides documentation for AST nodes.\n */\nexport interface DocumentationProvider {\n    /**\n     * Returns a markdown documentation string for the specified AST node.\n     *\n     * The default implementation `JSDocDocumentationProvider` will inspect the comment associated with the specified node.\n     */\n    getDocumentation(node: AstNode): string | undefined;\n}\n\nexport class JSDocDocumentationProvider implements DocumentationProvider {\n\n    protected readonly indexManager: IndexManager;\n    protected readonly commentProvider: CommentProvider;\n\n    constructor(services: LangiumCoreServices) {\n        this.indexManager = services.shared.workspace.IndexManager;\n        this.commentProvider = services.documentation.CommentProvider;\n    }\n\n    getDocumentation(node: AstNode): string | undefined {\n        const comment = this.commentProvider.getComment(node);\n        if (comment && isJSDoc(comment)) {\n            const parsedJSDoc = parseJSDoc(comment);\n            return parsedJSDoc.toMarkdown({\n                renderLink: (link, display) => {\n                    return this.documentationLinkRenderer(node, link, display);\n                },\n                renderTag: (tag) => {\n                    return this.documentationTagRenderer(node, tag);\n                }\n            });\n        }\n        return undefined;\n    }\n\n    protected documentationLinkRenderer(node: AstNode, name: string, display: string): string | undefined {\n        const description = this.findNameInPrecomputedScopes(node, name) ?? 
this.findNameInGlobalScope(node, name);\n        if (description && description.nameSegment) {\n            const line = description.nameSegment.range.start.line + 1;\n            const character = description.nameSegment.range.start.character + 1;\n            const uri = description.documentUri.with({ fragment: `L${line},${character}` });\n            return `[${display}](${uri.toString()})`;\n        } else {\n            return undefined;\n        }\n    }\n\n    protected documentationTagRenderer(_node: AstNode, _tag: JSDocTag): string | undefined {\n        // Fall back to the default tag rendering\n        return undefined;\n    }\n\n    protected findNameInPrecomputedScopes(node: AstNode, name: string): AstNodeDescription | undefined {\n        const document = getDocument(node);\n        const precomputed = document.precomputedScopes;\n        if (!precomputed) {\n            return undefined;\n        }\n        let currentNode: AstNode | undefined = node;\n        do {\n            const allDescriptions = precomputed.get(currentNode);\n            const description = allDescriptions.find(e => e.name === name);\n            if (description) {\n                return description;\n            }\n            currentNode = currentNode.$container;\n        } while (currentNode);\n\n        return undefined;\n    }\n\n    protected findNameInGlobalScope(node: AstNode, name: string): AstNodeDescription | undefined {\n        const description = this.indexManager.allElements().find(e => e.name === name);\n        return description;\n    }\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { GrammarConfig } from '../languages/grammar-config.js';\nimport { isAstNodeWithComment } from '../serializer/json-serializer.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport { findCommentNode } from '../utils/cst-utils.js';\n\n/**\n * Provides comments for AST nodes.\n */\nexport interface CommentProvider {\n    /**\n     * Returns the comment associated with the specified AST node.\n     * @param node The AST node to get the comment for.\n     * @returns The comment associated with the specified AST node or `undefined` if there is no comment.\n     */\n    getComment(node: AstNode): string | undefined;\n}\n\nexport class DefaultCommentProvider implements CommentProvider {\n    protected readonly grammarConfig: () => GrammarConfig;\n    constructor(services: LangiumCoreServices) {\n        this.grammarConfig = () => services.parser.GrammarConfig;\n    }\n    getComment(node: AstNode): string | undefined {\n        if(isAstNodeWithComment(node)) {\n            return node.$comment;\n        }\n        return findCommentNode(node.$cstNode, this.grammarConfig().multilineCommentRules)?.text;\n    }\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n// eslint-disable-next-line no-restricted-imports\nexport * from 
'vscode-jsonrpc/lib/common/events.js';\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { CancellationToken } from '../utils/cancellation.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { AstNode } from '../syntax-tree.js';\nimport type { LangiumParser, ParseResult } from './langium-parser.js';\nimport type { Hydrator } from '../serializer/hydrator.js';\nimport type { Event } from '../utils/event.js';\nimport { Deferred, OperationCancelled } from '../utils/promise-utils.js';\nimport { Emitter } from '../utils/event.js';\n\n/**\n * Async parser that allows to cancel the current parsing process.\n * The sync parser implementation is blocking the event loop, which can become quite problematic for large files.\n *\n * Note that the default implementation is not actually async. It just wraps the sync parser in a promise.\n * A real implementation would create worker threads or web workers to offload the parsing work.\n */\nexport interface AsyncParser {\n    parse(text: string, cancelToken: CancellationToken): Promise>;\n}\n\n/**\n * Default implementation of the async parser. This implementation only wraps the sync parser in a promise.\n *\n * A real implementation would create worker threads or web workers to offload the parsing work.\n */\nexport class DefaultAsyncParser implements AsyncParser {\n\n    protected readonly syncParser: LangiumParser;\n\n    constructor(services: LangiumCoreServices) {\n        this.syncParser = services.parser.LangiumParser;\n    }\n\n    parse(text: string): Promise> {\n        return Promise.resolve(this.syncParser.parse(text));\n    }\n}\n\nexport abstract class AbstractThreadedAsyncParser implements AsyncParser {\n\n    /**\n     * The thread count determines how many threads are used to parse files in parallel.\n     * The default value is 8. 
Decreasing this value increases startup performance, but decreases parallel parsing performance.\n     */\n    protected threadCount = 8;\n    /**\n     * The termination delay determines how long the parser waits for a thread to finish after a cancellation request.\n     * The default value is 200(ms).\n     */\n    protected terminationDelay = 200;\n    protected workerPool: ParserWorker[] = [];\n    protected queue: Array> = [];\n\n    protected readonly hydrator: Hydrator;\n\n    constructor(services: LangiumCoreServices) {\n        this.hydrator = services.serializer.Hydrator;\n    }\n\n    protected initializeWorkers(): void {\n        while (this.workerPool.length < this.threadCount) {\n            const worker = this.createWorker();\n            worker.onReady(() => {\n                if (this.queue.length > 0) {\n                    const deferred = this.queue.shift();\n                    if (deferred) {\n                        worker.lock();\n                        deferred.resolve(worker);\n                    }\n                }\n            });\n            this.workerPool.push(worker);\n        }\n    }\n\n    async parse(text: string, cancelToken: CancellationToken): Promise> {\n        const worker = await this.acquireParserWorker(cancelToken);\n        const deferred = new Deferred>();\n        let timeout: NodeJS.Timeout | undefined;\n        // If the cancellation token is requested, we wait for a certain time before terminating the worker.\n        // Since the cancellation token lives longer than the parsing process, we need to dispose the event listener.\n        // Otherwise, we might accidentally terminate the worker after the parsing process has finished.\n        const cancellation = cancelToken.onCancellationRequested(() => {\n            timeout = setTimeout(() => {\n                this.terminateWorker(worker);\n            }, this.terminationDelay);\n        });\n        worker.parse(text).then(result => {\n            const hydrated = this.hydrator.hydrate(result);\n            deferred.resolve(hydrated);\n        }).catch(err => {\n            deferred.reject(err);\n        }).finally(() => {\n            cancellation.dispose();\n            clearTimeout(timeout);\n        });\n        return deferred.promise;\n    }\n\n    protected terminateWorker(worker: ParserWorker): void {\n        worker.terminate();\n        const index = this.workerPool.indexOf(worker);\n        if (index >= 0) {\n            this.workerPool.splice(index, 1);\n        }\n    }\n\n    protected async acquireParserWorker(cancelToken: CancellationToken): Promise {\n        this.initializeWorkers();\n        for (const worker of this.workerPool) {\n            if (worker.ready) {\n                worker.lock();\n                return worker;\n            }\n        }\n        const deferred = new Deferred();\n        cancelToken.onCancellationRequested(() => {\n            const index = this.queue.indexOf(deferred);\n            if (index >= 0) {\n                this.queue.splice(index, 1);\n            }\n            deferred.reject(OperationCancelled);\n        });\n        this.queue.push(deferred);\n        return deferred.promise;\n    }\n\n    protected abstract createWorker(): ParserWorker;\n}\n\nexport type WorkerMessagePost = (message: unknown) => void;\nexport type WorkerMessageCallback = (cb: (message: unknown) => void) => void;\n\nexport class ParserWorker {\n\n    protected readonly sendMessage: WorkerMessagePost;\n    protected readonly _terminate: () => void;\n    
protected readonly onReadyEmitter = new Emitter();\n\n    protected deferred = new Deferred();\n    protected _ready = true;\n    protected _parsing = false;\n\n    get ready(): boolean {\n        return this._ready;\n    }\n\n    get onReady(): Event {\n        return this.onReadyEmitter.event;\n    }\n\n    constructor(sendMessage: WorkerMessagePost, onMessage: WorkerMessageCallback, onError: WorkerMessageCallback, terminate: () => void) {\n        this.sendMessage = sendMessage;\n        this._terminate = terminate;\n        onMessage(result => {\n            const parseResult = result as ParseResult;\n            this.deferred.resolve(parseResult);\n            this.unlock();\n        });\n        onError(error => {\n            this.deferred.reject(error);\n            this.unlock();\n        });\n    }\n\n    terminate(): void {\n        this.deferred.reject(OperationCancelled);\n        this._terminate();\n    }\n\n    lock(): void {\n        this._ready = false;\n    }\n\n    unlock(): void {\n        this._parsing = false;\n        this._ready = true;\n        this.onReadyEmitter.fire();\n    }\n\n    parse(text: string): Promise {\n        if (this._parsing) {\n            throw new Error('Parser worker is busy');\n        }\n        this._parsing = true;\n        this.deferred = new Deferred();\n        this.sendMessage(text);\n        return this.deferred.promise;\n    }\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { CancellationToken, CancellationTokenSource } from '../utils/cancellation.js';\nimport { Deferred, isOperationCancelled, type MaybePromise } from '../utils/promise-utils.js';\n\n/**\n * Utility service to execute mutually exclusive actions.\n */\nexport interface WorkspaceLock {\n    /**\n     * Performs a single async action, like initializing the workspace or processing document changes.\n     * Only one action will be executed at a time.\n     *\n     * When another action is queued up, the token provided for the action will be cancelled.\n     * Assuming the action makes use of this token, the next action only has to wait for the current action to finish cancellation.\n     */\n    write(action: (token: CancellationToken) => MaybePromise): Promise;\n\n    /**\n     * Performs a single action, like computing completion results or providing workspace symbols.\n     * Read actions will only be executed after all write actions have finished. They will be executed in parallel if possible.\n     *\n     * If a write action is currently running, the read action will be queued up and executed afterwards.\n     * If a new write action is queued up while a read action is waiting, the write action will receive priority and will be handled before the read action.\n     *\n     * Note that read actions are not allowed to modify anything in the workspace. Please use {@link write} instead.\n     */\n    read(action: () => MaybePromise): Promise;\n\n    /**\n     * Cancels the last queued write action. 
All previous write actions already have been cancelled.\n     */\n    cancelWrite(): void;\n}\n\ntype LockAction = (token: CancellationToken) => MaybePromise;\n\ninterface LockEntry {\n    action: LockAction;\n    deferred: Deferred;\n    cancellationToken: CancellationToken;\n}\n\nexport class DefaultWorkspaceLock implements WorkspaceLock {\n\n    private previousTokenSource = new CancellationTokenSource();\n    private writeQueue: LockEntry[] = [];\n    private readQueue: LockEntry[] = [];\n    private done = true;\n\n    write(action: (token: CancellationToken) => MaybePromise): Promise {\n        this.cancelWrite();\n        const tokenSource = new CancellationTokenSource();\n        this.previousTokenSource = tokenSource;\n        return this.enqueue(this.writeQueue, action, tokenSource.token);\n    }\n\n    read(action: () => MaybePromise): Promise {\n        return this.enqueue(this.readQueue, action);\n    }\n\n    private enqueue(queue: LockEntry[], action: LockAction, cancellationToken?: CancellationToken): Promise {\n        const deferred = new Deferred();\n        const entry: LockEntry = {\n            action,\n            deferred,\n            cancellationToken: cancellationToken ?? CancellationToken.None\n        };\n        queue.push(entry);\n        this.performNextOperation();\n        return deferred.promise as Promise;\n    }\n\n    private async performNextOperation(): Promise {\n        if (!this.done) {\n            return;\n        }\n        const entries: LockEntry[] = [];\n        if (this.writeQueue.length > 0) {\n            // Just perform the next write action\n            entries.push(this.writeQueue.shift()!);\n        } else if (this.readQueue.length > 0) {\n            // Empty the read queue and perform all actions in parallel\n            entries.push(...this.readQueue.splice(0, this.readQueue.length));\n        } else {\n            return;\n        }\n        this.done = false;\n        await Promise.all(entries.map(async ({ action, deferred, cancellationToken }) => {\n            try {\n                // Move the execution of the action to the next event loop tick via `Promise.resolve()`\n                const result = await Promise.resolve().then(() => action(cancellationToken));\n                deferred.resolve(result);\n            } catch (err) {\n                if (isOperationCancelled(err)) {\n                    // If the operation was cancelled, we don't want to reject the promise\n                    deferred.resolve(undefined);\n                } else {\n                    deferred.reject(err);\n                }\n            }\n        }));\n        this.done = true;\n        this.performNextOperation();\n    }\n\n    cancelWrite(): void {\n        this.previousTokenSource.cancel();\n    }\n}\n", "/******************************************************************************\n * Copyright 2024 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\nimport type { TokenType } from 'chevrotain';\nimport { CompositeCstNodeImpl, LeafCstNodeImpl, RootCstNodeImpl } from '../parser/cst-node-builder.js';\nimport { isAbstractElement, type AbstractElement, type Grammar } from '../languages/generated/ast.js';\nimport type { Linker } from '../references/linker.js';\nimport type { Lexer } from 
'../parser/lexer.js';\nimport type { LangiumCoreServices } from '../services.js';\nimport type { ParseResult } from '../parser/langium-parser.js';\nimport type { Reference, AstNode, CstNode, LeafCstNode, GenericAstNode, Mutable, RootCstNode } from '../syntax-tree.js';\nimport { isRootCstNode, isCompositeCstNode, isLeafCstNode, isAstNode, isReference } from '../syntax-tree.js';\nimport { streamAst } from '../utils/ast-utils.js';\nimport { BiMap } from '../utils/collections.js';\nimport { streamCst } from '../utils/cst-utils.js';\n\n/**\n * The hydrator service is responsible for allowing AST parse results to be sent across worker threads.\n */\nexport interface Hydrator {\n    /**\n     * Converts a parse result to a plain object. The resulting object can be sent across worker threads.\n     */\n    dehydrate(result: ParseResult): ParseResult;\n    /**\n     * Converts a plain object to a parse result. The included AST node can then be used in the main thread.\n     * Calling this method on objects that have not been dehydrated first will result in undefined behavior.\n     */\n    hydrate(result: ParseResult): ParseResult;\n}\n\nexport interface DehydrateContext {\n    astNodes: Map;\n    cstNodes: Map;\n}\n\nexport interface HydrateContext {\n    astNodes: Map;\n    cstNodes: Map;\n}\n\nexport class DefaultHydrator implements Hydrator {\n\n    protected readonly grammar: Grammar;\n    protected readonly lexer: Lexer;\n    protected readonly linker: Linker;\n\n    protected readonly grammarElementIdMap = new BiMap();\n    protected readonly tokenTypeIdMap = new BiMap();\n\n    constructor(services: LangiumCoreServices) {\n        this.grammar = services.Grammar;\n        this.lexer = services.parser.Lexer;\n        this.linker = services.references.Linker;\n    }\n\n    dehydrate(result: ParseResult): ParseResult {\n        return {\n            // We need to create shallow copies of the errors\n            // The original errors inherit from the `Error` class, which is not transferable across worker threads\n            lexerErrors: result.lexerErrors.map(e => ({ ...e })),\n            parserErrors: result.parserErrors.map(e => ({ ...e })),\n            value: this.dehydrateAstNode(result.value, this.createDehyrationContext(result.value))\n        };\n    }\n\n    protected createDehyrationContext(node: AstNode): DehydrateContext {\n        const astNodes = new Map();\n        const cstNodes = new Map();\n        for (const astNode of streamAst(node)) {\n            astNodes.set(astNode, {});\n        }\n        if (node.$cstNode) {\n            for (const cstNode of streamCst(node.$cstNode)) {\n                cstNodes.set(cstNode, {});\n            }\n        }\n        return {\n            astNodes,\n            cstNodes\n        };\n    }\n\n    protected dehydrateAstNode(node: AstNode, context: DehydrateContext): object {\n        const obj = context.astNodes.get(node) as Record;\n        obj.$type = node.$type;\n        obj.$containerIndex = node.$containerIndex;\n        obj.$containerProperty = node.$containerProperty;\n        if (node.$cstNode !== undefined) {\n            obj.$cstNode = this.dehydrateCstNode(node.$cstNode, context);\n        }\n        for (const [name, value] of Object.entries(node)) {\n            if (name.startsWith('$')) {\n                continue;\n            }\n            if (Array.isArray(value)) {\n                const arr: any[] = [];\n                obj[name] = arr;\n                for (const item of value) {\n                    if 
(isAstNode(item)) {\n                        arr.push(this.dehydrateAstNode(item, context));\n                    } else if (isReference(item)) {\n                        arr.push(this.dehydrateReference(item, context));\n                    } else {\n                        arr.push(item);\n                    }\n                }\n            } else if (isAstNode(value)) {\n                obj[name] = this.dehydrateAstNode(value, context);\n            } else if (isReference(value)) {\n                obj[name] = this.dehydrateReference(value, context);\n            } else if (value !== undefined) {\n                obj[name] = value;\n            }\n        }\n        return obj;\n    }\n\n    protected dehydrateReference(reference: Reference, context: DehydrateContext): any {\n        const obj: Record = {};\n        obj.$refText = reference.$refText;\n        if (reference.$refNode) {\n            obj.$refNode = context.cstNodes.get(reference.$refNode);\n        }\n        return obj;\n    }\n\n    protected dehydrateCstNode(node: CstNode, context: DehydrateContext): any {\n        const cstNode = context.cstNodes.get(node) as Record;\n        if (isRootCstNode(node)) {\n            cstNode.fullText = node.fullText;\n        } else {\n            // Note: This returns undefined for hidden nodes (i.e. comments)\n            cstNode.grammarSource = this.getGrammarElementId(node.grammarSource);\n        }\n        cstNode.hidden = node.hidden;\n        cstNode.astNode = context.astNodes.get(node.astNode);\n        if (isCompositeCstNode(node)) {\n            cstNode.content = node.content.map(child => this.dehydrateCstNode(child, context));\n        } else if (isLeafCstNode(node)) {\n            cstNode.tokenType = node.tokenType.name;\n            cstNode.offset = node.offset;\n            cstNode.length = node.length;\n            cstNode.startLine = node.range.start.line;\n            cstNode.startColumn = node.range.start.character;\n            cstNode.endLine = node.range.end.line;\n            cstNode.endColumn = node.range.end.character;\n        }\n        return cstNode;\n    }\n\n    hydrate(result: ParseResult): ParseResult {\n        const node = result.value;\n        const context = this.createHydrationContext(node);\n        if ('$cstNode' in node) {\n            this.hydrateCstNode(node.$cstNode, context);\n        }\n        return {\n            lexerErrors: result.lexerErrors,\n            parserErrors: result.parserErrors,\n            value: this.hydrateAstNode(node, context) as T\n        };\n    }\n\n    protected createHydrationContext(node: any): HydrateContext {\n        const astNodes = new Map();\n        const cstNodes = new Map();\n        for (const astNode of streamAst(node)) {\n            astNodes.set(astNode, {} as AstNode);\n        }\n        let root: RootCstNode;\n        if (node.$cstNode) {\n            for (const cstNode of streamCst(node.$cstNode)) {\n                let cst: Mutable | undefined;\n                if ('fullText' in cstNode) {\n                    cst = new RootCstNodeImpl(cstNode.fullText as string);\n                    root = cst as RootCstNode;\n                } else if ('content' in cstNode) {\n                    cst = new CompositeCstNodeImpl();\n                } else if ('tokenType' in cstNode) {\n                    cst = this.hydrateCstLeafNode(cstNode);\n                }\n                if (cst) {\n                    cstNodes.set(cstNode, cst);\n                    cst.root = root!;\n                }\n            
}\n        }\n        return {\n            astNodes,\n            cstNodes\n        };\n    }\n\n    protected hydrateAstNode(node: any, context: HydrateContext): AstNode {\n        const astNode = context.astNodes.get(node) as Mutable;\n        astNode.$type = node.$type;\n        astNode.$containerIndex = node.$containerIndex;\n        astNode.$containerProperty = node.$containerProperty;\n        if (node.$cstNode) {\n            astNode.$cstNode = context.cstNodes.get(node.$cstNode);\n        }\n        for (const [name, value] of Object.entries(node)) {\n            if (name.startsWith('$')) {\n                continue;\n            }\n            if (Array.isArray(value)) {\n                const arr: unknown[] = [];\n                astNode[name] = arr;\n                for (const item of value) {\n                    if (isAstNode(item)) {\n                        arr.push(this.setParent(this.hydrateAstNode(item, context), astNode));\n                    } else if (isReference(item)) {\n                        arr.push(this.hydrateReference(item, astNode, name, context));\n                    } else {\n                        arr.push(item);\n                    }\n                }\n            } else if (isAstNode(value)) {\n                astNode[name] = this.setParent(this.hydrateAstNode(value, context), astNode);\n            } else if (isReference(value)) {\n                astNode[name] = this.hydrateReference(value, astNode, name, context);\n            } else if (value !== undefined) {\n                astNode[name] = value;\n            }\n        }\n        return astNode;\n    }\n\n    protected setParent(node: any, parent: any): any {\n        node.$container = parent as AstNode;\n        return node;\n    }\n\n    protected hydrateReference(reference: any, node: AstNode, name: string, context: HydrateContext): Reference {\n        return this.linker.buildReference(node, name, context.cstNodes.get(reference.$refNode)!, reference.$refText);\n    }\n\n    protected hydrateCstNode(cstNode: any, context: HydrateContext, num = 0): CstNode {\n        const cstNodeObj = context.cstNodes.get(cstNode) as Mutable;\n        if (typeof cstNode.grammarSource === 'number') {\n            cstNodeObj.grammarSource = this.getGrammarElement(cstNode.grammarSource);\n        }\n        cstNodeObj.astNode = context.astNodes.get(cstNode.astNode)!;\n        if (isCompositeCstNode(cstNodeObj)) {\n            for (const child of cstNode.content) {\n                const hydrated = this.hydrateCstNode(child, context, num++);\n                cstNodeObj.content.push(hydrated);\n            }\n        }\n        return cstNodeObj;\n    }\n\n    protected hydrateCstLeafNode(cstNode: any): LeafCstNode {\n        const tokenType = this.getTokenType(cstNode.tokenType);\n        const offset = cstNode.offset;\n        const length = cstNode.length;\n        const startLine = cstNode.startLine;\n        const startColumn = cstNode.startColumn;\n        const endLine = cstNode.endLine;\n        const endColumn = cstNode.endColumn;\n        const hidden = cstNode.hidden;\n        const node = new LeafCstNodeImpl(\n            offset,\n            length,\n            {\n                start: {\n                    line: startLine,\n                    character: startColumn\n                },\n                end: {\n                    line: endLine,\n                    character: endColumn\n                }\n            },\n            tokenType,\n            hidden\n        );\n        return 
node;\n    }\n\n    protected getTokenType(name: string): TokenType {\n        return this.lexer.definition[name];\n    }\n\n    protected getGrammarElementId(node: AbstractElement): number | undefined {\n        if (this.grammarElementIdMap.size === 0) {\n            this.createGrammarElementIdMap();\n        }\n        return this.grammarElementIdMap.get(node);\n    }\n\n    protected getGrammarElement(id: number): AbstractElement {\n        if (this.grammarElementIdMap.size === 0) {\n            this.createGrammarElementIdMap();\n        }\n        const element = this.grammarElementIdMap.getKey(id);\n        if (element) {\n            return element;\n        } else {\n            throw new Error('Invalid grammar element id: ' + id);\n        }\n    }\n\n    protected createGrammarElementIdMap(): void {\n        let id = 0;\n        for (const element of streamAst(this.grammar)) {\n            if (isAbstractElement(element)) {\n                this.grammarElementIdMap.set(element, id++);\n            }\n        }\n    }\n\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n******************************************************************************/\n\nimport type { Module } from './dependency-injection.js';\nimport type { LangiumDefaultCoreServices, LangiumDefaultSharedCoreServices, LangiumCoreServices, LangiumSharedCoreServices } from './services.js';\nimport type { FileSystemProvider } from './workspace/file-system-provider.js';\nimport { createGrammarConfig } from './languages/grammar-config.js';\nimport { createCompletionParser } from './parser/completion-parser-builder.js';\nimport { createLangiumParser } from './parser/langium-parser-builder.js';\nimport { DefaultTokenBuilder } from './parser/token-builder.js';\nimport { DefaultValueConverter } from './parser/value-converter.js';\nimport { DefaultLinker } from './references/linker.js';\nimport { DefaultNameProvider } from './references/name-provider.js';\nimport { DefaultReferences } from './references/references.js';\nimport { DefaultScopeComputation } from './references/scope-computation.js';\nimport { DefaultScopeProvider } from './references/scope-provider.js';\nimport { DefaultJsonSerializer } from './serializer/json-serializer.js';\nimport { DefaultServiceRegistry } from './service-registry.js';\nimport { DefaultDocumentValidator } from './validation/document-validator.js';\nimport { ValidationRegistry } from './validation/validation-registry.js';\nimport { DefaultAstNodeDescriptionProvider, DefaultReferenceDescriptionProvider } from './workspace/ast-descriptions.js';\nimport { DefaultAstNodeLocator } from './workspace/ast-node-locator.js';\nimport { DefaultConfigurationProvider } from './workspace/configuration.js';\nimport { DefaultDocumentBuilder } from './workspace/document-builder.js';\nimport { DefaultLangiumDocumentFactory, DefaultLangiumDocuments } from './workspace/documents.js';\nimport { DefaultIndexManager } from './workspace/index-manager.js';\nimport { DefaultWorkspaceManager } from './workspace/workspace-manager.js';\nimport { DefaultLexer } from './parser/lexer.js';\nimport { JSDocDocumentationProvider } from './documentation/documentation-provider.js';\nimport { DefaultCommentProvider } from './documentation/comment-provider.js';\nimport { LangiumParserErrorMessageProvider } from 
'./parser/langium-parser.js';\nimport { DefaultAsyncParser } from './parser/async-parser.js';\nimport { DefaultWorkspaceLock } from './workspace/workspace-lock.js';\nimport { DefaultHydrator } from './serializer/hydrator.js';\n\n/**\n * Context required for creating the default language-specific dependency injection module.\n */\nexport interface DefaultCoreModuleContext {\n    shared: LangiumSharedCoreServices;\n}\n\n/**\n * Creates a dependency injection module configuring the default core services.\n * This is a set of services that are dedicated to a specific language.\n */\nexport function createDefaultCoreModule(context: DefaultCoreModuleContext): Module {\n    return {\n        documentation: {\n            CommentProvider: (services) => new DefaultCommentProvider(services),\n            DocumentationProvider: (services) => new JSDocDocumentationProvider(services)\n        },\n        parser: {\n            AsyncParser: (services) => new DefaultAsyncParser(services),\n            GrammarConfig: (services) => createGrammarConfig(services),\n            LangiumParser: (services) => createLangiumParser(services),\n            CompletionParser: (services) => createCompletionParser(services),\n            ValueConverter: () => new DefaultValueConverter(),\n            TokenBuilder: () => new DefaultTokenBuilder(),\n            Lexer: (services) => new DefaultLexer(services),\n            ParserErrorMessageProvider: () => new LangiumParserErrorMessageProvider()\n        },\n        workspace: {\n            AstNodeLocator: () => new DefaultAstNodeLocator(),\n            AstNodeDescriptionProvider: (services) => new DefaultAstNodeDescriptionProvider(services),\n            ReferenceDescriptionProvider: (services) => new DefaultReferenceDescriptionProvider(services)\n        },\n        references: {\n            Linker: (services) => new DefaultLinker(services),\n            NameProvider: () => new DefaultNameProvider(),\n            ScopeProvider: (services) => new DefaultScopeProvider(services),\n            ScopeComputation: (services) => new DefaultScopeComputation(services),\n            References: (services) => new DefaultReferences(services)\n        },\n        serializer: {\n            Hydrator: (services) => new DefaultHydrator(services),\n            JsonSerializer: (services) => new DefaultJsonSerializer(services)\n        },\n        validation: {\n            DocumentValidator: (services) => new DefaultDocumentValidator(services),\n            ValidationRegistry: (services) => new ValidationRegistry(services)\n        },\n        shared: () => context.shared\n    };\n}\n\n/**\n * Context required for creating the default shared dependency injection module.\n */\nexport interface DefaultSharedCoreModuleContext {\n    /**\n     * Factory function to create a {@link FileSystemProvider}.\n     *\n     * Langium exposes an `EmptyFileSystem` and `NodeFileSystem`, exported through `langium/node`.\n     * When running Langium as part of a vscode language server or a Node.js app, using the `NodeFileSystem` is recommended,\n     * the `EmptyFileSystem` in every other use case.\n     */\n    fileSystemProvider: (services: LangiumSharedCoreServices) => FileSystemProvider;\n}\n\n/**\n * Creates a dependency injection module configuring the default shared core services.\n * This is the set of services that are shared between multiple languages.\n */\nexport function createDefaultSharedCoreModule(context: DefaultSharedCoreModuleContext): Module {\n    return {\n        ServiceRegistry: () 
=> new DefaultServiceRegistry(),\n        workspace: {\n            LangiumDocuments: (services) => new DefaultLangiumDocuments(services),\n            LangiumDocumentFactory: (services) => new DefaultLangiumDocumentFactory(services),\n            DocumentBuilder: (services) => new DefaultDocumentBuilder(services),\n            IndexManager: (services) => new DefaultIndexManager(services),\n            WorkspaceManager: (services) => new DefaultWorkspaceManager(services),\n            FileSystemProvider: (services) => context.fileSystemProvider(services),\n            WorkspaceLock: () => new DefaultWorkspaceLock(),\n            ConfigurationProvider: (services) => new DefaultConfigurationProvider(services)\n        }\n    };\n}\n", "/******************************************************************************\n * Copyright 2021 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\n/* eslint-disable @typescript-eslint/no-explicit-any */\n\n/**\n * A `Module` is a description of possibly grouped service factories.\n *\n * Given a type I = { group: { service: A } },\n * Module := { group: { service: (injector: I) => A } }\n *\n * Making `I` available during the creation of `I` allows us to create cyclic\n * dependencies.\n */\nexport type Module = {\n    [K in keyof T]: Module | ((injector: I) => T[K])\n}\n\nexport namespace Module {\n    export const merge = (m1: Module, m2: Module) => (_merge(_merge({}, m1), m2) as Module);\n}\n\n/**\n * Given a set of modules, the inject function returns a lazily evaluated injector\n * that injects dependencies into the requested service when it is requested the\n * first time. Subsequent requests will return the same service.\n *\n * In the case of cyclic dependencies, an Error will be thrown. This can be fixed\n * by injecting a provider `() => T` instead of a `T`.\n *\n * Please note that the arguments may be objects or arrays. However, the result will\n * be an object. Using it with for..of will have no effect.\n *\n * @param module1 first Module\n * @param module2 (optional) second Module\n * @param module3 (optional) third Module\n * @param module4 (optional) fourth Module\n * @param module5 (optional) fifth Module\n * @param module6 (optional) sixth Module\n * @param module7 (optional) seventh Module\n * @param module8 (optional) eighth Module\n * @param module9 (optional) ninth Module\n * @returns a new object of type I\n */\nexport function inject(\n    module1: Module, module2?: Module, module3?: Module, module4?: Module, module5?: Module, module6?: Module, module7?: Module, module8?: Module, module9?: Module\n): I {\n    const module = [module1, module2, module3, module4, module5, module6, module7, module8, module9].reduce(_merge, {}) as Module;\n    return _inject(module);\n}\n\nconst isProxy = Symbol('isProxy');\n\n/**\n * Eagerly load all services in the given dependency injection container. This is sometimes\n * necessary because services can register event listeners in their constructors.\n */\nexport function eagerLoad(item: T): T {\n    if (item && (item as any)[isProxy]) {\n        for (const value of Object.values(item)) {\n            eagerLoad(value);\n        }\n    }\n    return item;\n}\n\n/**\n * Helper function that returns an injector by creating a proxy.\n * Invariant: injector is of type I. 
If injector is undefined, then T = I.\n */\nfunction _inject(module: Module, injector?: any): T {\n    const proxy: any = new Proxy({} as any, {\n        deleteProperty: () => false,\n        get: (obj, prop) => _resolve(obj, prop, module, injector || proxy),\n        getOwnPropertyDescriptor: (obj, prop) => (_resolve(obj, prop, module, injector || proxy), Object.getOwnPropertyDescriptor(obj, prop)), // used by for..in\n        has: (_, prop) => prop in module, // used by ..in..\n        ownKeys: () => [...Reflect.ownKeys(module), isProxy] // used by for..in\n    });\n    proxy[isProxy] = true;\n    return proxy;\n}\n\n/**\n * Internally used to tag a requested dependency, directly before calling the factory.\n * This allows us to find cycles during instance creation.\n */\nconst __requested__ = Symbol();\n\n/**\n * Returns the value `obj[prop]`. If the value does not exist, yet, it is resolved from\n * the module description. The result of service factories is cached. Groups are\n * recursively proxied.\n *\n * @param obj an object holding all group proxies and services\n * @param prop the key of a value within obj\n * @param module an object containing groups and service factories\n * @param injector the first level proxy that provides access to all values\n * @returns the requested value `obj[prop]`\n * @throws Error if a dependency cycle is detected\n */\nfunction _resolve(obj: any, prop: string | symbol | number, module: Module, injector: I): T[keyof T] | undefined {\n    if (prop in obj) {\n        if (obj[prop] instanceof Error) {\n            throw new Error('Construction failure. Please make sure that your dependencies are constructable.', {cause: obj[prop]});\n        }\n        if (obj[prop] === __requested__) {\n            throw new Error('Cycle detected. Please make \"' + String(prop) + '\" lazy. See https://langium.org/docs/configuration-services/#resolving-cyclic-dependencies');\n        }\n        return obj[prop];\n    } else if (prop in module) {\n        const value: Module | ((injector: I) => T[keyof T]) = module[prop as keyof T];\n        obj[prop] = __requested__;\n        try {\n            obj[prop] = (typeof value === 'function') ? value(injector) : _inject(value, injector);\n        } catch (error) {\n            obj[prop] = error instanceof Error ? 
error : undefined;\n            throw error;\n        }\n        return obj[prop];\n    } else {\n        return undefined;\n    }\n}\n\n/**\n * Performs a deep-merge of two modules by writing source entries into the target module.\n *\n * @param target the module which is written\n * @param source the module which is read\n * @returns the target module\n */\nfunction _merge(target: Module, source?: Module): Module {\n    if (source) {\n        for (const [key, value2] of Object.entries(source)) {\n            if (value2 !== undefined) {\n                const value1 = target[key];\n                if (value1 !== null && value2 !== null && typeof value1 === 'object' && typeof value2 === 'object') {\n                    target[key] = _merge(value1, value2);\n                } else {\n                    target[key] = value2;\n                }\n            }\n        }\n    }\n    return target;\n}\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nexport * from './caching.js';\nexport * from './event.js';\nexport * from './collections.js';\nexport * from './disposable.js';\nexport * from './errors.js';\nexport * from './grammar-loader.js';\nexport * from './promise-utils.js';\nexport * from './stream.js';\nexport * from './uri-utils.js';\n\nimport * as AstUtils from './ast-utils.js';\nimport * as Cancellation from './cancellation.js';\nimport * as CstUtils from './cst-utils.js';\nimport * as GrammarUtils from './grammar-utils.js';\nimport * as RegExpUtils from './regexp-utils.js';\nexport { AstUtils, Cancellation, CstUtils, GrammarUtils, RegExpUtils };\n", "/******************************************************************************\n * Copyright 2022 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport type { URI } from '../utils/uri-utils.js';\n\nexport interface FileSystemNode {\n    readonly isFile: boolean;\n    readonly isDirectory: boolean;\n    readonly uri: URI;\n}\n\nexport type FileSystemFilter = (node: FileSystemNode) => boolean;\n\n/**\n * Provides methods to interact with an abstract file system. 
The default implementation is based on the node.js `fs` API.\n */\nexport interface FileSystemProvider {\n    /**\n     * Reads a document asynchronously from a given URI.\n     * @returns The string content of the file with the specified URI.\n     */\n    readFile(uri: URI): Promise;\n    /**\n     * Reads the directory information for the given URI.\n     * @returns The list of file system entries that are contained within the specified directory.\n     */\n    readDirectory(uri: URI): Promise;\n}\n\nexport class EmptyFileSystemProvider implements FileSystemProvider {\n\n    readFile(): Promise {\n        throw new Error('No file system is available.');\n    }\n\n    async readDirectory(): Promise {\n        return [];\n    }\n\n}\n\nexport const EmptyFileSystem = {\n    fileSystemProvider: () => new EmptyFileSystemProvider()\n};\n", "/******************************************************************************\n * Copyright 2023 TypeFox GmbH\n * This program and the accompanying materials are made available under the\n * terms of the MIT License, which is available in the project root.\n ******************************************************************************/\n\nimport { createDefaultCoreModule, createDefaultSharedCoreModule } from '../default-module.js';\nimport type { Module } from '../dependency-injection.js';\nimport { inject } from '../dependency-injection.js';\nimport * as ast from '../languages/generated/ast.js';\nimport type { LangiumCoreServices, LangiumSharedCoreServices, PartialLangiumCoreServices, PartialLangiumSharedCoreServices } from '../services.js';\nimport type { Mutable } from '../syntax-tree.js';\nimport { EmptyFileSystem } from '../workspace/file-system-provider.js';\nimport { URI } from './uri-utils.js';\n\nconst minimalGrammarModule: Module = {\n    Grammar: () => undefined as unknown as ast.Grammar,\n    LanguageMetaData: () => ({\n        caseInsensitive: false,\n        fileExtensions: ['.langium'],\n        languageId: 'langium'\n    })\n};\n\nconst minimalSharedGrammarModule: Module = {\n    AstReflection: () => new ast.LangiumGrammarAstReflection()\n};\n\nfunction createMinimalGrammarServices(): LangiumCoreServices {\n    const shared = inject(\n        createDefaultSharedCoreModule(EmptyFileSystem),\n        minimalSharedGrammarModule\n    );\n    const grammar = inject(\n        createDefaultCoreModule({ shared }),\n        minimalGrammarModule\n    );\n    shared.ServiceRegistry.register(grammar);\n    return grammar;\n}\n\n/**\n * Load a Langium grammar for your language from a JSON string. This is used by several services,\n * most notably the parser builder which interprets the grammar to create a parser.\n */\nexport function loadGrammarFromJson(json: string): ast.Grammar {\n    const services = createMinimalGrammarServices();\n    const astNode = services.serializer.JsonSerializer.deserialize(json) as Mutable;\n    services.shared.workspace.LangiumDocumentFactory.fromModel(astNode, URI.parse(`memory://${astNode.name ?? 
'grammar'}.langium`));\n    return astNode;\n}\n", "var __defProp = Object.defineProperty;\nvar __name = (target, value) => __defProp(target, \"name\", { value, configurable: true });\n\n// src/language/generated/ast.ts\nimport { AbstractAstReflection } from \"langium\";\nvar Statement = \"Statement\";\nvar Branch = \"Branch\";\nfunction isBranch(item) {\n  return reflection.isInstance(item, Branch);\n}\n__name(isBranch, \"isBranch\");\nvar Checkout = \"Checkout\";\nvar CherryPicking = \"CherryPicking\";\nvar Commit = \"Commit\";\nfunction isCommit(item) {\n  return reflection.isInstance(item, Commit);\n}\n__name(isCommit, \"isCommit\");\nvar Common = \"Common\";\nfunction isCommon(item) {\n  return reflection.isInstance(item, Common);\n}\n__name(isCommon, \"isCommon\");\nvar GitGraph = \"GitGraph\";\nfunction isGitGraph(item) {\n  return reflection.isInstance(item, GitGraph);\n}\n__name(isGitGraph, \"isGitGraph\");\nvar Info = \"Info\";\nfunction isInfo(item) {\n  return reflection.isInstance(item, Info);\n}\n__name(isInfo, \"isInfo\");\nvar Merge = \"Merge\";\nfunction isMerge(item) {\n  return reflection.isInstance(item, Merge);\n}\n__name(isMerge, \"isMerge\");\nvar Packet = \"Packet\";\nfunction isPacket(item) {\n  return reflection.isInstance(item, Packet);\n}\n__name(isPacket, \"isPacket\");\nvar PacketBlock = \"PacketBlock\";\nfunction isPacketBlock(item) {\n  return reflection.isInstance(item, PacketBlock);\n}\n__name(isPacketBlock, \"isPacketBlock\");\nvar Pie = \"Pie\";\nfunction isPie(item) {\n  return reflection.isInstance(item, Pie);\n}\n__name(isPie, \"isPie\");\nvar PieSection = \"PieSection\";\nfunction isPieSection(item) {\n  return reflection.isInstance(item, PieSection);\n}\n__name(isPieSection, \"isPieSection\");\nvar Direction = \"Direction\";\nvar MermaidAstReflection = class extends AbstractAstReflection {\n  static {\n    __name(this, \"MermaidAstReflection\");\n  }\n  getAllTypes() {\n    return [\"Branch\", \"Checkout\", \"CherryPicking\", \"Commit\", \"Common\", \"Direction\", \"GitGraph\", \"Info\", \"Merge\", \"Packet\", \"PacketBlock\", \"Pie\", \"PieSection\", \"Statement\"];\n  }\n  computeIsSubtype(subtype, supertype) {\n    switch (subtype) {\n      case Branch:\n      case Checkout:\n      case CherryPicking:\n      case Commit:\n      case Merge: {\n        return this.isSubtype(Statement, supertype);\n      }\n      case Direction: {\n        return this.isSubtype(GitGraph, supertype);\n      }\n      default: {\n        return false;\n      }\n    }\n  }\n  getReferenceType(refInfo) {\n    const referenceId = `${refInfo.container.$type}:${refInfo.property}`;\n    switch (referenceId) {\n      default: {\n        throw new Error(`${referenceId} is not a valid reference id.`);\n      }\n    }\n  }\n  getTypeMetaData(type) {\n    switch (type) {\n      case \"Branch\": {\n        return {\n          name: \"Branch\",\n          properties: [\n            { name: \"name\" },\n            { name: \"order\" }\n          ]\n        };\n      }\n      case \"Checkout\": {\n        return {\n          name: \"Checkout\",\n          properties: [\n            { name: \"branch\" }\n          ]\n        };\n      }\n      case \"CherryPicking\": {\n        return {\n          name: \"CherryPicking\",\n          properties: [\n            { name: \"id\" },\n            { name: \"parent\" },\n            { name: \"tags\", defaultValue: [] }\n          ]\n        };\n      }\n      case \"Commit\": {\n        return {\n          name: \"Commit\",\n          
properties: [\n            { name: \"id\" },\n            { name: \"message\" },\n            { name: \"tags\", defaultValue: [] },\n            { name: \"type\" }\n          ]\n        };\n      }\n      case \"Common\": {\n        return {\n          name: \"Common\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"GitGraph\": {\n        return {\n          name: \"GitGraph\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"statements\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Info\": {\n        return {\n          name: \"Info\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"Merge\": {\n        return {\n          name: \"Merge\",\n          properties: [\n            { name: \"branch\" },\n            { name: \"id\" },\n            { name: \"tags\", defaultValue: [] },\n            { name: \"type\" }\n          ]\n        };\n      }\n      case \"Packet\": {\n        return {\n          name: \"Packet\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"blocks\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"PacketBlock\": {\n        return {\n          name: \"PacketBlock\",\n          properties: [\n            { name: \"end\" },\n            { name: \"label\" },\n            { name: \"start\" }\n          ]\n        };\n      }\n      case \"Pie\": {\n        return {\n          name: \"Pie\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"sections\", defaultValue: [] },\n            { name: \"showData\", defaultValue: false },\n            { name: \"title\" }\n          ]\n        };\n      }\n      case \"PieSection\": {\n        return {\n          name: \"PieSection\",\n          properties: [\n            { name: \"label\" },\n            { name: \"value\" }\n          ]\n        };\n      }\n      case \"Direction\": {\n        return {\n          name: \"Direction\",\n          properties: [\n            { name: \"accDescr\" },\n            { name: \"accTitle\" },\n            { name: \"dir\" },\n            { name: \"statements\", defaultValue: [] },\n            { name: \"title\" }\n          ]\n        };\n      }\n      default: {\n        return {\n          name: type,\n          properties: []\n        };\n      }\n    }\n  }\n};\nvar reflection = new MermaidAstReflection();\n\n// src/language/generated/grammar.ts\nimport { loadGrammarFromJson } from \"langium\";\nvar loadedInfoGrammar;\nvar InfoGrammar = /* @__PURE__ */ __name(() => loadedInfoGrammar ?? 
(loadedInfoGrammar = loadGrammarFromJson('{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Info\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Info\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"info\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"showInfo\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"*\"}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[],\"cardinality\":\"?\"}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t 
]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}')), \"InfoGrammar\");\nvar loadedPacketGrammar;\nvar PacketGrammar = /* @__PURE__ */ __name(() => loadedPacketGrammar ?? (loadedPacketGrammar = loadGrammarFromJson(`{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Packet\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Packet\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"packet-beta\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"blocks\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"*\"}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"Assignment\",\"feature\":\"blocks\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"+\"}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"PacketBlock\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"start\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"-\"},{\"$type\":\"Assignment\",\"feature\":\"end\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"Keyword\",\"value\":\":\"},{\"$type\":\"Assignment\",\"feature\":\"label\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"nam
e\":\"INT\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/0|[1-9][0-9]*/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"STRING\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]*\\\\\"|'[^']*'/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@7\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@8\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t 
]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}`)), \"PacketGrammar\");\nvar loadedPieGrammar;\nvar PieGrammar = /* @__PURE__ */ __name(() => loadedPieGrammar ?? (loadedPieGrammar = loadGrammarFromJson('{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"Pie\",\"imports\":[],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"Pie\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Keyword\",\"value\":\"pie\"},{\"$type\":\"Assignment\",\"feature\":\"showData\",\"operator\":\"?=\",\"terminal\":{\"$type\":\"Keyword\",\"value\":\"showData\"},\"cardinality\":\"?\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"sections\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"*\"}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"Assignment\",\"feature\":\"sections\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]},\"cardinality\":\"+\"}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"PieSection\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"label\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}},{\"$type\":\"Keyword\",\"value\":\":\"},{\"$type\":\"Assignment\",\"feature\":\"value\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"PIE_SECTION_LABEL\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]+\\\\\"/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"PIE_SECTION_VALUE\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/(0|[1-9][0-9]*)(\\\\\\\\.[0-9]+)?/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"opera
tor\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@7\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@8\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@9\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@6\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"types\":[],\"usedGrammars\":[]}')), \"PieGrammar\");\nvar loadedGitGraphGrammar;\nvar GitGraphGrammar = /* @__PURE__ */ __name(() => loadedGitGraphGrammar ?? 
(loadedGitGraphGrammar = loadGrammarFromJson(`{\"$type\":\"Grammar\",\"isDeclared\":true,\"name\":\"GitGraph\",\"interfaces\":[{\"$type\":\"Interface\",\"name\":\"Common\",\"attributes\":[{\"$type\":\"TypeAttribute\",\"name\":\"accDescr\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"accTitle\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}},{\"$type\":\"TypeAttribute\",\"name\":\"title\",\"isOptional\":true,\"type\":{\"$type\":\"SimpleType\",\"primitiveType\":\"string\"}}],\"superTypes\":[]}],\"rules\":[{\"$type\":\"ParserRule\",\"name\":\"TitleAndAccessibilities\",\"fragment\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Assignment\",\"feature\":\"accDescr\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@3\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"accTitle\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@4\"},\"arguments\":[]}},{\"$type\":\"Assignment\",\"feature\":\"title\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@5\"},\"arguments\":[]}}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}],\"cardinality\":\"+\"},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"EOL\",\"fragment\":true,\"dataType\":\"string\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"+\"},{\"$type\":\"EndOfFile\"}]},\"definesHiddenTokens\":false,\"entry\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"NEWLINE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\r?\\\\\\\\n/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_DESCR\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accDescr(?:[\\\\\\\\t ]*:([^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)|\\\\\\\\s*{([^}]*)})/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ACC_TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*accTitle[\\\\\\\\t ]*:(?:[^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[^\\\\\\\\n\\\\\\\\r]*)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"TITLE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*title(?:[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*?(?=%%)|[\\\\\\\\t ][^\\\\\\\\n\\\\\\\\r]*|)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"WHITESPACE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]+/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"YAML\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/---[\\\\\\\\t ]*\\\\\\\\r?\\\\\\\\n(?:[\\\\\\\\S\\\\\\\\s]*?\\\\\\\\r?\\\\\\\\n)?---(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"DIRECTIVE\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t ]*%%{[\\\\\\\\S\\\\\\\\s]*?}%%(?:\\\\\\\\r?\\\\\\\\n|(?!\\\\\\\\S))/\"},\"fragment\":false},{\"$type\":\"TerminalRule\",\"hidden\":true,\"name\":\"SINGLE_LINE_COMMENT\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[\\\\\\\\t 
]*%%[^\\\\\\\\n\\\\\\\\r]*/\"},\"fragment\":false},{\"$type\":\"ParserRule\",\"name\":\"GitGraph\",\"entry\":true,\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"Keyword\",\"value\":\":\"}]},{\"$type\":\"Keyword\",\"value\":\"gitGraph:\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"gitGraph\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@12\"},\"arguments\":[]},{\"$type\":\"Keyword\",\"value\":\":\"}]}]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[],\"cardinality\":\"*\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@0\"},\"arguments\":[]},{\"$type\":\"Assignment\",\"feature\":\"statements\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@11\"},\"arguments\":[]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@2\"},\"arguments\":[]}],\"cardinality\":\"*\"}]}]},\"definesHiddenTokens\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Statement\",\"definition\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@13\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@14\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@15\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@16\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@17\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Direction\",\"definition\":{\"$type\":\"Assignment\",\"feature\":\"dir\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"LR\"},{\"$type\":\"Keyword\",\"value\":\"TB\"},{\"$type\":\"Keyword\",\"value\":\"BT\"}]}},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Commit\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"commit\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"msg:\",\"cardinality\":\"?\"},{\"$type\":\"Assignment\",\"feature\":\"message\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"type:\"},{\"$type\":\"Assignment\",\"feature\":\"type\",\"operator\":
\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"NORMAL\"},{\"$type\":\"Keyword\",\"value\":\"REVERSE\"},{\"$type\":\"Keyword\",\"value\":\"HIGHLIGHT\"}]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Branch\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"branch\"},{\"$type\":\"Assignment\",\"feature\":\"name\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"order:\"},{\"$type\":\"Assignment\",\"feature\":\"order\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@18\"},\"arguments\":[]}}],\"cardinality\":\"?\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Merge\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"merge\"},{\"$type\":\"Assignment\",\"feature\":\"branch\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"type:\"},{\"$type\":\"Assignment\",\"feature\":\"type\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"NORMAL\"},{\"$type\":\"Keyword\",\"value\":\"REVERSE\"},{\"$type\":\"Keyword\",\"value\":\"HIGHLIGHT\"}]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",\"name\":\"Checkout\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"checkout\"},{\"$type\":\"Keyword\",\"value\":\"switch\"}]},{\"$type\":\"Assignment\",\"feature\":\"branch\",\"operator\":\"=\",\"terminal\":{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@19\"},\"arguments\":[]},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}]}},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"ParserRule\",
\"name\":\"CherryPicking\",\"definition\":{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"cherry-pick\"},{\"$type\":\"Alternatives\",\"elements\":[{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"id:\"},{\"$type\":\"Assignment\",\"feature\":\"id\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"tag:\"},{\"$type\":\"Assignment\",\"feature\":\"tags\",\"operator\":\"+=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]},{\"$type\":\"Group\",\"elements\":[{\"$type\":\"Keyword\",\"value\":\"parent:\"},{\"$type\":\"Assignment\",\"feature\":\"parent\",\"operator\":\"=\",\"terminal\":{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@20\"},\"arguments\":[]}}]}],\"cardinality\":\"*\"},{\"$type\":\"RuleCall\",\"rule\":{\"$ref\":\"#/rules@1\"},\"arguments\":[]}]},\"definesHiddenTokens\":false,\"entry\":false,\"fragment\":false,\"hiddenTokens\":[],\"parameters\":[],\"wildcard\":false},{\"$type\":\"TerminalRule\",\"name\":\"INT\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"number\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/[0-9]+(?=\\\\\\\\s)/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"ID\",\"type\":{\"$type\":\"ReturnType\",\"name\":\"string\"},\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\\\\w([-\\\\\\\\./\\\\\\\\w]*[-\\\\\\\\w])?/\"},\"fragment\":false,\"hidden\":false},{\"$type\":\"TerminalRule\",\"name\":\"STRING\",\"definition\":{\"$type\":\"RegexToken\",\"regex\":\"/\\\\\"[^\\\\\"]*\\\\\"|'[^']*'/\"},\"fragment\":false,\"hidden\":false}],\"definesHiddenTokens\":false,\"hiddenTokens\":[],\"imports\":[],\"types\":[],\"usedGrammars\":[]}`)), \"GitGraphGrammar\");\n\n// src/language/generated/module.ts\nvar InfoLanguageMetaData = {\n  languageId: \"info\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar PacketLanguageMetaData = {\n  languageId: \"packet\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar PieLanguageMetaData = {\n  languageId: \"pie\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar GitGraphLanguageMetaData = {\n  languageId: \"gitGraph\",\n  fileExtensions: [\".mmd\", \".mermaid\"],\n  caseInsensitive: false\n};\nvar MermaidGeneratedSharedModule = {\n  AstReflection: /* @__PURE__ */ __name(() => new MermaidAstReflection(), \"AstReflection\")\n};\nvar InfoGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => InfoGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => InfoLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar PacketGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => PacketGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => PacketLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar PieGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => PieGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => PieLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\nvar GitGraphGeneratedModule = {\n  Grammar: /* @__PURE__ */ __name(() => GitGraphGrammar(), \"Grammar\"),\n  LanguageMetaData: /* @__PURE__ */ __name(() => GitGraphLanguageMetaData, \"LanguageMetaData\"),\n  parser: {}\n};\n\n// src/language/common/valueConverter.ts\nimport { DefaultValueConverter } from \"langium\";\n\n// 
src/language/common/matcher.ts\nvar accessibilityDescrRegex = /accDescr(?:[\\t ]*:([^\\n\\r]*)|\\s*{([^}]*)})/;\nvar accessibilityTitleRegex = /accTitle[\\t ]*:([^\\n\\r]*)/;\nvar titleRegex = /title([\\t ][^\\n\\r]*|)/;\n\n// src/language/common/valueConverter.ts\nvar rulesRegexes = {\n  ACC_DESCR: accessibilityDescrRegex,\n  ACC_TITLE: accessibilityTitleRegex,\n  TITLE: titleRegex\n};\nvar AbstractMermaidValueConverter = class extends DefaultValueConverter {\n  static {\n    __name(this, \"AbstractMermaidValueConverter\");\n  }\n  runConverter(rule, input, cstNode) {\n    let value = this.runCommonConverter(rule, input, cstNode);\n    if (value === void 0) {\n      value = this.runCustomConverter(rule, input, cstNode);\n    }\n    if (value === void 0) {\n      return super.runConverter(rule, input, cstNode);\n    }\n    return value;\n  }\n  runCommonConverter(rule, input, _cstNode) {\n    const regex = rulesRegexes[rule.name];\n    if (regex === void 0) {\n      return void 0;\n    }\n    const match = regex.exec(input);\n    if (match === null) {\n      return void 0;\n    }\n    if (match[1] !== void 0) {\n      return match[1].trim().replace(/[\\t ]{2,}/gm, \" \");\n    }\n    if (match[2] !== void 0) {\n      return match[2].replace(/^\\s*/gm, \"\").replace(/\\s+$/gm, \"\").replace(/[\\t ]{2,}/gm, \" \").replace(/[\\n\\r]{2,}/gm, \"\\n\");\n    }\n    return void 0;\n  }\n};\nvar CommonValueConverter = class extends AbstractMermaidValueConverter {\n  static {\n    __name(this, \"CommonValueConverter\");\n  }\n  runCustomConverter(_rule, _input, _cstNode) {\n    return void 0;\n  }\n};\n\n// src/language/common/tokenBuilder.ts\nimport { DefaultTokenBuilder } from \"langium\";\nvar AbstractMermaidTokenBuilder = class extends DefaultTokenBuilder {\n  static {\n    __name(this, \"AbstractMermaidTokenBuilder\");\n  }\n  constructor(keywords) {\n    super();\n    this.keywords = new Set(keywords);\n  }\n  buildKeywordTokens(rules, terminalTokens, options) {\n    const tokenTypes = super.buildKeywordTokens(rules, terminalTokens, options);\n    tokenTypes.forEach((tokenType) => {\n      if (this.keywords.has(tokenType.name) && tokenType.PATTERN !== void 0) {\n        tokenType.PATTERN = new RegExp(tokenType.PATTERN.toString() + \"(?:(?=%%)|(?!\\\\S))\");\n      }\n    });\n    return tokenTypes;\n  }\n};\nvar CommonTokenBuilder = class extends AbstractMermaidTokenBuilder {\n  static {\n    __name(this, \"CommonTokenBuilder\");\n  }\n};\n\nexport {\n  __name,\n  Statement,\n  Branch,\n  isBranch,\n  Commit,\n  isCommit,\n  isCommon,\n  GitGraph,\n  isGitGraph,\n  Info,\n  isInfo,\n  Merge,\n  isMerge,\n  Packet,\n  isPacket,\n  PacketBlock,\n  isPacketBlock,\n  Pie,\n  isPie,\n  PieSection,\n  isPieSection,\n  MermaidGeneratedSharedModule,\n  InfoGeneratedModule,\n  PacketGeneratedModule,\n  PieGeneratedModule,\n  GitGraphGeneratedModule,\n  AbstractMermaidValueConverter,\n  CommonValueConverter,\n  AbstractMermaidTokenBuilder,\n  CommonTokenBuilder\n};\n"],
  "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAKA,WAAO,eAAe,SAAS,cAAc,EAAE,OAAO,KAAK,CAAC;AAC5D,QAAI;AACJ,aAAS,MAAM;AACX,UAAI,SAAS,QAAW;AACpB,cAAM,IAAI,MAAM,wCAAwC;AAAA,MAC5D;AACA,aAAO;AAAA,IACX;AALS;AAMT,KAAC,SAAUA,MAAK;AACZ,eAAS,QAAQ,KAAK;AAClB,YAAI,QAAQ,QAAW;AACnB,gBAAM,IAAI,MAAM,uCAAuC;AAAA,QAC3D;AACA,eAAO;AAAA,MACX;AALS;AAMT,MAAAA,KAAI,UAAU;AAAA,IAClB,GAAG,QAAQ,MAAM,CAAC,EAAE;AACpB,YAAQ,UAAU;AAAA;AAAA;;;ACtBlB;AAAA;AAAA;AAKA,WAAO,eAAe,SAAS,cAAc,EAAE,OAAO,KAAK,CAAC;AAC5D,YAAQ,cAAc,QAAQ,QAAQ,QAAQ,OAAO,QAAQ,QAAQ,QAAQ,SAAS,QAAQ,SAAS,QAAQ,UAAU;AACzH,aAAS,QAAQ,OAAO;AACpB,aAAO,UAAU,QAAQ,UAAU;AAAA,IACvC;AAFS;AAGT,YAAQ,UAAU;AAClB,aAAS,OAAO,OAAO;AACnB,aAAO,OAAO,UAAU,YAAY,iBAAiB;AAAA,IACzD;AAFS;AAGT,YAAQ,SAAS;AACjB,aAAS,OAAO,OAAO;AACnB,aAAO,OAAO,UAAU,YAAY,iBAAiB;AAAA,IACzD;AAFS;AAGT,YAAQ,SAAS;AACjB,aAAS,MAAM,OAAO;AAClB,aAAO,iBAAiB;AAAA,IAC5B;AAFS;AAGT,YAAQ,QAAQ;AAChB,aAAS,KAAK,OAAO;AACjB,aAAO,OAAO,UAAU;AAAA,IAC5B;AAFS;AAGT,YAAQ,OAAO;AACf,aAAS,MAAM,OAAO;AAClB,aAAO,MAAM,QAAQ,KAAK;AAAA,IAC9B;AAFS;AAGT,YAAQ,QAAQ;AAChB,aAAS,YAAY,OAAO;AACxB,aAAO,MAAM,KAAK,KAAK,MAAM,MAAM,UAAQ,OAAO,IAAI,CAAC;AAAA,IAC3D;AAFS;AAGT,YAAQ,cAAc;AAAA;AAAA;;;AClCtB;AAAA;AAAA;AAKA,WAAO,eAAe,SAAS,cAAc,EAAE,OAAO,KAAK,CAAC;AAC5D,YAAQ,UAAU,QAAQ,QAAQ;AAClC,QAAM,QAAQ;AACd,QAAI;AACJ,KAAC,SAAUC,QAAO;AACd,YAAM,cAAc,EAAE,UAAU;AAAA,MAAE,EAAE;AACpC,MAAAA,OAAM,OAAO,WAAY;AAAE,eAAO;AAAA,MAAa;AAAA,IACnD,GAAG,UAAU,QAAQ,QAAQ,QAAQ,CAAC,EAAE;AACxC,QAAM,eAAN,MAAmB;AAAA,MAbnB,OAamB;AAAA;AAAA;AAAA,MACf,IAAI,UAAU,UAAU,MAAM,QAAQ;AAClC,YAAI,CAAC,KAAK,YAAY;AAClB,eAAK,aAAa,CAAC;AACnB,eAAK,YAAY,CAAC;AAAA,QACtB;AACA,aAAK,WAAW,KAAK,QAAQ;AAC7B,aAAK,UAAU,KAAK,OAAO;AAC3B,YAAI,MAAM,QAAQ,MAAM,GAAG;AACvB,iBAAO,KAAK,EAAE,SAAS,6BAAM,KAAK,OAAO,UAAU,OAAO,GAAnC,WAAqC,CAAC;AAAA,QACjE;AAAA,MACJ;AAAA,MACA,OAAO,UAAU,UAAU,MAAM;AAC7B,YAAI,CAAC,KAAK,YAAY;AAClB;AAAA,QACJ;AACA,YAAI,oCAAoC;AACxC,iBAAS,IAAI,GAAG,MAAM,KAAK,WAAW,QAAQ,IAAI,KAAK,KAAK;AACxD,cAAI,KAAK,WAAW,CAAC,MAAM,UAAU;AACjC,gBAAI,KAAK,UAAU,CAAC,MAAM,SAAS;AAE/B,mBAAK,WAAW,OAAO,GAAG,CAAC;AAC3B,mBAAK,UAAU,OAAO,GAAG,CAAC;AAC1B;AAAA,YACJ,OACK;AACD,kDAAoC;AAAA,YACxC;AAAA,UACJ;AAAA,QACJ;AACA,YAAI,mCAAmC;AACnC,gBAAM,IAAI,MAAM,mFAAmF;AAAA,QACvG;AAAA,MACJ;AAAA,MACA,UAAU,MAAM;AACZ,YAAI,CAAC,KAAK,YAAY;AAClB,iBAAO,CAAC;AAAA,QACZ;AACA,cAAM,MAAM,CAAC,GAAG,YAAY,KAAK,WAAW,MAAM,CAAC,GAAG,WAAW,KAAK,UAAU,MAAM,CAAC;AACvF,iBAAS,IAAI,GAAG,MAAM,UAAU,QAAQ,IAAI,KAAK,KAAK;AAClD,cAAI;AACA,gBAAI,KAAK,UAAU,CAAC,EAAE,MAAM,SAAS,CAAC,GAAG,IAAI,CAAC;AAAA,UAClD,SACO,GAAG;AAEN,aAAC,GAAG,MAAM,SAAS,EAAE,QAAQ,MAAM,CAAC;AAAA,UACxC;AAAA,QACJ;AACA,eAAO;AAAA,MACX;AAAA,MACA,UAAU;AACN,eAAO,CAAC,KAAK,cAAc,KAAK,WAAW,WAAW;AAAA,MAC1D;AAAA,MACA,UAAU;AACN,aAAK,aAAa;AAClB,aAAK,YAAY;AAAA,MACrB;AAAA,IACJ;AACA,QAAMC,WAAN,MAAM,SAAQ;AAAA,MAvEd,OAuEc;AAAA;AAAA;AAAA,MACV,YAAY,UAAU;AAClB,aAAK,WAAW;AAAA,MACpB;AAAA;AAAA;AAAA;AAAA;AAAA,MAKA,IAAI,QAAQ;AACR,YAAI,CAAC,KAAK,QAAQ;AACd,eAAK,SAAS,CAAC,UAAU,UAAU,gBAAgB;AAC/C,gBAAI,CAAC,KAAK,YAAY;AAClB,mBAAK,aAAa,IAAI,aAAa;AAAA,YACvC;AACA,gBAAI,KAAK,YAAY,KAAK,SAAS,sBAAsB,KAAK,WAAW,QAAQ,GAAG;AAChF,mBAAK,SAAS,mBAAmB,IAAI;AAAA,YACzC;AACA,iBAAK,WAAW,IAAI,UAAU,QAAQ;AACtC,kBAAM,SAAS;AAAA,cACX,SAAS,6BAAM;AACX,oBAAI,CAAC,KAAK,YAAY;AAElB;AAAA,gBACJ;AACA,qBAAK,WAAW,OAAO,UAAU,QAAQ;AACzC,uBAAO,UAAU,SAAQ;AACzB,oBAAI,KAAK,YAAY,KAAK,SAAS,wBAAwB,KAAK,WAAW,QAAQ,GAAG;AAClF,uBAAK,SAAS,qBAAqB,IAAI;AAAA,gBAC3C;AAAA,cACJ,GAVS;AAAA,YAWb;AACA,gBAAI,MAAM,QAAQ,WAAW,GAAG;AAC5B,0BAAY,KAAK,MAAM;AAAA,YAC3B;AACA,mBAAO;AAAA,UACX;AAAA,QACJ;AACA,eAAO,KAAK;AAAA,MAChB;AAAA;AAAA;AAAA;AAAA;AAAA,MAKA,KAAK,OAAO;AACR,YAAI,KAAK,YAAY;AACjB,eAA
K,WAAW,OAAO,KAAK,KAAK,YAAY,KAAK;AAAA,QACtD;AAAA,MACJ;AAAA,MACA,UAAU;AACN,YAAI,KAAK,YAAY;AACjB,eAAK,WAAW,QAAQ;AACxB,eAAK,aAAa;AAAA,QACtB;AAAA,MACJ;AAAA,IACJ;AACA,YAAQ,UAAUA;AAClB,IAAAA,SAAQ,QAAQ,WAAY;AAAA,IAAE;AAAA;AAAA;;;AC/H9B;AAAA;AAAA;AAKA,WAAO,eAAe,SAAS,cAAc,EAAE,OAAO,KAAK,CAAC;AAC5D,YAAQ,0BAA0B,QAAQ,oBAAoB;AAC9D,QAAM,QAAQ;AACd,QAAMC,MAAK;AACX,QAAM,WAAW;AACjB,QAAIC;AACJ,KAAC,SAAUA,qBAAmB;AAC1B,MAAAA,oBAAkB,OAAO,OAAO,OAAO;AAAA,QACnC,yBAAyB;AAAA,QACzB,yBAAyB,SAAS,MAAM;AAAA,MAC5C,CAAC;AACD,MAAAA,oBAAkB,YAAY,OAAO,OAAO;AAAA,QACxC,yBAAyB;AAAA,QACzB,yBAAyB,SAAS,MAAM;AAAA,MAC5C,CAAC;AACD,eAAS,GAAG,OAAO;AACf,cAAM,YAAY;AAClB,eAAO,cAAc,cAAcA,oBAAkB,QAC9C,cAAcA,oBAAkB,aAC/BD,IAAG,QAAQ,UAAU,uBAAuB,KAAK,CAAC,CAAC,UAAU;AAAA,MACzE;AALS;AAMT,MAAAC,oBAAkB,KAAK;AAAA,IAC3B,GAAGA,wBAAsB,QAAQ,oBAAoBA,sBAAoB,CAAC,EAAE;AAC5E,QAAM,gBAAgB,OAAO,OAAO,SAAU,UAAU,SAAS;AAC7D,YAAM,UAAU,GAAG,MAAM,SAAS,EAAE,MAAM,WAAW,SAAS,KAAK,OAAO,GAAG,CAAC;AAC9E,aAAO,EAAE,UAAU;AAAE,eAAO,QAAQ;AAAA,MAAG,EAAE;AAAA,IAC7C,CAAC;AACD,QAAM,eAAN,MAAmB;AAAA,MAhCnB,OAgCmB;AAAA;AAAA;AAAA,MACf,cAAc;AACV,aAAK,eAAe;AAAA,MACxB;AAAA,MACA,SAAS;AACL,YAAI,CAAC,KAAK,cAAc;AACpB,eAAK,eAAe;AACpB,cAAI,KAAK,UAAU;AACf,iBAAK,SAAS,KAAK,MAAS;AAC5B,iBAAK,QAAQ;AAAA,UACjB;AAAA,QACJ;AAAA,MACJ;AAAA,MACA,IAAI,0BAA0B;AAC1B,eAAO,KAAK;AAAA,MAChB;AAAA,MACA,IAAI,0BAA0B;AAC1B,YAAI,KAAK,cAAc;AACnB,iBAAO;AAAA,QACX;AACA,YAAI,CAAC,KAAK,UAAU;AAChB,eAAK,WAAW,IAAI,SAAS,QAAQ;AAAA,QACzC;AACA,eAAO,KAAK,SAAS;AAAA,MACzB;AAAA,MACA,UAAU;AACN,YAAI,KAAK,UAAU;AACf,eAAK,SAAS,QAAQ;AACtB,eAAK,WAAW;AAAA,QACpB;AAAA,MACJ;AAAA,IACJ;AACA,QAAMC,2BAAN,MAA8B;AAAA,MAhE9B,OAgE8B;AAAA;AAAA;AAAA,MAC1B,IAAI,QAAQ;AACR,YAAI,CAAC,KAAK,QAAQ;AAGd,eAAK,SAAS,IAAI,aAAa;AAAA,QACnC;AACA,eAAO,KAAK;AAAA,MAChB;AAAA,MACA,SAAS;AACL,YAAI,CAAC,KAAK,QAAQ;AAId,eAAK,SAASD,oBAAkB;AAAA,QACpC,OACK;AACD,eAAK,OAAO,OAAO;AAAA,QACvB;AAAA,MACJ;AAAA,MACA,UAAU;AACN,YAAI,CAAC,KAAK,QAAQ;AAEd,eAAK,SAASA,oBAAkB;AAAA,QACpC,WACS,KAAK,kBAAkB,cAAc;AAE1C,eAAK,OAAO,QAAQ;AAAA,QACxB;AAAA,MACJ;AAAA,IACJ;AACA,YAAQ,0BAA0BC;AAAA;AAAA;;;AC/FlC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;sBAAAC;EAAA;aAAAC;EAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAA;;;;;;;;;;;;;;;;;;;;;;;AC6BM,SAAU,UAAU,KAAY;AAClC,SAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,OAAQ,IAAgB,UAAU;AACxF;AAFgB;AAoCV,SAAU,YAAY,KAAY;AACpC,SAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,OAAQ,IAAkB,aAAa;AAC7F;AAFgB;AAgCV,SAAU,qBAAqB,KAAY;AAC7C,SAAO,OAAO,QAAQ,YAAY,QAAQ,QACnC,OAAQ,IAA2B,SAAS,YAC5C,OAAQ,IAA2B,SAAS,YAC5C,OAAQ,IAA2B,SAAS;AACvD;AALgB;AA0BV,SAAU,eAAe,KAAY;AACvC,SAAO,OAAO,QAAQ,YAAY,QAAQ,QACnC,UAAW,IAAqB,SAAS,KACzC,YAAa,IAAqB,SAAS,KAC3C,OAAQ,IAAqB,YAAY;AACpD;AALgB;AAwBV,IAAgB,wBAAhB,MAAqC;EAnJ3C,OAmJ2C;;;EAA3C,cAAA;AAEc,SAAA,WAAgE,CAAA;AAChE,SAAA,cAAoD,CAAA;EA6ClE;EAtCI,WAAW,MAAe,MAAY;AAClC,WAAO,UAAU,IAAI,KAAK,KAAK,UAAU,KAAK,OAAO,IAAI;EAC7D;EAEA,UAAU,SAAiB,WAAiB;AACxC,QAAI,YAAY,WAAW;AACvB,aAAO;;AAEX,QAAI,SAAS,KAAK,SAAS,OAAO;AAClC,QAAI,CAAC,QAAQ;AACT,eAAS,KAAK,SAAS,OAAO,IAAI,CAAA;;AAEtC,UAAM,WAAW,OAAO,SAAS;AACjC,QAAI,aAAa,QAAW;AACxB,aAAO;WACJ;AACH,YAAM,SAAS,KAAK,iBAAiB,SAAS,SAAS;AACvD,aAAO,SAAS,IAAI;AACpB,aAAO;;EAEf;EAEA,eAAe,MAAY;AACvB,UAAM,WAAW,KAAK,YAAY,IAAI;AACtC,QAAI,UAAU;AACV,aAAO;WACJ;AACH,YAAM,WAAW,KAAK,YAAW;AACjC,YAAM,QAAkB,CAAA;AACxB,iBAAW,mBAAmB,UAAU;AACpC,YAAI,KAAK,UAAU,iBAAiB,IAAI,GAAG;AACvC,gBAAM,KAAK,eAAe;;;AAGlC,WAAK,YAAY,IAAI,IAAI;AACzB,aAAO;;EAEf;;AA8DE,SAAU,mBAAmB,MAAa;AAC5C,SAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,MAAM,QAAS,KAA0B,OAAO;AACxG;AAFgB;AAWV,SAAU,cAAc,MAAa;AACvC,SAAO,OAAO,SAAS,YAAY,SAAS,QAAQ,OAAQ,KAAqB,cAAc;AACnG;AAFgB;AAQV,SAAU,cAAc,MAAa;AACvC,SAAO,mBAAmB,IAAI,KAAK,OAAQ,KAAqB,aAA
a;AACjF;AAFgB;;;ACfV,IAAO,aAAP,MAAO,YAAU;EApQvB,OAoQuB;;;EAInB,YAAY,SAAkB,QAAkD;AAC5E,SAAK,UAAU;AACf,SAAK,SAAS;EAClB;EAEA,WAAQ;AACJ,UAAM,WAAW;MACb,OAAO,KAAK,QAAO;MACnB,MAAM,6BAAM,KAAK,OAAO,SAAS,KAAK,GAAhC;MACN,CAAC,OAAO,QAAQ,GAAG,MAAM;;AAE7B,WAAO;EACX;EAEA,CAAC,OAAO,QAAQ,IAAC;AACb,WAAO,KAAK,SAAQ;EACxB;EAEA,UAAO;AACH,UAAM,WAAW,KAAK,SAAQ;AAC9B,WAAO,QAAQ,SAAS,KAAI,EAAG,IAAI;EACvC;EAEA,QAAK;AACD,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf;AACA,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAEA,UAAO;AACH,UAAM,SAAc,CAAA;AACpB,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI;AACJ,OAAG;AACC,aAAO,SAAS,KAAI;AACpB,UAAI,KAAK,UAAU,QAAW;AAC1B,eAAO,KAAK,KAAK,KAAK;;aAErB,CAAC,KAAK;AACf,WAAO;EACX;EAEA,QAAK;AACD,WAAO,IAAI,IAAI,IAAI;EACvB;EAEA,MAAoB,OAAqB,SAAqB;AAC1D,UAAM,cAAc,KAAK,IAAI,aAAmB;MAC5C,QAAQ,MAAM,OAAO,IAAI;MACzB,UAAU,QAAQ,OAAO,IAAI;KAChC;AACD,WAAO,IAAI,IAAI,WAAW;EAC9B;EAEA,WAAQ;AACJ,WAAO,KAAK,KAAI;EACpB;EAEA,OAAW,OAAmB;AAC1B,UAAM,WAAW,MAAM,OAAO,QAAQ,EAAC;AACvC,WAAO,IAAI,YACP,OAAO,EAAE,OAAO,KAAK,QAAO,GAAI,WAAW,MAAK,IAChD,WAAQ;AACJ,UAAI;AACJ,UAAI,CAAC,MAAM,WAAW;AAClB,WAAG;AACC,mBAAS,KAAK,OAAO,MAAM,KAAK;AAChC,cAAI,CAAC,OAAO,MAAM;AACd,mBAAO;;iBAEN,CAAC,OAAO;AACjB,cAAM,YAAY;;AAEtB,SAAG;AACC,iBAAS,SAAS,KAAI;AACtB,YAAI,CAAC,OAAO,MAAM;AACd,iBAAO;;eAEN,CAAC,OAAO;AACjB,aAAO;IACX,CAAC;EAET;EAEA,KAAK,YAAY,KAAG;AAChB,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,QAAQ;AACZ,QAAI;AACJ,QAAI,eAAe;AACnB,OAAG;AACC,eAAS,SAAS,KAAI;AACtB,UAAI,CAAC,OAAO,MAAM;AACd,YAAI,cAAc;AACd,mBAAS;;AAEb,iBAAS,SAAS,OAAO,KAAK;;AAElC,qBAAe;aACV,CAAC,OAAO;AACjB,WAAO;EACX;EAEA,QAAQ,eAAkB,YAAY,GAAC;AACnC,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,SAAS,aAAa,KAAK,UAAU,eAAe;AACpD,eAAO;;AAEX,aAAO,SAAS,KAAI;AACpB;;AAEJ,WAAO;EACX;EAeA,MAAM,WAAgC;AAClC,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,CAAC,UAAU,KAAK,KAAK,GAAG;AACxB,eAAO;;AAEX,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAEA,KAAK,WAAgC;AACjC,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,UAAU,KAAK,KAAK,GAAG;AACvB,eAAO;;AAEX,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAEA,QAAQ,YAA6C;AACjD,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,iBAAW,KAAK,OAAO,KAAK;AAC5B,aAAO,SAAS,KAAI;AACpB;;EAER;EAEA,IAAO,YAA2B;AAC9B,WAAO,IAAI,YACP,KAAK,SACL,CAAC,UAAS;AACN,YAAM,EAAE,MAAM,MAAK,IAAK,KAAK,OAAO,KAAK;AACzC,UAAI,MAAM;AACN,eAAO;aACJ;AACH,eAAO,EAAE,MAAM,OAAO,OAAO,WAAW,KAAK,EAAC;;IAEtD,CAAC;EAET;EAKA,OAAO,WAAgC;AACnC,WAAO,IAAI,YACP,KAAK,SACL,WAAQ;AACJ,UAAI;AACJ,SAAG;AACC,iBAAS,KAAK,OAAO,KAAK;AAC1B,YAAI,CAAC,OAAO,QAAQ,UAAU,OAAO,KAAK,GAAG;AACzC,iBAAO;;eAEN,CAAC,OAAO;AACjB,aAAO;IACX,CAAC;EAET;EAEA,cAAW;AACP,WAAO,KAAK,OAAO,OAAK,MAAM,UAAa,MAAM,IAAI;EACzD;EAIA,OAAU,YAA0D,cAAgB;AAChF,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,gBAAmC;AACvC,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,kBAAkB,QAAW;AAC7B,wBAAgB,KAAK;aAClB;AACH,wBAAgB,WAAW,eAAe,KAAK,KAAK;;AAExD,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAIA,YAAe,YAA0D,cAAgB;AACrF,WAAO,KAAK,gBAAgB,KAAK,SAAQ,GAAI,YAAY,YAAY;EACzE;EAEU,gBAAmB,UAAuB,YAA0D,cAAgB;AAC1H,UAAM,OAAO,SAAS,KAAI;AAC1B,QAAI,KAAK,MAAM;AACX,aAAO;;AAEX,UAAM,gBAAgB,KAAK,gBAAgB,UAAU,YAAY,YAAY;AAC7E,QAAI,kBAAkB,QAAW;AAC7B,aAAO,KAAK;;AAEhB,WAAO,WAAW,eAAe,KAAK,KAAK;EAC/C;EAIA,KAAK,WAAgC;AACjC,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,UAAU,KAAK,KAAK,GAAG;AACvB,eAAO,KAAK;;AAEhB,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAEA,UAAU,WAAgC;AACtC,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,QAAQ;AACZ,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,UAAU,K
AAK,KAAK,GAAG;AACvB,eAAO;;AAEX,aAAO,SAAS,KAAI;AACpB;;AAEJ,WAAO;EACX;EAEA,SAAS,eAAgB;AACrB,UAAM,WAAW,KAAK,SAAQ;AAC9B,QAAI,OAAO,SAAS,KAAI;AACxB,WAAO,CAAC,KAAK,MAAM;AACf,UAAI,KAAK,UAAU,eAAe;AAC9B,eAAO;;AAEX,aAAO,SAAS,KAAI;;AAExB,WAAO;EACX;EAEA,QAAW,YAAyC;AAEhD,WAAO,IAAI,YACP,OAAO,EAAE,MAAM,KAAK,QAAO,EAAE,IAC7B,CAAC,UAAS;AACN,SAAG;AACC,YAAI,MAAM,UAAU;AAChB,gBAAM,OAAO,MAAM,SAAS,KAAI;AAChC,cAAI,KAAK,MAAM;AACX,kBAAM,WAAW;iBACd;AACH,mBAAO;;;AAGf,cAAM,EAAE,MAAM,MAAK,IAAK,KAAK,OAAO,MAAM,IAAI;AAC9C,YAAI,CAAC,MAAM;AACP,gBAAM,SAAS,WAAW,KAAK;AAC/B,cAAI,WAAW,MAAM,GAAG;AACpB,kBAAM,WAAW,OAAO,OAAO,QAAQ,EAAC;iBACrC;AACH,mBAAO,EAAE,MAAM,OAAO,OAAO,OAAM;;;eAGtC,MAAM;AACf,aAAO;IACX,CAAC;EAET;EAEA,KAA2B,OAAS;AAChC,QAAI,UAAU,QAAW;AACrB,cAAQ;;AAEZ,QAAI,SAAS,GAAG;AACZ,aAAO;;AAEX,UAAMC,UAAS,QAAQ,IAAI,KAAK,KAAK,QAAQ,CAAC,IAAmC;AAEjF,WAAO,IAAI,YACP,OAAO,EAAE,MAAMA,QAAO,QAAO,EAAE,IAC/B,CAAC,UAAS;AACN,SAAG;AACC,YAAI,MAAM,UAAU;AAChB,gBAAM,OAAO,MAAM,SAAS,KAAI;AAChC,cAAI,KAAK,MAAM;AACX,kBAAM,WAAW;iBACd;AACH,mBAAO;;;AAGf,cAAM,EAAE,MAAM,MAAK,IAAKA,QAAO,OAAO,MAAM,IAAI;AAChD,YAAI,CAAC,MAAM;AACP,cAAI,WAAW,KAAK,GAAG;AACnB,kBAAM,WAAW,MAAM,OAAO,QAAQ,EAAC;iBACpC;AACH,mBAAO,EAAE,MAAM,OAAO,MAAY;;;eAGrC,MAAM;AACf,aAAO;IACX,CAAC;EAET;EAEA,OAAI;AACA,UAAM,WAAW,KAAK,SAAQ;AAC9B,UAAM,SAAS,SAAS,KAAI;AAC5B,QAAI,OAAO,MAAM;AACb,aAAO;;AAEX,WAAO,OAAO;EAClB;EAEA,KAAK,YAAY,GAAC;AACd,WAAO,IAAI,YACP,MAAK;AACD,YAAM,QAAQ,KAAK,QAAO;AAC1B,eAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAChC,cAAM,OAAO,KAAK,OAAO,KAAK;AAC9B,YAAI,KAAK,MAAM;AACX,iBAAO;;;AAGf,aAAO;IACX,GACA,KAAK,MAAM;EAEnB;EAEA,MAAM,SAAe;AACjB,WAAO,IAAI,YACP,OAAO,EAAE,MAAM,GAAG,OAAO,KAAK,QAAO,EAAE,IACvC,WAAQ;AACJ,YAAM;AACN,UAAI,MAAM,OAAO,SAAS;AACtB,eAAO;;AAEX,aAAO,KAAK,OAAO,MAAM,KAAK;IAClC,CAAC;EAET;EAEA,SAAkB,IAAwB;AACtC,UAAM,MAAM,oBAAI,IAAG;AACnB,WAAO,KAAK,OAAO,OAAI;AACnB,YAAM,QAAQ,KAAK,GAAG,CAAC,IAAI;AAC3B,UAAI,IAAI,IAAI,KAAK,GAAG;AAChB,eAAO;aACJ;AACH,YAAI,IAAI,KAAK;AACb,eAAO;;IAEf,CAAC;EACL;EAEA,QAAiB,OAAoB,KAAyB;AAC1D,UAAM,cAAc,oBAAI,IAAG;AAC3B,eAAW,QAAQ,OAAO;AACtB,YAAM,QAAQ,MAAM,IAAI,IAAI,IAAI;AAChC,kBAAY,IAAI,KAAK;;AAEzB,WAAO,KAAK,OAAO,OAAI;AACnB,YAAM,SAAS,MAAM,IAAI,CAAC,IAAI;AAC9B,aAAO,CAAC,YAAY,IAAI,MAAM;IAClC,CAAC;EACL;;AAGJ,SAAS,SAAS,MAAa;AAC3B,MAAI,OAAO,SAAS,UAAU;AAC1B,WAAO;;AAEX,MAAI,OAAO,SAAS,aAAa;AAC7B,WAAO;;AAGX,MAAI,OAAQ,KAAa,aAAa,YAAY;AAE9C,WAAQ,KAAa,SAAQ;;AAEjC,SAAO,OAAO,UAAU,SAAS,KAAK,IAAI;AAC9C;AAbS;AAeT,SAAS,WAAc,KAAY;AAC/B,SAAO,CAAC,CAAC,OAAO,OAAQ,IAAoB,OAAO,QAAQ,MAAM;AACrE;AAFS;AAQF,IAAM,eAA4B,IAAI,WAA2B,MAAM,QAAW,MAAM,WAAW;AAKnG,IAAM,cAA+C,OAAO,OAAO,EAAE,MAAM,MAAM,OAAO,OAAS,CAAE;AAKpG,SAAU,UAAa,aAA8C;AACvE,MAAI,YAAY,WAAW,GAAG;AAC1B,UAAM,aAAa,YAAY,CAAC;AAChC,QAAI,sBAAsB,YAAY;AAClC,aAAO;;AAEX,QAAI,WAAW,UAAU,GAAG;AACxB,aAAO,IAAI,WACP,MAAM,WAAW,OAAO,QAAQ,EAAC,GACjC,CAAC,aAAa,SAAS,KAAI,CAAE;;AAGrC,QAAI,OAAO,WAAW,WAAW,UAAU;AACvC,aAAO,IAAI,WACP,OAAO,EAAE,OAAO,EAAC,IACjB,CAAC,UAAS;AACN,YAAI,MAAM,QAAQ,WAAW,QAAQ;AACjC,iBAAO,EAAE,MAAM,OAAO,OAAO,WAAW,MAAM,OAAO,EAAC;eACnD;AACH,iBAAO;;MAEf,CAAC;;;AAIb,MAAI,YAAY,SAAS,GAAG;AAExB,WAAO,IAAI,WACP,OAAO,EAAE,WAAW,GAAG,UAAU,EAAC,IAClC,CAAC,UAAS;AACN,SAAG;AACC,YAAI,MAAM,UAAU;AAChB,gBAAM,OAAO,MAAM,SAAS,KAAI;AAChC,cAAI,CAAC,KAAK,MAAM;AACZ,mBAAO;;AAEX,gBAAM,WAAW;;AAErB,YAAI,MAAM,OAAO;AACb,cAAI,MAAM,WAAW,MAAM,MAAM,QAAQ;AACrC,mBAAO,EAAE,MAAM,OAAO,OAAO,MAAM,MAAM,MAAM,UAAU,EAAC;;AAE9D,gBAAM,QAAQ;AACd,gBAAM,WAAW;;AAErB,YAAI,MAAM,YAAY,YAAY,QAAQ;AACtC,gBAAM,aAAa,YAAY,MAAM,WAAW;AAChD,cAAI,WAAW,UAAU,GAAG;AACxB,kBAAM,WAAW,WAAW,OAAO,QAAQ,EAAC;qBACrC,cAAc,OAAO,WAAW,WAAW,UAAU;AAC5D,kBAAM,QAAQ;;;eAGjB,MAAM,YAAY,MAAM,SAAS,MAAM,YAAY,YAAY;AACxE,aAAO;IACX,CAAC;;AAGT,SAAO;AACX;AA3D
gB;AAoFV,IAAO,iBAAP,cACM,WAAiE;EAvxB7E,OAuxB6E;;;EAGzE,YAAY,MAAS,UAAoC,SAAmC;AACxF,UACI,OAAO;MACH,YAAW,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,eAAc,CAAC,CAAC,IAAI,EAAE,OAAO,QAAQ,EAAC,CAAE,IAAI,CAAC,SAAS,IAAI,EAAE,OAAO,QAAQ,EAAC,CAAE;MAClG,QAAQ;QAEZ,WAAQ;AACJ,UAAI,MAAM,QAAQ;AACd,cAAM,UAAU,IAAG;AACnB,cAAM,SAAS;;AAEnB,aAAO,MAAM,UAAU,SAAS,GAAG;AAC/B,cAAM,WAAW,MAAM,UAAU,MAAM,UAAU,SAAS,CAAC;AAC3D,cAAM,OAAO,SAAS,KAAI;AAC1B,YAAI,KAAK,MAAM;AACX,gBAAM,UAAU,IAAG;eAChB;AACH,gBAAM,UAAU,KAAK,SAAS,KAAK,KAAK,EAAE,OAAO,QAAQ,EAAC,CAAE;AAC5D,iBAAO;;;AAGf,aAAO;IACX,CAAC;EAET;EAES,WAAQ;AACb,UAAM,WAAW;MACb,OAAO,KAAK,QAAO;MACnB,MAAM,6BAAM,KAAK,OAAO,SAAS,KAAK,GAAhC;MACN,OAAO,6BAAK;AACR,iBAAS,MAAM,SAAS;MAC5B,GAFO;MAGP,CAAC,OAAO,QAAQ,GAAG,MAAM;;AAE7B,WAAO;EACX;;AAME,IAAW;CAAjB,SAAiBC,YAAS;AAKtB,WAAgB,IAAID,SAAsB;AACtC,WAAOA,QAAO,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;EAC3C;AAFgB;AAAA,EAAAC,WAAA,MAAG;AAOnB,WAAgB,QAAQD,SAAsB;AAC1C,WAAOA,QAAO,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;EAC3C;AAFgB;AAAA,EAAAC,WAAA,UAAO;AAOvB,WAAgB,IAAID,SAAsB;AACtC,WAAOA,QAAO,OAAO,CAAC,GAAG,MAAM,KAAK,IAAI,GAAG,CAAC,CAAC;EACjD;AAFgB;AAAA,EAAAC,WAAA,MAAG;AAOnB,WAAgB,IAAID,SAAsB;AACtC,WAAOA,QAAO,OAAO,CAAC,GAAG,MAAM,KAAK,IAAI,GAAG,CAAC,CAAC;EACjD;AAFgB;AAAA,EAAAC,WAAA,MAAG;AAIvB,GA9BiB,cAAA,YAAS,CAAA,EAAA;;;AFlzBpB,SAAU,UAAU,MAAa;AACnC,SAAO,IAAI,eAAe,MAAM,aAAU;AACtC,QAAI,mBAAmB,OAAO,GAAG;AAC7B,aAAO,QAAQ;WACZ;AACH,aAAO,CAAA;;EAEf,GAAG,EAAE,aAAa,KAAI,CAAE;AAC5B;AARgB;AAaV,SAAU,WAAW,MAAa;AACpC,SAAO,UAAU,IAAI,EAAE,OAAO,aAAa;AAC/C;AAFgB;AAOV,SAAU,YAAY,OAAgB,QAAe;AACvD,SAAO,MAAM,WAAW;AACpB,YAAQ,MAAM;AACd,QAAI,UAAU,QAAQ;AAClB,aAAO;;;AAGf,SAAO;AACX;AARgB;AAUV,SAAU,aAAa,OAAa;AAGtC,SAAO;IACH,OAAO;MACH,WAAW,MAAM,cAAe;MAChC,MAAM,MAAM,YAAa;;IAE7B,KAAK;MACD,WAAW,MAAM;MACjB,MAAM,MAAM,UAAW;;;AAGnC;AAbgB;AAiBV,SAAU,kBAAkB,MAAc;AAC5C,MAAI,CAAC,MAAM;AACP,WAAO;;AAEX,QAAM,EAAE,QAAQ,KAAK,MAAK,IAAK;AAC/B,SAAO;IACH;IACA;IACA;IACA,QAAQ,MAAM;;AAEtB;AAXgB;AAahB,IAAY;CAAZ,SAAYC,kBAAe;AACvB,EAAAA,iBAAAA,iBAAA,QAAA,IAAA,CAAA,IAAA;AACA,EAAAA,iBAAAA,iBAAA,OAAA,IAAA,CAAA,IAAA;AACA,EAAAA,iBAAAA,iBAAA,cAAA,IAAA,CAAA,IAAA;AACA,EAAAA,iBAAAA,iBAAA,aAAA,IAAA,CAAA,IAAA;AACA,EAAAA,iBAAAA,iBAAA,QAAA,IAAA,CAAA,IAAA;AACJ,GANY,oBAAA,kBAAe,CAAA,EAAA;AAQrB,SAAU,aAAa,OAAc,IAAS;AAChD,MAAI,MAAM,IAAI,OAAO,GAAG,MAAM,QAAS,MAAM,IAAI,SAAS,GAAG,MAAM,QAAQ,MAAM,IAAI,YAAY,MAAM,MAAM,WAAY;AACrH,WAAO,gBAAgB;aAChB,MAAM,MAAM,OAAO,GAAG,IAAI,QAAS,MAAM,MAAM,SAAS,GAAG,IAAI,QAAQ,MAAM,MAAM,YAAY,GAAG,IAAI,WAAY;AACzH,WAAO,gBAAgB;;AAE3B,QAAM,cAAc,MAAM,MAAM,OAAO,GAAG,MAAM,QAAS,MAAM,MAAM,SAAS,GAAG,MAAM,QAAQ,MAAM,MAAM,aAAa,GAAG,MAAM;AACjI,QAAM,YAAY,MAAM,IAAI,OAAO,GAAG,IAAI,QAAS,MAAM,IAAI,SAAS,GAAG,IAAI,QAAQ,MAAM,IAAI,aAAa,GAAG,IAAI;AACnH,MAAI,eAAe,WAAW;AAC1B,WAAO,gBAAgB;aAChB,aAAa;AACpB,WAAO,gBAAgB;SACpB;AACH,WAAO,gBAAgB;;AAE/B;AAfgB;AAiBV,SAAU,QAAQ,OAAc,IAAS;AAC3C,QAAM,aAAa,aAAa,OAAO,EAAE;AACzC,SAAO,aAAa,gBAAgB;AACxC;AAHgB;AAOT,IAAM,oBAAoB;AAQ3B,SAAU,4BAA4B,SAA8B,QAAgB,aAAa,mBAAiB;AACpH,MAAI,SAAS;AACT,QAAI,SAAS,GAAG;AACZ,YAAM,cAAc,SAAS,QAAQ;AACrC,YAAM,eAAe,QAAQ,KAAK,OAAO,WAAW;AACpD,UAAI,CAAC,WAAW,KAAK,YAAY,GAAG;AAChC;;;AAGR,WAAO,qBAAqB,SAAS,MAAM;;AAE/C,SAAO;AACX;AAZgB;AAcV,SAAU,gBAAgB,SAA8B,cAAsB;AAChF,MAAI,SAAS;AACT,UAAM,WAAW,gBAAgB,SAAS,IAAI;AAC9C,QAAI,YAAY,cAAc,UAAU,YAAY,GAAG;AACnD,aAAO;;AAEX,QAAI,cAAc,OAAO,GAAG;AAGxB,YAAM,WAAW,QAAQ,QAAQ,UAAU,OAAK,CAAC,EAAE,MAAM;AACzD,eAAS,IAAI,WAAW,GAAG,KAAK,GAAG,KAAK;AACpC,cAAM,QAAQ,QAAQ,QAAQ,CAAC;AAC/B,YAAI,cAAc,OAAO,YAAY,GAAG;AACpC,iBAAO;;;;;AAKvB,SAAO;AACX;AAnBgB;AAqBV,SAAU,cAAc,SAAkB,cAAsB;AAClE,SAAO,cAAc,OAAO,KAAK,aAAa,SAAS,QAAQ,UAAU,IAAI;AACjF;AAFgB;AAcV,SAAU,qBAAqB,MAAe,QAAc;AAC9D,MAAI,cAAc,
IAAI,GAAG;AACrB,WAAO;aACA,mBAAmB,IAAI,GAAG;AACjC,UAAM,eAAe,aAAa,MAAM,QAAQ,KAAK;AACrD,QAAI,cAAc;AACd,aAAO,qBAAqB,cAAc,MAAM;;;AAGxD,SAAO;AACX;AAVgB;AAsBV,SAAU,yBAAyB,MAAe,QAAc;AAClE,MAAI,cAAc,IAAI,GAAG;AACrB,WAAO;aACA,mBAAmB,IAAI,GAAG;AACjC,UAAM,eAAe,aAAa,MAAM,QAAQ,IAAI;AACpD,QAAI,cAAc;AACd,aAAO,yBAAyB,cAAc,MAAM;;;AAG5D,SAAO;AACX;AAVgB;AAYhB,SAAS,aAAa,MAAwB,QAAgB,SAAgB;AAC1E,MAAI,OAAO;AACX,MAAI,QAAQ,KAAK,QAAQ,SAAS;AAClC,MAAI,cAAmC;AAEvC,SAAO,QAAQ,OAAO;AAClB,UAAM,SAAS,KAAK,OAAO,OAAO,SAAS,CAAC;AAC5C,UAAM,aAAa,KAAK,QAAQ,MAAM;AAEtC,QAAI,WAAW,UAAU,UAAU,WAAW,MAAM,QAAQ;AAExD,aAAO;;AAGX,QAAI,WAAW,OAAO,QAAQ;AAE1B,oBAAc,UAAU,aAAa;AACrC,aAAO,SAAS;WACb;AAEH,cAAQ,SAAS;;;AAIzB,SAAO;AACX;AAzBS;AA2BH,SAAU,gBAAgB,MAAe,SAAS,MAAI;AACxD,SAAO,KAAK,WAAW;AACnB,UAAM,SAAS,KAAK;AACpB,QAAI,QAAQ,OAAO,QAAQ,QAAQ,IAAI;AACvC,WAAO,QAAQ,GAAG;AACd;AACA,YAAM,WAAW,OAAO,QAAQ,KAAK;AACrC,UAAI,UAAU,CAAC,SAAS,QAAQ;AAC5B,eAAO;;;AAGf,WAAO;;AAEX,SAAO;AACX;AAdgB;AAgBV,SAAU,YAAY,MAAe,SAAS,MAAI;AACpD,SAAO,KAAK,WAAW;AACnB,UAAM,SAAS,KAAK;AACpB,QAAI,QAAQ,OAAO,QAAQ,QAAQ,IAAI;AACvC,UAAM,OAAO,OAAO,QAAQ,SAAS;AACrC,WAAO,QAAQ,MAAM;AACjB;AACA,YAAM,OAAO,OAAO,QAAQ,KAAK;AACjC,UAAI,UAAU,CAAC,KAAK,QAAQ;AACxB,eAAO;;;AAGf,WAAO;;AAEX,SAAO;AACX;AAfgB;AAiBV,SAAU,iBAAiB,MAAa;AAC1C,MAAI,KAAK,MAAM,MAAM,cAAc,GAAG;AAClC,WAAO;;AAEX,QAAM,OAAO,KAAK,MAAM,MAAM;AAC9B,MAAI,OAAO;AACX,MAAI;AACJ,SAAO,KAAK,WAAW;AACnB,UAAM,SAAS,KAAK;AACpB,UAAM,YAAY,UAAK,QAAL,UAAK,SAAL,QAAS,OAAO,QAAQ,QAAQ,IAAI;AACtD,QAAI,cAAc,GAAG;AACjB,aAAO;AACP,cAAQ;WACL;AACH,cAAQ,YAAY;AACpB,aAAO,OAAO,QAAQ,KAAK;;AAE/B,QAAI,KAAK,MAAM,MAAM,SAAS,MAAM;AAChC;;AAEJ,WAAO;;AAEX,SAAO;AACX;AAvBgB;AAyBV,SAAU,iBAAiB,OAAgB,KAAY;AACzD,QAAM,eAAe,gBAAgB,OAAO,GAAG;AAC/C,MAAI,CAAC,cAAc;AACf,WAAO,CAAA;;AAEX,SAAO,aAAa,OAAO,QAAQ,MAAM,aAAa,IAAI,GAAG,aAAa,CAAC;AAC/E;AANgB;AAQhB,SAAS,gBAAgB,GAAY,GAAU;AAC3C,QAAM,WAAW,eAAe,CAAC;AACjC,QAAM,WAAW,eAAe,CAAC;AACjC,MAAI;AACJ,WAAS,IAAI,GAAG,IAAI,SAAS,UAAU,IAAI,SAAS,QAAQ,KAAK;AAC7D,UAAM,UAAU,SAAS,CAAC;AAC1B,UAAM,UAAU,SAAS,CAAC;AAC1B,QAAI,QAAQ,WAAW,QAAQ,QAAQ;AACnC,gBAAU;QACN,QAAQ,QAAQ;QAChB,GAAG,QAAQ;QACX,GAAG,QAAQ;;WAEZ;AACH;;;AAGR,SAAO;AACX;AAlBS;AA0BT,SAAS,eAAe,MAAa;AACjC,QAAM,QAAsB,CAAA;AAC5B,SAAO,KAAK,WAAW;AACnB,UAAM,SAAS,KAAK;AACpB,UAAM,QAAQ,OAAO,QAAQ,QAAQ,IAAI;AACzC,UAAM,KAAK;MACP;MACA;KACH;AACD,WAAO;;AAEX,SAAO,MAAM,QAAO;AACxB;AAZS;;;AGhUT;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACQM,IAAO,oBAAP,cAAiC,MAAK;EAR5C,OAQ4C;;;EACxC,YAAY,MAA2B,SAAe;AAClD,UAAM,OAAO,GAAG,OAAO,OAAO,KAAK,MAAM,MAAM,IAAI,IAAI,KAAK,MAAM,MAAM,SAAS,KAAK,OAAO;EACjG;;AAGE,SAAU,kBAAkB,GAAQ;AACtC,QAAM,IAAI,MAAM,yCAAyC;AAC7D;AAFgB;;;ACdhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AASO,IAAM,0BAA0B;EACnC,IAAI;EACJ,QAAQ;EACR,QAAQ;EACR,cAAc;EACd,IAAI;EACJ,YAAY;EACZ,YAAY;;AAKT,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAMT,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAMT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAMV,SAAU,cAAc,MAAa;AACvC,SAAO,gBAAgB,IAAI,KAAK,SAAS,aAAa,SAAS,WAAW,SAAS,aAAa,SAAS,WAAW,SAAS,cAAc,SAAS,aAAa,SAAS,YAAY,SAAS,YAAY,SAAS,eAAe,SAAS,aAAa,SAAS,cAAc,SAAS,UAAU,SAAS,UAAU,SAAS,WAAW,SAAS,YAAY,SAAS,UAAW,OAAO,SAAS,YAAa,qBAAqB,KAAK,IAAI;AAClb;AAFgB;AAMV,SAAU,gBAAgB,MAAa;AACzC,SAAO,SAAS,YAAY,SAAS,YAAY,SAAS,aAAa,SAAS,UAAU,SAAS;AACvG;AAFgB;AAMT,IAAM,iBAAiB;AAExB,SAAU,iBAAiB,MAAa;AAC1C,SAAO,WAAW,WAAW,MAAM,cAAc;AACrD;AAFgB;AAMT,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAUT,IAAM,kBAAkB;AAEzB,SAAU,kBAAkB,MAAa;AAC3C,SAAO,WAAW,WAAW,MAAM,eAAe;AACtD;AAFgB;AAUT,
IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAUT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAUT,IAAM,iBAAiB;AAExB,SAAU,iBAAiB,MAAa;AAC1C,SAAO,WAAW,WAAW,MAAM,cAAc;AACrD;AAFgB;AAWT,IAAM,cAAc;AAErB,SAAU,cAAc,MAAa;AACvC,SAAO,WAAW,WAAW,MAAM,WAAW;AAClD;AAFgB;AAWT,IAAM,cAAc;AAErB,SAAU,cAAc,MAAa;AACvC,SAAO,WAAW,WAAW,MAAM,WAAW;AAClD;AAFgB;AAiBT,IAAM,UAAU;AAEjB,SAAU,UAAU,MAAa;AACnC,SAAO,WAAW,WAAW,MAAM,OAAO;AAC9C;AAFgB;AAUT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAUT,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAYT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAYT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAUT,IAAM,WAAW;AAElB,SAAU,WAAW,MAAa;AACpC,SAAO,WAAW,WAAW,MAAM,QAAQ;AAC/C;AAFgB;AAUT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAUT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAUT,IAAM,qBAAqB;AAE5B,SAAU,qBAAqB,MAAa;AAC9C,SAAO,WAAW,WAAW,MAAM,kBAAkB;AACzD;AAFgB;AAoBT,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAUT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAUT,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAYT,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAUT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAcT,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAWT,IAAM,OAAO;AAEd,SAAU,OAAO,MAAa;AAChC,SAAO,WAAW,WAAW,MAAM,IAAI;AAC3C;AAFgB;AAaT,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAUT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAYT,IAAM,SAAS;AAEhB,SAAU,SAAS,MAAa;AAClC,SAAO,WAAW,WAAW,MAAM,MAAM;AAC7C;AAFgB;AAST,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAWT,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAUT,IAAM,iBAAiB;AAExB,SAAU,iBAAiB,MAAa;AAC1C,SAAO,WAAW,WAAW,MAAM,cAAc;AACrD;AAFgB;AAWT,IAAM,iBAAiB;AAExB,SAAU,iBAAiB,MAAa;AAC1C,SAAO,WAAW,WAAW,MAAM,cAAc;AACrD;AAFgB;AAQT,IAAM,YAAY;AAEnB,SAAU,YAAY,MAAa;AACrC,SAAO,WAAW,WAAW,MAAM,SAAS;AAChD;AAFgB;AAUT,IAAM,QAAQ;AAEf,SAAU,QAAQ,MAAa;AACjC,SAAO,WAAW,WAAW,MAAM,KAAK;AAC5C;AAFgB;AAUT,IAAM,UAAU;AAEjB,SAAU,UAAU,MAAa;AACnC,SAAO,WAAW,WAAW,MAAM,OAAO;AAC9C;AAFgB;AAST,IAAM,eAAe;AAEtB,SAAU,eAAe,MAAa;AACxC,SAAO,WAAW,WAAW,MAAM,YAAY;AACnD;AAFgB;AAST,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAUT,IAAM,WAAW;AAElB,SAAU,WAAW,MAAa;AACpC,SAAO,WAAW,WAAW,MAAM,QAAQ;AAC/C;AAFgB;AAST,IAAM,uBAAuB;AAE9B,SAAU,uBAAuB,MAAa;AAChD,SAAO,WAAW,WAAW,MAAM,oBAAoB;AAC3D;AAFgB;AAST,IAAM,gBAAgB;AAEvB,SAAU,gBAAgB,MAAa;AACzC,SAAO,WAAW,WAAW,MAAM,aAAa;AACpD;AAFgB;AAST,IAAM,mBAAmB;AAE1B,SAAU,mBAAmB,MAAa;AAC5C,SAAO,WAAW,WAAW,MAAM,gBAAgB;AACvD;AAFgB;AAST,IAAM,iBAAiB;AAExB,SAAU,iBAAiB,MAAa;AAC1C,SAAO,WAAW,WAAW,MAAM,cAAc;AACrD;AAFgB;AAST,IAAM,aAAa;AAEpB,SAAU,aAAa,MAAa;AACtC,SAAO,WAAW,WAAW,MAAM,UAAU;AACjD;AAFgB;AAQT,IAAM,WAAW;AAElB,SAAU,WAAW,MAAa;AACpC,SAAO,WAAW,WAAW,MAAM,QAAQ;AAC/C;AAFgB;AAqDV,IAAO,8BAAP,cAA2C,sBAAqB;EA7nBtE,OA6nBsE;;;EAElE,cAAW;AACP,WAAO,CAAC,mBAAmB,gBAAgB,gBAAgB,UAAU,gBAAgB,gBAAgB,aAAa,cAAc,kBAAkB,kBAAkB,aAAa,eAAe,kBAAkB,eAAe,aAAa,WAAW,iBAAiB,SAAS,gBAAgB,aAAa,WAAW,iBAAiB,gBAAgB,YAAY,iBAAiB,aAAa,sBAAsB,cAAc,iBAAiB,cAAc,cAAc,YAAY,cAAc,iBAAiB,wBAAwB,iBAAiB,gBAAgB,oBAAoB,QAAQ,iBAAiB,kBAAkB,aAAa,kBAAkB,cAA
c,gBAAgB,UAAU;EACrsB;EAEmB,iBAAiB,SAAiB,WAAiB;AAClE,YAAQ,SAAS;MACb,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK,UAAU;AACX,eAAO,KAAK,UAAU,iBAAiB,SAAS;;MAEpD,KAAK;MACL,KAAK;MACL,KAAK,eAAe;AAChB,eAAO,KAAK,UAAU,cAAc,SAAS;;MAEjD,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK,WAAW;AACZ,eAAO,KAAK,UAAU,gBAAgB,SAAS;;MAEnD,KAAK,gBAAgB;AACjB,eAAO,KAAK,UAAU,WAAW,SAAS,KAAK,KAAK,UAAU,cAAc,SAAS;;MAEzF,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK,oBAAoB;AACrB,eAAO,KAAK,UAAU,WAAW,SAAS;;MAE9C,KAAK;MACL,KAAK;MACL,KAAK,MAAM;AACP,eAAO,KAAK,UAAU,cAAc,SAAS;;MAEjD,KAAK,YAAY;AACb,eAAO,KAAK,UAAU,cAAc,SAAS,KAAK,KAAK,UAAU,cAAc,SAAS;;MAE5F,KAAK,cAAc;AACf,eAAO,KAAK,UAAU,cAAc,SAAS;;MAEjD,SAAS;AACL,eAAO;;;EAGnB;EAEA,iBAAiB,SAAsB;AACnC,UAAM,cAAc,GAAG,QAAQ,UAAU,KAAK,IAAI,QAAQ,QAAQ;AAClE,YAAQ,aAAa;MACjB,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK,sBAAsB;AACvB,eAAO;;MAEX,KAAK;MACL,KAAK;MACL,KAAK,iBAAiB;AAClB,eAAO;;MAEX,KAAK,wBAAwB;AACzB,eAAO;;MAEX,KAAK;MACL,KAAK,gCAAgC;AACjC,eAAO;;MAEX,KAAK,yBAAyB;AAC1B,eAAO;;MAEX,SAAS;AACL,cAAM,IAAI,MAAM,GAAG,WAAW,+BAA+B;;;EAGzE;EAEA,gBAAgB,MAAY;AACxB,YAAQ,MAAM;MACV,KAAK,mBAAmB;AACpB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,gBAAgB;AACjB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;;;;MAIhD,KAAK,aAAa;AACd,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;;;;MAIjC,KAAK,kBAAkB;AACnB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,QAAQ,cAAc,MAAK;;;;MAI/C,KAAK,eAAe;AAChB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,eAAe;AAChB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,WAAW;AACZ,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,uBAAuB,cAAc,MAAK;YAClD,EAAE,MAAM,gBAAgB,cAAc,CAAA,EAAE;YACxC,EAAE,MAAM,WAAW,cAAc,CAAA,EAAE;YACnC,EAAE,MAAM,cAAc,cAAc,CAAA,EAAE;YACtC,EAAE,MAAM,cAAc,cAAc,MAAK;YACzC,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,SAAS,cAAc,CAAA,EAAE;YACjC,EAAE,MAAM,SAAS,cAAc,CAAA,EAAE;YACjC,EAAE,MAAM,gBAAgB,cAAc,CAAA,EAAE;;;;MAIpD,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,gBAAgB;AACjB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,aAAa;AACd,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAc,cAAc,CAAA,EAAE;YACtC,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,cAAc,cAAc,CAAA,EAAE;;;;MAIlD,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,gBAAgB,cAAc,MAAK;YAC3C,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,YAAY;AACb,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,aAAa;AACd,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,sBAAsB;AACvB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,WAAU;YAClB,EAAE,MAAM,uBAAuB,cAAc,MAAK;YAClD,EAAE,MAAM,aAAY;YACpB,EAAE,MAAM,SAAS,cAAc,MAAK;YACpC,EAAE,MAAM,YAAY,cAAc,MAAK;YACvC,EAAE,MAAM,gBAAgB,cAAc,CAAA,EAAE;YACxC,EAAE,MAAM,eAAc;YACtB,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,cAAc,cAAc,CAAA,EAAE;YACtC,EAAE,MAAM,aAAY;YACpB,EAAE,MAAM,YAAY,cAAc,MAAK;;;;MAInD,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,gBAAe;;;;MAInC,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,gBAAe;YACvB,EAAE,MAAM,aAAY;YACpB,EAAE,MAAM,UAAS;;;;MAI7B,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,gBAAgB;AACjB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,aAAY;YACpB,EAAE,MAAM,YAAY,cAAc,MAAK;YACvC,EAAE,MAAM,UAAU,cAAc,MAAK;YACrC,EAAE,MAAM,
OAAM;YACd,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,QAAQ;AACT,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,eAAc;YACtB,EAAE,MAAM,cAAc,cAAc,MAAK;YACzC,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,aAAa;AACd,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,SAAS,cAAc,CAAA,EAAE;;;;MAI7C,KAAK,UAAU;AACX,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,UAAS;YACjB,EAAE,MAAM,eAAc;YACtB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,WAAU;YAClB,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,gBAAgB;AACjB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;YACpC,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,UAAS;YACjB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,WAAU;YAClB,EAAE,MAAM,WAAU;;;;MAI9B,KAAK,kBAAkB;AACnB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,OAAM;YACd,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,kBAAkB;AACnB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,oBAAoB,cAAc,MAAK;YAC/C,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,WAAU;YAClB,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,aAAa;AACd,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,SAAS;AACV,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;YACpC,EAAE,MAAM,iBAAgB;YACxB,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,WAAW;AACZ,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,gBAAgB;AACjB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,WAAU;;;;MAI9B,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,QAAO;;;;MAI3B,KAAK,YAAY;AACb,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,aAAa,cAAc,CAAA,EAAE;YACrC,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,wBAAwB;AACzB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;YACpC,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,iBAAiB;AAClB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;YACpC,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,oBAAoB;AACrB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,OAAM;;;;MAI1B,KAAK,kBAAkB;AACnB,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAY,cAAc,CAAA,EAAE;YACpC,EAAE,MAAM,YAAW;;;;MAI/B,KAAK,cAAc;AACf,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;YACnB,EAAE,MAAM,WAAU;;;;MAI9B,KAAK,YAAY;AACb,eAAO;UACH,MAAM;UACN,YAAY;YACR,EAAE,MAAM,cAAa;YACrB,EAAE,MAAM,YAAW;;;;MAI/B,SAAS;AACL,eAAO;UACH,MAAM;UACN,YAAY,CAAA;;;;EAI5B;;AAGG,IAAM,aAAa,IAAI,4BAA2B;;;AC7nCzD;;;;;;;;;;;;;;;AAkBM,SAAU,uBAAuB,MAAa;AAChD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC9C,QAAI,CAAC,KAAK,WAAW,GAAG,GAAG;AACvB,UAAI,MAAM,QAAQ,KAAK,GAAG;AACtB,cAAM,QAAQ,CAAC,MAAM,UAAS;AAC1B,cAAI,UAAU,IAAI,GAAG;AAChB,iBAA0B,aAAa;AACvC,iBAA0B,qBAAqB;AAC/C,iBAA0B,kBAAkB;;QAErD,CAAC;iBACM,UAAU,KAAK,GAAG;AACxB,cAA2B,aAAa;AACxC,cAA2B,qBAAqB;;;;AAIjE;AAjBgB;AAwBV,SAAU,mBAAsC,MAA2B,eAAqC;AAClH,MAAI,OAAO;AACX,SAAO,MAAM;AACT,QAAI,cAAc,IAAI,GAAG;AACrB,aAAO;;AAEX,WAAO,KAAK;;AAEhB,SAAO;AACX;AATgB;AAeV,SAAU,mBAAmB,MAA2B,WAAkC;AAC5F,MAAI,OAAO;AACX,SAAO,MAAM;AACT,QAAI,UAAU,IAAI,GAAG;AACjB,aAAO;;AAEX,WAAO,KAAK;;AAEhB,SAAO;AACX;AATgB;AAiBV,SAAU,YAAyC,MAAa;AAClE,QAAM,WAAW,aAAa,IAAI;AAClC,QAAM,SAAS,SAAS;AACxB,MAAI,CAAC,QAAQ;AACT,UAAM,IAAI,MAAM,2BAA2B;;AAE/C,SAAO;AACX;AAPgB;AAYV,SAAU,aAAa,MAAa;AACtC,SAAO,KAAK,YAAY;AACpB,WAAO,KAAK;;AAEhB,SAAO;AACX;AALgB;AAkBV,SAAU,eAAe,MAAe,SAA0B;AACpE,MAAI,CAAC,MAAM;AACP,UAAM,IAAI,MAAM,0BAA0B;;AAE9C,QAAM,QA
AQ,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS;AAEvB,SAAO,IAAI,WAA2B,OAAO;IACzC,MAAM,OAAO,KAAK,IAAI;IACtB,UAAU;IACV,YAAY;MACZ,WAAQ;AACR,WAAO,MAAM,WAAW,MAAM,KAAK,QAAQ;AACvC,YAAM,WAAW,MAAM,KAAK,MAAM,QAAQ;AAC1C,UAAI,CAAC,SAAS,WAAW,GAAG,GAAG;AAC3B,cAAM,QAAS,KAAwB,QAAQ;AAC/C,YAAI,UAAU,KAAK,GAAG;AAClB,gBAAM;AACN,cAAI,iBAAiB,OAAO,KAAK,GAAG;AAChC,mBAAO,EAAE,MAAM,OAAO,MAAK;;mBAExB,MAAM,QAAQ,KAAK,GAAG;AAC7B,iBAAO,MAAM,aAAa,MAAM,QAAQ;AACpC,kBAAM,QAAQ,MAAM;AACpB,kBAAM,UAAU,MAAM,KAAK;AAC3B,gBAAI,UAAU,OAAO,KAAK,iBAAiB,SAAS,KAAK,GAAG;AACxD,qBAAO,EAAE,MAAM,OAAO,OAAO,QAAO;;;AAG5C,gBAAM,aAAa;;;AAG3B,YAAM;;AAEV,WAAO;EACX,CAAC;AACL;AAnCgB;AAyCV,SAAU,kBAAkB,MAAe,SAA0B;AACvE,MAAI,CAAC,MAAM;AACP,UAAM,IAAI,MAAM,+BAA+B;;AAEnD,SAAO,IAAI,eAAe,MAAM,UAAQ,eAAe,MAAM,OAAO,CAAC;AACzE;AALgB;AAWV,SAAU,UAAU,MAAe,SAA0B;AAC/D,MAAI,CAAC,MAAM;AACP,UAAM,IAAI,MAAM,+BAA+B;cACxC,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,UAAS,CAAC,iBAAiB,MAAM,QAAQ,KAAK,GAAG;AAEjE,WAAO,IAAI,eAAe,MAAM,MAAM,CAAA,CAAE;;AAE5C,SAAO,IAAI,eAAe,MAAM,UAAQ,eAAe,MAAM,OAAO,GAAG,EAAE,aAAa,KAAI,CAAE;AAChG;AARgB;AAUhB,SAAS,iBAAiB,SAAkB,OAAa;;AACrD,MAAI,CAAC,OAAO;AACR,WAAO;;AAEX,QAAM,aAAY,KAAA,QAAQ,cAAQ,QAAA,OAAA,SAAA,SAAA,GAAE;AACpC,MAAI,CAAC,WAAW;AACZ,WAAO;;AAEX,SAAO,QAAQ,WAAW,KAAK;AACnC;AATS;AAeH,SAAU,iBAAiB,MAAa;AAE1C,SAAO,IAAI,WAAiC,OAAO;IAC/C,MAAM,OAAO,KAAK,IAAI;IACtB,UAAU;IACV,YAAY;MACZ,WAAQ;AACR,WAAO,MAAM,WAAW,MAAM,KAAK,QAAQ;AACvC,YAAM,WAAW,MAAM,KAAK,MAAM,QAAQ;AAC1C,UAAI,CAAC,SAAS,WAAW,GAAG,GAAG;AAC3B,cAAM,QAAS,KAAwB,QAAQ;AAC/C,YAAI,YAAY,KAAK,GAAG;AACpB,gBAAM;AACN,iBAAO,EAAE,MAAM,OAAO,OAAO,EAAE,WAAW,OAAO,WAAW,MAAM,SAAQ,EAAE;mBACrE,MAAM,QAAQ,KAAK,GAAG;AAC7B,iBAAO,MAAM,aAAa,MAAM,QAAQ;AACpC,kBAAM,QAAQ,MAAM;AACpB,kBAAM,UAAU,MAAM,KAAK;AAC3B,gBAAI,YAAY,OAAO,GAAG;AACtB,qBAAO,EAAE,MAAM,OAAO,OAAO,EAAE,WAAW,SAAS,WAAW,MAAM,UAAU,MAAK,EAAE;;;AAG7F,gBAAM,aAAa;;;AAG3B,YAAM;;AAEV,WAAO;EACX,CAAC;AACL;AA7BgB;AAqCV,SAAU,oBAAoB,YAAqB,SAAS,YAAY,UAAU,EAAE,YAAY,OAAK;AACvG,QAAM,OAAoB,CAAA;AAC1B,YAAU,MAAM,EAAE,QAAQ,UAAO;AAC7B,qBAAiB,IAAI,EAAE,QAAQ,aAAU;AACrC,UAAI,QAAQ,UAAU,QAAQ,YAAY;AACtC,aAAK,KAAK,QAAQ,SAAS;;IAEnC,CAAC;EACL,CAAC;AACD,SAAO,OAAO,IAAI;AACtB;AAVgB;AAkBV,SAAU,0BAA0BC,aAA2B,MAAa;AAC9E,QAAM,eAAeA,YAAW,gBAAgB,KAAK,KAAK;AAC1D,QAAM,cAAc;AACpB,aAAW,YAAY,aAAa,YAAY;AAE5C,QAAI,SAAS,iBAAiB,UAAa,YAAY,SAAS,IAAI,MAAM,QAAW;AACjF,kBAAY,SAAS,IAAI,IAAI,iBAAiB,SAAS,YAAY;;;AAG/E;AATgB;AAWhB,SAAS,iBAAiB,cAA0B;AAChD,MAAI,MAAM,QAAQ,YAAY,GAAG;AAC7B,WAAO,CAAC,GAAG,aAAa,IAAI,gBAAgB,CAAC;SAC1C;AACH,WAAO;;AAEf;AANS;AAcH,SAAU,YAAyC,MAAS,gBAAsH;AACpL,QAAM,OAAuB,EAAE,OAAO,KAAK,MAAK;AAEhD,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC9C,QAAI,CAAC,KAAK,WAAW,GAAG,GAAG;AACvB,UAAI,UAAU,KAAK,GAAG;AAClB,aAAK,IAAI,IAAI,YAAY,OAAO,cAAc;iBACvC,YAAY,KAAK,GAAG;AAC3B,aAAK,IAAI,IAAI,eACT,MACA,MACA,MAAM,UACN,MAAM,QAAQ;iBAEX,MAAM,QAAQ,KAAK,GAAG;AAC7B,cAAM,cAAyB,CAAA;AAC/B,mBAAW,WAAW,OAAO;AACzB,cAAI,UAAU,OAAO,GAAG;AACpB,wBAAY,KAAK,YAAY,SAAS,cAAc,CAAC;qBAC9C,YAAY,OAAO,GAAG;AAC7B,wBAAY,KACR,eACI,MACA,MACA,QAAQ,UACR,QAAQ,QAAQ,CACnB;iBAEF;AACH,wBAAY,KAAK,OAAO;;;AAGhC,aAAK,IAAI,IAAI;aACV;AACH,aAAK,IAAI,IAAI;;;;AAKzB,yBAAuB,IAAI;AAC3B,SAAO;AACX;AAzCgB;;;ACrQhB;;;;;;;;;;;;;ACEM,SAAU,GAAG,MAAY;AAC7B,SAAO,KAAK,WAAW,CAAC;AAC1B;AAFgB;AAIV,SAAU,YAAe,MAAe,KAAQ;AACpD,MAAI,MAAM,QAAQ,IAAI,GAAG;AACvB,SAAK,QAAQ,SAAU,SAAO;AAC5B,UAAI,KAAK,OAAO;IAClB,CAAC;SACI;AACL,QAAI,KAAK,IAAI;;AAEjB;AARgB;AAUV,SAAU,QACd,SACA,SAAkD;AAElD,MAAI,QAAQ,OAAO,MAAM,MAAM;AAC7B,UAAM,oBAAoB;;AAG5B,QAAM,IAAa,QAAQ,OAAO;AAClC,UAAQ,OAAO,IAAI;AACrB;AAVgB;AAYV,SAAU,cAA0B,KAAQ;AAEhD,MAAI,QAAQ,QAAW;AACrB,UAAM,MAAM,yCAAyC;;AAEvD,SAAO;AACT;AANgB;AASV,SAAU,0BAAuB;AACrC,QAAM,MAAM,yCAAyC;AACvD;AAFgB;
AAIV,SAAU,YAAY,KAAqB;AAC/C,SAAO,IAAI,MAAM,MAAM;AACzB;AAFgB;;;ACvCT,IAAM,kBAA4B,CAAA;AACzC,SAAS,IAAI,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK;AACvC,kBAAgB,KAAK,CAAC;;AAGjB,IAAM,gBAA0B,CAAC,GAAG,GAAG,CAAC,EAAE,OAAO,eAAe;AACvE,SAAS,IAAI,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK;AACvC,gBAAc,KAAK,CAAC;;AAGtB,SAAS,IAAI,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK;AACvC,gBAAc,KAAK,CAAC;;AAIf,IAAM,kBAA4B;EACvC,GAAG,GAAG;EACN,GAAG,IAAI;EACP,GAAG,IAAI;EACP,GAAG,IAAI;EACP,GAAG,GAAI;EACP,GAAG,IAAI;EACP,GAAG,GAAI;EACP,GAAG,MAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;EACX,GAAG,QAAQ;;;;ACZb,IAAM,kBAAkB;AACxB,IAAM,iBAAiB;AACvB,IAAM,uBAAuB;AAIvB,IAAO,eAAP,MAAmB;EArBzB,OAqByB;;;EAAzB,cAAA;AACY,SAAA,MAAc;AACd,SAAA,QAAgB;AAChB,SAAA,WAAmB;EA+xB/B;EA7xBY,YAAS;AACjB,WAAO;MACL,KAAK,KAAK;MACV,OAAO,KAAK;MACZ,UAAU,KAAK;;EAEnB;EAEU,aAAaC,WAItB;AACC,SAAK,MAAMA,UAAS;AACpB,SAAK,QAAQA,UAAS;AACtB,SAAK,WAAWA,UAAS;EAC3B;EAEO,QAAQ,OAAa;AAE1B,SAAK,MAAM;AACX,SAAK,QAAQ;AACb,SAAK,WAAW;AAEhB,SAAK,YAAY,GAAG;AACpB,UAAM,QAAQ,KAAK,YAAW;AAC9B,SAAK,YAAY,GAAG;AAEpB,UAAM,QAAqB;MACzB,MAAM;MACN,KAAK,EAAE,OAAO,KAAK,KAAK,KAAK,MAAM,OAAM;MACzC,QAAQ;MACR,YAAY;MACZ,WAAW;MACX,SAAS;MACT,QAAQ;;AAGV,WAAO,KAAK,aAAY,GAAI;AAC1B,cAAQ,KAAK,QAAO,GAAI;QACtB,KAAK;AACH,kBAAQ,OAAO,QAAQ;AACvB;QACF,KAAK;AACH,kBAAQ,OAAO,YAAY;AAC3B;QACF,KAAK;AACH,kBAAQ,OAAO,WAAW;AAC1B;QACF,KAAK;AACH,kBAAQ,OAAO,SAAS;AACxB;QACF,KAAK;AACH,kBAAQ,OAAO,QAAQ;AACvB;;;AAIN,QAAI,KAAK,QAAQ,KAAK,MAAM,QAAQ;AAClC,YAAM,MAAM,sBAAsB,KAAK,MAAM,UAAU,KAAK,GAAG,CAAC;;AAElE,WAAO;MACL,MAAM;MACN;MACA;MACA,KAAK,KAAK,IAAI,CAAC;;EAEnB;EAEU,cAAW;AACnB,UAAM,OAAO,CAAA;AACb,UAAM,QAAQ,KAAK;AAEnB,SAAK,KAAK,KAAK,YAAW,CAAE;AAE5B,WAAO,KAAK,SAAQ,MAAO,KAAK;AAC9B,WAAK,YAAY,GAAG;AACpB,WAAK,KAAK,KAAK,YAAW,CAAE;;AAG9B,WAAO,EAAE,MAAM,eAAe,OAAO,MAAM,KAAK,KAAK,IAAI,KAAK,EAAC;EACjE;EAEU,cAAW;AACnB,UAAM,QAAQ,CAAA;AACd,UAAM,QAAQ,KAAK;AAEnB,WAAO,KAAK,OAAM,GAAI;AACpB,YAAM,KAAK,KAAK,KAAI,CAAE;;AAGxB,WAAO,EAAE,MAAM,eAAe,OAAO,OAAO,KAAK,KAAK,IAAI,KAAK,EAAC;EAClE;EAEU,OAAI;AACZ,QAAI,KAAK,YAAW,GAAI;AACtB,aAAO,KAAK,UAAS;WAChB;AACL,aAAO,KAAK,KAAI;;EAEpB;EAEU,YAAS;AACjB,UAAM,QAAQ,KAAK;AACnB,YAAQ,KAAK,QAAO,GAAI;MACtB,KAAK;AACH,eAAO;UACL,MAAM;UACN,KAAK,KAAK,IAAI,KAAK;;MAEvB,KAAK;AACH,eAAO,EAAE,MAAM,aAAa,KAAK,KAAK,IAAI,KAAK,EAAC;MAElD,KAAK;AACH,gBAAQ,KAAK,QAAO,GAAI;UACtB,KAAK;AACH,mBAAO;cACL,MAAM;cACN,KAAK,KAAK,IAAI,KAAK;;UAEvB,KAAK;AACH,mBAAO;cACL,MAAM;cACN,KAAK,KAAK,IAAI,KAAK;;;AAIzB,cAAM,MAAM,0BAA0B;MAExC,KAAK;AACH,aAAK,YAAY,GAAG;AAEpB,YAAI;AACJ,gBAAQ,KAAK,QAAO,GAAI;UACtB,KAAK;AACH,mBAAO;AACP;UACF,KAAK;AACH,mBAAO;AACP;;AAEJ,sBAAc,IAAI;AAElB,cAAM,cAAc,KAAK,YAAW;AAEpC,aAAK,YAAY,GAAG;AAEpB,eAAO;UACL;UACA,OAAO;UACP,KAAK,KAAK,IAAI,KAAK;;;AAIzB,WAAO,wBAAuB;EAChC;EAEU,WACR,iBAA0B,OAAK;AAE/B,QAAI,QAAyC;AAC7C,UAAM,QAAQ,KAAK;AACnB,YAAQ,KAAK,QAAO,GAAI;MACtB,KAAK;AACH,gBAAQ;UACN,SAAS;UACT,QAAQ;;AAEV;MACF,KAAK;AACH,gBAAQ;UACN,SAAS;UACT,QAAQ;;AAEV;MACF,KAAK;AACH,gBAAQ;UACN,SAAS;UACT,QAAQ;;AAEV;MACF,KAAK;AACH,cAAM,UAAU,KAAK,qBAAoB;AACzC,gBAAQ,KAAK,QAAO,GAAI;UACtB,KAAK;AACH,oBAAQ;cACN;cACA,QAAQ;;AAEV;UACF,KAAK;AACH,gBAAI;AACJ,gBAAI,KAAK,QAAO,GAAI;AAClB,uBAAS,KAAK,qBAAoB;AAClC,sBAAQ;gBACN;gBACA;;mBAEG;AACL,sBAAQ;gBACN;gBACA,QAAQ;;;AAGZ,iBAAK,YAAY,GAAG;AACpB;;AAIJ,YAAI,mBAAmB,QAAQ,UAAU,QAAW;AAClD,iBAAO;;AAET,sBAAc,KAAK;AACnB;;AAKJ,QAAI,mBAAmB,QAAQ,UAAU,QAAW;AAClD,aAAO;;AAIT,QAAI,cAAc,KAAK,GAAG;AACxB,UAAI,KAAK,SAAS,CAAC,MAAM,KAAK;AAC5B,aAAK,YAAY,GAAG;AA
CpB,cAAM,SAAS;aACV;AACL,cAAM,SAAS;;AAGjB,YAAM,OAAO;AACb,YAAM,MAAM,KAAK,IAAI,KAAK;AAC1B,aAAO;;EAEX;EAEU,OAAI;AACZ,QAAIC;AACJ,UAAM,QAAQ,KAAK;AACnB,YAAQ,KAAK,SAAQ,GAAI;MACvB,KAAK;AACH,QAAAA,QAAO,KAAK,OAAM;AAClB;MACF,KAAK;AACH,QAAAA,QAAO,KAAK,WAAU;AACtB;MACF,KAAK;AACH,QAAAA,QAAO,KAAK,eAAc;AAC1B;MACF,KAAK;AACH,QAAAA,QAAO,KAAK,MAAK;AACjB;;AAGJ,QAAIA,UAAS,UAAa,KAAK,mBAAkB,GAAI;AACnD,MAAAA,QAAO,KAAK,iBAAgB;;AAI9B,QAAI,cAAoBA,KAAI,GAAG;AAC7B,MAAAA,MAAK,MAAM,KAAK,IAAI,KAAK;AAEzB,UAAI,KAAK,aAAY,GAAI;AACvB,QAAAA,MAAK,aAAa,KAAK,WAAU;;AAGnC,aAAOA;;AAIT,WAAO,wBAAuB;EAChC;EAEU,SAAM;AACd,SAAK,YAAY,GAAG;AACpB,WAAO;MACL,MAAM;MACN,YAAY;MACZ,OAAO,CAAC,GAAG,IAAI,GAAG,GAAG,IAAI,GAAG,GAAG,QAAQ,GAAG,GAAG,QAAQ,CAAC;;EAE1D;EAEU,aAAU;AAClB,SAAK,YAAY,IAAI;AAErB,YAAQ,KAAK,SAAQ,GAAI;MACvB,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO,KAAK,kBAAiB;MAC/B,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO,KAAK,qBAAoB;MAClC,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO,KAAK,kBAAiB;MAC/B,KAAK;AACH,eAAO,KAAK,wBAAuB;MACrC,KAAK;AACH,eAAO,KAAK,iBAAgB;MAC9B,KAAK;AACH,eAAO,KAAK,sBAAqB;MACnC,KAAK;AACH,eAAO,KAAK,gCAA+B;MAC7C;AACE,eAAO,KAAK,mBAAkB;;EAEpC;EAEU,oBAAiB;AACzB,UAAM,QAAQ,KAAK,gBAAe;AAElC,WAAO,EAAE,MAAM,sBAAsB,MAAY;EACnD;EAEU,uBAAoB;AAC5B,QAAI;AACJ,QAAI,aAAa;AACjB,YAAQ,KAAK,QAAO,GAAI;MACtB,KAAK;AACH,cAAM;AACN;MACF,KAAK;AACH,cAAM;AACN,qBAAa;AACb;MACF,KAAK;AACH,cAAM;AACN;MACF,KAAK;AACH,cAAM;AACN,qBAAa;AACb;MACF,KAAK;AACH,cAAM;AACN;MACF,KAAK;AACH,cAAM;AACN,qBAAa;AACb;;AAIJ,QAAI,cAAc,GAAG,GAAG;AACtB,aAAO,EAAE,MAAM,OAAO,OAAO,KAAK,WAAsB;;AAG1D,WAAO,wBAAuB;EAChC;EAEU,oBAAiB;AACzB,QAAI;AACJ,YAAQ,KAAK,QAAO,GAAI;MACtB,KAAK;AACH,qBAAa,GAAG,IAAI;AACpB;MACF,KAAK;AACH,qBAAa,GAAG,IAAI;AACpB;MACF,KAAK;AACH,qBAAa,GAAG,IAAI;AACpB;MACF,KAAK;AACH,qBAAa,GAAG,GAAI;AACpB;MACF,KAAK;AACH,qBAAa,GAAG,IAAI;AACpB;;AAIJ,QAAI,cAAc,UAAU,GAAG;AAC7B,aAAO,EAAE,MAAM,aAAa,OAAO,WAAU;;AAG/C,WAAO,wBAAuB;EAChC;EAEU,0BAAuB;AAC/B,SAAK,YAAY,GAAG;AACpB,UAAM,SAAS,KAAK,QAAO;AAC3B,QAAI,WAAW,KAAK,MAAM,MAAM,OAAO;AACrC,YAAM,MAAM,UAAU;;AAGxB,UAAM,aAAa,OAAO,YAAW,EAAG,WAAW,CAAC,IAAI;AACxD,WAAO,EAAE,MAAM,aAAa,OAAO,WAAU;EAC/C;EAEU,mBAAgB;AAGxB,SAAK,YAAY,GAAG;AACpB,WAAO,EAAE,MAAM,aAAa,OAAO,GAAG,IAAI,EAAC;EAC7C;EAEU,wBAAqB;AAC7B,SAAK,YAAY,GAAG;AACpB,WAAO,KAAK,eAAe,CAAC;EAC9B;EAEU,kCAA+B;AACvC,SAAK,YAAY,GAAG;AACpB,WAAO,KAAK,eAAe,CAAC;EAC9B;EAEU,qBAAkB;AAG1B,UAAM,cAAc,KAAK,QAAO;AAChC,WAAO,EAAE,MAAM,aAAa,OAAO,GAAG,WAAW,EAAC;EACpD;EAEU,4BAAyB;AACjC,YAAQ,KAAK,SAAQ,GAAI;MAEvB,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;AACH,cAAM,MAAM,KAAK;MACnB;AACE,cAAM,WAAW,KAAK,QAAO;AAC7B,eAAO,EAAE,MAAM,aAAa,OAAO,GAAG,QAAQ,EAAC;;EAErD;EAEU,iBAAc;AACtB,UAAM,MAA0B,CAAA;AAChC,QAAI,aAAa;AACjB,SAAK,YAAY,GAAG;AACpB,QAAI,KAAK,SAAS,CAAC,MAAM,KAAK;AAC5B,WAAK,YAAY,GAAG;AACpB,mBAAa;;AAGf,WAAO,KAAK,YAAW,GAAI;AACzB,YAAM,OAAO,KAAK,UAAS;AAC3B,YAAM,mBAAmB,KAAK,SAAS;AACvC,UAAI,YAAY,IAAI,KAAK,KAAK,YAAW,GAAI;AAC3C,aAAK,YAAY,GAAG;AACpB,cAAM,KAAK,KAAK,UAAS;AACzB,cAAM,iBAAiB,GAAG,SAAS;AAGnC,YAAI,YAAY,EAAE,GAAG;AACnB,cAAI,GAAG,QAAQ,KAAK,OAAO;AACzB,kBAAM,MAAM,uCAAuC;;AAErD,cAAI,KAAK,EAAE,MAAM,KAAK,OAAO,IAAI,GAAG,MAAK,CAAE;eACtC;AAEL,sBAAY,KAAK,OAAO,GAAG;AAC3B,cAAI,KAAK,GAAG,GAAG,CAAC;AAChB,sBAAY,GAAG,OAAO,GAAG;;aAEtB;AACL,oBAAY,KAAK,OAAO,GAAG;;;AAI/B,SAAK,YAAY,GAAG;AAEpB,WAAO,EAAE,MAAM,OAAO,YAAwB,OAAO,IAAG;EAC1D;EAEU,YAAS;AACjB,YAAQ,KAAK,SAAQ,GAAI;MAEvB,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;AACH,cAAM,MAAM,KAAK;MACnB,KAAK;AACH,eAAO,KAAK,YAAW;MACzB;AACE,eAAO,KAAK,0BAAyB;;EAE3C;EAEU,cAAW;AACnB,SAAK,YAAY,IAAI;AACrB,YAAQ,KAAK,SAAQ,GAAI;MAGvB,
KAAK;AACH,aAAK,YAAY,GAAG;AACpB,eAAO,EAAE,MAAM,aAAa,OAAO,GAAG,IAAQ,EAAC;MACjD,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO,KAAK,qBAAoB;MAClC,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO,KAAK,kBAAiB;MAC/B,KAAK;AACH,eAAO,KAAK,wBAAuB;MACrC,KAAK;AACH,eAAO,KAAK,iBAAgB;MAC9B,KAAK;AACH,eAAO,KAAK,sBAAqB;MACnC,KAAK;AACH,eAAO,KAAK,gCAA+B;MAC7C;AACE,eAAO,KAAK,mBAAkB;;EAEpC;EAEU,QAAK;AACb,QAAI,YAAY;AAChB,SAAK,YAAY,GAAG;AACpB,YAAQ,KAAK,SAAS,CAAC,GAAG;MACxB,KAAK;AACH,aAAK,YAAY,GAAG;AACpB,aAAK,YAAY,GAAG;AACpB,oBAAY;AACZ;MACF;AACE,aAAK;AACL;;AAEJ,UAAM,QAAQ,KAAK,YAAW;AAC9B,SAAK,YAAY,GAAG;AAEpB,UAAM,WAA+B;MACnC,MAAM;MACN;MACA;;AAGF,QAAI,WAAW;AACb,eAAS,KAAK,IAAI,KAAK;;AAGzB,WAAO;EACT;EAEU,kBAAe;AACvB,QAAI,SAAS,KAAK,QAAO;AAIzB,QAAI,qBAAqB,KAAK,MAAM,MAAM,OAAO;AAC/C,YAAM,MAAM,8BAA8B;;AAG5C,WAAO,eAAe,KAAK,KAAK,SAAS,CAAC,CAAC,GAAG;AAC5C,gBAAU,KAAK,QAAO;;AAGxB,WAAO,SAAS,QAAQ,EAAE;EAC5B;EAEU,uBAAoB;AAC5B,QAAI,SAAS,KAAK,QAAO;AACzB,QAAI,eAAe,KAAK,MAAM,MAAM,OAAO;AACzC,YAAM,MAAM,sBAAsB;;AAGpC,WAAO,eAAe,KAAK,KAAK,SAAS,CAAC,CAAC,GAAG;AAC5C,gBAAU,KAAK,QAAO;;AAGxB,WAAO,SAAS,QAAQ,EAAE;EAC5B;EAEU,mBAAgB;AACxB,UAAM,WAAW,KAAK,QAAO;AAC7B,YAAQ,UAAU;MAEhB,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;MAEL,KAAK;AAEH,cAAM,MAAM,KAAK;MACnB;AACE,eAAO,EAAE,MAAM,aAAa,OAAO,GAAG,QAAQ,EAAC;;EAErD;EACU,eAAY;AACpB,YAAQ,KAAK,SAAS,CAAC,GAAG;MACxB,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO;MACT;AACE,eAAO;;EAEb;EAEU,cAAW;AACnB,WAAO,KAAK,SAAQ,MAAO,OAAO,KAAK,YAAY,CAAC;EACtD;EAEU,UAAO;AACf,WAAO,eAAe,KAAK,KAAK,SAAS,CAAC,CAAC;EAC7C;EAEU,YAAY,UAAU,GAAC;AAC/B,YAAQ,KAAK,SAAS,OAAO,GAAG;MAC9B,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO;MACT;AACE,eAAO;;EAEb;EAEU,SAAM;AACd,WAAO,KAAK,OAAM,KAAM,KAAK,YAAW;EAC1C;EAEU,SAAM;AACd,QAAI,KAAK,mBAAkB,GAAI;AAC7B,aAAO;;AAGT,YAAQ,KAAK,SAAS,CAAC,GAAG;MACxB,KAAK;MACL,KAAK;MACL,KAAK;MAEL,KAAK;AACH,eAAO;MACT;AACE,eAAO;;EAEb;EAEU,cAAW;AACnB,YAAQ,KAAK,SAAS,CAAC,GAAG;MACxB,KAAK;MACL,KAAK;AACH,eAAO;MAET,KAAK;AACH,gBAAQ,KAAK,SAAS,CAAC,GAAG;UACxB,KAAK;UACL,KAAK;AACH,mBAAO;UACT;AACE,mBAAO;;MAGb,KAAK;AACH,eACE,KAAK,SAAS,CAAC,MAAM,QACpB,KAAK,SAAS,CAAC,MAAM,OAAO,KAAK,SAAS,CAAC,MAAM;MAEtD;AACE,eAAO;;EAEb;EAEU,eAAY;AACpB,UAAM,YAAY,KAAK,UAAS;AAChC,QAAI;AACF,aAAO,KAAK,WAAW,IAAI,MAAM;aAC1B,GAAG;AACV,aAAO;;AAEP,WAAK,aAAa,SAAS;;EAE/B;EAEU,qBAAkB;AAC1B,YAAQ,KAAK,SAAQ,GAAI;MACvB,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;MACL,KAAK;AACH,eAAO;MACT;AACE,eAAO;;EAEb;EAEU,eAAe,SAAe;AACtC,QAAI,YAAY;AAChB,aAAS,IAAI,GAAG,IAAI,SAAS,KAAK;AAChC,YAAM,UAAU,KAAK,QAAO;AAC5B,UAAI,gBAAgB,KAAK,OAAO,MAAM,OAAO;AAC3C,cAAM,MAAM,+BAA+B;;AAE7C,mBAAa;;AAEf,UAAM,WAAW,SAAS,WAAW,EAAE;AACvC,WAAO,EAAE,MAAM,aAAa,OAAO,SAAQ;EAC7C;EAEU,SAAS,UAAU,GAAC;AAC5B,WAAO,KAAK,MAAM,KAAK,MAAM,OAAO;EACtC;EAEU,UAAO;AACf,UAAM,WAAW,KAAK,SAAS,CAAC;AAChC,SAAK,YAAY,MAAS;AAC1B,WAAO;EACT;EAEU,YAAY,MAAwB;AAC5C,QAAI,SAAS,UAAa,KAAK,MAAM,KAAK,GAAG,MAAM,MAAM;AACvD,YAAM,MACJ,gBACE,OACA,mBACA,KAAK,MAAM,KAAK,GAAG,IACnB,kBACA,KAAK,GAAG;;AAId,QAAI,KAAK,OAAO,KAAK,MAAM,QAAQ;AACjC,YAAM,MAAM,yBAAyB;;AAEvC,SAAK;EACP;EAEU,IAAI,OAAa;AACzB,WAAO,EAAE,OAAc,KAAK,KAAK,IAAG;EACtC;;;;ACvzBI,IAAO,oBAAP,MAAwB;EAA9B,OAA8B;;;EACrB,cAAc,MAAgB;AACnC,eAAW,OAAO,MAAM;AACtB,YAAM,QAAS,KAAa,GAAG;AAE/B,UAAI,KAAK,eAAe,GAAG,GAAG;AAC5B,YAAI,MAAM,SAAS,QAAW;AAC5B,eAAK,MAAM,KAAK;mBACP,MAAM,QAAQ,KAAK,GAAG;AAC/B,gBAAM,QAAQ,CAAC,aAAY;AACzB,iBAAK,MAAM,QAAQ;UACrB,GAAG,IAAI;;;;EAIf;EAEO,MAAM,MAAmB;AAC9B,YAAQ,
KAAK,MAAM;MACjB,KAAK;AACH,aAAK,aAAa,IAAI;AACtB;MACF,KAAK;AACH,aAAK,WAAW,IAAI;AACpB;MACF,KAAK;AACH,aAAK,iBAAiB,IAAI;AAC1B;MACF,KAAK;AACH,aAAK,iBAAiB,IAAI;AAC1B;MACF,KAAK;AACH,aAAK,iBAAiB,IAAI;AAC1B;MACF,KAAK;AACH,aAAK,eAAe,IAAI;AACxB;MACF,KAAK;AACH,aAAK,kBAAkB,IAAI;AAC3B;MACF,KAAK;AACH,aAAK,qBAAqB,IAAI;AAC9B;MACF,KAAK;AACH,aAAK,eAAe,IAAI;AACxB;MACF,KAAK;AACH,aAAK,uBAAuB,IAAI;AAChC;MACF,KAAK;AACH,aAAK,eAAe,IAAI;AACxB;MACF,KAAK;AACH,aAAK,SAAS,IAAI;AAClB;MACF,KAAK;AACH,aAAK,WAAW,IAAI;AACpB;MACF,KAAK;AACH,aAAK,wBAAwB,IAAI;AACjC;MACF,KAAK;AACH,aAAK,gBAAgB,IAAI;AACzB;;AAGJ,SAAK,cAAc,IAAI;EACzB;EAEO,aAAa,MAAmB;EAAS;EAEzC,WAAW,MAAiB;EAAS;EAErC,iBAAiB,MAAiB;EAAS;EAE3C,iBAAiB,MAAiB;EAAS;;EAG3C,iBAAiB,MAAe;EAAS;EAEzC,eAAe,MAAe;EAAS;EAEvC,kBAAkB,MAAe;EAAS;EAE1C,qBAAqB,MAAe;EAAS;EAE7C,eAAe,MAAe;EAAS;EAEvC,uBAAuB,MAAe;EAAS;;EAG/C,eAAe,MAAe;EAAS;EAEvC,SAAS,MAAS;EAAS;EAE3B,WAAW,MAAW;EAAS;EAE/B,wBAAwB,MAAwB;EAAS;EAEzD,gBAAgB,MAAgB;EAAS;;;;AJzG3C,IAAM,iBAAiB;AAE9B,IAAM,eAAe,IAAI,aAAY;AAerC,IAAM,wBAAN,cAAoC,kBAAiB;EA1BrD,OA0BqD;;;EAArD,cAAA;;AAEY,SAAA,aAAa;AAEb,SAAA,iBAA2B,CAAA;AACnC,SAAA,YAAY;EAoEhB;EAjEI,IAAI,WAAQ;AACR,WAAO,KAAK,eAAe,KAAK,EAAE;EACtC;EAEA,MAAM,OAAa;AACf,SAAK,YAAY;AACjB,SAAK,QAAQ;AACb,SAAK,cAAc;AACnB,SAAK,aAAa;AAClB,SAAK,iBAAiB,CAAA;EAC1B;EAES,WAAW,MAAW;AAC3B,QAAI,KAAK,YAAY;AACjB,WAAK,aAAa;AAClB,WAAK,iBAAiB,CAAA;;EAE9B;EAES,eAAe,MAAe;AACnC,UAAM,OAAO,OAAO,aAAa,KAAK,KAAK;AAC3C,QAAI,CAAC,KAAK,aAAa,SAAS,MAAM;AAClC,WAAK,YAAY;;AAErB,QAAI,KAAK,YAAY;AACjB,WAAK,aAAa;AAClB,WAAK,iBAAiB,CAAA;WACnB;AACH,YAAM,cAAc,aAAa,IAAI;AACrC,WAAK,eAAe,KAAK,WAAW;AACpC,UAAI,KAAK,YAAY;AACjB,aAAK,eAAe;;;EAGhC;EAES,SAAS,MAAS;AACvB,QAAI,CAAC,KAAK,WAAW;AACjB,YAAM,MAAM,KAAK,MAAM,UAAU,KAAK,IAAI,OAAO,KAAK,IAAI,GAAG;AAC7D,YAAM,QAAQ,IAAI,OAAO,GAAG;AAC5B,WAAK,YAAY,QAAQ,KAAK,MAAM,KAAK,CAAC;;AAE9C,QAAI,KAAK,YAAY;AACjB,WAAK,aAAa;AAClB,WAAK,iBAAiB,CAAA;WACnB;AACH,YAAM,MAAM,KAAK,MAAM,UAAU,KAAK,IAAI,OAAO,KAAK,IAAI,GAAG;AAC7D,WAAK,eAAe,KAAK,GAAG;AAC5B,UAAI,KAAK,YAAY;AACjB,aAAK,eAAe;;;EAGhC;EAES,cAAc,MAAgB;AACnC,QAAI,KAAK,SAAS,SAAS;AAGvB,YAAM,QAAQ;AACd,UAAI,MAAM,YAAY;AAClB;;;AAGR,UAAM,cAAc,IAAI;EAC5B;;AAGJ,IAAM,UAAU,IAAI,sBAAqB;AAEnC,SAAU,iBAAiB,QAAuB;AACpD,MAAI;AACA,QAAI,OAAO,WAAW,UAAU;AAC5B,eAAS,OAAO;;AAEpB,aAAS,IAAI,MAAM;AACnB,UAAM,UAAU,aAAa,QAAQ,MAAM;AAC3C,UAAM,QAA+C,CAAA;AACrD,eAAW,eAAe,QAAQ,MAAM,OAAO;AAC3C,cAAQ,MAAM,MAAM;AACpB,cAAQ,MAAM,WAAW;AACzB,YAAM,KAAK;QACP,OAAO,QAAQ;QACf,KAAK,QAAQ;OAChB;;AAEL,WAAO;WACT,IAAM;AACJ,WAAO,CAAA;;AAEf;AApBgB;AAsBV,SAAU,mBAAmB,QAAuB;AACtD,MAAI;AACA,QAAI,OAAO,WAAW,UAAU;AAC5B,eAAS,IAAI,OAAO,MAAM;;AAE9B,aAAS,OAAO,SAAQ;AACxB,YAAQ,MAAM,MAAM;AAEpB,YAAQ,MAAM,aAAa,QAAQ,MAAM,CAAC;AAC1C,WAAO,QAAQ;WACjB,IAAM;AACJ,WAAO;;AAEf;AAbgB;AAeV,SAAU,aAAa,OAAsB;AAC/C,QAAM,SAAS,OAAO,UAAU,WAAW,IAAI,OAAO,KAAK,IAAI;AAC/D,SAAO,OAAO,KAAK,GAAG;AAC1B;AAHgB;AAKV,SAAU,aAAa,OAAa;AACtC,SAAO,MAAM,QAAQ,uBAAuB,MAAM;AACtD;AAFgB;AAIV,SAAU,0BAA0B,SAAe;AACrD,SAAO,MAAM,UAAU,IAAI,KAAK,SAAS,YACrC,KAAK,KAAK,MAAM,IAAI,IAAI,OAAO,YAAW,CAAE,GAAG,OAAO,YAAW,CAAE,MAAM,aAAa,MAAM,CAAC,EAC/F,KAAK,EAAE;AACb;AAJgB;AAYV,SAAU,eAAe,OAAwB,OAAa;AAChE,QAAM,UAAU,cAAc,KAAK;AACnC,QAAM,QAAQ,MAAM,MAAM,OAAO;AACjC,SAAO,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,SAAS;AACxC;AAJgB;AAYV,SAAU,cAAc,OAAsB;AAChD,MAAI,OAAO,UAAU,UAAU;AAC3B,YAAQ,IAAI,OAAO,KAAK;;AAE5B,QAAM,KAAK,OAAO,SAAS,MAAM;AACjC,MAAI,IAAI;AAER,WAASC,WAAO;AACZ,QAAI,SAAS,IACT;AAEJ,aAAS,UAAU,SAAe;AAC9B,gBAAU,OAAO,OAAO,GAAG,OAAO;AAClC,WAAK;IACT;AAHS;AAKT,aAAS,eAAe,SAAe;AACnC,gBAAU,QAAQ,OAAO,OAAO,GAAG,OAAO,IAAI;AAC9C,WAAK;IACT;AAHS;AAKT,WAAO,IAAI,OAAO,QAAQ;AACtB,cAAQ,OAAO,CAAC,GAAG;QACf,KAAK;AACD,kBAAQ,OAAO,IAAI,CAAC,GAAG;YACnB,KA
AK;AACD,6BAAe,CAAC;AAChB;YACJ,KAAK;AACD,6BAAe,CAAC;AAChB;YACJ,KAAK;AACD,kBAAI,GAAG,SAAS;AACZ,oBAAI,OAAO,IAAI,CAAC,MAAM,KAAK;AACvB,iCAAe,OAAO,QAAQ,KAAK,CAAC,IAAI,IAAI,CAAC;uBAC1C;AACH,iCAAe,CAAC;;qBAEjB;AACH,+BAAe,CAAC;;AAEpB;YACJ,KAAK;YACL,KAAK;AACD,kBAAI,GAAG,SAAS;AACZ,+BAAe,OAAO,QAAQ,KAAK,CAAC,IAAI,IAAI,CAAC;qBAC1C;AACH,+BAAe,CAAC;;AAEpB;YACJ,KAAK;AACD,6BAAe,OAAO,QAAQ,KAAK,CAAC,IAAI,IAAI,CAAC;AAC7C;YACJ;AACI,6BAAe,CAAC;AAChB;;AAER;QAEJ,KAAK;AACD,gBAAM;AACN,cAAI,YAAY;AAChB,gBAAM,IAAI,KAAK,MAAM,KAAK,CAAA;AAC1B,yBAAe,IAAI,CAAC,EAAE,MAAM;AAC5B;QAEJ,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;QACL,KAAK;AACD,oBAAU,CAAC;AACX;QACJ,KAAK;AACD,gBAAM;AACN,cAAI,YAAY;AAChB,gBAAM,IAAI,KAAK,MAAM;AACrB,cAAI,KAAK;AACL,sBAAU,IAAI,CAAC,EAAE,MAAM;iBACpB;AACH,2BAAe,CAAC;;AAEpB;QACJ,KAAK;AACD,cAAI,OAAO,IAAI,CAAC,MAAM,KAAK;AACvB,oBAAQ,OAAO,IAAI,CAAC,GAAG;cACnB,KAAK;AACD,0BAAU;AACV,qBAAK;AACL,0BAAUA,SAAO,IAAK;AACtB;cACJ,KAAK;AACD,0BAAU;AACV,qBAAK;AACL,0BAAUA,SAAO,IAAK;AACtB;cACJ,KAAK;AACD,sBAAM;AACN,qBAAK;AACL,gBAAAA,SAAO;AACP,0BAAU,OAAO,OAAO,KAAK,IAAI,GAAG;AACpC;cACJ,KAAK;AACD,wBAAQ,OAAO,IAAI,CAAC,GAAG;kBACnB,KAAK;kBACL,KAAK;AACD,0BAAM;AACN,yBAAK;AACL,oBAAAA,SAAO;AACP,8BAAU,OAAO,OAAO,KAAK,IAAI,GAAG;AACpC;kBACJ;AACI,8BAAU,OAAO,QAAQ,KAAK,CAAC,IAAI,IAAI,CAAC;AACxC,8BAAUA,SAAO,IAAK;AACtB;;AAER;;iBAEL;AACH,sBAAU,CAAC;AACX,sBAAUA,SAAO,IAAK;;AAE1B;QACJ,KAAK;AACD,YAAE;AACF,iBAAO;QACX;AACI,yBAAe,CAAC;AAChB;;;AAIZ,WAAO;EACX;AA/HS,SAAAA,UAAA;AAiIT,SAAO,IAAI,OAAOA,SAAO,GAAI,MAAM,KAAK;AAC5C;AAzIgB;;;AJ3JV,SAAU,aAAa,SAAoB;AAC7C,SAAO,QAAQ,MAAM,KAAK,OAAS,aAAa,CAAC,KAAK,EAAE,KAAK;AACjE;AAFgB;AAOV,SAAU,eAAe,SAAoB;AAC/C,SAAO,QAAQ,MAAM,OAAO,CAAC,MAAiC,eAAe,CAAC,KAAK,EAAE,MAAM;AAC/F;AAFgB;AAYV,SAAU,qBAAqB,SAAsB,cAAqB;AAC5E,QAAM,YAAY,oBAAI,IAAG;AACzB,QAAM,YAAY,aAAa,OAAO;AACtC,MAAI,CAAC,WAAW;AACZ,WAAO,IAAI,IAAI,QAAQ,KAAK;;AAGhC,QAAM,eAAe,CAAC,SAA6B,EAAE,OAAO,eAAe,OAAO,CAAC;AACnF,aAAW,QAAQ,cAAc;AAC7B,YAAQ,MAAM,WAAW,YAAY;;AAGzC,QAAM,QAAQ,oBAAI,IAAG;AACrB,aAAW,QAAQ,QAAQ,OAAO;AAC9B,QAAI,UAAU,IAAI,KAAK,IAAI,KAAU,eAAe,IAAI,KAAK,KAAK,QAAS;AACvE,YAAM,IAAI,IAAI;;;AAGtB,SAAO;AACX;AAnBgB;AAqBhB,SAAS,QAAQ,MAAwB,YAAyB,cAAqB;AACnF,aAAW,IAAI,KAAK,IAAI;AACxB,oBAAkB,IAAI,EAAE,QAAQ,UAAO;AACnC,QAAQ,WAAW,IAAI,KAAM,gBAAoB,mBAAmB,IAAI,GAAI;AACxE,YAAM,UAAU,KAAK,KAAK;AAC1B,UAAI,WAAW,CAAC,WAAW,IAAI,QAAQ,IAAI,GAAG;AAC1C,gBAAQ,SAAS,YAAY,YAAY;;;EAGrD,CAAC;AACL;AAVS;AAoBH,SAAU,0BAA0B,UAA4B;AAClE,MAAI,SAAS,UAAU;AACnB,WAAO,SAAS;aACT,SAAS,KAAK,KAAK;AAC1B,UAAM,gBAAgB,mBAAmB,SAAS,KAAK,GAAG;AAC1D,WAAO,kBAAa,QAAb,kBAAa,SAAA,SAAb,cAAe;;AAE1B,SAAO;AACX;AARgB;AAeV,SAAU,kBAAkB,cAA8B;AAC5D,SAAO,aAAa,UAAU,CAAC,cAAc,YAAY,EAAE,KAAK,GAAG;AACvE;AAFgB;AAUV,SAAU,qBAAqB,MAA2B,UAA4B;AACxF,MAAI,CAAC,QAAQ,CAAC,UAAU;AACpB,WAAO,CAAA;;AAEX,SAAO,6BAA6B,MAAM,UAAU,KAAK,SAAS,IAAI;AAC1E;AALgB;AAgBV,SAAU,oBAAoB,MAA2B,UAA8B,OAAc;AACvG,MAAI,CAAC,QAAQ,CAAC,UAAU;AACpB,WAAO;;AAEX,QAAM,QAAQ,6BAA6B,MAAM,UAAU,KAAK,SAAS,IAAI;AAC7E,MAAI,MAAM,WAAW,GAAG;AACpB,WAAO;;AAEX,MAAI,UAAU,QAAW;AACrB,YAAQ,KAAK,IAAI,GAAG,KAAK,IAAI,OAAO,MAAM,SAAS,CAAC,CAAC;SAClD;AACH,YAAQ;;AAEZ,SAAO,MAAM,KAAK;AACtB;AAdgB;AAgBhB,SAAS,6BAA6B,MAAe,UAAkB,SAA8BC,QAAc;AAC/G,MAAI,CAACA,QAAO;AACR,UAAM,cAAc,mBAAmB,KAAK,eAAmB,YAAY;AAC3E,QAAI,eAAe,YAAY,YAAY,UAAU;AACjD,aAAO,CAAC,IAAI;;;AAGpB,MAAI,mBAAmB,IAAI,KAAK,KAAK,YAAY,SAAS;AACtD,WAAO,KAAK,QAAQ,QAAQ,OAAK,6BAA6B,GAAG,UAAU,SAAS,KAAK,CAAC;;AAE9F,SAAO,CAAA;AACX;AAXS;AAmBH,SAAU,oBAAoB,MAA2B,SAAe;AAC1E,MAAI,CAAC,MAAM;AACP,WAAO,CAAA;;AAEX,SAAO,4BAA4B,MAAM,SAAS,SAAI,QAAJ,SAAI,SAAA,SAAJ,KAAM,OAAO;AACnE;AALgB;AAgBV,SAAU,mBAAmB,MAA2B,SAAiB,OAAc;AACzF,MAAI,CAAC,MAAM;AACP,WAAO;;AAEX,QAAM,QAAQ,4BAA4B,MAAM
,SAAS,SAAI,QAAJ,SAAI,SAAA,SAAJ,KAAM,OAAO;AACtE,MAAI,MAAM,WAAW,GAAG;AACpB,WAAO;;AAEX,MAAI,UAAU,QAAW;AACrB,YAAQ,KAAK,IAAI,GAAG,KAAK,IAAI,OAAO,MAAM,SAAS,CAAC,CAAC;SAClD;AACH,YAAQ;;AAEZ,SAAO,MAAM,KAAK;AACtB;AAdgB;AAgBV,SAAU,4BAA4B,MAAe,SAAiB,SAA4B;AACpG,MAAI,KAAK,YAAY,SAAS;AAC1B,WAAO,CAAA;;AAEX,MAAQ,UAAU,KAAK,aAAa,KAAK,KAAK,cAAc,UAAU,SAAS;AAC3E,WAAO,CAAC,IAAI;;AAEhB,QAAM,eAAe,UAAU,IAAI,EAAE,SAAQ;AAC7C,MAAI;AACJ,QAAM,eAA0B,CAAA;AAChC,KAAG;AACC,aAAS,aAAa,KAAI;AAC1B,QAAI,CAAC,OAAO,MAAM;AACd,YAAM,YAAY,OAAO;AACzB,UAAI,UAAU,YAAY,SAAS;AAC/B,YAAQ,UAAU,UAAU,aAAa,KAAK,UAAU,cAAc,UAAU,SAAS;AACrF,uBAAa,KAAK,SAAS;;aAE5B;AACH,qBAAa,MAAK;;;WAGrB,CAAC,OAAO;AACjB,SAAO;AACX;AAxBgB;AAgCV,SAAU,eAAe,SAAgB;;AAC3C,QAAM,UAAU,QAAQ;AAGxB,SAAO,cAAY,KAAA,QAAQ,eAAS,QAAA,OAAA,SAAA,SAAA,GAAE,UAAS;AAC3C,UAAM,aAAa,mBAAmB,QAAQ,eAAmB,YAAY;AAC7E,QAAI,YAAY;AACZ,aAAO;;AAEX,cAAU,QAAQ;;AAEtB,SAAO;AACX;AAZgB;AAmBV,SAAU,mBAAmB,MAAsB;AACrD,MAAI,YAAqB;AACzB,MAAQ,eAAe,SAAS,GAAG;AAE/B,QAAQ,SAAS,UAAU,UAAU,GAAG;AAEpC,kBAAY,UAAU,WAAW;eACtB,aAAa,UAAU,UAAU,GAAG;AAE/C,kBAAY,UAAU;WACnB;AACH,wBAAkB,UAAU,UAAU;;;AAG9C,SAAO,2BAA2B,MAAM,WAAW,oBAAI,IAAG,CAAE;AAChE;AAfgB;AAiBhB,SAAS,2BAA2B,MAAwB,WAAoB,OAAwD;;AAEpI,WAAS,GAAG,MAAe,SAAyB;AAChD,QAAI,kBAA8C;AAClD,UAAM,mBAAmB,mBAAmB,MAAU,YAAY;AAElE,QAAI,CAAC,kBAAkB;AACnB,wBAAkB,2BAA2B,SAAS,SAAS,KAAK;;AAExE,UAAM,IAAI,MAAM,eAAe;AAC/B,WAAO;EACX;AATS;AAWT,MAAI,MAAM,IAAI,IAAI,GAAG;AACjB,WAAO,MAAM,IAAI,IAAI;;AAEzB,QAAM,IAAI,MAAM,MAAS;AACzB,aAAW,QAAQ,kBAAkB,SAAS,GAAG;AAC7C,QAAQ,aAAa,IAAI,KAAK,KAAK,QAAQ,YAAW,MAAO,QAAQ;AACjE,YAAM,IAAI,MAAM,IAAI;AACpB,aAAO;eACI,WAAW,IAAI,KAAS,aAAa,KAAK,KAAK,GAAG,GAAG;AAChE,aAAO,GAAG,MAAM,KAAK,KAAK,GAAG;eAClB,aAAa,IAAI,OAAK,KAAA,KAAK,aAAO,QAAA,OAAA,SAAA,SAAA,GAAE,MAAK;AACpD,aAAO,GAAG,MAAM,KAAK,QAAQ,GAAG;;;AAGxC,SAAO;AACX;AA5BS;AA8BH,SAAU,mBAAmB,SAA4B;AAC3D,QAAM,SAAS,QAAQ;AACvB,MAAQ,QAAQ,MAAM,GAAG;AACrB,UAAM,WAAW,OAAO;AACxB,UAAM,QAAQ,SAAS,QAAQ,OAAO;AACtC,aAAS,IAAI,QAAQ,GAAG,KAAK,GAAG,KAAK;AACjC,YAAM,OAAO,SAAS,CAAC;AACvB,UAAQ,SAAS,IAAI,GAAG;AACpB,eAAO;aACJ;AACH,cAAM,SAAS,kBAAkB,SAAS,CAAC,CAAC,EAAE,KAAS,QAAQ;AAC/D,YAAI,QAAQ;AACR,iBAAO;;;;;AAKvB,MAAQ,kBAAkB,MAAM,GAAG;AAC/B,WAAO,mBAAmB,MAAM;SAC7B;AACH,WAAO;;AAEf;AAtBgB;AA2BV,SAAU,sBAAsB,aAA2B,SAA6B;AAC1F,SAAO,gBAAgB,OAAO,gBAAgB,OAAY,QAAQ,OAAO,KAAK,QAAQ,QAAQ,cAAc;AAChH;AAFgB;AAIV,SAAU,mBAAmB,aAAyB;AACxD,SAAO,gBAAgB,OAAO,gBAAgB;AAClD;AAFgB;AAIV,SAAU,gBAAgB,UAAmB;AAC/C,SAAO,aAAa;AACxB;AAFgB;AAQV,SAAU,eAAe,MAAoB;AAC/C,SAAO,uBAAuB,MAAM,oBAAI,IAAG,CAAE;AACjD;AAFgB;AAIhB,SAAS,uBAAuB,MAAsB,SAA4B;AAC9E,MAAI,QAAQ,IAAI,IAAI,GAAG;AACnB,WAAO;SACJ;AACH,YAAQ,IAAI,IAAI;;AAEpB,aAAW,QAAQ,kBAAkB,IAAI,GAAG;AACxC,QAAQ,WAAW,IAAI,GAAG;AACtB,UAAI,CAAC,KAAK,KAAK,KAAK;AAEhB,eAAO;;AAEX,UAAQ,aAAa,KAAK,KAAK,GAAG,KAAK,CAAC,uBAAuB,KAAK,KAAK,KAAK,OAAO,GAAG;AACpF,eAAO;;eAEA,aAAa,IAAI,GAAG;AAC/B,aAAO;eACI,SAAS,IAAI,GAAG;AAC3B,aAAO;;;AAGf,SAAO,QAAQ,KAAK,UAAU;AAClC;AAtBS;AAwBH,SAAU,WAAW,MAAc;AACrC,SAAO,mBAAmB,KAAK,MAAM,oBAAI,IAAG,CAAE;AAClD;AAFgB;AAIhB,SAAS,mBAAmB,MAA0B,SAAgC;AAClF,MAAI,QAAQ,IAAI,IAAI,GAAG;AACnB,WAAO;SACJ;AACH,YAAQ,IAAI,IAAI;;AAEpB,MAAQ,YAAY,IAAI,GAAG;AACvB,WAAO;aACI,gBAAgB,IAAI,GAAG;AAClC,WAAO;aACI,YAAY,IAAI,GAAG;AAC9B,WAAO,KAAK,MAAM,MAAM,OAAK,mBAAmB,GAAG,OAAO,CAAC;aAChD,aAAa,IAAI,GAAG;AAC/B,QAAI,KAAK,kBAAkB,QAAW;AAClC,aAAO;eACA,KAAK,eAAe,QAAW;AACtC,aAAO;eACA,KAAK,YAAY,QAAW;AACnC,YAAM,MAAM,KAAK,QAAQ;AACzB,UAAQ,OAAO,GAAG,GAAG;AACjB,eAAO,mBAAmB,IAAI,MAAM,OAAO;aACxC;AACH,eAAO;;WAER;AACH,aAAO;;SAER;AACH,WAAO;;AAEf;AA9BS;AAgCH,SAAU,oBAAoB,MAAoB;AACpD,MAAI,KAAK,cAAc;AACnB,WAAO,KAAK,aAAa;aAClB,KAAK,UAAU;AACtB,WAAO,KAAK;aACL,KAAK,YAAY;AACxB,UAAM,UAAU,KAAK,WAAW;AAChC,Q
AAG,SAAS;AAER,UAAQ,aAAa,OAAO,GAAG;AAC3B,eAAO,QAAQ;iBACJ,YAAY,OAAO,KAAS,OAAO,OAAO,GAAG;AACxD,eAAO,QAAQ;;;;AAI3B,SAAO;AACX;AAjBgB;AAmBV,SAAU,YAAY,MAAmC;;AAC3D,MAAQ,aAAa,IAAI,GAAG;AACxB,WAAO,eAAe,IAAI,IAAI,KAAK,QAAO,KAAA,oBAAoB,IAAI,OAAC,QAAA,OAAA,SAAA,KAAI,KAAK;aACjE,YAAY,IAAI,KAAS,OAAO,IAAI,KAAS,aAAa,IAAI,GAAG;AAC5E,WAAO,KAAK;aACD,SAAS,IAAI,GAAG;AAC3B,UAAM,aAAa,cAAc,IAAI;AACrC,QAAI,YAAY;AACZ,aAAO;;aAEA,eAAe,IAAI,GAAG;AACjC,WAAO,KAAK;;AAEhB,QAAM,IAAI,MAAM,iCAAiC;AACrD;AAdgB;AAgBV,SAAU,cAAc,QAAkB;;AAC5C,MAAI,OAAO,cAAc;AACrB,WAAO,OAAO,aAAa;cACpB,KAAA,OAAO,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,KAAK;AACzB,WAAO,YAAY,OAAO,KAAK,GAAG;;AAEtC,SAAO;AACX;AAPgB;AASV,SAAU,YAAY,MAAsB;;AAC9C,MAAQ,eAAe,IAAI,GAAG;AAC1B,YAAO,MAAA,KAAA,KAAK,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,UAAI,QAAA,OAAA,SAAA,KAAI;SACvB;AACH,WAAO,eAAe,IAAI,IAAI,KAAK,QAAO,KAAA,oBAAoB,IAAI,OAAC,QAAA,OAAA,SAAA,KAAI,KAAK;;AAEpF;AANgB;AAQV,SAAU,cAAc,cAA8B;AACxD,QAAM,QAAe;IACjB,GAAG;IACH,GAAG;IACH,GAAG;;AAEP,QAAM,SAAS,uBAAuB,aAAa,YAAY,KAAK;AACpE,QAAM,WAAW,OAAO,QAAQ,KAAK,EAAE,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK,EAAE,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI,EAAE,KAAK,EAAE;AACjG,SAAO,IAAI,OAAO,QAAQ,QAAQ;AACtC;AATgB;AAYhB,IAAM,WAAW,SAAS;AAQ1B,SAAS,uBAAuB,SAA8B,OAAa;AACvE,MAAQ,uBAAuB,OAAO,GAAG;AACrC,WAAO,4BAA4B,OAAO;aAC/B,gBAAgB,OAAO,GAAG;AACrC,WAAO,qBAAqB,OAAO;aACxB,iBAAiB,OAAO,GAAG;AACtC,WAAO,sBAAsB,OAAO;aACzB,mBAAmB,OAAO,GAAG;AACxC,UAAM,OAAO,QAAQ,KAAK;AAC1B,QAAI,CAAC,MAAM;AACP,YAAM,IAAI,MAAM,yBAAyB;;AAE7C,WAAO,gBAAgB,uBAAuB,KAAK,UAAU,GAAG;MAC5D,aAAa,QAAQ;MACrB,WAAW,QAAQ;KACtB;aACU,eAAe,OAAO,GAAG;AACpC,WAAO,mBAAmB,OAAO;aACtB,aAAa,OAAO,GAAG;AAClC,WAAO,kBAAkB,OAAO;aACrB,aAAa,OAAO,GAAG;AAClC,UAAM,YAAY,QAAQ,MAAM,YAAY,GAAG;AAC/C,UAAM,SAAS,QAAQ,MAAM,UAAU,GAAG,SAAS;AACnD,UAAM,aAAa,QAAQ,MAAM,UAAU,YAAY,CAAC;AACxD,QAAI,OAAO;AACP,YAAM,IAAI,WAAW,SAAS,GAAG;AACjC,YAAM,IAAI,WAAW,SAAS,GAAG;AACjC,YAAM,IAAI,WAAW,SAAS,GAAG;;AAErC,WAAO,gBAAgB,QAAQ;MAC3B,aAAa,QAAQ;MACrB,WAAW,QAAQ;MACnB,MAAM;KACT;aACU,WAAW,OAAO,GAAG;AAChC,WAAO,gBAAgB,UAAU;MAC7B,aAAa,QAAQ;MACrB,WAAW,QAAQ;KACtB;SACE;AACH,UAAM,IAAI,MAAM,6BAA6B,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,KAAK,EAAE;;AAErE;AA1CS;AA4CT,SAAS,4BAA4B,cAAsC;AACvE,SAAO,gBAAgB,aAAa,SAAS,IAAI,OAAK,uBAAuB,CAAC,CAAC,EAAE,KAAK,GAAG,GAAG;IACxF,aAAa,aAAa;IAC1B,WAAW,aAAa;GAC3B;AACL;AALS;AAOT,SAAS,qBAAqB,OAAwB;AAClD,SAAO,gBAAgB,MAAM,SAAS,IAAI,OAAK,uBAAuB,CAAC,CAAC,EAAE,KAAK,EAAE,GAAG;IAChF,aAAa,MAAM;IACnB,WAAW,MAAM;GACpB;AACL;AALS;AAOT,SAAS,kBAAkB,OAAqB;AAC5C,SAAO,gBAAgB,GAAG,QAAQ,KAAK,uBAAuB,MAAM,QAAQ,CAAC,IAAI;IAC7E,aAAa,MAAM;IACnB,WAAW,MAAM;GACpB;AACL;AALS;AAOT,SAAS,mBAAmB,QAAwB;AAChD,SAAO,gBAAgB,MAAM,uBAAuB,OAAO,QAAQ,CAAC,IAAI,QAAQ,MAAM;IAClF,aAAa,OAAO;IACpB,WAAW,OAAO;GACrB;AACL;AALS;AAOT,SAAS,sBAAsB,OAAyB;AACpD,MAAI,MAAM,OAAO;AACb,WAAO,gBAAgB,IAAI,eAAe,MAAM,IAAI,CAAC,IAAI,eAAe,MAAM,KAAK,CAAC,KAAK;MACrF,aAAa,MAAM;MACnB,WAAW,MAAM;MACjB,MAAM;KACT;;AAEL,SAAO,gBAAgB,eAAe,MAAM,IAAI,GAAG;IAC/C,aAAa,MAAM;IACnB,WAAW,MAAM;IACjB,MAAM;GACT;AACL;AAbS;AAeT,SAAS,eAAe,SAAoB;AACxC,SAAO,aAAa,QAAQ,KAAK;AACrC;AAFS;AAIT,SAAS,gBAAgB,OAAe,SAIvC;;AACG,MAAI,QAAQ,SAAS,SAAS,QAAQ,WAAW;AAC7C,YAAQ,KAAI,KAAA,QAAQ,eAAS,QAAA,OAAA,SAAA,KAAI,EAAE,GAAG,KAAK;;AAE/C,MAAI,QAAQ,aAAa;AACrB,WAAO,GAAG,KAAK,GAAG,QAAQ,WAAW;;AAEzC,SAAO;AACX;AAZS;;;AS/gBH,SAAU,oBAAoB,UAA6B;AAC7D,QAAM,QAAkB,CAAA;AACxB,QAAM,UAAU,SAAS;AACzB,aAAW,QAAQ,QAAQ,OAAO;AAC9B,QAAI,eAAe,IAAI,KAAK,kBAAkB,IAAI,KAAK,mBAAmB,cAAc,IAAI,CAAC,GAAG;AAC5F,YAAM,KAAK,KAAK,IAAI;;;AAG5B,SAAO;IACH,uBAAuB;IACvB,YAAY;;AAEpB;AAZgB;;;AC3BV,SAAU,YAAY,KAAW;AAErC,MAAI,WAAW,QAAQ,OAAO;AAC5B,YAAQ,MAAM,UAAU,GAAG,EAAE;;AAEjC;AALgB;AAOV,SAAU,cAAc,KAAW;AAEvC,MAAI,WAAW,QAAQ,MAAM;AAE3
B,YAAQ,KAAK,YAAY,GAAG,EAAE;;AAElC;AANgB;;;ACPV,SAAU,MAAS,MAAa;AACpC,QAAM,SAAQ,oBAAI,KAAI,GAAG,QAAO;AAChC,QAAM,MAAM,KAAI;AAChB,QAAM,OAAM,oBAAI,KAAI,GAAG,QAAO;AAC9B,QAAM,QAAQ,MAAM;AACpB,SAAO,EAAE,MAAM,OAAO,OAAO,IAAG;AAClC;AANgB;;;ACCV,SAAU,iBAAiB,cAAiB;AAChD,WAAS,kBAAe;EAAI;AAAnB;AAGT,kBAAgB,YAAY;AAC5B,QAAM,eAAe,IAAK,gBAAuB;AAEjD,WAAS,aAAU;AACjB,WAAO,OAAO,aAAa;EAC7B;AAFS;AAMT,aAAU;AACV,aAAU;AAIV,MAAI;AAAG,WAAO;AAMd,GAAC,GAAG,MAAM,YAAY;AACxB;AAzBgB;;;ACShB,SAAS,WAAW,SAAkB;AACpC,MAAI,cAAc,OAAO,GAAG;AAC1B,WAAO,QAAQ;SACV;AACL,WAAO,QAAQ;;AAEnB;AANS;AAST,SAAS,cACP,KAAc;AAEd,SAAO,iBAAS,IAAI,KAAK,KAAK,IAAI,UAAU;AAC9C;AAJS;AAMH,IAAgB,qBAAhB,MAAkC;EAzBxC,OAyBwC;;;EAGtC,IAAW,aAAU;AACnB,WAAO,KAAK;EACd;EACA,IAAW,WAAW,OAAU;AAC9B,SAAK,cAAc;EACrB;EAEA,YAAsB,aAAgB;AAAhB,SAAA,cAAA;EAAmB;EAEzC,OAAOC,UAAqB;AAC1B,IAAAA,SAAQ,MAAM,IAAI;AAClB,oBAAQ,KAAK,YAAY,CAAC,SAAQ;AAChC,WAAK,OAAOA,QAAO;IACrB,CAAC;EACH;;AAGI,IAAO,cAAP,cACI,mBAAkB;EA9C5B,OA8C4B;;;EAQ1B,YAAY,SAKX;AACC,UAAM,CAAA,CAAE;AARH,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;EAEA,IAAI,WAAW,YAAyB;EAExC;EAEA,IAAI,aAAU;AACZ,QAAI,KAAK,mBAAmB,QAAW;AACrC,aAAO,KAAK,eAAe;;AAE7B,WAAO,CAAA;EACT;EAEA,OAAOA,UAAqB;AAC1B,IAAAA,SAAQ,MAAM,IAAI;EAEpB;;AAGI,IAAO,OAAP,cAAoB,mBAAkB;EApF5C,OAoF4C;;;EAI1C,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AAPnB,SAAA,UAAkB;AAQvB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,cAAP,cAA2B,mBAAkB;EArGnD,OAqGmD;;;EAGjD,YAAY,SAGX;AACC,UAAM,QAAQ,UAAU;AANnB,SAAA,oBAA6B;AAOlC,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,SAAP,cACI,mBAAkB;EArH5B,OAqH4B;;;EAM1B,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AARnB,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,sBAAP,cACI,mBAAkB;EAzI5B,OAyI4B;;;EAM1B,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AARnB,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,mCAAP,cACI,mBAAkB;EA7J5B,OA6J4B;;;EAO1B,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AARnB,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,aAAP,cACI,mBAAkB;EAlL5B,OAkL4B;;;EAO1B,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AARnB,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,0BAAP,cACI,mBAAkB;EAvM5B,OAuM4B;;;EAO1B,YAAY,SAIX;AACC,UAAM,QAAQ,UAAU;AARnB,SAAA,MAAc;AASnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,cAAP,cACI,mBAA+B;EA5NzC,OA4NyC;;;EAQvC,IAAW,aAAU;AACnB,WAAO,KAAK;EACd;EACA,IAAW,WAAW,OAAoB;AACxC,SAAK,cAAc;EACrB;EAEA,YAAY,SAMX;AACC,UAAM,QAAQ,UAAU;AAnBnB,SAAA,MAAc;AACd,SAAA,oBAA6B;AAC7B,SAAA,gBAAyB;AAkB9B,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;;AAGI,IAAO,WAAP,MAAe;EA1PrB,OA0PqB;;;EAKnB,YAAY,SAIX;AANM,SAAA,MAAc;AAOnB,mBACE,MACA,eAAO,SAAS,CAAC,MAAM,MAAM,MAAS,CAAC;EAE3C;EAEA,OAAOA,UAAqB;AAC1B,IAAAA,SAAQ,MAAM,IAAI;EACpB;;AAgDI,SAAU,iBAAiB,UAAgB;AAC/C,SAAO,YAAI,UAAU,mBAAmB;AAC1C;AAFgB;AAIV,SAAU,oBAAoB,MAAiB;AACnD,WAAS,kBAAkB,YAAyB;AAClD,WAAO,YAAI,YAAY,mBAAmB;EAC5C;AAFS;AAIT,MAAI,gBAAgB,aAAa;AAC/B,UAAM,wBAAgD;MACpD,MAAM;MACN,MAAM,KAAK;MACX,KAAK,KAAK;;AAGZ,QAAI,iBAAS,KAAK,KAAK,GAAG;AACxB,4BAAsB,QAAQ,KAAK;;AAGrC,WAAO;aACE,gBAAgB,aAAa;AACtC,WAAyB;MACvB,MAAM;MACN,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,QAAQ;AACjC,WAAyB;MACvB,MAAM;MACN,KAAK,KAAK;MACV,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,qBAAqB;AAC9C,WAAyB;MACvB,MAAM;MACN,KAAK,KAAK;MACV,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,kCAAkC;AAC3D,WAAyC;MACvC,MAAM;MACN,KAAK,KAAK;MACV,WACE,oBAAoB,IAAI,SAAS,EAAE,cAAc,KAAK,UAAS,CAAE,CAAC;MAEpE,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,yBAAyB;AAClD,WAAyC;MACvC,MAAM;MACN,KAAK,KAAK;MACV,WACE,oBAAoB,IAAI,SAAS,EAAE,cAAc,KAAK,UAAS,CAAE,CAAC;MAEpE,YAAY,kBAAkB,K
AAK,UAAU;;aAEtC,gBAAgB,YAAY;AACrC,WAAyB;MACvB,MAAM;MACN,KAAK,KAAK;MACV,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,aAAa;AACtC,WAAyB;MACvB,MAAM;MACN,KAAK,KAAK;MACV,YAAY,kBAAkB,KAAK,UAAU;;aAEtC,gBAAgB,UAAU;AACnC,UAAM,qBAA0C;MAC9C,MAAM;MACN,MAAM,KAAK,aAAa;MACxB,OAAO,WAAW,KAAK,YAAY;MACnC,KAAK,KAAK;;AAGZ,QAAI,iBAAS,KAAK,KAAK,GAAG;AACxB,yBAAmB,gBAAgB,KAAK;;AAG1C,UAAM,UAAU,KAAK,aAAa;AAClC,QAAI,KAAK,aAAa,SAAS;AAC7B,yBAAmB,UAAU,iBAAS,OAAO,IACnC,QAAS,SACf;;AAGN,WAAO;aACE,gBAAgB,MAAM;AAC/B,WAA4B;MAC1B,MAAM;MACN,MAAM,KAAK;MACX,SAAS,KAAK;MACd,YAAY,kBAAkB,KAAK,UAAU;;SAG1C;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AA/FgB;;;AClTV,IAAgB,cAAhB,MAA2B;EAdjC,OAciC;;;EACxB,MAAM,MAAiB;AAC5B,UAAM,UAAe;AACrB,YAAQ,QAAQ,aAAa;MAC3B,KAAK;AACH,eAAO,KAAK,iBAAiB,OAAO;MACtC,KAAK;AACH,eAAO,KAAK,iBAAiB,OAAO;MACtC,KAAK;AACH,eAAO,KAAK,YAAY,OAAO;MACjC,KAAK;AACH,eAAO,KAAK,yBAAyB,OAAO;MAC9C,KAAK;AACH,eAAO,KAAK,sCAAsC,OAAO;MAC3D,KAAK;AACH,eAAO,KAAK,6BAA6B,OAAO;MAClD,KAAK;AACH,eAAO,KAAK,gBAAgB,OAAO;MACrC,KAAK;AACH,eAAO,KAAK,iBAAiB,OAAO;MACtC,KAAK;AACH,eAAO,KAAK,cAAc,OAAO;MACnC,KAAK;AACH,eAAO,KAAK,UAAU,OAAO;MAE/B;AACE,cAAM,MAAM,sBAAsB;;EAExC;;EAGO,iBAAiB,MAAiB;EAAQ;;EAG1C,iBAAiB,MAAiB;EAAQ;;EAG1C,YAAY,MAAY;EAAQ;;EAGhC,gBAAgB,MAAgB;EAAQ;;EAGxC,yBAAyB,MAAyB;EAAQ;;EAG1D,sCACL,MAAsC;EAChC;;EAGD,6BAA6B,MAA6B;EAAQ;;EAGlE,iBAAiB,MAAiB;EAAQ;;EAG1C,cAAc,MAAc;EAAQ;;EAGpC,UAAU,MAAU;EAAQ;;;;AC1D/B,SAAU,eACd,MAAiB;AAEjB,SACE,gBAAgB,eAChB,gBAAgB,UAChB,gBAAgB,cAChB,gBAAgB,uBAChB,gBAAgB,oCAChB,gBAAgB,2BAChB,gBAAgB,YAChB,gBAAgB;AAEpB;AAbgB;AAeV,SAAU,eACd,MACA,iBAAgC,CAAA,GAAE;AAElC,QAAM,qBACJ,gBAAgB,UAChB,gBAAgB,cAChB,gBAAgB;AAClB,MAAI,oBAAoB;AACtB,WAAO;;AAMT,MAAI,gBAAgB,aAAa;AAE/B,WAAO,aAAmB,KAAM,YAAY,CAAC,YAAwB;AACnE,aAAO,eAAe,SAAS,cAAc;IAC/C,CAAC;aACQ,gBAAgB,eAAe,iBAAS,gBAAgB,IAAI,GAAG;AAExE,WAAO;aACE,gBAAgB,oBAAoB;AAC7C,QAAI,gBAAgB,aAAa;AAC/B,qBAAe,KAAK,IAAI;;AAE1B,WAAO,cACgB,KAAM,YAC3B,CAAC,YAAwB;AACvB,aAAO,eAAe,SAAS,cAAc;IAC/C,CAAC;SAEE;AACL,WAAO;;AAEX;AApCgB;AAsCV,SAAU,gBACd,MAAiB;AAEjB,SAAO,gBAAgB;AACzB;AAJgB;AAMV,SAAU,qBAAqB,MAA+B;AAElE,MAAI,gBAAgB,aAAa;AAC/B,WAAO;aACE,gBAAgB,QAAQ;AACjC,WAAO;aACE,gBAAgB,aAAa;AACtC,WAAO;aACE,gBAAgB,qBAAqB;AAC9C,WAAO;aACE,gBAAgB,kCAAkC;AAC3D,WAAO;aACE,gBAAgB,yBAAyB;AAClD,WAAO;aACE,gBAAgB,YAAY;AACrC,WAAO;aACE,gBAAgB,UAAU;AACnC,WAAO;SAEF;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AAtBgB;;;AC1DV,IAAgB,aAAhB,MAA0B;EAjBhC,OAiBgC;;;EAC9B,KAAK,MAAqC,WAAkB,CAAA,GAAE;AAC5D,oBAAQ,KAAK,YAAY,CAAC,SAAsB,UAAS;AACvD,YAAM,WAAW,aAAK,KAAK,YAAY,QAAQ,CAAC;AAEhD,UAAI,mBAAmB,aAAa;AAClC,aAAK,YAAY,SAAS,UAAU,QAAQ;iBACnC,mBAAmB,UAAU;AACtC,aAAK,aAAa,SAAS,UAAU,QAAQ;iBACpC,mBAAmB,aAAa;AACzC,aAAK,SAAS,SAAS,UAAU,QAAQ;iBAChC,mBAAmB,QAAQ;AACpC,aAAK,WAAW,SAAS,UAAU,QAAQ;iBAClC,mBAAmB,qBAAqB;AACjD,aAAK,eAAe,SAAS,UAAU,QAAQ;iBACtC,mBAAmB,kCAAkC;AAC9D,aAAK,kBAAkB,SAAS,UAAU,QAAQ;iBACzC,mBAAmB,yBAAyB;AACrD,aAAK,YAAY,SAAS,UAAU,QAAQ;iBACnC,mBAAmB,YAAY;AACxC,aAAK,SAAS,SAAS,UAAU,QAAQ;iBAChC,mBAAmB,aAAa;AACzC,aAAK,OAAO,SAAS,UAAU,QAAQ;aAClC;AACL,cAAM,MAAM,sBAAsB;;IAEtC,CAAC;EACH;EAEA,aACE,UACA,UACA,UAAuB;EAChB;EAET,YACE,SACA,UACA,UAAuB;EAChB;EAET,SACE,UACA,UACA,UAAuB;AAGvB,UAAM,aAAa,SAAS,OAAO,QAAQ;AAC3C,SAAK,KAAK,UAAe,UAAU;EACrC;EAEA,WACE,YACA,UACA,UAAuB;AAGvB,UAAM,aAAa,SAAS,OAAO,QAAQ;AAC3C,SAAK,KAAK,YAAiB,UAAU;EACvC;EAEA,eACE,gBACA,UACA,UAAuB;AAGvB,UAAM,qBAAoC;MACxC,IAAI,OAAO,EAAE,YAAY,eAAe,WAAU,CAAE;MACpD,OAAY,UAAe,QAAQ;AACrC,SAAK,KAAK,gBAAgB,kBAAkB;EAC9C;EAEA,kBACE,mBACA,UACA,UAAuB;AAGvB,UAAM,wBAAwB,+BAC5B,mBACA,UACA,QAAQ;AAEV,SAAK,KAAK,mBAAmB,qBAAqB;EACpD;EAEA,SACE,UACA,UACA,UAAuB;AAGvB,UAAM,eAA8B;MAClC,IAAI,OAAO,EAAE,YAAY,SAAS,WAAU,CAAE;MAC9C,OAAY,UAAe,QAAQ;AACrC,SAAK,KAAK,UAAU,YAAY;E
AClC;EAEA,YACE,aACA,UACA,UAAuB;AAGvB,UAAM,kBAAkB,+BACtB,aACA,UACA,QAAQ;AAEV,SAAK,KAAK,aAAa,eAAe;EACxC;EAEA,OACE,QACA,UACA,UAAuB;AAGvB,UAAM,aAAa,SAAS,OAAO,QAAQ;AAE3C,oBAAQ,OAAO,YAAY,CAAC,QAAO;AAIjC,YAAM,cAAc,IAAI,YAAY,EAAE,YAAY,CAAC,GAAG,EAAC,CAAE;AACzD,WAAK,KAAK,aAAkB,UAAU;IACxC,CAAC;EACH;;AAGF,SAAS,+BACP,YACA,UACA,UAAuB;AAEvB,QAAM,aAAa;IACjB,IAAI,OAAO;MACT,YAAY;QACV,IAAI,SAAS,EAAE,cAAc,WAAW,UAAS,CAAE;QACnD,OAAO,WAAW,UAAU;KAC/B;;AAEH,QAAM,iBAAgC,WAAW,OAAO,UAAU,QAAQ;AAC1E,SAAO;AACT;AAdS;;;AC1IH,SAAU,MAAM,MAAiB;AAErC,MAAI,gBAAgB,aAAa;AAS/B,WAAO,MAAoB,KAAM,cAAc;aACtC,gBAAgB,UAAU;AACnC,WAAO,iBAA2B,IAAI;aAC7B,eAAe,IAAI,GAAG;AAC/B,WAAO,iBAAiB,IAAI;aACnB,gBAAgB,IAAI,GAAG;AAChC,WAAO,kBAAkB,IAAI;SACxB;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AArBgB;AAuBV,SAAU,iBAAiB,MAEhC;AACC,MAAI,WAAwB,CAAA;AAC5B,QAAM,MAAM,KAAK;AACjB,MAAI,iBAAiB;AACrB,MAAI,yBAAyB,IAAI,SAAS;AAC1C,MAAI;AAEJ,MAAI,0BAA0B;AAE9B,SAAO,0BAA0B,yBAAyB;AACxD,kBAAc,IAAI,cAAc;AAChC,8BAA0B,eAAe,WAAW;AACpD,eAAW,SAAS,OAAO,MAAM,WAAW,CAAC;AAC7C,qBAAiB,iBAAiB;AAClC,6BAAyB,IAAI,SAAS;;AAGxC,SAAO,aAAK,QAAQ;AACtB;AApBgB;AAsBV,SAAU,kBAAkB,MAEjC;AACC,QAAM,wBAAuC,YAC3C,KAAK,YACL,CAAC,cAAa;AACZ,WAAO,MAAM,SAAS;EACxB,CAAC;AAEH,SAAO,aAAK,gBAAmB,qBAAqB,CAAC;AACvD;AAVgB;AAYV,SAAU,iBAAiB,UAAkB;AACjD,SAAO,CAAC,SAAS,YAAY;AAC/B;AAFgB;;;AClET,IAAM,KAAK;;;ACQZ,IAAO,sBAAP,cAAmC,WAAU;EATnD,OASmD;;;EAGjD,YAAoB,SAAa;AAC/B,UAAK;AADa,SAAA,UAAA;AAFb,SAAA,UAAuC,CAAA;EAI9C;EAEA,eAAY;AACV,SAAK,KAAK,KAAK,OAAO;AACtB,WAAO,KAAK;EACd;EAEA,aACE,UACA,UACA,UAAuB;EAGzB;EAEA,YACE,SACA,UACA,UAAuB;AAEvB,UAAM,aACJ,8BAA8B,QAAQ,gBAAgB,QAAQ,GAAG,IACjE,KAAK,QAAQ;AACf,UAAM,WAA0B,SAAS,OAAO,QAAQ;AACxD,UAAM,WAAW,IAAI,YAAY,EAAE,YAAY,SAAQ,CAAE;AACzD,UAAM,uBAAuB,MAAM,QAAQ;AAC3C,SAAK,QAAQ,UAAU,IAAI;EAC7B;;AAGI,SAAU,uBACd,gBAAsB;AAEtB,QAAM,gBAAgB,CAAA;AAEtB,kBAAQ,gBAAgB,CAAC,YAAW;AAClC,UAAM,iBAAiB,IAAI,oBAAoB,OAAO,EAAE,aAAY;AACpE,mBAAO,eAAe,cAAc;EACtC,CAAC;AACD,SAAO;AACT;AAVgB;AAYV,SAAU,8BACd,OACA,mBAAyB;AAEzB,SAAO,MAAM,OAAO,oBAAoB;AAC1C;AALgB;;;AC/ChB,IAAI,iBAAqD,CAAA;AACzD,IAAM,eAAe,IAAI,aAAY;AAU/B,SAAU,aAAa,QAAc;AACzC,QAAM,YAAY,OAAO,SAAQ;AACjC,MAAI,eAAe,eAAe,SAAS,GAAG;AAC5C,WAAO,eAAe,SAAS;SAC1B;AACL,UAAM,YAAY,aAAa,QAAQ,SAAS;AAChD,mBAAe,SAAS,IAAI;AAC5B,WAAO;;AAEX;AATgB;AAWV,SAAU,yBAAsB;AACpC,mBAAiB,CAAA;AACnB;AAFgB;;;ACjBhB,IAAM,yBACJ;AACK,IAAM,8BACX;AAEI,SAAU,8BACd,QACA,sBAAsB,OAAK;AAE3B,MAAI;AACF,UAAM,MAAM,aAAa,MAAM;AAC/B,UAAM,aAAa,0BACjB,IAAI,OACJ,CAAA,GACA,IAAI,MAAM,UAAU;AAEtB,WAAO;WACA,GAAG;AAIV,QAAI,EAAE,YAAY,wBAAwB;AACxC,UAAI,qBAAqB;AACvB,sBACE,GAAG,2BAA2B,0BACD,OAAO,SAAQ,CAAE;;;2FAGiD;;WAG9F;AACL,UAAI,YAAY;AAChB,UAAI,qBAAqB;AACvB,oBACE;;AAGJ,kBACE,GAAG,2BAA2B;qBACL,OAAO,SAAQ,CAAE;;6EAGxC,SAAS;;;AAKjB,SAAO,CAAA;AACT;AA5CgB;AA8CV,SAAU,0BACd,KACA,QACA,YAAmB;AAEnB,UAAQ,IAAI,MAAM;IAChB,KAAK;AACH,eAAS,IAAI,GAAG,IAAI,IAAI,MAAM,QAAQ,KAAK;AACzC,kCAA0B,IAAI,MAAM,CAAC,GAAG,QAAQ,UAAU;;AAE5D;IACF,KAAK;AACH,YAAM,QAAQ,IAAI;AAClB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,OAAO,MAAM,CAAC;AAGpB,gBAAQ,KAAK,MAAM;UACjB,KAAK;UAIL,KAAK;UAEL,KAAK;UACL,KAAK;UACL,KAAK;UACL,KAAK;UACL,KAAK;AACH;;AAGJ,cAAMC,QAAO;AACb,gBAAQA,MAAK,MAAM;UACjB,KAAK;AACH,oCAAwBA,MAAK,OAAO,QAAQ,UAAU;AACtD;UACF,KAAK;AACH,gBAAIA,MAAK,eAAe,MAAM;AAC5B,oBAAM,MAAM,sBAAsB;;AAEpC,4BAAQA,MAAK,OAAO,CAAC,SAAQ;AAC3B,kBAAI,OAAO,SAAS,UAAU;AAC5B,wCAAwB,MAAM,QAAQ,UAAU;qBAC3C;AAEL,sBAAM,QAAQ;AAEd,oBAAI,eAAe,MAAM;AACvB,2BACM,YAAY,MAAM,MACtB,aAAa,MAAM,IACnB,aACA;AACA,4CAAwB,WAAW,QAAQ,UAAU;;uBAIpD;AAEH,2BACM,YAAY,MAAM,MACtB,aAAa,MAAM,MAAM,YAAY,oBACrC,aACA;AACA,4CAAwB,WAAW,QAAQ,UAAU;;AAIvD,sBAAI,MAAM,MAAM,oBAAoB;AAClC,0BAAM,cACJ,MAAM,QAAQ,qBACV,MAAM,OACN;AACN,0BAAM,cAAc,MAA
M;AAC1B,0BAAM,YAAY,yBAAyB,WAAW;AACtD,0BAAM,YAAY,yBAAyB,WAAW;AAEtD,6BACM,aAAa,WACjB,cAAc,WACd,cACA;AACA,6BAAO,UAAU,IAAI;;;;;YAK/B,CAAC;AACD;UACF,KAAK;AACH,sCAA0BA,MAAK,OAAO,QAAQ,UAAU;AACxD;UAEF;AACE,kBAAM,MAAM,sBAAsB;;AAItC,cAAM,uBACJA,MAAK,eAAe,UAAaA,MAAK,WAAW,YAAY;AAC/D;;;UAGGA,MAAK,SAAS,WAAW,gBAAgBA,KAAI,MAAM;UAEnDA,MAAK,SAAS,WAAW,yBAAyB;UACnD;AACA;;;AAGJ;IAEF;AACE,YAAM,MAAM,uBAAuB;;AAIvC,SAAO,eAAO,MAAM;AACtB;AAvHgB;AAyHhB,SAAS,wBACP,MACA,QACA,YAAmB;AAEnB,QAAM,mBAAmB,yBAAyB,IAAI;AACtD,SAAO,gBAAgB,IAAI;AAE3B,MAAI,eAAe,MAAM;AACvB,qBAAiB,MAAM,MAAM;;AAEjC;AAXS;AAaT,SAAS,iBACP,MACA,QAAsC;AAEtC,QAAM,OAAO,OAAO,aAAa,IAAI;AACrC,QAAM,YAAY,KAAK,YAAW;AAElC,MAAI,cAAc,MAAM;AACtB,UAAM,mBAAmB,yBAAyB,UAAU,WAAW,CAAC,CAAC;AACzE,WAAO,gBAAgB,IAAI;SACtB;AACL,UAAM,YAAY,KAAK,YAAW;AAClC,QAAI,cAAc,MAAM;AACtB,YAAM,mBAAmB,yBACvB,UAAU,WAAW,CAAC,CAAC;AAEzB,aAAO,gBAAgB,IAAI;;;AAGjC;AAnBS;AAqBT,SAAS,SAAS,SAAc,iBAAyB;AACvD,SAAO,aAAK,QAAQ,OAAO,CAAC,gBAAe;AACzC,QAAI,OAAO,gBAAgB,UAAU;AACnC,aAAO,iBAAS,iBAAiB,WAAW;WACvC;AAEL,YAAM,QAAa;AACnB,aACE,aACE,iBACA,CAAC,eAAe,MAAM,QAAQ,cAAc,cAAc,MAAM,EAAE,MAC9D;;EAGZ,CAAC;AACH;AAfS;AAiBT,SAAS,gBAAgB,KAAQ;AAC/B,QAAM,aAAc,IAAa;AACjC,MAAI,cAAc,WAAW,YAAY,GAAG;AAC1C,WAAO;;AAGT,MAAI,CAAC,IAAI,OAAO;AACd,WAAO;;AAGT,SAAO,gBAAQ,IAAI,KAAK,IACpB,cAAM,IAAI,OAAO,eAAe,IAChC,gBAAgB,IAAI,KAAK;AAC/B;AAbS;AAeT,IAAM,iBAAN,cAA6B,kBAAiB;EA5P9C,OA4P8C;;;EAG5C,YAAoB,iBAAyB;AAC3C,UAAK;AADa,SAAA,kBAAA;AAFpB,SAAA,QAAiB;EAIjB;EAEA,cAAc,MAAa;AAEzB,QAAI,KAAK,UAAU,MAAM;AACvB;;AAKF,YAAQ,KAAK,MAAM;MACjB,KAAK;AACH,aAAK,eAAe,IAAI;AACxB;MACF,KAAK;AACH,aAAK,uBAAuB,IAAI;AAChC;;AAGJ,UAAM,cAAc,IAAI;EAC1B;EAEA,eAAe,MAAe;AAC5B,QAAI,iBAAS,KAAK,iBAAiB,KAAK,KAAK,GAAG;AAC9C,WAAK,QAAQ;;EAEjB;EAEA,SAAS,MAAS;AAChB,QAAI,KAAK,YAAY;AACnB,UAAI,SAAS,MAAM,KAAK,eAAe,MAAM,QAAW;AACtD,aAAK,QAAQ;;WAEV;AACL,UAAI,SAAS,MAAM,KAAK,eAAe,MAAM,QAAW;AACtD,aAAK,QAAQ;;;EAGnB;;AAGI,SAAU,iBACd,WACA,SAAwB;AAExB,MAAI,mBAAmB,QAAQ;AAC7B,UAAM,MAAM,aAAa,OAAO;AAChC,UAAM,iBAAiB,IAAI,eAAe,SAAS;AACnD,mBAAe,MAAM,GAAG;AACxB,WAAO,eAAe;SACjB;AACL,WACE,aAAU,SAAS,CAAC,SAAQ;AAC1B,aAAO,iBAAS,WAAoB,KAAM,WAAW,CAAC,CAAC;IACzD,CAAC,MAAM;;AAGb;AAhBgB;;;AC7PhB,IAAM,UAAU;AACT,IAAM,eAAe;AACrB,IAAM,QAAQ;AAuBd,IAAI,iBACT,OAAa,IAAI,OAAO,MAAM,EAAG,WAAW;AAUxC,SAAU,kBACd,YACA,SAQC;AAED,YAAU,iBAAS,SAAS;IAC1B,WAAW;IACX,OAAO;IACP,UAAU;IACV,kBAAkB;IAClB,0BAA0B,CAAC,MAAM,IAAI;IACrC,QAAQ,wBAAC,KAAa,WAAqB,OAAM,GAAzC;GACT;AAED,QAAM,SAAS,QAAQ;AAEvB,SAAO,mCAAmC,MAAK;AAC7C,oCAA+B;EACjC,CAAC;AAED,MAAI;AACJ,SAAO,mBAAmB,MAAK;AAC7B,wBAAoB,eAAO,YAAY,CAAC,aAAY;AAClD,aAAO,SAAS,OAAO,MAAM,MAAM;IACrC,CAAC;EACH,CAAC;AAED,MAAI,YAAY;AAChB,MAAI;AACJ,SAAO,sBAAsB,MAAK;AAChC,gBAAY;AACZ,6BAAyB,YACvB,mBACA,CAAC,aAAkC;AACjC,YAAM,cAAc,SAAS,OAAO;AAGpC,UAAI,iBAAS,WAAW,GAAG;AACzB,cAAM,eAAe,YAAY;AACjC,YACE,aAAa,WAAW;QAExB,iBAAiB,OACjB,iBAAiB,OACjB,iBAAiB,OACjB,CAAC,YAAY,YACb;AACA,iBAAO;mBAEP,aAAa,WAAW,KACxB,aAAa,CAAC,MAAM;QAEpB,CAAC,iBACC;UACE;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;UACA;WAEF,aAAa,CAAC,CAAC,GAEjB;AAIA,iBAAO,aAAa,CAAC;eAChB;AACL,iBAAO,QAAQ,YACX,cAAc,WAAW,IACzB,gBAAgB,WAAW;;iBAExB,mBAAW,WAAW,GAAG;AAClC,oBAAY;AAEZ,eAAO,EAAE,MAAM,YAAW;iBACjB,OAAO,gBAAgB,UAAU;AAC1C,oBAAY;AAEZ,eAAO;iBACE,OAAO,gBAAgB,UAAU;AAC1C,YAAI,YAAY,WAAW,GAAG;AAC5B,iBAAO;eACF;AACL,gBAAM,sBAAsB,YAAY,QACtC,uBACA,MAAM;AAER,gBAAM,gBAAgB,IAAI,OAAO,mBAAmB;AACpD,iBAAO,QAAQ,YACX,cAAc,aAAa,IAC3B,gBAAgB,aAAa;;aAE9B;AACL,cAAM,MAAM,sBAAsB;;IAEtC,CAAC;EAEL,CAAC;AAED,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,SAAO,gBAAgB,MAAK;AAC1B,uBAAmB,YACjB,mBACA,CAAC,aAAa,SAAS,YAAa;AAGtC,wBAAoB,YAAI,mBAAmB,CAAC,UAAc;AACxD,YAAM,YAAY
,MAAM;AAExB,UAAI,cAAc,MAAM,SAAS;AAC/B,eAAO;iBACE,iBAAS,SAAS,GAAG;AAC9B,eAAO;iBACE,oBAAY,SAAS,GAAG;AACjC,eAAO;aACF;AACL,cAAM,MAAM,sBAAsB;;IAEtC,CAAC;AAED,kCAA8B,YAAI,mBAAmB,CAAC,UAAc;AAClE,YAAM,gBAAgB,MAAM;AAE5B,UAAI,eAAe;AACjB,cAAM,kBAAkB,gBAAQ,aAAa,IACzC,YAAI,eAAe,CAAC,SAAc,gBAAQ,mBAAmB,IAAI,CAAC,IAClE,CAAC,gBAAQ,mBAAmB,aAAa,CAAC;AAC9C,eAAO;;IAEX,CAAC;AAED,2BAAuB,YACrB,mBACA,CAAC,UAAe,MAAM,SAAS;AAGjC,0BAAsB,YAAI,mBAAmB,CAAC,UAC5C,YAAI,OAAO,UAAU,CAAC;EAE1B,CAAC;AAED,MAAI;AACJ,SAAO,4BAA4B,MAAK;AACtC,UAAM,0BAA0B,aAC9B,QAAQ,wBAAyB;AAEnC,oCAAgC,YAAI,mBAAmB,CAAC,YAAY,KAAK;AACzE,QAAI,QAAQ,qBAAqB,cAAc;AAC7C,sCAAgC,YAAI,mBAAmB,CAAC,YAAW;AACjE,YAAI,YAAI,SAAS,aAAa,GAAG;AAC/B,iBAAO,CAAC,CAAC,QAAQ;eACZ;AACL,iBACE,sBAAsB,SAAS,uBAAuB,MAAM,SAC5D,iBACE,yBACA,QAAQ,OAA0B;;MAI1C,CAAC;;EAEL,CAAC;AAED,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,SAAO,mBAAmB,MAAK;AAC7B,2BAAuB,YAAI,mBAAmB,eAAe;AAC7D,wBAAoB,YAAI,wBAAwB,cAAc;AAE9D,kBAAc,eACZ,mBACA,CAAC,KAAK,UAAc;AAClB,YAAM,YAAY,MAAM;AACxB,UAAI,iBAAS,SAAS,KAAK,EAAE,cAAc,MAAM,UAAU;AACzD,YAAI,SAAS,IAAI,CAAA;;AAEnB,aAAO;IACT,GACA,CAAA,CAAuC;AAGzC,yBAAqB,YACnB,wBACA,CAAC,GAAG,QAAuB;AACzB,aAAO;QACL,SAAS,uBAAuB,GAAG;QACnC,WAAW,4BAA4B,GAAG;QAC1C,mBAAmB,8BAA8B,GAAG;QACpD,UAAU,qBAAqB,GAAG;QAClC,OAAO,kBAAkB,GAAG;QAC5B,OAAO,kBAAkB,GAAG;QAC5B,MAAM,qBAAqB,GAAG;QAC9B,KAAK,oBAAoB,GAAG;QAC5B,cAAc,iBAAiB,GAAG;QAClC,WAAW,kBAAkB,GAAG;;IAEpC,CAAC;EAEL,CAAC;AAED,MAAI,iBAAiB;AACrB,MAAI,+BACF,CAAA;AAEF,MAAI,CAAC,QAAQ,UAAU;AACrB,WAAO,2BAA2B,MAAK;AACrC,qCAA+B,eAC7B,mBACA,CAAC,QAAQ,aAAa,QAAO;AAC3B,YAAI,OAAO,YAAY,YAAY,UAAU;AAC3C,gBAAM,WAAW,YAAY,QAAQ,WAAW,CAAC;AACjD,gBAAM,eAAe,yBAAyB,QAAQ;AACtD,2BAAiB,QAAQ,cAAc,mBAAmB,GAAG,CAAC;mBACrD,gBAAQ,YAAY,gBAAgB,GAAG;AAChD,cAAI;AACJ,0BAAQ,YAAY,kBAAkB,CAAC,cAAa;AAClD,kBAAM,WACJ,OAAO,cAAc,WACjB,UAAU,WAAW,CAAC,IACtB;AACN,kBAAM,mBAAmB,yBAAyB,QAAQ;AAK1D,gBAAI,qBAAqB,kBAAkB;AACzC,iCAAmB;AACnB,+BACE,QACA,kBACA,mBAAmB,GAAG,CAAC;;UAG7B,CAAC;mBACQ,iBAAS,YAAY,OAAO,GAAG;AACxC,cAAI,YAAY,QAAQ,SAAS;AAC/B,6BAAiB;AACjB,gBAAI,QAAQ,qBAAqB;AAC/B,0BACE,GAAG,2BAA2B,wBACH,YAAY,QAAQ,SAAQ,CAAE;;;gGAG2C;;iBAGnG;AACL,kBAAM,iBAAiB,8BACrB,YAAY,SACZ,QAAQ,mBAAmB;AAK7B,gBAAI,gBAAQ,cAAc,GAAG;AAI3B,+BAAiB;;AAEnB,4BAAQ,gBAAgB,CAAC,SAAQ;AAC/B,+BAAiB,QAAQ,MAAM,mBAAmB,GAAG,CAAC;YACxD,CAAC;;eAEE;AACL,cAAI,QAAQ,qBAAqB;AAC/B,wBACE,GAAG,2BAA2B,gBACX,YAAY,IAAI;;+FAEgE;;AAGvG,2BAAiB;;AAGnB,eAAO;MACT,GACA,CAAA,CAA8C;IAElD,CAAC;;AAGH,SAAO;IACL;IACA;IACA;IACA;IACA;;AAEJ;AA5TgB;AA8TV,SAAU,iBACd,YACA,iBAAyB;AAEzB,MAAI,SAAkC,CAAA;AAEtC,QAAM,gBAAgB,oBAAoB,UAAU;AACpD,WAAS,OAAO,OAAO,cAAc,MAAM;AAE3C,QAAM,gBAAgB,oBAAoB,cAAc,KAAK;AAC7D,QAAM,kBAAkB,cAAc;AACtC,WAAS,OAAO,OAAO,cAAc,MAAM;AAE3C,WAAS,OAAO,OAAO,sBAAsB,eAAe,CAAC;AAE7D,WAAS,OAAO,OAAO,qBAAqB,eAAe,CAAC;AAE5D,WAAS,OAAO,OACd,wBAAwB,iBAAiB,eAAe,CAAC;AAG3D,WAAS,OAAO,OAAO,wBAAwB,eAAe,CAAC;AAE/D,SAAO;AACT;AAxBgB;AA0BhB,SAAS,sBACP,YAAuB;AAEvB,MAAI,SAAkC,CAAA;AACtC,QAAM,qBAAqB,eAAO,YAAY,CAAC,gBAC7C,iBAAS,YAAY,OAAO,CAAC,CAAC;AAGhC,WAAS,OAAO,OAAO,qBAAqB,kBAAkB,CAAC;AAE/D,WAAS,OAAO,OAAO,uBAAuB,kBAAkB,CAAC;AAEjE,WAAS,OAAO,OAAO,qBAAqB,kBAAkB,CAAC;AAE/D,WAAS,OAAO,OAAO,sBAAsB,kBAAkB,CAAC;AAEhE,WAAS,OAAO,OAAO,sBAAsB,kBAAkB,CAAC;AAEhE,SAAO;AACT;AAnBS;AA0BH,SAAU,oBACd,YAAuB;AAEvB,QAAM,+BAA+B,eAAO,YAAY,CAAC,aAAY;AACnE,WAAO,CAAC,YAAI,UAAU,OAAO;EAC/B,CAAC;AAED,QAAM,SAAS,YAAI,8BAA8B,CAAC,aAAY;AAC5D,WAAO;MACL,SACE,mBACA,SAAS,OACT;MACF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,QAAM,QAAQ,mBAAW,YAAY,4BAA4B;AACjE,SAAO,EAAE,QAAQ,MAAK;AACxB;AApBgB;AAsBV,SAAU,oBACd,YAAuB;AAEvB,QAAM,+BAA+B,eAAO,YAAY,CAAC,aAAY;AACnE,UAAM,UAAU,SAAS,OAAO;AAChC,WACE,CAAC,iBAAS,OAAO,KACjB,CAAC,mBAAW,OA
AO,KACnB,CAAC,YAAI,SAAS,MAAM,KACpB,CAAC,iBAAS,OAAO;EAErB,CAAC;AAED,QAAM,SAAS,YAAI,8BAA8B,CAAC,aAAY;AAC5D,WAAO;MACL,SACE,mBACA,SAAS,OACT;MAEF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,QAAM,QAAQ,mBAAW,YAAY,4BAA4B;AACjE,SAAO,EAAE,QAAQ,MAAK;AACxB;AA3BgB;AA6BhB,IAAM,eAAe;AAEf,SAAU,qBACd,YAAuB;EAEvB,MAAM,wBAAwB,kBAAiB;IA3fjD,OA2fiD;;;IAA/C,cAAA;;AACE,WAAA,QAAQ;IAKV;IAHE,eAAe,MAAa;AAC1B,WAAK,QAAQ;IACf;;AAGF,QAAM,eAAe,eAAO,YAAY,CAAC,aAAY;AACnD,UAAM,UAAU,SAAS;AAEzB,QAAI;AACF,YAAM,YAAY,aAAa,OAAiB;AAChD,YAAM,mBAAmB,IAAI,gBAAe;AAC5C,uBAAiB,MAAM,SAAS;AAEhC,aAAO,iBAAiB;aACjB,GAAG;AAGV,aAAO,aAAa,KAAM,QAAmB,MAAM;;EAEvD,CAAC;AAED,QAAM,SAAS,YAAI,cAAc,CAAC,aAAY;AAC5C,WAAO;MACL,SACE,qDAEA,SAAS,OACT;MAGF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,SAAO;AACT;AA1CgB;AA4CV,SAAU,sBACd,YAAuB;AAEvB,QAAM,qBAAqB,eAAO,YAAY,CAAC,aAAY;AACzD,UAAM,UAAU,SAAS;AACzB,WAAO,QAAQ,KAAK,EAAE;EACxB,CAAC;AAED,QAAM,SAAS,YAAI,oBAAoB,CAAC,aAAY;AAClD,WAAO;MACL,SACE,mBACA,SAAS,OACT;MACF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,SAAO;AACT;AApBgB;AAsBhB,IAAM,iBAAiB;AAEjB,SAAU,uBACd,YAAuB;EAEvB,MAAM,0BAA0B,kBAAiB;IA/jBnD,OA+jBmD;;;IAAjD,cAAA;;AACE,WAAA,QAAQ;IAKV;IAHE,iBAAiB,MAAa;AAC5B,WAAK,QAAQ;IACf;;AAGF,QAAM,eAAe,eAAO,YAAY,CAAC,aAAY;AACnD,UAAM,UAAU,SAAS;AACzB,QAAI;AACF,YAAM,YAAY,aAAa,OAAO;AACtC,YAAM,qBAAqB,IAAI,kBAAiB;AAChD,yBAAmB,MAAM,SAAS;AAElC,aAAO,mBAAmB;aACnB,GAAG;AAGV,aAAO,eAAe,KAAK,QAAQ,MAAM;;EAE7C,CAAC;AAED,QAAM,SAAS,YAAI,cAAc,CAAC,aAAY;AAC5C,WAAO;MACL,SACE,qDAEA,SAAS,OACT;MAGF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,SAAO;AACT;AAzCgB;AA2CV,SAAU,qBACd,YAAuB;AAEvB,QAAM,eAAe,eAAO,YAAY,CAAC,aAAY;AACnD,UAAM,UAAU,SAAS,OAAO;AAChC,WAAO,mBAAmB,WAAW,QAAQ,aAAa,QAAQ;EACpE,CAAC;AAED,QAAM,SAAS,YAAI,cAAc,CAAC,aAAY;AAC5C,WAAO;MACL,SACE,mBACA,SAAS,OACT;MACF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,SAAO;AACT;AApBgB;AAuBV,SAAU,sBACd,YAAuB;AAEvB,QAAM,QAAqB,CAAA;AAC3B,MAAI,oBAAoB,YAAI,YAAY,CAAC,cAAkB;AACzD,WAAO,eACL,YACA,CAAC,QAAQ,cAAa;AACpB,UACE,UAAU,QAAQ,WAAY,UAAU,QAAmB,UAC3D,CAAC,iBAAS,OAAO,SAAS,KAC1B,UAAU,YAAY,MAAM,IAC5B;AAGA,cAAM,KAAK,SAAS;AACpB,eAAO,KAAK,SAAS;AACrB,eAAO;;AAET,aAAO;IACT,GACA,CAAA,CAAiB;EAErB,CAAC;AAED,sBAAoB,gBAAQ,iBAAiB;AAE7C,QAAM,oBAAoB,eAAO,mBAAmB,CAAC,qBAAoB;AACvE,WAAO,iBAAiB,SAAS;EACnC,CAAC;AAED,QAAM,SAAS,YAAI,mBAAmB,CAAC,mBAAuB;AAC5D,UAAM,iBAAiB,YAAI,gBAAgB,CAAC,aAAiB;AAC3D,aAAO,SAAS;IAClB,CAAC;AAED,UAAM,gBAAsB,aAAM,cAAc,EAAG;AACnD,WAAO;MACL,SACE,6BAA6B,aAAa,wDACY,eAAe,KACnE,IAAI,CACL;MACH,MAAM,yBAAyB;MAC/B,YAAY;;EAEhB,CAAC;AAED,SAAO;AACT;AAjDgB;AAmDV,SAAU,qBACd,YAAuB;AAEvB,QAAM,eAAe,eAAO,YAAY,CAAC,UAAc;AACrD,QAAI,CAAC,YAAI,OAAO,OAAO,GAAG;AACxB,aAAO;;AAET,UAAM,QAAQ,MAAM;AAEpB,WAAO,UAAU,MAAM,WAAW,UAAU,MAAM,MAAM,CAAC,iBAAS,KAAK;EACzE,CAAC;AAED,QAAM,SAAS,YAAI,cAAc,CAAC,aAAY;AAC5C,WAAO;MACL,SACE,mBACA,SAAS,OACT;MACF,MAAM,yBAAyB;MAC/B,YAAY,CAAC,QAAQ;;EAEzB,CAAC;AAED,SAAO;AACT;AAxBgB;AA0BV,SAAU,wBACd,YACA,YAAoB;AAEpB,QAAM,eAAe,eAAO,YAAY,CAAC,UAAc;AACrD,WACE,MAAM,cAAc,UAAa,CAAC,iBAAS,YAAY,MAAM,SAAS;EAE1E,CAAC;AAED,QAAM,SAAS,YAAI,cAAc,CAAC,YAAW;AAC3C,UAAM,MACJ,iBAAiB,QAAQ,IAAI,8DAA8D,QAAQ,SAAS;AAE9G,WAAO;MACL,SAAS;MACT,MAAM,yBAAyB;MAC/B,YAAY,CAAC,OAAO;;EAExB,CAAC;AAED,SAAO;AACT;AAtBgB;AAwBV,SAAU,wBACd,YAAuB;AAEvB,QAAM,SAAkC,CAAA;AAExC,QAAM,cAAc,eAClB,YACA,CAAC,QAAQ,SAAS,QAAO;AACvB,UAAM,UAAU,QAAQ;AAExB,QAAI,YAAY,MAAM,IAAI;AACxB,aAAO;;AAKT,QAAI,iBAAS,OAAO,GAAG;AACrB,aAAO,KAAK,EAAE,KAAK,SAAS,KAAK,WAAW,QAAO,CAAE;eAC5C,iBAAS,OAAO,KAAK,WAAW,OAAO,GAAG;AACnD,aAAO,KAAK,EAAE,KAAK,QAAQ,QAAQ,KAAK,WAAW,QAAO,CAAE;;AAE9D,WAAO;EACT,GACA,CAAA,CAA0D;AAG5D,kBAAQ,YAAY,CAAC,SAAS,YAAW;AACvC,oBAAQ,aAAa,CAAC,EAAE,KAA
K,KAAK,UAAS,MAAM;AAC/C,UAAI,UAAU,OAAO,cAAc,KAAK,QAAQ,OAAO,GAAG;AACxD,cAAM,MACJ,YAAY,UAAU,IAAI;4CACmB,QAAQ,IAAI;;AAG3D,eAAO,KAAK;UACV,SAAS;UACT,MAAM,yBAAyB;UAC/B,YAAY,CAAC,SAAS,SAAS;SAChC;;IAEL,CAAC;EACH,CAAC;AAED,SAAO;AACT;AA5CgB;AA8ChB,SAAS,cAAc,KAAa,SAAY;AAE9C,MAAI,iBAAS,OAAO,GAAG;AACrB,UAAM,cAAc,QAAQ,KAAK,GAAG;AACpC,WAAO,gBAAgB,QAAQ,YAAY,UAAU;aAC5C,mBAAW,OAAO,GAAG;AAE9B,WAAO,QAAQ,KAAK,GAAG,CAAA,GAAI,CAAA,CAAE;aACpB,YAAI,SAAS,MAAM,GAAG;AAE/B,WAAO,QAAQ,KAAK,KAAK,GAAG,CAAA,GAAI,CAAA,CAAE;aACzB,OAAO,YAAY,UAAU;AACtC,WAAO,YAAY;SACd;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AAhBS;AAkBT,SAAS,WAAW,QAAc;AAEhC,QAAM,YAAY;IAChB;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;;AAEF,SACE,aAAK,WAAW,CAAC,SAAS,OAAO,OAAO,QAAQ,IAAI,MAAM,EAAE,MAAM;AAEtE;AApBS;AAsBH,SAAU,gBAAgB,SAAe;AAC7C,QAAM,QAAQ,QAAQ,aAAa,MAAM;AAGzC,SAAO,IAAI,OAAO,OAAO,QAAQ,MAAM,KAAK,KAAK;AACnD;AALgB;AAOV,SAAU,cAAc,SAAe;AAC3C,QAAM,QAAQ,QAAQ,aAAa,OAAO;AAG1C,SAAO,IAAI,OAAO,GAAG,QAAQ,MAAM,IAAI,KAAK;AAC9C;AALgB;AAOV,SAAU,qBACd,iBACA,YACA,0BAA6C;AAE7C,QAAM,SAAkC,CAAA;AAGxC,MAAI,CAAC,YAAI,iBAAiB,YAAY,GAAG;AACvC,WAAO,KAAK;MACV,SACE,wDACA,eACA;MACF,MAAM,yBAAyB;KAChC;;AAEH,MAAI,CAAC,YAAI,iBAAiB,KAAK,GAAG;AAChC,WAAO,KAAK;MACV,SACE,wDACA,QACA;MACF,MAAM,yBAAyB;KAChC;;AAGH,MACE,YAAI,iBAAiB,KAAK,KAC1B,YAAI,iBAAiB,YAAY,KACjC,CAAC,YAAI,gBAAgB,OAAO,gBAAgB,WAAW,GACvD;AACA,WAAO,KAAK;MACV,SACE,kDAAkD,YAAY,MAAM,gBAAgB,WAAW;;MAEjG,MAAM,yBAAyB;KAChC;;AAGH,MAAI,YAAI,iBAAiB,KAAK,GAAG;AAC/B,oBAAQ,gBAAgB,OAAO,CAAC,eAAe,iBAAgB;AAC7D,sBAAQ,eAAe,CAAC,aAAa,YAAW;AAC9C,YAAI,oBAAY,WAAW,GAAG;AAC5B,iBAAO,KAAK;YACV,SACE,sEACI,YAAY,gBAAgB,OAAO;;YACzC,MAAM,yBAAyB;WAChC;mBACQ,YAAI,aAAa,YAAY,GAAG;AACzC,gBAAM,YAAY,gBAAQ,YAAY,UAAU,IAC5C,YAAY,aACZ,CAAC,YAAY,UAAU;AAC3B,0BAAQ,WAAW,CAAC,kBAAiB;AACnC,gBACE,CAAC,oBAAY,aAAa,KAC1B,CAAC,iBAAS,eAAe,aAAa,GACtC;AACA,qBAAO,KAAK;gBACV,SAAS,8DAA8D,cAAc,IAAI,eAAe,YAAY,IAAI,sBAAsB,YAAY;;gBAC1J,MAAM,yBAAyB;eAChC;;UAEL,CAAC;;MAEL,CAAC;IACH,CAAC;;AAGH,SAAO;AACT;AAvEgB;AAyEV,SAAU,4BACd,iBACA,YACA,0BAA6C;AAE7C,QAAM,WAAW,CAAA;AACjB,MAAI,kBAAkB;AACtB,QAAM,gBAAgB,gBAAQ,gBAAQ,eAAO,gBAAgB,KAAK,CAAC,CAAC;AAEpE,QAAM,qBAAqB,eACzB,eACA,CAAC,aAAa,SAAS,OAAO,MAAM,MAAM,EAAE;AAE9C,QAAM,sBAAsB,aAAa,wBAAwB;AACjE,MAAI,YAAY;AACd,oBAAQ,oBAAoB,CAAC,YAAW;AACtC,YAAM,YAAY,sBAAsB,SAAS,mBAAmB;AACpE,UAAI,cAAc,OAAO;AACvB,cAAM,UAAU,2BAA2B,SAAS,SAAS;AAC7D,cAAM,oBAAoB;UACxB;UACA,MAAM,UAAU;UAChB,WAAW;;AAEb,iBAAS,KAAK,iBAAiB;aAC1B;AAEL,YAAI,YAAI,SAAS,aAAa,GAAG;AAC/B,cAAI,QAAQ,gBAAgB,MAAM;AAChC,8BAAkB;;eAEf;AACL,cACE,iBAAiB,qBAAqB,QAAQ,OAAiB,GAC/D;AACA,8BAAkB;;;;IAI1B,CAAC;;AAGH,MAAI,cAAc,CAAC,iBAAiB;AAClC,aAAS,KAAK;MACZ,SACE;MAKF,MAAM,yBAAyB;KAChC;;AAEH,SAAO;AACT;AAtDgB;AAwDV,SAAU,iBAAiB,aAEhC;AACC,QAAM,eAAoB,CAAA;AAC1B,QAAM,YAAY,aAAK,WAAW;AAElC,kBAAQ,WAAW,CAAC,YAAW;AAC7B,UAAM,iBAAiB,YAAY,OAAO;AAG1C,QAAI,gBAAQ,cAAc,GAAG;AAC3B,mBAAa,OAAO,IAAI,CAAA;WACnB;AACL,YAAM,MAAM,sBAAsB;;EAEtC,CAAC;AAED,SAAO;AACT;AAlBgB;AAqBV,SAAU,gBAAgB,WAAoB;AAClD,QAAM,UAAU,UAAU;AAE1B,MAAI,iBAAS,OAAO,GAAG;AACrB,WAAO;aACE,mBAAW,OAAO,GAAG;AAE9B,WAAO;aACE,YAAI,SAAS,MAAM,GAAG;AAE/B,WAAO;aACE,iBAAS,OAAO,GAAG;AAC5B,WAAO;SACF;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AAhBgB;AAkBV,SAAU,eAAe,SAAY;AACzC,MAAI,iBAAS,OAAO,KAAK,QAAQ,WAAW,GAAG;AAC7C,WAAO,QAAQ,WAAW,CAAC;SACtB;AACL,WAAO;;AAEX;AANgB;AAWT,IAAM,gCAAwD;;EAEnE,MAAM,gCAAU,MAAI;AAClB,UAAM,MAAM,KAAK;AACjB,aAAS,IAAI,KAAK,WAAW,IAAI,KAAK,KAAK;AACzC,YAAM,IAAI,KAAK,WAAW,CAAC;AAC3B,UAAI,MAAM,IAAI;AACZ,aAAK,YAAY,IAAI;AACrB,eAAO;iBACE,MAAM,IAAI;AACnB,YAAI,KAAK,WAAW,IAAI,CAAC,MAAM,IAAI;AACjC,eAAK,YAAY,IAAI;eAChB;AACL,eAAK,YAAY,IAAI;;AAEvB,eAAO;;;AAGX,WAAO;EACT,GAjBM;EAmBN,WAAW;;AAGb,SAAS,sBAC
P,SACA,yBAAiC;AASjC,MAAI,YAAI,SAAS,aAAa,GAAG;AAG/B,WAAO;SACF;AAEL,QAAI,iBAAS,QAAQ,OAAO,GAAG;AAC7B,UAAI;AAEF,yBAAiB,yBAAyB,QAAQ,OAAiB;eAC5D,GAAG;AAEV,eAAO;UACL,OAAO,yBAAyB;UAChC,QAAS,EAAY;;;AAGzB,aAAO;eACE,iBAAS,QAAQ,OAAO,GAAG;AAEpC,aAAO;eACE,gBAAgB,OAAO,GAAG;AAEnC,aAAO,EAAE,OAAO,yBAAyB,kBAAiB;WACrD;AACL,YAAM,MAAM,sBAAsB;;;AAGxC;AAvCS;AAyCH,SAAU,2BACd,SACA,SAKC;AAGD,MAAI,QAAQ,UAAU,yBAAyB,qBAAqB;AAClE,WACE;0BAC4B,QAAQ,IAAI;gBACtB,QAAQ,MAAM;;aAGzB,QAAQ,UAAU,yBAAyB,mBAAmB;AACvE,WACE;0BAC4B,QAAQ,IAAI;;SAGrC;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AA1BgB;AA4BhB,SAAS,aAAa,cAAiC;AACrD,QAAM,YAAY,YAAI,cAAc,CAAC,gBAAe;AAClD,QAAI,iBAAS,WAAW,GAAG;AACzB,aAAO,YAAY,WAAW,CAAC;WAC1B;AACL,aAAO;;EAEX,CAAC;AAED,SAAO;AACT;AAVS;AAYT,SAAS,iBACP,KACA,KACA,OAAQ;AAER,MAAI,IAAI,GAAG,MAAM,QAAW;AAC1B,QAAI,GAAG,IAAI,CAAC,KAAK;SACZ;AACL,QAAI,GAAG,EAAE,KAAK,KAAK;;AAEvB;AAVS;AAYF,IAAM,qBAAqB;AAiBlC,IAAI,4BAAsC,CAAA;AACpC,SAAU,yBAAyB,UAAgB;AACvD,SAAO,WAAW,qBACd,WACA,0BAA0B,QAAQ;AACxC;AAJgB;AAchB,SAAS,kCAA+B;AACtC,MAAI,gBAAQ,yBAAyB,GAAG;AACtC,gCAA4B,IAAI,MAAM,KAAK;AAC3C,aAAS,IAAI,GAAG,IAAI,OAAO,KAAK;AAC9B,gCAA0B,CAAC,IAAI,IAAI,MAAM,MAAM,CAAC,EAAE,IAAI,OAAO;;;AAGnE;AAPS;;;ACjoCH,SAAU,uBACd,aACA,gBAAyB;AAEzB,QAAM,eAAe,YAAY;AACjC,MAAI,iBAAiB,eAAe,cAAc;AAChD,WAAO;SACF;AACL,WACE,eAAe,aAAa,QAC5B,eAAe,mBAAoB,YAAY,MAAM;;AAG3D;AAbgB;AAiBV,SAAU,mCACd,OACA,SAAkB;AAElB,SAAO,MAAM,iBAAiB,QAAQ;AACxC;AALgB;AAOT,IAAI,oBAAoB;AACxB,IAAM,kBAAqD,CAAA;AAE5D,SAAU,kBAAkB,YAAuB;AAEvD,QAAM,uBAAuB,iBAAiB,UAAU;AAGxD,0BAAwB,oBAAoB;AAG5C,0BAAwB,oBAAoB;AAC5C,6BAA2B,oBAAoB;AAE/C,kBAAQ,sBAAsB,CAAC,YAAW;AACxC,YAAQ,WAAW,QAAQ,gBAAiB,SAAS;EACvD,CAAC;AACH;AAdgB;AAgBV,SAAU,iBAAiB,YAAuB;AACtD,MAAI,SAAS,cAAM,UAAU;AAE7B,MAAI,aAAa;AACjB,MAAI,YAAY;AAChB,SAAO,WAAW;AAChB,iBAAa,gBACX,gBAAQ,YAAI,YAAY,CAAC,gBAAgB,YAAY,UAAU,CAAC,CAAC;AAGnE,UAAM,gBAAgB,mBAAW,YAAY,MAAM;AAEnD,aAAS,OAAO,OAAO,aAAa;AAEpC,QAAI,gBAAQ,aAAa,GAAG;AAC1B,kBAAY;WACP;AACL,mBAAa;;;AAGjB,SAAO;AACT;AArBgB;AAuBV,SAAU,wBAAwB,YAAuB;AAC7D,kBAAQ,YAAY,CAAC,gBAAe;AAClC,QAAI,CAAC,oBAAoB,WAAW,GAAG;AACrC,sBAAgB,iBAAiB,IAAI;AAC/B,kBAAa,eAAe;;AAIpC,QACE,sBAAsB,WAAW,KACjC,CAAC,gBAAQ,YAAY,UAAU,GAG/B;AACA,kBAAY,aAAa,CAAC,YAAY,UAAkC;;AAG1E,QAAI,CAAC,sBAAsB,WAAW,GAAG;AACvC,kBAAY,aAAa,CAAA;;AAG3B,QAAI,CAAC,gCAAgC,WAAW,GAAG;AACjD,kBAAY,kBAAkB,CAAA;;AAGhC,QAAI,CAAC,mCAAmC,WAAW,GAAG;AACpD,kBAAY,qBAAqB,CAAA;;EAErC,CAAC;AACH;AA7BgB;AA+BV,SAAU,2BAA2B,YAAuB;AAChE,kBAAQ,YAAY,CAAC,gBAAe;AAElC,gBAAY,kBAAkB,CAAA;AAC9B,oBAAQ,YAAY,oBAAqB,CAAC,KAAK,QAAO;AACpD,kBAAY,gBAAiB,KAC3B,gBAAgB,GAAwB,EAAE,YAAa;IAE3D,CAAC;EACH,CAAC;AACH;AAVgB;AAYV,SAAU,wBAAwB,YAAuB;AAC7D,kBAAQ,YAAY,CAAC,gBAAe;AAClC,kCAA8B,CAAA,GAAI,WAAW;EAC/C,CAAC;AACH;AAJgB;AAMV,SAAU,8BACd,MACA,UAAmB;AAEnB,kBAAQ,MAAM,CAAC,aAAY;AACzB,aAAS,mBAAoB,SAAS,YAAa,IAAI;EACzD,CAAC;AAED,kBAAQ,SAAS,YAAY,CAAC,iBAAgB;AAC5C,UAAM,UAAU,KAAK,OAAO,QAAQ;AAEpC,QAAI,CAAC,iBAAS,SAAS,YAAY,GAAG;AACpC,oCAA8B,SAAS,YAAY;;EAEvD,CAAC;AACH;AAfgB;AAiBV,SAAU,oBAAoB,SAAkB;AACpD,SAAO,YAAI,SAAS,cAAc;AACpC;AAFgB;AAIV,SAAU,sBAAsB,SAAkB;AACtD,SAAO,YAAI,SAAS,YAAY;AAClC;AAFgB;AAIV,SAAU,gCAAgC,SAAkB;AAChE,SAAO,YAAI,SAAS,iBAAiB;AACvC;AAFgB;AAIV,SAAU,mCACd,SAAkB;AAElB,SAAO,YAAI,SAAS,oBAAoB;AAC1C;AAJgB;AAMV,SAAU,YAAY,SAAkB;AAC5C,SAAO,YAAI,SAAS,cAAc;AACpC;AAFgB;;;AClKT,IAAM,4BAAwD;EACnE,iCAAiC,OAAa;AAC5C,WAAO,uDAAuD,MAAM,KAAK;EAC3E;EAEA,iCACE,UACA,aACA,QACA,MACA,QAAe;AAEf,WACE,2BAA2B,SAAS,OAClC,WAAW,CACZ,iBAAiB,WAAW,aAAkB,MAAM;EAEzD;;;;AC8BF,IAAY;CAAZ,SAAYC,2BAAwB;AAClC,EAAAA,0BAAAA,0BAAA,iBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,iBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,kBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,yBAAA,IAAA,CAAA,IAA
A;AACA,EAAAA,0BAAAA,0BAAA,0BAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,0BAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,0BAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,uCAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,yCAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,oDAAA,IAAA,CAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,2CAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,kBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,qBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,sBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,qBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,qBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,mBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,0BAAAA,0BAAA,iDAAA,IAAA,EAAA,IAAA;AACF,GAnBY,6BAAA,2BAAwB,CAAA,EAAA;AAyBpC,IAAM,uBAA+C;EACnD,+BAA+B;EAC/B,kBAAkB;EAClB,wBAAwB;EACxB,0BAA0B,CAAC,MAAM,IAAI;EACrC,qBAAqB;EACrB,UAAU;EACV,sBAAsB;EACtB,eAAe;EACf,iBAAiB;EACjB,iBAAiB;;AAGnB,OAAO,OAAO,oBAAoB;AAE5B,IAAO,QAAP,MAAY;EAzFlB,OAyFkB;;;EA4BhB,YACY,iBACV,SAAuB,sBAAoB;AADjC,SAAA,kBAAA;AAvBL,SAAA,wBAAiD,CAAA;AACjD,SAAA,yBAAkD,CAAA;AAE/C,SAAA,qBAAuD,CAAA;AACvD,SAAA,+BAEN,CAAA;AAEM,SAAA,QAAkB,CAAA;AAElB,SAAA,cAA+C,CAAA;AAGjD,SAAA,kBAA2B;AAC3B,SAAA,gBAAyB;AACzB,SAAA,YAAqB;AACrB,SAAA,qBAA8C,CAAA;AAu0BtD,SAAA,aAAa,CAAI,WAAmB,cAAyB;AAG3D,UAAI,KAAK,kBAAkB,MAAM;AAC/B,aAAK;AACL,cAAM,SAAS,IAAI,MAAM,KAAK,kBAAkB,CAAC,EAAE,KAAK,GAAI;AAC5D,YAAI,KAAK,kBAAkB,KAAK,mBAAmB;AACjD,kBAAQ,IAAI,GAAG,MAAM,QAAQ,SAAS,GAAG;;AAE3C,cAAM,EAAE,MAAM,MAAK,IAAK,MAAM,SAAS;AAEvC,cAAM,cAAc,OAAO,KAAK,QAAQ,OAAO,QAAQ;AACvD,YAAI,KAAK,kBAAkB,KAAK,mBAAmB;AACjD,sBAAY,GAAG,MAAM,QAAQ,SAAS,WAAW,IAAI,IAAI;;AAE3D,aAAK;AACL,eAAO;aACF;AACL,eAAO,UAAS;;IAEpB;AAj1BE,QAAI,OAAO,WAAW,WAAW;AAC/B,YAAM,MACJ,4HACiD;;AAKrD,SAAK,SAAS,eAAO,CAAA,GAAI,sBAAsB,MAAM;AAErD,UAAM,eAAe,KAAK,OAAO;AACjC,QAAI,iBAAiB,MAAM;AACzB,WAAK,oBAAoB;AACzB,WAAK,gBAAgB;eACZ,OAAO,iBAAiB,UAAU;AAC3C,WAAK,oBAAoB;AACzB,WAAK,gBAAgB;;AAEvB,SAAK,kBAAkB;AAEvB,SAAK,WAAW,qBAAqB,MAAK;AACxC,UAAI;AACJ,UAAI,oBAAoB;AACxB,WAAK,WAAW,yBAAyB,MAAK;AAC5C,YACE,KAAK,OAAO,2BACZ,qBAAqB,wBACrB;AAEA,eAAK,OAAO,yBAAyB;eAChC;AACL,cACE,KAAK,OAAO,6BACZ,qBAAqB,0BACrB;AACA,kBAAM,MACJ,iLAC2G;;;AAKjH,YAAI,OAAO,YAAY,OAAO,qBAAqB;AACjD,gBAAM,MACJ,oEAAoE;;AAIxE,aAAK,kBAAkB,kBAAkB,KACvC,KAAK,OAAO,gBAAgB;AAE9B,aAAK,gBAAgB,QAAQ,KAAK,KAAK,OAAO,gBAAgB;AAG9D,YAAI,gBAAQ,eAAe,GAAG;AAC5B,6BAAmB;YACjB,OAAO,EAAE,aAAa,cAAM,eAAe,EAAC;YAC5C,aAAa;;eAEV;AAEL,8BAAoB;AACpB,6BAAmB,cAAiC,eAAe;;MAEvE,CAAC;AAED,UAAI,KAAK,OAAO,oBAAoB,OAAO;AACzC,aAAK,WAAW,wBAAwB,MAAK;AAC3C,eAAK,wBAAwB,KAAK,sBAAsB,OACtD,qBACE,kBACA,KAAK,iBACL,KAAK,OAAO,wBAAwB,CACrC;QAEL,CAAC;AAED,aAAK,WAAW,+BAA+B,MAAK;AAClD,eAAK,yBAAyB,KAAK,uBAAuB,OACxD,4BACE,kBACA,KAAK,iBACL,KAAK,OAAO,wBAAwB,CACrC;QAEL,CAAC;;AAIH,uBAAiB,QAAQ,iBAAiB,QACtC,iBAAiB,QACjB,CAAA;AAIJ,sBAAQ,iBAAiB,OAAO,CAAC,eAAe,iBAAgB;AAC9D,yBAAiB,MAAM,YAAY,IAAI,eACrC,eACA,CAAC,gBAAgB,oBAAY,WAAW,CAAC;MAE7C,CAAC;AAED,YAAM,eAAe,aAAK,iBAAiB,KAAK;AAEhD,sBACE,iBAAiB,OACjB,CAAC,YAAyB,gBAAe;AACvC,aAAK,WAAW,UAAU,WAAW,gBAAgB,MAAK;AACxD,eAAK,MAAM,KAAK,WAAW;AAE3B,cAAI,KAAK,OAAO,oBAAoB,OAAO;AACzC,iBAAK,WAAW,oBAAoB,MAAK;AACvC,mBAAK,wBAAwB,KAAK,sBAAsB,OACtD,iBAAiB,YAAY,YAAY,CAAC;YAE9C,CAAC;;AAMH,cAAI,gBAAQ,KAAK,qBAAqB,GAAG;AACvC,8BAAkB,UAAU;AAE5B,gBAAI;AACJ,iBAAK,WAAW,qBAAqB,MAAK;AACxC,kCAAoB,kBAAkB,YAAY;gBAChD,0BACE,KAAK,OAAO;gBACd,kBAAkB,OAAO;gBACzB,qBAAqB,OAAO;gBAC5B,UAAU,OAAO;gBACjB,QAAQ,KAAK;eACd;YACH,CAAC;AAED,iBAAK,mBAAmB,WAAW,IACjC,kBAAkB;AAEpB,iBAAK,6BAA6B,WAAW,IAC3C,kBAAkB;AAEpB,iBAAK,cAAc,eACjB,CAAA,GACA,KAAK,aACL,kBAAkB,WAAW;AAG/B,iBAAK,YAAY,kBAAkB,aAAa,KAAK;AAErD,iBAAK,mBAAmB,WAAW,IACjC,kBAAkB;;QAExB,CAAC;MACH,CAAC;AAGH,WAAK,cAAc,iBAAiB;AAEpC,UACE,CAAC,gBAAQ,KAAK,qBAAqB,KACnC,CAAC,KA
AK,OAAO,+BACb;AACA,cAAM,iBAAiB,YAAI,KAAK,uBAAuB,CAAC,UAAS;AAC/D,iBAAO,MAAM;QACf,CAAC;AACD,cAAM,uBAAuB,eAAe,KAC1C,2BAA2B;AAE7B,cAAM,IAAI,MACR,8CAA8C,oBAAoB;;AAKtE,sBAAQ,KAAK,wBAAwB,CAAC,sBAAqB;AACzD,sBAAc,kBAAkB,OAAO;MACzC,CAAC;AAED,WAAK,WAAW,wCAAwC,MAAK;AAI3D,YAAI,gBAAgB;AAClB,eAAK,YAAiB;AACtB,eAAK,QAAQ,KAAK;eACb;AACL,eAAK,kBAAkB;AACvB,eAAK,QAAQ,KAAK;;AAGpB,YAAI,mBAAmB;AACrB,eAAK,cAAc;;AAGrB,YAAI,KAAK,oBAAoB,OAAO;AAClC,eAAK,mBAAmB;;AAG1B,YAAI,KAAK,kBAAkB,OAAO;AAChC,eAAK,mCAAmC;;AAG1C,YAAI,QAAQ,KAAK,KAAK,OAAO,gBAAgB,GAAG;AAC9C,eAAK,sBAAsB,KAAK;mBACvB,aAAa,KAAK,KAAK,OAAO,gBAAgB,GAAG;AAC1D,eAAK,sBAAsB,KAAK;mBACvB,cAAc,KAAK,KAAK,OAAO,gBAAgB,GAAG;AAC3D,eAAK,sBAAsB,KAAK;eAC3B;AACL,gBAAM,MACJ,8CAA8C,KAAK,OAAO,gBAAgB,GAAG;;AAIjF,YAAI,KAAK,WAAW;AAClB,eAAK,WAAW,KAAK;AACrB,eAAK,gBAAgB,KAAK;eACrB;AACL,eAAK,WAAW,KAAK;AACrB,eAAK,gBAAgB,KAAK;;MAE9B,CAAC;AAED,WAAK,WAAW,gCAAgC,MAAK;AACnD,cAAM,mBAAmB,eACvB,KAAK,oBACL,CAAC,mBAAmB,gBAAgB,aAAY;AAC9C,cAAI,mBAAmB,OAAO;AAC5B,8BAAkB,KAAK,QAAQ;;AAEjC,iBAAO;QACT,GACA,CAAA,CAAc;AAGhB,YAAI,OAAO,uBAAuB,CAAC,gBAAQ,gBAAgB,GAAG;AAC5D,gBAAM,MACJ,kBAAkB,iBAAiB,KACjC,IAAI,CACL;;yEAE4E;;MAGnF,CAAC;AAED,WAAK,WAAW,0BAA0B,MAAK;AAC7C,+BAAsB;MACxB,CAAC;AAED,WAAK,WAAW,oBAAoB,MAAK;AACvC,yBAAiB,IAAI;MACvB,CAAC;IACH,CAAC;EACH;EAEO,SACL,MACA,cAAsB,KAAK,aAAW;AAEtC,QAAI,CAAC,gBAAQ,KAAK,qBAAqB,GAAG;AACxC,YAAM,iBAAiB,YAAI,KAAK,uBAAuB,CAAC,UAAS;AAC/D,eAAO,MAAM;MACf,CAAC;AACD,YAAM,uBAAuB,eAAe,KAC1C,2BAA2B;AAE7B,YAAM,IAAI,MACR,yEACE,oBAAoB;;AAI1B,WAAO,KAAK,iBAAiB,MAAM,WAAW;EAChD;;;;;EAMQ,iBAAiB,MAAc,aAAmB;AACxD,QAAI,GACF,GACA,GACA,eACA,WACA,cACA,SACA,YACA,aACA,OACA,SACA,UACA,WACA,aACA,KACA;AACF,UAAM,UAAU;AAChB,UAAM,YAAY,QAAQ;AAC1B,QAAI,SAAS;AACb,QAAI,qBAAqB;AAKzB,UAAM,wBAAwB,KAAK,YAC/B,IACA,KAAK,MAAM,KAAK,SAAS,EAAE;AAC/B,UAAM,gBAAgB,IAAI,MAAM,qBAAqB;AACrD,UAAM,SAAyB,CAAA;AAC/B,QAAI,OAAO,KAAK,kBAAkB,IAAI;AACtC,QAAI,SAAS,KAAK,kBAAkB,IAAI;AACxC,UAAM,SAAc,iBAAiB,KAAK,WAAW;AACrD,UAAM,aAAa,KAAK;AACxB,UAAM,wBAAwB,KAAK,OAAO;AAE1C,QAAI,yBAAyB;AAC7B,QAAI,qBAAuC,CAAA;AAC3C,QAAI,mCAEA,CAAA;AAEJ,UAAM,YAAsB,CAAA;AAE5B,UAAM,aAA+B,CAAA;AACrC,WAAO,OAAO,UAAU;AACxB,QAAI;AAEJ,aAAS,0BAAuB;AAC9B,aAAO;IACT;AAFS;AAIT,aAAS,6BAA6B,UAAgB;AACpD,YAAM,mBAAmB,yBAAyB,QAAQ;AAC1D,YAAM,mBACJ,iCAAiC,gBAAgB;AACnD,UAAI,qBAAqB,QAAW;AAClC,eAAO;aACF;AACL,eAAO;;IAEX;AATS;AAWT,UAAM,WAAW,wBAAC,aAAoB;AAEpC,UACE,UAAU,WAAW;;MAGrB,SAAS,UAAU,cAAc,QACjC;AAGA,cAAMC,OACJ,KAAK,OAAO,qBAAqB,iCAC/B,QAAQ;AAGZ,eAAO,KAAK;UACV,QAAQ,SAAS;UACjB,MAAM,SAAS;UACf,QAAQ,SAAS;UACjB,QAAQ,SAAS,MAAM;UACvB,SAASA;SACV;aACI;AACL,kBAAU,IAAG;AACb,cAAM,UAAU,aAAK,SAAS;AAC9B,6BAAqB,KAAK,mBAAmB,OAAO;AACpD,2CACE,KAAK,6BAA6B,OAAO;AAC3C,iCAAyB,mBAAmB;AAC5C,cAAM,qBACJ,KAAK,mBAAmB,OAAO,KAAK,KAAK,OAAO,aAAa;AAE/D,YAAI,oCAAoC,oBAAoB;AAC1D,gCAAsB;eACjB;AACL,gCAAsB;;;IAG5B,GAtCiB;AAwCjB,aAAS,UAAuB,SAAe;AAC7C,gBAAU,KAAK,OAAO;AACtB,yCACE,KAAK,6BAA6B,OAAO;AAE3C,2BAAqB,KAAK,mBAAmB,OAAO;AACpD,+BAAyB,mBAAmB;AAE5C,+BAAyB,mBAAmB;AAC5C,YAAM,qBACJ,KAAK,mBAAmB,OAAO,KAAK,KAAK,OAAO,aAAa;AAE/D,UAAI,oCAAoC,oBAAoB;AAC1D,8BAAsB;aACjB;AACL,8BAAsB;;IAE1B;AAjBS;AAqBT,cAAU,KAAK,MAAM,WAAW;AAEhC,QAAI;AAEJ,UAAM,kBAAkB,KAAK,OAAO;AAEpC,WAAO,SAAS,WAAW;AACzB,qBAAe;AAEf,YAAM,eAAe,QAAQ,WAAW,MAAM;AAC9C,YAAM,2BAA2B,oBAAoB,YAAY;AACjE,YAAM,uBAAuB,yBAAyB;AAEtD,WAAK,IAAI,GAAG,IAAI,sBAAsB,KAAK;AACzC,qBAAa,yBAAyB,CAAC;AACvC,cAAM,cAAc,WAAW;AAC/B,kBAAU;AAGV,cAAM,iBAAiB,WAAW;AAClC,YAAI,mBAAmB,OAAO;AAC5B,cAAI,iBAAiB,gBAAgB;AAEnC,2BAAe;;mBAER,WAAW,aAAa,MAAM;AACvC,kBAAS,YAA4B,KACnC,SACA,QACA,eACA,MAAM;AAER,cAAI,UAAU,MAAM;AAClB,2BAAe,MAAM,CAAC;AACtB,gBAAK,MAAqC,YAAY,QAAW;AAC/D,wBAAW,MAAqC;;iBAE7C;AACL,2BAAe;;eAEZ;AACL,eAAK,gBA
AgB,aAAuB,MAAM;AAClD,yBAAe,KAAK,MAAM,aAAuB,MAAM,MAAM;;AAG/D,YAAI,iBAAiB,MAAM;AAGzB,sBAAY,WAAW;AACvB,cAAI,cAAc,QAAW;AAG3B,kBAAM,kBAAkB,UAAU;AAClC,iBAAK,IAAI,GAAG,IAAI,iBAAiB,KAAK;AACpC,oBAAM,kBAAkB,mBAAmB,UAAU,CAAC,CAAC;AACvD,oBAAM,mBAAmB,gBAAgB;AACzC,2BAAa;AAIb,kBAAI,gBAAgB,aAAa,MAAM;AACrC,wBAAS,iBAAiC,KACxC,SACA,QACA,eACA,MAAM;AAER,oBAAI,UAAU,MAAM;AAClB,kCAAgB,MAAM,CAAC;AACvB,sBACG,MAAqC,YAAY,QAClD;AACA,iCAAc,MAAqC;;uBAEhD;AACL,kCAAgB;;qBAEb;AACL,qBAAK,gBAAgB,kBAA4B,MAAM;AACvD,gCAAgB,KAAK,MACnB,kBACA,MACA,MAAM;;AAIV,kBAAI,iBAAiB,cAAc,SAAS,aAAa,QAAQ;AAC/D,+BAAe;AACf,0BAAU;AACV,6BAAa;AAGb;;;;AAIN;;;AAKJ,UAAI,iBAAiB,MAAM;AACzB,sBAAc,aAAa;AAC3B,gBAAQ,WAAW;AACnB,YAAI,UAAU,QAAW;AACvB,oBAAU,WAAW;AAGrB,qBAAW,KAAK,oBACd,cACA,QACA,SACA,WAAW,WACX,MACA,QACA,WAAW;AAGb,eAAK,cAAc,UAAU,OAAO;AAGpC,cAAI,UAAU,OAAO;AACnB,iCAAqB,KAAK,SACxB,eACA,oBACA,QAAQ;iBAEL;AACL,mBAAO,KAAK,EAAE,KAAK,QAAQ;;;AAG/B,eAAO,KAAK,UAAU,MAAM,WAAW;AACvC,iBAAS,SAAS;AAGlB,iBAAS,KAAK,iBAAiB,QAAS,WAAW;AAEnD,YAAI,eAAe,QAAQ,WAAW,sBAAsB,MAAM;AAChE,cAAI,kBAAkB;AACtB,cAAI;AACJ,cAAI;AACJ,gCAAsB,YAAY;AAClC,aAAG;AACD,8BAAkB,sBAAsB,KAAK,YAAY;AACzD,gBAAI,oBAAoB,MAAM;AAC5B,gCAAkB,sBAAsB,YAAY;AACpD;;mBAEK,oBAAoB;AAE7B,cAAI,oBAAoB,GAAG;AACzB,mBAAO,OAAQ;AACf,qBAAS,cAAc;AACvB,iBAAK,iCACH,UACA,OACA,iBACA,iBACA,MACA,QACA,WAAW;;;AAKjB,aAAK,YAAY,YAAY,UAAU,WAAW,QAAS;aACtD;AAEL,cAAM,mBAAmB;AACzB,cAAM,YAAY;AAClB,cAAM,cAAc;AACpB,YAAI,mBAAmB,oBAAoB;AAE3C,eAAO,qBAAqB,SAAS,SAAS,WAAW;AAEvD,iBAAO,KAAK,UAAU,MAAM,CAAC;AAC7B;AACA,eAAK,IAAI,GAAG,IAAI,wBAAwB,KAAK;AAC3C,kBAAMC,cAAa,mBAAmB,CAAC;AACvC,kBAAM,cAAcA,YAAW;AAG/B,kBAAM,iBAAiBA,YAAW;AAClC,gBAAI,mBAAmB,OAAO;AAC5B,kBAAI,QAAQ,WAAW,MAAM,MAAM,gBAAgB;AAEjD,mCAAmB;;uBAEZA,YAAW,aAAa,MAAM;AACvC,iCACG,YAA4B,KAC3B,SACA,QACA,eACA,MAAM,MACF;mBACH;AACL,mBAAK,gBAAgB,aAAuB,MAAM;AAClD,iCAAoB,YAAuB,KAAK,IAAI,MAAM;;AAG5D,gBAAI,qBAAqB,MAAM;AAC7B;;;;AAKN,oBAAY,SAAS;AACrB,iBAAS,KAAK,iBAAiB,QAAS,SAAS;AAEjD,cAAM,KAAK,OAAO,qBAAqB,iCACrC,SACA,kBACA,WACA,WACA,WAAW;AAEb,eAAO,KAAK;UACV,QAAQ;UACR,MAAM;UACN,QAAQ;UACR,QAAQ;UACR,SAAS;SACV;AAED,YAAI,oBAAoB,OAAO;AAC7B;;;;AAON,QAAI,CAAC,KAAK,WAAW;AAEnB,oBAAc,SAAS;;AAGzB,WAAO;MACL,QAAQ;MACR;MACA;;EAEJ;EAEQ,YACN,QACA,UACA,WACA,UAAgB;AAEhB,QAAI,OAAO,QAAQ,MAAM;AAGvB,YAAM,WAAW,OAAO;AACxB,eAAS,QAAQ;AACjB,UAAI,aAAa,QAAW;AAC1B,kBAAU,KAAK,MAAM,QAAQ;;eAEtB,OAAO,SAAS,QAAW;AACpC,gBAAU,KAAK,MAAM,OAAO,IAAI;;EAEpC;EAEQ,UAAU,MAAc,QAAc;AAC5C,WAAO,KAAK,UAAU,MAAM;EAC9B;EAEQ,gBAAgB,QAAgB,cAAoB;AAC1D,WAAO,YAAY;EACrB;;EAGQ,iCACN,UACA,OACA,WACA,iBACA,MACA,QACA,aAAmB;AAEnB,QAAI,cAAc;AAClB,QAAI,UAAU,QAAW;AAEvB,qBAAe,cAAc,cAAc;AAC3C,yBAAmB,eAAe,KAAK;AACvC,UAAI,EAAE,oBAAoB,KAAK,iBAAiB,OAAO;AAErD,iBAAS,UAAU,OAAO;AAG1B,iBAAS,YAAY,SAAS,IAAI,CAAC;;;EAIzC;EAEQ,iBAAiB,WAAmB,aAAmB;AAC7D,WAAO,YAAY;EACrB;EAMQ,sBACN,OACA,aACA,cACA,WAAoB;AAEpB,WAAO;MACL;MACA;MACA;MACA;;EAEJ;EAEQ,qBACN,OACA,aACA,cACA,WACA,WACA,aAAmB;AAEnB,WAAO;MACL;MACA;MACA;MACA;MACA;MACA;;EAEJ;EAEQ,gBACN,OACA,aACA,cACA,WACA,WACA,aACA,aAAmB;AAEnB,WAAO;MACL;MACA;MACA,WAAW,cAAc,cAAc;MACvC;MACA,SAAS;MACT;MACA,WAAW,cAAc,cAAc;MACvC;MACA;;EAEJ;EAUQ,kBACN,aACA,OACA,YAAkB;AAElB,gBAAY,KAAK,UAAU;AAC3B,WAAO;EACT;EAEQ,0BACN,aACA,OACA,YAAkB;AAElB,gBAAY,KAAK,IAAI;AACrB;AACA,WAAO;EACT;EAKQ,sBAAsB,OAAe,SAAY;EAAS;EAE1D,wBAAwB,OAAe,SAAY;AACzD,QAAI,YAAY,MAAM;AACpB,YAAM,UAAU;;EAEpB;EASQ,cACN,SACA,MACA,QAAc;AAEd,UAAM,QAAQ,QAAQ,KAAK,IAAI;AAC/B,QAAI,UAAU,MAAM;AAClB,aAAO,KAAK,UAAU,QAAQ,QAAQ,SAAS;;AAEjD,WAAO;EACT;EAEQ,cAAc,SAAiB,MAAY;AACjD,UAAM,cAAc,QAAQ,KAAK,IAAI;AACrC,WAAO,gBAAgB,OAAO,YAAY,CAAC,IAAI;EACjD;;AAx1Bc,MAAA,UACZ;AAGY,MAAA,KAAK;;;ACzFf,SAAUC,YAAW,SAAkB;AAC3C,MAAIC,eAAc,OAAO,GAAG;AA
C1B,WAAO,QAAQ;SACV;AACL,WAAO,QAAQ;;AAEnB;AANgB,OAAAD,aAAA;AAYV,SAAUE,eACd,KAAc;AAEd,SAAO,iBAAS,IAAI,KAAK,KAAK,IAAI,UAAU;AAC9C;AAJgB,OAAAA,gBAAA;AAMhB,IAAM,SAAS;AACf,IAAM,aAAa;AACnB,IAAM,QAAQ;AACd,IAAM,QAAQ;AACd,IAAM,YAAY;AAClB,IAAM,WAAW;AACjB,IAAM,aAAa;AACnB,IAAM,cAAc;AACpB,IAAM,mBAAmB;AAEnB,SAAU,YAAY,QAAoB;AAC9C,SAAO,oBAAoB,MAAM;AACnC;AAFgB;AAIhB,SAAS,oBAAoB,QAAoB;AAC/C,QAAM,UAAU,OAAO;AAEvB,QAAM,YAA4B,CAAA;AAClC,YAAU,OAAO,OAAO;AAExB,MAAI,CAAC,oBAAY,OAAO,GAAG;AACzB,cAAU,UAAU;;AAGtB,MAAI,YAAI,QAAQ,MAAM,GAAG;AACvB,UACE;;AAKJ,MAAI,YAAI,QAAQ,UAAU,GAAG;AAE3B,cAAU,aAAkB,OAAO,UAAU;;AAG/C,oBAAkB,CAAC,SAAS,CAAC;AAE7B,MAAI,YAAI,QAAQ,KAAK,GAAG;AACtB,cAAU,QAAQ,OAAO,KAAK;;AAGhC,MAAI,YAAI,QAAQ,KAAK,GAAG;AACtB,cAAU,QAAQ,OAAO,KAAK;;AAGhC,MAAI,YAAI,QAAQ,QAAQ,GAAG;AACzB,cAAU,WAAW,OAAO,QAAQ;;AAGtC,MAAI,YAAI,QAAQ,SAAS,GAAG;AAC1B,cAAU,YAAY,OAAO,SAAS;;AAGxC,MAAI,YAAI,QAAQ,UAAU,GAAG;AAC3B,cAAU,aAAa,OAAO,UAAU;;AAG1C,MAAI,YAAI,QAAQ,WAAW,GAAG;AAC5B,cAAU,cAAc,OAAO,WAAW;;AAG5C,MAAI,YAAI,QAAQ,gBAAgB,GAAG;AACjC,cAAU,mBAAmB,OAAO,gBAAgB;;AAGtD,SAAO;AACT;AArDS;AAuDF,IAAM,MAAM,YAAY,EAAE,MAAM,OAAO,SAAS,MAAM,GAAE,CAAE;AACjE,kBAAkB,CAAC,GAAG,CAAC;AAEjB,SAAU,oBACd,SACA,OACA,aACA,WACA,WACA,SACA,aACA,WAAiB;AAEjB,SAAO;IACL;IACA;IACA;IACA;IACA;IACA;IACA;IACA,cAAoB,QAAS;IAC7B,WAAW;;AAEf;AArBgB;AAuBV,SAAU,aAAa,OAAe,SAAkB;AAC5D,SAAO,uBAAuB,OAAO,OAAO;AAC9C;AAFgB;;;ACnGT,IAAM,6BAA0D;EACrE,0BAA0B,EAAE,UAAU,QAAQ,UAAU,SAAQ,GAAE;AAChE,UAAM,WAAWC,eAAc,QAAQ;AACvC,UAAM,cAAc,WAChB,OAAOC,YAAW,QAAQ,CAAC,SAC3B,qBAAqB,SAAS,IAAI;AAEtC,UAAM,MAAM,aAAa,WAAW,mBAAmB,OAAO,KAAK;AAEnE,WAAO;EACT;EAEA,8BAA8B,EAAE,gBAAgB,SAAQ,GAAE;AACxD,WAAO,+CAA+C,eAAe;EACvE;EAEA,wBAAwB,EACtB,qBACA,QACA,UACA,uBACA,SAAQ,GACT;AACC,UAAM,YAAY;AAElB,UAAM,aAAa,aAAM,MAAM,EAAG;AAClC,UAAM,YAAY,mBAAmB,aAAa;AAElD,QAAI,uBAAuB;AACzB,aAAO,YAAY,wBAAwB;WACtC;AACL,YAAM,oBAAoB,eACxB,qBACA,CAAC,QAAQ,iBAAiB,OAAO,OAAO,YAAY,GACpD,CAAA,CAAmB;AAErB,YAAM,0BAA0B,YAC9B,mBACA,CAAC,aACC,IAAI,YAAI,UAAU,CAAC,kBAAkBA,YAAW,aAAa,CAAC,EAAE,KAC9D,IAAI,CACL,GAAG;AAER,YAAM,yBAAyB,YAC7B,yBACA,CAAC,SAAS,QAAQ,KAAK,MAAM,CAAC,KAAK,OAAO,EAAE;AAE9C,YAAM,wBAAwB;EAA2C,uBAAuB,KAC9F,IAAI,CACL;AAED,aAAO,YAAY,wBAAwB;;EAE/C;EAEA,sBAAsB,EACpB,wBACA,QACA,uBACA,SAAQ,GACT;AACC,UAAM,YAAY;AAElB,UAAM,aAAa,aAAM,MAAM,EAAG;AAClC,UAAM,YAAY,mBAAmB,aAAa;AAElD,QAAI,uBAAuB;AACzB,aAAO,YAAY,wBAAwB;WACtC;AACL,YAAM,0BAA0B,YAC9B,wBACA,CAAC,aACC,IAAI,YAAI,UAAU,CAAC,kBAAkBA,YAAW,aAAa,CAAC,EAAE,KAC9D,GAAG,CACJ,GAAG;AAER,YAAM,wBACJ;KACI,wBAAwB,KAAK,IAAI,CAAC;AAExC,aAAO,YAAY,wBAAwB;;EAE/C;;AAGF,OAAO,OAAO,0BAA0B;AAEjC,IAAM,sCACX;EACE,uBACE,cACA,eAA0B;AAE1B,UAAM,MACJ,kEACA,cAAc,kBACd,kCAEA,aAAa,OACb;AACF,WAAO;EACT;;AAGG,IAAM,uCACX;EACE,yBACE,cACA,gBAA2C;AAE3C,aAASC,4BACP,MAA+B;AAE/B,UAAI,gBAAgB,UAAU;AAC5B,eAAO,KAAK,aAAa;iBAChB,gBAAgB,aAAa;AACtC,eAAO,KAAK;aACP;AACL,eAAO;;IAEX;AAVS,WAAAA,6BAAA;AAYT,UAAM,eAAe,aAAa;AAClC,UAAM,gBAAgB,aAAM,cAAc;AAC1C,UAAM,QAAQ,cAAc;AAC5B,UAAM,UAAU,qBAAqB,aAAa;AAClD,UAAM,gBAAgBA,4BAA2B,aAAa;AAE9D,UAAM,mBAAmB,QAAQ;AACjC,QAAI,MAAM,KAAK,OAAO,GAAG,mBAAmB,QAAQ,EAAE,MACpD,gBAAgB,oBAAoB,aAAa,OAAO,EAC1D;4CAEc,eAAe,MACjB,oCAAoC,YAAY;;;AAK5D,UAAM,IAAI,QAAQ,WAAW,GAAG;AAChC,UAAM,IAAI,QAAQ,UAAU,IAAI;AAEhC,WAAO;EACT;EAEA,4BAA4B,MAAU;AACpC,UAAM,SACJ;0EAC2E,KAAK,IAAI;;;;AAKtF,WAAO;EACT;EAEA,qCAAqC,SAKpC;AACC,UAAM,UAAU,YAAI,QAAQ,YAAY,CAAC,YACvCD,YAAW,OAAO,CAAC,EACnB,KAAK,IAAI;AACX,UAAM,aACJ,QAAQ,YAAY,QAAQ,IAAI,KAAK,QAAQ,YAAY;AAC3D,UAAM,SACJ,4BAA4B,QAAQ,iBAAiB,KACnD,IAAI,CACL;QACQ,UAAU,aAAa,QAAQ,aAAa,IAAI;GACrD,OAAO;;;AAIb,WAAO;EACT;EAEA,+BAA+B,SAK9B;AACC,UAAM,UAAU,YAAI,QAAQ,YAAY,CAAC,YACvCA,YAAW,OAAO,CAAC,EACnB,KAAK,IAAI;AACX,UAAM,aACJ,QAAQ,YA
AY,QAAQ,IAAI,KAAK,QAAQ,YAAY;AAC3D,QAAI,cACF,qCAAqC,QAAQ,iBAAiB,KAC5D,IAAI,CACL,WAAW,UAAU,aACV,QAAQ,aAAa,IAAI;GACjC,OAAO;;AAEb,kBACE,cACA;;AAEF,WAAO;EACT;EAEA,0BAA0B,SAGzB;AACC,QAAI,UAAU,qBAAqB,QAAQ,UAAU;AACrD,QAAI,QAAQ,WAAW,QAAQ,GAAG;AAChC,iBAAW,QAAQ,WAAW;;AAGhC,UAAM,SACJ,mBAAmB,OAAO,kBAAkB,QAAQ,aAAa,IAAI;;AAGvE,WAAO;EACT;;;EAIA,oBAAoB,SAGnB;AAEC,WAAO;EACT;EAEA,2BAA2B,SAI1B;AACC,UAAM,SACJ,iCAAiC,QAAQ,iBAAiB,CAAC,WACjD,QAAQ,YAAY,GAAG,aAAa,QAAQ,aAAa,IAAI;;AAGzE,WAAO;EACT;EAEA,8BAA8B,SAG7B;AACC,UAAM,SACJ;KACM,QAAQ,YAAY,GAAG,aAC3B,QAAQ,aAAa,IACvB;OACE,QAAQ,YAAY,WAAW,SAAS,CAC1C;AAEF,WAAO;EACT;EAEA,wBAAwB,SAGvB;AACC,UAAM,WAAW,QAAQ,aAAa;AACtC,UAAM,YAAY,YAChB,QAAQ,mBACR,CAAC,aAAa,SAAS,IAAI;AAE7B,UAAM,oBAAoB,GAAG,QAAQ,QAAQ,UAC1C,OAAO,CAAC,QAAQ,CAAC,EACjB,KAAK,OAAO,CAAC;AAChB,UAAM,SACJ;SACU,QAAQ;;GACwD,iBAAiB;;;AAI7F,WAAO;EACT;;;EAIA,0BAA0B,SAGzB;AAEC,WAAO;EACT;EAEA,4BAA4B,SAG3B;AACC,QAAI;AACJ,QAAI,QAAQ,wBAAwB,MAAM;AACxC,iBAAW,QAAQ,aAAa;WAC3B;AACL,iBAAW,QAAQ;;AAGrB,UAAM,SAAS,iCAAiC,QAAQ,2CAA2C,QAAQ,WAAW;AAEtH,WAAO;EACT;;;;ACxTE,SAAU,eACd,WACA,gBAAoD;AAEpD,QAAM,cAAc,IAAI,uBAAuB,WAAW,cAAc;AACxE,cAAY,YAAW;AACvB,SAAO,YAAY;AACrB;AAPgB;AASV,IAAO,yBAAP,cAAsC,YAAW;EApBvD,OAoBuD;;;EAIrD,YACU,eACA,gBAAoD;AAE5D,UAAK;AAHG,SAAA,gBAAA;AACA,SAAA,iBAAA;AALH,SAAA,SAAgD,CAAA;EAQvD;EAEO,cAAW;AAChB,oBAAQ,eAAO,KAAK,aAAa,GAAG,CAAC,SAAQ;AAC3C,WAAK,eAAe;AACpB,WAAK,OAAO,IAAI;IAClB,CAAC;EACH;EAEO,iBAAiB,MAAiB;AACvC,UAAM,MAAM,KAAK,cAAc,KAAK,eAAe;AAEnD,QAAI,CAAC,KAAK;AACR,YAAM,MAAM,KAAK,eAAe,uBAC9B,KAAK,cACL,IAAI;AAEN,WAAK,OAAO,KAAK;QACf,SAAS;QACT,MAAM,0BAA0B;QAChC,UAAU,KAAK,aAAa;QAC5B,mBAAmB,KAAK;OACzB;WACI;AACL,WAAK,iBAAiB;;EAE1B;;;;ACtBI,IAAgB,mCAAhB,cAAyD,WAAU;EAjCzE,OAiCyE;;;EAUvE,YACY,SACA,MAAkB;AAE5B,UAAK;AAHK,SAAA,UAAA;AACA,SAAA,OAAA;AAXF,SAAA,mBAAgC,CAAA;AAIhC,SAAA,qBAAqB;AACrB,SAAA,2BAA2B;AAC3B,SAAA,QAAQ;AACR,SAAA,gBAAgB;EAO1B;EAEA,eAAY;AACV,SAAK,QAAQ;AAEb,QAAI,KAAK,KAAK,UAAU,CAAC,MAAM,KAAK,QAAQ,MAAM;AAChD,YAAM,MAAM,qDAAqD;;AAInE,SAAK,YAAY,cAAM,KAAK,KAAK,SAAS,EAAE,QAAO;AACnD,SAAK,kBAAkB,cAAM,KAAK,KAAK,eAAe,EAAE,QAAO;AAG/D,SAAK,UAAU,IAAG;AAClB,SAAK,gBAAgB,IAAG;AAExB,SAAK,mBAAkB;AACvB,SAAK,KAAK,KAAK,OAAO;AAEtB,WAAO,KAAK;EACd;EAEA,KACE,MACA,WAA0B,CAAA,GAAE;AAG5B,QAAI,CAAC,KAAK,OAAO;AACf,YAAM,KAAK,MAAM,QAAQ;;EAE7B;EAEA,YACE,SACA,UACA,UAAuB;AAGvB,QACE,QAAQ,eAAe,SAAS,KAAK,sBACrC,QAAQ,QAAQ,KAAK,0BACrB;AACA,YAAM,WAAW,SAAS,OAAO,QAAQ;AACzC,WAAK,mBAAkB;AACvB,WAAK,KAAK,QAAQ,gBAAqB,QAAQ;;EAEnD;EAEA,qBAAkB;AAEhB,QAAI,gBAAQ,KAAK,SAAS,GAAG;AAG3B,WAAK,qBAAqB;AAC1B,WAAK,2BAA2B;AAChC,WAAK,gBAAgB;WAChB;AACL,WAAK,qBAAqB,KAAK,UAAU,IAAG;AAC5C,WAAK,2BAA2B,KAAK,gBAAgB,IAAG;;EAE5D;;AAGI,IAAO,uBAAP,cAAoC,iCAAgC;EAhH1E,OAgH0E;;;EAIxE,YACE,SACU,MAAuB;AAEjC,UAAM,SAAS,IAAI;AAFT,SAAA,OAAA;AALJ,SAAA,mBAAmB;AACnB,SAAA,yBAAyB;AAO/B,SAAK,mBAAmB,KAAK,KAAK,QAAQ;AAC1C,SAAK,yBAAyB,KAAK,KAAK;EAC1C;EAEA,aACE,UACA,UACA,UAAuB;AAEvB,QACE,KAAK,iBACL,SAAS,aAAa,SAAS,KAAK,oBACpC,SAAS,QAAQ,KAAK,0BACtB,CAAC,KAAK,OACN;AACA,YAAM,WAAW,SAAS,OAAO,QAAQ;AACzC,YAAM,WAAW,IAAI,YAAY,EAAE,YAAY,SAAQ,CAAE;AACzD,WAAK,mBAAmB,MAAM,QAAQ;AACtC,WAAK,QAAQ;;EAEjB;;AAeI,IAAO,4CAAP,cAAyD,WAAU;EA5JzE,OA4JyE;;;EAOvE,YACY,SACA,YAAkB;AAE5B,UAAK;AAHK,SAAA,UAAA;AACA,SAAA,aAAA;AARF,SAAA,SAAgC;MACxC,OAAO;MACP,YAAY;MACZ,aAAa;;EAQf;EAEA,eAAY;AACV,SAAK,KAAK,KAAK,OAAO;AACtB,WAAO,KAAK;EACd;;AAGI,IAAO,8BAAP,cAA2C,0CAAyC;EAhL1F,OAgL0F;;;EACxF,SACE,UACA,UACA,UAAuB;AAEvB,QAAI,SAAS,QAAQ,KAAK,YAAY;AACpC,YAAM,iBAAiB,aAAO,SAAS,OAAO,QAAQ,CAAC;AACvD,WAAK,OAAO,cAAc,mBAAmB;AAC7C,UAAI,0BAA0B,UAAU;AACtC,aAAK,OAAO,QAAQ,eAAe;AACnC,aAAK,OAAO,aAAa,eAAe;;WAErC;AACL,YAAM,SAAS,UAAU,UAAU,QAAQ;;EAE/C;;AAGI,IAAO,iCAAP,cAA8C,0CA
AyC;EAnM7F,OAmM6F;;;EAC3F,YACE,aACA,UACA,UAAuB;AAEvB,QAAI,YAAY,QAAQ,KAAK,YAAY;AACvC,YAAM,oBAAoB,aAAO,SAAS,OAAO,QAAQ,CAAC;AAC1D,WAAK,OAAO,cAAc,sBAAsB;AAChD,UAAI,6BAA6B,UAAU;AACzC,aAAK,OAAO,QAAQ,kBAAkB;AACtC,aAAK,OAAO,aAAa,kBAAkB;;WAExC;AACL,YAAM,YAAY,aAAa,UAAU,QAAQ;;EAErD;;AAGI,IAAO,oCAAP,cAAiD,0CAAyC;EAtNhG,OAsNgG;;;EAC9F,eACE,gBACA,UACA,UAAuB;AAEvB,QAAI,eAAe,QAAQ,KAAK,YAAY;AAC1C,YAAM,uBAAuB,aAAO,SAAS,OAAO,QAAQ,CAAC;AAC7D,WAAK,OAAO,cAAc,yBAAyB;AACnD,UAAI,gCAAgC,UAAU;AAC5C,aAAK,OAAO,QAAQ,qBAAqB;AACzC,aAAK,OAAO,aAAa,qBAAqB;;WAE3C;AACL,YAAM,eAAe,gBAAgB,UAAU,QAAQ;;EAE3D;;AAII,IAAO,uCAAP,cAAoD,0CAAyC;EA1OnG,OA0OmG;;;EACjG,kBACE,mBACA,UACA,UAAuB;AAEvB,QAAI,kBAAkB,QAAQ,KAAK,YAAY;AAC7C,YAAM,oCAAoC,aACxC,SAAS,OAAO,QAAQ,CAAC;AAE3B,WAAK,OAAO,cAAc,sCAAsC;AAChE,UAAI,6CAA6C,UAAU;AACzD,aAAK,OAAO,QAAQ,kCAAkC;AACtD,aAAK,OAAO,aAAa,kCAAkC;;WAExD;AACL,YAAM,kBAAkB,mBAAmB,UAAU,QAAQ;;EAEjE;;AAQI,SAAU,kBACd,WACA,WACA,WAAwB,CAAA,GAAE;AAG1B,aAAW,cAAM,QAAQ;AACzB,MAAI,SAAmC,CAAA;AACvC,MAAI,IAAI;AAGR,WAAS,kBAAkB,SAAsB;AAC/C,WAAO,QAAQ,OAAO,aAAK,WAAW,IAAI,CAAC,CAAC;EAC9C;AAFS;AAKT,WAAS,uBAAuB,YAAyB;AACvD,UAAM,eAAe,kBACnB,kBAAkB,UAAU,GAC5B,WACA,QAAQ;AAEV,WAAO,OAAO,OAAO,YAAY;EACnC;AAPS;AAgBT,SAAO,SAAS,SAAS,aAAa,IAAI,UAAU,QAAQ;AAC1D,UAAM,OAAO,UAAU,CAAC;AAGxB,QAAI,gBAAgB,aAAa;AAC/B,aAAO,uBAAuB,KAAK,UAAU;eACpC,gBAAgB,aAAa;AACtC,aAAO,uBAAuB,KAAK,UAAU;eACpC,gBAAgB,QAAQ;AACjC,eAAS,uBAAuB,KAAK,UAAU;eACtC,gBAAgB,qBAAqB;AAC9C,YAAM,SAAS,KAAK,WAAW,OAAO;QACpC,IAAI,WAAW;UACb,YAAY,KAAK;SAClB;OACF;AACD,aAAO,uBAAuB,MAAM;eAC3B,gBAAgB,kCAAkC;AAC3D,YAAM,SAAS;QACb,IAAI,YAAY,EAAE,YAAY,KAAK,WAAU,CAAE;QAC/C,IAAI,WAAW;UACb,YAAY,CAAC,IAAI,SAAS,EAAE,cAAc,KAAK,UAAS,CAAE,CAAC,EAAE,OACtD,KAAK,UAAU;SAEvB;;AAEH,aAAO,uBAAuB,MAAM;eAC3B,gBAAgB,yBAAyB;AAClD,YAAM,SAAS,KAAK,WAAW,OAAO;QACpC,IAAI,WAAW;UACb,YAAY,CAAC,IAAI,SAAS,EAAE,cAAc,KAAK,UAAS,CAAE,CAAC,EAAE,OACtD,KAAK,UAAU;SAEvB;OACF;AACD,eAAS,uBAAuB,MAAM;eAC7B,gBAAgB,YAAY;AACrC,YAAM,SAAS,KAAK,WAAW,OAAO;QACpC,IAAI,WAAW;UACb,YAAY,KAAK;SAClB;OACF;AACD,eAAS,uBAAuB,MAAM;eAC7B,gBAAgB,aAAa;AACtC,sBAAQ,KAAK,YAAY,CAAC,YAAW;AAInC,YAAI,gBAAQ,QAAQ,UAAU,MAAM,OAAO;AACzC,mBAAS,uBAAuB,QAAQ,UAAU;;MAEtD,CAAC;AACD,aAAO;eACE,gBAAgB,UAAU;AACnC,eAAS,KAAK,KAAK,YAAY;WAC1B;AACL,YAAM,MAAM,sBAAsB;;AAGpC;;AAEF,SAAO,KAAK;IACV,aAAa;IACb,WAAW,aAAK,WAAW,CAAC;GAC7B;AAED,SAAO;AACT;AAnGgB;AA4GV,SAAU,wBACd,YACA,aACA,YACA,cAAoB;AAEpB,QAAM,oBAAyB;AAE/B,QAAM,wBAAwB,CAAC,iBAAiB;AAChD,QAAM,mBAAwB;AAC9B,MAAI,oBAAoB;AAExB,QAAM,oBAAoB,YAAY;AACtC,QAAM,2BAA2B,oBAAoB,eAAe;AAEpE,QAAM,SAAwC,CAAA;AAE9C,QAAM,gBAAkC,CAAA;AACxC,gBAAc,KAAK;IACjB,KAAK;IACL,KAAK;IACL,WAAW,CAAA;IACX,iBAAiB,CAAA;GAClB;AAED,SAAO,CAAC,gBAAQ,aAAa,GAAG;AAC9B,UAAM,WAAW,cAAc,IAAG;AAGlC,QAAI,aAAa,kBAAkB;AACjC,UACE,qBACA,aAAK,aAAa,EAAG,OAAO,0BAC5B;AAEA,sBAAc,IAAG;;AAEnB;;AAGF,UAAM,UAAU,SAAS;AACzB,UAAM,UAAU,SAAS;AACzB,UAAM,gBAAgB,SAAS;AAC/B,UAAM,sBAAsB,SAAS;AAGrC,QAAI,gBAAQ,OAAO,GAAG;AACpB;;AAGF,UAAM,OAAO,QAAQ,CAAC;AAEtB,QAAI,SAAS,mBAAmB;AAC9B,YAAM,WAAW;QACf,KAAK;QACL,KAAK,aAAK,OAAO;QACjB,WAAW,kBAAU,aAAa;QAClC,iBAAiB,kBAAU,mBAAmB;;AAEhD,oBAAc,KAAK,QAAQ;eAClB,gBAAgB,UAAU;AAEnC,UAAI,UAAU,oBAAoB,GAAG;AACnC,cAAM,UAAU,UAAU;AAC1B,cAAM,cAAc,YAAY,OAAO;AACvC,YAAI,WAAY,aAAa,KAAK,YAAY,GAAG;AAC/C,gBAAM,WAAW;YACf,KAAK;YACL,KAAK,aAAK,OAAO;YACjB,WAAW;YACX,iBAAiB;;AAEnB,wBAAc,KAAK,QAAQ;;iBAGpB,YAAY,oBAAoB,GAAG;AAE5C,eAAO,KAAK;UACV,eAAe,KAAK;UACpB,qBAAqB,KAAK;UAC1B,WAAW;UACX,iBAAiB;SAClB;AACD,4BAAoB;aACf;AACL,cAAM,MAAM,sBAAsB;;eAE3B,gBAAgB,aAAa;AACtC,YAAM,eAAe,cAAM,aAAa;AACxC,mBAAa,KAAK,KAAK,eAAe;AAEtC,YAAM,qBAAqB,cAAM,mBAAmB;AACpD,yBAAmB,KAAK,KAAK,GAAG;AAEhC,YAAM,WAAW;QACf,KAAK;QACL,KAAK,KAAK,WAAW,OAAO,uB
AAuB,aAAK,OAAO,CAAC;QAChE,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,QAAQ;eAClB,gBAAgB,QAAQ;AAEjC,YAAM,kBAAkB;QACtB,KAAK;QACL,KAAK,aAAK,OAAO;QACjB,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,eAAe;AAElC,oBAAc,KAAK,gBAAgB;AAEnC,YAAM,eAAe;QACnB,KAAK;QACL,KAAK,KAAK,WAAW,OAAO,aAAK,OAAO,CAAC;QACzC,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,YAAY;eACtB,gBAAgB,qBAAqB;AAE9C,YAAM,kBAAkB,IAAI,WAAW;QACrC,YAAY,KAAK;QACjB,KAAK,KAAK;OACX;AACD,YAAM,UAAU,KAAK,WAAW,OAAO,CAAC,eAAe,GAAG,aAAK,OAAO,CAAC;AACvE,YAAM,WAAW;QACf,KAAK;QACL,KAAK;QACL,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,QAAQ;eAClB,gBAAgB,kCAAkC;AAE3D,YAAM,gBAAgB,IAAI,SAAS;QACjC,cAAc,KAAK;OACpB;AACD,YAAM,kBAAkB,IAAI,WAAW;QACrC,YAAY,CAAM,aAAa,EAAE,OAAO,KAAK,UAAU;QACvD,KAAK,KAAK;OACX;AACD,YAAM,UAAU,KAAK,WAAW,OAAO,CAAC,eAAe,GAAG,aAAK,OAAO,CAAC;AACvE,YAAM,WAAW;QACf,KAAK;QACL,KAAK;QACL,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,QAAQ;eAClB,gBAAgB,yBAAyB;AAElD,YAAM,kBAAkB;QACtB,KAAK;QACL,KAAK,aAAK,OAAO;QACjB,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,eAAe;AAElC,oBAAc,KAAK,gBAAgB;AAEnC,YAAM,gBAAgB,IAAI,SAAS;QACjC,cAAc,KAAK;OACpB;AACD,YAAM,gBAAgB,IAAI,WAAW;QACnC,YAAY,CAAM,aAAa,EAAE,OAAO,KAAK,UAAU;QACvD,KAAK,KAAK;OACX;AACD,YAAM,UAAU,KAAK,WAAW,OAAO,CAAC,aAAa,GAAG,aAAK,OAAO,CAAC;AACrE,YAAM,eAAe;QACnB,KAAK;QACL,KAAK;QACL,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,YAAY;eACtB,gBAAgB,YAAY;AAErC,YAAM,kBAAkB;QACtB,KAAK;QACL,KAAK,aAAK,OAAO;QACjB,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,eAAe;AAElC,oBAAc,KAAK,gBAAgB;AAGnC,YAAM,gBAAgB,IAAI,WAAW;QACnC,YAAY,KAAK;QACjB,KAAK,KAAK;OACX;AACD,YAAM,UAAU,KAAK,WAAW,OAAO,CAAC,aAAa,GAAG,aAAK,OAAO,CAAC;AACrE,YAAM,eAAe;QACnB,KAAK;QACL,KAAK;QACL,WAAW;QACX,iBAAiB;;AAEnB,oBAAc,KAAK,YAAY;eACtB,gBAAgB,aAAa;AAEtC,eAAS,IAAI,KAAK,WAAW,SAAS,GAAG,KAAK,GAAG,KAAK;AACpD,cAAM,UAAe,KAAK,WAAW,CAAC;AACtC,cAAM,cAAc;UAClB,KAAK;UACL,KAAK,QAAQ,WAAW,OAAO,aAAK,OAAO,CAAC;UAC5C,WAAW;UACX,iBAAiB;;AAEnB,sBAAc,KAAK,WAAW;AAC9B,sBAAc,KAAK,gBAAgB;;eAE5B,gBAAgB,aAAa;AACtC,oBAAc,KAAK;QACjB,KAAK;QACL,KAAK,KAAK,WAAW,OAAO,aAAK,OAAO,CAAC;QACzC,WAAW;QACX,iBAAiB;OAClB;eACQ,gBAAgB,MAAM;AAE/B,oBAAc,KACZ,mBAAmB,MAAM,SAAS,eAAe,mBAAmB,CAAC;WAElE;AACL,YAAM,MAAM,sBAAsB;;;AAGtC,SAAO;AACT;AAzOgB;AA2OhB,SAAS,mBACP,SACA,SACA,eACA,qBAA6B;AAE7B,QAAM,eAAe,cAAM,aAAa;AACxC,eAAa,KAAK,QAAQ,IAAI;AAE9B,QAAM,yBAAyB,cAAM,mBAAmB;AAExD,yBAAuB,KAAK,CAAC;AAE7B,SAAO;IACL,KAAK;IACL,KAAK,QAAQ;IACb,WAAW;IACX,iBAAiB;;AAErB;AAnBS;;;AC9jBT,IAAY;CAAZ,SAAYE,YAAS;AACnB,EAAAA,WAAAA,WAAA,QAAA,IAAA,CAAA,IAAA;AACA,EAAAA,WAAAA,WAAA,YAAA,IAAA,CAAA,IAAA;AACA,EAAAA,WAAAA,WAAA,sBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,WAAAA,WAAA,qCAAA,IAAA,CAAA,IAAA;AACA,EAAAA,WAAAA,WAAA,2BAAA,IAAA,CAAA,IAAA;AACA,EAAAA,WAAAA,WAAA,aAAA,IAAA,CAAA,IAAA;AACF,GAPY,cAAA,YAAS,CAAA,EAAA;AASf,SAAU,YACd,MAA2C;AAG3C,MAAI,gBAAgB,UAAU,SAAS,UAAU;AAC/C,WAAO,UAAU;aACR,gBAAgB,cAAc,SAAS,cAAc;AAC9D,WAAO,UAAU;aAEjB,gBAAgB,uBAChB,SAAS,uBACT;AACA,WAAO,UAAU;aAEjB,gBAAgB,oCAChB,SAAS,oCACT;AACA,WAAO,UAAU;aAEjB,gBAAgB,2BAChB,SAAS,2BACT;AACA,WAAO,UAAU;aACR,gBAAgB,eAAe,SAAS,eAAe;AAChE,WAAO,UAAU;SACZ;AACL,UAAM,MAAM,sBAAsB;;AAEtC;AA5BgB;AA8BV,SAAU,kBAAkB,SAKjC;AACC,QAAM,EAAE,YAAY,MAAM,UAAU,aAAY,IAAK;AACrD,QAAM,OAAO,YAAY,QAAQ;AACjC,MAAI,SAAS,UAAU,aAAa;AAClC,WAAO,uBAAuB,YAAY,MAAM,YAAY;SACvD;AACL,WAAO,iCACL,YACA,MACA,MACA,YAAY;;AAGlB;AAlBgB;AAoBV,SAAU,wBACd,YACA,aACA,cACA,eACA,sBACA,eAAuB;AAEvB,QAAM,iBAAiB,uBACrB,YACA,aACA,YAAY;AAGd,QAAMC,gBAAe,0BAA0B,cAAc,IACzD,qCACA;AAEJ,SAAO,cACL,gBACA,eACAA,eACA,oBAAoB;AAExB;AAxBgB;AAsCV,SAAU,kCACd,YACA,aACA,GACA,sBACA,UACA,kBAIkB;AAElB,QAAM,iBAAiB,iCACrB,YACA,aACA,UACA,CAAC;AAGH,QAAMA,gBAAe,0BAA0B,cAAc,IACzD,qCACA;AAEJ,SAAO,iBACL,eAAe,CAAC,GAChBA,eACA,oBAAoB;AAExB;AA5BgB;AAgCV,SAAU,+BACd,MACA,eACAA
,eACA,sBAA6B;AAE7B,QAAM,YAAY,KAAK;AACvB,QAAM,0BAA0B,cAAM,MAAM,CAAC,YAAW;AACtD,WAAO,cAAM,SAAS,CAAC,aAAY;AACjC,aAAO,SAAS,WAAW;IAC7B,CAAC;EACH,CAAC;AAGD,MAAI,eAAe;AAIjB,WAAO,SAEL,QAAqB;AAKrB,YAAM,aAAwC,YAC5C,QACA,CAAC,YAAY,QAAQ,IAAI;AAG3B,eAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,cAAM,UAAU,KAAK,CAAC;AACtB,cAAM,iBAAiB,QAAQ;AAE/B,cAAM,gBAAgB,WAAW,CAAC;AAClC,YAAI,kBAAkB,UAAa,cAAc,KAAK,IAAI,MAAM,OAAO;AAErE;;AAEF,iBAAU,UAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACjD,gBAAM,WAAW,QAAQ,CAAC;AAC1B,gBAAM,iBAAiB,SAAS;AAChC,mBAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,kBAAM,YAAY,KAAK,GAAG,IAAI,CAAC;AAC/B,gBAAIA,cAAa,WAAW,SAAS,CAAC,CAAC,MAAM,OAAO;AAGlD,uBAAS;;;AAKb,iBAAO;;;AAMX,aAAO;IACT;aACS,2BAA2B,CAAC,sBAAsB;AAG3D,UAAM,kBAAkB,YAAI,MAAM,CAAC,YAAW;AAC5C,aAAO,gBAAQ,OAAO;IACxB,CAAC;AAED,UAAM,cAAc,eAClB,iBACA,CAAC,QAAQ,SAAS,QAAO;AACvB,sBAAQ,SAAS,CAAC,gBAAe;AAC/B,YAAI,CAAC,YAAI,QAAQ,YAAY,YAAa,GAAG;AAC3C,iBAAO,YAAY,YAAa,IAAI;;AAEtC,wBAAQ,YAAY,iBAAkB,CAAC,sBAAqB;AAC1D,cAAI,CAAC,YAAI,QAAQ,iBAAiB,GAAG;AACnC,mBAAO,iBAAiB,IAAI;;QAEhC,CAAC;MACH,CAAC;AACD,aAAO;IACT,GACA,CAAA,CAA4B;AAM9B,WAAO,WAAA;AACL,YAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,aAAO,YAAY,UAAU,YAAY;IAC3C;SACK;AAML,WAAO,WAAA;AACL,eAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,cAAM,UAAU,KAAK,CAAC;AACtB,cAAM,iBAAiB,QAAQ;AAC/B,iBAAU,UAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACjD,gBAAM,WAAW,QAAQ,CAAC;AAC1B,gBAAM,iBAAiB,SAAS;AAChC,mBAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,kBAAM,YAAY,KAAK,GAAG,IAAI,CAAC;AAC/B,gBAAIA,cAAa,WAAW,SAAS,CAAC,CAAC,MAAM,OAAO;AAGlD,uBAAS;;;AAKb,iBAAO;;;AAMX,aAAO;IACT;;AAEJ;AA5HgB;AA8HV,SAAU,wCACd,KACAA,eACA,sBAA6B;AAE7B,QAAM,0BAA0B,cAAM,KAAK,CAAC,aAAY;AACtD,WAAO,SAAS,WAAW;EAC7B,CAAC;AAED,QAAM,aAAa,IAAI;AAIvB,MAAI,2BAA2B,CAAC,sBAAsB;AACpD,UAAM,oBAAoB,gBAAQ,GAAG;AAErC,QACE,kBAAkB,WAAW,KAC7B,gBAAc,kBAAkB,CAAC,EAAG,eAAe,GACnD;AACA,YAAM,oBAAoB,kBAAkB,CAAC;AAC7C,YAAM,yBAA+B,kBAAmB;AAExD,aAAO,WAAA;AACL,eAAO,KAAK,GAAG,CAAC,EAAE,iBAAiB;MACrC;WACK;AACL,YAAM,cAAc,eAClB,mBACA,CAAC,QAAQ,aAAa,QAAO;AAC3B,eAAO,YAAY,YAAa,IAAI;AACpC,wBAAQ,YAAY,iBAAkB,CAAC,sBAAqB;AAC1D,iBAAO,iBAAiB,IAAI;QAC9B,CAAC;AACD,eAAO;MACT,GACA,CAAA,CAAe;AAGjB,aAAO,WAAA;AACL,cAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,eAAO,YAAY,UAAU,YAAY,MAAM;MACjD;;SAEG;AACL,WAAO,WAAA;AACL,eAAU,UAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AAC7C,cAAM,WAAW,IAAI,CAAC;AACtB,cAAM,iBAAiB,SAAS;AAChC,iBAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,gBAAM,YAAY,KAAK,GAAG,IAAI,CAAC;AAC/B,cAAIA,cAAa,WAAW,SAAS,CAAC,CAAC,MAAM,OAAO;AAGlD,qBAAS;;;AAIb,eAAO;;AAIT,aAAO;IACT;;AAEJ;AAjEgB;AAmEhB,IAAM,6BAAN,cAAyC,WAAU;EA/VnD,OA+VmD;;;EAGjD,YACU,SACA,kBACA,gBAAyB;AAEjC,UAAK;AAJG,SAAA,UAAA;AACA,SAAA,mBAAA;AACA,SAAA,iBAAA;EAGV;EAEA,eAAY;AACV,SAAK,KAAK,KAAK,OAAO;AACtB,WAAO,KAAK;EACd;EAEQ,cACN,MACA,kBACA,UACA,UAAuB;AAEvB,QACE,KAAK,QAAQ,KAAK,oBAClB,KAAK,mBAAmB,kBACxB;AACA,WAAK,UAAU,SAAS,OAAO,QAAQ;AACvC,aAAO;;AAGT,WAAO;EACT;EAEA,WACE,YACA,UACA,UAAuB;AAEvB,QAAI,CAAC,KAAK,cAAc,YAAY,UAAU,QAAQ,UAAU,QAAQ,GAAG;AACzE,YAAM,WAAW,YAAY,UAAU,QAAQ;;EAEnD;EAEA,eACE,gBACA,UACA,UAAuB;AAEvB,QACE,CAAC,KAAK,cACJ,gBACA,UAAU,sBACV,UACA,QAAQ,GAEV;AACA,YAAM,WAAW,gBAAgB,UAAU,QAAQ;;EAEvD;EAEA,kBACE,mBACA,UACA,UAAuB;AAEvB,QACE,CAAC,KAAK,cACJ,mBACA,UAAU,qCACV,UACA,QAAQ,GAEV;AACA,YAAM,WAAW,mBAAmB,UAAU,QAAQ;;EAE1D;EAEA,SACE,UACA,UACA,UAAuB;AAEvB,QACE,CAAC,KAAK,cAAc,UAAU,UAAU,YAAY,UAAU,QAAQ,GACtE;AACA,YAAM,WAAW,UAAU,UAAU,QAAQ;;EAEjD;EAEA,YACE,aACA,UACA,UAAuB;AAEvB,QACE,CAAC,KAAK,cACJ,aACA,UAAU,2BACV,UACA,QAAQ,GAEV;AACA,YAAM,WAAW,aAAa,UAAU,QAAQ;;EAEpD;;AAMF,IAAM,gCAAN,cAA4C,YAAW;EA7cvD,OA6cuD;;;EAGrD,YACU,kBACA,gBACA,WAAe;AAEvB,UAAK;AAJG,SAAA,mBAAA;AACA,SAAA,iBAAA;AACA,SAAA,YAAA;AALH,SAAA,SAAwB,CAAA;EAQ/B;EAEQ,cACN,MACA,kBAA2B;AAE3B,QACE,KAAK,QAAQ,
KAAK,oBAClB,KAAK,mBAAmB,qBACvB,KAAK,cAAc,UAAa,SAAS,KAAK,YAC/C;AACA,WAAK,SAAS,KAAK;;EAEvB;EAEO,YAAY,MAAY;AAC7B,SAAK,cAAc,MAAM,UAAU,MAAM;EAC3C;EAEO,gBAAgB,MAAgB;AACrC,SAAK,cAAc,MAAM,UAAU,UAAU;EAC/C;EAEO,yBAAyB,MAAyB;AACvD,SAAK,cAAc,MAAM,UAAU,oBAAoB;EACzD;EAEO,sCACL,MAAsC;AAEtC,SAAK,cAAc,MAAM,UAAU,mCAAmC;EACxE;EAEO,6BAA6B,MAA6B;AAC/D,SAAK,cAAc,MAAM,UAAU,yBAAyB;EAC9D;EAEO,iBAAiB,MAAiB;AACvC,SAAK,cAAc,MAAM,UAAU,WAAW;EAChD;;AAGF,SAAS,wBAAwB,MAAY;AAC3C,QAAM,SAAS,IAAI,MAAM,IAAI;AAC7B,WAAS,IAAI,GAAG,IAAI,MAAM,KAAK;AAC7B,WAAO,CAAC,IAAI,CAAA;;AAEd,SAAO;AACT;AANS;AAaT,SAAS,eAAe,MAAiB;AACvC,MAAI,OAAO,CAAC,EAAE;AACd,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,UAAU,KAAK,CAAC;AACtB,UAAM,aAAa,CAAA;AACnB,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,YAAM,iBAAiB,KAAK,CAAC;AAC7B,iBAAW,KAAK,iBAAiB,MAAM,QAAQ,YAAY;AAC3D,eAAS,IAAI,GAAG,IAAI,QAAQ,gBAAiB,QAAQ,KAAK;AACxD,cAAM,sBAAsB,MAAM,QAAQ,gBAAiB,CAAC;AAC5D,mBAAW,KAAK,iBAAiB,mBAAmB;;;AAGxD,WAAO;;AAET,SAAO;AACT;AAhBS;AAqBT,SAAS,mBACP,mBACA,gBACA,KAAW;AAEX,WACM,aAAa,GACjB,aAAa,kBAAkB,QAC/B,cACA;AAEA,QAAI,eAAe,KAAK;AACtB;;AAEF,UAAM,yBAAyB,kBAAkB,UAAU;AAC3D,aAAS,YAAY,GAAG,YAAY,eAAe,QAAQ,aAAa;AACtE,YAAM,YAAY,eAAe,SAAS;AAC1C,UAAI,uBAAuB,SAAS,MAAM,MAAM;AAC9C,eAAO;;;;AAKb,SAAO;AACT;AAxBS;AA0BH,SAAU,kCACd,UACA,GAAS;AAET,QAAM,cAAc,YAAI,UAAU,CAAC,YACjC,kBAAkB,CAAC,OAAO,GAAG,CAAC,CAAC;AAEjC,QAAM,cAAc,wBAAwB,YAAY,MAAM;AAC9D,QAAM,aAAa,YAAI,aAAa,CAAC,iBAAgB;AACnD,UAAM,OAAmC,CAAA;AACzC,oBAAQ,cAAc,CAAC,SAAQ;AAC7B,YAAM,OAAO,eAAe,KAAK,WAAW;AAC5C,sBAAQ,MAAM,CAAC,YAAW;AACxB,aAAK,OAAO,IAAI;MAClB,CAAC;IACH,CAAC;AACD,WAAO;EACT,CAAC;AACD,MAAI,UAAU;AAGd,WAAS,aAAa,GAAG,cAAc,GAAG,cAAc;AACtD,UAAM,cAAc;AACpB,cAAU,wBAAwB,YAAY,MAAM;AAGpD,aAAS,SAAS,GAAG,SAAS,YAAY,QAAQ,UAAU;AAC1D,YAAM,0BAA0B,YAAY,MAAM;AAElD,eACM,cAAc,GAClB,cAAc,wBAAwB,QACtC,eACA;AACA,cAAM,iBAAiB,wBAAwB,WAAW,EAAE;AAC5D,cAAM,YAAY,wBAAwB,WAAW,EAAE;AACvD,cAAM,aAAa,eAAe,cAAc;AAChD,cAAM,WAAW,mBAAmB,YAAY,YAAY,MAAM;AAElE,YAAI,YAAY,gBAAQ,SAAS,KAAK,eAAe,WAAW,GAAG;AACjE,gBAAM,gBAAgB,YAAY,MAAM;AAExC,cAAI,aAAa,eAAe,cAAc,MAAM,OAAO;AACzD,0BAAc,KAAK,cAAc;AAEjC,qBAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,oBAAM,UAAU,WAAW,CAAC;AAC5B,yBAAW,MAAM,EAAE,OAAO,IAAI;;;eAK/B;AACH,gBAAM,6BAA6B,kBACjC,WACA,aAAa,GACb,cAAc;AAEhB,kBAAQ,MAAM,IAAI,QAAQ,MAAM,EAAE,OAAO,0BAA0B;AAGnE,0BAAQ,4BAA4B,CAAC,SAAQ;AAC3C,kBAAMC,cAAa,eAAe,KAAK,WAAW;AAClD,4BAAQA,aAAY,CAAC,QAAO;AAC1B,yBAAW,MAAM,EAAE,GAAG,IAAI;YAC5B,CAAC;UACH,CAAC;;;;;AAMT,SAAO;AACT;AAzEgB;AA2EV,SAAU,uBACd,YACA,aACA,GACA,QAAoB;AAEpB,QAAMC,WAAU,IAAI,8BAClB,YACA,UAAU,aACV,MAAM;AAER,cAAY,OAAOA,QAAO;AAC1B,SAAO,kCAAkCA,SAAQ,QAAQ,CAAC;AAC5D;AAbgB;AAeV,SAAU,iCACd,YACA,aACA,UACA,GAAS;AAET,QAAM,mBAAmB,IAAI,8BAC3B,YACA,QAAQ;AAEV,cAAY,OAAO,gBAAgB;AACnC,QAAM,YAAY,iBAAiB;AAEnC,QAAM,iBAAiB,IAAI,2BACzB,aACA,YACA,QAAQ;AAEV,QAAM,WAAW,eAAe,aAAY;AAE5C,QAAM,aAAa,IAAI,YAAgB,EAAE,YAAY,UAAS,CAAE;AAChE,QAAM,YAAY,IAAI,YAAgB,EAAE,YAAY,SAAQ,CAAE;AAE9D,SAAO,kCAAkC,CAAC,YAAY,SAAS,GAAG,CAAC;AACrE;AAxBgB;AA0BV,SAAU,aACd,aACA,YAAuB;AAEvB,mBAAkB,UAAS,IAAI,GAAG,IAAI,YAAY,QAAQ,KAAK;AAC7D,UAAM,YAAY,YAAY,CAAC;AAC/B,QAAI,UAAU,WAAW,WAAW,QAAQ;AAC1C;;AAEF,aAAS,IAAI,GAAG,IAAI,UAAU,QAAQ,KAAK;AACzC,YAAM,YAAY,WAAW,CAAC;AAC9B,YAAM,WAAW,UAAU,CAAC;AAE5B,YAAM,iBACJ,cAAc,YACd,SAAS,mBAAoB,UAAU,YAAa,MAAM;AAC5D,UAAI,mBAAmB,OAAO;AAC5B,iBAAS;;;AAGb,WAAO;;AAGT,SAAO;AACT;AAxBgB;AA0BV,SAAU,qBACd,QACA,OAAkB;AAElB,SACE,OAAO,SAAS,MAAM,UACtB,cAAM,QAAQ,CAAC,SAAS,QAAO;AAC7B,UAAM,eAAe,MAAM,GAAG;AAC9B,WACE,YAAY,gBACZ,aAAa,mBAAoB,QAAQ,YAAa;EAE1D,CAAC;AAEL;AAdgB;AAgBV,SAAU,0BACd,gBAAmC;AAEnC,SAAO,cAAM,gBAAgB,CAAC,mBAC5B,cAAM,gBAAgB,CAAC,eACrB,cAAM,YAAY,CAAC,UAAU,gBAAQ,MAAM,eAAgB,CAAC,CA
AC,CAC9D;AAEL;AARgB;;;AC5pBV,SAAU,kBAAkB,SAKjC;AACC,QAAM,mCAAmC,QAAQ,kBAAkB,SAAS;IAC1E,OAAO,QAAQ;IACf,YAAY,QAAQ;IACpB,aAAa,QAAQ;GACtB;AACD,SAAO,YAAI,kCAAkC,CAAC,iBAAiB,OAAA,OAAA,EAC7D,MAAM,0BAA0B,4BAA2B,GACxD,YAAY,CACf;AACJ;AAfgB;AAiBV,SAAU,gBACd,WACA,YACA,gBACA,aAAmB;AAEnB,QAAM,kBAA4C,gBAChD,WACA,CAAC,iBACC,6BAA6B,cAAc,cAAc,CAAC;AAG9D,QAAM,+BAA+B,uCACnC,WACA,YACA,cAAc;AAGhB,QAAM,oBAAoB,gBAAQ,WAAW,CAAC,YAC5C,oBAAoB,SAAS,cAAc,CAAC;AAG9C,QAAM,sBAAsB,gBAAQ,WAAW,CAAC,YAC9C,gCACE,SACA,WACA,aACA,cAAc,CACf;AAGH,SAAO,gBAAgB,OACrB,8BACA,mBACA,mBAAmB;AAEvB;AApCgB;AAsChB,SAAS,6BACP,cACA,gBAAqD;AAErD,QAAMC,oBAAmB,IAAI,8BAA6B;AAC1D,eAAa,OAAOA,iBAAgB;AACpC,QAAM,qBAAqBA,kBAAiB;AAE5C,QAAM,mBAAmB,gBACvB,oBACA,+BAA+B;AAGjC,QAAM,aAAkB,eAAO,kBAAkB,CAAC,cAAa;AAC7D,WAAO,UAAU,SAAS;EAC5B,CAAC;AAED,QAAM,SAAS,YAAI,eAAO,UAAU,GAAG,CAAC,mBAAuB;AAC7D,UAAM,YAAiB,aAAM,cAAc;AAC3C,UAAM,MAAM,eAAe,yBACzB,cACA,cAAc;AAEhB,UAAM,UAAU,qBAAqB,SAAS;AAC9C,UAAM,WAA6C;MACjD,SAAS;MACT,MAAM,0BAA0B;MAChC,UAAU,aAAa;MACvB;MACA,YAAY,UAAU;;AAGxB,UAAM,QAAQ,2BAA2B,SAAS;AAClD,QAAI,OAAO;AACT,eAAS,YAAY;;AAGvB,WAAO;EACT,CAAC;AACD,SAAO;AACT;AAxCS;AA0CH,SAAU,gCACd,MAA+B;AAE/B,SAAO,GAAG,qBAAqB,IAAI,CAAC,MAClC,KAAK,GACP,MAAM,2BAA2B,IAAI,CAAC;AACxC;AANgB;AAQhB,SAAS,2BAA2B,MAA+B;AACjE,MAAI,gBAAgB,UAAU;AAC5B,WAAO,KAAK,aAAa;aAChB,gBAAgB,aAAa;AACtC,WAAO,KAAK;SACP;AACL,WAAO;;AAEX;AARS;AAUH,IAAO,gCAAP,cAA6C,YAAW;EAjL9D,OAiL8D;;;EAA9D,cAAA;;AACS,SAAA,iBAA8C,CAAA;EAmCvD;EAjCS,iBAAiB,SAAoB;AAC1C,SAAK,eAAe,KAAK,OAAO;EAClC;EAEO,YAAYC,SAAc;AAC/B,SAAK,eAAe,KAAKA,OAAM;EACjC;EAEO,6BAA6B,SAAgC;AAClE,SAAK,eAAe,KAAK,OAAO;EAClC;EAEO,yBAAyB,YAA+B;AAC7D,SAAK,eAAe,KAAK,UAAU;EACrC;EAEO,sCACL,eAA+C;AAE/C,SAAK,eAAe,KAAK,aAAa;EACxC;EAEO,gBAAgB,MAAgB;AACrC,SAAK,eAAe,KAAK,IAAI;EAC/B;EAEO,iBAAiB,IAAe;AACrC,SAAK,eAAe,KAAK,EAAE;EAC7B;EAEO,cAAc,UAAkB;AACrC,SAAK,eAAe,KAAK,QAAQ;EACnC;;AAGI,SAAU,gCACd,MACA,UACA,WACA,gBAAqD;AAErD,QAAM,SAAS,CAAA;AACf,QAAM,cAAc,eAClB,UACA,CAAC,QAAQ,YAAW;AAClB,QAAI,QAAQ,SAAS,KAAK,MAAM;AAC9B,aAAO,SAAS;;AAElB,WAAO;EACT,GACA,CAAC;AAEH,MAAI,cAAc,GAAG;AACnB,UAAM,SAAS,eAAe,4BAA4B;MACxD,cAAc;MACd,aAAa;KACd;AACD,WAAO,KAAK;MACV,SAAS;MACT,MAAM,0BAA0B;MAChC,UAAU,KAAK;KAChB;;AAGH,SAAO;AACT;AA9BgB;AAmCV,SAAU,yBACd,UACA,mBACA,WAAiB;AAEjB,QAAM,SAAS,CAAA;AACf,MAAI;AAEJ,MAAI,CAAC,iBAAS,mBAAmB,QAAQ,GAAG;AAC1C,aACE,kCAAkC,QAAQ,6CAA6C,SAAS;AAElG,WAAO,KAAK;MACV,SAAS;MACT,MAAM,0BAA0B;MAChC;KACD;;AAGH,SAAO;AACT;AApBgB;AAsBV,SAAU,wBACd,SACA,UACA,gBACA,OAAe,CAAA,GAAE;AAEjB,QAAM,SAAmC,CAAA;AACzC,QAAM,mBAAmB,qBAAqB,SAAS,UAAU;AACjE,MAAI,gBAAQ,gBAAgB,GAAG;AAC7B,WAAO,CAAA;SACF;AACL,UAAM,WAAW,QAAQ;AACzB,UAAM,qBAAqB,iBAAS,kBAAkB,OAAO;AAC7D,QAAI,oBAAoB;AACtB,aAAO,KAAK;QACV,SAAS,eAAe,wBAAwB;UAC9C,cAAc;UACd,mBAAmB;SACpB;QACD,MAAM,0BAA0B;QAChC;OACD;;AAKH,UAAM,iBAAiB,mBAAW,kBAAkB,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC;AAC1E,UAAM,sBAAsB,gBAAQ,gBAAgB,CAAC,gBAAe;AAClE,YAAM,UAAU,cAAM,IAAI;AAC1B,cAAQ,KAAK,WAAW;AACxB,aAAO,wBACL,SACA,aACA,gBACA,OAAO;IAEX,CAAC;AAED,WAAO,OAAO,OAAO,mBAAmB;;AAE5C;AAxCgB;AA0CV,SAAU,qBAAqB,YAAyB;AAC5D,MAAI,SAAiB,CAAA;AACrB,MAAI,gBAAQ,UAAU,GAAG;AACvB,WAAO;;AAET,QAAM,YAAY,aAAM,UAAU;AAGlC,MAAI,qBAAqB,aAAa;AACpC,WAAO,KAAK,UAAU,cAAc;aAEpC,qBAAqB,eACrB,qBAAqB,UACrB,qBAAqB,uBACrB,qBAAqB,oCACrB,qBAAqB,2BACrB,qBAAqB,YACrB;AACA,aAAS,OAAO,OACd,qBAAoC,UAAU,UAAU,CAAC;aAElD,qBAAqB,aAAa;AAE3C,aAAS,gBACP,YAAI,UAAU,YAAY,CAAC,eACzB,qBAAuC,WAAY,UAAU,CAAC,CAC/D;aAEM,qBAAqB,UAAU;SAEnC;AACL,UAAM,MAAM,sBAAsB;;AAGpC,QAAM,kBAAkB,eAAe,SAAS;AAChD,QAAM,UAAU,WAAW,SAAS;AACpC,MAAI,mBAAmB,SAAS;AAC9B,UAAM,OAAO,aAAK,UAAU;AAC5B,WAAO,OAAO,OAAO,qBAAqB,IAAI,CAAC;SAC1C;AACL,WAAO;;AAEX;AA1CgB;AA4ChB,IAAM,cAAN,cAA0B,YAAW;E
AtWrC,OAsWqC;;;EAArC,cAAA;;AACS,SAAA,eAA8B,CAAA;EAKvC;EAHS,iBAAiB,MAAiB;AACvC,SAAK,aAAa,KAAK,IAAI;EAC7B;;AAGI,SAAU,2BACd,cACA,gBAAqD;AAErD,QAAM,cAAc,IAAI,YAAW;AACnC,eAAa,OAAO,WAAW;AAC/B,QAAM,MAAM,YAAY;AAExB,QAAM,SAAS,gBACb,KACA,CAAC,WAAU;AACT,UAAM,aAAa,kBAAU,OAAO,UAAU;AAC9C,WAAO,gBAAQ,YAAY,CAAC,iBAAiB,eAAc;AACzD,YAAM,qBAAqB,wBACzB,CAAC,eAAe,GAChB,CAAA,GACA,wBACA,CAAC;AAEH,UAAI,gBAAQ,kBAAkB,GAAG;AAC/B,eAAO;UACL;YACE,SAAS,eAAe,2BAA2B;cACjD;cACA,aAAa;cACb,gBAAgB;aACjB;YACD,MAAM,0BAA0B;YAChC,UAAU,aAAa;YACvB,YAAY,OAAO;YACnB,aAAa,aAAa;;;aAGzB;AACL,eAAO,CAAA;;IAEX,CAAC;EACH,CAAC;AAGH,SAAO;AACT;AAzCgB;AA2CV,SAAU,yCACd,cACA,oBACA,gBAAqD;AAErD,QAAM,cAAc,IAAI,YAAW;AACnC,eAAa,OAAO,WAAW;AAC/B,MAAI,MAAM,YAAY;AAItB,QAAM,eAAO,KAAK,CAAC,WAAW,OAAO,sBAAsB,IAAI;AAE/D,QAAM,SAAS,gBAAQ,KAAK,CAAC,WAAuB;AAClD,UAAM,iBAAiB,OAAO;AAC9B,UAAM,qBAAqB,OAAO,gBAAgB;AAClD,UAAM,eAAe,uBACnB,gBACA,cACA,oBACA,MAAM;AAER,UAAM,sBAAsB,6BAC1B,cACA,QACA,cACA,cAAc;AAEhB,UAAM,4BAA4B,mCAChC,cACA,QACA,cACA,cAAc;AAGhB,WAAO,oBAAoB,OAAO,yBAAyB;EAC7D,CAAC;AAED,SAAO;AACT;AAvCgB;AAyCV,IAAO,sBAAP,cAAmC,YAAW;EAlcpD,OAkcoD;;;EAApD,cAAA;;AACS,SAAA,iBAEA,CAAA;EAmBT;EAjBS,6BAA6B,SAAgC;AAClE,SAAK,eAAe,KAAK,OAAO;EAClC;EAEO,yBAAyB,YAA+B;AAC7D,SAAK,eAAe,KAAK,UAAU;EACrC;EAEO,sCACL,eAA+C;AAE/C,SAAK,eAAe,KAAK,aAAa;EACxC;EAEO,gBAAgB,MAAgB;AACrC,SAAK,eAAe,KAAK,IAAI;EAC/B;;AAGI,SAAU,oBACd,cACA,gBAAqD;AAErD,QAAM,cAAc,IAAI,YAAW;AACnC,eAAa,OAAO,WAAW;AAC/B,QAAM,MAAM,YAAY;AAExB,QAAM,SAAS,gBAAQ,KAAK,CAAC,WAAU;AACrC,QAAI,OAAO,WAAW,SAAS,KAAK;AAClC,aAAO;QACL;UACE,SAAS,eAAe,8BAA8B;YACpD;YACA,aAAa;WACd;UACD,MAAM,0BAA0B;UAChC,UAAU,aAAa;UACvB,YAAY,OAAO;;;WAGlB;AACL,aAAO,CAAA;;EAEX,CAAC;AAED,SAAO;AACT;AA3BgB;AA6BV,SAAU,kCACd,eACA,cACA,gBAAqD;AAErD,QAAM,SAAmC,CAAA;AACzC,kBAAQ,eAAe,CAAC,gBAAe;AACrC,UAAMD,oBAAmB,IAAI,oBAAmB;AAChD,gBAAY,OAAOA,iBAAgB;AACnC,UAAM,qBAAqBA,kBAAiB;AAC5C,oBAAQ,oBAAoB,CAAC,aAAY;AACvC,YAAM,WAAW,YAAY,QAAQ;AACrC,YAAM,qBAAqB,SAAS,gBAAgB;AACpD,YAAM,iBAAiB,SAAS;AAChC,YAAM,QAAQ,iCACZ,gBACA,aACA,UACA,kBAAkB;AAEpB,YAAM,wBAAwB,MAAM,CAAC;AACrC,UAAI,gBAAQ,gBAAQ,qBAAqB,CAAC,GAAG;AAC3C,cAAM,SAAS,eAAe,0BAA0B;UACtD,cAAc;UACd,YAAY;SACb;AACD,eAAO,KAAK;UACV,SAAS;UACT,MAAM,0BAA0B;UAChC,UAAU,YAAY;SACvB;;IAEL,CAAC;EACH,CAAC;AAED,SAAO;AACT;AApCgB;AA2ChB,SAAS,6BACP,cACAE,cACA,MACA,gBAAqD;AAErD,QAAM,sBAAmC,CAAA;AACzC,QAAM,uBAAuB,eAC3B,cACA,CAAC,QAAQ,SAAS,eAAc;AAE9B,QAAIA,aAAY,WAAW,UAAU,EAAE,sBAAsB,MAAM;AACjE,aAAO;;AAGT,oBAAQ,SAAS,CAAC,aAAY;AAC5B,YAAM,wBAAwB,CAAC,UAAU;AACzC,sBAAQ,cAAc,CAAC,cAAc,oBAAmB;AACtD,YACE,eAAe,mBACf,aAAa,cAAc,QAAQ;QAEnCA,aAAY,WAAW,eAAe,EAAE,sBAAsB,MAC9D;AACA,gCAAsB,KAAK,eAAe;;MAE9C,CAAC;AAED,UACE,sBAAsB,SAAS,KAC/B,CAAC,aAAa,qBAAqB,QAAQ,GAC3C;AACA,4BAAoB,KAAK,QAAQ;AACjC,eAAO,KAAK;UACV,MAAM;UACN,MAAM;SACP;;IAEL,CAAC;AACD,WAAO;EACT,GACA,CAAA,CAA6C;AAG/C,QAAM,aAAa,YAAI,sBAAsB,CAAC,sBAAqB;AACjE,UAAM,cAAc,YAClB,kBAAkB,MAClB,CAAC,eAAe,aAAa,CAAC;AAGhC,UAAM,cAAc,eAAe,+BAA+B;MAChE,cAAc;MACd,aAAaA;MACb,kBAAkB;MAClB,YAAY,kBAAkB;KAC/B;AAED,WAAO;MACL,SAAS;MACT,MAAM,0BAA0B;MAChC,UAAU,KAAK;MACf,YAAYA,aAAY;MACxB,cAAc,kBAAkB;;EAEpC,CAAC;AAED,SAAO;AACT;AAnES;AAqEH,SAAU,mCACd,cACAA,cACA,MACA,gBAAqD;AAGrD,QAAM,kBAAkB,eACtB,cACA,CAAC,QAAQ,SAAS,QAAO;AACvB,UAAM,kBAAkB,YAAI,SAAS,CAAC,aAAY;AAChD,aAAO,EAAE,KAAU,MAAM,SAAQ;IACnC,CAAC;AACD,WAAO,OAAO,OAAO,eAAe;EACtC,GACA,CAAA,CAA0C;AAG5C,QAAM,SAAS,gBACb,gBAAQ,iBAAiB,CAAC,mBAAkB;AAC1C,UAAM,kBAAkBA,aAAY,WAAW,eAAe,GAAG;AAEjE,QAAI,gBAAgB,sBAAsB,MAAM;AAC9C,aAAO,CAAA;;AAET,UAAM,YAAY,eAAe;AACjC,UAAM,aAAa,eAAe;AAElC,UAAM,mCAAmC,eACvC,iBACA,CAAC,qBAAoB;AAEnB;;QAEEA,aAAY,WAAW,iBAAiB,GAAG,EAAE,sBAC3C,QACF,iBAAiB,MAAM;;QAGvB,qBAAqB,iBAAiB,MAAM,UAA
U;;IAE1D,CAAC;AAGH,UAAM,uBAAuB,YAC3B,kCACA,CAAC,sBAAkE;AACjE,YAAM,cAAc,CAAC,kBAAkB,MAAM,GAAG,YAAY,CAAC;AAC7D,YAAM,aAAaA,aAAY,QAAQ,IAAI,KAAKA,aAAY;AAE5D,YAAM,UAAU,eAAe,qCAAqC;QAClE,cAAc;QACd,aAAaA;QACb,kBAAkB;QAClB,YAAY,kBAAkB;OAC/B;AACD,aAAO;QACL;QACA,MAAM,0BAA0B;QAChC,UAAU,KAAK;QACf;QACA,cAAc;;IAElB,CAAC;AAGH,WAAO;EACT,CAAC,CAAC;AAGJ,SAAO;AACT;AAvEgB;AAyEhB,SAAS,uCACP,WACA,YACA,gBAAqD;AAErD,QAAM,SAAmC,CAAA;AAEzC,QAAM,aAAa,YAAI,YAAY,CAAC,cAAc,UAAU,IAAI;AAEhE,kBAAQ,WAAW,CAAC,aAAY;AAC9B,UAAM,eAAe,SAAS;AAC9B,QAAI,iBAAS,YAAY,YAAY,GAAG;AACtC,YAAM,SAAS,eAAe,4BAA4B,QAAQ;AAElE,aAAO,KAAK;QACV,SAAS;QACT,MAAM,0BAA0B;QAChC,UAAU;OACX;;EAEL,CAAC;AAED,SAAO;AACT;AAvBS;;;AC7pBH,SAAUC,gBACd,SAA2B;AAE3B,QAAM,gBAA8C,iBAAS,SAAS;IACpE,gBAAgB;GACjB;AAED,QAAM,gBAA8C,CAAA;AACpD,kBAAQ,QAAQ,OAAO,CAAC,SAAQ;AAC9B,kBAAc,KAAK,IAAI,IAAI;EAC7B,CAAC;AACD,SAAO,eAAkB,eAAe,cAAc,cAAc;AACtE;AAZgB,OAAAA,iBAAA;AAcV,SAAUC,iBAAgB,SAK/B;AACC,YAAU,iBAAS,SAAS;IAC1B,gBAAgB;GACjB;AAED,SAAO,gBACL,QAAQ,OACR,QAAQ,YACR,QAAQ,gBACR,QAAQ,WAAW;AAEvB;AAhBgB,OAAAA,kBAAA;;;AC1BhB,IAAM,6BAA6B;AACnC,IAAM,0BAA0B;AAChC,IAAM,uBAAuB;AAC7B,IAAM,iCAAiC;AAEvC,IAAM,8BAA8B;EAClC;EACA;EACA;EACA;;AAGF,OAAO,OAAO,2BAA2B;AAGnC,SAAU,uBAAuB,OAAY;AAEjD,SAAO,iBAAS,6BAA6B,MAAM,IAAI;AACzD;AAHgB;AAKhB,IAAe,uBAAf,cACU,MAAK;EA5Bf,OA4Be;;;EAMb,YACE,SACO,OAAa;AAEpB,UAAM,OAAO;AAFN,SAAA,QAAA;AAJT,SAAA,iBAA2B,CAAA;AASzB,WAAO,eAAe,MAAM,WAAW,SAAS;AAGhD,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,KAAK,WAAW;;EAElD;;AAGI,IAAO,2BAAP,cAAwC,qBAAoB;EAlDlE,OAkDkE;;;EAChE,YACE,SACA,OACO,eAAqB;AAE5B,UAAM,SAAS,KAAK;AAFb,SAAA,gBAAA;AAGP,SAAK,OAAO;EACd;;AAGI,IAAO,uBAAP,cAAoC,qBAAoB;EA7D9D,OA6D8D;;;EAC5D,YACE,SACA,OACO,eAAqB;AAE5B,UAAM,SAAS,KAAK;AAFb,SAAA,gBAAA;AAGP,SAAK,OAAO;EACd;;AAGI,IAAO,6BAAP,cAA0C,qBAAoB;EAxEpE,OAwEoE;;;EAClE,YAAY,SAAiB,OAAa;AACxC,UAAM,SAAS,KAAK;AACpB,SAAK,OAAO;EACd;;AAGI,IAAO,qBAAP,cAAkC,qBAAoB;EA/E5D,OA+E4D;;;EAC1D,YACE,SACA,OACO,eAAqB;AAE5B,UAAM,SAAS,KAAK;AAFb,SAAA,gBAAA;AAGP,SAAK,OAAO;EACd;;;;ACzDK,IAAM,iBAAsB,CAAA;AAQ5B,IAAM,6BAA6B;AAEpC,IAAO,0BAAP,cAAuC,MAAK;EAxClD,OAwCkD;;;EAChD,YAAY,SAAe;AACzB,UAAM,OAAO;AACb,SAAK,OAAO;EACd;;AAMI,IAAO,cAAP,MAAkB;EAlDxB,OAkDwB;;;EAKtB,gBAAgB,QAAqB;AACnC,SAAK,mBAAmB,CAAA;AACxB,SAAK,gBAAgB,CAAA;AAErB,SAAK,kBAAkB,YAAI,QAAQ,iBAAiB,IAC/C,OAAO,kBACR,sBAAsB;AAK1B,QAAI,KAAK,iBAAiB;AACxB,WAAK,8BAA8B;;EAEvC;EAEO,iBAAiB,SAAkB;AACxC,UAAM,cAAc,oBAClB,SACA,IACA,KACA,KACA,KACA,KACA,KACA,GAAG;AAEL,gBAAY,uBAAuB;AACnC,WAAO;EACT;EAEO,iCAAiC,SAAkB;AACxD,WAAO;EACT;EAEO,gCAAgC,SAAkB;AACvD,WAAO;EACT;EAEA,wBAEE,aACA,iBACA,eACA,iBAA0B;AAG1B,UAAM,gBAAgB,KAAK,oBAAmB;AAC9C,UAAM,kBAAkB,KAAK,iBAAgB;AAC7C,UAAM,iBAA2B,CAAA;AACjC,QAAI,oBAAoB;AAExB,UAAM,yBAAyB,KAAK,GAAG,CAAC;AACxC,QAAI,YAAY,KAAK,GAAG,CAAC;AAEzB,UAAM,uBAAuB,6BAAK;AAChC,YAAM,gBAAgB,KAAK,GAAG,CAAC;AAG/B,YAAM,MAAM,KAAK,qBAAqB,0BAA0B;QAC9D,UAAU;QACV,QAAQ;QACR,UAAU;QACV,UAAU,KAAK,oBAAmB;OACnC;AACD,YAAM,QAAQ,IAAI,yBAChB,KACA,wBACA,KAAK,GAAG,CAAC,CAAC;AAGZ,YAAM,iBAAiB,kBAAU,cAAc;AAC/C,WAAK,WAAW,KAAK;IACvB,GAlB6B;AAoB7B,WAAO,CAAC,mBAAmB;AAEzB,UAAI,KAAK,aAAa,WAAW,eAAe,GAAG;AACjD,6BAAoB;AACpB;iBACS,cAAc,KAAK,IAAI,GAAG;AAEnC,6BAAoB;AAEpB,oBAAY,MAAM,MAAM,eAAe;AACvC;iBACS,KAAK,aAAa,WAAW,aAAa,GAAG;AACtD,4BAAoB;aACf;AACL,oBAAY,KAAK,WAAU;AAC3B,aAAK,kBAAkB,WAAW,cAAc;;;AAOpD,SAAK,iBAAiB,eAAe;EACvC;EAEA,kCAEE,yBACA,YACA,UAA6B;AAI7B,QAAI,aAAa,OAAO;AACtB,aAAO;;AAIT,QAAI,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,uBAAuB,GAAG;AAC1D,aAAO;;AAKT,QAAI,KAAK,eAAc,GAAI;AACzB,aAAO;;AAMT,QACE,KAAK,yBACH,yBACA,KAAK,4BAA4B,yBAAyB,UAAU,CAAC,GAEvE;AACA,aAAO;;AAGT,WAAO;EACT;;EAGA,4BAEE,SACA,cAAoB;AAEpB,UAAM,cAAc,KAAK,sBAAsB,SAAS,YAAY;AACpE,UAAM,UAAU,KA
AK,0BAA0B,WAAW;AAC1D,WAAO;EACT;EAEA,kBAEE,iBACA,SAAoB;AAEpB,QAAI,KAAK,mCAAmC,iBAAiB,OAAO,GAAG;AACrE,YAAM,cAAc,KAAK,iBAAiB,eAAe;AACzD,aAAO;;AAGT,QAAI,KAAK,kCAAkC,eAAe,GAAG;AAC3D,YAAM,UAAU,KAAK,WAAU;AAC/B,WAAK,aAAY;AACjB,aAAO;;AAGT,UAAM,IAAI,wBAAwB,eAAe;EACnD;EAEA,yBAEE,eACA,SAAoB;AAEpB,WACE,KAAK,mCAAmC,eAAe,OAAO,KAC9D,KAAK,kCAAkC,aAAa;EAExD;EAEA,mCAEE,iBACA,SAAoB;AAEpB,QAAI,CAAC,KAAK,iCAAiC,eAAe,GAAG;AAC3D,aAAO;;AAIT,QAAI,gBAAQ,OAAO,GAAG;AACpB,aAAO;;AAGT,UAAM,gBAAgB,KAAK,GAAG,CAAC;AAC/B,UAAM,2BACJ,aAAK,SAAS,CAAC,2BAAqC;AAClD,aAAO,KAAK,aAAa,eAAe,sBAAsB;IAChE,CAAC,MAAM;AAET,WAAO;EACT;EAEA,kCAEE,iBAA0B;AAE1B,QAAI,CAAC,KAAK,gCAAgC,eAAe,GAAG;AAC1D,aAAO;;AAGT,UAAM,4BAA4B,KAAK,aACrC,KAAK,GAAG,CAAC,GACT,eAAe;AAEjB,WAAO;EACT;EAEA,yBAEE,cAAuB;AAEvB,UAAM,YAAY,KAAK,iBAAgB;AACvC,UAAM,uBAAuB,KAAK,0BAA0B,SAAS;AACrE,WAAO,iBAAS,sBAAsB,YAAY;EACpD;EAEA,sBAAmB;AACjB,UAAM,4BAA4B,KAAK,iBAAgB;AAEvD,QAAI,YAAY,KAAK,GAAG,CAAC;AACzB,QAAI,IAAI;AACR,WAAO,MAAM;AACX,YAAM,aAAa,aAAK,2BAA2B,CAAC,kBAAiB;AACnE,cAAM,WAAW,aAAa,WAAW,aAAa;AACtD,eAAO;MACT,CAAC;AACD,UAAI,eAAe,QAAW;AAC5B,eAAO;;AAET,kBAAY,KAAK,GAAG,CAAC;AACrB;;EAEJ;EAEA,mBAAgB;AAEd,QAAI,KAAK,WAAW,WAAW,GAAG;AAChC,aAAO;;AAET,UAAM,oBAAoB,KAAK,6BAA4B;AAC3D,UAAM,cAAc,KAAK,mCAAkC;AAC3D,UAAM,oBAAoB,KAAK,iCAAgC;AAE/D,WAAO;MACL,UAAU,KAAK,wBAAwB,iBAAiB;MACxD,kBAAkB;MAClB,QAAQ,KAAK,wBAAwB,iBAAiB;;EAE1D;EAEA,0BAAuB;AACrB,UAAM,oBAAoB,KAAK;AAC/B,UAAM,0BAA0B,KAAK;AAErC,WAAO,YAAI,mBAAmB,CAAC,UAAU,QAAO;AAC9C,UAAI,QAAQ,GAAG;AACb,eAAO;;AAET,aAAO;QACL,UAAU,KAAK,wBAAwB,QAAQ;QAC/C,kBAAkB,wBAAwB,GAAG;QAC7C,QAAQ,KAAK,wBAAwB,kBAAkB,MAAM,CAAC,CAAC;;IAEnE,CAAC;EACH;EAEA,mBAAgB;AACd,UAAM,cAAc,YAAI,KAAK,wBAAuB,GAAI,CAAC,YAAW;AAClE,aAAO,KAAK,0BAA0B,OAAO;IAC/C,CAAC;AACD,WAAY,gBAAQ,WAAW;EACjC;EAEA,0BAEE,WAAqB;AAErB,QAAI,cAAc,gBAAgB;AAChC,aAAO,CAAC,GAAG;;AAGb,UAAM,aACJ,UAAU,WAAW,UAAU,mBAAmB,KAAK,UAAU;AAEnE,WAAO,KAAK,cAAc,UAAU;EACtC;;;EAIA,kBAEE,OACA,cAAsB;AAEtB,QAAI,CAAC,KAAK,aAAa,OAAO,GAAG,GAAG;AAClC,mBAAa,KAAK,KAAK;;AAEzB,WAAO;EACT;EAEA,SAA8B,SAAkB;AAC9C,UAAM,iBAA2B,CAAA;AACjC,QAAI,UAAU,KAAK,GAAG,CAAC;AACvB,WAAO,KAAK,aAAa,SAAS,OAAO,MAAM,OAAO;AACpD,gBAAU,KAAK,WAAU;AACzB,WAAK,kBAAkB,SAAS,cAAc;;AAGhD,WAAO,kBAAU,cAAc;EACjC;EAEA,4BAEE,UACA,MACA,eACA,cACA,gBACA,gBACA,UAAkB;EAIpB;EAEA,sBAEE,SACA,cAAoB;AAEpB,UAAM,gBAA0B,KAAK,0BAAyB;AAC9D,UAAM,sBAAgC,cAAM,KAAK,qBAAqB;AACtE,UAAM,cAAmB;MACvB,WAAW;MACX,iBAAiB;MACjB,SAAS;MACT,mBAAmB;;AAGrB,WAAO;EACT;EACA,4BAAyB;AACvB,WAAO,YAAI,KAAK,YAAY,CAAC,kBAC3B,KAAK,wBAAwB,aAAa,CAAC;EAE/C;;AAGI,SAAU,4BAEd,UACA,MACA,eACA,cACA,gBACA,gBACA,UAAkB;AAElB,QAAM,MAAM,KAAK,4BAA4B,cAAc,cAAc;AACzE,MAAI,oBAAoB,KAAK,iBAAiB,GAAG;AACjD,MAAI,sBAAsB,QAAW;AACnC,UAAM,eAAe,KAAK,oBAAmB;AAC7C,UAAM,cAAc,KAAK,mBAAkB,EAAG,YAAY;AAC1D,UAAM,SACJ,IAAI,eAAe,aAAa,cAAc;AAChD,wBAAoB,OAAO,aAAY;AACvC,SAAK,iBAAiB,GAAG,IAAI;;AAG/B,MAAI,0BAA0B,kBAAkB;AAChD,MAAI,aAAa,kBAAkB;AACnC,QAAM,cAAc,kBAAkB;AAItC,MACE,KAAK,WAAW,WAAW,KAC3B,eACA,4BAA4B,QAC5B;AACA,8BAA0B;AAC1B,iBAAa;;AAKf,MAAI,4BAA4B,UAAa,eAAe,QAAW;AACrE;;AAGF,MACE,KAAK,kCACH,yBACA,YACA,QAAQ,GAEV;AAIA,SAAK,wBACH,UACA,MACA,eACA,uBAAuB;;AAG7B;AA3DgB;;;ACtZT,IAAM,uBAAuB;AAC7B,IAAM,0BAA0B;AAGhC,IAAM,mBAAmB;AAKzB,IAAM,SAAS,KAAK;AACpB,IAAM,aAAa,KAAK;AACxB,IAAM,WAAW,KAAK;AACtB,IAAM,mBAAmB,KAAK;AAC9B,IAAM,eAAe,KAAK;AAC1B,IAAM,uBAAuB,KAAK;AAGnC,SAAU,4BACd,SACA,cACA,YAAkB;AAElB,SAAO,aAAa,eAAe;AACrC;AANgB;AAQhB,IAAM,yBAAyB,KAAK;;;ACN9B,IAAO,uBAAP,MAA2B;EAlBjC,OAkBiC;;;EAG/B,YAAY,SAAmC;;AAC7C,SAAK,gBACH,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,kBAAY,QAAA,OAAA,SAAA,KAAI,sBAAsB;EACnD;EAEA,SAAS,SAIR;AACC,UAAM,sBAAsB,KAAK,wBAAwB,QAAQ,KAAK;AAEtE,QAAI,gBAAQ,mBAAmB,GAAG;AAChC,YAAM,iBA
AiB,KAAK,4BAA4B,QAAQ,KAAK;AACrE,YAAM,sBAAsB,KAAK,yCAC/B,QAAQ,OACR,KAAK,YAAY;AAEnB,YAAM,wBAAwB,KAAK,kCACjC,QAAQ,OACR,KAAK,YAAY;AAEnB,YAAM,YAAY;QAChB,GAAG;QACH,GAAG;QACH,GAAG;QACH,GAAG;;AAEL,aAAO;;AAET,WAAO;EACT;EAEA,wBAAwB,OAAa;AACnC,WAAO,gBAAQ,OAAO,CAAC,gBACrB,wBACE,aACA,aACA,oCAAoC,CACrC;EAEL;EAEA,4BAA4B,OAAa;AACvC,WAAO,gBAAQ,OAAO,CAAC,gBACrB,2BACE,aACA,oCAAoC,CACrC;EAEL;EAEA,yCACE,OACA,cAAoB;AAEpB,WAAO,gBAAQ,OAAO,CAAC,gBACrB,yCACE,aACA,cACA,oCAAoC,CACrC;EAEL;EAEA,kCACE,OACA,cAAoB;AAEpB,WAAO,kCACL,OACA,cACA,oCAAoC;EAExC;EAEA,6BAA6B,SAM5B;AACC,WAAO,wBACL,QAAQ,gBACR,QAAQ,MACR,QAAQ,cACR,QAAQ,eACR,QAAQ,sBACR,8BAA8B;EAElC;EAEA,0BAA0B,SAMzB;AACC,WAAO,kCACL,QAAQ,gBACR,QAAQ,MACR,QAAQ,cACR,QAAQ,sBACR,YAAY,QAAQ,QAAQ,GAC5B,uCAAuC;EAE3C;;;;ACxGI,IAAO,aAAP,MAAiB;EAjCvB,OAiCuB;;;EAMrB,eAAe,QAAqB;AAClC,SAAK,uBAAuB,YAAI,QAAQ,sBAAsB,IACzD,OAAO,uBACR,sBAAsB;AAE1B,SAAK,eAAe,YAAI,QAAQ,cAAc,IACzC,OAAO,eACR,sBAAsB;AAE1B,SAAK,oBAAoB,YAAI,QAAQ,mBAAmB,IACnD,OAAO,oBACR,IAAI,qBAAqB,EAAE,cAAc,KAAK,aAAY,CAAE;AAEhE,SAAK,sBAAsB,oBAAI,IAAG;EACpC;EAEA,6BAAkD,OAAa;AAC7D,oBAAQ,OAAO,CAAC,aAAY;AAC1B,WAAK,WAAW,GAAG,SAAS,IAAI,mBAAmB,MAAK;AACtD,cAAM,EACJ,aAAAC,cACA,YAAAC,aACA,QAAAC,SACA,qBAAAC,sBACA,kCACA,wBAAuB,IACrB,eAAe,QAAQ;AAE3B,wBAAQH,cAAa,CAAC,aAAY;AAChC,gBAAM,UAAU,SAAS,QAAQ,IAAI,KAAK,SAAS;AACnD,eAAK,WAAW,GAAG,qBAAqB,QAAQ,CAAC,GAAG,OAAO,IAAI,MAAK;AAClE,kBAAM,SAAS,KAAK,kBAAkB,6BAA6B;cACjE,gBAAgB,SAAS;cACzB,MAAM;cACN,cAAc,SAAS,gBAAgB,KAAK;cAC5C,eAAe,SAAS;cACxB,sBAAsB,KAAK;aAC5B;AAED,kBAAM,MAAM,4BACV,KAAK,oBAAoB,SAAS,IAAI,GACtC,QACA,SAAS,GAAG;AAEd,iBAAK,eAAe,KAAK,MAAM;UACjC,CAAC;QACH,CAAC;AAED,wBAAQC,aAAY,CAAC,aAAY;AAC/B,eAAK,qBACH,UACA,SAAS,KACT,UACA,cACA,SAAS,cACT,qBAAqB,QAAQ,CAAC;QAElC,CAAC;AAED,wBAAQC,SAAQ,CAAC,aAAY;AAC3B,eAAK,qBACH,UACA,SAAS,KACT,YACA,UACA,SAAS,cACT,qBAAqB,QAAQ,CAAC;QAElC,CAAC;AAED,wBAAQC,sBAAqB,CAAC,aAAY;AACxC,eAAK,qBACH,UACA,SAAS,KACT,kBACA,uBACA,SAAS,cACT,qBAAqB,QAAQ,CAAC;QAElC,CAAC;AAED,wBAAQ,kCAAkC,CAAC,aAAY;AACrD,eAAK,qBACH,UACA,SAAS,KACT,sBACA,oCACA,SAAS,cACT,qBAAqB,QAAQ,CAAC;QAElC,CAAC;AAED,wBAAQ,yBAAyB,CAAC,aAAY;AAC5C,eAAK,qBACH,UACA,SAAS,KACT,cACA,2BACA,SAAS,cACT,qBAAqB,QAAQ,CAAC;QAElC,CAAC;MACH,CAAC;IACH,CAAC;EACH;EAEA,qBAEE,MACA,gBACA,SACA,UACA,kBACA,eAAqB;AAErB,SAAK,WACH,GAAG,aAAa,GAAG,mBAAmB,IAAI,KAAK,cAAc,IAC7D,MAAK;AACH,YAAM,SAAS,KAAK,kBAAkB,0BAA0B;QAC9D;QACA;QACA,cAAc,oBAAoB,KAAK;QACvC,sBAAsB,KAAK;QAC3B;OACD;AACD,YAAM,MAAM,4BACV,KAAK,oBAAoB,KAAK,IAAI,GAClC,SACA,cAAc;AAEhB,WAAK,eAAe,KAAK,MAAM;IACjC,CAAC;EAEL;;EAGA,4BAEE,cACA,YAAkB;AAElB,UAAM,oBAAyB,KAAK,6BAA4B;AAChE,WAAO,4BACL,mBACA,cACA,UAAU;EAEd;EAEA,mBAAwC,KAAW;AACjD,WAAO,KAAK,oBAAoB,IAAI,GAAG;EACzC;;EAGA,eAAoC,KAAa,OAAe;AAC9D,SAAK,oBAAoB,IAAI,KAAK,KAAK;EACzC;;AAGF,IAAM,6BAAN,cAAyC,YAAW;EAtMpD,OAsMoD;;;EAApD,cAAA;;AACS,SAAA,aAOH;MACF,QAAQ,CAAA;MACR,aAAa,CAAA;MACb,YAAY,CAAA;MACZ,yBAAyB,CAAA;MACzB,qBAAqB,CAAA;MACrB,kCAAkC,CAAA;;EAuCtC;EApCE,QAAK;AACH,SAAK,aAAa;MAChB,QAAQ,CAAA;MACR,aAAa,CAAA;MACb,YAAY,CAAA;MACZ,yBAAyB,CAAA;MACzB,qBAAqB,CAAA;MACrB,kCAAkC,CAAA;;EAEtC;EAEO,YAAYD,SAAc;AAC/B,SAAK,WAAW,OAAO,KAAKA,OAAM;EACpC;EAEO,6BAA6B,SAAgC;AAClE,SAAK,WAAW,wBAAwB,KAAK,OAAO;EACtD;EAEO,yBAAyB,YAA+B;AAC7D,SAAK,WAAW,oBAAoB,KAAK,UAAU;EACrD;EAEO,sCACL,eAA+C;AAE/C,SAAK,WAAW,iCAAiC,KAAK,aAAa;EACrE;EAEO,gBAAgB,MAAgB;AACrC,SAAK,WAAW,WAAW,KAAK,IAAI;EACtC;EAEO,iBAAiB,IAAe;AACrC,SAAK,WAAW,YAAY,KAAK,EAAE;EACrC;;AAGF,IAAM,mBAAmB,IAAI,2BAA0B;AACjD,SAAU,eAAe,MAAU;AAQvC,mBAAiB,MAAK;AACtB,OAAK,OAAO,gBAAgB;AAC5B,QAAM,aAAa,iBAAiB;AAEpC,mBAAiB,MAAK;AACtB,SAAY;AACd;AAdgB;;;ACrPV,SAAU,0BACd,kBACA,iBAAoE;AAGpE,MAAI,MAAM,iBAAiB,WAAW,MAAM,MAAM;AAIhD,qBAAiB,cAAc,gBAAgB;
AAC/C,qBAAiB,YAAY,gBAAgB;aAMtC,iBAAiB,YAAa,gBAAgB,cAAc,MAAM;AACzE,qBAAiB,YAAY,gBAAgB;;AAEjD;AAnBgB;AA4BV,SAAU,oBACd,kBACA,iBAAgC;AAGhC,MAAI,MAAM,iBAAiB,WAAW,MAAM,MAAM;AAIhD,qBAAiB,cAAc,gBAAgB;AAC/C,qBAAiB,cAAc,gBAAgB;AAC/C,qBAAiB,YAAY,gBAAgB;AAC7C,qBAAiB,YAAY,gBAAgB;AAC7C,qBAAiB,YAAY,gBAAgB;AAC7C,qBAAiB,UAAU,gBAAgB;aAMpC,iBAAiB,YAAa,gBAAgB,cAAe,MAAM;AAC1E,qBAAiB,YAAY,gBAAgB;AAC7C,qBAAiB,YAAY,gBAAgB;AAC7C,qBAAiB,UAAU,gBAAgB;;AAE/C;AAzBgB;AA2BV,SAAU,iBACd,MACA,OACA,eAAqB;AAErB,MAAI,KAAK,SAAS,aAAa,MAAM,QAAW;AAC9C,SAAK,SAAS,aAAa,IAAI,CAAC,KAAK;SAChC;AACL,SAAK,SAAS,aAAa,EAAE,KAAK,KAAK;;AAE3C;AAVgB;AAYV,SAAU,qBACd,MACA,UACA,YAAe;AAEf,MAAI,KAAK,SAAS,QAAQ,MAAM,QAAW;AACzC,SAAK,SAAS,QAAQ,IAAI,CAAC,UAAU;SAChC;AACL,SAAK,SAAS,QAAQ,EAAE,KAAK,UAAU;;AAE3C;AAVgB;;;AC5EhB,IAAM,OAAO;AAEP,SAAU,eAAe,KAAS,WAAiB;AACvD,SAAO,eAAe,KAAK,MAAM;IAC/B,YAAY;IACZ,cAAc;IACd,UAAU;IACV,OAAO;GACR;AACH;AAPgB;;;ACYV,SAAU,aAAiB,KAAU,OAAS;AAClD,QAAM,gBAAgB,aAAK,GAAG;AAC9B,QAAM,sBAAsB,cAAc;AAC1C,WAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC5C,UAAM,gBAAgB,cAAc,CAAC;AACrC,UAAM,iBAAiB,IAAI,aAAa;AACxC,UAAM,uBAAuB,eAAe;AAC5C,aAAS,IAAI,GAAG,IAAI,sBAAsB,KAAK;AAC7C,YAAM,YAAiB,eAAe,CAAC;AAEvC,UAAI,UAAU,iBAAiB,QAAW;AACxC,aAAK,UAAU,IAAI,EAAE,UAAU,UAAU,KAAK;;;;AAKtD;AAhBgB;AAkBV,SAAU,qCACd,aACA,WAAmB;AAInB,QAAM,qBAA0B,kCAAA;EAAa,GAAb;AAKhC,iBAAe,oBAAoB,cAAc,eAAe;AAEhE,QAAM,gBAAgB;IACpB,OAAO,gCAAU,SAA8B,OAAU;AAEvD,UAAI,gBAAQ,OAAO,GAAG;AAGpB,kBAAU,QAAQ,CAAC;;AAIrB,UAAI,oBAAY,OAAO,GAAG;AACxB,eAAO;;AAGT,aAAO,KAAK,QAAQ,IAAI,EAAE,QAAQ,UAAU,KAAK;IACnD,GAdO;IAgBP,iBAAiB,kCAAA;AACf,YAAM,2BAA2B,gBAAgB,MAAM,SAAS;AAChE,UAAI,CAAC,gBAAQ,wBAAwB,GAAG;AACtC,cAAM,gBAAgB,YACpB,0BACA,CAAC,iBAAiB,aAAa,GAAG;AAEpC,cAAM,MACJ,mCAAmC,KAAK,YAAY,IAAI;GACnD,cAAc,KAAK,MAAM,EAAE,QAAQ,OAAO,KAAM,CAAC,EAAE;;IAG9D,GAZiB;;AAenB,qBAAmB,YAAY;AAC/B,qBAAmB,UAAU,cAAc;AAE3C,qBAAmB,cAAc;AAEjC,SAAO;AACT;AAnDgB;AAqDV,SAAU,yCACd,aACA,WACA,iBAAyB;AAIzB,QAAM,qBAA0B,kCAAA;EAAa,GAAb;AAKhC,iBAAe,oBAAoB,cAAc,2BAA2B;AAE5E,QAAM,oBAAoB,OAAO,OAAO,gBAAgB,SAAS;AACjE,kBAAQ,WAAW,CAAC,aAAY;AAC9B,sBAAkB,QAAQ,IAAI;EAChC,CAAC;AAED,qBAAmB,YAAY;AAC/B,qBAAmB,UAAU,cAAc;AAE3C,SAAO;AACT;AAvBgB;AAyBhB,IAAY;CAAZ,SAAYE,4BAAyB;AACnC,EAAAA,2BAAAA,2BAAA,kBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,gBAAA,IAAA,CAAA,IAAA;AACF,GAHY,8BAAA,4BAAyB,CAAA,EAAA;AAW/B,SAAU,gBACd,iBACA,WAAmB;AAEnB,QAAM,gBAAgB,0BAA0B,iBAAiB,SAAS;AAE1E,SAAO;AACT;AAPgB;AASV,SAAU,0BACd,iBACA,WAAmB;AAEnB,QAAM,mBAAmB,eAAO,WAAW,CAAC,iBAAgB;AAC1D,WAAO,mBAAY,gBAAwB,YAAY,CAAC,MAAM;EAChE,CAAC;AAED,QAAM,SAAoC,YACxC,kBACA,CAAC,iBAAgB;AACf,WAAO;MACL,KAAK,4BAA4B,YAAY,QAC3C,gBAAgB,YAAY,IAC7B;MACD,MAAM,0BAA0B;MAChC,YAAY;;EAEhB,CAAC;AAGH,SAAO,gBAAiC,MAAM;AAChD;AAtBgB;;;ACzGV,IAAO,cAAP,MAAkB;EAzBxB,OAyBwB;;;EAoBtB,gBAAqC,QAAqB;AACxD,SAAK,YAAY,CAAA;AAGjB,SAAK,YAAa,OAAe;AAEjC,SAAK,uBAAuB,YAAI,QAAQ,sBAAsB,IACzD,OAAO,uBACR,sBAAsB;AAE1B,QAAI,CAAC,KAAK,WAAW;AACnB,WAAK,2BAA2B;AAChC,WAAK,wBAAwB;AAC7B,WAAK,kBAAkB;AACvB,WAAK,qBAAqB;AAC1B,WAAK,cAAc;WACd;AACL,UAAI,QAAQ,KAAK,KAAK,oBAAoB,GAAG;AAC3C,YAAI,KAAK,iBAAiB;AACxB,eAAK,2BAA2B;AAChC,eAAK,0BAA0B;AAC/B,eAAK,cAAc;AACnB,eAAK,yBAAyB,KAAK;eAC9B;AACL,eAAK,2BAA2B;AAChC,eAAK,0BAA0B;AAC/B,eAAK,cAAc,KAAK;AACxB,eAAK,yBAAyB,KAAK;;iBAE5B,cAAc,KAAK,KAAK,oBAAoB,GAAG;AACxD,YAAI,KAAK,iBAAiB;AACxB,eAAK,2BAAgC;AACrC,eAAK,0BAA+B;AACpC,eAAK,cAAc;AACnB,eAAK,yBACH,KAAK;eACF;AACL,eAAK,2BAA2B;AAChC,eAAK,0BAA0B;AAC/B,eAAK,cAAc,KAAK;AACxB,eAAK,yBACH,KAAK;;iBAEA,QAAQ,KAAK,KAAK,oBAAoB,GAAG;AAClD,aAAK,2BAA2B;AAChC,aAAK,0BAA0B;AAC/B,aAAK,cAAc;AACnB,aAAK,yBAAyB;aACzB;AACL,cAAM,MACJ,kDAAkD,OAAO,oBAAoB,GAAG;;;EAIxF;EAEA,yCAEE,SAAY;AAEZ,YAAQ,WAAW;MACjB,aAAa;MACb,WAAW;;
EAEf;EAEA,wCAEE,SAAY;AAEZ,YAAQ,WAAW;;;;;MAKjB,aAAa,KAAK,GAAG,CAAC,EAAE;MACxB,WAAW;;EAEf;EAEA,mCAAwD,SAAY;AAClE,YAAQ,WAAW;MACjB,aAAa;MACb,WAAW;MACX,aAAa;MACb,WAAW;MACX,SAAS;MACT,WAAW;;EAEf;;;;;;EAOA,kCAAuD,SAAY;AACjE,UAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,YAAQ,WAAW;MACjB,aAAa,UAAU;MACvB,WAAW,UAAU;MACrB,aAAa,UAAU;MACvB,WAAW;MACX,SAAS;MACT,WAAW;;EAEf;EAEA,yBAA8C,cAAoB;AAChE,UAAM,UAAmB;MACvB,MAAM;MACN,UAAU,uBAAO,OAAO,IAAI;;AAG9B,SAAK,uBAAuB,OAAO;AACnC,SAAK,UAAU,KAAK,OAAO;EAC7B;EAEA,wBAAqB;AACnB,SAAK,UAAU,IAAG;EACpB;EAEA,gBAAqC,aAAoB;AAEvD,UAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,UAAM,MAAM,YAAY;AAIxB,QAAI,IAAI,eAAe,UAAU,gBAAgB,MAAM;AACrD,UAAI,YAAY,UAAU;AAC1B,UAAI,UAAU,UAAU;AACxB,UAAI,YAAY,UAAU;WAGvB;AACH,UAAI,cAAc;AAClB,UAAI,YAAY;AAChB,UAAI,cAAc;;EAEtB;EAEA,sBAA2C,aAAoB;AAC7D,UAAM,YAAY,KAAK,GAAG,CAAC;AAE3B,UAAM,MAAM,YAAY;AAIxB,QAAI,IAAI,eAAe,UAAU,gBAAgB,MAAM;AACrD,UAAI,YAAY,UAAU;WAGvB;AACH,UAAI,cAAc;;EAEtB;EAEA,gBAEE,KACA,eAAqB;AAErB,UAAM,UAAU,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;AACxD,qBAAiB,SAAS,eAAe,GAAG;AAE5C,SAAK,yBAAyB,QAAQ,UAAgB,aAAa;EACrE;EAEA,mBAEE,eACA,UAAgB;AAEhB,UAAM,aAAa,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;AAC3D,yBAAqB,YAAY,UAAU,aAAa;AAExD,SAAK,wBAAwB,WAAW,UAAW,cAAc,QAAS;EAC5E;EAEA,+BAA4B;AAK1B,QAAI,oBAAY,KAAK,yBAAyB,GAAG;AAC/C,YAAM,+BAA+B,qCACnC,KAAK,WACL,aAAK,KAAK,oBAAoB,CAAC;AAEjC,WAAK,4BAA4B;AACjC,aAAO;;AAGT,WAAY,KAAK;EACnB;EAEA,2CAAwC;AAKtC,QAAI,oBAAY,KAAK,qCAAqC,GAAG;AAC3D,YAAM,iBAAiB,yCACrB,KAAK,WACL,aAAK,KAAK,oBAAoB,GAC9B,KAAK,6BAA4B,CAAE;AAErC,WAAK,wCAAwC;AAC7C,aAAO;;AAGT,WAAY,KAAK;EACnB;EAEA,+BAA4B;AAC1B,UAAM,YAAY,KAAK;AACvB,WAAO,UAAU,UAAU,SAAS,CAAC;EACvC;EAEA,mCAAgC;AAC9B,UAAM,YAAY,KAAK;AACvB,WAAO,UAAU,UAAU,SAAS,CAAC;EACvC;EAEA,qCAAkC;AAChC,UAAM,kBAAkB,KAAK;AAC7B,WAAO,gBAAgB,gBAAgB,SAAS,CAAC;EACnD;;;;ACtQI,IAAO,eAAP,MAAmB;EAXzB,OAWyB;;;EAKvB,mBAAgB;AACd,SAAK,YAAY,CAAA;AACjB,SAAK,kBAAkB;AACvB,SAAK,UAAU;EACjB;EAEA,IAAI,MAAM,UAAkB;AAG1B,QAAI,KAAK,qBAAqB,MAAM;AAClC,YAAM,MACJ,kFAAkF;;AAKtF,SAAK,MAAK;AACV,SAAK,YAAY;AACjB,SAAK,kBAAkB,SAAS;EAClC;EAEA,IAAI,QAAK;AACP,WAAO,KAAK;EACd;;EAGA,aAAU;AACR,QAAI,KAAK,WAAW,KAAK,UAAU,SAAS,GAAG;AAC7C,WAAK,aAAY;AACjB,aAAO,KAAK,GAAG,CAAC;WACX;AACL,aAAO;;EAEX;;;EAIA,GAAwB,SAAe;AACrC,UAAM,YAAY,KAAK,UAAU;AACjC,QAAI,YAAY,KAAK,KAAK,mBAAmB,WAAW;AACtD,aAAO;WACF;AACL,aAAO,KAAK,UAAU,SAAS;;EAEnC;EAEA,eAAY;AACV,SAAK;EACP;EAEA,mBAAgB;AACd,WAAO,KAAK;EACd;EAEA,iBAAsCC,WAAgB;AACpD,SAAK,UAAUA;EACjB;EAEA,kBAAe;AACb,SAAK,UAAU;EACjB;EAEA,wBAAqB;AACnB,SAAK,UAAU,KAAK,UAAU,SAAS;EACzC;EAEA,mBAAgB;AACd,WAAO,KAAK,iBAAgB;EAC9B;;;;ACnDI,IAAO,gBAAP,MAAoB;EAlB1B,OAkB0B;;;EACxB,OAA+B,MAAa;AAC1C,WAAO,KAAK,KAAK,IAAI;EACvB;EAEA,QAEE,KACA,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,KAAK,OAAO;EACnD;EAEA,QAEE,KACA,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,KAAK,OAAO;EACtD;EAEA,OAEE,KACA,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,GAAG;EACnD;EAEA,GAEE,KACA,YAA6C;AAE7C,WAAO,KAAK,WAAW,YAAY,GAAG;EACxC;EAEA,KAEE,KACA,mBAA0D;AAE1D,WAAO,KAAK,aAAa,KAAK,iBAAiB;EACjD;EAEA,WAEE,KACA,mBAAiE;AAEjE,WAAO,KAAK,mBAAmB,KAAK,iBAAiB;EACvD;EAEA,QAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,SAEE,SACA,SAA2B;AAE3B,WAAO,KAAK,
gBAAgB,SAAS,GAAG,OAAO;EACjD;EAEA,QAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,SAEE,YACA,SAAiC;AAEjC,WAAO,KAAK,gBAAgB,YAAY,GAAG,OAAO;EACpD;EAEA,OAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,QAEE,mBAA0D;AAE1D,WAAO,KAAK,eAAe,mBAAmB,CAAC;EACjD;EAEA,GAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,IAEE,YAAiD;AAEjD,WAAO,KAAK,WAAW,YAAY,CAAC;EACtC;EAEA,KAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,MAEE,mBAA0D;AAE1D,SAAK,aAAa,GAAG,iBAAiB;EACxC;EAEA,SAAmC,SAA+B;AAChE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,UAAoC,SAA+B;AACjE,SAAK,qBAAqB,GAAG,OAAO;EACtC;EAEA,aAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,WAAO,KAAK,mBAAmB,GAAG,iBAAiB;EACrD;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,cAEE,mBAAiE;AAEjE,SAAK,mBAAmB,GAAG,iBAAiB;EAC9C;EAEA,iBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,
2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,kBAEE,SAAqC;AAErC,SAAK,2BAA2B,GAAG,OAAO;EAC5C;EAEA,KAEE,MACA,gBACA,SAAyB,qBAAmB;AAE5C,QAAI,iBAAS,KAAK,mBAAmB,IAAI,GAAG;AAC1C,YAAM,SACJ,qCAAqC,4BAA4B;QAC/D,cAAc;QACd,aAAa,KAAK;OACnB;AAEH,YAAM,QAAQ;QACZ,SAAS;QACT,MAAM,0BAA0B;QAChC,UAAU;;AAEZ,WAAK,iBAAiB,KAAK,KAAK;;AAGlC,SAAK,kBAAkB,KAAK,IAAI;AAEhC,UAAM,qBAAqB,KAAK,WAAW,MAAM,gBAAgB,MAAM;AACtE,SAAa,IAAI,IAAI;AACtB,WAAO;EACT;EAEA,cAEE,MACA,MACA,SAAyB,qBAAmB;AAE5C,UAAM,aAAuC,yBAC3C,MACA,KAAK,mBACL,KAAK,SAAS;AAEhB,SAAK,mBAAmB,KAAK,iBAAiB,OAAO,UAAU;AAE/D,UAAM,qBAAqB,KAAK,WAAW,MAAM,MAAM,MAAM;AAC5D,SAAa,IAAI,IAAI;AACtB,WAAO;EACT;EAEA,UAEE,aACA,MAAY;AAEZ,WAAO,WAAA;AAEL,WAAK,oBAAoB,KAAK,CAAC;AAC/B,YAAM,WAAW,KAAK,eAAc;AACpC,UAAI;AACF,oBAAY,MAAM,MAAM,IAAI;AAE5B,eAAO;eACA,GAAG;AACV,YAAI,uBAAuB,CAAC,GAAG;AAC7B,iBAAO;eACF;AACL,gBAAM;;;AAGR,aAAK,iBAAiB,QAAQ;AAC9B,aAAK,oBAAoB,IAAG;;IAEhC;EACF;;EAGO,qBAAkB;AACvB,WAAO,KAAK;EACd;EAEO,+BAA4B;AACjC,WAAO,iBAAiB,eAAO,KAAK,oBAAoB,CAAC;EAC3D;;;;ACvoBI,IAAO,mBAAP,MAAuB;EApD7B,OAoD6B;;;EAe3B,qBACE,iBACA,QAAqB;AAErB,SAAK,YAAY,KAAK,YAAY;AAElC,SAAK,sBAAsB,CAAA;AAC3B,SAAK,sBAAsB,CAAA;AAC3B,SAAK,mBAAmB;AACxB,SAAK,eAAe;AACpB,SAAK,aAAa;AAElB,SAAK,oBAAoB,CAAA;AACzB,SAAK,YAAY,CAAA;AACjB,SAAK,sBAAsB,CAAA;AAC3B,SAAK,aAAa,CAAA;AAClB,SAAK,wBAAwB,CAAA;AAC7B,SAAK,uBAAuB,CAAA;AAE5B,QAAI,YAAI,QAAQ,mBAAmB,GAAG;AACpC,YAAM,MACJ,gLAE0B;;AAI9B,QAAI,gBAAQ,eAAe,GAAG;AAI5B,UAAI,gBAAQ,eAAwB,GAAG;AACrC,cAAM,MACJ,2IAE+C;;AAInD,UAAI,OAAQ,gBAA0B,CAAC,EAAE,gBAAgB,UAAU;AACjE,cAAM,MACJ,8KAE0B;;;AAKhC,QAAI,gBAAQ,eAAe,GAAG;AAC5B,WAAK,YAAY,eACf,iBACA,CAAC,KAAK,YAAsB;AAC1B,YAAI,QAAQ,IAAI,IAAI;AACpB,eAAO;MACT,GACA,CAAA,CAAwC;eAG1C,YAAI,iBAAiB,OAAO,KAC5B,cAAM,gBAAQ,eAAa,gBAAiB,KAAK,CAAC,GAAG,WAAW,GAChE;AACA,YAAMC,iBAAgB,gBAAQ,eAAa,gBAAiB,KAAK,CAAC;AAClE,YAAM,eAAe,aAAKA,cAAa;AACvC,WAAK,YAAiB,eACpB,cACA,CAAC,KAAK,YAAsB;AAC1B,YAAI,QAAQ,IAAI,IAAI;AACpB,eAAO;MACT,GACA,CAAA,CAAwC;eAEjC,iBAAS,eAAe,GAAG;AACpC,WAAK,YAAY,cAAM,eAAsC;WACxD;AACL,YAAM,IAAI,MACR,wIACuE;;AAM3E,SAAK,UAAU,KAAK,IAAI;AAExB,UAAM,gBAAgB,YAAI,iBAAiB,OAAO,IAC9C,gBAAQ,eAAa,gBAAiB,KAAK,CAAC,IAC5C,eAAO,eAAe;AAC1B,UAAM,wBAAwB,cAAM,eAAe,CAAC,qBAClD,gBAAQ,iBAAiB,eAAe,CAAC;AAG3C,SAAK,eAAe,wBAChB,qCACA;AAKJ,sBAAkB,eAAO,KAAK,SAAS,CAAC;EAC1C;EAEA,WAEE,UACA,MACA,QAAsB;AAEtB,QAAI,KAAK,kBAAkB;AACzB,YAAM,MACJ,iBAAiB,QAAQ;6FACuE;;AAGpG,UAAM,gBAAyB,YAAI,QAAQ,eAAe,IACrD,OAAO,gBACR,oBAAoB;AACxB,UAAM,oBAAoB,YAAI,QAAQ,mBAAmB,IACpD,OAAO,oBACR,oBAAoB;AAIxB,UAAM,YACJ,KAAK,oBAAqB,uBAAuB;AAEnD,SAAK;AACL,SAAK,oBAAoB,SAAS,IAAI;AACtC,SAAK,oBAAoB,QAAQ,IAAI;AAErC,QAAI;AAIJ,QAAI,KAAK,cAAc,MAAM;AAC3B,0BAAoB,gCAASC,sBAExB,MAAU;AAEb,YAAI;AACF,eAAK,0BAA0B,WAAW,UAAU,KAAK,UAAU;AACnE,eAAK,MAAM,MAAM,IAAI;AACrB,gBAAM,MAAM,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;AACpD,eAAK,YAAY,GAAG;AACpB,iBAAO;iBACA,GAAG;AACV,iBAAO,KAAK,gBAAgB,GAAG,eAAe,iBAAiB;;AAE/D,eAAK,uBAAsB;;MAE/B,GAfoB;WAgBf;AACL,0BAAoB,gCAAS,wBAExB,MAAU;AAEb,YAAI;AACF,eAAK,0BAA0B,WAAW,UAAU,KAAK,UAAU;AACnE,iBAAO,KAAK,MAAM,MAAM,IAAI;iBACrB,GAAG;AACV,iBAAO,KAAK,gBAAgB,GAAG,eAAe,iBAAiB;;AAE/D,eAAK,uBAAsB;;MAE/B,GAZoB;;AAetB,UAAM,qBAAoD,OAAO,OAC/D,mBACA,EAAE,UAAU,uBAAuB,KAAI,CAAE;AAG3C,WAAO;EACT;EAEA,gBAEE,GACA,qBACA,mBAA2B;AAE3B,UAAM,qBAAqB,KAAK,WAAW,WAAW;AAKtD,UAAM,gBACJ,uBAAuB,CAAC,KAAK,eAAc,KAAM,KAAK;AAExD,QAAI,uBAAuB,CAAC,GAAG;AAC7B,YAAM,aAAkB;AACxB,UAAI,eAAe;AACjB,cAAM,gBAAgB,KAAK,oBAAmB;AAC9C,YAAI,KAAK,yBAAyB,aAAa,GAAG;AAChD,qBAAW,iBAAiB,KAAK,SAAS,aAAa;AACvD,cA
AI,KAAK,WAAW;AAClB,kBAAM,mBACJ,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;AAC1C,6BAAiB,gBAAgB;AACjC,mBAAO;iBACF;AACL,mBAAO,kBAAkB,CAAC;;eAEvB;AACL,cAAI,KAAK,WAAW;AAClB,kBAAM,mBACJ,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;AAC1C,6BAAiB,gBAAgB;AACjC,uBAAW,mBAAmB;;AAGhC,gBAAM;;iBAEC,oBAAoB;AAE7B,aAAK,sBAAqB;AAG1B,eAAO,kBAAkB,CAAC;aACrB;AAEL,cAAM;;WAEH;AAEL,YAAM;;EAEV;;EAGA,eAEE,mBACA,YAAkB;AAElB,UAAM,MAAM,KAAK,4BAA4B,YAAY,UAAU;AACnE,WAAO,KAAK,oBAAoB,mBAAmB,YAAY,GAAG;EACpE;EAEA,oBAEE,mBACA,YACA,KAAW;AAEX,QAAI,gBAAgB,KAAK,mBAAmB,GAAG;AAC/C,QAAI;AACJ,QAAI,OAAO,sBAAsB,YAAY;AAC3C,eAAS,kBAAkB;AAC3B,YAAM,YAAY,kBAAkB;AAEpC,UAAI,cAAc,QAAW;AAC3B,cAAM,uBAAuB;AAC7B,wBAAgB,6BAAK;AACnB,iBAAO,UAAU,KAAK,IAAI,KAAK,qBAAqB,KAAK,IAAI;QAC/D,GAFgB;;WAIb;AACL,eAAS;;AAGX,QAAI,cAAc,KAAK,IAAI,MAAM,MAAM;AACrC,aAAO,OAAO,KAAK,IAAI;;AAEzB,WAAO;EACT;EAEA,mBAEE,gBACA,mBAAiE;AAEjE,UAAM,QAAQ,KAAK,4BACjB,kBACA,cAAc;AAEhB,WAAO,KAAK,wBACV,gBACA,mBACA,KAAK;EAET;EAEA,wBAEE,gBACA,mBACA,KAAW;AAEX,QAAI,gBAAgB,KAAK,mBAAmB,GAAG;AAC/C,QAAI;AACJ,QAAI,OAAO,sBAAsB,YAAY;AAC3C,eAAS,kBAAkB;AAC3B,YAAM,YAAY,kBAAkB;AAEpC,UAAI,cAAc,QAAW;AAC3B,cAAM,uBAAuB;AAC7B,wBAAgB,6BAAK;AACnB,iBAAO,UAAU,KAAK,IAAI,KAAK,qBAAqB,KAAK,IAAI;QAC/D,GAFgB;;WAIb;AACL,eAAS;;AAGX,QAAe,cAAe,KAAK,IAAI,MAAM,MAAM;AACjD,UAAI,WAAW,KAAK,mBAAmB,MAAM;AAC7C,aACa,cAAe,KAAK,IAAI,MAAM,QACzC,aAAa,MACb;AACA,mBAAW,KAAK,mBAAmB,MAAM;;WAEtC;AACL,YAAM,KAAK,wBACT,gBACA,UAAU,sBACkB,kBAAmB,OAAO;;AAS1D,SAAK,4BACH,KAAK,oBACL,CAAC,gBAAgB,iBAAiB,GAC7B,eACL,kBACA,gBACA,iCAAiC;EAErC;EAEA,2BAEE,gBACA,SAAqC;AAErC,UAAM,QAAQ,KAAK,4BACjB,sBACA,cAAc;AAEhB,SAAK,gCAAgC,gBAAgB,SAAS,KAAK;EACrE;EAEA,gCAEE,gBACA,SACA,KAAW;AAEX,UAAM,SAAS,QAAQ;AACvB,UAAM,YAAY,QAAQ;AAE1B,UAAM,8BAA8B,KAAK,mBAAmB,GAAG;AAG/D,QAAI,4BAA4B,KAAK,IAAI,MAAM,MAAM;AAC9B,aAAQ,KAAK,IAAI;AAItC,YAAM,yBAAyB,6BAAK;AAClC,eAAO,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,SAAS;MAChD,GAF+B;AAK/B,aAAO,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,SAAS,MAAM,MAAM;AAGxD,aAAK,QAAQ,SAAS;AAED,eAAQ,KAAK,IAAI;;AAIxC,WAAK,4BACH,KAAK,6BACL;QACE;QACA;QACA;QACA;QACA;SAEF,wBACA,sBACA,gBACA,oCAAoC;WAEjC;AACL,YAAM,KAAK,wBACT,gBACA,UAAU,qCACV,QAAQ,OAAO;;EAGrB;EAEA,aAEE,gBACA,mBAA0D;AAE1D,UAAM,QAAQ,KAAK,4BAA4B,UAAU,cAAc;AACvE,WAAO,KAAK,kBAAkB,gBAAgB,mBAAmB,KAAK;EACxE;EAEA,kBAEE,gBACA,mBACA,KAAW;AAEX,QAAI,oBAAoB,KAAK,mBAAmB,GAAG;AACnD,QAAI;AACJ,QAAI,OAAO,sBAAsB,YAAY;AAC3C,eAAS,kBAAkB;AAC3B,YAAM,YAAY,kBAAkB;AAEpC,UAAI,cAAc,QAAW;AAC3B,cAAM,uBAAuB;AAC7B,4BAAoB,6BAAK;AACvB,iBAAO,UAAU,KAAK,IAAI,KAAK,qBAAqB,KAAK,IAAI;QAC/D,GAFoB;;WAIjB;AACL,eAAS;;AAGX,QAAI,WAAW;AACf,WAAO,kBAAkB,KAAK,IAAI,MAAM,QAAQ,aAAa,MAAM;AACjE,iBAAW,KAAK,mBAAmB,MAAM;;AAI3C,SAAK;MACH,KAAK;MACL,CAAC,gBAAgB,iBAAiB;MAC7B;MACL;MACA;MACA;;;;;;MAMA;IAAQ;EAEZ;EAEA,qBAEE,gBACA,SAA+B;AAE/B,UAAM,QAAQ,KAAK,4BACjB,cACA,cAAc;AAEhB,SAAK,0BAA0B,gBAAgB,SAAS,KAAK;EAC/D;EAEA,0BAEE,gBACA,SACA,KAAW;AAEX,UAAM,SAAS,QAAQ;AACvB,UAAM,YAAY,QAAQ;AAC1B,UAAM,uBAAuB,KAAK,mBAAmB,GAAG;AAGxD,QAAI,qBAAqB,KAAK,IAAI,MAAM,MAAM;AAC5C,aAAO,KAAK,IAAI;AAEhB,YAAM,yBAAyB,6BAAK;AAClC,eAAO,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,SAAS;MAChD,GAF+B;AAI/B,aAAO,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,SAAS,MAAM,MAAM;AAGxD,aAAK,QAAQ,SAAS;AAEtB,eAAO,KAAK,IAAI;;AAIlB,WAAK,4BACH,KAAK,6BACL;QACE;QACA;QACA;QACA;QACA;SAEF,wBACA,cACA,gBACA,8BAA8B;;EAGpC;EAEA,4BAEE,gBACA,WACA,wBACA,QACA,yBAAyE;AAEzE,WAAO,uBAAsB,GAAI;AAG/B,WAAK,QAAQ,SAAS;AACtB,aAAO,KAAK,IAAI;;AASlB,SAAK,4BACH,KAAK,6BACL;MACE;MACA;MACA;MACA;MACA;OAEF,wBACA,sBACA,gBACA,uBAAuB;EAE3B;EAEA,mBAAwC,QAAgB;AACtD,UAAM,kBAAkB,KAAK,iBAAgB;AAC7C,WAAO,KAAK,IAAI;AAChB,UAAM,iBAAiB,KAAK,iBAAgB;AAI5C,WAAO,iBAAiB;EAC1B;EAEA,WAEE,YACA,YAAkB;AAElB,UAAM,QAAQ,KAAK,4BAA4B,QAAQ,UAAU;AACjE,UAAM,OAAO,gB
AAQ,UAAU,IAAI,aAAa,WAAW;AAE3D,UAAM,SAAS,KAAK,mBAAmB,KAAK;AAC5C,UAAM,eAAe,OAAO,KAAK,MAAM,IAAI;AAC3C,QAAI,iBAAiB,QAAW;AAC9B,YAAM,oBAAyB,KAAK,YAAY;AAChD,aAAO,kBAAkB,IAAI,KAAK,IAAI;;AAExC,SAAK,oBACH,YACC,WAAqC,OAAO;EAEjD;EAEA,yBAAsB;AACpB,SAAK,WAAW,IAAG;AACnB,SAAK,sBAAsB,IAAG;AAG9B,SAAK,sBAAqB;AAE1B,QAAI,KAAK,WAAW,WAAW,KAAK,KAAK,eAAc,MAAO,OAAO;AACnE,YAAM,oBAAoB,KAAK,GAAG,CAAC;AACnC,YAAM,SAAS,KAAK,qBAAqB,8BAA8B;QACrE,gBAAgB;QAChB,UAAU,KAAK,oBAAmB;OACnC;AACD,WAAK,WACH,IAAI,2BAA2B,QAAQ,iBAAiB,CAAC;;EAG/D;EAEA,gBAEE,YACA,KACA,SAAiC;AAEjC,QAAI;AACJ,QAAI;AACF,YAAM,OAAO,YAAY,SAAY,QAAQ,OAAO;AACpD,WAAK,aAAa;AAClB,mBAAa,WAAW,MAAM,MAAM,IAAI;AACxC,WAAK,mBACH,YACA,YAAY,UAAa,QAAQ,UAAU,SACvC,QAAQ,QACR,WAAW,QAAQ;AAEzB,aAAO;aACA,GAAG;AACV,YAAM,KAAK,qBAAqB,GAAG,SAAS,WAAW,QAAQ;;EAEnE;EAEA,qBAEE,GACA,SACA,UAAgB;AAEhB,QAAI,uBAAuB,CAAC,KAAK,EAAE,qBAAqB,QAAW;AACjE,WAAK,mBACH,EAAE,kBACF,YAAY,UAAa,QAAQ,UAAU,SACvC,QAAQ,QACR,QAAQ;AAGd,aAAO,EAAE;;AAEX,UAAM;EACR;EAEA,gBAEE,SACA,KACA,SAAsC;AAEtC,QAAI;AACJ,QAAI;AACF,YAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,UAAI,KAAK,aAAa,WAAW,OAAO,MAAM,MAAM;AAClD,aAAK,aAAY;AACjB,wBAAgB;aACX;AACL,aAAK,qBAAqB,SAAS,WAAW,OAAO;;aAEhD,kBAAkB;AACzB,sBAAgB,KAAK,wBACnB,SACA,KACA,gBAAgB;;AAIpB,SAAK,gBACH,YAAY,UAAa,QAAQ,UAAU,SACvC,QAAQ,QACR,QAAQ,MACZ,aAAa;AAEf,WAAO;EACT;EAEA,qBAEE,SACA,WACA,SAAsC;AAEtC,QAAI;AACJ,UAAM,gBAAgB,KAAK,GAAG,CAAC;AAC/B,QAAI,YAAY,UAAa,QAAQ,SAAS;AAC5C,YAAM,QAAQ;WACT;AACL,YAAM,KAAK,qBAAqB,0BAA0B;QACxD,UAAU;QACV,QAAQ;QACR,UAAU;QACV,UAAU,KAAK,oBAAmB;OACnC;;AAEH,UAAM,KAAK,WACT,IAAI,yBAAyB,KAAK,WAAW,aAAa,CAAC;EAE/D;EAEA,wBAEE,SACA,KACA,kBAAuB;AAIvB,QACE,KAAK;IAEL,iBAAiB,SAAS,8BAC1B,CAAC,KAAK,eAAc,GACpB;AACA,YAAM,UAAU,KAAK,4BAAiC,SAAS,GAAG;AAClE,UAAI;AACF,eAAO,KAAK,kBAAuB,SAAS,OAAO;eAC5C,qBAAqB;AAC5B,YAAI,oBAAoB,SAAS,4BAA4B;AAG3D,gBAAM;eACD;AACL,gBAAM;;;WAGL;AACL,YAAM;;EAEV;EAEA,iBAAc;AAEZ,UAAM,cAAc,KAAK;AACzB,UAAM,iBAAiB,cAAM,KAAK,UAAU;AAC5C,WAAO;MACL,QAAQ;MACR,YAAY,KAAK,iBAAgB;MACjC,YAAY;MACZ,WAAW,KAAK;;EAEpB;EAEA,iBAAsCC,WAAsB;AAC1D,SAAK,SAASA,UAAS;AACvB,SAAK,iBAAiBA,UAAS,UAAU;AACzC,SAAK,aAAaA,UAAS;EAC7B;EAEA,0BAEE,WACA,UACA,kBAAwB;AAExB,SAAK,sBAAsB,KAAK,gBAAgB;AAChD,SAAK,WAAW,KAAK,SAAS;AAE9B,SAAK,yBAAyB,QAAQ;EACxC;EAEA,iBAAc;AACZ,WAAO,KAAK,oBAAoB,WAAW;EAC7C;EAEA,sBAAmB;AACjB,UAAM,YAAY,KAAK,6BAA4B;AACnD,WAAO,KAAK,oBAAoB,SAAS;EAC3C;EAEA,wBAA6C,WAAiB;AAC5D,WAAO,KAAK,oBAAoB,SAAS;EAC3C;EAEO,iBAAc;AACnB,WAAO,KAAK,aAAa,KAAK,GAAG,CAAC,GAAG,GAAG;EAC1C;EAEO,QAAK;AACV,SAAK,gBAAe;AACpB,SAAK,aAAa;AAClB,SAAK,sBAAsB,CAAA;AAC3B,SAAK,SAAS,CAAA;AACd,SAAK,aAAa,CAAA;AAElB,SAAK,YAAY,CAAA;AACjB,SAAK,wBAAwB,CAAA;EAC/B;;;;AC30BI,IAAO,eAAP,MAAmB;EAjBzB,OAiByB;;;EAIvB,iBAAiB,QAAqB;AACpC,SAAK,UAAU,CAAA;AACf,SAAK,uBAAuB,YAAI,QAAQ,sBAAsB,IACzD,OAAO,uBACR,sBAAsB;EAC5B;EAEA,WAEE,OAA4B;AAE5B,QAAI,uBAAuB,KAAK,GAAG;AACjC,YAAM,UAAU;QACd,WAAW,KAAK,0BAAyB;QACzC,qBAAqB,cAAM,KAAK,qBAAqB;;AAEvD,WAAK,QAAQ,KAAK,KAAK;AACvB,aAAO;WACF;AACL,YAAM,MACJ,6DAA6D;;EAGnE;EAEA,IAAI,SAAM;AACR,WAAO,cAAM,KAAK,OAAO;EAC3B;EAEA,IAAI,OAAO,WAAkC;AAC3C,SAAK,UAAU;EACjB;;EAGA,wBAEE,YACA,UACA,mBAAqC;AAErC,UAAM,WAAW,KAAK,oBAAmB;AACzC,UAAM,cAAc,KAAK,mBAAkB,EAAG,QAAQ;AACtD,UAAM,+BAA+B,iCACnC,YACA,aACA,UACA,KAAK,YAAY;AAEnB,UAAM,kBAAkB,6BAA6B,CAAC;AACtD,UAAM,eAAe,CAAA;AACrB,aAAS,IAAI,GAAG,KAAK,KAAK,cAAc,KAAK;AAC3C,mBAAa,KAAK,KAAK,GAAG,CAAC,CAAC;;AAE9B,UAAM,MAAM,KAAK,qBAAqB,sBAAsB;MAC1D,wBAAwB;MACxB,QAAQ;MACR,UAAU,KAAK,GAAG,CAAC;MACnB,uBAAuB;MACvB;KACD;AAED,UAAM,KAAK,WAAW,IAAI,mBAAmB,KAAK,KAAK,GAAG,CAAC,GAAG,KAAK,GAAG,CAAC,CAAC,CAAC;EAC3E;;EAGA,oBAEE,YACA,aAA+B;AAE/B,UAAM,WAAW,KAAK,oBAAmB;AACzC,UAAM,cAAc,KAAK,mBAAkB,EAAG,QAAQ;AAEtD,UAAM,+BAA+B,uBACnC,YACA,aACA,
KAAK,YAAY;AAGnB,UAAM,eAAe,CAAA;AACrB,aAAS,IAAI,GAAG,KAAK,KAAK,cAAc,KAAK;AAC3C,mBAAa,KAAK,KAAK,GAAG,CAAC,CAAC;;AAE9B,UAAM,gBAAgB,KAAK,GAAG,CAAC;AAE/B,UAAM,SAAS,KAAK,qBAAqB,wBAAwB;MAC/D,qBAAqB;MACrB,QAAQ;MACR,UAAU;MACV,uBAAuB;MACvB,UAAU,KAAK,oBAAmB;KACnC;AAED,UAAM,KAAK,WACT,IAAI,qBAAqB,QAAQ,KAAK,GAAG,CAAC,GAAG,aAAa,CAAC;EAE/D;;;;AC7GI,IAAO,gBAAP,MAAoB;EAP1B,OAO0B;;;EACxB,oBAAiB;EAAI;EAEd,qBAEL,eACA,gBAAwB;AAExB,UAAM,gBAAgB,KAAK,qBAAqB,aAAa;AAE7D,QAAI,oBAAY,aAAa,GAAG;AAC9B,YAAM,MAAM,UAAU,aAAa,oCAAoC;;AAGzE,WAAO,wBACL,CAAC,aAAa,GACd,gBACA,KAAK,cACL,KAAK,YAAY;EAErB;;;EAIO,0BAEL,aAA8B;AAE9B,UAAM,cAAc,aAAM,YAAY,SAAS;AAC/C,UAAM,kBAAkB,KAAK,mBAAkB;AAC/C,UAAM,gBAAgB,gBAAgB,WAAW;AACjD,UAAM,yBAAyB,IAAI,qBACjC,eACA,WAAW,EACX,aAAY;AACd,WAAO;EACT;;;;ACEF,IAAM,wBAAwB;EAC5B,aAAa;;AAEf,OAAO,OAAO,qBAAqB;AAEnC,IAAM,mBAAmB;AACzB,IAAM,iBAAiB,KAAK,IAAI,GAAG,uBAAuB,IAAI;AAE9D,IAAM,MAAM,YAAY,EAAE,MAAM,yBAAyB,SAAS,MAAM,GAAE,CAAE;AAC5E,kBAAkB,CAAC,GAAG,CAAC;AACvB,IAAM,wBAAwB;EAC5B;EACA;;;EAKA;EACA;EACA;EACA;EACA;EACA;AAAE;AAEJ,OAAO,OAAO,qBAAqB;AAEnC,IAAM,0BAAmC;EACvC,MACE;EAEF,UAAU,CAAA;;AAMN,IAAO,eAAP,MAAmB;EAvEzB,OAuEyB;;;EAIvB,iBAAsC,QAAqB;AACzD,SAAK,qBAAqB,CAAA;AAC1B,SAAK,kBAAkB;EACzB;EAEA,kBAAe;AACb,SAAK,kBAAkB;AAEvB,SAAK,WAAW,oBAAoB,MAAK;AAUvC,eAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AAC3B,cAAM,MAAM,IAAI,IAAI,IAAI;AACxB,aAAK,UAAU,GAAG,EAAe,IAAI,SAAU,MAAM,MAAI;AACvD,iBAAO,KAAK,sBAAsB,MAAM,GAAG,IAAI;QACjD;AACA,aAAK,UAAU,GAAG,EAAe,IAAI,SAAU,MAAM,MAAI;AACvD,iBAAO,KAAK,sBAAsB,MAAM,GAAG,IAAI;QACjD;AACA,aAAK,SAAS,GAAG,EAAc,IAAI,SAAU,MAAI;AAC/C,iBAAO,KAAK,qBAAqB,MAAM,CAAC;QAC1C;AACA,aAAK,KAAK,GAAG,EAAU,IAAI,SAAU,MAAI;AACvC,iBAAO,KAAK,iBAAiB,MAAM,CAAC;QACtC;AACA,aAAK,OAAO,GAAG,EAAY,IAAI,SAAU,MAAI;AAC3C,eAAK,mBAAmB,GAAG,IAAI;QACjC;AACA,aAAK,WAAW,GAAG,EAAgB,IAAI,SAAU,MAAI;AACnD,eAAK,2BAA2B,GAAG,IAAI;QACzC;AACA,aAAK,eAAe,GAAG,EAAoB,IAAI,SAAU,MAAI;AAC3D,eAAK,yBAAyB,GAAG,IAAI;QACvC;AACA,aAAK,mBAAmB,GAAG,EAAwB,IAAI,SAAU,MAAI;AACnE,eAAK,iCAAiC,GAAG,IAAI;QAC/C;;AAIF,WAAK,SAAS,IAAI,SAAU,KAAK,MAAM,MAAI;AACzC,eAAO,KAAK,sBAAsB,MAAM,KAAK,IAAI;MACnD;AACA,WAAK,SAAS,IAAI,SAAU,KAAK,MAAM,MAAI;AACzC,eAAO,KAAK,sBAAsB,MAAM,KAAK,IAAI;MACnD;AACA,WAAK,QAAQ,IAAI,SAAU,KAAK,MAAI;AAClC,eAAO,KAAK,qBAAqB,MAAM,GAAG;MAC5C;AACA,WAAK,IAAI,IAAI,SAAU,KAAK,MAAI;AAC9B,eAAO,KAAK,iBAAiB,MAAM,GAAG;MACxC;AACA,WAAK,MAAM,IAAI,SAAU,KAAK,MAAI;AAChC,aAAK,mBAAmB,KAAK,IAAI;MACnC;AACA,WAAK,YAAY,IAAI,SAAU,KAAK,MAAI;AACtC,aAAK,yBAAyB,KAAK,IAAI;MACzC;AAEA,WAAK,SAAS,KAAK;AACnB,WAAK,YAAY,KAAK;AACtB,WAAK,KAAK,KAAK;IACjB,CAAC;EACH;EAEA,mBAAgB;AACd,SAAK,kBAAkB;AAKvB,SAAK,WAAW,8BAA8B,MAAK;AACjD,YAAM,OAAY;AAElB,eAAS,IAAI,GAAG,IAAI,IAAI,KAAK;AAC3B,cAAM,MAAM,IAAI,IAAI,IAAI;AACxB,eAAO,KAAK,UAAU,GAAG,EAAE;AAC3B,eAAO,KAAK,UAAU,GAAG,EAAE;AAC3B,eAAO,KAAK,SAAS,GAAG,EAAE;AAC1B,eAAO,KAAK,KAAK,GAAG,EAAE;AACtB,eAAO,KAAK,OAAO,GAAG,EAAE;AACxB,eAAO,KAAK,WAAW,GAAG,EAAE;AAC5B,eAAO,KAAK,eAAe,GAAG,EAAE;AAChC,eAAO,KAAK,mBAAmB,GAAG,EAAE;;AAGtC,aAAO,KAAK,SAAS;AACrB,aAAO,KAAK,SAAS;AACrB,aAAO,KAAK,QAAQ;AACpB,aAAO,KAAK,IAAI;AAChB,aAAO,KAAK,MAAM;AAClB,aAAO,KAAK,YAAY;AAExB,aAAO,KAAK;AACZ,aAAO,KAAK;AACZ,aAAO,KAAK;IACd,CAAC;EACH;;;;EAKA,cAAsC,MAAa;EAEnD;;EAGA,iBACE,aACA,MAAY;AAEZ,WAAO,MAAM;EACf;;;EAIA,UAAU,SAAe;AAGvB,WAAO;EACT;EAEA,mBAAmB,MAAc,KAAa;AAC5C,QAAI;AACF,YAAM,kBAAkB,IAAI,KAAK,EAAE,YAAY,CAAA,GAAI,KAAU,CAAE;AAC/D,sBAAgB,OAAO;AACvB,WAAK,mBAAmB,KAAK,eAAe;AAC5C,UAAI,KAAK,IAAI;AACb,WAAK,mBAAmB,IAAG;AAC3B,aAAO;aACA,eAAe;AACtB,UAAI,cAAc,yBAAyB,MAAM;AAC/C,YAAI;AACF,wBAAc,UACZ,cAAc,UACd;iBAEK,iBAAiB;AAExB,gBAAM;;;AAGV,YAAM;;EAEV;;EAGA,qBAEE,mBACA,YAAkB;AAElB,WAAO,WAAW,KAAK,MAAM,QAAQ,mBAAmB,UAAU;EACpE;EAEA,yBAEE,YACA,m
BAAiE;AAEjE,eAAW,KAAK,MAAM,qBAAqB,mBAAmB,UAAU;EAC1E;EAEA,iCAEE,YACA,SAAqC;AAErC,eAAW,KACT,MACA,kCACA,SACA,YACA,gBAAgB;EAEpB;EAEA,mBAEE,YACA,mBAA0D;AAE1D,eAAW,KAAK,MAAM,YAAY,mBAAmB,UAAU;EACjE;EAEA,2BAEE,YACA,SAA+B;AAE/B,eAAW,KACT,MACA,yBACA,SACA,YACA,gBAAgB;EAEpB;EAEA,iBAEE,YACA,YAAkB;AAElB,WAAO,aAAa,KAAK,MAAM,YAAY,UAAU;EACvD;EAEA,sBAEE,YACA,YACA,SAAiC;AAEjC,2BAAuB,UAAU;AACjC,QAAI,CAAC,cAAc,YAAI,YAAY,UAAU,MAAM,OAAO;AACxD,YAAM,QAAa,IAAI,MACrB,WAAW,aAAa,UAAU,CAAC,uEACiB,KAAK,UACrD,UAAU,CACX;2BAEQ,KAAK,mBAAmB,CAAC,EAAG,IACrC,GAAG;AAEP,YAAM,uBAAuB;AAC7B,YAAM;;AAGR,UAAM,WAAgB,aAAK,KAAK,kBAAkB;AAClD,UAAM,WAAW,WAAW;AAC5B,UAAM,kBAAkB,IAAI,YAAY;MACtC,KAAK;MACL,iBAAiB;MACjB,OAAO,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS;;MAEhB,gBAAgB;KACjB;AACD,aAAS,WAAW,KAAK,eAAe;AAExC,WAAO,KAAK,YACR,0BACK;EACX;EAEA,sBAEE,SACA,YACA,SAA2B;AAE3B,2BAAuB,UAAU;AACjC,QAAI,CAAC,oBAAoB,OAAO,GAAG;AACjC,YAAM,QAAa,IAAI,MACrB,WAAW,aAAa,UAAU,CAAC,mEACa,KAAK,UACjD,OAAO,CACR;2BAEQ,KAAK,mBAAmB,CAAC,EAAG,IACrC,GAAG;AAEP,YAAM,uBAAuB;AAC7B,YAAM;;AAER,UAAM,WAAgB,aAAK,KAAK,kBAAkB;AAClD,UAAM,kBAAkB,IAAI,SAAS;MACnC,KAAK;MACL,cAAc;MACd,OAAO,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS;KACjB;AACD,aAAS,WAAW,KAAK,eAAe;AAExC,WAAO;EACT;;AAGF,SAAS,WACP,iBACA,aACA,YACA,YAAqB,OAAK;AAE1B,yBAAuB,UAAU;AACjC,QAAM,WAAgB,aAAK,KAAK,kBAAkB;AAClD,QAAM,gBAAgB,mBAAW,WAAW,IAAI,cAAc,YAAY;AAE1E,QAAM,UAAU,IAAI,gBAAgB,EAAE,YAAY,CAAA,GAAI,KAAK,WAAU,CAAE;AACvE,MAAI,WAAW;AACb,YAAQ,YAAY,YAAY;;AAElC,MAAI,YAAI,aAAa,eAAe,GAAG;AACrC,YAAQ,eAAe,YAAY;;AAGrC,OAAK,mBAAmB,KAAK,OAAO;AACpC,gBAAc,KAAK,IAAI;AACvB,WAAS,WAAW,KAAK,OAAO;AAChC,OAAK,mBAAmB,IAAG;AAE3B,SAAO;AACT;AAxBS;AA0BT,SAAS,aAAa,aAAkB,YAAkB;AACxD,yBAAuB,UAAU;AACjC,QAAM,WAAgB,aAAK,KAAK,kBAAkB;AAElD,QAAM,aAAa,gBAAQ,WAAW,MAAM;AAC5C,QAAM,OACJ,eAAe,QAAQ,cAAc,YAAY;AAEnD,QAAM,YAAY,IAAI,YAAY;IAChC,YAAY,CAAA;IACZ,KAAK;IACL,mBAAmB,cAAc,YAAY,uBAAuB;GACrE;AACD,MAAI,YAAI,aAAa,eAAe,GAAG;AACrC,cAAU,eAAe,YAAY;;AAGvC,QAAM,gBAAgB,aAAK,MAAM,CAAC,YAAiB,mBAAW,QAAQ,IAAI,CAAC;AAC3E,YAAU,gBAAgB;AAE1B,WAAS,WAAW,KAAK,SAAS;AAElC,kBAAQ,MAAM,CAAC,YAAW;AACxB,UAAM,cAAc,IAAI,YAAY,EAAE,YAAY,CAAA,EAAE,CAAE;AACtD,cAAU,WAAW,KAAK,WAAW;AACrC,QAAI,YAAI,SAAS,oBAAoB,GAAG;AACtC,kBAAY,oBAAoB,QAAQ;eAGjC,YAAI,SAAS,MAAM,GAAG;AAC7B,kBAAY,oBAAoB;;AAElC,SAAK,mBAAmB,KAAK,WAAW;AACxC,YAAQ,IAAI,KAAK,IAAI;AACrB,SAAK,mBAAmB,IAAG;EAC7B,CAAC;AACD,SAAO;AACT;AArCS;AAuCT,SAAS,aAAa,KAAW;AAC/B,SAAO,QAAQ,IAAI,KAAK,GAAG,GAAG;AAChC;AAFS;AAIT,SAAS,uBAAuB,KAAW;AACzC,MAAI,MAAM,KAAK,MAAM,gBAAgB;AACnC,UAAM,QAAa,IAAI;;MAErB,kCAAkC,GAAG;wDAEjC,iBAAiB,CACnB;IAAE;AAEN,UAAM,uBAAuB;AAC7B,UAAM;;AAEV;AAZS;;;AClbH,IAAO,oBAAP,MAAwB;EAR9B,OAQ8B;;;EAK5B,sBAAsB,QAAqB;AACzC,QAAI,YAAI,QAAQ,eAAe,GAAG;AAChC,YAAM,oBAAoB,OAAO;AACjC,YAAM,gBAAgB,OAAO,sBAAsB;AACnD,WAAK,oBAAoB,gBACb,oBACR;AACJ,WAAK,gBAAgB,gBACjB,oBAAoB,IACnB;WACA;AACL,WAAK,oBAAoB;AACzB,WAAK,gBAAgB,sBAAsB;;AAG7C,SAAK,kBAAkB;EACzB;EAEA,WAAmC,WAAmB,WAAkB;AAGtE,QAAI,KAAK,kBAAkB,MAAM;AAC/B,WAAK;AACL,YAAM,SAAS,IAAI,MAAM,KAAK,kBAAkB,CAAC,EAAE,KAAK,GAAI;AAC5D,UAAI,KAAK,kBAAkB,KAAK,mBAAmB;AACjD,gBAAQ,IAAI,GAAG,MAAM,QAAQ,SAAS,GAAG;;AAE3C,YAAM,EAAE,MAAM,MAAK,IAAK,MAAM,SAAS;AAEvC,YAAM,cAAc,OAAO,KAAK,QAAQ,OAAO,QAAQ;AACvD,UAAI,KAAK,kBAAkB,KAAK,mBAAmB;AACjD,oBAAY,GAAG,MAAM,QAAQ,SAAS,WAAW,IAAI,IAAI;;AAE3D,WAAK;AACL,aAAO;WACF;AACL,aAAO,UAAS;;EAEpB;;;;ACpDI,SAAU,YAAY,aAAkB,WAAgB;AAC5D,YAAU,QAAQ,CAAC,aAAY;AAC7B,UAAM,YAAY,SAAS;AAC3B,WAAO,oBAAoB,SAAS,EAAE,QAAQ,CAAC,aAAY;AACzD,UAAI,aAAa,eAAe;AAC9B;;AAGF,YAAM,qBAAqB,OAAO,yBAChC,WACA,QAAQ;AAGV,UACE,uBACC,mBAAmB,OAAO,mBAAmB,MAC9C;AACA,eAAO,eACL,YAAY,WACZ,UACA,kBAAkB;aAEf;AACL,oBAAY,UAAU,QAAQ,IAAI,SAAS,UAAU,QAAQ;;IAEjE,CAAC;EACH,CAAC;AACH;AA3Bg
B;;;ACuCT,IAAM,cAAc,oBACzB,KACA,IACA,KACA,KACA,KACA,KACA,KACA,GAAG;AAEL,OAAO,OAAO,WAAW;AAIlB,IAAM,wBAET,OAAO,OAAO;EAChB,iBAAiB;EACjB,cAAc;EACd,sBAAsB;EACtB,WAAW;EACX,sBAAsB;EACtB,sBAAsB;EACtB,eAAe;EACf,iBAAiB;CAClB;AAEM,IAAM,sBAAkD,OAAO,OAAO;EAC3E,mBAAmB,6BAAM,QAAN;EACnB,eAAe;CAChB;AAED,IAAY;CAAZ,SAAYC,4BAAyB;AACnC,EAAAA,2BAAAA,2BAAA,mBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,qBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,uBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,uBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,wBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,gBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,qBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,gBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,iCAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,oBAAA,IAAA,CAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,wBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,uBAAA,IAAA,EAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,eAAA,IAAA,EAAA,IAAA;AACA,EAAAA,2BAAAA,2BAAA,6BAAA,IAAA,EAAA,IAAA;AACF,GAfY,8BAAA,4BAAyB,CAAA,EAAA;AAoD/B,SAAU,UAAU,QAAa,QAAS;AAC9C,SAAO,WAAA;AACL,WAAO;EACT;AACF;AAJgB;AAMV,IAAO,SAAP,MAAO,QAAM;EAjInB,OAiImB;;;;;;EAYjB,OAAO,oBAAoB,gBAAsB;AAC/C,UAAM,MACJ,4HAC+D;EAEnE;EAEO,sBAAmB;AACxB,SAAK,WAAW,uBAAuB,MAAK;AAC1C,UAAI;AAEJ,WAAK,mBAAmB;AACxB,YAAM,YAAY,KAAK;AAEvB,WAAK,WAAW,eAAe,MAAK;AAIlC,yBAAiB,IAAI;MACvB,CAAC;AAED,WAAK,WAAW,qBAAqB,MAAK;AACxC,YAAI;AACF,eAAK,gBAAe;AAEpB,0BAAQ,KAAK,mBAAmB,CAAC,iBAAgB;AAC/C,kBAAM,cAAe,KACnB,YAAY;AAEd,kBAAM,wBAAwB,YAAY,uBAAuB;AACjE,gBAAI;AACJ,iBAAK,WAAW,GAAG,YAAY,SAAS,MAAK;AAC3C,iCAAmB,KAAK,mBACtB,cACA,qBAAqB;YAEzB,CAAC;AACD,iBAAK,qBAAqB,YAAY,IAAI;UAC5C,CAAC;;AAED,eAAK,iBAAgB;;MAEzB,CAAC;AAED,UAAI,iBAA2C,CAAA;AAC/C,WAAK,WAAW,qBAAqB,MAAK;AACxC,yBAAiBC,gBAAe;UAC9B,OAAO,eAAO,KAAK,oBAAoB;SACxC;AACD,aAAK,mBAAmB,KAAK,iBAAiB,OAAO,cAAc;MACrE,CAAC;AAED,WAAK,WAAW,uBAAuB,MAAK;AAG1C,YAAI,gBAAQ,cAAc,KAAK,KAAK,oBAAoB,OAAO;AAC7D,gBAAM,mBAAmBC,iBAAgB;YACvC,OAAO,eAAO,KAAK,oBAAoB;YACvC,YAAY,eAAO,KAAK,SAAS;YACjC,gBAAgB;YAChB,aAAa;WACd;AACD,gBAAM,4BAA4B,kBAAkB;YAClD,mBAAmB,KAAK;YACxB,OAAO,eAAO,KAAK,oBAAoB;YACvC,YAAY,eAAO,KAAK,SAAS;YACjC,aAAa;WACd;AACD,eAAK,mBAAmB,KAAK,iBAAiB,OAC5C,kBACA,yBAAyB;;MAG/B,CAAC;AAGD,UAAI,gBAAQ,KAAK,gBAAgB,GAAG;AAElC,YAAI,KAAK,iBAAiB;AACxB,eAAK,WAAW,0BAA0B,MAAK;AAC7C,kBAAM,aAAa,uBACjB,eAAO,KAAK,oBAAoB,CAAC;AAEnC,iBAAK,gBAAgB;UACvB,CAAC;;AAGH,aAAK,WAAW,6BAA6B,MAAK;;AAChD,WAAA,MAAA,KAAA,KAAK,mBAAkB,gBAAU,QAAA,OAAA,SAAA,SAAA,GAAA,KAAA,IAAG;YAClC,OAAO,eAAO,KAAK,oBAAoB;WACxC;AACD,eAAK,6BAA6B,eAAO,KAAK,oBAAoB,CAAC;QACrE,CAAC;;AAGH,UACE,CAAC,QAAO,oCACR,CAAC,gBAAQ,KAAK,gBAAgB,GAC9B;AACA,wBAAgB,YACd,KAAK,kBACL,CAAC,aAAa,SAAS,OAAO;AAEhC,cAAM,IAAI,MACR;GAAwC,cAAc,KACpD,qCAAqC,CACtC,EAAE;;IAGT,CAAC;EACH;EAMA,YAAY,iBAAkC,QAAqB;AAJnE,SAAA,mBAA6C,CAAA;AAC7C,SAAA,mBAAmB;AAIjB,UAAM,OAAsB;AAC5B,SAAK,iBAAiB,MAAM;AAC5B,SAAK,iBAAgB;AACrB,SAAK,eAAe,MAAM;AAC1B,SAAK,qBAAqB,iBAAiB,MAAM;AACjD,SAAK,gBAAgB,MAAM;AAC3B,SAAK,gBAAgB,MAAM;AAC3B,SAAK,kBAAiB;AACtB,SAAK,iBAAiB,MAAM;AAC5B,SAAK,sBAAsB,MAAM;AAEjC,QAAI,YAAI,QAAQ,eAAe,GAAG;AAChC,YAAM,IAAI,MACR,kQAGwB;;AAI5B,SAAK,kBAAkB,YAAI,QAAQ,iBAAiB,IAC/C,OAAO,kBACR,sBAAsB;EAC5B;;AAjJO,OAAA,mCAA4C;AAoJrD,YAAY,QAAQ;EAClB;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;CACD;AAaK,IAAO,wBAAP,cAAqC,OAAM;EApTjD,OAoTiD;;;EAC/C,YACE,iBACA,SAAgC,uBAAqB;AAErD,UAAM,cAAc,cAAM,MAAM;AAChC,gBAAY,YAAY;AACxB,UAAM,iBAAiB,WAAW;EACpC;;;;ACnSI,SAAU,YAAY,MAAY,MAA+B,YAAkB;AACrF,SAAO,GAAG,KAAK,IAAI,IAAI,IAAI,IAAI,UAAU;AAC7C;AAFgB;AAaT,IAAM,YAAY;AAClB,IAAM,iBAAiB;AACvB,IAAM,uBAAuB;AAC7B,IAAM,uBAAuB;AAG7B,IAAM,gBAAgB;AACtB,IAAM,gBAAgB;AACtB,IAAM,qBAAqB;AAC3B,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,eAAe;AAuFtB,IAAgB
,qBAAhB,MAAkC;EAxIxC,OAwIwC;;;EAGpC,YAAY,QAAgB;AACxB,SAAK,SAAS;EAClB;EAEA,YAAS;AACL,WAAO;EACX;;AAGE,IAAO,iBAAP,cAA8B,mBAAkB;EApJtD,OAoJsD;;;EAGlD,YAAY,QAAkB,WAAoB;AAC9C,UAAM,MAAM;AACZ,SAAK,YAAY;EACrB;;AAGE,IAAO,oBAAP,cAAiC,mBAAkB;EA7JzD,OA6JyD;;;EACrD,YAAY,QAAgB;AACxB,UAAM,MAAM;EAChB;EAEA,YAAS;AACL,WAAO;EACX;;AAGE,IAAO,iBAAP,cAA8B,mBAAkB;EAvKtD,OAuKsD;;;EAIlD,YAAY,WAA2B,MAAY,aAAqB;AACpE,UAAM,SAAS;AACf,SAAK,OAAO;AACZ,SAAK,cAAc;EACvB;EAEA,YAAS;AACL,WAAO;EACX;;AAQE,SAAU,UAAU,OAAa;AACnC,QAAM,MAAW;IACb,aAAa,CAAA;IACb,gBAAgB,CAAA;IAChB,kBAAkB,oBAAI,IAAG;IACzB,iBAAiB,oBAAI,IAAG;IACxB,QAAQ,CAAA;;AAEZ,kCAAgC,KAAK,KAAK;AAC1C,QAAM,aAAa,MAAM;AACzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACjC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,YAAY,MAAM,KAAK,MAAM,IAAI;AACvC,QAAI,cAAc,QAAW;AACzB;;AAEJ,oBAAgB,KAAK,MAAM,SAAS;;AAExC,SAAO;AACX;AAnBgB;AAqBhB,SAAS,gCAAgC,KAAU,OAAa;AAC5D,QAAM,aAAa,MAAM;AACzB,WAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACjC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,QAAQ,SAAyB,KAAK,MAAM,QAAW;MACzD,MAAM;KACT;AACD,UAAM,OAAO,SAAwB,KAAK,MAAM,QAAW;MACvD,MAAM;KACT;AACD,UAAM,OAAO;AACb,QAAI,iBAAiB,IAAI,MAAM,KAAK;AACpC,QAAI,gBAAgB,IAAI,MAAM,IAAI;;AAE1C;AAdS;AAgBT,SAAS,KACL,KACA,MACA,YAAuB;AAEvB,MAAI,sBAAsB,UAAU;AAChC,WAAO,SAAS,KAAK,MAAM,WAAW,cAAc,UAAU;aACvD,sBAAsB,aAAa;AAC1C,WAAO,QAAQ,KAAK,MAAM,UAAU;aAC7B,sBAAsB,aAAa;AAC1C,WAAO,YAAY,KAAK,MAAM,UAAU;aACjC,sBAAsB,QAAQ;AACrC,WAAO,OAAO,KAAK,MAAM,UAAU;aAC5B,sBAAsB,YAAY;AACzC,WAAO,WAAW,KAAK,MAAM,UAAU;aAChC,sBAAsB,yBAAyB;AACtD,WAAO,cAAc,KAAK,MAAM,UAAU;aACnC,sBAAsB,qBAAqB;AAClD,WAAO,oBAAoB,KAAK,MAAM,UAAU;aACzC,sBAAsB,kCAAkC;AAC/D,WAAO,uBAAuB,KAAK,MAAM,UAAU;SAChD;AACH,WAAO,MAAM,KAAK,MAAM,UAAyB;;AAEzD;AAxBS;AA0BT,SAAS,WAAW,KAAU,MAAYC,aAAsB;AAC5D,QAAM,YAAY,SAA8B,KAAK,MAAMA,aAAY;IACnE,MAAM;GACT;AACD,sBAAoB,KAAK,SAAS;AAClC,QAAM,SAAS,SACX,KACA,MACA,WACAA,aACA,MAAM,KAAK,MAAMA,WAAU,CAAC;AAEhC,SAAO,KAAK,KAAK,MAAMA,aAAY,MAAM;AAC7C;AAbS;AAeT,SAAS,cACL,KACA,MACAA,aAAmC;AAEnC,QAAM,YAAY,SAA8B,KAAK,MAAMA,aAAY;IACnE,MAAM;GACT;AACD,sBAAoB,KAAK,SAAS;AAClC,QAAM,SAAS,SACX,KACA,MACA,WACAA,aACA,MAAM,KAAK,MAAMA,WAAU,CAAC;AAEhC,QAAM,MAAM,SAAS,KAAK,MAAMA,YAAW,WAAWA,WAAU;AAChE,SAAO,KAAK,KAAK,MAAMA,aAAY,QAAQ,GAAG;AAClD;AAlBS;AAoBT,SAAS,oBACL,KACA,MACAA,aAA+B;AAE/B,QAAM,YAAY,SAA8B,KAAK,MAAMA,aAAY;IACnE,MAAM;GACT;AACD,sBAAoB,KAAK,SAAS;AAClC,QAAM,SAAS,SACX,KACA,MACA,WACAA,aACA,MAAM,KAAK,MAAMA,WAAU,CAAC;AAEhC,SAAO,KAAK,KAAK,MAAMA,aAAY,MAAM;AAC7C;AAjBS;AAmBT,SAAS,uBACL,KACA,MACAA,aAA4C;AAE5C,QAAM,YAAY,SAA8B,KAAK,MAAMA,aAAY;IACnE,MAAM;GACT;AACD,sBAAoB,KAAK,SAAS;AAClC,QAAM,SAAS,SACX,KACA,MACA,WACAA,aACA,MAAM,KAAK,MAAMA,WAAU,CAAC;AAEhC,QAAM,MAAM,SAAS,KAAK,MAAMA,YAAW,WAAWA,WAAU;AAChE,SAAO,KAAK,KAAK,MAAMA,aAAY,QAAQ,GAAG;AAClD;AAlBS;AAoBT,SAAS,YACL,KACA,MACAC,cAAwB;AAExB,QAAM,QAAQ,SAA+B,KAAK,MAAMA,cAAa;IACjE,MAAM;GACT;AACD,sBAAoB,KAAK,KAAK;AAC9B,QAAM,OAAO,YAAIA,aAAY,YAAY,CAAC,MAAM,KAAK,KAAK,MAAM,CAAC,CAAC;AAClE,QAAM,SAAS,SAAS,KAAK,MAAM,OAAOA,cAAa,GAAG,IAAI;AAC9D,SAAO;AACX;AAZS;AAcT,SAAS,OAAO,KAAU,MAAYC,SAAc;AAChD,QAAM,QAAQ,SAA+B,KAAK,MAAMA,SAAQ;IAC5D,MAAM;GACT;AACD,sBAAoB,KAAK,KAAK;AAC9B,QAAM,SAAS,SAAS,KAAK,MAAM,OAAOA,SAAQ,MAAM,KAAK,MAAMA,OAAM,CAAC;AAC1E,SAAO,SAAS,KAAK,MAAMA,SAAQ,MAAM;AAC7C;AAPS;AAST,SAAS,MACL,KACA,MACAC,QAAoC;AAEpC,QAAM,UAAU,eACZ,YAAIA,OAAM,YAAY,CAAC,MAAM,KAAK,KAAK,MAAM,CAAC,CAAC,GAC/C,CAAC,MAAM,MAAM,MAAS;AAE1B,MAAI,QAAQ,WAAW,GAAG;AACtB,WAAO,QAAQ,CAAC;aACT,QAAQ,WAAW,GAAG;AAC7B,WAAO;SACJ;AACH,WAAO,UAAU,KAAK,OAAO;;AAErC;AAhBS;AAkBT,SAAS,KACL,KACA,MACAC,OACA,QACA,KAAe;AAEf,QAAM,WAAW,OAAO;AACxB,QAAM,SAAS,OAAO;AAEtB,QAAM,OAAO,SAA4B,KAAK,MAAMA,OAAM;IACtD,MAAM;GACT;AACD,sBAAoB,KAAK,IAAI;AAC7B,QAAM,MAAM,SAAuB,KAAK,MAAMA,OAAM;IA
ChD,MAAM;GACT;AACD,WAAS,WAAW;AACpB,MAAI,WAAW;AACf,MAAI,YAAY,YAAY,MAAM,MAAM,qCAAqC,uBAAuBA,MAAK,GAAG,CAAC,IAAI;AACjH,UAAQ,QAAQ,IAAI;AAIpB,MAAI,QAAQ,QAAW;AACnB,YAAQ,MAAM,QAAQ;AACtB,YAAQ,MAAM,GAAG;SACd;AACH,YAAQ,MAAM,GAAG;AAEjB,YAAQ,MAAM,IAAI,IAAI;AACtB,YAAQ,IAAI,OAAO,QAAQ;;AAG/B,SAAO;IACH,MAAM;IACN,OAAO;;AAEf;AAtCS;AAwCT,SAAS,KACL,KACA,MACAC,OACA,QACA,KAAe;AAEf,QAAM,QAAQ,OAAO;AACrB,QAAM,MAAM,OAAO;AAEnB,QAAM,QAAQ,SAA6B,KAAK,MAAMA,OAAM;IACxD,MAAM;GACT;AACD,sBAAoB,KAAK,KAAK;AAC9B,QAAM,UAAU,SAAuB,KAAK,MAAMA,OAAM;IACpD,MAAM;GACT;AACD,QAAM,OAAO,SAA4B,KAAK,MAAMA,OAAM;IACtD,MAAM;GACT;AACD,QAAM,WAAW;AACjB,UAAQ,WAAW;AAEnB,UAAQ,OAAO,KAAK;AACpB,UAAQ,OAAO,OAAO;AACtB,UAAQ,KAAK,IAAI;AAEjB,MAAI,QAAQ,QAAW;AACnB,YAAQ,MAAM,OAAO;AAErB,YAAQ,MAAM,IAAI,IAAI;AACtB,YAAQ,IAAI,OAAO,KAAK;SACrB;AACH,YAAQ,MAAM,KAAK;;AAGvB,MAAI,YAAY,YAAY,MAAM,MAAM,4BAA4B,cAAcA,MAAK,GAAG,CAAC,IAAI;AAC/F,SAAO;IACH,MAAM;IACN,OAAO;;AAEf;AAzCS;AA2CT,SAAS,SAAS,KAAU,MAAYC,WAAkB,QAAiB;AACvE,QAAM,QAAQ,OAAO;AACrB,QAAM,MAAM,OAAO;AAEnB,UAAQ,OAAO,GAAG;AAElB,MAAI,YAAY,YAAY,MAAM,UAAUA,UAAS,GAAG,CAAC,IAAI;AAC7D,SAAO;AACX;AARS;AAUT,SAAS,oBAAoB,KAAU,OAAoB;AACvD,MAAI,eAAe,KAAK,KAAK;AAC7B,QAAM,WAAW,IAAI,eAAe,SAAS;AAC7C,SAAO,MAAM;AACjB;AAJS;AAMT,SAAS,SACL,KACA,MACA,OACA,eACG,MAA+B;AAElC,QAAM,MAAM,SAAwB,KAAK,MAAM,YAAY;IACvD,MAAM;IACN;GACH;AACD,QAAM,MAAM;AACZ,aAAW,OAAO,MAAM;AACpB,QAAI,QAAQ,QAAW;AAEnB,cAAQ,OAAO,IAAI,IAAI;AACvB,cAAQ,IAAI,OAAO,GAAG;WACnB;AACH,cAAQ,OAAO,GAAG;;;AAI1B,QAAM,SAAoB;IACtB,MAAM;IACN,OAAO;;AAEX,MAAI,YAAY,YAAY,MAAMC,aAAY,UAAU,GAAG,WAAW,GAAG,CAAC,IAAI;AAC9E,SAAO;AACX;AA5BS;AA8BT,SAASA,aAAY,YAAuB;AACxC,MAAI,sBAAsB,aAAa;AACnC,WAAO;aACA,sBAAsB,QAAQ;AACrC,WAAO;aACA,sBAAsB,YAAY;AACzC,WAAO;aACA,sBAAsB,yBAAyB;AACtD,WAAO;aACA,sBAAsB,qBAAqB;AAClD,WAAO;aACA,sBAAsB,kCAAkC;AAC/D,WAAO;SACJ;AACH,UAAM,IAAI,MAAM,qCAAqC;;AAE7D;AAhBS,OAAAA,cAAA;AAkBT,SAAS,UAAU,KAAU,MAAiB;AAC1C,QAAM,aAAa,KAAK;AACxB,WAAS,IAAI,GAAG,IAAI,aAAa,GAAG,KAAK;AACrC,UAAM,SAAS,KAAK,CAAC;AACrB,QAAI;AACJ,QAAI,OAAO,KAAK,YAAY,WAAW,GAAG;AACtC,mBAAa,OAAO,KAAK,YAAY,CAAC;;AAE1C,UAAM,mBAAmB,sBAAsB;AAC/C,UAAM,iBAAiB;AACvB,UAAM,OAAO,KAAK,IAAI,CAAC,EAAE;AACzB,QACI,OAAO,KAAK,SAAS,aACrB,OAAO,MAAM,SAAS,aACtB,eAAe,WACb,oBAAoB,eAAe,gBAAgB,OAAO,SACxD,WAAW,WAAW,OAAO,QACnC;AAEE,UAAI,kBAAkB;AAClB,uBAAe,cAAc;aAC1B;AACH,mBAAW,SAAS;;AAExB,kBAAY,KAAK,OAAO,KAAK;WAC1B;AAEH,cAAQ,OAAO,OAAO,IAAI;;;AAIlC,QAAMC,SAAQ,KAAK,CAAC;AACpB,QAAM,OAAO,KAAK,aAAa,CAAC;AAChC,SAAO;IACH,MAAMA,OAAM;IACZ,OAAO,KAAK;;AAEpB;AArCS;AAuCT,SAAS,SACL,KACA,MACA,WACA,YAAqC;AAErC,QAAM,OAAO,SAAqB,KAAK,MAAM,YAAY;IACrD,MAAM;GACT;AACD,QAAM,QAAQ,SAAqB,KAAK,MAAM,YAAY;IACtD,MAAM;GACT;AACD,gBAAc,MAAM,IAAI,eAAe,OAAO,SAAS,CAAC;AACxD,SAAO;IACH;IACA;;AAER;AAjBS;AAmBT,SAAS,QACL,KACA,aACA,aAAwB;AAExB,QAAM,OAAO,YAAY;AACzB,QAAM,QAAQ,IAAI,iBAAiB,IAAI,IAAI;AAC3C,QAAM,OAAO,SAA+B,KAAK,aAAa,aAAa;IACvE,MAAM;GACT;AACD,QAAM,QAAQ,SAA+B,KAAK,aAAa,aAAa;IACxE,MAAM;GACT;AAED,QAAM,OAAO,IAAI,eAAe,OAAO,MAAM,KAAK;AAClD,gBAAc,MAAM,IAAI;AAExB,SAAO;IACH;IACA;;AAER;AArBS;AAuBT,SAAS,gBAAgB,KAAU,MAAYL,QAAgB;AAC3D,QAAM,QAAQ,IAAI,iBAAiB,IAAI,IAAI;AAC3C,UAAQ,OAAOA,OAAM,IAAI;AACzB,QAAM,OAAO,IAAI,gBAAgB,IAAI,IAAI;AACzC,UAAQA,OAAM,OAAO,IAAI;AACzB,QAAM,SAAoB;IACtB,MAAM;IACN,OAAO;;AAEX,SAAO;AACX;AAVS;AAYT,SAAS,QAAQ,GAAiB,GAAe;AAC7C,QAAM,aAAa,IAAI,kBAAkB,CAAa;AACtD,gBAAc,GAAG,UAAU;AAC/B;AAHS;AAKT,SAAS,SACL,KACA,MACA,YACA,SAAmB;AAEnB,QAAM,IAAO,OAAA,OAAA;IACT;IACA;IACA,wBAAwB;IACxB;IACA,aAAa,CAAA;IACb,qBAAqB,CAAA;IACrB,aAAa,IAAI,OAAO;EAAM,GAC3B,OAAO;AAEd,MAAI,OAAO,KAAK,CAAC;AACjB,SAAO;AACX;AAlBS;AAoBT,SAAS,cAAc,OAAqB,YAAsB;AAG9D,MAAI,MAAM,YAAY,WAAW,GAAG;AAChC,UAAM,yBAAyB,WAAW,UAAS;;AAEvD,QAAM,YAAY
,KAAK,UAAU;AACrC;AAPS;AAST,SAAS,YAAY,KAAU,OAAe;AAC1C,MAAI,OAAO,OAAO,IAAI,OAAO,QAAQ,KAAK,GAAG,CAAC;AAClD;AAFS;;;ACxmBF,IAAM,YAAY,CAAA;AAQnB,IAAO,eAAP,MAAmB;EA/BzB,OA+ByB;;;EAAzB,cAAA;AACU,SAAA,MAA8B,CAAA;AAC9B,SAAA,UAAuB,CAAA;EAsCjC;EAlCE,IAAI,OAAI;AACN,WAAO,KAAK,QAAQ;EACtB;EAEA,WAAQ;AAEN,SAAK,MAAM,CAAA;EACb;EAEA,IAAI,QAAiB;AACnB,UAAM,MAAM,gBAAgB,MAAM;AAGlC,QAAI,EAAE,OAAO,KAAK,MAAM;AACtB,WAAK,IAAI,GAAG,IAAI,KAAK,QAAQ;AAC7B,WAAK,QAAQ,KAAK,MAAM;;EAE5B;EAEA,IAAI,WAAQ;AACV,WAAO,KAAK;EACd;EAEA,IAAI,OAAI;AACN,WAAO,YAAI,KAAK,SAAS,CAAC,MAAM,EAAE,GAAG;EACvC;EAEA,IAAI,MAAG;AACL,QAAI,QAAQ;AACZ,eAAW,KAAK,KAAK,KAAK;AACxB,eAAS,IAAI;;AAEf,WAAO;EACT;;AAGI,SAAU,gBAAgB,QAAmB,MAAM,MAAI;AAC3D,SAAO,GAAG,MAAM,IAAI,OAAO,GAAG,KAAK,EAAE,IACnC,OAAO,MAAM,WACf,IAAI,OAAO,MAAM,IAAI,CAAC,MAAM,EAAE,YAAY,SAAQ,CAAE,EAAE,KAAK,GAAG,CAAC;AACjE;AAJgB;;;ACZhB,SAAS,eAAe,YAA2B,UAAgB;AAC/D,QAAM,MAAuC,CAAA;AAC7C,SAAO,CAAC,iBAAgB;AACpB,UAAM,MAAM,aAAa,SAAQ;AACjC,QAAI,WAAW,IAAI,GAAG;AACtB,QAAI,aAAa,QAAW;AACxB,aAAO;WACJ;AACH,iBAAW;QACP,eAAe;QACf;QACA,QAAQ,CAAA;;AAEZ,UAAI,GAAG,IAAI;AACX,aAAO;;EAEf;AACJ;AAjBS;AAmBT,IAAM,eAAN,MAAkB;EAhFlB,OAgFkB;;;EAAlB,cAAA;AACY,SAAA,aAAwB,CAAA;EAkBpC;EAhBI,GAAG,OAAa;AACZ,WAAO,SAAS,KAAK,WAAW,UAAU,KAAK,WAAW,KAAK;EACnE;EAEA,IAAI,OAAe,OAAc;AAC7B,SAAK,WAAW,KAAK,IAAI;EAC7B;EAEA,WAAQ;AACJ,QAAI,QAAQ;AACZ,UAAM,OAAO,KAAK,WAAW;AAC7B,aAAS,IAAI,GAAG,IAAI,MAAM,KAAK;AAC3B,eAAS,KAAK,WAAW,CAAC,MAAM,OAAO,MAAM;;AAEjD,WAAO;EACX;;AASJ,IAAM,mBAAmB,IAAI,aAAY;AAMnC,IAAO,0BAAP,cAAuC,qBAAoB;EAjHjE,OAiHiE;;;EAM7D,YAAY,SAAgC;;AACxC,UAAK;AACL,SAAK,WAAU,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,aAAO,QAAA,OAAA,SAAA,KAAK,CAAC,YAAY,QAAQ,IAAI,OAAO;EACxE;EAES,WAAW,SAA0B;AAC1C,SAAK,MAAM,UAAU,QAAQ,KAAK;AAClC,SAAK,OAAO,iBAAiB,KAAK,GAAG;EACzC;EAES,2CAAwC;AAC7C,WAAO,CAAA;EACX;EAES,8BAA2B;AAChC,WAAO,CAAA;EACX;EAES,6BAA6B,SAMrC;AACG,UAAM,EAAE,gBAAgB,MAAM,eAAe,qBAAoB,IAAK;AACtE,UAAM,OAAO,KAAK;AAClB,UAAM,UAAU,KAAK;AACrB,UAAM,MAAM,YAAY,MAAM,eAAe,cAAc;AAC3D,UAAM,gBAAgB,KAAK,IAAI,YAAY,GAAG;AAC9C,UAAM,gBAAgB,cAAc;AACpC,UAAM,cAA2C,YAC7C,kBAAkB;MACd,cAAc;MACd,YAAY;MACZ,UAAU;MACV;KACH,GACD,CAAC,YAAY,YAAI,SAAS,CAAC,SAAS,KAAK,CAAC,CAAC,CAAC;AAGhD,QAAI,cAAc,aAAa,KAAK,KAAK,CAAC,sBAAsB;AAC5D,YAAM,cAAc,eAChB,aACA,CAAC,QAAQ,SAAS,QAAO;AACrB,wBAAQ,SAAS,CAAC,gBAAe;AAC7B,cAAI,aAAa;AACb,mBAAO,YAAY,YAAa,IAAI;AACpC,4BAAQ,YAAY,iBAAkB,CAAC,sBAAqB;AACxD,qBAAO,iBAAiB,IAAI;YAChC,CAAC;;QAET,CAAC;AACD,eAAO;MACX,GACA,CAAA,CAA4B;AAGhC,UAAI,eAAe;AACf,eAAO,SAA4B,QAAM;;AACrC,gBAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,gBAAM,aAAiC,YAAY,UAAU,YAAY;AACzE,cAAI,WAAW,UAAa,eAAe,QAAW;AAClD,kBAAM,QAAO,KAAA,OAAO,UAAU,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE;AACjC,gBAAI,SAAS,UAAa,KAAK,KAAK,IAAI,MAAM,OAAO;AACjD,qBAAO;;;AAGf,iBAAO;QACX;aACG;AACH,eAAO,WAAA;AACH,gBAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,iBAAO,YAAY,UAAU,YAAY;QAC7C;;eAEG,eAAe;AACtB,aAAO,SAA4B,QAAM;AACrC,cAAM,aAAa,IAAI,aAAY;AACnC,cAAM,SAAS,WAAW,SAAY,IAAI,OAAO;AACjD,iBAAS,IAAI,GAAG,IAAI,QAAQ,KAAK;AAC7B,gBAAM,OAAO,WAAM,QAAN,WAAM,SAAA,SAAN,OAAS,CAAC,EAAE;AACzB,qBAAW,IAAI,GAAG,SAAS,UAAa,KAAK,KAAK,IAAI,CAAC;;AAE3D,cAAM,SAAS,gBAAgB,KAAK,MAAM,MAAM,eAAe,YAAY,OAAO;AAClF,eAAO,OAAO,WAAW,WAAW,SAAS;MACjD;WACG;AACH,aAAO,WAAA;AACH,cAAM,SAAS,gBAAgB,KAAK,MAAM,MAAM,eAAe,kBAAkB,OAAO;AACxF,eAAO,OAAO,WAAW,WAAW,SAAS;MACjD;;EAER;EAES,0BAA0B,SAMlC;AACG,UAAM,EAAE,gBAAgB,MAAM,UAAU,qBAAoB,IAAK;AACjE,UAAM,OAAO,KAAK;AAClB,UAAM,UAAU,KAAK;AACrB,UAAM,MAAM,YAAY,MAAM,UAAU,cAAc;AACtD,UAAM,gBAAgB,KAAK,IAAI,YAAY,GAAG;AAC9C,UAAM,gBAAgB,cAAc;AACpC,UAAM,OAAO,YACT,kBAAkB;MACd,cAAc;MACd,YAAY;MACZ;MACA;KACH,GACD,CAAC,MAAK;AACJ,aAAO,YAAI,GAAG,CAAC,MAAM,EAAE,CAAC,CAAC;IAC3B,CAAC;AAGH,QAAI,cAAc,IAAI,KAAK,KAAK,CAAC,EA
AE,CAAC,KAAK,CAAC,sBAAsB;AAC9D,YAAM,MAAM,KAAK,CAAC;AAClB,YAAM,oBAAoB,gBAAQ,GAAG;AAErC,UACE,kBAAkB,WAAW,KAC7B,gBAAQ,kBAAkB,CAAC,EAAE,eAAe,GAC5C;AACA,cAAM,oBAAoB,kBAAkB,CAAC;AAC7C,cAAM,yBAAyB,kBAAkB;AAEjD,eAAO,WAAA;AACL,iBAAO,KAAK,GAAG,CAAC,EAAE,iBAAiB;QACrC;aACK;AACL,cAAM,cAAc,eAClB,mBACA,CAAC,QAAQ,gBAAe;AACtB,cAAI,gBAAgB,QAAW;AAC7B,mBAAO,YAAY,YAAa,IAAI;AACpC,4BAAQ,YAAY,iBAAiB,CAAC,sBAAqB;AACzD,qBAAO,iBAAiB,IAAI;YAC9B,CAAC;;AAEH,iBAAO;QACT,GACA,CAAA,CAA6B;AAG/B,eAAO,WAAA;AACL,gBAAM,YAAY,KAAK,GAAG,CAAC;AAC3B,iBAAO,YAAY,UAAU,YAAY,MAAM;QACjD;;;AAGJ,WAAO,WAAA;AACL,YAAM,SAAS,gBAAgB,KAAK,MAAM,MAAM,eAAe,kBAAkB,OAAO;AACtF,aAAO,OAAO,WAAW,WAAW,QAAQ,WAAW;IAC3D;EACN;;AAIJ,SAAS,cAAc,WAAwC,aAAa,MAAI;AAC5E,QAAM,UAAU,oBAAI,IAAG;AAEvB,aAAW,OAAO,WAAW;AACzB,UAAM,SAAS,oBAAI,IAAG;AACtB,eAAW,WAAW,KAAK;AACvB,UAAI,YAAY,QAAW;AACvB,YAAI,YAAY;AAEZ;eACG;AACH,iBAAO;;;AAGf,YAAM,UAAU,CAAC,QAAQ,YAAa,EAAE,OAAO,QAAQ,eAAgB;AACvE,iBAAW,SAAS,SAAS;AACzB,YAAI,QAAQ,IAAI,KAAK,GAAG;AACpB,cAAI,CAAC,OAAO,IAAI,KAAK,GAAG;AACpB,mBAAO;;eAER;AACH,kBAAQ,IAAI,KAAK;AACjB,iBAAO,IAAI,KAAK;;;;;AAKhC,SAAO;AACX;AA5BS;AA8BT,SAAS,iBAAiB,KAAQ;AAC9B,QAAM,iBAAiB,IAAI,eAAe;AAC1C,QAAM,gBAA4B,MAAM,cAAc;AACtD,WAAS,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACrC,kBAAc,CAAC,IAAI,eAAe,IAAI,eAAe,CAAC,GAAG,CAAC;;AAE9D,SAAO;AACX;AAPS;AAST,SAAS,gBAEL,WACA,UACA,cACA,SAAwB;AAExB,QAAM,MAAM,UAAU,QAAQ,EAAE,YAAY;AAC5C,MAAI,QAAQ,IAAI;AAChB,MAAI,UAAU,QAAW;AACrB,UAAMM,WAAU,kBAAkB,IAAI,aAAyB;AAC/D,YAAQ,YAAY,KAAK,YAAYA,QAAO,CAAC;AAC7C,QAAI,QAAQ;;AAGhB,QAAM,MAAM,iBAAiB,MAAM,MAAM,CAAC,KAAK,OAAO,cAAc,OAAO,CAAC;AAC5E,SAAO;AACX;AAjBS;AAmBT,SAAS,iBAEL,KACA,IACA,cACA,SAAwB;AAExB,MAAI,YAAY;AAEhB,MAAI,IAAI;AACR,QAAM,OAAiB,CAAA;AACvB,MAAI,IAAI,KAAK,GAAG,GAAG;AAEnB,SAAO,MAAM;AACT,QAAI,IAAI,uBAAuB,WAAW,CAAC;AAC3C,QAAI,MAAM,QAAW;AACjB,UAAI,uBAAuB,MAAM,MAAM,CAAC,KAAK,WAAW,GAAG,GAAG,cAAc,OAAO,CAAC;;AAGxF,QAAI,MAAM,WAAW;AACjB,aAAO,0BAA0B,MAAM,WAAW,CAAC;;AAGvD,QAAI,EAAE,kBAAkB,MAAM;AAC1B,aAAO,EAAE;;AAGb,gBAAY;AACZ,SAAK,KAAK,CAAC;AACX,QAAI,KAAK,GAAG,GAAG;;AAEvB;AA/BS;AAiCT,SAAS,uBAEL,KACA,WACA,OACA,WACA,cACA,SAAwB;AAExB,QAAM,QAAQ,gBAAgB,UAAU,SAAS,OAAO,YAAY;AACpE,MAAI,MAAM,SAAS,GAAG;AAClB,eAAW,KAAK,WAAW,OAAO,SAAS;AAC3C,WAAO;;AAGX,MAAIC,YAAW,YAAY,KAAK;AAChC,QAAM,eAAe,aAAa,OAAO,YAAY;AAErD,MAAI,iBAAiB,QAAW;AAC5B,IAAAA,UAAS,gBAAgB;AACzB,IAAAA,UAAS,aAAa;AACtB,IAAAA,UAAS,QAAQ,YAAY;aACtB,iCAAiC,KAAK,GAAG;AAChD,UAAM,aAAa,YAAI,MAAM,IAAI;AACjC,IAAAA,UAAS,gBAAgB;AACzB,IAAAA,UAAS,aAAa;AACtB,IAAAA,UAAS,QAAQ,YAAY;AAC7B,6BAAyB,MAAM,MAAM,CAAC,KAAK,WAAW,MAAM,MAAM,OAAO,CAAC;;AAG9E,EAAAA,YAAW,WAAW,KAAK,WAAW,OAAOA,SAAQ;AACrD,SAAOA;AACX;AAhCS;AAkCT,SAAS,yBAEL,KACA,WACA,kBACA,SAAwB;AAExB,QAAM,aAA0B,CAAA;AAChC,WAAS,IAAI,GAAG,KAAK,WAAW,KAAK;AACjC,eAAW,KAAK,KAAK,GAAG,CAAC,EAAE,SAAS;;AAExC,QAAM,WAAW,IAAI;AACrB,QAAM,eAAe,SAAS;AAC9B,QAAM,aAAa,SAAS;AAC5B,QAAM,UAAU,oBAAoB;IAChC;IACA;IACA;IACA;GACH;AACD,UAAQ,OAAO;AACnB;AArBS;AAuBT,SAAS,oBAAoB,SAK5B;AACG,QAAM,UAAU,YAAI,QAAQ,YAAY,CAAC,YACrCC,YAAW,OAAO,CAAC,EACrB,KAAK,IAAI;AACX,QAAM,aACF,QAAQ,WAAW,QAAQ,IAAI,KAAK,QAAQ,WAAW;AAC3D,MAAI,cACA,qCAAqC,QAAQ,iBAAiB,KAC1D,IAAI,CACP,SAASC,sBAAqB,QAAQ,UAAU,CAAC,GAAG,UAAU,aACnD,QAAQ,aAAa,IAAI;GACjC,OAAO;;AAEf,gBACI,cACA;;AAEJ,SAAO;AACX;AAvBS;AAyBT,SAASA,sBAAqB,MAA+B;AACzD,MAAI,gBAAgB,aAAa;AAC7B,WAAO;aACA,gBAAgB,QAAQ;AAC/B,WAAO;aACA,gBAAgB,aAAa;AACpC,WAAO;aACA,gBAAgB,qBAAqB;AAC5C,WAAO;aACA,gBAAgB,kCAAkC;AACzD,WAAO;aACA,gBAAgB,yBAAyB;AAChD,WAAO;aACA,gBAAgB,YAAY;AACnC,WAAO;aACA,gBAAgB,UAAU;AACjC,WAAO;SACJ;AACH,UAAM,MAAM,sBAAsB;;AAE1C;AApBS,OAAAA,uBAAA;AAsBT,SAAS,0BACL,MACA,UACA,SAAe;AAEf,QAAM,kBAAkB,gBACpB,SAAS,QAAQ,UACjB,CAAC,MAAM,EAAE,MAAM,WAAW;AAE9B,QAA
M,iBAAiB,eACnB,gBACK,OAAO,CAAC,MAA2B,aAAa,cAAc,EAC9D,IAAI,CAAC,MAAM,EAAE,SAAS,GAC3B,CAAC,MAAM,EAAE,YAAY;AAEzB,SAAO;IACH,aAAa;IACb,oBAAoB;IACpB,WAAW;;AAEnB;AApBS;AAsBT,SAAS,uBACL,OACA,OAAa;AAEb,SAAO,MAAM,MAAM,MAAM,YAAY;AACzC;AALS;AAOT,SAAS,gBACL,SACA,OACA,cAA0B;AAE1B,QAAM,eAAe,IAAI,aAAY;AACrC,QAAM,oBAAiC,CAAA;AAEvC,aAAW,KAAK,QAAQ,UAAU;AAC9B,QAAI,aAAa,GAAG,EAAE,GAAG,MAAM,OAAO;AAClC;;AAEJ,QAAI,EAAE,MAAM,SAAS,eAAe;AAChC,wBAAkB,KAAK,CAAC;AACxB;;AAEJ,UAAM,mBAAmB,EAAE,MAAM,YAAY;AAC7C,aAAS,IAAI,GAAG,IAAI,kBAAkB,KAAK;AACvC,YAAM,aAAa,EAAE,MAAM,YAAY,CAAC;AACxC,YAAM,SAAS,mBAAmB,YAAY,KAAK;AACnD,UAAI,WAAW,QAAW;AACtB,qBAAa,IAAI;UACb,OAAO;UACP,KAAK,EAAE;UACP,OAAO,EAAE;SACZ;;;;AAKb,MAAI;AAEJ,MAAI,kBAAkB,WAAW,KAAK,aAAa,SAAS,GAAG;AAC3D,YAAQ;;AAGZ,MAAI,UAAU,QAAW;AACrB,YAAQ,IAAI,aAAY;AACxB,eAAW,KAAK,aAAa,UAAU;AACnC,cAAQ,GAAG,KAAK;;;AAIxB,MAAI,kBAAkB,SAAS,KAAK,CAAC,yBAAyB,KAAK,GAAG;AAClE,eAAW,KAAK,mBAAmB;AAC/B,YAAM,IAAI,CAAC;;;AAInB,SAAO;AACX;AAlDS;AAoDT,SAAS,mBACL,YACA,OAAa;AAEb,MACI,sBAAsB,kBACtB,aAAa,OAAO,WAAW,SAAS,GAC1C;AACE,WAAO,WAAW;;AAEtB,SAAO;AACX;AAXS;AAaT,SAAS,aACL,SACA,cAA0B;AAE1B,MAAI;AACJ,aAAW,KAAK,QAAQ,UAAU;AAC9B,QAAI,aAAa,GAAG,EAAE,GAAG,MAAM,MAAM;AACjC,UAAI,QAAQ,QAAW;AACnB,cAAM,EAAE;iBACD,QAAQ,EAAE,KAAK;AACtB,eAAO;;;;AAInB,SAAO;AACX;AAfS;AAiBT,SAAS,YAAYH,UAAqB;AACtC,SAAO;IACH,SAASA;IACT,OAAO,CAAA;IACP,eAAe;IACf,YAAY;;AAEpB;AAPS;AAST,SAAS,WACL,KACA,MACA,OACA,IAAY;AAEZ,OAAK,YAAY,KAAK,EAAE;AACxB,OAAK,MAAM,MAAM,YAAY,IAAI;AACjC,SAAO;AACX;AATS;AAWT,SAAS,YAAY,KAAU,OAAe;AAC1C,MAAI,UAAU,WAAW;AACrB,WAAO;;AAIX,QAAM,SAAS,MAAM,QAAQ;AAC7B,QAAM,WAAW,IAAI,OAAO,MAAM;AAClC,MAAI,aAAa,QAAW;AACxB,WAAO;;AAEX,QAAM,QAAQ,SAAQ;AACtB,MAAI,OAAO,MAAM,IAAI;AACrB,SAAO;AACX;AAdS;AAgBT,SAAS,kBAAkB,UAAkB;AACzC,QAAM,UAAU,IAAI,aAAY;AAEhC,QAAM,sBAAsB,SAAS,YAAY;AACjD,WAAS,IAAI,GAAG,IAAI,qBAAqB,KAAK;AAC1C,UAAM,SAAS,SAAS,YAAY,CAAC,EAAE;AACvC,UAAM,SAAoB;MACtB,OAAO;MACP,KAAK;MACL,OAAO,CAAA;;AAEX,YAAQ,QAAQ,OAAO;;AAG3B,SAAO;AACX;AAfS;AAiBT,SAAS,QAAQ,QAAmB,SAAqB;AACrD,QAAM,IAAI,OAAO;AAEjB,MAAI,EAAE,SAAS,eAAe;AAC1B,QAAI,OAAO,MAAM,SAAS,GAAG;AACzB,YAAM,WAAW,CAAC,GAAG,OAAO,KAAK;AACjC,YAAM,cAAc,SAAS,IAAG;AAChC,YAAM,eAA0B;QAC5B,OAAO;QACP,KAAK,OAAO;QACZ,OAAO;;AAEX,cAAQ,cAAc,OAAO;WAC1B;AAGH,cAAQ,IAAI,MAAM;;AAEtB;;AAGJ,MAAI,CAAC,EAAE,wBAAwB;AAC3B,YAAQ,IAAI,MAAM;;AAGtB,QAAM,mBAAmB,EAAE,YAAY;AACvC,WAAS,IAAI,GAAG,IAAI,kBAAkB,KAAK;AACvC,UAAM,aAAa,EAAE,YAAY,CAAC;AAClC,UAAM,IAAI,iBAAiB,QAAQ,UAAU;AAE7C,QAAI,MAAM,QAAW;AACjB,cAAQ,GAAG,OAAO;;;AAG9B;AAlCS;AAoCT,SAAS,iBACL,QACA,YAAsB;AAEtB,MAAI,sBAAsB,mBAAmB;AACzC,WAAO;MACH,OAAO,WAAW;MAClB,KAAK,OAAO;MACZ,OAAO,OAAO;;aAEX,sBAAsB,gBAAgB;AAC7C,UAAM,QAAQ,CAAC,GAAG,OAAO,OAAO,WAAW,WAAW;AACtD,WAAO;MACH,OAAO,WAAW;MAClB,KAAK,OAAO;MACZ;;;AAGR,SAAO;AACX;AAnBS;AAqBT,SAAS,yBAAyB,SAAqB;AACnD,aAAW,KAAK,QAAQ,UAAU;AAC9B,QAAI,EAAE,MAAM,SAAS,eAAe;AAChC,aAAO;;;AAGf,SAAO;AACX;AAPS;AAST,SAAS,2BAA2B,SAAqB;AACrD,aAAW,KAAK,QAAQ,UAAU;AAC9B,QAAI,EAAE,MAAM,SAAS,eAAe;AAChC,aAAO;;;AAGf,SAAO;AACX;AAPS;AAST,SAAS,iCAAiC,SAAqB;AAC3D,MAAI,2BAA2B,OAAO,GAAG;AACrC,WAAO;;AAEX,QAAM,UAAU,sBAAsB,QAAQ,QAAQ;AACtD,QAAM,YACF,qBAAqB,OAAO,KAAK,CAAC,6BAA6B,OAAO;AAC1E,SAAO;AACX;AARS;AAUT,SAAS,sBACL,SAA6B;AAE7B,QAAM,eAAe,oBAAI,IAAG;AAC5B,aAAW,KAAK,SAAS;AACrB,UAAM,MAAM,gBAAgB,GAAG,KAAK;AACpC,QAAI,OAAO,aAAa,IAAI,GAAG;AAC/B,QAAI,SAAS,QAAW;AACpB,aAAO,CAAA;AACP,mBAAa,IAAI,KAAK,IAAI;;AAE9B,SAAK,EAAE,GAAG,IAAI;;AAElB,SAAO;AACX;AAdS;AAgBT,SAAS,qBACL,SAA6C;AAE7C,aAAW,SAAS,MAAM,KAAK,QAAQ,OAAM,CAAE,GAAG;AAC9C,QAAI,OAAO,KAAK,KAAK,EAAE,SAAS,GAAG;AAC/B,aAAO;;;AAGf,SAAO;AACX;AATS;AAWT,SAAS,6BACL,SAA6C;AAE7C,aAAW,SAAS,MAAM,KAAK,QAAQ,OAAM,CAAE,GAAG;AAC9C,QAAI,OAAO,KAAK,KAAK,EAAE,
WAAW,GAAG;AACjC,aAAO;;;AAGf,SAAO;AACX;AATS;;;AC5uBF,IAAI;AAAA,CACV,SAAUI,cAAa;AACpB,WAAS,GAAG,OAAO;AACf,WAAO,OAAO,UAAU;AAAA,EAC5B;AAFS;AAGT,EAAAA,aAAY,KAAK;AACrB,GAAG,gBAAgB,cAAc,CAAC,EAAE;AAC7B,IAAI;AAAA,CACV,SAAUC,MAAK;AACZ,WAAS,GAAG,OAAO;AACf,WAAO,OAAO,UAAU;AAAA,EAC5B;AAFS;AAGT,EAAAA,KAAI,KAAK;AACb,GAAG,QAAQ,MAAM,CAAC,EAAE;AACb,IAAI;AAAA,CACV,SAAUC,UAAS;AAChB,EAAAA,SAAQ,YAAY;AACpB,EAAAA,SAAQ,YAAY;AACpB,WAAS,GAAG,OAAO;AACf,WAAO,OAAO,UAAU,YAAYA,SAAQ,aAAa,SAAS,SAASA,SAAQ;AAAA,EACvF;AAFS;AAGT,EAAAA,SAAQ,KAAK;AACjB,GAAG,YAAY,UAAU,CAAC,EAAE;AACrB,IAAI;AAAA,CACV,SAAUC,WAAU;AACjB,EAAAA,UAAS,YAAY;AACrB,EAAAA,UAAS,YAAY;AACrB,WAAS,GAAG,OAAO;AACf,WAAO,OAAO,UAAU,YAAYA,UAAS,aAAa,SAAS,SAASA,UAAS;AAAA,EACzF;AAFS;AAGT,EAAAA,UAAS,KAAK;AAClB,GAAG,aAAa,WAAW,CAAC,EAAE;AAKvB,IAAI;AAAA,CACV,SAAUC,WAAU;AAMjB,WAAS,OAAO,MAAM,WAAW;AAC7B,QAAI,SAAS,OAAO,WAAW;AAC3B,aAAO,SAAS;AAAA,IACpB;AACA,QAAI,cAAc,OAAO,WAAW;AAChC,kBAAY,SAAS;AAAA,IACzB;AACA,WAAO,EAAE,MAAM,UAAU;AAAA,EAC7B;AARS;AAST,EAAAA,UAAS,SAAS;AAIlB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,SAAS,UAAU,IAAI,KAAK,GAAG,SAAS,UAAU,SAAS;AAAA,EACxG;AAHS;AAIT,EAAAA,UAAS,KAAK;AAClB,GAAG,aAAa,WAAW,CAAC,EAAE;AAKvB,IAAI;AAAA,CACV,SAAUC,QAAO;AACd,WAAS,OAAO,KAAK,KAAK,OAAO,MAAM;AACnC,QAAI,GAAG,SAAS,GAAG,KAAK,GAAG,SAAS,GAAG,KAAK,GAAG,SAAS,KAAK,KAAK,GAAG,SAAS,IAAI,GAAG;AACjF,aAAO,EAAE,OAAO,SAAS,OAAO,KAAK,GAAG,GAAG,KAAK,SAAS,OAAO,OAAO,IAAI,EAAE;AAAA,IACjF,WACS,SAAS,GAAG,GAAG,KAAK,SAAS,GAAG,GAAG,GAAG;AAC3C,aAAO,EAAE,OAAO,KAAK,KAAK,IAAI;AAAA,IAClC,OACK;AACD,YAAM,IAAI,MAAM,8CAA8C,GAAG,KAAK,GAAG,KAAK,KAAK,KAAK,IAAI,GAAG;AAAA,IACnG;AAAA,EACJ;AAVS;AAWT,EAAAA,OAAM,SAAS;AAIf,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,cAAc,SAAS,KAAK,SAAS,GAAG,UAAU,KAAK,KAAK,SAAS,GAAG,UAAU,GAAG;AAAA,EACnG;AAHS;AAIT,EAAAA,OAAM,KAAK;AACf,GAAG,UAAU,QAAQ,CAAC,EAAE;AAKjB,IAAI;AAAA,CACV,SAAUC,WAAU;AAMjB,WAAS,OAAO,KAAK,OAAO;AACxB,WAAO,EAAE,KAAK,MAAM;AAAA,EACxB;AAFS;AAGT,EAAAA,UAAS,SAAS;AAIlB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,cAAc,SAAS,KAAK,MAAM,GAAG,UAAU,KAAK,MAAM,GAAG,OAAO,UAAU,GAAG,KAAK,GAAG,UAAU,UAAU,GAAG;AAAA,EAC9H;AAHS;AAIT,EAAAA,UAAS,KAAK;AAClB,GAAG,aAAa,WAAW,CAAC,EAAE;AAKvB,IAAI;AAAA,CACV,SAAUC,eAAc;AAQrB,WAAS,OAAO,WAAW,aAAa,sBAAsB,sBAAsB;AAChF,WAAO,EAAE,WAAW,aAAa,sBAAsB,qBAAqB;AAAA,EAChF;AAFS;AAGT,EAAAA,cAAa,SAAS;AAItB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,cAAc,SAAS,KAAK,MAAM,GAAG,UAAU,WAAW,KAAK,GAAG,OAAO,UAAU,SAAS,KAC/F,MAAM,GAAG,UAAU,oBAAoB,MACtC,MAAM,GAAG,UAAU,oBAAoB,KAAK,GAAG,UAAU,UAAU,oBAAoB;AAAA,EACnG;AALS;AAMT,EAAAA,cAAa,KAAK;AACtB,GAAG,iBAAiB,eAAe,CAAC,EAAE;AAK/B,IAAI;AAAA,CACV,SAAUC,QAAO;AAId,WAAS,OAAO,KAAK,OAAO,MAAM,OAAO;AACrC,WAAO;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AAPS;AAQT,EAAAA,OAAM,SAAS;AAIf,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,YAAY,UAAU,KAAK,GAAG,CAAC,KACjE,GAAG,YAAY,UAAU,OAAO,GAAG,CAAC,KACpC,GAAG,YAAY,UAAU,MAAM,GAAG,CAAC,KACnC,GAAG,YAAY,UAAU,OAAO,GAAG,CAAC;AAAA,EAC/C;AANS;AAOT,EAAAA,OAAM,KAAK;AACf,GAAG,UAAU,QAAQ,CAAC,EAAE;AAKjB,IAAI;AAAA,CACV,SAAUC,mBAAkB;AAIzB,WAAS,OAAO,OAAO,OAAO;AAC1B,WAAO;AAAA,MACH;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AALS;AAMT,EAAAA,kBAAiB,SAAS;AAI1B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,MAAM,GAAG,UAAU,KAAK,KAAK,MAAM,GAAG,UAAU,KAAK;AAAA,EAC/F;AAHS;AAIT,EAAAA,kBAAiB,KAAK;AAC1B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AAKvC,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAI1B,WAAS,OAAO,OAAO,UAAU,qBAAqB;AAClD,WAAO;AAAA,MACH;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AANS;AAOT,EAAAA,mBAAkB,SAAS;AAI3B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,OAAO,UAAU,KAAK,MACvD,GAAG,
UAAU,UAAU,QAAQ,KAAK,SAAS,GAAG,SAAS,OACzD,GAAG,UAAU,UAAU,mBAAmB,KAAK,GAAG,WAAW,UAAU,qBAAqB,SAAS,EAAE;AAAA,EACnH;AALS;AAMT,EAAAA,mBAAkB,KAAK;AAC3B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAIzC,IAAI;AAAA,CACV,SAAUC,mBAAkB;AAIzB,EAAAA,kBAAiB,UAAU;AAI3B,EAAAA,kBAAiB,UAAU;AAI3B,EAAAA,kBAAiB,SAAS;AAC9B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AAKvC,IAAI;AAAA,CACV,SAAUC,eAAc;AAIrB,WAAS,OAAO,WAAW,SAAS,gBAAgB,cAAc,MAAM,eAAe;AACnF,UAAM,SAAS;AAAA,MACX;AAAA,MACA;AAAA,IACJ;AACA,QAAI,GAAG,QAAQ,cAAc,GAAG;AAC5B,aAAO,iBAAiB;AAAA,IAC5B;AACA,QAAI,GAAG,QAAQ,YAAY,GAAG;AAC1B,aAAO,eAAe;AAAA,IAC1B;AACA,QAAI,GAAG,QAAQ,IAAI,GAAG;AAClB,aAAO,OAAO;AAAA,IAClB;AACA,QAAI,GAAG,QAAQ,aAAa,GAAG;AAC3B,aAAO,gBAAgB;AAAA,IAC3B;AACA,WAAO;AAAA,EACX;AAlBS;AAmBT,EAAAA,cAAa,SAAS;AAItB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,SAAS,UAAU,SAAS,KAAK,GAAG,SAAS,UAAU,SAAS,MACjG,GAAG,UAAU,UAAU,cAAc,KAAK,GAAG,SAAS,UAAU,cAAc,OAC9E,GAAG,UAAU,UAAU,YAAY,KAAK,GAAG,SAAS,UAAU,YAAY,OAC1E,GAAG,UAAU,UAAU,IAAI,KAAK,GAAG,OAAO,UAAU,IAAI;AAAA,EACpE;AANS;AAOT,EAAAA,cAAa,KAAK;AACtB,GAAG,iBAAiB,eAAe,CAAC,EAAE;AAK/B,IAAI;AAAA,CACV,SAAUC,+BAA8B;AAIrC,WAAS,OAAO,UAAU,SAAS;AAC/B,WAAO;AAAA,MACH;AAAA,MACA;AAAA,IACJ;AAAA,EACJ;AALS;AAMT,EAAAA,8BAA6B,SAAS;AAItC,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,SAAS,GAAG,UAAU,QAAQ,KAAK,GAAG,OAAO,UAAU,OAAO;AAAA,EAClG;AAHS;AAIT,EAAAA,8BAA6B,KAAK;AACtC,GAAG,iCAAiC,+BAA+B,CAAC,EAAE;AAI/D,IAAI;AAAA,CACV,SAAUC,qBAAoB;AAI3B,EAAAA,oBAAmB,QAAQ;AAI3B,EAAAA,oBAAmB,UAAU;AAI7B,EAAAA,oBAAmB,cAAc;AAIjC,EAAAA,oBAAmB,OAAO;AAC9B,GAAG,uBAAuB,qBAAqB,CAAC,EAAE;AAM3C,IAAI;AAAA,CACV,SAAUC,gBAAe;AAOtB,EAAAA,eAAc,cAAc;AAM5B,EAAAA,eAAc,aAAa;AAC/B,GAAG,kBAAkB,gBAAgB,CAAC,EAAE;AAMjC,IAAI;AAAA,CACV,SAAUC,kBAAiB;AACxB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,OAAO,UAAU,IAAI;AAAA,EAClE;AAHS;AAIT,EAAAA,iBAAgB,KAAK;AACzB,GAAG,oBAAoB,kBAAkB,CAAC,EAAE;AAKrC,IAAI;AAAA,CACV,SAAUC,aAAY;AAInB,WAAS,OAAO,OAAO,SAAS,UAAU,MAAM,QAAQ,oBAAoB;AACxE,QAAI,SAAS,EAAE,OAAO,QAAQ;AAC9B,QAAI,GAAG,QAAQ,QAAQ,GAAG;AACtB,aAAO,WAAW;AAAA,IACtB;AACA,QAAI,GAAG,QAAQ,IAAI,GAAG;AAClB,aAAO,OAAO;AAAA,IAClB;AACA,QAAI,GAAG,QAAQ,MAAM,GAAG;AACpB,aAAO,SAAS;AAAA,IACpB;AACA,QAAI,GAAG,QAAQ,kBAAkB,GAAG;AAChC,aAAO,qBAAqB;AAAA,IAChC;AACA,WAAO;AAAA,EACX;AAfS;AAgBT,EAAAA,YAAW,SAAS;AAIpB,WAAS,GAAG,OAAO;AACf,QAAI;AACJ,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KACpB,MAAM,GAAG,UAAU,KAAK,KACxB,GAAG,OAAO,UAAU,OAAO,MAC1B,GAAG,OAAO,UAAU,QAAQ,KAAK,GAAG,UAAU,UAAU,QAAQ,OAChE,GAAG,QAAQ,UAAU,IAAI,KAAK,GAAG,OAAO,UAAU,IAAI,KAAK,GAAG,UAAU,UAAU,IAAI,OACtF,GAAG,UAAU,UAAU,eAAe,KAAM,GAAG,QAAQ,KAAK,UAAU,qBAAqB,QAAQ,OAAO,SAAS,SAAS,GAAG,IAAI,OACnI,GAAG,OAAO,UAAU,MAAM,KAAK,GAAG,UAAU,UAAU,MAAM,OAC5D,GAAG,UAAU,UAAU,kBAAkB,KAAK,GAAG,WAAW,UAAU,oBAAoB,6BAA6B,EAAE;AAAA,EACrI;AAXS;AAYT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAK3B,IAAI;AAAA,CACV,SAAUC,UAAS;AAIhB,WAAS,OAAO,OAAO,YAAY,MAAM;AACrC,QAAI,SAAS,EAAE,OAAO,QAAQ;AAC9B,QAAI,GAAG,QAAQ,IAAI,KAAK,KAAK,SAAS,GAAG;AACrC,aAAO,YAAY;AAAA,IACvB;AACA,WAAO;AAAA,EACX;AANS;AAOT,EAAAA,SAAQ,SAAS;AAIjB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,KAAK,KAAK,GAAG,OAAO,UAAU,OAAO;AAAA,EAC7F;AAHS;AAIT,EAAAA,SAAQ,KAAK;AACjB,GAAG,YAAY,UAAU,CAAC,EAAE;AAKrB,IAAI;AAAA,CACV,SAAUC,WAAU;AAMjB,WAAS,QAAQ,OAAO,SAAS;AAC7B,WAAO,EAAE,OAAO,QAAQ;AAAA,EAC5B;AAFS;AAGT,EAAAA,UAAS,UAAU;AAMnB,WAAS,OAAO,UAAU,SAAS;AAC/B,WAAO,EAAE,OAAO,EAAE,OAAO,UAAU,KAAK,SAAS,GAAG,QAAQ;AAAA,EAChE;AAFS;AAGT,EAAAA,UAAS,SAAS;AAKlB,WAAS,IAAI,OAAO;AAChB,WAAO,EAAE,OAAO,SAAS,GAAG;AAAA,EAChC;AAFS;AAGT,EAAAA,UAAS,MAAM;AACf,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAC
1B,GAAG,OAAO,UAAU,OAAO,KAC3B,MAAM,GAAG,UAAU,KAAK;AAAA,EACnC;AALS;AAMT,EAAAA,UAAS,KAAK;AAClB,GAAG,aAAa,WAAW,CAAC,EAAE;AACvB,IAAI;AAAA,CACV,SAAUC,mBAAkB;AACzB,WAAS,OAAO,OAAO,mBAAmB,aAAa;AACnD,UAAM,SAAS,EAAE,MAAM;AACvB,QAAI,sBAAsB,QAAW;AACjC,aAAO,oBAAoB;AAAA,IAC/B;AACA,QAAI,gBAAgB,QAAW;AAC3B,aAAO,cAAc;AAAA,IACzB;AACA,WAAO;AAAA,EACX;AATS;AAUT,EAAAA,kBAAiB,SAAS;AAC1B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,GAAG,OAAO,UAAU,KAAK,MAC1D,GAAG,QAAQ,UAAU,iBAAiB,KAAK,UAAU,sBAAsB,YAC3E,GAAG,OAAO,UAAU,WAAW,KAAK,UAAU,gBAAgB;AAAA,EACvE;AALS;AAMT,EAAAA,kBAAiB,KAAK;AAC1B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AACvC,IAAI;AAAA,CACV,SAAUC,6BAA4B;AACnC,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,OAAO,SAAS;AAAA,EAC9B;AAHS;AAIT,EAAAA,4BAA2B,KAAK;AACpC,GAAG,+BAA+B,6BAA6B,CAAC,EAAE;AAC3D,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAQ1B,WAAS,QAAQ,OAAO,SAAS,YAAY;AACzC,WAAO,EAAE,OAAO,SAAS,cAAc,WAAW;AAAA,EACtD;AAFS;AAGT,EAAAA,mBAAkB,UAAU;AAQ5B,WAAS,OAAO,UAAU,SAAS,YAAY;AAC3C,WAAO,EAAE,OAAO,EAAE,OAAO,UAAU,KAAK,SAAS,GAAG,SAAS,cAAc,WAAW;AAAA,EAC1F;AAFS;AAGT,EAAAA,mBAAkB,SAAS;AAO3B,WAAS,IAAI,OAAO,YAAY;AAC5B,WAAO,EAAE,OAAO,SAAS,IAAI,cAAc,WAAW;AAAA,EAC1D;AAFS;AAGT,EAAAA,mBAAkB,MAAM;AACxB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,SAAS,GAAG,SAAS,MAAM,iBAAiB,GAAG,UAAU,YAAY,KAAK,2BAA2B,GAAG,UAAU,YAAY;AAAA,EACzI;AAHS;AAIT,EAAAA,mBAAkB,KAAK;AAC3B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAKzC,IAAI;AAAA,CACV,SAAUC,mBAAkB;AAIzB,WAAS,OAAO,cAAc,OAAO;AACjC,WAAO,EAAE,cAAc,MAAM;AAAA,EACjC;AAFS;AAGT,EAAAA,kBAAiB,SAAS;AAC1B,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KACpB,wCAAwC,GAAG,UAAU,YAAY,KACjE,MAAM,QAAQ,UAAU,KAAK;AAAA,EACxC;AALS;AAMT,EAAAA,kBAAiB,KAAK;AAC1B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AACvC,IAAI;AAAA,CACV,SAAUC,aAAY;AACnB,WAAS,OAAO,KAAK,SAAS,YAAY;AACtC,QAAI,SAAS;AAAA,MACT,MAAM;AAAA,MACN;AAAA,IACJ;AACA,QAAI,YAAY,WAAc,QAAQ,cAAc,UAAa,QAAQ,mBAAmB,SAAY;AACpG,aAAO,UAAU;AAAA,IACrB;AACA,QAAI,eAAe,QAAW;AAC1B,aAAO,eAAe;AAAA,IAC1B;AACA,WAAO;AAAA,EACX;AAZS;AAaT,EAAAA,YAAW,SAAS;AACpB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,aAAa,UAAU,SAAS,YAAY,GAAG,OAAO,UAAU,GAAG,MAAM,UAAU,YAAY,WAChG,UAAU,QAAQ,cAAc,UAAa,GAAG,QAAQ,UAAU,QAAQ,SAAS,OAAO,UAAU,QAAQ,mBAAmB,UAAa,GAAG,QAAQ,UAAU,QAAQ,cAAc,QAAS,UAAU,iBAAiB,UAAa,2BAA2B,GAAG,UAAU,YAAY;AAAA,EACtS;AAJS;AAKT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAC3B,IAAI;AAAA,CACV,SAAUC,aAAY;AACnB,WAAS,OAAO,QAAQ,QAAQ,SAAS,YAAY;AACjD,QAAI,SAAS;AAAA,MACT,MAAM;AAAA,MACN;AAAA,MACA;AAAA,IACJ;AACA,QAAI,YAAY,WAAc,QAAQ,cAAc,UAAa,QAAQ,mBAAmB,SAAY;AACpG,aAAO,UAAU;AAAA,IACrB;AACA,QAAI,eAAe,QAAW;AAC1B,aAAO,eAAe;AAAA,IAC1B;AACA,WAAO;AAAA,EACX;AAbS;AAcT,EAAAA,YAAW,SAAS;AACpB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,aAAa,UAAU,SAAS,YAAY,GAAG,OAAO,UAAU,MAAM,KAAK,GAAG,OAAO,UAAU,MAAM,MAAM,UAAU,YAAY,WAClI,UAAU,QAAQ,cAAc,UAAa,GAAG,QAAQ,UAAU,QAAQ,SAAS,OAAO,UAAU,QAAQ,mBAAmB,UAAa,GAAG,QAAQ,UAAU,QAAQ,cAAc,QAAS,UAAU,iBAAiB,UAAa,2BAA2B,GAAG,UAAU,YAAY;AAAA,EACtS;AAJS;AAKT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAC3B,IAAI;AAAA,CACV,SAAUC,aAAY;AACnB,WAAS,OAAO,KAAK,SAAS,YAAY;AACtC,QAAI,SAAS;AAAA,MACT,MAAM;AAAA,MACN;AAAA,IACJ;AACA,QAAI,YAAY,WAAc,QAAQ,cAAc,UAAa,QAAQ,sBAAsB,SAAY;AACvG,aAAO,UAAU;AAAA,IACrB;AACA,QAAI,eAAe,QAAW;AAC1B,aAAO,eAAe;AAAA,IAC1B;AACA,WAAO;AAAA,EACX;AAZS;AAaT,EAAAA,YAAW,SAAS;AACpB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,aAAa,UAAU,SAAS,YAAY,GAAG,OAAO,UAAU,GAAG,MAAM,UAAU,YAAY,WAChG,UAAU,QAAQ,cAAc,UAAa,GAAG,QAAQ,UAAU,QAAQ,SAAS,OAAO,UAAU,QAAQ,sBAAsB,UAAa,GAAG,QAAQ,UAAU,QAAQ,iBAAiB,QAAS,UAAU,iBAAiB,UAAa,2BAA2B,GAAG,UAAU,YAAY;AAAA,EAC5S;AAJS;AAKT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAC3B,IAAI;AAAA,CACV,SAAUC,gBAAe;AACtB,
WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,cACF,UAAU,YAAY,UAAa,UAAU,oBAAoB,YACjE,UAAU,oBAAoB,UAAa,UAAU,gBAAgB,MAAM,CAAC,WAAW;AACpF,UAAI,GAAG,OAAO,OAAO,IAAI,GAAG;AACxB,eAAO,WAAW,GAAG,MAAM,KAAK,WAAW,GAAG,MAAM,KAAK,WAAW,GAAG,MAAM;AAAA,MACjF,OACK;AACD,eAAO,iBAAiB,GAAG,MAAM;AAAA,MACrC;AAAA,IACJ,CAAC;AAAA,EACT;AAZS;AAaT,EAAAA,eAAc,KAAK;AACvB,GAAG,kBAAkB,gBAAgB,CAAC,EAAE;AAuSjC,IAAI;AAAA,CACV,SAAUC,yBAAwB;AAK/B,WAAS,OAAO,KAAK;AACjB,WAAO,EAAE,IAAI;AAAA,EACjB;AAFS;AAGT,EAAAA,wBAAuB,SAAS;AAIhC,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,GAAG;AAAA,EAC3D;AAHS;AAIT,EAAAA,wBAAuB,KAAK;AAChC,GAAG,2BAA2B,yBAAyB,CAAC,EAAE;AAKnD,IAAI;AAAA,CACV,SAAUC,kCAAiC;AAMxC,WAAS,OAAO,KAAK,SAAS;AAC1B,WAAO,EAAE,KAAK,QAAQ;AAAA,EAC1B;AAFS;AAGT,EAAAA,iCAAgC,SAAS;AAIzC,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,GAAG,KAAK,GAAG,QAAQ,UAAU,OAAO;AAAA,EAC5F;AAHS;AAIT,EAAAA,iCAAgC,KAAK;AACzC,GAAG,oCAAoC,kCAAkC,CAAC,EAAE;AAKrE,IAAI;AAAA,CACV,SAAUC,0CAAyC;AAMhD,WAAS,OAAO,KAAK,SAAS;AAC1B,WAAO,EAAE,KAAK,QAAQ;AAAA,EAC1B;AAFS;AAGT,EAAAA,yCAAwC,SAAS;AAIjD,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,GAAG,MAAM,UAAU,YAAY,QAAQ,GAAG,QAAQ,UAAU,OAAO;AAAA,EAC3H;AAHS;AAIT,EAAAA,yCAAwC,KAAK;AACjD,GAAG,4CAA4C,0CAA0C,CAAC,EAAE;AAKrF,IAAI;AAAA,CACV,SAAUC,mBAAkB;AAQzB,WAAS,OAAO,KAAK,YAAY,SAAS,MAAM;AAC5C,WAAO,EAAE,KAAK,YAAY,SAAS,KAAK;AAAA,EAC5C;AAFS;AAGT,EAAAA,kBAAiB,SAAS;AAI1B,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,GAAG,KAAK,GAAG,OAAO,UAAU,UAAU,KAAK,GAAG,QAAQ,UAAU,OAAO,KAAK,GAAG,OAAO,UAAU,IAAI;AAAA,EAC5J;AAHS;AAIT,EAAAA,kBAAiB,KAAK;AAC1B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AAQvC,IAAI;AAAA,CACV,SAAUC,aAAY;AAInB,EAAAA,YAAW,YAAY;AAIvB,EAAAA,YAAW,WAAW;AAItB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,cAAcA,YAAW,aAAa,cAAcA,YAAW;AAAA,EAC1E;AAHS;AAIT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAC3B,IAAI;AAAA,CACV,SAAUC,gBAAe;AAItB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,KAAK,KAAK,WAAW,GAAG,UAAU,IAAI,KAAK,GAAG,OAAO,UAAU,KAAK;AAAA,EAChG;AAHS;AAIT,EAAAA,eAAc,KAAK;AACvB,GAAG,kBAAkB,gBAAgB,CAAC,EAAE;AAIjC,IAAI;AAAA,CACV,SAAUC,qBAAoB;AAC3B,EAAAA,oBAAmB,OAAO;AAC1B,EAAAA,oBAAmB,SAAS;AAC5B,EAAAA,oBAAmB,WAAW;AAC9B,EAAAA,oBAAmB,cAAc;AACjC,EAAAA,oBAAmB,QAAQ;AAC3B,EAAAA,oBAAmB,WAAW;AAC9B,EAAAA,oBAAmB,QAAQ;AAC3B,EAAAA,oBAAmB,YAAY;AAC/B,EAAAA,oBAAmB,SAAS;AAC5B,EAAAA,oBAAmB,WAAW;AAC9B,EAAAA,oBAAmB,OAAO;AAC1B,EAAAA,oBAAmB,QAAQ;AAC3B,EAAAA,oBAAmB,OAAO;AAC1B,EAAAA,oBAAmB,UAAU;AAC7B,EAAAA,oBAAmB,UAAU;AAC7B,EAAAA,oBAAmB,QAAQ;AAC3B,EAAAA,oBAAmB,OAAO;AAC1B,EAAAA,oBAAmB,YAAY;AAC/B,EAAAA,oBAAmB,SAAS;AAC5B,EAAAA,oBAAmB,aAAa;AAChC,EAAAA,oBAAmB,WAAW;AAC9B,EAAAA,oBAAmB,SAAS;AAC5B,EAAAA,oBAAmB,QAAQ;AAC3B,EAAAA,oBAAmB,WAAW;AAC9B,EAAAA,oBAAmB,gBAAgB;AACvC,GAAG,uBAAuB,qBAAqB,CAAC,EAAE;AAK3C,IAAI;AAAA,CACV,SAAUC,mBAAkB;AAIzB,EAAAA,kBAAiB,YAAY;AAW7B,EAAAA,kBAAiB,UAAU;AAC/B,GAAG,qBAAqB,mBAAmB,CAAC,EAAE;AAOvC,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAI1B,EAAAA,mBAAkB,aAAa;AACnC,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAMzC,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAI1B,WAAS,OAAO,SAAS,QAAQ,SAAS;AACtC,WAAO,EAAE,SAAS,QAAQ,QAAQ;AAAA,EACtC;AAFS;AAGT,EAAAA,mBAAkB,SAAS;AAI3B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,aAAa,GAAG,OAAO,UAAU,OAAO,KAAK,MAAM,GAAG,UAAU,MAAM,KAAK,MAAM,GAAG,UAAU,OAAO;AAAA,EAChH;AAHS;AAIT,EAAAA,mBAAkB,KAAK;AAC3B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAOzC,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAQvB,EAAAA,gBAAe,OAAO;AAUtB,EAAAA,gBAAe,oBAAoB;AACvC,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AACnC,IAAI;AAAA,CACV,SAAUC,6BAA4B;AACnC,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,cAAc,GAAG,OAAO,UAAU,MAAM,KAAK,UAAU,WAAW,YACpE,GAAG
,OAAO,UAAU,WAAW,KAAK,UAAU,gBAAgB;AAAA,EACvE;AAJS;AAKT,EAAAA,4BAA2B,KAAK;AACpC,GAAG,+BAA+B,6BAA6B,CAAC,EAAE;AAK3D,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAKvB,WAAS,OAAO,OAAO;AACnB,WAAO,EAAE,MAAM;AAAA,EACnB;AAFS;AAGT,EAAAA,gBAAe,SAAS;AAC5B,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AAKnC,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAOvB,WAAS,OAAO,OAAO,cAAc;AACjC,WAAO,EAAE,OAAO,QAAQ,QAAQ,CAAC,GAAG,cAAc,CAAC,CAAC,aAAa;AAAA,EACrE;AAFS;AAGT,EAAAA,gBAAe,SAAS;AAC5B,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AACnC,IAAI;AAAA,CACV,SAAUC,eAAc;AAMrB,WAAS,cAAc,WAAW;AAC9B,WAAO,UAAU,QAAQ,yBAAyB,MAAM;AAAA,EAC5D;AAFS;AAGT,EAAAA,cAAa,gBAAgB;AAI7B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,OAAO,SAAS,KAAM,GAAG,cAAc,SAAS,KAAK,GAAG,OAAO,UAAU,QAAQ,KAAK,GAAG,OAAO,UAAU,KAAK;AAAA,EAC7H;AAHS;AAIT,EAAAA,cAAa,KAAK;AACtB,GAAG,iBAAiB,eAAe,CAAC,EAAE;AAC/B,IAAI;AAAA,CACV,SAAUC,QAAO;AAId,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,CAAC,CAAC,aAAa,GAAG,cAAc,SAAS,MAAM,cAAc,GAAG,UAAU,QAAQ,KACrF,aAAa,GAAG,UAAU,QAAQ,KAClC,GAAG,WAAW,UAAU,UAAU,aAAa,EAAE,OAAO,MAAM,UAAU,UAAa,MAAM,GAAG,MAAM,KAAK;AAAA,EACjH;AALS;AAMT,EAAAA,OAAM,KAAK;AACf,GAAG,UAAU,QAAQ,CAAC,EAAE;AAKjB,IAAI;AAAA,CACV,SAAUC,uBAAsB;AAO7B,WAAS,OAAO,OAAO,eAAe;AAClC,WAAO,gBAAgB,EAAE,OAAO,cAAc,IAAI,EAAE,MAAM;AAAA,EAC9D;AAFS;AAGT,EAAAA,sBAAqB,SAAS;AAClC,GAAG,yBAAyB,uBAAuB,CAAC,EAAE;AAK/C,IAAI;AAAA,CACV,SAAUC,uBAAsB;AAC7B,WAAS,OAAO,OAAO,kBAAkB,YAAY;AACjD,QAAI,SAAS,EAAE,MAAM;AACrB,QAAI,GAAG,QAAQ,aAAa,GAAG;AAC3B,aAAO,gBAAgB;AAAA,IAC3B;AACA,QAAI,GAAG,QAAQ,UAAU,GAAG;AACxB,aAAO,aAAa;AAAA,IACxB,OACK;AACD,aAAO,aAAa,CAAC;AAAA,IACzB;AACA,WAAO;AAAA,EACX;AAZS;AAaT,EAAAA,sBAAqB,SAAS;AAClC,GAAG,yBAAyB,uBAAuB,CAAC,EAAE;AAI/C,IAAI;AAAA,CACV,SAAUC,wBAAuB;AAI9B,EAAAA,uBAAsB,OAAO;AAI7B,EAAAA,uBAAsB,OAAO;AAI7B,EAAAA,uBAAsB,QAAQ;AAClC,GAAG,0BAA0B,wBAAwB,CAAC,EAAE;AAKjD,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAM1B,WAAS,OAAO,OAAO,MAAM;AACzB,QAAI,SAAS,EAAE,MAAM;AACrB,QAAI,GAAG,OAAO,IAAI,GAAG;AACjB,aAAO,OAAO;AAAA,IAClB;AACA,WAAO;AAAA,EACX;AANS;AAOT,EAAAA,mBAAkB,SAAS;AAC/B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAIzC,IAAI;AAAA,CACV,SAAUC,aAAY;AACnB,EAAAA,YAAW,OAAO;AAClB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,YAAY;AACvB,EAAAA,YAAW,UAAU;AACrB,EAAAA,YAAW,QAAQ;AACnB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,WAAW;AACtB,EAAAA,YAAW,QAAQ;AACnB,EAAAA,YAAW,cAAc;AACzB,EAAAA,YAAW,OAAO;AAClB,EAAAA,YAAW,YAAY;AACvB,EAAAA,YAAW,WAAW;AACtB,EAAAA,YAAW,WAAW;AACtB,EAAAA,YAAW,WAAW;AACtB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,UAAU;AACrB,EAAAA,YAAW,QAAQ;AACnB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,MAAM;AACjB,EAAAA,YAAW,OAAO;AAClB,EAAAA,YAAW,aAAa;AACxB,EAAAA,YAAW,SAAS;AACpB,EAAAA,YAAW,QAAQ;AACnB,EAAAA,YAAW,WAAW;AACtB,EAAAA,YAAW,gBAAgB;AAC/B,GAAG,eAAe,aAAa,CAAC,EAAE;AAM3B,IAAI;AAAA,CACV,SAAUC,YAAW;AAIlB,EAAAA,WAAU,aAAa;AAC3B,GAAG,cAAc,YAAY,CAAC,EAAE;AACzB,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAU1B,WAAS,OAAO,MAAM,MAAM,OAAO,KAAK,eAAe;AACnD,QAAI,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,UAAU,EAAE,KAAK,MAAM;AAAA,IAC3B;AACA,QAAI,eAAe;AACf,aAAO,gBAAgB;AAAA,IAC3B;AACA,WAAO;AAAA,EACX;AAVS;AAWT,EAAAA,mBAAkB,SAAS;AAC/B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AACzC,IAAI;AAAA,CACV,SAAUC,kBAAiB;AAUxB,WAAS,OAAO,MAAM,MAAM,KAAK,OAAO;AACpC,WAAO,UAAU,SACX,EAAE,MAAM,MAAM,UAAU,EAAE,KAAK,MAAM,EAAE,IACvC,EAAE,MAAM,MAAM,UAAU,EAAE,IAAI,EAAE;AAAA,EAC1C;AAJS;AAKT,EAAAA,iBAAgB,SAAS;AAC7B,GAAG,oBAAoB,kBAAkB,CAAC,EAAE;AACrC,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAWvB,WAAS,OAAO,MAAM,QAAQ,MAAM,OAAO,gBAAgB,UAAU;AACjE,QAAI,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACJ;AACA,QAAI,aAAa,QAAW;AACxB,aAAO,WAAW;AAAA,IACtB;AACA,WAAO;AAAA,EACX;AAZS;AAaT,EAAAA,gBAAe,SAAS;AAIxB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,aACH,GAAG,OAAO,UAAU,IAAI,KAAK,GAAG,OAAO,UAAU,IAAI,KA
CrD,MAAM,GAAG,UAAU,KAAK,KAAK,MAAM,GAAG,UAAU,cAAc,MAC7D,UAAU,WAAW,UAAa,GAAG,OAAO,UAAU,MAAM,OAC5D,UAAU,eAAe,UAAa,GAAG,QAAQ,UAAU,UAAU,OACrE,UAAU,aAAa,UAAa,MAAM,QAAQ,UAAU,QAAQ,OACpE,UAAU,SAAS,UAAa,MAAM,QAAQ,UAAU,IAAI;AAAA,EACrE;AATS;AAUT,EAAAA,gBAAe,KAAK;AACxB,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AAInC,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAIvB,EAAAA,gBAAe,QAAQ;AAIvB,EAAAA,gBAAe,WAAW;AAI1B,EAAAA,gBAAe,WAAW;AAY1B,EAAAA,gBAAe,kBAAkB;AAWjC,EAAAA,gBAAe,iBAAiB;AAahC,EAAAA,gBAAe,kBAAkB;AAMjC,EAAAA,gBAAe,SAAS;AAIxB,EAAAA,gBAAe,wBAAwB;AASvC,EAAAA,gBAAe,eAAe;AAClC,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AAMnC,IAAI;AAAA,CACV,SAAUC,wBAAuB;AAI9B,EAAAA,uBAAsB,UAAU;AAOhC,EAAAA,uBAAsB,YAAY;AACtC,GAAG,0BAA0B,wBAAwB,CAAC,EAAE;AAKjD,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAI1B,WAAS,OAAO,aAAa,MAAM,aAAa;AAC5C,QAAI,SAAS,EAAE,YAAY;AAC3B,QAAI,SAAS,UAAa,SAAS,MAAM;AACrC,aAAO,OAAO;AAAA,IAClB;AACA,QAAI,gBAAgB,UAAa,gBAAgB,MAAM;AACnD,aAAO,cAAc;AAAA,IACzB;AACA,WAAO;AAAA,EACX;AATS;AAUT,EAAAA,mBAAkB,SAAS;AAI3B,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,WAAW,UAAU,aAAa,WAAW,EAAE,MAC1E,UAAU,SAAS,UAAa,GAAG,WAAW,UAAU,MAAM,GAAG,MAAM,OACvE,UAAU,gBAAgB,UAAa,UAAU,gBAAgB,sBAAsB,WAAW,UAAU,gBAAgB,sBAAsB;AAAA,EAC9J;AALS;AAMT,EAAAA,mBAAkB,KAAK;AAC3B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AACzC,IAAI;AAAA,CACV,SAAUC,aAAY;AACnB,WAAS,OAAO,OAAO,qBAAqB,MAAM;AAC9C,QAAI,SAAS,EAAE,MAAM;AACrB,QAAI,YAAY;AAChB,QAAI,OAAO,wBAAwB,UAAU;AACzC,kBAAY;AACZ,aAAO,OAAO;AAAA,IAClB,WACS,QAAQ,GAAG,mBAAmB,GAAG;AACtC,aAAO,UAAU;AAAA,IACrB,OACK;AACD,aAAO,OAAO;AAAA,IAClB;AACA,QAAI,aAAa,SAAS,QAAW;AACjC,aAAO,OAAO;AAAA,IAClB;AACA,WAAO;AAAA,EACX;AAjBS;AAkBT,EAAAA,YAAW,SAAS;AACpB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,aAAa,GAAG,OAAO,UAAU,KAAK,MACxC,UAAU,gBAAgB,UAAa,GAAG,WAAW,UAAU,aAAa,WAAW,EAAE,OACzF,UAAU,SAAS,UAAa,GAAG,OAAO,UAAU,IAAI,OACxD,UAAU,SAAS,UAAa,UAAU,YAAY,YACtD,UAAU,YAAY,UAAa,QAAQ,GAAG,UAAU,OAAO,OAC/D,UAAU,gBAAgB,UAAa,GAAG,QAAQ,UAAU,WAAW,OACvE,UAAU,SAAS,UAAa,cAAc,GAAG,UAAU,IAAI;AAAA,EACxE;AATS;AAUT,EAAAA,YAAW,KAAK;AACpB,GAAG,eAAe,aAAa,CAAC,EAAE;AAK3B,IAAI;AAAA,CACV,SAAUC,WAAU;AAIjB,WAAS,OAAO,OAAO,MAAM;AACzB,QAAI,SAAS,EAAE,MAAM;AACrB,QAAI,GAAG,QAAQ,IAAI,GAAG;AAClB,aAAO,OAAO;AAAA,IAClB;AACA,WAAO;AAAA,EACX;AANS;AAOT,EAAAA,UAAS,SAAS;AAIlB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,MAAM,GAAG,UAAU,KAAK,MAAM,GAAG,UAAU,UAAU,OAAO,KAAK,QAAQ,GAAG,UAAU,OAAO;AAAA,EACjI;AAHS;AAIT,EAAAA,UAAS,KAAK;AAClB,GAAG,aAAa,WAAW,CAAC,EAAE;AAKvB,IAAI;AAAA,CACV,SAAUC,oBAAmB;AAI1B,WAAS,OAAO,SAAS,cAAc;AACnC,WAAO,EAAE,SAAS,aAAa;AAAA,EACnC;AAFS;AAGT,EAAAA,mBAAkB,SAAS;AAI3B,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,SAAS,UAAU,OAAO,KAAK,GAAG,QAAQ,UAAU,YAAY;AAAA,EACvG;AAHS;AAIT,EAAAA,mBAAkB,KAAK;AAC3B,GAAG,sBAAsB,oBAAoB,CAAC,EAAE;AAKzC,IAAI;AAAA,CACV,SAAUC,eAAc;AAIrB,WAAS,OAAO,OAAO,QAAQ,MAAM;AACjC,WAAO,EAAE,OAAO,QAAQ,KAAK;AAAA,EACjC;AAFS;AAGT,EAAAA,cAAa,SAAS;AAItB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,MAAM,GAAG,UAAU,KAAK,MAAM,GAAG,UAAU,UAAU,MAAM,KAAK,GAAG,OAAO,UAAU,MAAM;AAAA,EAC9H;AAHS;AAIT,EAAAA,cAAa,KAAK;AACtB,GAAG,iBAAiB,eAAe,CAAC,EAAE;AAK/B,IAAI;AAAA,CACV,SAAUC,iBAAgB;AAMvB,WAAS,OAAO,OAAO,QAAQ;AAC3B,WAAO,EAAE,OAAO,OAAO;AAAA,EAC3B;AAFS;AAGT,EAAAA,gBAAe,SAAS;AACxB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,cAAc,SAAS,KAAK,MAAM,GAAG,UAAU,KAAK,MAAM,UAAU,WAAW,UAAaA,gBAAe,GAAG,UAAU,MAAM;AAAA,EAC5I;AAHS;AAIT,EAAAA,gBAAe,KAAK;AACxB,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AAQnC,IAAI;AAAA,CACV,SAAUC,qBAAoB;AAC3B,EAAAA,oBAAmB,WAAW,IAAI;AAKlC,EAAAA,oBAAmB,MAAM,IAAI;AAC7B,EAAAA,oBAAmB,OAAO,IAAI;AAC9B,EAAAA,oBAAmB,MAAM,IAAI;AAC7B,EAAAA,oBAAmB,WAAW,IAAI;AAClC,EAAAA,oBAAmB,QAAQ,IAA
I;AAC/B,EAAAA,oBAAmB,eAAe,IAAI;AACtC,EAAAA,oBAAmB,WAAW,IAAI;AAClC,EAAAA,oBAAmB,UAAU,IAAI;AACjC,EAAAA,oBAAmB,UAAU,IAAI;AACjC,EAAAA,oBAAmB,YAAY,IAAI;AACnC,EAAAA,oBAAmB,OAAO,IAAI;AAC9B,EAAAA,oBAAmB,UAAU,IAAI;AACjC,EAAAA,oBAAmB,QAAQ,IAAI;AAC/B,EAAAA,oBAAmB,OAAO,IAAI;AAC9B,EAAAA,oBAAmB,SAAS,IAAI;AAChC,EAAAA,oBAAmB,UAAU,IAAI;AACjC,EAAAA,oBAAmB,SAAS,IAAI;AAChC,EAAAA,oBAAmB,QAAQ,IAAI;AAC/B,EAAAA,oBAAmB,QAAQ,IAAI;AAC/B,EAAAA,oBAAmB,QAAQ,IAAI;AAC/B,EAAAA,oBAAmB,UAAU,IAAI;AAIjC,EAAAA,oBAAmB,WAAW,IAAI;AACtC,GAAG,uBAAuB,qBAAqB,CAAC,EAAE;AAQ3C,IAAI;AAAA,CACV,SAAUC,yBAAwB;AAC/B,EAAAA,wBAAuB,aAAa,IAAI;AACxC,EAAAA,wBAAuB,YAAY,IAAI;AACvC,EAAAA,wBAAuB,UAAU,IAAI;AACrC,EAAAA,wBAAuB,QAAQ,IAAI;AACnC,EAAAA,wBAAuB,YAAY,IAAI;AACvC,EAAAA,wBAAuB,UAAU,IAAI;AACrC,EAAAA,wBAAuB,OAAO,IAAI;AAClC,EAAAA,wBAAuB,cAAc,IAAI;AACzC,EAAAA,wBAAuB,eAAe,IAAI;AAC1C,EAAAA,wBAAuB,gBAAgB,IAAI;AAC/C,GAAG,2BAA2B,yBAAyB,CAAC,EAAE;AAInD,IAAI;AAAA,CACV,SAAUC,iBAAgB;AACvB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,MAAM,UAAU,aAAa,UAAa,OAAO,UAAU,aAAa,aACrG,MAAM,QAAQ,UAAU,IAAI,MAAM,UAAU,KAAK,WAAW,KAAK,OAAO,UAAU,KAAK,CAAC,MAAM;AAAA,EACtG;AAJS;AAKT,EAAAA,gBAAe,KAAK;AACxB,GAAG,mBAAmB,iBAAiB,CAAC,EAAE;AAMnC,IAAI;AAAA,CACV,SAAUC,kBAAiB;AAIxB,WAAS,OAAO,OAAO,MAAM;AACzB,WAAO,EAAE,OAAO,KAAK;AAAA,EACzB;AAFS;AAGT,EAAAA,iBAAgB,SAAS;AACzB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,cAAc,UAAa,cAAc,QAAQ,MAAM,GAAG,UAAU,KAAK,KAAK,GAAG,OAAO,UAAU,IAAI;AAAA,EACjH;AAHS;AAIT,EAAAA,iBAAgB,KAAK;AACzB,GAAG,oBAAoB,kBAAkB,CAAC,EAAE;AAMrC,IAAI;AAAA,CACV,SAAUC,4BAA2B;AAIlC,WAAS,OAAO,OAAO,cAAc,qBAAqB;AACtD,WAAO,EAAE,OAAO,cAAc,oBAAoB;AAAA,EACtD;AAFS;AAGT,EAAAA,2BAA0B,SAAS;AACnC,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,cAAc,UAAa,cAAc,QAAQ,MAAM,GAAG,UAAU,KAAK,KAAK,GAAG,QAAQ,UAAU,mBAAmB,MACrH,GAAG,OAAO,UAAU,YAAY,KAAK,UAAU,iBAAiB;AAAA,EAC5E;AAJS;AAKT,EAAAA,2BAA0B,KAAK;AACnC,GAAG,8BAA8B,4BAA4B,CAAC,EAAE;AAMzD,IAAI;AAAA,CACV,SAAUC,mCAAkC;AAIzC,WAAS,OAAO,OAAO,YAAY;AAC/B,WAAO,EAAE,OAAO,WAAW;AAAA,EAC/B;AAFS;AAGT,EAAAA,kCAAiC,SAAS;AAC1C,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,cAAc,UAAa,cAAc,QAAQ,MAAM,GAAG,UAAU,KAAK,MACxE,GAAG,OAAO,UAAU,UAAU,KAAK,UAAU,eAAe;AAAA,EACxE;AAJS;AAKT,EAAAA,kCAAiC,KAAK;AAC1C,GAAG,qCAAqC,mCAAmC,CAAC,EAAE;AAOvE,IAAI;AAAA,CACV,SAAUC,qBAAoB;AAI3B,WAAS,OAAO,SAAS,iBAAiB;AACtC,WAAO,EAAE,SAAS,gBAAgB;AAAA,EACtC;AAFS;AAGT,EAAAA,oBAAmB,SAAS;AAI5B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,QAAQ,SAAS,KAAK,MAAM,GAAG,MAAM,eAAe;AAAA,EAClE;AAHS;AAIT,EAAAA,oBAAmB,KAAK;AAC5B,GAAG,uBAAuB,qBAAqB,CAAC,EAAE;AAM3C,IAAI;AAAA,CACV,SAAUC,gBAAe;AAItB,EAAAA,eAAc,OAAO;AAIrB,EAAAA,eAAc,YAAY;AAC1B,WAAS,GAAG,OAAO;AACf,WAAO,UAAU,KAAK,UAAU;AAAA,EACpC;AAFS;AAGT,EAAAA,eAAc,KAAK;AACvB,GAAG,kBAAkB,gBAAgB,CAAC,EAAE;AACjC,IAAI;AAAA,CACV,SAAUC,qBAAoB;AAC3B,WAAS,OAAO,OAAO;AACnB,WAAO,EAAE,MAAM;AAAA,EACnB;AAFS;AAGT,EAAAA,oBAAmB,SAAS;AAC5B,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,MACzB,UAAU,YAAY,UAAa,GAAG,OAAO,UAAU,OAAO,KAAK,cAAc,GAAG,UAAU,OAAO,OACrG,UAAU,aAAa,UAAa,SAAS,GAAG,UAAU,QAAQ,OAClE,UAAU,YAAY,UAAa,QAAQ,GAAG,UAAU,OAAO;AAAA,EAC3E;AANS;AAOT,EAAAA,oBAAmB,KAAK;AAC5B,GAAG,uBAAuB,qBAAqB,CAAC,EAAE;AAC3C,IAAI;AAAA,CACV,SAAUC,YAAW;AAClB,WAAS,OAAO,UAAU,OAAO,MAAM;AACnC,UAAM,SAAS,EAAE,UAAU,MAAM;AACjC,QAAI,SAAS,QAAW;AACpB,aAAO,OAAO;AAAA,IAClB;AACA,WAAO;AAAA,EACX;AANS;AAOT,EAAAA,WAAU,SAAS;AACnB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,SAAS,GAAG,UAAU,QAAQ,MAC5D,GAAG,OAAO,UAAU,KAAK,KAAK,GAAG,WAAW,UAAU,OAAO,mBAAmB,EAAE,OAClF,UAAU,SAAS,UAAa,cAAc,GAAG,UAAU,IAAI,MAC/D,UAAU,cAAc,UAAc,GAAG,WAAW,UAAU,WAAW,SAAS,EAAE,MACpF,UAAU,YAAY,UAAa,GAAG,OAAO,UAAU,OAAO,KAAK,cAAc,GAAG,UAAU,O
AAO,OACrG,UAAU,gBAAgB,UAAa,GAAG,QAAQ,UAAU,WAAW,OACvE,UAAU,iBAAiB,UAAa,GAAG,QAAQ,UAAU,YAAY;AAAA,EACrF;AATS;AAUT,EAAAA,WAAU,KAAK;AACnB,GAAG,cAAc,YAAY,CAAC,EAAE;AACzB,IAAI;AAAA,CACV,SAAUC,cAAa;AACpB,WAAS,cAAc,OAAO;AAC1B,WAAO,EAAE,MAAM,WAAW,MAAM;AAAA,EACpC;AAFS;AAGT,EAAAA,aAAY,gBAAgB;AAChC,GAAG,gBAAgB,cAAc,CAAC,EAAE;AAC7B,IAAI;AAAA,CACV,SAAUC,uBAAsB;AAC7B,WAAS,OAAO,YAAY,YAAY,OAAO,SAAS;AACpD,WAAO,EAAE,YAAY,YAAY,OAAO,QAAQ;AAAA,EACpD;AAFS;AAGT,EAAAA,sBAAqB,SAAS;AAClC,GAAG,yBAAyB,uBAAuB,CAAC,EAAE;AAC/C,IAAI;AAAA,CACV,SAAUC,uBAAsB;AAC7B,WAAS,OAAO,OAAO;AACnB,WAAO,EAAE,MAAM;AAAA,EACnB;AAFS;AAGT,EAAAA,sBAAqB,SAAS;AAClC,GAAG,yBAAyB,uBAAuB,CAAC,EAAE;AAO/C,IAAI;AAAA,CACV,SAAUC,8BAA6B;AAIpC,EAAAA,6BAA4B,UAAU;AAItC,EAAAA,6BAA4B,YAAY;AAC5C,GAAG,gCAAgC,8BAA8B,CAAC,EAAE;AAC7D,IAAI;AAAA,CACV,SAAUC,yBAAwB;AAC/B,WAAS,OAAO,OAAO,MAAM;AACzB,WAAO,EAAE,OAAO,KAAK;AAAA,EACzB;AAFS;AAGT,EAAAA,wBAAuB,SAAS;AACpC,GAAG,2BAA2B,yBAAyB,CAAC,EAAE;AACnD,IAAI;AAAA,CACV,SAAUC,0BAAyB;AAChC,WAAS,OAAO,aAAa,wBAAwB;AACjD,WAAO,EAAE,aAAa,uBAAuB;AAAA,EACjD;AAFS;AAGT,EAAAA,yBAAwB,SAAS;AACrC,GAAG,4BAA4B,0BAA0B,CAAC,EAAE;AACrD,IAAI;AAAA,CACV,SAAUC,kBAAiB;AACxB,WAAS,GAAG,OAAO;AACf,UAAM,YAAY;AAClB,WAAO,GAAG,cAAc,SAAS,KAAK,IAAI,GAAG,UAAU,GAAG,KAAK,GAAG,OAAO,UAAU,IAAI;AAAA,EAC3F;AAHS;AAIT,EAAAA,iBAAgB,KAAK;AACzB,GAAG,oBAAoB,kBAAkB,CAAC,EAAE;AAKrC,IAAI;AAAA,CACV,SAAUC,eAAc;AAQrB,WAAS,OAAO,KAAK,YAAY,SAAS,SAAS;AAC/C,WAAO,IAAI,iBAAiB,KAAK,YAAY,SAAS,OAAO;AAAA,EACjE;AAFS;AAGT,EAAAA,cAAa,SAAS;AAItB,WAAS,GAAG,OAAO;AACf,QAAI,YAAY;AAChB,WAAO,GAAG,QAAQ,SAAS,KAAK,GAAG,OAAO,UAAU,GAAG,MAAM,GAAG,UAAU,UAAU,UAAU,KAAK,GAAG,OAAO,UAAU,UAAU,MAAM,GAAG,SAAS,UAAU,SAAS,KAC/J,GAAG,KAAK,UAAU,OAAO,KAAK,GAAG,KAAK,UAAU,UAAU,KAAK,GAAG,KAAK,UAAU,QAAQ,IAAI,OAAO;AAAA,EAC/G;AAJS;AAKT,EAAAA,cAAa,KAAK;AAClB,WAAS,WAAW,UAAU,OAAO;AACjC,QAAI,OAAO,SAAS,QAAQ;AAC5B,QAAI,cAAcC,WAAU,OAAO,CAAC,GAAG,MAAM;AACzC,UAAI,OAAO,EAAE,MAAM,MAAM,OAAO,EAAE,MAAM,MAAM;AAC9C,UAAI,SAAS,GAAG;AACZ,eAAO,EAAE,MAAM,MAAM,YAAY,EAAE,MAAM,MAAM;AAAA,MACnD;AACA,aAAO;AAAA,IACX,CAAC;AACD,QAAI,qBAAqB,KAAK;AAC9B,aAAS,IAAI,YAAY,SAAS,GAAG,KAAK,GAAG,KAAK;AAC9C,UAAI,IAAI,YAAY,CAAC;AACrB,UAAI,cAAc,SAAS,SAAS,EAAE,MAAM,KAAK;AACjD,UAAI,YAAY,SAAS,SAAS,EAAE,MAAM,GAAG;AAC7C,UAAI,aAAa,oBAAoB;AACjC,eAAO,KAAK,UAAU,GAAG,WAAW,IAAI,EAAE,UAAU,KAAK,UAAU,WAAW,KAAK,MAAM;AAAA,MAC7F,OACK;AACD,cAAM,IAAI,MAAM,kBAAkB;AAAA,MACtC;AACA,2BAAqB;AAAA,IACzB;AACA,WAAO;AAAA,EACX;AAvBS;AAwBT,EAAAD,cAAa,aAAa;AAC1B,WAASC,WAAU,MAAM,SAAS;AAC9B,QAAI,KAAK,UAAU,GAAG;AAElB,aAAO;AAAA,IACX;AACA,UAAM,IAAK,KAAK,SAAS,IAAK;AAC9B,UAAM,OAAO,KAAK,MAAM,GAAG,CAAC;AAC5B,UAAM,QAAQ,KAAK,MAAM,CAAC;AAC1B,IAAAA,WAAU,MAAM,OAAO;AACvB,IAAAA,WAAU,OAAO,OAAO;AACxB,QAAI,UAAU;AACd,QAAI,WAAW;AACf,QAAI,IAAI;AACR,WAAO,UAAU,KAAK,UAAU,WAAW,MAAM,QAAQ;AACrD,UAAI,MAAM,QAAQ,KAAK,OAAO,GAAG,MAAM,QAAQ,CAAC;AAChD,UAAI,OAAO,GAAG;AAEV,aAAK,GAAG,IAAI,KAAK,SAAS;AAAA,MAC9B,OACK;AAED,aAAK,GAAG,IAAI,MAAM,UAAU;AAAA,MAChC;AAAA,IACJ;AACA,WAAO,UAAU,KAAK,QAAQ;AAC1B,WAAK,GAAG,IAAI,KAAK,SAAS;AAAA,IAC9B;AACA,WAAO,WAAW,MAAM,QAAQ;AAC5B,WAAK,GAAG,IAAI,MAAM,UAAU;AAAA,IAChC;AACA,WAAO;AAAA,EACX;AA/BS,SAAAA,YAAA;AAgCb,GAAG,iBAAiB,eAAe,CAAC,EAAE;AAItC,IAAM,mBAAN,MAAuB;AAAA,EAjiEvB,OAiiEuB;AAAA;AAAA;AAAA,EACnB,YAAY,KAAK,YAAY,SAAS,SAAS;AAC3C,SAAK,OAAO;AACZ,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AAAA,EACxB;AAAA,EACA,IAAI,MAAM;AACN,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,IAAI,aAAa;AACb,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,IAAI,UAAU;AACV,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,QAAQ,OAAO;AACX,QAAI,OAAO;AACP,UAAI,QAAQ,KAAK,SAAS,MAAM,KAAK;AACrC,UAAI,MAAM,KAAK,SAAS,MAAM,GAAG;AACjC,aAAO,KAAK,SAAS,UAAU,OAAO,GAAG;AAAA,IAC7C;AACA,WAAO,KAAK;AAAA,
EAChB;AAAA,EACA,OAAO,OAAO,SAAS;AACnB,SAAK,WAAW,MAAM;AACtB,SAAK,WAAW;AAChB,SAAK,eAAe;AAAA,EACxB;AAAA,EACA,iBAAiB;AACb,QAAI,KAAK,iBAAiB,QAAW;AACjC,UAAI,cAAc,CAAC;AACnB,UAAI,OAAO,KAAK;AAChB,UAAI,cAAc;AAClB,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,YAAI,aAAa;AACb,sBAAY,KAAK,CAAC;AAClB,wBAAc;AAAA,QAClB;AACA,YAAI,KAAK,KAAK,OAAO,CAAC;AACtB,sBAAe,OAAO,QAAQ,OAAO;AACrC,YAAI,OAAO,QAAQ,IAAI,IAAI,KAAK,UAAU,KAAK,OAAO,IAAI,CAAC,MAAM,MAAM;AACnE;AAAA,QACJ;AAAA,MACJ;AACA,UAAI,eAAe,KAAK,SAAS,GAAG;AAChC,oBAAY,KAAK,KAAK,MAAM;AAAA,MAChC;AACA,WAAK,eAAe;AAAA,IACxB;AACA,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,WAAW,QAAQ;AACf,aAAS,KAAK,IAAI,KAAK,IAAI,QAAQ,KAAK,SAAS,MAAM,GAAG,CAAC;AAC3D,QAAI,cAAc,KAAK,eAAe;AACtC,QAAI,MAAM,GAAG,OAAO,YAAY;AAChC,QAAI,SAAS,GAAG;AACZ,aAAO,SAAS,OAAO,GAAG,MAAM;AAAA,IACpC;AACA,WAAO,MAAM,MAAM;AACf,UAAI,MAAM,KAAK,OAAO,MAAM,QAAQ,CAAC;AACrC,UAAI,YAAY,GAAG,IAAI,QAAQ;AAC3B,eAAO;AAAA,MACX,OACK;AACD,cAAM,MAAM;AAAA,MAChB;AAAA,IACJ;AAGA,QAAI,OAAO,MAAM;AACjB,WAAO,SAAS,OAAO,MAAM,SAAS,YAAY,IAAI,CAAC;AAAA,EAC3D;AAAA,EACA,SAAS,UAAU;AACf,QAAI,cAAc,KAAK,eAAe;AACtC,QAAI,SAAS,QAAQ,YAAY,QAAQ;AACrC,aAAO,KAAK,SAAS;AAAA,IACzB,WACS,SAAS,OAAO,GAAG;AACxB,aAAO;AAAA,IACX;AACA,QAAI,aAAa,YAAY,SAAS,IAAI;AAC1C,QAAI,iBAAkB,SAAS,OAAO,IAAI,YAAY,SAAU,YAAY,SAAS,OAAO,CAAC,IAAI,KAAK,SAAS;AAC/G,WAAO,KAAK,IAAI,KAAK,IAAI,aAAa,SAAS,WAAW,cAAc,GAAG,UAAU;AAAA,EACzF;AAAA,EACA,IAAI,YAAY;AACZ,WAAO,KAAK,eAAe,EAAE;AAAA,EACjC;AACJ;AACA,IAAI;AAAA,CACH,SAAUC,KAAI;AACX,QAAMC,YAAW,OAAO,UAAU;AAClC,WAAS,QAAQ,OAAO;AACpB,WAAO,OAAO,UAAU;AAAA,EAC5B;AAFS;AAGT,EAAAD,IAAG,UAAU;AACb,WAASE,WAAU,OAAO;AACtB,WAAO,OAAO,UAAU;AAAA,EAC5B;AAFS,SAAAA,YAAA;AAGT,EAAAF,IAAG,YAAYE;AACf,WAAS,QAAQ,OAAO;AACpB,WAAO,UAAU,QAAQ,UAAU;AAAA,EACvC;AAFS;AAGT,EAAAF,IAAG,UAAU;AACb,WAAS,OAAO,OAAO;AACnB,WAAOC,UAAS,KAAK,KAAK,MAAM;AAAA,EACpC;AAFS;AAGT,EAAAD,IAAG,SAAS;AACZ,WAAS,OAAO,OAAO;AACnB,WAAOC,UAAS,KAAK,KAAK,MAAM;AAAA,EACpC;AAFS;AAGT,EAAAD,IAAG,SAAS;AACZ,WAAS,YAAY,OAAO,KAAK,KAAK;AAClC,WAAOC,UAAS,KAAK,KAAK,MAAM,qBAAqB,OAAO,SAAS,SAAS;AAAA,EAClF;AAFS;AAGT,EAAAD,IAAG,cAAc;AACjB,WAASG,SAAQ,OAAO;AACpB,WAAOF,UAAS,KAAK,KAAK,MAAM,qBAAqB,eAAe,SAAS,SAAS;AAAA,EAC1F;AAFS,SAAAE,UAAA;AAGT,EAAAH,IAAG,UAAUG;AACb,WAASC,UAAS,OAAO;AACrB,WAAOH,UAAS,KAAK,KAAK,MAAM,qBAAqB,KAAK,SAAS,SAAS;AAAA,EAChF;AAFS,SAAAG,WAAA;AAGT,EAAAJ,IAAG,WAAWI;AACd,WAAS,KAAK,OAAO;AACjB,WAAOH,UAAS,KAAK,KAAK,MAAM;AAAA,EACpC;AAFS;AAGT,EAAAD,IAAG,OAAO;AACV,WAAS,cAAc,OAAO;AAI1B,WAAO,UAAU,QAAQ,OAAO,UAAU;AAAA,EAC9C;AALS;AAMT,EAAAA,IAAG,gBAAgB;AACnB,WAAS,WAAW,OAAO,OAAO;AAC9B,WAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,MAAM,KAAK;AAAA,EACpD;AAFS;AAGT,EAAAA,IAAG,aAAa;AACpB,GAAG,OAAO,KAAK,CAAC,EAAE;;;AC/pEZ,IAAO,iBAAP,MAAqB;EAd3B,OAc2B;;;EAA3B,cAAA;AAGY,SAAA,YAAoC,CAAA;EAmFhD;EAjFI,IAAY,UAAO;AACf,WAAO,KAAK,UAAU,KAAK,UAAU,SAAS,CAAC;EACnD;EAEA,cAAc,OAAa;AACvB,SAAK,WAAW,IAAI,gBAAgB,KAAK;AACzC,SAAK,SAAS,OAAO,KAAK;AAC1B,SAAK,YAAY,CAAC,KAAK,QAAQ;AAC/B,WAAO,KAAK;EAChB;EAEA,mBAAmB,SAAwB;AACvC,UAAM,gBAAgB,IAAI,qBAAoB;AAC9C,kBAAc,gBAAgB;AAC9B,kBAAc,OAAO,KAAK;AAC1B,SAAK,QAAQ,QAAQ,KAAK,aAAa;AACvC,SAAK,UAAU,KAAK,aAAa;AACjC,WAAO;EACX;EAEA,cAAc,OAAe,SAAwB;AACjD,UAAM,WAAW,IAAI,gBAAgB,MAAM,aAAa,MAAM,MAAM,QAAQ,aAAa,KAAK,GAAG,MAAM,WAAW,KAAK;AACvH,aAAS,gBAAgB;AACzB,aAAS,OAAO,KAAK;AACrB,SAAK,QAAQ,QAAQ,KAAK,QAAQ;AAClC,WAAO;EACX;EAEA,WAAW,MAAa;AACpB,UAAM,SAAS,KAAK;AACpB,QAAI,QAAQ;AACR,YAAM,QAAQ,OAAO,QAAQ,QAAQ,IAAI;AACzC,UAAI,SAAS,GAAG;AACZ,eAAO,QAAQ,OAAO,OAAO,CAAC;;;EAG1C;EAEA,UAAU,MAA+D;AACrE,UAAM,UAAmB,KAAK;AAG9B,QAAI,OAAO,KAAK,UAAU,UAAU;AAChC,WAAK,QAAQ,UAAmB;;AAEpC,SAAK,WAAW;AAChB,UAAM,OAAO,KAAK,UAAU,IAAG;AAG/B,SAAI,SAAI,QAAJ,SAAI,SAAA,SAAJ,KAAM,QAAQ,YAAW,GAAG;AAC5B,WAAK,WAAW,IAAI;;EAE5
B;EAEA,gBAAgB,cAAsB;AAClC,eAAW,SAAS,cAAc;AAC9B,YAAM,aAAa,IAAI,gBAAgB,MAAM,aAAa,MAAM,MAAM,QAAQ,aAAa,KAAK,GAAG,MAAM,WAAW,IAAI;AACxH,iBAAW,OAAO,KAAK;AACvB,WAAK,eAAe,KAAK,UAAU,UAAU;;EAErD;EAEQ,eAAe,MAAwB,OAAkB;AAC7D,UAAM,EAAE,QAAQ,YAAY,KAAK,SAAQ,IAAK;AAE9C,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK;AAC1C,YAAM,QAAQ,KAAK,QAAQ,CAAC;AAC5B,YAAM,EAAE,QAAQ,YAAY,KAAK,SAAQ,IAAK;AAC9C,UAAI,mBAAmB,KAAK,KAAK,aAAa,cAAc,WAAW,UAAU;AAC7E,aAAK,eAAe,OAAO,KAAK;AAChC;iBACO,YAAY,YAAY;AAC/B,aAAK,QAAQ,OAAO,GAAG,GAAG,KAAK;AAC/B;;;AAMR,SAAK,QAAQ,KAAK,KAAK;EAC3B;;AAGE,IAAgB,kBAAhB,MAA+B;EAtGrC,OAsGqC;;;;EAYjC,IAAI,SAAM;AACN,WAAO,KAAK;EAChB;;EAGA,IAAI,UAAO;AACP,WAAO,KAAK;EAChB;EAEA,IAAI,SAAM;AACN,WAAO;EACX;EAEA,IAAI,UAAO;;AACP,UAAM,OAAO,SAAO,KAAA,KAAK,cAAQ,QAAA,OAAA,SAAA,SAAA,GAAE,WAAU,WAAW,KAAK,YAAW,KAAA,KAAK,eAAS,QAAA,OAAA,SAAA,SAAA,GAAE;AACxF,QAAI,CAAC,MAAM;AACP,YAAM,IAAI,MAAM,yCAAyC;;AAE7D,WAAO;EACX;EAEA,IAAI,QAAQ,OAAc;AACtB,SAAK,WAAW;EACpB;;EAGA,IAAI,UAAO;AACP,WAAO,KAAK;EAChB;EAEA,IAAI,OAAI;AACJ,WAAO,KAAK,KAAK,SAAS,UAAU,KAAK,QAAQ,KAAK,GAAG;EAC7D;;AAGE,IAAO,kBAAP,cAA+B,gBAAe;EArJpD,OAqJoD;;;EAChD,IAAI,SAAM;AACN,WAAO,KAAK;EAChB;EAEA,IAAI,SAAM;AACN,WAAO,KAAK;EAChB;EAEA,IAAI,MAAG;AACH,WAAO,KAAK,UAAU,KAAK;EAC/B;EAEA,IAAa,SAAM;AACf,WAAO,KAAK;EAChB;EAEA,IAAI,YAAS;AACT,WAAO,KAAK;EAChB;EAEA,IAAI,QAAK;AACL,WAAO,KAAK;EAChB;EAQA,YAAY,QAAgB,QAAgB,OAAc,WAAsB,SAAS,OAAK;AAC1F,UAAK;AACL,SAAK,UAAU;AACf,SAAK,UAAU;AACf,SAAK,aAAa;AAClB,SAAK,UAAU;AACf,SAAK,SAAS;EAClB;;AAGE,IAAO,uBAAP,cAAoC,gBAAe;EA9LzD,OA8LyD;;;EAAzD,cAAA;;AACa,SAAA,UAAqB,IAAI,iBAAiB,IAAI;EAqD3D;;EAjDI,IAAI,WAAQ;AACR,WAAO,KAAK;EAChB;EAEA,IAAI,SAAM;;AACN,YAAO,MAAA,KAAA,KAAK,wBAAkB,QAAA,OAAA,SAAA,SAAA,GAAE,YAAM,QAAA,OAAA,SAAA,KAAI;EAC9C;EAEA,IAAI,SAAM;AACN,WAAO,KAAK,MAAM,KAAK;EAC3B;EAEA,IAAI,MAAG;;AACH,YAAO,MAAA,KAAA,KAAK,uBAAiB,QAAA,OAAA,SAAA,SAAA,GAAE,SAAG,QAAA,OAAA,SAAA,KAAI;EAC1C;EAEA,IAAI,QAAK;AACL,UAAM,YAAY,KAAK;AACvB,UAAM,WAAW,KAAK;AACtB,QAAI,aAAa,UAAU;AACvB,UAAI,KAAK,gBAAgB,QAAW;AAChC,cAAM,EAAE,OAAO,WAAU,IAAK;AAC9B,cAAM,EAAE,OAAO,UAAS,IAAK;AAC7B,aAAK,cAAc,EAAE,OAAO,WAAW,OAAO,KAAK,UAAU,IAAI,OAAO,WAAW,MAAM,OAAO,WAAW,QAAQ,UAAU,IAAG;;AAEpI,aAAO,KAAK;WACT;AACH,aAAO,EAAE,OAAO,SAAS,OAAO,GAAG,CAAC,GAAG,KAAK,SAAS,OAAO,GAAG,CAAC,EAAC;;EAEzE;EAEA,IAAY,qBAAkB;AAC1B,eAAW,SAAS,KAAK,SAAS;AAC9B,UAAI,CAAC,MAAM,QAAQ;AACf,eAAO;;;AAGf,WAAO,KAAK,QAAQ,CAAC;EACzB;EAEA,IAAY,oBAAiB;AACzB,aAAS,IAAI,KAAK,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,YAAM,QAAQ,KAAK,QAAQ,CAAC;AAC5B,UAAI,CAAC,MAAM,QAAQ;AACf,eAAO;;;AAGf,WAAO,KAAK,QAAQ,KAAK,QAAQ,SAAS,CAAC;EAC/C;;AAGJ,IAAM,mBAAN,MAAM,0BAAyB,MAAc;EAtP7C,OAsP6C;;;EAGzC,YAAY,QAAwB;AAChC,UAAK;AACL,SAAK,SAAS;AACd,WAAO,eAAe,MAAM,kBAAiB,SAAS;EAC1D;EAES,QAAQ,OAAgB;AAC7B,SAAK,WAAW,KAAK;AACrB,WAAO,MAAM,KAAK,GAAG,KAAK;EAC9B;EAES,WAAW,OAAgB;AAChC,SAAK,WAAW,KAAK;AACrB,WAAO,MAAM,QAAQ,GAAG,KAAK;EACjC;EAES,OAAO,OAAe,UAAkB,OAAgB;AAC7D,SAAK,WAAW,KAAK;AACrB,WAAO,MAAM,OAAO,OAAO,OAAO,GAAG,KAAK;EAC9C;EAEQ,WAAW,OAAgB;AAC/B,eAAW,QAAQ,OAAO;AACJ,WAAM,YAAY,KAAK;;EAEjD;;AAGE,IAAO,kBAAP,cAA+B,qBAAoB;EArRzD,OAqRyD;;;EAGrD,IAAa,OAAI;AACb,WAAO,KAAK,MAAM,UAAU,KAAK,QAAQ,KAAK,GAAG;EACrD;EAEA,IAAI,WAAQ;AACR,WAAO,KAAK;EAChB;EAEA,YAAY,OAAc;AACtB,UAAK;AAXD,SAAA,QAAQ;AAYZ,SAAK,QAAQ,UAAK,QAAL,UAAK,SAAL,QAAS;EAC1B;;;;ACvQG,IAAM,iBAAiB,OAAO,UAAU;AAU/C,SAAS,eAAe,MAA4C;AAChE,SAAO,KAAK,UAAU;AAC1B;AAFS;AA8BT,IAAM,aAAa;AACnB,IAAM,iBAAiB,wBAAC,SAAyB,KAAK,SAAS,UAAU,IAAI,OAAO,OAAO,YAApE;AAEjB,IAAgB,wBAAhB,MAAqC;EAvE3C,OAuE2C;;;EAMvC,YAAY,UAA6B;AAF/B,SAAA,mBAA2C,oBAAI,IAAG;AAGxD,SAAK,QAAQ,SAAS,OAAO;AAC7B,UAAM,SAAS,KAAK,MAAM;AAC1B,SAAK,UAAU,IAAI,kBAAkB,QAAM,OAAA,OAAA,OAAA,OAAA,CAAA,GACpC,SAAS,OAAO,YAAY,GAAA,EAC/
B,sBAAsB,SAAS,OAAO,2BAA0B,CAAA,CAAA;EAExE;EAEA,aAAa,KAAa,SAA2B;AACjD,SAAK,QAAQ,OAAO,KAAK,OAAO;EACpC;EAEA,SAAS,KAAa,UAAgC;AAClD,SAAK,QAAQ,WAAW,KAAK,QAAQ;EACzC;EAEA,KAAK,KAAa,UAAgC;AAC9C,SAAK,QAAQ,SAAS,KAAK,QAAQ;EACvC;EAEA,WAAW,KAAa,UAAgC;AACpD,SAAK,QAAQ,eAAe,KAAK,QAAQ;EAC7C;EAQA,cAAW;AACP,WAAO,KAAK,QAAQ;EACxB;EAEA,IAAI,kBAAe;AACf,WAAO,KAAK;EAChB;EAEA,eAAY;AACR,WAAQ,KAAK,QAAgB;EACjC;EAEA,WAAQ;AACJ,SAAK,QAAQ,iBAAgB;EACjC;;AAGE,IAAO,gBAAP,cAA6B,sBAAqB;EA7HxD,OA6HwD;;;EASpD,IAAY,UAAO;AACf,WAAO,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;EAC3C;EAEA,YAAY,UAA6B;AACrC,UAAM,QAAQ;AAVD,SAAA,cAAc,IAAI,eAAc;AACzC,SAAA,QAAe,CAAA;AAEf,SAAA,gBAAgB,oBAAI,IAAG;AAQ3B,SAAK,SAAS,SAAS,WAAW;AAClC,SAAK,YAAY,SAAS,OAAO;AACjC,SAAK,gBAAgB,SAAS,OAAO;EACzC;EAEA,KAAK,MAAkB,MAAc;AACjC,UAAM,OAAO,KAAK,WAAW,SAAY,eAAe,IAAI,IAAI,iBAAiB,YAAY,IAAI;AACjG,UAAM,aAAa,KAAK,QAAQ,YAAY,eAAe,KAAK,IAAI,GAAG,KAAK,oBAAoB,MAAM,IAAI,EAAE,KAAK,IAAI,CAAC;AACtH,QAAI,KAAK,OAAO;AACZ,WAAK,WAAW;;AAEpB,WAAO;EACX;EAEA,MAAmC,OAAa;AAC5C,SAAK,YAAY,cAAc,KAAK;AACpC,UAAM,cAAc,KAAK,MAAM,SAAS,KAAK;AAC7C,SAAK,QAAQ,QAAQ,YAAY;AACjC,UAAM,SAAS,KAAK,SAAS,KAAK,KAAK,SAAS,CAAA,CAAE;AAClD,SAAK,YAAY,gBAAgB,YAAY,MAAM;AACnD,SAAK,gBAAgB,MAAK;AAC1B,WAAO;MACH,OAAO;MACP,aAAa,YAAY;MACzB,cAAc,KAAK,QAAQ;;EAEnC;EAEQ,oBAAoB,OAAoC,gBAAwB;AACpF,WAAO,CAAC,SAAQ;AACZ,UAAI,CAAC,KAAK,YAAW,GAAI;AACrB,cAAM,OAAY,EAAE,MAAK;AACzB,aAAK,MAAM,KAAK,IAAI;AACpB,YAAI,UAAU,gBAAgB;AAC1B,eAAK,QAAQ;;;AAGrB,UAAI;AACJ,UAAI;AACA,iBAAS,eAAe,IAAI;eACvB,KAAK;AACV,iBAAS;;AAEb,UAAI,CAAC,KAAK,YAAW,KAAM,WAAW,QAAW;AAC7C,iBAAS,KAAK,UAAS;;AAE3B,aAAO;IACX;EACJ;EAEA,QAAQ,KAAa,WAAsB,SAAwB;AAC/D,UAAM,QAAQ,KAAK,QAAQ,YAAY,KAAK,SAAS;AACrD,QAAI,CAAC,KAAK,YAAW,KAAM,KAAK,aAAa,KAAK,GAAG;AACjD,YAAM,WAAW,KAAK,YAAY,cAAc,OAAO,OAAO;AAC9D,YAAM,EAAE,YAAY,WAAU,IAAK,KAAK,cAAc,OAAO;AAC7D,YAAM,UAAU,KAAK;AACrB,UAAI,YAAY;AACZ,cAAM,iBAAiB,UAAU,OAAO,IAAI,MAAM,QAAQ,KAAK,UAAU,QAAQ,MAAM,OAAO,QAAQ;AACtG,aAAK,OAAO,WAAW,UAAU,WAAW,SAAS,gBAAgB,UAAU,UAAU;iBAClF,eAAe,OAAO,GAAG;AAChC,YAAI,OAAO,MAAM;AACjB,YAAI,CAAC,UAAU,OAAO,GAAG;AACrB,iBAAO,KAAK,UAAU,QAAQ,MAAM,QAAQ,EAAE,SAAQ;;AAE1D,gBAAQ,SAAS;;;EAG7B;;;;;;;EAQQ,aAAa,OAAa;AAC9B,WAAO,CAAC,MAAM,wBAAwB,CAAC,MAAM,MAAM,WAAW,KAAK,OAAO,MAAM,cAAc,YAAY,CAAC,MAAM,MAAM,SAAS;EACpI;EAEA,QAAQ,KAAa,MAAkB,SAA0B,MAAU;AACvE,QAAI;AACJ,QAAI,CAAC,KAAK,YAAW,GAAI;AACrB,gBAAU,KAAK,YAAY,mBAAmB,OAAO;;AAEzD,UAAM,gBAAgB,KAAK,QAAQ,YAAY,KAAK,MAAM,IAAI;AAC9D,QAAI,CAAC,KAAK,YAAW,KAAM,WAAW,QAAQ,SAAS,GAAG;AACtD,WAAK,yBAAyB,eAAe,SAAS,OAAO;;EAErE;EAEQ,yBAAyB,QAAa,SAA0B,SAAyB;AAC7F,UAAM,EAAE,YAAY,WAAU,IAAK,KAAK,cAAc,OAAO;AAC7D,QAAI,YAAY;AACZ,WAAK,OAAO,WAAW,UAAU,WAAW,SAAS,QAAQ,SAAS,UAAU;eACzE,CAAC,YAAY;AAMpB,YAAM,UAAU,KAAK;AACrB,UAAI,eAAe,OAAO,GAAG;AACzB,gBAAQ,SAAS,OAAO,SAAQ;iBACzB,OAAO,WAAW,YAAY,QAAQ;AAC7C,cAAM,aAAa,OAAO;AAC1B,cAAM,SAAS,KAAK,sBAAsB,QAAQ,OAAO;AACzD,YAAI,YAAY;AACZ,iBAAO,QAAQ;;AAEnB,cAAM,UAAU;AAChB,aAAK,MAAM,IAAG;AACd,aAAK,MAAM,KAAK,OAAO;;;EAGnC;EAEA,OAAO,OAAe,QAAc;AAChC,QAAI,CAAC,KAAK,YAAW,GAAI;AACrB,UAAI,OAAO,KAAK;AAIhB,UAAI,CAAC,KAAK,YAAY,OAAO,WAAW,OAAO,UAAU;AACrD,eAAO,KAAK,UAAU,KAAK;AAC3B,cAAM,UAAU,KAAK,SAAS;AAC9B,aAAK,YAAY,mBAAmB,OAAO;;AAE/C,YAAM,UAAU,EAAE,MAAK;AACvB,WAAK,MAAM,IAAG;AACd,WAAK,MAAM,KAAK,OAAO;AACvB,UAAI,OAAO,WAAW,OAAO,UAAU;AACnC,aAAK,OAAO,OAAO,UAAU,OAAO,SAAS,MAAM,KAAK,UAAU,KAAK;;;EAGnF;EAEA,UAAU,MAAM,MAAI;AAChB,QAAI,KAAK,YAAW,GAAI;AACpB,aAAO;;AAEX,UAAM,MAAM,KAAK;AACjB,2BAAuB,GAAG;AAC1B,SAAK,YAAY,UAAU,GAAG;AAC9B,QAAI,KAAK;AACL,WAAK,MAAM,IAAG;;AAElB,QAAI,eAAe,GAAG,GAAG;AACrB,aAAO,KAAK,UAAU,QAAQ,IAAI,OAAO,IAAI,QAAQ;WAClD;AACH,gCAA0B,KAAK,eAAe,GAAG;;AAErD,WAAO;EACX;EAEQ,cAAc,SAAwB;AAC1C,QAAI,CAAC,KAAK,cAAc,IAAI,OAAO,GAAG;AACl
C,YAAM,aAAa,mBAAmB,SAAS,YAAY;AAC3D,WAAK,cAAc,IAAI,SAAS;QAC5B;QACA,YAAY,aAAa,iBAAiB,WAAW,QAAQ,IAAI;OACpE;;AAEL,WAAO,KAAK,cAAc,IAAI,OAAO;EACzC;EAEQ,OAAO,UAAkB,SAAiB,OAAgB,SAAkB,YAAmB;AACnG,UAAM,MAAM,KAAK;AACjB,QAAI;AACJ,QAAI,cAAc,OAAO,UAAU,UAAU;AACzC,aAAO,KAAK,OAAO,eAAe,KAAK,SAAS,SAAS,KAAK;WAC3D;AACH,aAAO;;AAEX,YAAQ,UAAU;MACd,KAAK,KAAK;AACN,YAAI,OAAO,IAAI;AACf;;MAEJ,KAAK,MAAM;AACP,YAAI,OAAO,IAAI;AACf;;MAEJ,KAAK,MAAM;AACP,YAAI,CAAC,MAAM,QAAQ,IAAI,OAAO,CAAC,GAAG;AAC9B,cAAI,OAAO,IAAI,CAAA;;AAEnB,YAAI,OAAO,EAAE,KAAK,IAAI;;;EAGlC;EAEQ,sBAAsB,QAAa,QAAW;AAClD,eAAW,CAAC,MAAM,aAAa,KAAK,OAAO,QAAQ,MAAM,GAAG;AACxD,YAAM,WAAW,OAAO,IAAI;AAC5B,UAAI,aAAa,QAAW;AACxB,eAAO,IAAI,IAAI;iBACR,MAAM,QAAQ,QAAQ,KAAK,MAAM,QAAQ,aAAa,GAAG;AAChE,sBAAc,KAAK,GAAG,QAAQ;AAC9B,eAAO,IAAI,IAAI;;;AAGvB,WAAO;EACX;EAEA,IAAI,mBAAgB;AAChB,WAAO,KAAK,QAAQ;EACxB;;AASE,IAAgB,qCAAhB,MAAkD;EAnWxD,OAmWwD;;;EAEpD,0BAA0B,SAKzB;AACG,WAAO,2BAA2B,0BAA0B,OAAO;EACvE;EAEA,8BAA8B,SAG7B;AACG,WAAO,2BAA2B,8BAA8B,OAAO;EAC3E;EAEA,wBAAwB,SAMvB;AACG,WAAO,2BAA2B,wBAAwB,OAAO;EACrE;EAEA,sBAAsB,SAMrB;AACG,WAAO,2BAA2B,sBAAsB,OAAO;EACnE;;AAIE,IAAO,oCAAP,cAAiD,mCAAkC;EA3YzF,OA2YyF;;;EAE5E,0BAA0B,EAAE,UAAU,OAAM,GAKpD;AACG,UAAM,cAAc,SAAS,QACvB,MAAM,SAAS,QAAQ,MACvB,SAAS,KAAK,SAAS,KAAK,IACxB,YAAY,SAAS,KAAK,UAAU,GAAG,SAAS,KAAK,SAAS,CAAC,CAAC,MAChE,kBAAkB,SAAS,IAAI;AACzC,WAAO,aAAa,WAAW,gBAAgB,OAAO,KAAK;EAC/D;EAES,8BAA8B,EAAE,eAAc,GAGtD;AACG,WAAO,qCAAqC,eAAe,KAAK;EACpE;;AASE,IAAO,0BAAP,cAAuC,sBAAqB;EAzalE,OAyakE;;;EAAlE,cAAA;;AAEY,SAAA,SAAmB,CAAA;AAEnB,SAAA,eAAkC,CAAA;AAClC,SAAA,mBAAsC,CAAA;AACtC,SAAA,iBAAiB;AACjB,SAAA,YAAY;EAkGxB;EAhGI,SAAM;EAEN;EAEA,YAAS;AAEL,WAAO;EACX;EAEA,MAAM,OAAa;AACf,SAAK,WAAU;AACf,UAAM,SAAS,KAAK,MAAM,SAAS,KAAK;AACxC,SAAK,SAAS,OAAO;AACrB,SAAK,QAAQ,QAAQ,CAAC,GAAG,KAAK,MAAM;AACpC,SAAK,SAAS,KAAK,KAAK,SAAS,CAAA,CAAE;AACnC,SAAK,gBAAgB,MAAK;AAC1B,WAAO;MACH,QAAQ,KAAK;MACb,cAAc,CAAC,GAAG,KAAK,gBAAgB;MACvC,YAAY,KAAK;;EAEzB;EAEA,KAAK,MAAkB,MAAc;AACjC,UAAM,aAAa,KAAK,QAAQ,YAAY,eAAe,KAAK,IAAI,GAAG,KAAK,oBAAoB,IAAI,EAAE,KAAK,IAAI,CAAC;AAChH,QAAI,KAAK,OAAO;AACZ,WAAK,WAAW;;AAEpB,WAAO;EACX;EAEQ,aAAU;AACd,SAAK,eAAe,CAAA;AACpB,SAAK,mBAAmB,CAAA;AACxB,SAAK,iBAAiB;AACtB,SAAK,YAAY;EACrB;EAEQ,oBAAoB,gBAAwB;AAChD,WAAO,CAAC,SAAQ;AACZ,YAAM,OAAO,KAAK,cAAa;AAC/B,UAAI;AACA,uBAAe,IAAI;;AAEnB,aAAK,eAAe,IAAI;;IAEhC;EACJ;EAEQ,2BAAwB;AAC5B,SAAK,aAAa,OAAO,KAAK,SAAS;EAC3C;EAEA,gBAAa;AACT,UAAM,OAAO,KAAK,aAAa;AAC/B,SAAK,YAAY;AACjB,WAAO;EACX;EAEA,eAAe,MAAY;AACvB,SAAK,yBAAwB;AAC7B,SAAK,YAAY;EACrB;EAEA,QAAQ,KAAa,WAAsB,SAAwB;AAC/D,SAAK,QAAQ,YAAY,KAAK,SAAS;AACvC,QAAI,CAAC,KAAK,YAAW,GAAI;AACrB,WAAK,mBAAmB,CAAC,GAAG,KAAK,cAAc,OAAO;AACtD,WAAK,iBAAiB,KAAK,UAAU;;EAE7C;EAEA,QAAQ,KAAa,MAAkB,SAA0B,MAAU;AACvE,SAAK,OAAO,OAAO;AACnB,SAAK,QAAQ,YAAY,KAAK,MAAM,IAAI;AACxC,SAAK,MAAM,OAAO;EACtB;EAEA,OAAO,SAAwB;AAC3B,QAAI,CAAC,KAAK,YAAW,GAAI;AACrB,WAAK,aAAa,KAAK,OAAO;;EAEtC;EAEA,MAAM,SAAwB;AAC1B,QAAI,CAAC,KAAK,YAAW,GAAI;AACrB,YAAM,QAAQ,KAAK,aAAa,YAAY,OAAO;AACnD,UAAI,SAAS,GAAG;AACZ,aAAK,aAAa,OAAO,KAAK;;;EAG1C;EAEA,IAAI,UAAO;AACP,WAAQ,KAAK,QAAgB;EACjC;;AAGJ,IAAM,gBAA+B;EACjC,iBAAiB;EACjB,sBAAsB;EACtB,iBAAiB;EACjB,sBAAsB,IAAI,kCAAiC;;AAO/D,IAAM,oBAAN,cAAgC,sBAAqB;EA/hBrD,OA+hBqD;;;EAKjD,YAAY,QAAyB,QAAsB;AACvD,UAAM,sBAAsB,UAAU,kBAAkB;AACxD,UAAM,QAAM,OAAA,OAAA,OAAA,OAAA,OAAA,OAAA,CAAA,GACL,aAAa,GAAA,EAChB,mBAAmB,sBACb,IAAI,qBAAqB,EAAE,cAAc,OAAO,aAAY,CAAE,IAC9D,IAAI,wBAAuB,EAAE,CAAA,GAChC,MAAM,CAAA;EAEjB;EAEA,IAAI,eAAY;AACZ,WAAO,KAAK;EAChB;EAEA,YAAY,MAAc,MAAc;AACpC,WAAO,KAAK,KAAK,MAAM,IAAI;EAC/B;EAEA,mBAAgB;AACZ,SAAK,oBAAmB;EAC5B;EAEA,YAAY,KAAa,WAAoB;AACzC,WAAO,KAAK,QAAQ,KAAK,SAAS;EACtC;EAEA,YAAY,KAAa,MAAkB,MAAU;AAC
jD,WAAO,KAAK,QAAQ,KAAK,MAAM;MAC3B,MAAM,CAAC,IAAI;KACd;EACL;EAEA,OAAO,KAAa,SAA2B;AAC3C,SAAK,GAAG,KAAK,OAAO;EACxB;EAEA,WAAW,KAAa,UAAgC;AACpD,SAAK,OAAO,KAAK,QAAQ;EAC7B;EAEA,SAAS,KAAa,UAAgC;AAClD,SAAK,KAAK,KAAK,QAAQ;EAC3B;EAEA,eAAe,KAAa,UAAgC;AACxD,SAAK,WAAW,KAAK,QAAQ;EACjC;;;;AC3iBE,SAAU,aAAmC,SAAkB,QAAW,QAA2B;AACvG,QAAM,QAAQ,oBAAI,IAAG;AACrB,QAAM,gBAA+B;IACjC;IACA;IACA;IACA,WAAW,oBAAI,IAAG;;AAEtB,aAAW,eAAe,OAAO;AACjC,SAAO;AACX;AAVgB;AAYhB,SAAS,WAAW,eAA8B,SAAgB;AAC9D,QAAM,YAAY,qBAAqB,SAAS,KAAK;AACrD,QAAM,cAAc,OAAO,QAAQ,KAAK,EAAE,OAAO,YAAY,EAAE,OAAO,UAAQ,UAAU,IAAI,IAAI,CAAC;AACjG,aAAW,QAAQ,aAAa;AAC5B,UAAM,MAAG,OAAA,OAAA,OAAA,OAAA,CAAA,GACF,aAAa,GAAA,EAChB,SAAS,GACT,UAAU,GACV,SAAS,GACT,MAAM,GACN,IAAI,EAAC,CAAA;AAET,QAAI,MAAM,IACN,KAAK,MACL,cAAc,OAAO,KAAK,MAAM,aAAa,KAAK,KAAK,UAAU,CAAC,CAAC;;AAG/E;AAjBS;AAmBT,SAAS,aAAa,KAAkB,SAA0B,cAAc,OAAK;AACjF,MAAI;AACJ,MAAI,UAAU,OAAO,GAAG;AACpB,aAAS,aAAa,KAAK,OAAO;aAC3B,SAAS,OAAO,GAAG;AAC1B,aAAS,YAAY,KAAK,OAAO;aAC1B,aAAa,OAAO,GAAG;AAC9B,aAAS,aAAa,KAAK,QAAQ,QAAQ;aACpC,iBAAiB,OAAO,GAAG;AAClC,aAAS,oBAAoB,KAAK,OAAO;aAClC,WAAW,OAAO,GAAG;AAC5B,aAAS,cAAc,KAAK,OAAO;aAC5B,eAAe,OAAO,GAAG;AAChC,aAAS,kBAAkB,KAAK,OAAO;aAChC,iBAAiB,OAAO,GAAG;AAClC,aAAS,oBAAoB,KAAK,OAAO;aAClC,QAAQ,OAAO,GAAG;AACzB,aAAS,WAAW,KAAK,OAAO;aAC1B,YAAY,OAAO,GAAG;AAC5B,UAAM,MAAM,IAAI;AAChB,aAAS,6BAAM,IAAI,OAAO,QAAQ,KAAK,KAAK,OAAO,GAA1C;SACN;AACH,UAAM,IAAI,kBAAkB,QAAQ,UAAU,4BAA4B,QAAQ,KAAK,EAAE;;AAE7F,SAAO,KAAK,KAAK,cAAc,SAAY,kBAAkB,OAAO,GAAG,QAAQ,QAAQ,WAAW;AACtG;AAzBS;AA2BT,SAAS,YAAY,KAAkB,QAAc;AACjD,QAAM,aAAa,YAAY,MAAM;AACrC,SAAO,MAAM,IAAI,OAAO,OAAO,YAAY,MAAM;AACrD;AAHS;AAKT,SAAS,cAAc,KAAkB,UAAkB;AACvD,QAAM,OAAO,SAAS,KAAK;AAC3B,MAAI,aAAa,IAAI,GAAG;AACpB,UAAM,MAAM,IAAI;AAChB,UAAM,YAAY,SAAS,UAAU,SAAS,IAAI,uBAAuB,MAAM,SAAS,SAAS,IAAI,OAAO,CAAA;AAC5G,WAAO,CAAC,SAAS,IAAI,OAAO,QAAQ,KAAK,QAAQ,KAAK,IAAI,GAAG,UAAU,UAAU,IAAI,CAAC;aAC/E,eAAe,IAAI,GAAG;AAC7B,UAAM,MAAM,IAAI;AAChB,UAAM,SAAS,SAAS,KAAK,KAAK,IAAI;AACtC,WAAO,MAAM,IAAI,OAAO,QAAQ,KAAK,QAAQ,QAAQ;aAC9C,CAAC,MAAM;AACd,UAAM,IAAI,kBAAkB,SAAS,UAAU,wBAAwB,SAAS,KAAK,EAAE;SACpF;AACH,sBAAkB,IAAI;;AAE9B;AAfS;AAiBT,SAAS,uBAAuB,MAAkB,WAA0B;AACxE,QAAM,aAAa,UAAU,IAAI,OAAK,eAAe,EAAE,KAAK,CAAC;AAC7D,SAAO,CAAC,SAAQ;AACZ,UAAM,WAAiB,CAAA;AACvB,aAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AACxC,YAAM,aAAa,KAAK,WAAW,CAAC;AACpC,YAAM,YAAY,WAAW,CAAC;AAC9B,eAAS,WAAW,IAAI,IAAI,UAAU,IAAI;;AAE9C,WAAO;EACX;AACJ;AAXS;AAkBT,SAAS,eAAe,WAAoB;AACxC,MAAI,cAAc,SAAS,GAAG;AAC1B,UAAM,OAAO,eAAe,UAAU,IAAI;AAC1C,UAAM,QAAQ,eAAe,UAAU,KAAK;AAC5C,WAAO,CAAC,SAAU,KAAK,IAAI,KAAK,MAAM,IAAI;aACnC,cAAc,SAAS,GAAG;AACjC,UAAM,OAAO,eAAe,UAAU,IAAI;AAC1C,UAAM,QAAQ,eAAe,UAAU,KAAK;AAC5C,WAAO,CAAC,SAAU,KAAK,IAAI,KAAK,MAAM,IAAI;aACnC,WAAW,SAAS,GAAG;AAC9B,UAAM,QAAQ,eAAe,UAAU,KAAK;AAC5C,WAAO,CAAC,SAAS,CAAC,MAAM,IAAI;aACrB,qBAAqB,SAAS,GAAG;AACxC,UAAM,OAAO,UAAU,UAAU,IAAK;AACtC,WAAO,CAAC,SAAS,SAAS,UAAa,KAAK,IAAI,MAAM;aAC/C,iBAAiB,SAAS,GAAG;AACpC,UAAM,QAAQ,QAAQ,UAAU,IAAI;AACpC,WAAO,MAAM;;AAEjB,oBAAkB,SAAS;AAC/B;AApBS;AAsBT,SAAS,kBAAkB,KAAkB,cAA0B;AACnE,MAAI,aAAa,SAAS,WAAW,GAAG;AACpC,WAAO,aAAa,KAAK,aAAa,SAAS,CAAC,CAAC;SAC9C;AACH,UAAM,UAA8B,CAAA;AAEpC,eAAW,WAAW,aAAa,UAAU;AACzC,YAAM,mBAAqC;;;QAGvC,KAAK,aAAa,KAAK,SAAS,IAAI;;AAExC,YAAM,QAAQ,kBAAkB,OAAO;AACvC,UAAI,OAAO;AACP,yBAAiB,OAAO,eAAe,KAAK;;AAEhD,cAAQ,KAAK,gBAAgB;;AAGjC,UAAM,MAAM,IAAI;AAChB,WAAO,CAAC,SAAS,IAAI,OAAO,aAAa,KAAK,QAAQ,IAAI,YAAS;AAC/D,YAAM,MAAuB;QACzB,KAAK,6BAAM,OAAO,IAAI,IAAI,GAArB;;AAET,YAAM,OAAO,OAAO;AACpB,UAAI,MAAM;AACN,YAAI,OAAO,MAAM,KAAK,IAAI;;AAE9B,aAAO;IACX,CAAC,CAAC;;AAEV;AA/BS;AAiCT,SAAS,oBAAoB,KAAkB,OAAqB;AAChE,MAAI,MAAM,SAAS,WAAW,GAAG;AAC7B,WAAO,aAAa,KAAK,MAAM
,SAAS,CAAC,CAAC;;AAE9C,QAAM,UAA8B,CAAA;AAEpC,aAAW,WAAW,MAAM,UAAU;AAClC,UAAM,mBAAqC;;;MAGvC,KAAK,aAAa,KAAK,SAAS,IAAI;;AAExC,UAAM,QAAQ,kBAAkB,OAAO;AACvC,QAAI,OAAO;AACP,uBAAiB,OAAO,eAAe,KAAK;;AAEhD,YAAQ,KAAK,gBAAgB;;AAGjC,QAAM,QAAQ,IAAI;AAElB,QAAM,SAAS,wBAAC,UAAkB,YAAuB;AACrD,UAAM,UAAU,QAAQ,aAAY,EAAG,KAAK,GAAG;AAC/C,WAAO,UAAU,QAAQ,IAAI,OAAO;EACxC,GAHe;AAIf,QAAM,eAAuB,wBAAC,SAAS,IAAI,OAAO,aAAa,OAAO,QAAQ,IAAI,CAAC,QAAQ,QAAO;AAC9F,UAAM,MAAuB,EAAE,KAAK,6BAAM,MAAN,OAAU;AAC9C,UAAM,SAAS,IAAI;AACnB,QAAI,MAAM,MAAK;AACX,aAAO,IAAI,IAAI;AACf,UAAI,CAAC,OAAO,YAAW,GAAI;AACvB,cAAM,MAAM,OAAO,OAAO,MAAM;AAChC,YAAI,CAAC,OAAO,gBAAgB,IAAI,GAAG,GAAG;AAElC,iBAAO,gBAAgB,IAAI,KAAK,CAAA,CAAE;;AAEtC,cAAM,aAAa,OAAO,gBAAgB,IAAI,GAAG;AACjD,YAAI,QAAO,eAAU,QAAV,eAAU,SAAA,SAAV,WAAa,GAAG,OAAM,aAAa;AAE1C,qBAAW,GAAG,IAAI;;;IAG9B;AACA,UAAM,OAAO,OAAO;AACpB,QAAI,MAAM;AACN,UAAI,OAAO,MAAM,KAAK,IAAI;WACvB;AACH,UAAI,OAAO,MAAK;AACZ,cAAM,sBAAsB,OAAO,gBAAgB,IAAI,OAAO,OAAO,MAAM,CAAC;AAC5E,cAAM,QAAQ,EAAC,wBAAmB,QAAnB,wBAAmB,SAAA,SAAnB,oBAAsB,GAAG;AACxC,eAAO;MACX;;AAEJ,WAAO;EACX,CAAC,CAAC,GA7B2B;AA8B7B,QAAM,UAAU,KAAK,KAAK,kBAAkB,KAAK,GAAG,cAAc,GAAG;AACrE,SAAO,CAAC,SAAQ;AACZ,YAAQ,IAAI;AACZ,QAAI,CAAC,IAAI,OAAO,YAAW,GAAI;AAC3B,UAAI,OAAO,gBAAgB,OAAO,OAAO,OAAO,IAAI,MAAM,CAAC;;EAEnE;AACJ;AA9DS;AAgET,SAAS,WAAW,KAAkB,OAAY;AAC9C,QAAM,UAAU,MAAM,SAAS,IAAI,OAAK,aAAa,KAAK,CAAC,CAAC;AAC5D,SAAO,CAAC,SAAS,QAAQ,QAAQ,YAAU,OAAO,IAAI,CAAC;AAC3D;AAHS;AAKT,SAAS,kBAAkB,SAAwB;AAC/C,MAAI,QAAQ,OAAO,GAAG;AAClB,WAAO,QAAQ;;AAEnB,SAAO;AACX;AALS;AAOT,SAAS,oBAAoB,KAAkB,UAA0B,WAAW,SAAS,UAAQ;AACjG,MAAI,CAAC,UAAU;AACX,QAAI,CAAC,SAAS,KAAK,KAAK;AACpB,YAAM,IAAI,MAAM,0CAA0C,SAAS,KAAK,QAAQ;;AAEpF,UAAM,aAAa,mBAAmB,SAAS,KAAK,GAAG;AACvD,UAAM,iBAAiB,eAAU,QAAV,eAAU,SAAA,SAAV,WAAY;AACnC,QAAI,CAAC,gBAAgB;AACjB,YAAM,IAAI,MAAM,8CAA8C,YAAY,SAAS,KAAK,GAAG,CAAC;;AAEhG,WAAO,oBAAoB,KAAK,UAAU,cAAc;aACjD,WAAW,QAAQ,KAAK,aAAa,SAAS,KAAK,GAAG,GAAG;AAChE,UAAM,MAAM,IAAI;AAChB,WAAO,CAAC,SAAS,IAAI,OAAO,QAAQ,KAAK,QAAQ,KAAK,SAAS,KAAK,GAAiB,GAAG,UAAU,IAAI;aAC/F,WAAW,QAAQ,KAAK,eAAe,SAAS,KAAK,GAAG,GAAG;AAClE,UAAM,MAAM,IAAI;AAChB,UAAM,eAAe,SAAS,KAAK,SAAS,KAAK,IAAI,IAAI;AACzD,WAAO,MAAM,IAAI,OAAO,QAAQ,KAAK,cAAc,QAAQ;aACpD,UAAU,QAAQ,GAAG;AAC5B,UAAM,MAAM,IAAI;AAChB,UAAM,UAAU,SAAS,KAAK,SAAS,KAAK;AAC5C,WAAO,MAAM,IAAI,OAAO,QAAQ,KAAK,SAAS,QAAQ;SAErD;AACD,UAAM,IAAI,MAAM,wCAAwC;;AAEhE;AA1BS;AA4BT,SAAS,aAAa,KAAkB,SAAgB;AACpD,QAAM,MAAM,IAAI;AAChB,QAAM,QAAQ,IAAI,OAAO,QAAQ,KAAK;AACtC,MAAI,CAAC,OAAO;AACR,UAAM,IAAI,MAAM,uCAAuC,QAAQ,KAAK;;AAExE,SAAO,MAAM,IAAI,OAAO,QAAQ,KAAK,OAAO,OAAO;AACvD;AAPS;AAST,SAAS,KAAK,KAAkB,OAA8B,QAAgB,aAAwB;AAClG,QAAM,OAAO,SAAS,eAAe,KAAK;AAE1C,MAAI,CAAC,aAAa;AACd,QAAI,MAAM;AACN,YAAM,MAAM,IAAI;AAChB,aAAO,CAAC,SAAS,IAAI,OAAO,aAAa,KAAK;QAC1C;UACI,KAAK,6BAAM,OAAO,IAAI,GAAjB;UACL,MAAM,6BAAM,KAAK,IAAI,GAAf;;QAEV;UACI,KAAK,UAAS;UACd,MAAM,6BAAM,CAAC,KAAK,IAAI,GAAhB;;OAEb;WACE;AACH,aAAO;;;AAIf,MAAI,gBAAgB,KAAK;AACrB,UAAM,MAAM,IAAI;AAChB,WAAO,CAAC,SAAS,IAAI,OAAO,KAAK,KAAK;MAClC,KAAK,6BAAM,OAAO,IAAI,GAAjB;MACL,MAAM,OAAO,MAAM,KAAK,IAAI,IAAI;KACnC;aACM,gBAAgB,KAAK;AAC5B,UAAM,MAAM,IAAI;AAChB,QAAI,MAAM;AACN,YAAM,QAAQ,IAAI;AAKlB,aAAO,CAAC,SAAS,IAAI,OAAO,aAAa,OAAO;QAC5C;UACI,KAAK,6BAAM,IAAI,OAAO,WAAW,KAAK;YAClC,KAAK,6BAAM,OAAO,IAAI,GAAjB;WACR,GAFI;UAGL,MAAM,6BAAM,KAAK,IAAI,GAAf;;QAEV;UACI,KAAK,UAAS;UACd,MAAM,6BAAM,CAAC,KAAK,IAAI,GAAhB;;OAEb;WACE;AACH,aAAO,CAAC,SAAS,IAAI,OAAO,WAAW,KAAK;QACxC,KAAK,6BAAM,OAAO,IAAI,GAAjB;OACR;;aAEE,gBAAgB,KAAK;AAC5B,UAAM,MAAM,IAAI;AAChB,WAAO,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK;MACtC,KAAK,6BAAM,OAAO,IAAI,GAAjB;MACL,MAAM,OAAO,MAAM,KAAK,IAAI,IAAI;KACnC;SACE;AACH,sBAAkB,WAAW;;AAErC;AA7DS;AA+DT,SAAS,
QAAQ,KAAoB,SAAqC;AACtE,QAAM,OAAO,YAAY,KAAK,OAAO;AACrC,QAAM,OAAO,IAAI,MAAM,IAAI,IAAI;AAC/B,MAAI,CAAC;AAAM,UAAM,IAAI,MAAM,SAAS,IAAI,eAAe;AACvD,SAAO;AACX;AALS;AAOT,SAAS,YAAY,KAAoB,SAAqC;AAC1E,MAAI,aAAa,OAAO,GAAG;AACvB,WAAO,QAAQ;aACR,IAAI,UAAU,IAAI,OAAO,GAAG;AACnC,WAAO,IAAI,UAAU,IAAI,OAAO;SAC7B;AACH,QAAI,OAAgB;AACpB,QAAI,SAAkB,KAAK;AAC3B,QAAI,WAAmB,QAAQ;AAC/B,WAAO,CAAC,aAAa,MAAM,GAAG;AAC1B,UAAI,QAAQ,MAAM,KAAK,eAAe,MAAM,KAAK,iBAAiB,MAAM,GAAG;AACvE,cAAM,QAAQ,OAAO,SAAS,QAAQ,IAAuB;AAC7D,mBAAW,MAAM,SAAQ,IAAK,MAAM;;AAExC,aAAO;AACP,eAAS,OAAO;;AAEpB,UAAM,OAAO;AACb,eAAW,KAAK,OAAO,MAAM;AAC7B,QAAI,UAAU,IAAI,SAAS,QAAQ;AACnC,WAAO;;AAEf;AAtBS;AAwBT,SAAS,SAAS,KAAoB,MAAY;AAC9C,QAAM,QAAQ,IAAI,OAAO,IAAI;AAC7B,MAAI,CAAC;AAAO,UAAM,IAAI,MAAM,UAAU,IAAI,eAAe;AACzD,SAAO;AACX;AAJS;;;ACtYH,SAAU,uBAAuB,UAA6B;AAChE,QAAM,UAAU,SAAS;AACzB,QAAM,QAAQ,SAAS,OAAO;AAC9B,QAAM,SAAS,IAAI,wBAAwB,QAAQ;AACnD,eAAa,SAAS,QAAQ,MAAM,UAAU;AAC9C,SAAO,SAAQ;AACf,SAAO;AACX;AAPgB;;;ACIV,SAAU,oBAAoB,UAA6B;AAC7D,QAAM,SAAS,qBAAqB,QAAQ;AAC5C,SAAO,SAAQ;AACf,SAAO;AACX;AAJgB;AAUV,SAAU,qBAAqB,UAA6B;AAC9D,QAAM,UAAU,SAAS;AACzB,QAAM,QAAQ,SAAS,OAAO;AAC9B,QAAM,SAAS,IAAI,cAAc,QAAQ;AACzC,SAAO,aAAa,SAAS,QAAQ,MAAM,UAAU;AACzD;AALgB;;;ACAV,IAAO,sBAAP,MAA0B;EAxBhC,OAwBgC;;;EAE5B,YAAY,SAAkB,SAA6B;AACvD,UAAM,iBAAiB,OAAO,qBAAqB,SAAS,KAAK,CAAC;AAClE,UAAM,iBAA8B,KAAK,oBAAoB,cAAc;AAC3E,UAAM,SAAsB,KAAK,mBAAmB,gBAAgB,gBAAgB,OAAO;AAE3F,mBAAe,QAAQ,mBAAgB;AACnC,YAAM,UAAU,cAAc;AAC9B,UAAI,OAAO,YAAY,YAAY,WAAW,UAAU,WAAW,aAAa,OAAO,GAAG;AACtF,eAAO,QAAQ,aAAa;aACzB;AACH,eAAO,KAAK,aAAa;;IAEjC,CAAC;AAGD,WAAO;EACX;EAEU,oBAAoB,OAA2B;AACrD,WAAO,MAAM,OAAO,cAAc,EAAE,OAAO,OAAK,CAAC,EAAE,QAAQ,EACtD,IAAI,cAAY,KAAK,mBAAmB,QAAQ,CAAC,EAAE,QAAO;EACnE;EAEU,mBAAmB,UAAsB;AAC/C,UAAM,QAAQ,cAAc,QAAQ;AACpC,UAAM,UAAU,KAAK,sBAAsB,KAAK,IAAI,KAAK,qBAAqB,KAAK,IAAI;AACvF,UAAM,YAAuB;MACzB,MAAM,SAAS;MACf,SAAS;MACT,aAAa;;AAEjB,QAAI,SAAS,QAAQ;AAEjB,gBAAU,QAAQ,aAAa,KAAK,IAAI,MAAM,UAAU;;AAE5D,WAAO;EACX;EAEU,sBAAsB,OAAa;AACzC,QAAI,MAAM,MAAM,SAAS,GAAG,GAAG;AAE3B,aAAO;eACA,MAAM,OAAO,SAAS,KAAK,KAAK,MAAM,OAAO,SAAS,KAAK,GAAG;AAErE,aAAO;WACJ;AACH,aAAO;;EAEf;EAEU,qBAAqB,OAAa;AACxC,UAAM,cAAc,IAAI,OAAO,OAAO,MAAM,QAAQ,GAAG;AACvD,WAAO,CAAC,MAAM,WAAU;AACpB,kBAAY,YAAY;AACxB,YAAM,aAAa,YAAY,KAAK,IAAI;AACxC,aAAO;IACX;EACJ;EAEU,mBAAmB,OAA6B,gBAA6B,SAA6B;AAChH,WAAO,MAEF,OAAO,YAAY,EACnB,QAAQ,UAAQ,kBAAkB,IAAI,EAAE,OAAO,SAAS,CAAC,EACzD,SAAS,OAAK,EAAE,KAAK,EAAE,QAAO,EAE9B,KAAK,CAAC,GAAG,MAAM,EAAE,MAAM,SAAS,EAAE,MAAM,MAAM,EAC9C,IAAI,aAAW,KAAK,kBAAkB,SAAS,gBAAgB,QAAQ,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,eAAe,CAAC,CAAC;EAC1G;EAEU,kBAAkB,SAAkB,gBAA6B,iBAAwB;AAC/F,WAAO;MACH,MAAM,QAAQ;MACd,SAAS,KAAK,oBAAoB,SAAS,eAAe;MAC1D,YAAY,KAAK,cAAc,SAAS,cAAc;;EAE9D;EAEU,oBAAoB,SAAkB,iBAAwB;AACpE,WAAO,kBACH,IAAI,OAAO,0BAA0B,QAAQ,KAAK,CAAC,IACnD,QAAQ;EAChB;EAEU,cAAc,SAAkB,gBAA2B;AACjE,WAAO,eAAe,OAAO,CAAC,YAAyB,UAAS;AAC5D,YAAM,UAAU,UAAK,QAAL,UAAK,SAAA,SAAL,MAAO;AACvB,WAAI,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,WAAU,eAAe,MAAM,QAAQ,SAAS,KAAK,QAAQ,KAAK,GAAG;AAC9E,mBAAW,KAAK,KAAK;;AAEzB,aAAO;IACX,GAAG,CAAA,CAAE;EACT;;;;AC/FE,IAAO,wBAAP,MAA4B;EAvBlC,OAuBkC;;;EAE9B,QAAQ,OAAe,SAAgB;AACnC,QAAI,UAAuC,QAAQ;AACnD,QAAI,iBAAiB,OAAO,GAAG;AAC3B,gBAAU,0BAA0B,OAAO;;AAE/C,QAAI,WAAW,OAAO,GAAG;AACrB,YAAM,OAAO,QAAQ,KAAK;AAC1B,UAAI,CAAC,MAAM;AACP,cAAM,IAAI,MAAM,yCAAyC;;AAE7D,aAAO,KAAK,aAAa,MAAM,OAAO,OAAO;;AAEjD,WAAO;EACX;;EAGU,aAAa,MAAoB,OAAe,SAAgB;;AACtE,YAAQ,KAAK,KAAK,YAAW,GAAI;MAC7B,KAAK;AAAO,eAAO,eAAe,WAAW,KAAK;MAClD,KAAK;AAAU,eAAO,eAAe,cAAc,KAAK;MACxD,KAAK;AAAM,eAAO,eAAe,UAAU,KAAK;;AAEpD,aAAQ,KAAA,YAAY,IAAI,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE,YAAW,GAAI;MACtC,KAAK;AAAU,eAAO,eAAe,cAAc,KAAK;MACxD,KAAK;
AAAW,eAAO,eAAe,eAAe,KAAK;MAC1D,KAAK;AAAU,eAAO,eAAe,cAAc,KAAK;MACxD,KAAK;AAAQ,eAAO,eAAe,YAAY,KAAK;MACpD;AAAS,eAAO;;EAExB;;AAGE,IAAW;CAAjB,SAAiBK,iBAAc;AAE3B,WAAgB,cAAc,OAAa;AACvC,QAAI,SAAS;AACb,aAAS,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;AACvC,YAAM,IAAI,MAAM,OAAO,CAAC;AACxB,UAAI,MAAM,MAAM;AACZ,cAAM,KAAK,MAAM,OAAO,EAAE,CAAC;AAC3B,kBAAU,uBAAuB,EAAE;aAChC;AACH,kBAAU;;;AAGlB,WAAO;EACX;AAZgB;AAAA,EAAAA,gBAAA,gBAAa;AAc7B,WAAS,uBAAuB,MAAY;AACxC,YAAQ,MAAM;MACV,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB,KAAK;AAAK,eAAO;MACjB;AAAS,eAAO;;EAExB;AAXS;AAaT,WAAgB,UAAU,OAAa;AACnC,QAAI,MAAM,OAAO,CAAC,MAAM,KAAK;AACzB,aAAO,MAAM,UAAU,CAAC;WACrB;AACH,aAAO;;EAEf;AANgB;AAAA,EAAAA,gBAAA,YAAS;AAQzB,WAAgB,WAAW,OAAa;AACpC,WAAO,SAAS,KAAK;EACzB;AAFgB;AAAA,EAAAA,gBAAA,aAAU;AAI1B,WAAgB,cAAc,OAAa;AACvC,WAAO,OAAO,KAAK;EACvB;AAFgB;AAAA,EAAAA,gBAAA,gBAAa;AAI7B,WAAgB,YAAY,OAAa;AACrC,WAAO,IAAI,KAAK,KAAK;EACzB;AAFgB;AAAA,EAAAA,gBAAA,cAAW;AAI3B,WAAgB,cAAc,OAAa;AACvC,WAAO,OAAO,KAAK;EACvB;AAFgB;AAAA,EAAAA,gBAAA,gBAAa;AAI7B,WAAgB,eAAe,OAAa;AACxC,WAAO,MAAM,YAAW,MAAO;EACnC;AAFgB;AAAA,EAAAA,gBAAA,iBAAc;AAIlC,GAzDiB,mBAAA,iBAAc,CAAA,EAAA;;;ACzD/B;AAOA,iCAAc;;;ACOR,SAAU,gBAAa;AACzB,SAAO,IAAI,QAAQ,aAAU;AAGzB,QAAI,OAAO,iBAAiB,aAAa;AACrC,iBAAW,SAAS,CAAC;WAClB;AACH,mBAAa,OAAO;;EAE5B,CAAC;AACL;AAVgB;AAYhB,IAAI,WAAW;AACf,IAAI,2BAA2B;AAKzB,SAAU,2BAAwB;AACpC,aAAW,KAAK,IAAG;AACnB,SAAO,IAAI,6CAAuB;AACtC;AAHgB;AASV,SAAU,sBAAsB,QAAc;AAChD,6BAA2B;AAC/B;AAFgB;AAST,IAAM,qBAAqB,OAAO,oBAAoB;AAMvD,SAAU,qBAAqB,KAAY;AAC7C,SAAO,QAAQ;AACnB;AAFgB;AAehB,eAAsB,kBAAkB,OAAwB;AAC5D,MAAI,UAAU,uCAAkB,MAAM;AAElC;;AAEJ,QAAM,UAAU,KAAK,IAAG;AACxB,MAAI,UAAU,YAAY,0BAA0B;AAChD,eAAW;AACX,UAAM,cAAa;;AAEvB,MAAI,MAAM,yBAAyB;AAC/B,UAAM;;AAEd;AAbsB;AAmBhB,IAAO,WAAP,MAAe;EA1FrB,OA0FqB;;;EAArB,cAAA;AAII,SAAA,UAAU,IAAI,QAAW,CAAC,SAAS,WAAU;AACzC,WAAK,UAAU,CAAC,QAAO;AACnB,gBAAQ,GAAG;AACX,eAAO;MACX;AACA,WAAK,SAAS,CAAC,QAAO;AAClB,eAAO,GAAG;AACV,eAAO;MACX;IACJ,CAAC;EACL;;;;ACnGA,IAAMC,oBAAN,MAAM,kBAAiB;AAAA,EALvB,OAKuB;AAAA;AAAA;AAAA,EACnB,YAAY,KAAK,YAAY,SAAS,SAAS;AAC3C,SAAK,OAAO;AACZ,SAAK,cAAc;AACnB,SAAK,WAAW;AAChB,SAAK,WAAW;AAChB,SAAK,eAAe;AAAA,EACxB;AAAA,EACA,IAAI,MAAM;AACN,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,IAAI,aAAa;AACb,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,IAAI,UAAU;AACV,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,QAAQ,OAAO;AACX,QAAI,OAAO;AACP,YAAM,QAAQ,KAAK,SAAS,MAAM,KAAK;AACvC,YAAM,MAAM,KAAK,SAAS,MAAM,GAAG;AACnC,aAAO,KAAK,SAAS,UAAU,OAAO,GAAG;AAAA,IAC7C;AACA,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,OAAO,SAAS,SAAS;AACrB,eAAW,UAAU,SAAS;AAC1B,UAAI,kBAAiB,cAAc,MAAM,GAAG;AAExC,cAAM,QAAQ,mBAAmB,OAAO,KAAK;AAE7C,cAAM,cAAc,KAAK,SAAS,MAAM,KAAK;AAC7C,cAAM,YAAY,KAAK,SAAS,MAAM,GAAG;AACzC,aAAK,WAAW,KAAK,SAAS,UAAU,GAAG,WAAW,IAAI,OAAO,OAAO,KAAK,SAAS,UAAU,WAAW,KAAK,SAAS,MAAM;AAE/H,cAAM,YAAY,KAAK,IAAI,MAAM,MAAM,MAAM,CAAC;AAC9C,cAAM,UAAU,KAAK,IAAI,MAAM,IAAI,MAAM,CAAC;AAC1C,YAAI,cAAc,KAAK;AACvB,cAAM,mBAAmB,mBAAmB,OAAO,MAAM,OAAO,WAAW;AAC3E,YAAI,UAAU,cAAc,iBAAiB,QAAQ;AACjD,mBAAS,IAAI,GAAG,MAAM,iBAAiB,QAAQ,IAAI,KAAK,KAAK;AACzD,wBAAY,IAAI,YAAY,CAAC,IAAI,iBAAiB,CAAC;AAAA,UACvD;AAAA,QACJ,OACK;AACD,cAAI,iBAAiB,SAAS,KAAO;AACjC,wBAAY,OAAO,YAAY,GAAG,UAAU,WAAW,GAAG,gBAAgB;AAAA,UAC9E,OACK;AACD,iBAAK,eAAe,cAAc,YAAY,MAAM,GAAG,YAAY,CAAC,EAAE,OAAO,kBAAkB,YAAY,MAAM,UAAU,CAAC,CAAC;AAAA,UACjI;AAAA,QACJ;AACA,cAAM,OAAO,OAAO,KAAK,UAAU,YAAY;AAC/C,YAAI,SAAS,GAAG;AACZ,mBAAS,IAAI,YAAY,IAAI,iBAAiB,QAAQ,MAAM,YAAY,QAAQ,IAAI,KAAK,KAAK;AAC1F,wBAAY,CAAC,IAAI,YAAY,CAAC,IAAI;AAAA,UACtC;AAAA,QACJ;AAAA,MACJ,WACS,kBAAiB,OAAO,MAAM,GAAG;AACtC,aAAK,WAAW,OAAO;AACvB,aAAK,eAAe;AAAA,MACxB,OACK;AACD,
cAAM,IAAI,MAAM,+BAA+B;AAAA,MACnD;AAAA,IACJ;AACA,SAAK,WAAW;AAAA,EACpB;AAAA,EACA,iBAAiB;AACb,QAAI,KAAK,iBAAiB,QAAW;AACjC,WAAK,eAAe,mBAAmB,KAAK,UAAU,IAAI;AAAA,IAC9D;AACA,WAAO,KAAK;AAAA,EAChB;AAAA,EACA,WAAW,QAAQ;AACf,aAAS,KAAK,IAAI,KAAK,IAAI,QAAQ,KAAK,SAAS,MAAM,GAAG,CAAC;AAC3D,UAAM,cAAc,KAAK,eAAe;AACxC,QAAI,MAAM,GAAG,OAAO,YAAY;AAChC,QAAI,SAAS,GAAG;AACZ,aAAO,EAAE,MAAM,GAAG,WAAW,OAAO;AAAA,IACxC;AACA,WAAO,MAAM,MAAM;AACf,YAAM,MAAM,KAAK,OAAO,MAAM,QAAQ,CAAC;AACvC,UAAI,YAAY,GAAG,IAAI,QAAQ;AAC3B,eAAO;AAAA,MACX,OACK;AACD,cAAM,MAAM;AAAA,MAChB;AAAA,IACJ;AAGA,UAAM,OAAO,MAAM;AACnB,aAAS,KAAK,gBAAgB,QAAQ,YAAY,IAAI,CAAC;AACvD,WAAO,EAAE,MAAM,WAAW,SAAS,YAAY,IAAI,EAAE;AAAA,EACzD;AAAA,EACA,SAAS,UAAU;AACf,UAAM,cAAc,KAAK,eAAe;AACxC,QAAI,SAAS,QAAQ,YAAY,QAAQ;AACrC,aAAO,KAAK,SAAS;AAAA,IACzB,WACS,SAAS,OAAO,GAAG;AACxB,aAAO;AAAA,IACX;AACA,UAAM,aAAa,YAAY,SAAS,IAAI;AAC5C,QAAI,SAAS,aAAa,GAAG;AACzB,aAAO;AAAA,IACX;AACA,UAAM,iBAAkB,SAAS,OAAO,IAAI,YAAY,SAAU,YAAY,SAAS,OAAO,CAAC,IAAI,KAAK,SAAS;AACjH,UAAM,SAAS,KAAK,IAAI,aAAa,SAAS,WAAW,cAAc;AACvE,WAAO,KAAK,gBAAgB,QAAQ,UAAU;AAAA,EAClD;AAAA,EACA,gBAAgB,QAAQ,YAAY;AAChC,WAAO,SAAS,cAAc,MAAM,KAAK,SAAS,WAAW,SAAS,CAAC,CAAC,GAAG;AACvE;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EACA,IAAI,YAAY;AACZ,WAAO,KAAK,eAAe,EAAE;AAAA,EACjC;AAAA,EACA,OAAO,cAAc,OAAO;AACxB,UAAM,YAAY;AAClB,WAAO,cAAc,UAAa,cAAc,QAC5C,OAAO,UAAU,SAAS,YAAY,UAAU,UAAU,WACzD,UAAU,gBAAgB,UAAa,OAAO,UAAU,gBAAgB;AAAA,EACjF;AAAA,EACA,OAAO,OAAO,OAAO;AACjB,UAAM,YAAY;AAClB,WAAO,cAAc,UAAa,cAAc,QAC5C,OAAO,UAAU,SAAS,YAAY,UAAU,UAAU,UAAa,UAAU,gBAAgB;AAAA,EACzG;AACJ;AACO,IAAIC;AAAA,CACV,SAAUA,eAAc;AASrB,WAAS,OAAO,KAAK,YAAY,SAAS,SAAS;AAC/C,WAAO,IAAID,kBAAiB,KAAK,YAAY,SAAS,OAAO;AAAA,EACjE;AAFS;AAGT,EAAAC,cAAa,SAAS;AAUtB,WAAS,OAAO,UAAU,SAAS,SAAS;AACxC,QAAI,oBAAoBD,mBAAkB;AACtC,eAAS,OAAO,SAAS,OAAO;AAChC,aAAO;AAAA,IACX,OACK;AACD,YAAM,IAAI,MAAM,sEAAsE;AAAA,IAC1F;AAAA,EACJ;AARS;AAST,EAAAC,cAAa,SAAS;AACtB,WAAS,WAAW,UAAU,OAAO;AACjC,UAAM,OAAO,SAAS,QAAQ;AAC9B,UAAM,cAAc,UAAU,MAAM,IAAI,iBAAiB,GAAG,CAAC,GAAG,MAAM;AAClE,YAAM,OAAO,EAAE,MAAM,MAAM,OAAO,EAAE,MAAM,MAAM;AAChD,UAAI,SAAS,GAAG;AACZ,eAAO,EAAE,MAAM,MAAM,YAAY,EAAE,MAAM,MAAM;AAAA,MACnD;AACA,aAAO;AAAA,IACX,CAAC;AACD,QAAI,qBAAqB;AACzB,UAAM,QAAQ,CAAC;AACf,eAAW,KAAK,aAAa;AACzB,YAAM,cAAc,SAAS,SAAS,EAAE,MAAM,KAAK;AACnD,UAAI,cAAc,oBAAoB;AAClC,cAAM,IAAI,MAAM,kBAAkB;AAAA,MACtC,WACS,cAAc,oBAAoB;AACvC,cAAM,KAAK,KAAK,UAAU,oBAAoB,WAAW,CAAC;AAAA,MAC9D;AACA,UAAI,EAAE,QAAQ,QAAQ;AAClB,cAAM,KAAK,EAAE,OAAO;AAAA,MACxB;AACA,2BAAqB,SAAS,SAAS,EAAE,MAAM,GAAG;AAAA,IACtD;AACA,UAAM,KAAK,KAAK,OAAO,kBAAkB,CAAC;AAC1C,WAAO,MAAM,KAAK,EAAE;AAAA,EACxB;AA1BS;AA2BT,EAAAA,cAAa,aAAa;AAC9B,GAAGA,kBAAiBA,gBAAe,CAAC,EAAE;AACtC,SAAS,UAAU,MAAM,SAAS;AAC9B,MAAI,KAAK,UAAU,GAAG;AAElB,WAAO;AAAA,EACX;AACA,QAAM,IAAK,KAAK,SAAS,IAAK;AAC9B,QAAM,OAAO,KAAK,MAAM,GAAG,CAAC;AAC5B,QAAM,QAAQ,KAAK,MAAM,CAAC;AAC1B,YAAU,MAAM,OAAO;AACvB,YAAU,OAAO,OAAO;AACxB,MAAI,UAAU;AACd,MAAI,WAAW;AACf,MAAI,IAAI;AACR,SAAO,UAAU,KAAK,UAAU,WAAW,MAAM,QAAQ;AACrD,UAAM,MAAM,QAAQ,KAAK,OAAO,GAAG,MAAM,QAAQ,CAAC;AAClD,QAAI,OAAO,GAAG;AAEV,WAAK,GAAG,IAAI,KAAK,SAAS;AAAA,IAC9B,OACK;AAED,WAAK,GAAG,IAAI,MAAM,UAAU;AAAA,IAChC;AAAA,EACJ;AACA,SAAO,UAAU,KAAK,QAAQ;AAC1B,SAAK,GAAG,IAAI,KAAK,SAAS;AAAA,EAC9B;AACA,SAAO,WAAW,MAAM,QAAQ;AAC5B,SAAK,GAAG,IAAI,MAAM,UAAU;AAAA,EAChC;AACA,SAAO;AACX;AA/BS;AAgCT,SAAS,mBAAmB,MAAM,eAAe,aAAa,GAAG;AAC7D,QAAM,SAAS,gBAAgB,CAAC,UAAU,IAAI,CAAC;AAC/C,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AAClC,UAAM,KAAK,KAAK,WAAW,CAAC;AAC5B,QAAI,MAAM,EAAE,GAAG;AACX,UAAI,OAAO,MAAoC,IAAI,IAAI,KAAK,UAAU,KAAK,WAAW,IAAI,CAAC,MAAM,IAA4B;AACzH;AAAA,MACJ;AACA,aAAO,KAAK,aAAa,IAAI,CAAC;AAAA,IAClC;AAAA,EACJ;AACA,SAAO;AA
CX;AAZS;AAaT,SAAS,MAAM,MAAM;AACjB,SAAO,SAAS,MAAoC,SAAS;AACjE;AAFS;AAGT,SAAS,mBAAmB,OAAO;AAC/B,QAAM,QAAQ,MAAM;AACpB,QAAM,MAAM,MAAM;AAClB,MAAI,MAAM,OAAO,IAAI,QAAS,MAAM,SAAS,IAAI,QAAQ,MAAM,YAAY,IAAI,WAAY;AACvF,WAAO,EAAE,OAAO,KAAK,KAAK,MAAM;AAAA,EACpC;AACA,SAAO;AACX;AAPS;AAQT,SAAS,kBAAkB,UAAU;AACjC,QAAM,QAAQ,mBAAmB,SAAS,KAAK;AAC/C,MAAI,UAAU,SAAS,OAAO;AAC1B,WAAO,EAAE,SAAS,SAAS,SAAS,MAAM;AAAA,EAC9C;AACA,SAAO;AACX;AANS;;;;;;;ACvOT,aAASC,GAAWC,IAAAA;AAClB,UAAoB,YAAA,OAATA,GACT,OAAM,IAAIC,UAAU,qCAAqCC,KAAKC,UAAUH,EAAAA,CAAAA;IAE5E;AAJSD,WAAAA,IAAAA;AAOT,aAASK,GAAqBJ,IAAMK,IAAAA;AAMlC,eADIC,IAJAC,KAAM,IACNC,IAAoB,GACpBC,IAAAA,IACAC,IAAO,GAEFC,IAAI,GAAGA,KAAKX,GAAKY,QAAAA,EAAUD,GAAG;AACrC,YAAIA,IAAIX,GAAKY,OACXN,CAAAA,KAAON,GAAKa,WAAWF,CAAAA;aACpB;AAAA,cAAa,OAATL,GACP;AAEAA,UAAAA,KAAO;QAAQ;AACjB,YAAa,OAATA,IAAmB;AACrB,cAAIG,MAAcE,IAAI,KAAc,MAATD,EAAAA;mBAEhBD,MAAcE,IAAI,KAAc,MAATD,GAAY;AAC5C,gBAAIH,GAAIK,SAAS,KAA2B,MAAtBJ,KAA8D,OAAnCD,GAAIM,WAAWN,GAAIK,SAAS,CAAA,KAAsD,OAAnCL,GAAIM,WAAWN,GAAIK,SAAS,CAAA;AAC1H,kBAAIL,GAAIK,SAAS,GAAG;AAClB,oBAAIE,IAAiBP,GAAIQ,YAAY,GAAA;AACrC,oBAAID,MAAmBP,GAAIK,SAAS,GAAG;AAAA,yBACjCE,KACFP,KAAM,IACNC,IAAoB,KAGpBA,KADAD,KAAMA,GAAIS,MAAM,GAAGF,CAAAA,GACKF,SAAS,IAAIL,GAAIQ,YAAY,GAAA,GAEvDN,IAAYE,GACZD,IAAO;AACP;gBACF;cACF,WAA0B,MAAfH,GAAIK,UAA+B,MAAfL,GAAIK,QAAc;AAC/CL,gBAAAA,KAAM,IACNC,IAAoB,GACpBC,IAAYE,GACZD,IAAO;AACP;cACF;;AAEEL,YAAAA,OACEE,GAAIK,SAAS,IACfL,MAAO,QAEPA,KAAM,MACRC,IAAoB;UAExB,MACMD,CAAAA,GAAIK,SAAS,IACfL,MAAO,MAAMP,GAAKgB,MAAMP,IAAY,GAAGE,CAAAA,IAEvCJ,KAAMP,GAAKgB,MAAMP,IAAY,GAAGE,CAAAA,GAClCH,IAAoBG,IAAIF,IAAY;AAEtCA,cAAYE,GACZD,IAAO;QACT,MAAoB,QAATJ,MAAAA,OAAqBI,IAAAA,EAC5BA,IAEFA,IAAAA;MAEJ;AACA,aAAOH;IACT;AA/DSH,WAAAA,IAAAA;AA6ET,QAAIa,KAAQ,EAEVC,SAAS,kCAAA;AAKP,eAFIC,IAFAC,KAAe,IACfC,IAAAA,OAGKV,IAAIW,UAAUV,SAAS,GAAGD,KAAAA,MAAM,CAAMU,GAAkBV,KAAK;AACpE,YAAIX;AACAW,aAAK,IACPX,IAAOsB,UAAUX,CAAAA,KAAAA,WAEbQ,OACFA,KAAMI,QAAQJ,IAAAA,IAChBnB,IAAOmB,KAGTpB,GAAWC,CAAAA,GAGS,MAAhBA,EAAKY,WAITQ,KAAepB,IAAO,MAAMoB,IAC5BC,IAA0C,OAAvBrB,EAAKa,WAAW,CAAA;MACrC;AAQA,aAFAO,KAAehB,GAAqBgB,IAAAA,CAAeC,CAAAA,GAE/CA,IACED,GAAaR,SAAS,IACjB,MAAMQ,KAEN,MACAA,GAAaR,SAAS,IACxBQ,KAEA;IAEX,GA1CS,YA4CTI,WAAW,gCAAmBxB,IAAAA;AAG5B,UAFAD,GAAWC,EAAAA,GAES,MAAhBA,GAAKY,OAAc,QAAO;AAE9B,UAAIa,KAAoC,OAAvBzB,GAAKa,WAAW,CAAA,GAC7Ba,IAAyD,OAArC1B,GAAKa,WAAWb,GAAKY,SAAS,CAAA;AAQtD,aAHoB,OAFpBZ,KAAOI,GAAqBJ,IAAAA,CAAOyB,EAAAA,GAE1Bb,UAAiBa,OAAYzB,KAAO,MACzCA,GAAKY,SAAS,KAAKc,MAAmB1B,MAAQ,MAE9CyB,KAAmB,MAAMzB,KACtBA;IACT,GAhBW,cAkBXyB,YAAY,gCAAoBzB,IAAAA;AAE9B,aADAD,GAAWC,EAAAA,GACJA,GAAKY,SAAS,KAA4B,OAAvBZ,GAAKa,WAAW,CAAA;IAC5C,GAHY,eAKZc,MAAM,kCAAA;AACJ,UAAyB,MAArBL,UAAUV,OACZ,QAAO;AAET,eADIgB,IACKjB,KAAI,GAAGA,KAAIW,UAAUV,QAAAA,EAAUD,IAAG;AACzC,YAAIkB,IAAMP,UAAUX,EAAAA;AACpBZ,QAAAA,GAAW8B,CAAAA,GACPA,EAAIjB,SAAS,MAAA,WACXgB,KACFA,KAASC,IAETD,MAAU,MAAMC;MAEtB;AACA,aAAA,WAAID,KACK,MACFX,GAAMO,UAAUI,EAAAA;IACzB,GAjBM,SAmBNE,UAAU,gCAAkBC,IAAMC,IAAAA;AAIhC,UAHAjC,GAAWgC,EAAAA,GACXhC,GAAWiC,EAAAA,GAEPD,OAASC,GAAI,QAAO;AAKxB,WAHAD,KAAOd,GAAMC,QAAQa,EAAAA,QACrBC,KAAKf,GAAMC,QAAQc,EAAAA,GAEF,QAAO;AAIxB,eADIC,IAAY,GACTA,IAAYF,GAAKnB,UACa,OAA/BmB,GAAKlB,WAAWoB,CAAAA,GAAAA,EADYA,EAAAA;AASlC,eALIC,IAAUH,GAAKnB,QACfuB,IAAUD,IAAUD,GAGpBG,IAAU,GACPA,IAAUJ,GAAGpB,UACa,OAA3BoB,GAAGnB,WAAWuB,CAAAA,GAAAA,EADUA,EAAAA;AAW9B,eANIC,IADQL,GAAGpB,SACKwB,GAGhBxB,IAASuB,IAAUE,IAAQF,IAAUE,GACrCC,IAAAA,IACA3B,IAAI,GACDA,KAAKC,GAAAA,EAAUD,GAAG;AACvB,YAAIA,MAAMC,GAAQ;AAChB,cAAIyB,IAAQzB,GAAQ;AAClB,gBAAmC,OAA/BoB,GAAGnB,WAAWuB,IAAUzB,CAAAA,EAG1B,QAAOqB,GAAGhB,MAAMoB,IAAUzB,IAAI,CAAA;AACzB,gBAAU,MAANA,EAGT,QAAOqB,GAAGhB,M
AAMoB,IAAUzB,CAAAA;UAE9B,MAAWwB,KAAUvB,MACoB,OAAnCmB,GAAKlB,WAAWoB,IAAYtB,CAAAA,IAG9B2B,IAAgB3B,IACD,MAANA,MAGT2B,IAAgB;AAGpB;QACF;AACA,YAAIC,IAAWR,GAAKlB,WAAWoB,IAAYtB,CAAAA;AAE3C,YAAI4B,MADSP,GAAGnB,WAAWuB,IAAUzB,CAAAA,EAEnC;AACoB,eAAb4B,MACPD,IAAgB3B;MACpB;AAEA,UAAI6B,IAAM;AAGV,WAAK7B,IAAIsB,IAAYK,IAAgB,GAAG3B,KAAKuB,GAAAA,EAAWvB,EAClDA,OAAMuB,KAAkC,OAAvBH,GAAKlB,WAAWF,CAAAA,MAChB,MAAf6B,EAAI5B,SACN4B,KAAO,OAEPA,KAAO;AAMb,aAAIA,EAAI5B,SAAS,IACR4B,IAAMR,GAAGhB,MAAMoB,IAAUE,CAAAA,KAEhCF,KAAWE,GACoB,OAA3BN,GAAGnB,WAAWuB,CAAAA,KAAAA,EACdA,GACGJ,GAAGhB,MAAMoB,CAAAA;IAEpB,GAxFU,aA0FVK,WAAW,gCAAmBzC,IAAAA;AAC5B,aAAOA;IACT,GAFW,cAIX0C,SAAS,gCAAiB1C,IAAAA;AAExB,UADAD,GAAWC,EAAAA,GACS,MAAhBA,GAAKY,OAAc,QAAO;AAK9B,eAJIN,KAAON,GAAKa,WAAW,CAAA,GACvB8B,KAAmB,OAATrC,IACVsC,IAAAA,IACAC,IAAAA,MACKlC,IAAIX,GAAKY,SAAS,GAAGD,KAAK,GAAA,EAAKA,EAEtC,KAAa,QADbL,KAAON,GAAKa,WAAWF,CAAAA,IAAAA;AAEnB,YAAA,CAAKkC,GAAc;AACjBD,cAAMjC;AACN;QACF;MAAA,MAGFkC,KAAAA;AAIJ,aAAA,OAAID,IAAmBD,KAAU,MAAM,MACnCA,MAAmB,MAARC,IAAkB,OAC1B5C,GAAKgB,MAAM,GAAG4B,CAAAA;IACvB,GAvBS,YAyBTE,UAAU,gCAAkB9C,IAAM+C,IAAAA;AAChC,UAAA,WAAIA,MAAoC,YAAA,OAARA,GAAkB,OAAM,IAAI9C,UAAU,iCAAA;AACtEF,MAAAA,GAAWC,EAAAA;AAEX,UAGIW,IAHAqC,IAAQ,GACRJ,IAAAA,IACAC,IAAAA;AAGJ,UAAA,WAAIE,MAAqBA,GAAInC,SAAS,KAAKmC,GAAInC,UAAUZ,GAAKY,QAAQ;AACpE,YAAImC,GAAInC,WAAWZ,GAAKY,UAAUmC,OAAQ/C,GAAM,QAAO;AACvD,YAAIiD,IAASF,GAAInC,SAAS,GACtBsC,IAAAA;AACJ,aAAKvC,KAAIX,GAAKY,SAAS,GAAGD,MAAK,GAAA,EAAKA,IAAG;AACrC,cAAIL,IAAON,GAAKa,WAAWF,EAAAA;AAC3B,cAAa,OAATL,GAAAA;AAGA,gBAAA,CAAKuC,GAAc;AACjBG,kBAAQrC,KAAI;AACZ;YACF;UAAA,MAAA,QAEEuC,MAGFL,IAAAA,OACAK,IAAmBvC,KAAI,IAErBsC,KAAU,MAER3C,MAASyC,GAAIlC,WAAWoC,CAAAA,IAAAA,MACR,EAAZA,MAGJL,IAAMjC,OAKRsC,IAAAA,IACAL,IAAMM;QAId;AAGA,eADIF,MAAUJ,IAAKA,IAAMM,IAAAA,OAA0BN,MAAYA,IAAM5C,GAAKY,SACnEZ,GAAKgB,MAAMgC,GAAOJ,CAAAA;MAC3B;AACE,WAAKjC,KAAIX,GAAKY,SAAS,GAAGD,MAAK,GAAA,EAAKA,GAClC,KAA2B,OAAvBX,GAAKa,WAAWF,EAAAA,GAAAA;AAGhB,YAAA,CAAKkC,GAAc;AACjBG,cAAQrC,KAAI;AACZ;QACF;MAAA,MAAA,QACSiC,MAGXC,IAAAA,OACAD,IAAMjC,KAAI;AAId,aAAA,OAAIiC,IAAmB,KAChB5C,GAAKgB,MAAMgC,GAAOJ,CAAAA;IAE7B,GArEU,aAuEVO,SAAS,gCAAiBnD,IAAAA;AACxBD,MAAAA,GAAWC,EAAAA;AAQX,eAPIoD,KAAAA,IACAC,KAAY,GACZT,IAAAA,IACAC,IAAAA,MAGAS,IAAc,GACT3C,IAAIX,GAAKY,SAAS,GAAGD,KAAK,GAAA,EAAKA,GAAG;AACzC,YAAIL,IAAON,GAAKa,WAAWF,CAAAA;AAC3B,YAAa,OAATL,EAAAA,QASAsC,MAGFC,IAAAA,OACAD,IAAMjC,IAAI,IAEC,OAATL,IAAAA,OAEI8C,KACFA,KAAWzC,IACY,MAAhB2C,MACPA,IAAc,KAAA,OACTF,OAGTE,IAAAA;iBArBE,CAAKT,GAAc;AACjBQ,UAAAA,KAAY1C,IAAI;AAChB;QACF;MAoBN;AAEA,aAAA,OAAIyC,MAAAA,OAAmBR,KAEH,MAAhBU,KAEgB,MAAhBA,KAAqBF,OAAaR,IAAM,KAAKQ,OAAaC,KAAY,IACjE,KAEFrD,GAAKgB,MAAMoC,IAAUR,CAAAA;IAC9B,GA/CS,YAiDTW,QAAQ,gCAAgBC,IAAAA;AACtB,UAAmB,SAAfA,MAA6C,YAAA,OAAfA,GAChC,OAAM,IAAIvD,UAAU,qEAAA,OAA4EuD,EAAAA;AAElG,aAvVJ,SAAiBC,IAAKD,IAAAA;AACpB,YAAIE,KAAMF,GAAWE,OAAOF,GAAWG,MACnCC,KAAOJ,GAAWI,SAASJ,GAAWK,QAAQ,OAAOL,GAAWT,OAAO;AAC3E,eAAKW,KAGDA,OAAQF,GAAWG,OACdD,KAAME,KAERF,KA8UU,MA9UEE,KALVA;MAMX,EA6UmB,GAAKJ,EAAAA;IACtB,GALQ,WAORM,OAAO,gCAAe9D,IAAAA;AACpBD,MAAAA,GAAWC,EAAAA;AAEX,UAAI+D,KAAM,EAAEJ,MAAM,IAAID,KAAK,IAAIE,MAAM,IAAIb,KAAK,IAAIc,MAAM,GAAA;AACxD,UAAoB,MAAhB7D,GAAKY,OAAc,QAAOmD;AAC9B,UAEIf,IAFA1C,IAAON,GAAKa,WAAW,CAAA,GACvBY,IAAsB,OAATnB;AAEbmB,WACFsC,GAAIJ,OAAO,KACXX,KAAQ,KAERA,KAAQ;AAaV,eAXII,IAAAA,IACAC,IAAY,GACZT,IAAAA,IACAC,IAAAA,MACAlC,IAAIX,GAAKY,SAAS,GAIlB0C,IAAc,GAGX3C,KAAKqC,IAAAA,EAASrC,EAEnB,KAAa,QADbL,IAAON,GAAKa,WAAWF,CAAAA,GAAAA,QAUnBiC,MAGFC,IAAAA,OACAD,IAAMjC,IAAI,IAEC,OAATL,IAAAA,OAEI8C,IAAiBA,IAAWzC,IAA2B,MAAhB2C,MAAmBA,IAAc,KAAA,OACnEF,MAGXE,IAAAA;eAlBE,CAAKT,GAAc;AACjBQ,YAAY1C,IAAI;AAChB;MACF;
AAwCN,aAAA,OArBIyC,KAAAA,OAAmBR,KAEP,MAAhBU,KAEgB,MAAhBA,KAAqBF,MAAaR,IAAM,KAAKQ,MAAaC,IAAY,IAAA,OAChET,MACiCmB,GAAIH,OAAOG,GAAIF,OAAhC,MAAdR,KAAmB5B,IAAkCzB,GAAKgB,MAAM,GAAG4B,CAAAA,IAAgC5C,GAAKgB,MAAMqC,GAAWT,CAAAA,MAG7G,MAAdS,KAAmB5B,KACrBsC,GAAIF,OAAO7D,GAAKgB,MAAM,GAAGoC,CAAAA,GACzBW,GAAIH,OAAO5D,GAAKgB,MAAM,GAAG4B,CAAAA,MAEzBmB,GAAIF,OAAO7D,GAAKgB,MAAMqC,GAAWD,CAAAA,GACjCW,GAAIH,OAAO5D,GAAKgB,MAAMqC,GAAWT,CAAAA,IAEnCmB,GAAIhB,MAAM/C,GAAKgB,MAAMoC,GAAUR,CAAAA,IAG7BS,IAAY,IAAGU,GAAIL,MAAM1D,GAAKgB,MAAM,GAAGqC,IAAY,CAAA,IAAY5B,MAAYsC,GAAIL,MAAM,MAElFK;IACT,GA1EO,UA4EPN,KAAK,KACLO,WAAW,KACXC,OAAO,MACPhD,OAAO,KAAA;AAGTA,IAAAA,GAAMA,QAAQA,IAEdiD,GAAOC,UAAUlD;EAAAA,EAAAA,GC/gBbmD,IAA2B,CAAC;AAGhC,WAASC,EAAoBC,IAAAA;AAE5B,QAAIC,IAAeH,EAAyBE,EAAAA;AAC5C,QAAA,WAAIC,EACH,QAAOA,EAAaJ;AAGrB,QAAID,IAASE,EAAyBE,EAAAA,IAAY,EAGjDH,SAAS,CAAC,EAAA;AAOX,WAHAK,EAAoBF,EAAAA,EAAUJ,GAAQA,EAAOC,SAASE,CAAAA,GAG/CH,EAAOC;EACf;AAlBSE;ACHTA,IAAoBI,IAAI,CAACN,IAASO,OAAAA;AACjC,aAAQC,MAAOD,GACXL,GAAoBO,EAAEF,IAAYC,EAAAA,KAAAA,CAASN,EAAoBO,EAAET,IAASQ,EAAAA,KAC5EE,OAAOC,eAAeX,IAASQ,IAAK,EAAEI,YAAAA,MAAkBC,KAAKN,GAAWC,EAAAA,EAAAA,CAAAA;EAE1E,GCNDN,EAAoBO,IAAI,CAACK,IAAKC,OAAUL,OAAOM,UAAUC,eAAeC,KAAKJ,IAAKC,EAAAA,GCClFb,EAAoBiB,IAAKnB,CAAAA,OAAAA;AACH,mBAAA,OAAXoB,UAA0BA,OAAOC,eAC1CX,OAAOC,eAAeX,IAASoB,OAAOC,aAAa,EAAEC,OAAO,SAAA,CAAA,GAE7DZ,OAAOC,eAAeX,IAAS,cAAc,EAAEsB,OAAAA,KAAO,CAAA;EAAO;AAAA,MAAA,IAAA,CAAA;AAAA,GAAA,MAAA;ACQvD,QAAIC;AAEX,QAAA,EAAA,EAAA,CAAA,GAAA,EAAA,EAAA,GAAA,EAAA,KAAA,6BAAA,GAAA,QAAA,OAAA,6BAAA,GAAA,SAAA,CAAA,GAAuB,YAAA,OAAZnE,QACVmE,CAAAA,KAAiC,YAArBnE,QAAQoE;aACW,YAAA,OAAdC,WAAwB;AACzC,UAAIC,KAAYD,UAAUC;AAC1BH,MAAAA,KAAYG,GAAUC,QAAQ,SAAA,KAAc;IAAA;ACV7C,UAAMC,KAAiB,kBACjBC,IAAoB,OACpBC,IAAoB;AAE1B,aAASC,EAAanC,IAAUoC,IAAAA;AAG/B,UAAA,CAAKpC,GAAIqC,UAAUD,GAClB,OAAM,IAAIE,MAAM,2DAA2DtC,GAAIuC,SAAAA,aAAsBvC,GAAI/D,IAAAA,cAAkB+D,GAAIwC,KAAAA,iBAAsBxC,GAAIyC,QAAAA,IAAAA;AAK1J,UAAIzC,GAAIqC,UAAAA,CAAWL,GAAeU,KAAK1C,GAAIqC,MAAAA,EAC1C,OAAM,IAAIC,MAAM,iDAAA;AAQjB,UAAItC,GAAI/D;AACP,YAAI+D,GAAIuC,WAAAA;AACP,cAAA,CAAKN,EAAkBS,KAAK1C,GAAI/D,IAAAA,EAC/B,OAAM,IAAIqG,MAAM,0IAAA;QAAA,WAGbJ,EAAkBQ,KAAK1C,GAAI/D,IAAAA,EAC9B,OAAM,IAAIqG,MAAM,2HAAA;;IAIpB;AA7BSH;AA+DT,UAAMQ,IAAS,IACTC,IAAS,KACTC,IAAU;IAkBT,MAAMC,EAAAA;aAAAA;;;MAEZ,OAAA,MAAaC,IAAAA;AACZ,eAAIA,cAAiBD,KAAAA,CAAAA,CAGhBC,MAGoC,YAAA,OAArBA,GAAOR,aACU,YAAA,OAApBQ,GAAON,YACS,YAAA,OAAhBM,GAAO9G,QACU,YAAA,OAAjB8G,GAAOP,SACW,YAAA,OAAlBO,GAAOV,UACW,YAAA,OAAlBU,GAAOC,UACS,cAAA,OAAhBD,GAAOE,QACa,cAAA,OAApBF,GAAOG;MACzB;MAMSb;MAMAE;MAKAtG;MAKAuG;MAKAC;MAeT,YAAsBU,IAAsCZ,IAAoBtG,IAAeuG,IAAgBC,IAAmBL,KAAAA,OAAmB;AAExH,oBAAA,OAAjBe,MACVC,KAAKf,SAASc,GAAad,UAAUM,GACrCS,KAAKb,YAAYY,GAAaZ,aAAaI,GAC3CS,KAAKnH,OAAOkH,GAAalH,QAAQ0G,GACjCS,KAAKZ,QAAQW,GAAaX,SAASG,GACnCS,KAAKX,WAAWU,GAAaV,YAAYE,MAKzCS,KAAKf,SAvHR,yBAAoBA,IAAgBD,IAAAA;AACnC,iBAAKC,MAAWD,KAGTC,KAFC;QAGT,EAkH4Bc,IAAcf,EAAAA,GACvCgB,KAAKb,YAAYA,MAAaI,GAC9BS,KAAKnH,OAjHR,SAA8BoG,IAAgBpG,IAAAA;AAM7C,kBAAQoG,IAAAA;YACP,KAAK;YACL,KAAK;YACL,KAAK;AACCpG,cAAAA,KAEMA,GAAK,CAAA,MAAO2G,MACtB3G,KAAO2G,IAAS3G,MAFhBA,KAAO2G;UAAAA;AAMV,iBAAO3G;QACR,EA+FoCmH,KAAKf,QAAQpG,MAAQ0G,CAAAA,GACtDS,KAAKZ,QAAQA,MAASG,GACtBS,KAAKX,WAAWA,MAAYE,GAE5BR,EAAaiB,MAAMhB,EAAAA;MAErB;MA4BA,IAAA,SAAIY;AAIH,eAAOK,EAAYD,MAAAA,KAAM;MAC1B;MAIA,KAAKE,IAAAA;AAEJ,YAAA,CAAKA,GACJ,QAAOF;AAGR,YAAA,EAAI,QAAEf,IAAM,WAAEE,IAAS,MAAEtG,IAAI,OAAEuG,IAAK,UAAEC,GAAAA,IAAaa;AA2BnD,eAAA,WA1BIjB,KACHA,KAASe,KAAKf,SACO,SAAXA,OACVA,KAASM,IAAAA,WAENJ,KACHA,KAAYa,KAAKb,YACO,SAAdA,OACVA,KAAYI,IAAAA,WAET1G,KACHA,KAAOmH,KAAKnH,OACO,SAATA,OACVA,KAAO0G,IAAAA,WAEJH,KACHA,K
AAQY,KAAKZ,QACO,SAAVA,OACVA,KAAQG,IAAAA,WAELF,KACHA,KAAWW,KAAKX,WACO,SAAbA,OACVA,KAAWE,IAGRN,OAAWe,KAAKf,UAChBE,OAAca,KAAKb,aACnBtG,OAASmH,KAAKnH,QACduG,OAAUY,KAAKZ,SACfC,OAAaW,KAAKX,WAEdW,OAGD,IAAIG,EAAIlB,IAAQE,IAAWtG,IAAMuG,IAAOC,EAAAA;MAChD;MAUA,OAAA,MAAaf,IAAeU,KAAAA,OAAmB;AAC9C,cAAMoB,KAAQX,EAAQY,KAAK/B,EAAAA;AAC3B,eAAK8B,KAGE,IAAID,EACVC,GAAM,CAAA,KAAMb,GACZe,EAAcF,GAAM,CAAA,KAAMb,CAAAA,GAC1Be,EAAcF,GAAM,CAAA,KAAMb,CAAAA,GAC1Be,EAAcF,GAAM,CAAA,KAAMb,CAAAA,GAC1Be,EAAcF,GAAM,CAAA,KAAMb,CAAAA,GAC1BP,EAAAA,IARO,IAAImB,EAAIZ,GAAQA,GAAQA,GAAQA,GAAQA,CAAAA;MAUjD;MAuBA,OAAA,KAAY1G,IAAAA;AAEX,YAAIsG,KAAYI;AAWhB,YANIhB,OACH1F,KAAOA,GAAK0H,QAAQ,OAAOf,CAAAA,IAKxB3G,GAAK,CAAA,MAAO2G,KAAU3G,GAAK,CAAA,MAAO2G,GAAQ;AAC7C,gBAAMgB,KAAM3H,GAAK8F,QAAQa,GAAQ,CAAA;AAAA,iBAC7BgB,MACHrB,KAAYtG,GAAK4H,UAAU,CAAA,GAC3B5H,KAAO2G,MAEPL,KAAYtG,GAAK4H,UAAU,GAAGD,EAAAA,GAC9B3H,KAAOA,GAAK4H,UAAUD,EAAAA,KAAQhB;QAAAA;AAIhC,eAAO,IAAIW,EAAI,QAAQhB,IAAWtG,IAAM0G,GAAQA,CAAAA;MACjD;MAEA,OAAA,KAAYmB,IAAAA;AACX,cAAMC,KAAS,IAAIR,EAClBO,GAAWzB,QACXyB,GAAWvB,WACXuB,GAAW7H,MACX6H,GAAWtB,OACXsB,GAAWrB,QAAAA;AAGZ,eADAN,EAAa4B,IAAAA,IAAQ,GACdA;MACR;MAeA,SAASC,KAAAA,OAAwB;AAChC,eAAOC,EAAab,MAAMY,EAAAA;MAC3B;MAEA,SAAAE;AACC,eAAOd;MACR;MAMA,OAAA,OAAce,IAAAA;AACb,YAAKA,IAEE;AAAA,cAAIA,cAAgBrB,EAC1B,QAAOqB;AACD;AACN,kBAAMJ,KAAS,IAAIR,EAAIY,EAAAA;AAGvB,mBAFAJ,GAAOK,aAAwBD,GAAME,UACrCN,GAAOO,UAAqBH,GAAMI,SAASC,IAA4BL,GAAMnB,SAAS,MAC/Ee;UAAAA;QAAAA;AAPP,eAAYI;MASd;IAAA;AAkBD,UAAMK,IAAiB7C,KAAY,IAAA;IAGnC,MAAM4B,UAAYT,EAAAA;aAAAA;;;MAEjBsB,aAA4B;MAC5BE,UAAyB;MAEzB,IAAA,SAAatB;AAIZ,eAHKI,KAAKkB,YACTlB,KAAKkB,UAAUjB,EAAYD,MAAAA,KAAM,IAE3BA,KAAKkB;MACb;MAES,SAASN,KAAAA,OAAwB;AACzC,eAAKA,KAOGC,EAAab,MAAAA,IAAM,KANrBA,KAAKgB,eACThB,KAAKgB,aAAaH,EAAab,MAAAA,KAAM,IAE/BA,KAAKgB;MAKd;MAES,SAAAF;AACR,cAAM1H,KAAgB,EACrBiI,MAAM,EAAA;AA0BP,eAvBIrB,KAAKkB,YACR9H,GAAIwG,SAASI,KAAKkB,SAClB9H,GAAI+H,OAAOC,IAERpB,KAAKgB,eACR5H,GAAI6H,WAAWjB,KAAKgB,aAGjBhB,KAAKnH,SACRO,GAAIP,OAAOmH,KAAKnH,OAEbmH,KAAKf,WACR7F,GAAI6F,SAASe,KAAKf,SAEfe,KAAKb,cACR/F,GAAI+F,YAAYa,KAAKb,YAElBa,KAAKZ,UACRhG,GAAIgG,QAAQY,KAAKZ,QAEdY,KAAKX,aACRjG,GAAIiG,WAAWW,KAAKX,WAEdjG;MACR;IAAA;AAID,UAAMkI,IAAwC,EAC7C,IAAkB,OAClB,IAAkB,OAClB,IAAyB,OACzB,IAAiB,OACjB,IAA8B,OAC9B,IAA+B,OAC/B,IAAmB,OAEnB,IAA4B,OAC5B,IAAuB,OACvB,IAAsB,OACtB,IAAwB,OACxB,IAAsB,OACtB,IAAuB,OACvB,IAAqB,OACrB,IAAiB,OACjB,IAAkB,OAClB,IAAsB,OACtB,IAAmB,OAEnB,IAAkB,MAAA;AAGnB,aAASC,EAAuBC,IAAsBC,IAAiBC,IAAAA;AACtE,UAAItI,IACAuI,KAAAA;AAEJ,eAASC,KAAM,GAAGA,KAAMJ,GAAa/H,QAAQmI,MAAO;AACnD,cAAMzI,KAAOqI,GAAa9H,WAAWkI,EAAAA;AAGrC,YACEzI,MAAQ,MAAcA,MAAQ,OAC3BA,MAAQ,MAAcA,MAAQ,MAC9BA,MAAQ,MAAmBA,MAAQ,MAC3B,OAATA,MACS,OAATA,MACS,OAATA,MACS,QAATA,MACCsI,MAAmB,OAATtI,MACVuI,MAAwB,OAATvI,MACfuI,MAAwB,OAATvI,MACfuI,MAAwB,OAATvI,GAAAA,QAGfwI,OACHvI,MAAOyI,mBAAmBL,GAAaf,UAAUkB,IAAiBC,EAAAA,CAAAA,GAClED,KAAAA,KAAmB,WAGhBvI,OACHA,MAAOoI,GAAaM,OAAOF,EAAAA;aAGtB;AAAA,qBAEFxI,OACHA,KAAMoI,GAAaO,OAAO,GAAGH,EAAAA;AAI9B,gBAAMI,KAAUV,EAAYnI,EAAAA;AAAAA,qBACxB6I,MAAAA,OAGCL,OACHvI,MAAOyI,mBAAmBL,GAAaf,UAAUkB,IAAiBC,EAAAA,CAAAA,GAClED,KAAAA,KAIDvI,MAAO4I,MAAAA,OAEGL,OAEVA,KAAkBC;QAAAA;MAAAA;AASrB,aAAA,OAJID,OACHvI,MAAOyI,mBAAmBL,GAAaf,UAAUkB,EAAAA,CAAAA,IAAAA,WAG3CvI,KAAoBA,KAAMoI;IAClC;AA9DSD;AAgET,aAASU,EAA0BpJ,IAAAA;AAClC,UAAIO;AACJ,eAASwI,KAAM,GAAGA,KAAM/I,GAAKY,QAAQmI,MAAO;AAC3C,cAAMzI,KAAON,GAAKa,WAAWkI,EAAAA;AAChB,eAATzI,MAAmC,OAATA,MAAAA,WACzBC,OACHA,KAAMP,GAAKkJ,OAAO,GAAGH,EAAAA,IAEtBxI,MAAOkI,EAAYnI,EAAAA,KAAAA,WAEfC,OACHA,MAAOP,GAAK+I,EAAAA;MAAAA;AAIf,aAAA,WAAOxI,KAAoBA,KAAMP;IAClC;AAhBSoJ;AAqBF,aAAShC,EAAYiC,IAAUC,IAAAA;AAErC,UAAI7D;AAsBJ,a
AnBCA,KAFG4D,GAAI/C,aAAa+C,GAAIrJ,KAAKY,SAAS,KAAoB,WAAfyI,GAAIjD,SAEvC,KAAKiD,GAAI/C,SAAAA,GAAY+C,GAAIrJ,IAAAA,KAEN,OAA3BqJ,GAAIrJ,KAAKa,WAAW,CAAA,MAChBwI,GAAIrJ,KAAKa,WAAW,CAAA,KAAM,MAAcwI,GAAIrJ,KAAKa,WAAW,CAAA,KAAM,MAAcwI,GAAIrJ,KAAKa,WAAW,CAAA,KAAM,MAAcwI,GAAIrJ,KAAKa,WAAW,CAAA,KAAM,QACxH,OAA3BwI,GAAIrJ,KAAKa,WAAW,CAAA,IAElByI,KAIID,GAAIrJ,KAAKkJ,OAAO,CAAA,IAFhBG,GAAIrJ,KAAK,CAAA,EAAGuJ,YAAAA,IAAgBF,GAAIrJ,KAAKkJ,OAAO,CAAA,IAM7CG,GAAIrJ,MAET0F,OACHD,KAAQA,GAAMiC,QAAQ,OAAO,IAAA,IAEvBjC;IACR;AAzBgB2B;AA8BhB,aAASY,EAAaqB,IAAUtB,IAAAA;AAE/B,YAAMyB,KAAWzB,KAEdqB,IADAV;AAGH,UAAInI,KAAM,IAAA,EACN,QAAE6F,IAAM,WAAEE,IAAS,MAAEtG,IAAI,OAAEuG,IAAK,UAAEC,GAAAA,IAAa6C;AASnD,UARIjD,OACH7F,MAAO6F,IACP7F,MAAO,OAEJ+F,MAAwB,WAAXF,QAChB7F,MAAOoG,GACPpG,MAAOoG,IAEJL,IAAW;AACd,YAAIqB,KAAMrB,GAAUR,QAAQ,GAAA;AAC5B,YAAA,OAAI6B,IAAY;AAEf,gBAAM8B,KAAWnD,GAAU4C,OAAO,GAAGvB,EAAAA;AACrCrB,UAAAA,KAAYA,GAAU4C,OAAOvB,KAAM,CAAA,GACnCA,KAAM8B,GAAS1I,YAAY,GAAA,GAAA,OACvB4G,KACHpH,MAAOiJ,GAAQC,IAAAA,OAAU,KAAO,KAGhClJ,MAAOiJ,GAAQC,GAASP,OAAO,GAAGvB,EAAAA,GAAAA,OAAM,KAAO,GAC/CpH,MAAO,KACPA,MAAOiJ,GAAQC,GAASP,OAAOvB,KAAM,CAAA,GAAA,OAAI,IAAO,IAEjDpH,MAAO;QAAA;AAER+F,QAAAA,KAAYA,GAAUiD,YAAAA,GACtB5B,KAAMrB,GAAUvF,YAAY,GAAA,GAAA,OACxB4G,KACHpH,MAAOiJ,GAAQlD,IAAAA,OAAW,IAAO,KAGjC/F,MAAOiJ,GAAQlD,GAAU4C,OAAO,GAAGvB,EAAAA,GAAAA,OAAM,IAAO,GAChDpH,MAAO+F,GAAU4C,OAAOvB,EAAAA;MAAAA;AAG1B,UAAI3H,IAAM;AAET,YAAIA,GAAKY,UAAU,KAA4B,OAAvBZ,GAAKa,WAAW,CAAA,KAAgD,OAAvBb,GAAKa,WAAW,CAAA,GAAuB;AACvG,gBAAMP,KAAON,GAAKa,WAAW,CAAA;AACzBP,UAAAA,MAAQ,MAAcA,MAAQ,OACjCN,KAAO,IAAI0J,OAAOC,aAAarJ,KAAO,EAAA,CAAA,IAAON,GAAKkJ,OAAO,CAAA,CAAA;QAAA,WAEhDlJ,GAAKY,UAAU,KAA4B,OAAvBZ,GAAKa,WAAW,CAAA,GAAuB;AACrE,gBAAMP,KAAON,GAAKa,WAAW,CAAA;AACzBP,UAAAA,MAAQ,MAAcA,MAAQ,OACjCN,KAAO,GAAG0J,OAAOC,aAAarJ,KAAO,EAAA,CAAA,IAAON,GAAKkJ,OAAO,CAAA,CAAA;QAAA;AAI1D3I,QAAAA,MAAOiJ,GAAQxJ,IAAAA,MAAM,KAAM;MAAA;AAU5B,aARIuG,OACHhG,MAAO,KACPA,MAAOiJ,GAAQjD,IAAAA,OAAO,KAAO,IAE1BC,OACHjG,MAAO,KACPA,MAAQwH,KAAgEvB,KAAjDkC,EAAuBlC,IAAAA,OAAU,KAAO,IAEzDjG;IACR;AApESyH;AAwET,aAAS4B,EAA2BC,IAAAA;AACnC,UAAA;AACC,eAAOC,mBAAmBD,EAAAA;MAAAA,QACzB;AACD,eAAIA,GAAIjJ,SAAS,IACTiJ,GAAIX,OAAO,GAAG,CAAA,IAAKU,EAA2BC,GAAIX,OAAO,CAAA,CAAA,IAEzDW;MAAAA;IAGV;AAVSD;AAYT,UAAMG,IAAiB;AAEvB,aAAStC,EAAcoC,IAAAA;AACtB,aAAKA,GAAItC,MAAMwC,CAAAA,IAGRF,GAAInC,QAAQqC,GAAiBxC,CAAAA,OAAUqC,EAA2BrC,EAAAA,CAAAA,IAFjEsC;IAGT;AALSpC;AAKT,QAAA,IAAA,EAAA,GAAA;ACjqBA,UAAMuC,IAAY,EAAA,SAAkB,GAC9BC,IAAQ;AAEP,QAAUC;AAAAA,KAAjB,SAAiBA,IAAAA;AAeG,MAAAC,GAAAC,WAAhB,SAAyBf,OAAagB,IAAAA;AAClC,eAAOhB,GAAIrC,KAAK,EAAEhH,MAAMgK,EAAUrI,KAAK0H,GAAIrJ,MAAAA,GAASqK,EAAAA,EAAAA,CAAAA;MACxD,GAgBgBF,GAAAG,cAAhB,SAA4BjB,OAAagB,IAAAA;AACrC,YAAIrK,KAAOqJ,GAAIrJ,MACXuK,KAAAA;AACAvK,QAAAA,GAAK,CAAA,MAAOiK,MACZjK,KAAOiK,IAAQjK,IACfuK,KAAAA;AAEJ,YAAInJ,KAAe4I,EAAU9I,QAAQlB,IAAAA,GAASqK,EAAAA;AAI9C,eAHIE,MAAcnJ,GAAa,CAAA,MAAO6I,KAAAA,CAAUZ,GAAI/C,cAChDlF,KAAeA,GAAawG,UAAU,CAAA,IAEnCyB,GAAIrC,KAAK,EAAEhH,MAAMoB,GAAAA,CAAAA;MAC5B,GAUgB+I,GAAAzH,UAAhB,SAAwB2G,IAAAA;AACpB,YAAwB,MAApBA,GAAIrJ,KAAKY,UAAgByI,GAAIrJ,SAASiK,EACtC,QAAOZ;AAEX,YAAIrJ,KAAOgK,EAAUtH,QAAQ2G,GAAIrJ,IAAAA;AAIjC,eAHoB,MAAhBA,GAAKY,UAAuC,OAAvBZ,GAAKa,WAAW,CAAA,MACrCb,KAAO,KAEJqJ,GAAIrC,KAAK,EAAEhH,MAAAA,GAAAA,CAAAA;MACtB,GAUgBmK,GAAArH,WAAhB,SAAyBuG,IAAAA;AACrB,eAAOW,EAAUlH,SAASuG,GAAIrJ,IAAAA;MAClC,GAUgBmK,GAAAhH,UAAhB,SAAwBkG,IAAAA;AACpB,eAAOW,EAAU7G,QAAQkG,GAAIrJ,IAAAA;MACjC;IACH,EAzFgBkK,MAAAA,IAAK,CAAA,EAAA;EAAA,GAAA,GAAA,MAAA;AAAA,GAAA;AAAA,IAAA,EAAA,KAAArD,MAAA,MAAA,IAAA;;;ACJhB,IAAW;CAAjB,SAAiB2D,WAAQ;AAER,EAAAA,UAAA,WAAW,MAAM;AACjB,EAAAA,UAAA,UAAU,MAAM;AAChB,EAAAA,UAAA,UAA
U,MAAM;AAChB,EAAAA,UAAA,WAAW,MAAM;AACjB,EAAAA,UAAA,cAAc,MAAM;AAEjC,WAAgB,OAAO,GAAkB,GAAgB;AACrD,YAAO,MAAC,QAAD,MAAC,SAAA,SAAD,EAAG,SAAQ,QAAO,MAAC,QAAD,MAAC,SAAA,SAAD,EAAG,SAAQ;EACxC;AAFgB;AAAA,EAAAA,UAAA,SAAM;AAItB,WAAgB,SAAS,MAAoB,IAAgB;AACzD,UAAM,WAAW,OAAO,SAAS,WAAW,OAAO,KAAK;AACxD,UAAM,SAAS,OAAO,OAAO,WAAW,KAAK,GAAG;AAChD,UAAM,YAAY,SAAS,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAC9D,UAAM,UAAU,OAAO,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAC1D,QAAI,IAAI;AACR,WAAO,IAAI,UAAU,QAAQ,KAAK;AAC9B,UAAI,UAAU,CAAC,MAAM,QAAQ,CAAC,GAAG;AAC7B;;;AAGR,UAAM,WAAW,MAAM,OAAO,UAAU,SAAS,CAAC;AAClD,UAAM,SAAS,QAAQ,MAAM,CAAC,EAAE,KAAK,GAAG;AACxC,WAAO,WAAW;EACtB;AAdgB;AAAA,EAAAA,UAAA,WAAQ;AAgB5B,GA5BiB,aAAA,WAAQ,CAAA,EAAA;;;AC2CzB,IAAY;CAAZ,SAAYC,gBAAa;AAKrB,EAAAA,eAAAA,eAAA,SAAA,IAAA,CAAA,IAAA;AAMA,EAAAA,eAAAA,eAAA,QAAA,IAAA,CAAA,IAAA;AAKA,EAAAA,eAAAA,eAAA,gBAAA,IAAA,CAAA,IAAA;AAQA,EAAAA,eAAAA,eAAA,gBAAA,IAAA,CAAA,IAAA;AAKA,EAAAA,eAAAA,eAAA,QAAA,IAAA,CAAA,IAAA;AAMA,EAAAA,eAAAA,eAAA,mBAAA,IAAA,CAAA,IAAA;AAKA,EAAAA,eAAAA,eAAA,WAAA,IAAA,CAAA,IAAA;AACJ,GAzCY,kBAAA,gBAAa,CAAA,EAAA;AA8GnB,IAAO,gCAAP,MAAoC;EAnK1C,OAmK0C;;;EAMtC,YAAY,UAAmC;AAC3C,SAAK,kBAAkB,SAAS;AAChC,SAAK,gBAAgB,SAAS,UAAU;AACxC,SAAK,qBAAqB,SAAS,UAAU;EACjD;EAEA,MAAM,QAAqC,KAAU,oBAAoB,uCAAkB,MAAI;AAC3F,UAAM,UAAU,MAAM,KAAK,mBAAmB,SAAS,GAAG;AAC1D,WAAO,KAAK,YAAe,KAAK,SAAS,iBAAiB;EAC9D;EAIA,iBAA8C,cAA4B,KAAW,mBAAqC;AACtH,UAAM,QAAG,QAAH,QAAG,SAAH,MAAOC,KAAI,MAAM,aAAa,GAAG;AACvC,QAAI,mBAAmB;AACnB,aAAO,KAAK,YAAe,KAAK,cAAc,iBAAiB;WAC5D;AACH,aAAO,KAAK,OAAU,KAAK,YAAY;;EAE/C;EAIA,WAAwC,MAAc,KAAU,mBAAqC;AACjG,QAAI,mBAAmB;AACnB,aAAO,KAAK,YAAe,KAAK,MAAM,iBAAiB;WACpD;AACH,aAAO,KAAK,OAAU,KAAK,IAAI;;EAEvC;EAEA,UAAuC,OAAU,KAAQ;AACrD,WAAO,KAAK,OAAU,KAAK,EAAE,QAAQ,MAAK,CAAE;EAChD;EAEU,OAAoC,KAAU,SAA8C;AAClG,QAAI,OAAO,YAAY,UAAU;AAC7B,YAAM,cAAc,KAAK,MAAS,KAAK,OAAO;AAC9C,aAAO,KAAK,sBAAyB,aAAa,KAAK,QAAW,OAAO;eAElE,YAAY,SAAS;AAC5B,YAAM,cAAc,EAAE,OAAO,QAAQ,QAAQ,cAAc,CAAA,GAAI,aAAa,CAAA,EAAE;AAC9E,aAAO,KAAK,sBAAyB,aAAa,GAAG;WAElD;AACH,YAAM,cAAc,KAAK,MAAS,KAAK,QAAQ,QAAO,CAAE;AACxD,aAAO,KAAK,sBAAsB,aAAa,KAAK,OAAO;;EAEnE;EAEU,MAAM,YAAyC,KAAU,SAAgC,aAA8B;AAC7H,QAAI,OAAO,YAAY,UAAU;AAC7B,YAAM,cAAc,MAAM,KAAK,WAAc,KAAK,SAAS,WAAW;AACtE,aAAO,KAAK,sBAAyB,aAAa,KAAK,QAAW,OAAO;WACtE;AACH,YAAM,cAAc,MAAM,KAAK,WAAc,KAAK,QAAQ,QAAO,GAAI,WAAW;AAChF,aAAO,KAAK,sBAAsB,aAAa,KAAK,OAAO;;EAEnE;;;;;;;;;;;;EAaU,sBAAmD,aAA6B,KAAU,cAA6B,MAAa;AAC1I,QAAI;AACJ,QAAI,cAAc;AACd,iBAAW;QACP;QACA;QACA,OAAO,cAAc;QACrB,YAAY,CAAA;QACZ;;WAED;AACH,YAAM,qBAAqB,KAAK,yBAAyB,KAAK,IAAI;AAClE,iBAAW;QACP;QACA;QACA,OAAO,cAAc;QACrB,YAAY,CAAA;QACZ,IAAI,eAAY;AACZ,iBAAO,mBAAkB;QAC7B;;;AAGP,gBAAY,MAA2B,YAAY;AACpD,WAAO;EACX;EAEA,MAAM,OAAoC,UAAuC,mBAAoC;;AAEjH,UAAM,WAAU,KAAA,SAAS,YAAY,MAAM,cAAQ,QAAA,OAAA,SAAA,SAAA,GAAE,KAAK;AAC1D,UAAM,gBAAe,KAAA,KAAK,mBAAa,QAAA,OAAA,SAAA,SAAA,GAAE,IAAI,SAAS,IAAI,SAAQ,CAAE;AACpE,UAAM,OAAO,eAAe,aAAa,QAAO,IAAK,MAAM,KAAK,mBAAmB,SAAS,SAAS,GAAG;AAExG,QAAI,cAAc;AACd,aAAO,eACH,UACA,gBACA;QACI,OAAO;OACV;WAEF;AACH,YAAM,qBAAqB,KAAK,yBAAyB,SAAS,KAAK,IAAI;AAC3E,aAAO,eACH,UACA,gBACA;QACI,KAAK;OACR;;AAMT,QAAI,YAAY,MAAM;AAClB,eAAS,cAAc,MAAM,KAAK,WAAW,SAAS,KAAK,MAAM,iBAAiB;AACjF,eAAS,YAAY,MAA2B,YAAY;;AAEjE,aAAS,QAAQ,cAAc;AAC/B,WAAO;EACX;EAEU,MAAyB,KAAU,MAAY;AACrD,UAAM,WAAW,KAAK,gBAAgB,YAAY,GAAG;AACrD,WAAO,SAAS,OAAO,cAAc,MAAS,IAAI;EACtD;EAEU,WAA8B,KAAU,MAAc,mBAAoC;AAChG,UAAM,WAAW,KAAK,gBAAgB,YAAY,GAAG;AACrD,WAAO,SAAS,OAAO,YAAY,MAAS,MAAM,iBAAiB;EACvE;EAEU,yBAAyB,KAAU,MAAa;AACtD,UAAM,kBAAkB,KAAK;AAC7B,QAAI,UAAoC;AACxC,WAAO,MAAK;AACR,aAAO,YAAO,QAAP,YAAO,SAAP,UAAA,UAAYC,cAAa,OAC5B,IAAI,SAAQ,GAAI,gBAAgB,YAAY,GAAG,EAAE,iBAAiB,YAAY,G
AAG,SAAI,QAAJ,SAAI,SAAJ,OAAQ,EAAE;IAEnG;EACJ;;AAuEE,IAAO,0BAAP,MAA8B;EAvYpC,OAuYoC;;;EAMhC,YAAY,UAAmC;AAF5B,SAAA,cAA4C,oBAAI,IAAG;AAGlE,SAAK,yBAAyB,SAAS,UAAU;EACrD;EAEA,IAAI,MAAG;AACH,WAAO,OAAO,KAAK,YAAY,OAAM,CAAE;EAC3C;EAEA,YAAY,UAAyB;AACjC,UAAM,YAAY,SAAS,IAAI,SAAQ;AACvC,QAAI,KAAK,YAAY,IAAI,SAAS,GAAG;AACjC,YAAM,IAAI,MAAM,4BAA4B,SAAS,uBAAuB;;AAEhF,SAAK,YAAY,IAAI,WAAW,QAAQ;EAC5C;EAEA,YAAY,KAAQ;AAChB,UAAM,YAAY,IAAI,SAAQ;AAC9B,WAAO,KAAK,YAAY,IAAI,SAAS;EACzC;EAEA,MAAM,oBAAoB,KAAU,mBAAqC;AACrE,QAAI,WAAW,KAAK,YAAY,GAAG;AACnC,QAAI,UAAU;AACV,aAAO;;AAEX,eAAW,MAAM,KAAK,uBAAuB,QAAQ,KAAK,iBAAiB;AAC3E,SAAK,YAAY,QAAQ;AACzB,WAAO;EACX;EAIA,eAAe,KAAU,MAAc,mBAAqC;AACxE,QAAI,mBAAmB;AACnB,aAAO,KAAK,uBAAuB,WAAW,MAAM,KAAK,iBAAiB,EAAE,KAAK,cAAW;AACxF,aAAK,YAAY,QAAQ;AACzB,eAAO;MACX,CAAC;WACE;AACH,YAAM,WAAW,KAAK,uBAAuB,WAAW,MAAM,GAAG;AACjE,WAAK,YAAY,QAAQ;AACzB,aAAO;;EAEf;EAEA,YAAY,KAAQ;AAChB,WAAO,KAAK,YAAY,IAAI,IAAI,SAAQ,CAAE;EAC9C;EAEA,mBAAmB,KAAQ;AACvB,UAAM,YAAY,IAAI,SAAQ;AAC9B,UAAM,aAAa,KAAK,YAAY,IAAI,SAAS;AACjD,QAAI,YAAY;AACZ,iBAAW,QAAQ,cAAc;AACjC,iBAAW,oBAAoB;AAC/B,iBAAW,aAAa,CAAA;AACxB,iBAAW,cAAc;;AAE7B,WAAO;EACX;EAEA,eAAe,KAAQ;AACnB,UAAM,YAAY,IAAI,SAAQ;AAC9B,UAAM,aAAa,KAAK,YAAY,IAAI,SAAS;AACjD,QAAI,YAAY;AACZ,iBAAW,QAAQ,cAAc;AACjC,WAAK,YAAY,OAAO,SAAS;;AAErC,WAAO;EACX;;;;ACzYE,IAAO,gBAAP,MAAoB;EA1E1B,OA0E0B;;;EAMtB,YAAY,UAA6B;AACrC,SAAK,aAAa,SAAS,OAAO;AAClC,SAAK,mBAAmB,MAAM,SAAS,OAAO,UAAU;AACxD,SAAK,gBAAgB,SAAS,WAAW;AACzC,SAAK,iBAAiB,SAAS,UAAU;EAC7C;EAEA,MAAM,KAAK,UAA2B,cAAc,uCAAkB,MAAI;AACtE,eAAW,QAAQ,UAAU,SAAS,YAAY,KAAK,GAAG;AACtD,YAAM,kBAAkB,WAAW;AACnC,uBAAiB,IAAI,EAAE,QAAQ,SAAO,KAAK,OAAO,KAAK,QAAQ,CAAC;;EAExE;EAEU,OAAO,SAAwB,UAAyB;AAC9D,UAAM,MAAM,QAAQ;AAEpB,QAAI,IAAI,SAAS,QAAW;AACxB,UAAI;AACA,cAAM,cAAc,KAAK,aAAa,OAAO;AAC7C,YAAI,eAAe,WAAW,GAAG;AAC7B,cAAI,OAAO;eACR;AACH,cAAI,mBAAmB;AACvB,cAAI,KAAK,iBAAgB,EAAG,YAAY,YAAY,WAAW,GAAG;AAE9D,kBAAM,aAAa,KAAK,YAAY,WAAW;AAC/C,gBAAI,OAAO,eAAU,QAAV,eAAU,SAAV,aAAc,KAAK,mBAAmB,SAAS,WAAW;;;eAGxE,KAAK;AACV,YAAI,OAAI,OAAA,OAAA,OAAA,OAAA,CAAA,GACD,OAAO,GAAA,EACV,SAAS,mDAAmD,IAAI,QAAQ,MAAM,GAAG,GAAE,CAAA;;;AAK/F,aAAS,WAAW,KAAK,GAAG;EAChC;EAEA,OAAO,UAAyB;AAC5B,eAAW,OAAO,SAAS,YAAY;AACnC,aAAQ,IAAyB;AACjC,aAAQ,IAAyB;;AAErC,aAAS,aAAa,CAAA;EAC1B;EAEA,aAAa,SAAsB;AAC/B,UAAM,QAAQ,KAAK,cAAc,SAAS,OAAO;AACjD,UAAM,cAAc,MAAM,WAAW,QAAQ,UAAU,QAAQ;AAC/D,WAAO,gBAAW,QAAX,gBAAW,SAAX,cAAe,KAAK,mBAAmB,OAAO;EACzD;EAEA,eAAe,MAAe,UAAkB,SAA8B,SAAe;AAGzF,UAAM,SAAS;AACf,UAAM,YAA8B;MAChC,UAAU;MACV,UAAU;MAEV,IAAI,MAAG;;AACH,YAAI,UAAU,KAAK,IAAI,GAAG;AAEtB,iBAAO,KAAK;mBACL,qBAAqB,KAAK,gBAAgB,GAAG;AAEpD,gBAAM,aAAa,OAAO,YAAY,KAAK,gBAAgB;AAC3D,eAAK,OAAO,eAAU,QAAV,eAAU,SAAV,aACR,OAAO,mBAAmB,EAAE,WAAW,WAAW,MAAM,SAAQ,GAAI,KAAK,gBAAgB;mBACtF,KAAK,SAAS,QAAW;AAEhC,gBAAM,UAAU,OAAO,cAAc,EAAE,WAAW,WAAW,MAAM,SAAQ,CAAE;AAC7E,cAAI,QAAQ,SAAS,YAAY,IAAI,EAAE,QAAQ,cAAc,gBAAgB;AAEzE,mBAAO;;AAEX,eAAK,QAAO,KAAA,QAAQ,UAAI,QAAA,OAAA,SAAA,KAAI,QAAQ;AACpC,eAAK,mBAAmB,QAAQ;;AAEpC,eAAO,UAAU,KAAK,IAAI,IAAI,KAAK,OAAO;MAC9C;MACA,IAAI,mBAAgB;AAChB,eAAO,KAAK;MAChB;MACA,IAAI,QAAK;AACL,eAAO,eAAe,KAAK,IAAI,IAAI,KAAK,OAAO;MACnD;;AAEJ,WAAO;EACX;EAEU,cAAc,SAAsB;AAC1C,QAAI;AACA,YAAM,cAAc,KAAK,aAAa,OAAO;AAC7C,UAAI,eAAe,WAAW,GAAG;AAC7B,eAAO,EAAE,OAAO,YAAW;;AAE/B,YAAM,aAAa,KAAK,YAAY,WAAW;AAC/C,UAAI,YAAY;AACZ,eAAO,EAAE,MAAM,YAAY,OAAO,YAAW;aAE5C;AACD,eAAO;UACH,OAAO;UACP,OACI,KAAK,mBAAmB,SAAS,WAAW;;;aAGnD,KAAK;AACV,aAAO;QACH,OAAK,OAAA,OAAA,OAAA,OAAA,CAAA,GACE,OAAO,GAAA,EACV,SAAS,mDAAmD,QAAQ,UAAU,QAAQ,MAAM,GAAG,GAAE,CAAA;;;EAIjH;EAEU,YAAY,iBAAmC;AACrD,QAAI,gBAAgB,MAAM;AACtB,aAAO,gBAAgB;;AAE3B,UAAM,MAAM,KAAK,iBAAgB,EAAG,YAAY,gBAAgB,WAAW;AAC3E,QAAI,CAAC,KA
AK;AACN,aAAO;;AAEX,WAAO,KAAK,eAAe,WAAW,IAAI,YAAY,OAAO,gBAAgB,IAAI;EACrF;EAEU,mBAAmB,SAAwB,mBAAsC;AAGvF,UAAM,WAAW,YAAY,QAAQ,SAAS;AAC9C,QAAI,SAAS,QAAQ,cAAc,gBAAgB;AAC/C,cAAQ,KAAK,gFAAgF,SAAS,GAAG,IAAI;;AAEjH,UAAM,gBAAgB,KAAK,WAAW,iBAAiB,OAAO;AAC9D,WAAA,OAAA,OAAA,OAAA,OAAA,CAAA,GACO,OAAO,GAAA,EACV,SAAS,kCAAkC,aAAa,WAAW,QAAQ,UAAU,QAAQ,MAC7F,kBAAiB,CAAA;EAEzB;;;;ACpNE,SAAU,QAAQ,MAAa;AACjC,SAAO,OAAQ,KAAsB,SAAS;AAClD;AAFgB;AAoBV,IAAO,sBAAP,MAA0B;EAjChC,OAiCgC;;;EAC5B,QAAQ,MAAa;AACjB,QAAI,QAAQ,IAAI,GAAG;AACf,aAAO,KAAK;;AAEhB,WAAO;EACX;EAEA,YAAY,MAAa;AACrB,WAAO,oBAAoB,KAAK,UAAU,MAAM;EACpD;;;;ACsBE,IAAO,oBAAP,MAAwB;EAjE9B,OAiE8B;;;EAK1B,YAAY,UAA6B;AACrC,SAAK,eAAe,SAAS,WAAW;AACxC,SAAK,QAAQ,SAAS,OAAO,UAAU;AACvC,SAAK,cAAc,SAAS,UAAU;EAC1C;EAEA,gBAAgB,eAAsB;AAClC,QAAI,eAAe;AACf,YAAM,aAAa,eAAe,aAAa;AAC/C,YAAM,WAAW,cAAc;AAC/B,UAAI,cAAc,UAAU;AACxB,cAAM,YAAa,SAA4B,WAAW,OAAO;AAEjE,YAAI,YAAY,SAAS,GAAG;AACxB,iBAAO,UAAU;mBACV,MAAM,QAAQ,SAAS,GAAG;AACjC,qBAAW,OAAO,WAAW;AACzB,gBAAI,YAAY,GAAG,KAAK,IAAI,YACrB,IAAI,SAAS,UAAU,cAAc,UACrC,IAAI,SAAS,OAAO,cAAc,KAAK;AAC1C,qBAAO,IAAI;;;;;AAK3B,UAAI,UAAU;AACV,cAAM,WAAW,KAAK,aAAa,YAAY,QAAQ;AAEvD,YAAI,aAAa,aAAa,iBAAiB,YAAY,eAAe,QAAQ,IAAI;AAClF,iBAAO;;;;AAInB,WAAO;EACX;EAEA,oBAAoB,eAAsB;AACtC,UAAM,UAAU,KAAK,gBAAgB,aAAa;AAClD,QAAI,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,UAAU;AACnB,YAAM,aAAa,KAAK,aAAa,YAAY,OAAO;AACxD,aAAO,eAAU,QAAV,eAAU,SAAV,aAAc,QAAQ;;AAEjC,WAAO;EACX;EAEA,eAAe,YAAqB,SAA8B;AAC9D,UAAM,OAA+B,CAAA;AACrC,QAAI,QAAQ,oBAAoB;AAC5B,YAAM,MAAM,KAAK,mBAAmB,UAAU;AAC9C,UAAI,KAAK;AACL,aAAK,KAAK,GAAG;;;AAGrB,QAAI,kBAAkB,KAAK,MAAM,kBAAkB,YAAY,KAAK,YAAY,eAAe,UAAU,CAAC;AAC1G,QAAI,QAAQ,aAAa;AACrB,wBAAkB,gBAAgB,OAAO,SAAO,SAAS,OAAO,IAAI,WAAW,QAAQ,WAAW,CAAC;;AAEvG,SAAK,KAAK,GAAG,eAAe;AAC5B,WAAO,OAAO,IAAI;EACtB;EAEU,mBAAmB,YAAmB;AAC5C,UAAM,WAAW,KAAK,aAAa,YAAY,UAAU;AACzD,QAAI,UAAU;AACV,YAAM,MAAM,YAAY,UAAU;AAClC,YAAM,OAAO,KAAK,YAAY,eAAe,UAAU;AACvD,aAAO;QACH,WAAW,IAAI;QACf,YAAY;QACZ,WAAW,IAAI;QACf,YAAY;QACZ,SAAS,kBAAkB,QAAQ;QACnC,OAAO;;;AAGf,WAAO;EACX;;;;ACtIE,IAAO,WAAP,MAAe;EAZrB,OAYqB;;;EAMjB,YAAY,UAAwB;AAJ5B,SAAA,MAAM,oBAAI,IAAG;AAKjB,QAAI,UAAU;AACV,iBAAW,CAAC,KAAK,KAAK,KAAK,UAAU;AACjC,aAAK,IAAI,KAAK,KAAK;;;EAG/B;;;;EAKA,IAAI,OAAI;AACJ,WAAO,UAAU,IAAI,OAAO,KAAK,IAAI,OAAM,CAAE,EAAE,IAAI,OAAK,EAAE,MAAM,CAAC;EACrE;;;;EAKA,QAAK;AACD,SAAK,IAAI,MAAK;EAClB;;;;;;;;;EAUA,OAAO,KAAQ,OAAS;AACpB,QAAI,UAAU,QAAW;AACrB,aAAO,KAAK,IAAI,OAAO,GAAG;WACvB;AACH,YAAM,SAAS,KAAK,IAAI,IAAI,GAAG;AAC/B,UAAI,QAAQ;AACR,cAAM,QAAQ,OAAO,QAAQ,KAAK;AAClC,YAAI,SAAS,GAAG;AACZ,cAAI,OAAO,WAAW,GAAG;AACrB,iBAAK,IAAI,OAAO,GAAG;iBAChB;AACH,mBAAO,OAAO,OAAO,CAAC;;AAE1B,iBAAO;;;AAGf,aAAO;;EAEf;;;;;;;;EASA,IAAI,KAAM;;AACN,YAAO,KAAA,KAAK,IAAI,IAAI,GAAG,OAAC,QAAA,OAAA,SAAA,KAAI,CAAA;EAChC;;;;;;EAOA,IAAI,KAAQ,OAAS;AACjB,QAAI,UAAU,QAAW;AACrB,aAAO,KAAK,IAAI,IAAI,GAAG;WACpB;AACH,YAAM,SAAS,KAAK,IAAI,IAAI,GAAG;AAC/B,UAAI,QAAQ;AACR,eAAO,OAAO,QAAQ,KAAK,KAAK;;AAEpC,aAAO;;EAEf;;;;EAKA,IAAI,KAAQ,OAAQ;AAChB,QAAI,KAAK,IAAI,IAAI,GAAG,GAAG;AACnB,WAAK,IAAI,IAAI,GAAG,EAAG,KAAK,KAAK;WAC1B;AACH,WAAK,IAAI,IAAI,KAAK,CAAC,KAAK,CAAC;;AAE7B,WAAO;EACX;;;;EAKA,OAAO,KAAQ,QAAmB;AAC9B,QAAI,KAAK,IAAI,IAAI,GAAG,GAAG;AACnB,WAAK,IAAI,IAAI,GAAG,EAAG,KAAK,GAAG,MAAM;WAC9B;AACH,WAAK,IAAI,IAAI,KAAK,MAAM,KAAK,MAAM,CAAC;;AAExC,WAAO;EACX;;;;EAKA,QAAQ,YAAiD;AACrD,SAAK,IAAI,QAAQ,CAAC,OAAO,QACrB,MAAM,QAAQ,WAAS,WAAW,OAAO,KAAK,IAAI,CAAC,CAAC;EAE5D;;;;EAKA,CAAC,OAAO,QAAQ,IAAC;AACb,WAAO,KAAK,QAAO,EAAG,SAAQ;EAClC;;;;EAKA,UAAO;AACH,WAAO,OAAO,KAAK,IAAI,QAAO,CAAE,EAC3B,QAAQ,CAAC,CAAC,KAAK,KAAK,MAAM,MAAM,IAAI,WAAS,CAAC,KAAK,KAAK,CAAW,CAAC;EAC7E;;;;EAKA,OAAI;AACA,WAAO,OAAO,KAAK,IAAI,KAAI,CAAE;EACjC;;;
;EAKA,SAAM;AACF,WAAO,OAAO,KAAK,IAAI,OAAM,CAAE,EAAE,KAAI;EACzC;;;;EAKA,sBAAmB;AACf,WAAO,OAAO,KAAK,IAAI,QAAO,CAAE;EACpC;;AAIE,IAAO,QAAP,MAAY;EAvKlB,OAuKkB;;;EAKd,IAAI,OAAI;AACJ,WAAO,KAAK,IAAI;EACpB;EAIA,YAAY,UAAwB;AAT5B,SAAA,MAAM,oBAAI,IAAG;AACb,SAAA,UAAU,oBAAI,IAAG;AASrB,QAAI,UAAU;AACV,iBAAW,CAAC,KAAK,KAAK,KAAK,UAAU;AACjC,aAAK,IAAI,KAAK,KAAK;;;EAG/B;EAEA,QAAK;AACD,SAAK,IAAI,MAAK;AACd,SAAK,QAAQ,MAAK;EACtB;EAEA,IAAI,KAAQ,OAAQ;AAChB,SAAK,IAAI,IAAI,KAAK,KAAK;AACvB,SAAK,QAAQ,IAAI,OAAO,GAAG;AAC3B,WAAO;EACX;EAEA,IAAI,KAAM;AACN,WAAO,KAAK,IAAI,IAAI,GAAG;EAC3B;EAEA,OAAO,OAAQ;AACX,WAAO,KAAK,QAAQ,IAAI,KAAK;EACjC;EAEA,OAAO,KAAM;AACT,UAAM,QAAQ,KAAK,IAAI,IAAI,GAAG;AAC9B,QAAI,UAAU,QAAW;AACrB,WAAK,IAAI,OAAO,GAAG;AACnB,WAAK,QAAQ,OAAO,KAAK;AACzB,aAAO;;AAEX,WAAO;EACX;;;;ACpJE,IAAO,0BAAP,MAA8B;EAjEpC,OAiEoC;;;EAKhC,YAAY,UAA6B;AACrC,SAAK,eAAe,SAAS,WAAW;AACxC,SAAK,eAAe,SAAS,UAAU;EAC3C;EAEA,MAAM,eAAe,UAA2B,cAAc,uCAAkB,MAAI;AAChF,WAAO,KAAK,sBAAsB,SAAS,YAAY,OAAO,UAAU,QAAW,WAAW;EAClG;;;;;;;;;;;;;EAcA,MAAM,sBAAsB,YAAqB,UAAoC,WAAiD,gBAAgB,cAAiC,uCAAkB,MAAI;AACzM,UAAM,UAAgC,CAAA;AAEtC,SAAK,WAAW,YAAY,SAAS,QAAQ;AAC7C,eAAW,QAAQ,SAAS,UAAU,GAAG;AACrC,YAAM,kBAAkB,WAAW;AACnC,WAAK,WAAW,MAAM,SAAS,QAAQ;;AAE3C,WAAO;EACX;;;;;EAMU,WAAW,MAAe,SAA+B,UAAyB;AACxF,UAAM,OAAO,KAAK,aAAa,QAAQ,IAAI;AAC3C,QAAI,MAAM;AACN,cAAQ,KAAK,KAAK,aAAa,kBAAkB,MAAM,MAAM,QAAQ,CAAC;;EAE9E;EAEA,MAAM,mBAAmB,UAA2B,cAAc,uCAAkB,MAAI;AACpF,UAAM,WAAW,SAAS,YAAY;AACtC,UAAM,SAAS,IAAI,SAAQ;AAE3B,eAAW,QAAQ,kBAAkB,QAAQ,GAAG;AAC5C,YAAM,kBAAkB,WAAW;AACnC,WAAK,YAAY,MAAM,UAAU,MAAM;;AAE3C,WAAO;EACX;;;;;;EAOU,YAAY,MAAe,UAA2B,QAAyB;AACrF,UAAM,YAAY,KAAK;AACvB,QAAI,WAAW;AACX,YAAM,OAAO,KAAK,aAAa,QAAQ,IAAI;AAC3C,UAAI,MAAM;AACN,eAAO,IAAI,WAAW,KAAK,aAAa,kBAAkB,MAAM,MAAM,QAAQ,CAAC;;;EAG3F;;;;AChGE,IAAO,cAAP,MAAkB;EAzCxB,OAyCwB;;;EAKpB,YAAY,UAAsC,YAAoB,SAAsB;;AACxF,SAAK,WAAW;AAChB,SAAK,aAAa;AAClB,SAAK,mBAAkB,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,qBAAe,QAAA,OAAA,SAAA,KAAI;EACvD;EAEA,iBAAc;AACV,QAAI,KAAK,YAAY;AACjB,aAAO,KAAK,SAAS,OAAO,KAAK,WAAW,eAAc,CAAE;WACzD;AACH,aAAO,KAAK;;EAEpB;EAEA,WAAW,MAAY;AACnB,UAAM,QAAQ,KAAK,kBACb,KAAK,SAAS,KAAK,OAAK,EAAE,KAAK,YAAW,MAAO,KAAK,YAAW,CAAE,IACnE,KAAK,SAAS,KAAK,OAAK,EAAE,SAAS,IAAI;AAC7C,QAAI,OAAO;AACP,aAAO;;AAEX,QAAI,KAAK,YAAY;AACjB,aAAO,KAAK,WAAW,WAAW,IAAI;;AAE1C,WAAO;EACX;;AAGE,IAAO,WAAP,MAAe;EA1ErB,OA0EqB;;;EAKjB,YAAY,UAAwC,YAAoB,SAAsB;;AAC1F,SAAK,WAAW,oBAAI,IAAG;AACvB,SAAK,mBAAkB,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,qBAAe,QAAA,OAAA,SAAA,KAAI;AACnD,eAAW,WAAW,UAAU;AAC5B,YAAM,OAAO,KAAK,kBACZ,QAAQ,KAAK,YAAW,IACxB,QAAQ;AACd,WAAK,SAAS,IAAI,MAAM,OAAO;;AAEnC,SAAK,aAAa;EACtB;EAEA,WAAW,MAAY;AACnB,UAAM,YAAY,KAAK,kBAAkB,KAAK,YAAW,IAAK;AAC9D,UAAM,QAAQ,KAAK,SAAS,IAAI,SAAS;AACzC,QAAI,OAAO;AACP,aAAO;;AAEX,QAAI,KAAK,YAAY;AACjB,aAAO,KAAK,WAAW,WAAW,IAAI;;AAE1C,WAAO;EACX;EAEA,iBAAc;AACV,QAAI,gBAAgB,OAAO,KAAK,SAAS,OAAM,CAAE;AACjD,QAAI,KAAK,YAAY;AACjB,sBAAgB,cAAc,OAAO,KAAK,WAAW,eAAc,CAAE;;AAEzE,WAAO;EACX;;AAIG,IAAM,cAAqB;EAC9B,aAAU;AACN,WAAO;EACX;EACA,iBAAc;AACV,WAAO;EACX;;;;AC7GE,IAAgB,kBAAhB,MAA+B;EAVrC,OAUqC;;;EAArC,cAAA;AAEc,SAAA,YAA0B,CAAA;AAC1B,SAAA,aAAa;EAoB3B;EAlBI,UAAU,YAAsB;AAC5B,SAAK,UAAU,KAAK,UAAU;EAClC;EAEA,UAAO;AACH,SAAK,gBAAe;AACpB,SAAK,MAAK;AACV,SAAK,aAAa;AAClB,SAAK,UAAU,QAAQ,gBAAc,WAAW,QAAO,CAAE;EAC7D;EAEU,kBAAe;AACrB,QAAI,KAAK,YAAY;AACjB,YAAM,IAAI,MAAM,sCAAsC;;EAE9D;;AAKE,IAAO,cAAP,cAAiC,gBAAe;EAnCtD,OAmCsD;;;EAAtD,cAAA;;AACuB,SAAA,QAAQ,oBAAI,IAAG;EAoCtC;EAlCI,IAAI,KAAM;AACN,SAAK,gBAAe;AACpB,WAAO,KAAK,MAAM,IAAI,GAAG;EAC7B;EAEA,IAAI,KAAQ,OAAQ;AAChB,SAAK,gBAAe;AACpB,SAAK,MAAM,IAAI,KAAK,KAAK;EAC7B;EAIA,IAAI,KAAQ,UAAkB;AAC1B,SAAK,gBAAe;AACpB,QAAI,KAAK,MAAM,IAAI,G
AAG,GAAG;AACrB,aAAO,KAAK,MAAM,IAAI,GAAG;eAClB,UAAU;AACjB,YAAM,QAAQ,SAAQ;AACtB,WAAK,MAAM,IAAI,KAAK,KAAK;AACzB,aAAO;WACJ;AACH,aAAO;;EAEf;EAEA,OAAO,KAAM;AACT,SAAK,gBAAe;AACpB,WAAO,KAAK,MAAM,OAAO,GAAG;EAChC;EAEA,QAAK;AACD,SAAK,gBAAe;AACpB,SAAK,MAAM,MAAK;EACpB;;AAGE,IAAO,eAAP,cAAuE,gBAAe;EA1E5F,OA0E4F;;;EAKxF,YAAY,WAA0C;AAClD,UAAK;AAJQ,SAAA,QAAQ,oBAAI,IAAG;AAK5B,SAAK,YAAY,cAAS,QAAT,cAAS,SAAT,YAAc,WAAS;EAC5C;EAEA,IAAI,YAAqB,KAAQ;AAC7B,SAAK,gBAAe;AACpB,WAAO,KAAK,gBAAgB,UAAU,EAAE,IAAI,GAAG;EACnD;EAEA,IAAI,YAAqB,KAAU,OAAY;AAC3C,SAAK,gBAAe;AACpB,SAAK,gBAAgB,UAAU,EAAE,IAAI,KAAK,KAAK;EACnD;EAIA,IAAI,YAAqB,KAAU,UAAsB;AACrD,SAAK,gBAAe;AACpB,UAAM,eAAe,KAAK,gBAAgB,UAAU;AACpD,QAAI,aAAa,IAAI,GAAG,GAAG;AACvB,aAAO,aAAa,IAAI,GAAG;eACpB,UAAU;AACjB,YAAM,QAAQ,SAAQ;AACtB,mBAAa,IAAI,KAAK,KAAK;AAC3B,aAAO;WACJ;AACH,aAAO;;EAEf;EAEA,OAAO,YAAqB,KAAQ;AAChC,SAAK,gBAAe;AACpB,WAAO,KAAK,gBAAgB,UAAU,EAAE,OAAO,GAAG;EACtD;EAIA,MAAM,YAAoB;AACtB,SAAK,gBAAe;AACpB,QAAI,YAAY;AACZ,YAAM,SAAS,KAAK,UAAU,UAAU;AACxC,WAAK,MAAM,OAAO,MAAM;WACrB;AACH,WAAK,MAAM,MAAK;;EAExB;EAEU,gBAAgB,YAAmB;AACzC,UAAM,SAAS,KAAK,UAAU,UAAU;AACxC,QAAI,gBAAgB,KAAK,MAAM,IAAI,MAAM;AACzC,QAAI,CAAC,eAAe;AAChB,sBAAgB,oBAAI,IAAG;AACvB,WAAK,MAAM,IAAI,QAAQ,aAAa;;AAExC,WAAO;EACX;;AAOE,IAAO,gBAAP,cAAmC,aAAwC;EA9IjF,OA8IiF;;;EAC7E,YAAY,gBAAyC;AACjD,UAAM,SAAO,IAAI,SAAQ,CAAE;AAC3B,SAAK,UAAU,eAAe,UAAU,gBAAgB,SAAS,CAAC,SAAS,YAAW;AAClF,YAAM,UAAU,QAAQ,OAAO,OAAO;AACtC,iBAAW,OAAO,SAAS;AACvB,aAAK,MAAM,GAAG;;IAEtB,CAAC,CAAC;EACN;;AAOE,IAAO,iBAAP,cAAoC,YAAiB;EA9J3D,OA8J2D;;;EACvD,YAAY,gBAAyC;AACjD,UAAK;AACL,SAAK,UAAU,eAAe,UAAU,gBAAgB,SAAS,MAAK;AAClE,WAAK,MAAK;IACd,CAAC,CAAC;EACN;;;;ACnIE,IAAO,uBAAP,MAA2B;EAjCjC,OAiCiC;;;EAS7B,YAAY,UAA6B;AACrC,SAAK,aAAa,SAAS,OAAO;AAClC,SAAK,eAAe,SAAS,WAAW;AACxC,SAAK,eAAe,SAAS,UAAU;AACvC,SAAK,eAAe,SAAS,OAAO,UAAU;AAC9C,SAAK,mBAAmB,IAAI,eAA8B,SAAS,MAAM;EAC7E;EAEA,SAAS,SAAsB;AAC3B,UAAM,SAA4C,CAAA;AAClD,UAAM,gBAAgB,KAAK,WAAW,iBAAiB,OAAO;AAE9D,UAAM,cAAc,YAAY,QAAQ,SAAS,EAAE;AACnD,QAAI,aAAa;AACb,UAAI,cAAmC,QAAQ;AAC/C,SAAG;AACC,cAAM,kBAAkB,YAAY,IAAI,WAAW;AACnD,YAAI,gBAAgB,SAAS,GAAG;AAC5B,iBAAO,KAAK,OAAO,eAAe,EAAE,OAChC,UAAQ,KAAK,WAAW,UAAU,KAAK,MAAM,aAAa,CAAC,CAAC;;AAEpE,sBAAc,YAAY;eACrB;;AAGb,QAAI,SAAgB,KAAK,eAAe,eAAe,OAAO;AAC9D,aAAS,IAAI,OAAO,SAAS,GAAG,KAAK,GAAG,KAAK;AACzC,eAAS,KAAK,YAAY,OAAO,CAAC,GAAG,MAAM;;AAE/C,WAAO;EACX;;;;EAKU,YAAY,UAAwC,YAAoB,SAAsB;AACpG,WAAO,IAAI,YAAY,OAAO,QAAQ,GAAG,YAAY,OAAO;EAChE;;;;;EAMU,oBAAoB,UAA6B,YAAoB,SAAsB;AACjG,UAAM,IAAI,OAAO,QAAQ,EAAE,IAAI,OAAI;AAC/B,YAAM,OAAO,KAAK,aAAa,QAAQ,CAAC;AACxC,UAAI,MAAM;AACN,eAAO,KAAK,aAAa,kBAAkB,GAAG,IAAI;;AAEtD,aAAO;IACX,CAAC,EAAE,YAAW;AACd,WAAO,IAAI,YAAY,GAAG,YAAY,OAAO;EACjD;;;;EAKU,eAAe,eAAuB,UAAuB;AACnE,WAAO,KAAK,iBAAiB,IAAI,eAAe,MAAM,IAAI,SAAS,KAAK,aAAa,YAAY,aAAa,CAAC,CAAC;EACpH;;;;AC/CE,SAAU,qBAAqB,MAAa;AAC9C,SAAO,OAAQ,KAA4B,aAAa;AAC5D;AAFgB;AAkDhB,SAAS,wBAAwB,KAAY;AACzC,SAAO,OAAO,QAAQ,YAAY,CAAC,CAAC,QAAQ,UAAU,OAAO,YAAY;AAC7E;AAFS;AAIH,IAAO,wBAAP,MAA4B;EA5GlC,OA4GkC;;;EAa9B,YAAY,UAA6B;AAVzC,SAAA,mBAAmB,oBAAI,IAAI,CAAC,cAAc,sBAAsB,mBAAmB,aAAa,UAAU,CAAC;AAWvG,SAAK,mBAAmB,SAAS,OAAO,UAAU;AAClD,SAAK,iBAAiB,SAAS,UAAU;AACzC,SAAK,eAAe,SAAS,WAAW;AACxC,SAAK,kBAAkB,SAAS,cAAc;EAClD;EAEA,UAAU,MAAe,UAAgC,CAAA,GAAE;AACvD,UAAM,mBAAmB,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS;AAClC,UAAM,kBAAkB,wBAAC,KAAa,UAAmB,KAAK,SAAS,KAAK,OAAO,OAAO,GAAlE;AACxB,UAAM,WAAW,mBAAmB,CAAC,KAAa,UAAmB,iBAAiB,KAAK,OAAO,eAAe,IAAI;AAErH,QAAI;AACA,WAAK,kBAAkB,YAAY,IAAI;AACvC,aAAO,KAAK,UAAU,MAAM,UAAU,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,KAAK;;AAEpD,WAAK,kBAAkB;;EAE/B;EAEA,YAAyC,SAAiB,UAAkC,CAAA,GAAE;AAC1F,UAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,SAAK,SAAS,MAAM,MAAM,OA
AO;AACjC,WAAO;EACX;EAEU,SAAS,KAAa,OAAgB,EAAE,SAAS,YAAY,aAAa,UAAU,aAAY,GAAwB;;AAC9H,QAAI,KAAK,iBAAiB,IAAI,GAAG,GAAG;AAChC,aAAO;eACA,YAAY,KAAK,GAAG;AAC3B,YAAM,WAAW,MAAM;AACvB,YAAM,WAAW,UAAU,MAAM,WAAW;AAC5C,UAAI,UAAU;AACV,cAAM,iBAAiB,YAAY,QAAQ;AAC3C,YAAI,YAAY;AAChB,YAAI,KAAK,mBAAmB,KAAK,oBAAoB,gBAAgB;AACjE,cAAI,cAAc;AACd,wBAAY,aAAa,eAAe,KAAK,KAAK;iBAC/C;AACH,wBAAY,eAAe,IAAI,SAAQ;;;AAG/C,cAAM,aAAa,KAAK,eAAe,eAAe,QAAQ;AAC9D,eAAO;UACH,MAAM,GAAG,SAAS,IAAI,UAAU;UAChC;;aAED;AACH,eAAO;UACH,SAAQ,MAAA,KAAA,MAAM,WAAK,QAAA,OAAA,SAAA,SAAA,GAAE,aAAO,QAAA,OAAA,SAAA,KAAI;UAChC;;;eAGD,UAAU,KAAK,GAAG;AACzB,UAAI,UAA6C;AACjD,UAAI,aAAa;AACb,kBAAU,KAAK,kCAAiC,OAAA,OAAA,CAAA,GAAM,KAAK,CAAA;AAC3D,aAAK,CAAC,OAAO,MAAM,eAAc,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,cAAa;AAEnD,kBAAQ,YAAY,eAAc,KAAA,KAAK,qBAAe,QAAA,OAAA,SAAA,SAAA,GAAE,IAAI,SAAQ;;;AAG5E,UAAI,cAAc,CAAC,KAAK;AACpB,oBAAO,QAAP,YAAO,SAAP,UAAA,UAAO,OAAA,OAAA,CAAA,GAAU,KAAK;AACtB,gBAAQ,eAAc,KAAA,MAAM,cAAQ,QAAA,OAAA,SAAA,SAAA,GAAE;;AAE1C,UAAI,UAAU;AACV,oBAAO,QAAP,YAAO,SAAP,UAAA,UAAO,OAAA,OAAA,CAAA,GAAU,KAAK;AACtB,cAAM,UAAU,KAAK,gBAAgB,WAAW,KAAK;AACrD,YAAI,SAAS;AACR,kBAA+B,WAAW,QAAQ,QAAQ,OAAO,EAAE;;;AAG5E,aAAO,YAAO,QAAP,YAAO,SAAP,UAAW;WACf;AACH,aAAO;;EAEf;EAEU,kCAAkC,MAA2B;AACnE,UAAM,wBAA4E,qCAA4B;MAC1G,QAAQ,QAAQ;MAChB,KAAK,QAAQ;MACb,QAAQ,QAAQ;MAChB,OAAO,QAAQ;QAJ+D;AAOlF,QAAI,KAAK,UAAU;AACf,YAAM,aAAa,KAAK,cAAc,sBAAsB,KAAK,QAAQ;AACzE,YAAM,cAAiD,WAAW,cAAc,CAAA;AAEhF,aAAO,KAAK,IAAI,EAAE,OAAO,SAAO,CAAC,IAAI,WAAW,GAAG,CAAC,EAAE,QAAQ,SAAM;AAChE,cAAM,sBAAsB,qBAAqB,KAAK,UAAU,GAAG,EAAE,IAAI,qBAAqB;AAC9F,YAAI,oBAAoB,WAAW,GAAG;AAClC,sBAAY,GAAG,IAAI;;MAE3B,CAAC;AAED,aAAO;;AAEX,WAAO;EACX;EAEU,SAAS,MAAsB,MAAe,SAAiC,WAAqB,mBAA4B,gBAAuB;AAC7J,eAAW,CAAC,cAAc,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AACrD,UAAI,MAAM,QAAQ,IAAI,GAAG;AACrB,iBAAS,QAAQ,GAAG,QAAQ,KAAK,QAAQ,SAAS;AAC9C,gBAAM,UAAU,KAAK,KAAK;AAC1B,cAAI,wBAAwB,OAAO,GAAG;AAClC,iBAAK,KAAK,IAAI,KAAK,gBAAgB,MAAM,cAAc,MAAM,SAAS,OAAO;qBACtE,UAAU,OAAO,GAAG;AAC3B,iBAAK,SAAS,SAA2B,MAAM,SAAS,MAAM,cAAc,KAAK;;;iBAGlF,wBAAwB,IAAI,GAAG;AACtC,aAAK,YAAY,IAAI,KAAK,gBAAgB,MAAM,cAAc,MAAM,MAAM,OAAO;iBAC1E,UAAU,IAAI,GAAG;AACxB,aAAK,SAAS,MAAwB,MAAM,SAAS,MAAM,YAAY;;;AAG/E,UAAM,UAAU;AAChB,YAAQ,aAAa;AACrB,YAAQ,qBAAqB;AAC7B,YAAQ,kBAAkB;EAC9B;EAEU,gBAAgB,WAAoB,UAAkB,MAAe,WAAkC,SAA+B;AAC5I,QAAI,UAAU,UAAU;AACxB,QAAI,QAAQ,UAAU;AACtB,QAAI,UAAU,MAAM;AAChB,YAAM,MAAM,KAAK,WAAW,MAAM,UAAU,MAAM,QAAQ,YAAY;AACtE,UAAI,UAAU,GAAG,GAAG;AAChB,YAAI,CAAC,SAAS;AACV,oBAAU,KAAK,aAAa,QAAQ,GAAG;;AAE3C,eAAO;UACH,UAAU,YAAO,QAAP,YAAO,SAAP,UAAW;UACrB;;aAED;AACH,gBAAQ;;;AAGhB,QAAI,OAAO;AACP,YAAM,MAA0B;QAC5B,UAAU,YAAO,QAAP,YAAO,SAAP,UAAW;;AAEzB,UAAI,QAAQ;QACR;QACA;QACA,SAAS;QACT,WAAW;;AAEf,aAAO;WACJ;AACH,aAAO;;EAEf;EAEU,WAAW,MAAe,KAAa,cAAmC;AAChF,QAAI;AACA,YAAM,gBAAgB,IAAI,QAAQ,GAAG;AACrC,UAAI,kBAAkB,GAAG;AACrB,cAAMC,QAAO,KAAK,eAAe,WAAW,MAAM,IAAI,UAAU,CAAC,CAAC;AAClE,YAAI,CAACA,OAAM;AACP,iBAAO,6BAA6B;;AAExC,eAAOA;;AAEX,UAAI,gBAAgB,GAAG;AACnB,cAAMC,eAAc,eAAe,aAAa,GAAG,IAAIC,KAAI,MAAM,GAAG;AACpE,cAAMC,YAAW,KAAK,iBAAiB,YAAYF,YAAW;AAC9D,YAAI,CAACE,WAAU;AACX,iBAAO,sCAAsC;;AAEjD,eAAOA,UAAS,YAAY;;AAEhC,YAAM,cAAc,eAAe,aAAa,IAAI,UAAU,GAAG,aAAa,CAAC,IAAID,KAAI,MAAM,IAAI,UAAU,GAAG,aAAa,CAAC;AAC5H,YAAM,WAAW,KAAK,iBAAiB,YAAY,WAAW;AAC9D,UAAI,CAAC,UAAU;AACX,eAAO,sCAAsC;;AAEjD,UAAI,kBAAkB,IAAI,SAAS,GAAG;AAClC,eAAO,SAAS,YAAY;;AAEhC,YAAM,OAAO,KAAK,eAAe,WAAW,SAAS,YAAY,OAAO,IAAI,UAAU,gBAAgB,CAAC,CAAC;AACxG,UAAI,CAAC,MAAM;AACP,eAAO,4BAA4B;;AAEvC,aAAO;aACF,KAAK;AACV,aAAO,OAAO,GAAG;;EAEzB;;;;ACvRE,IAAO,yBAAP,MAA6B;EAnCnC,OAmCmC;;;EAK/B,SAAS,UAA6B;AAClC,QAAI,CAAC,KAAK,aAAa,CAAC,KAAK,KAAK;AAE9B,WAAK,YAAY;AACjB;;AAEJ,QAAI,CAAC,KAAK
,KAAK;AACX,WAAK,MAAM,CAAA;AACX,UAAI,KAAK,WAAW;AAEhB,mBAAW,OAAO,KAAK,UAAU,iBAAiB,gBAAgB;AAC9D,eAAK,IAAI,GAAG,IAAI,KAAK;;AAEzB,aAAK,YAAY;;;AAIzB,eAAW,OAAO,SAAS,iBAAiB,gBAAgB;AACxD,UAAI,KAAK,IAAI,GAAG,MAAM,UAAa,KAAK,IAAI,GAAG,MAAM,UAAU;AAC3D,gBAAQ,KAAK,sBAAsB,GAAG,0DAA0D,SAAS,iBAAiB,UAAU,IAAI;;AAE5I,WAAK,IAAI,GAAG,IAAI;;EAExB;EAEA,YAAY,KAAQ;AAChB,QAAI,KAAK,cAAc,QAAW;AAC9B,aAAO,KAAK;;AAEhB,QAAI,KAAK,QAAQ,QAAW;AACxB,YAAM,IAAI,MAAM,uFAAuF;;AAE3G,UAAM,MAAM,SAAS,QAAQ,GAAG;AAChC,UAAM,WAAW,KAAK,IAAI,GAAG;AAC7B,QAAI,CAAC,UAAU;AACX,YAAM,IAAI,MAAM,gEAAgE,GAAG,IAAI;;AAE3F,WAAO;EACX;EAEA,IAAI,MAAG;AACH,QAAI,KAAK,cAAc,QAAW;AAC9B,aAAO,CAAC,KAAK,SAAS;;AAE1B,QAAI,KAAK,QAAQ,QAAW;AACxB,aAAO,OAAO,OAAO,KAAK,GAAG;;AAEjC,WAAO,CAAA;EACX;;;;ACjCE,SAAU,eAAe,MAAY;AACvC,SAAO,EAAE,KAAI;AACjB;AAFgB;AAuCV,IAAW;CAAjB,SAAiBE,qBAAkB;AAClB,EAAAA,oBAAA,MAAqC,CAAC,QAAQ,QAAQ,UAAU;AACjF,GAFiB,uBAAA,qBAAkB,CAAA,EAAA;AAY7B,IAAO,qBAAP,MAAyB;EA1G/B,OA0G+B;;;EAI3B,YAAY,UAA6B;AAHxB,SAAA,UAAU,IAAI,SAAQ;AAInC,SAAK,aAAa,SAAS,OAAO;EACtC;;;;;;;;;EAUA,SAAY,cAAmC,UAAsC,MAAM,WAA+B,QAAM;AAC5H,QAAI,aAAa,YAAY;AACzB,YAAM,IAAI,MAAM,2EAA2E;;AAE/F,eAAW,CAAC,MAAM,EAAE,KAAK,OAAO,QAAQ,YAAY,GAAG;AACnD,YAAM,YAAY;AAClB,UAAI,MAAM,QAAQ,SAAS,GAAG;AAC1B,mBAAW,SAAS,WAAW;AAC3B,gBAAM,QAA8B;YAChC,OAAO,KAAK,wBAAwB,OAAO,OAAO;YAClD;;AAEJ,eAAK,SAAS,MAAM,KAAK;;iBAEtB,OAAO,cAAc,YAAY;AACxC,cAAM,QAA8B;UAChC,OAAO,KAAK,wBAAwB,WAAW,OAAO;UACtD;;AAEJ,aAAK,SAAS,MAAM,KAAK;;;EAGrC;EAEU,wBAAwB,OAAwB,SAAgB;AACtE,WAAO,OAAO,MAAM,QAAQ,gBAAe;AACvC,UAAI;AACA,cAAM,MAAM,KAAK,SAAS,MAAM,QAAQ,WAAW;eAC9C,KAAK;AACV,YAAI,qBAAqB,GAAG,GAAG;AAC3B,gBAAM;;AAEV,gBAAQ,MAAM,wCAAwC,GAAG;AACzD,cAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,YAAI,eAAe,SAAS,IAAI,OAAO;AACnC,kBAAQ,MAAM,IAAI,KAAK;;AAE3B,eAAO,SAAS,0CAA0C,SAAS,EAAE,KAAI,CAAE;;IAEnF;EACJ;EAEU,SAAS,MAAc,OAA2B;AACxD,QAAI,SAAS,WAAW;AACpB,WAAK,QAAQ,IAAI,WAAW,KAAK;AACjC;;AAEJ,eAAW,WAAW,KAAK,WAAW,eAAe,IAAI,GAAG;AACxD,WAAK,QAAQ,IAAI,SAAS,KAAK;;EAEvC;EAEA,UAAU,MAAc,YAAiC;AACrD,QAAI,SAAS,OAAO,KAAK,QAAQ,IAAI,IAAI,CAAC,EACrC,OAAO,KAAK,QAAQ,IAAI,SAAS,CAAC;AACvC,QAAI,YAAY;AACZ,eAAS,OAAO,OAAO,WAAS,WAAW,SAAS,MAAM,QAAQ,CAAC;;AAEvE,WAAO,OAAO,IAAI,WAAS,MAAM,KAAK;EAC1C;;;;ACnIE,IAAO,2BAAP,MAA+B;EAlDrC,OAkDqC;;;EAKjC,YAAY,UAA6B;AACrC,SAAK,qBAAqB,SAAS,WAAW;AAC9C,SAAK,WAAW,SAAS;EAC7B;EAEA,MAAM,iBAAiB,UAA2B,UAA6B,CAAA,GAAI,cAAc,uCAAkB,MAAI;AACnH,UAAM,cAAc,SAAS;AAC7B,UAAM,cAA4B,CAAA;AAElC,UAAM,kBAAkB,WAAW;AAEnC,QAAI,CAAC,QAAQ,cAAc,QAAQ,WAAW,SAAS,UAAU,GAAG;AAChE,WAAK,oBAAoB,aAAa,aAAa,OAAO;AAC1D,UAAI,QAAQ,yBAAyB,YAAY,KAAK,OAAI;AAAA,YAAA;AAAC,iBAAA,KAAA,EAAE,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,UAAS,kBAAkB;MAAW,CAAA,GAAG;AACxG,eAAO;;AAGX,WAAK,qBAAqB,aAAa,aAAa,OAAO;AAC3D,UAAI,QAAQ,0BAA0B,YAAY,KAAK,OAAI;AAAA,YAAA;AAAC,iBAAA,KAAA,EAAE,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,UAAS,kBAAkB;MAAY,CAAA,GAAG;AAC1G,eAAO;;AAGX,WAAK,qBAAqB,UAAU,aAAa,OAAO;AACxD,UAAI,QAAQ,0BAA0B,YAAY,KAAK,OAAI;AAAA,YAAA;AAAC,iBAAA,KAAA,EAAE,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,UAAS,kBAAkB;MAAY,CAAA,GAAG;AAC1G,eAAO;;;AAKf,QAAI;AACA,kBAAY,KAAK,GAAG,MAAM,KAAK,YAAY,YAAY,OAAO,SAAS,WAAW,CAAC;aAC9E,KAAK;AACV,UAAI,qBAAqB,GAAG,GAAG;AAC3B,cAAM;;AAEV,cAAQ,MAAM,wCAAwC,GAAG;;AAG7D,UAAM,kBAAkB,WAAW;AAEnC,WAAO;EACX;EAEU,oBAAoB,aAA0B,aAA2B,UAA2B;AAC1G,eAAW,cAAc,YAAY,aAAa;AAC9C,YAAM,aAAyB;QAC3B,UAAU,qBAAqB,OAAO;QACtC,OAAO;UACH,OAAO;YACH,MAAM,WAAW,OAAQ;YACzB,WAAW,WAAW,SAAU;;UAEpC,KAAK;YACD,MAAM,WAAW,OAAQ;YACzB,WAAW,WAAW,SAAU,WAAW,SAAS;;;QAG5D,SAAS,WAAW;QACpB,MAAM,eAAe,kBAAkB,WAAW;QAClD,QAAQ,KAAK,UAAS;;AAE1B,kBAAY,KAAK,UAAU;;EAEnC;EAEU,qBAAqB,aAA0B,aAA2B,UAA2B;AAC3G,eAAW,eAAe,YAAY,cAAc;AAChD,UAAI,QAA2B;AAI/B,UAAI,MAAM,YAAY,MAAM,WAAW,GAAG;AAGtC,YAAI,
mBAAmB,aAAa;AAChC,gBAAM,QAAS,YAAyC;AACxD,cAAI,CAAC,MAAM,MAAM,WAAW,GAAG;AAC3B,kBAAM,WAAqB,EAAE,MAAM,MAAM,UAAW,GAAG,WAAW,MAAM,UAAU;AAClF,oBAAQ,EAAE,OAAO,UAAU,KAAK,SAAQ;iBACrC;AAGH,kBAAM,WAAqB,EAAE,MAAM,GAAG,WAAW,EAAC;AAClD,oBAAQ,EAAE,OAAO,UAAU,KAAK,SAAQ;;;aAG7C;AACH,gBAAQ,aAAa,YAAY,KAAK;;AAE1C,UAAI,OAAO;AACP,cAAM,aAAyB;UAC3B,UAAU,qBAAqB,OAAO;UACtC;UACA,SAAS,YAAY;UACrB,MAAM,eAAe,kBAAkB,YAAY;UACnD,QAAQ,KAAK,UAAS;;AAE1B,oBAAY,KAAK,UAAU;;;EAGvC;EAEU,qBAAqB,UAA2B,aAA2B,UAA2B;AAC5G,eAAW,aAAa,SAAS,YAAY;AACzC,YAAM,eAAe,UAAU;AAC/B,UAAI,cAAc;AACd,cAAM,OAAwC;UAC1C,MAAM,aAAa;UACnB,UAAU,aAAa;UACvB,OAAO,aAAa;UACpB,MAAM;YACF,MAAM,kBAAkB;YACxB,eAAe,aAAa,UAAU;YACtC,UAAU,aAAa;YACvB,SAAS,aAAa,UAAU;;;AAGxC,oBAAY,KAAK,KAAK,aAAa,SAAS,aAAa,SAAS,IAAI,CAAC;;;EAGnF;EAEU,MAAM,YAAY,UAAmB,SAA4B,cAAc,uCAAkB,MAAI;AAC3G,UAAM,kBAAgC,CAAA;AACtC,UAAM,WAA+B,wBAAoB,UAAiD,SAAiB,SAA2B;AAClJ,sBAAgB,KAAK,KAAK,aAAa,UAAU,SAAS,IAAI,CAAC;IACnE,GAFqC;AAIrC,UAAM,QAAQ,IAAI,UAAU,QAAQ,EAAE,IAAI,OAAM,SAAO;AACnD,YAAM,kBAAkB,WAAW;AACnC,YAAM,SAAS,KAAK,mBAAmB,UAAU,KAAK,OAAO,QAAQ,UAAU;AAC/E,iBAAW,SAAS,QAAQ;AACxB,cAAM,MAAM,MAAM,UAAU,WAAW;;IAE/C,CAAC,CAAC;AACF,WAAO;EACX;EAEU,aAAgC,UAAiD,SAAiB,MAA+B;AACvI,WAAO;MACH;MACA,OAAO,mBAAmB,IAAI;MAC9B,UAAU,qBAAqB,QAAQ;MACvC,MAAM,KAAK;MACX,iBAAiB,KAAK;MACtB,MAAM,KAAK;MACX,oBAAoB,KAAK;MACzB,MAAM,KAAK;MACX,QAAQ,KAAK,UAAS;;EAE9B;EAEU,YAAS;AACf,WAAO,KAAK,SAAS;EACzB;;AAGE,SAAU,mBAAsC,MAA+B;AACjF,MAAI,KAAK,OAAO;AACZ,WAAO,KAAK;;AAEhB,MAAI;AACJ,MAAI,OAAO,KAAK,aAAa,UAAU;AACnC,cAAU,oBAAoB,KAAK,KAAK,UAAU,KAAK,UAAU,KAAK,KAAK;aACpE,OAAO,KAAK,YAAY,UAAU;AACzC,cAAU,mBAAmB,KAAK,KAAK,UAAU,KAAK,SAAS,KAAK,KAAK;;AAE7E,cAAO,QAAP,YAAO,SAAP,UAAA,UAAY,KAAK,KAAK;AACtB,MAAI,CAAC,SAAS;AACV,WAAO;MACH,OAAO,EAAE,MAAM,GAAG,WAAW,EAAC;MAC9B,KAAK,EAAE,MAAM,GAAG,WAAW,EAAC;;;AAGpC,SAAO,QAAQ;AACnB;AAlBgB;AAoBV,SAAU,qBAAqB,UAA+C;AAChF,UAAQ,UAAU;IACd,KAAK;AACD,aAAO;IACX,KAAK;AACD,aAAO;IACX,KAAK;AACD,aAAO;IACX,KAAK;AACD,aAAO;IACX;AACI,YAAM,IAAI,MAAM,kCAAkC,QAAQ;;AAEtE;AAbgB;AAeV,IAAW;CAAjB,SAAiBC,oBAAiB;AACjB,EAAAA,mBAAA,cAAc;AACd,EAAAA,mBAAA,eAAe;AACf,EAAAA,mBAAA,eAAe;AAChC,GAJiB,sBAAA,oBAAiB,CAAA,EAAA;;;ACjN5B,IAAO,oCAAP,MAAwC;EAtC9C,OAsC8C;;;EAK1C,YAAY,UAA6B;AACrC,SAAK,iBAAiB,SAAS,UAAU;AACzC,SAAK,eAAe,SAAS,WAAW;EAC5C;EAEA,kBAAkB,MAAe,MAA0B,WAA4B,YAAY,IAAI,GAAC;AACpG,aAAI,QAAJ,SAAI,SAAJ,OAAA,OAAS,KAAK,aAAa,QAAQ,IAAI;AACvC,UAAM,OAAO,KAAK,eAAe,eAAe,IAAI;AACpD,QAAI,CAAC,MAAM;AACP,YAAM,IAAI,MAAM,gBAAgB,IAAI,eAAe;;AAEvD,QAAI;AACJ,UAAM,oBAAoB,6BAAK;AAAA,UAAA;AAAA,aAAC,oBAAe,QAAf,oBAAe,SAAf,kBAAA,kBAAoB,mBAAkB,KAAA,KAAK,aAAa,YAAY,IAAI,OAAC,QAAA,OAAA,SAAA,KAAI,KAAK,QAAQ;IAAC,GAAjG;AAC1B,WAAO;MACH;MACA;MACA,IAAI,cAAW;AACX,eAAO,kBAAiB;MAC5B;MACA,kBAAkB,kBAAkB,KAAK,QAAQ;MACjD,MAAM,KAAK;MACX,aAAa,SAAS;MACtB;;EAER;;AAuCE,IAAO,sCAAP,MAA0C;EA1GhD,OA0GgD;;;EAI5C,YAAY,UAA6B;AACrC,SAAK,cAAc,SAAS,UAAU;EAC1C;EAEA,MAAM,mBAAmB,UAA2B,cAAc,uCAAkB,MAAI;AACpF,UAAM,QAAgC,CAAA;AACtC,UAAM,WAAW,SAAS,YAAY;AACtC,eAAW,WAAW,UAAU,QAAQ,GAAG;AACvC,YAAM,kBAAkB,WAAW;AACnC,uBAAiB,OAAO,EAAE,OAAO,aAAW,CAAC,eAAe,OAAO,CAAC,EAAE,QAAQ,aAAU;AAEpF,cAAM,cAAc,KAAK,kBAAkB,OAAO;AAClD,YAAI,aAAa;AACb,gBAAM,KAAK,WAAW;;MAE9B,CAAC;;AAEL,WAAO;EACX;EAEU,kBAAkB,SAAsB;AAC9C,UAAM,kBAAkB,QAAQ,UAAU;AAC1C,UAAM,aAAa,QAAQ,UAAU;AACrC,QAAI,CAAC,mBAAmB,CAAC,YAAY;AACjC,aAAO;;AAEX,UAAM,SAAS,YAAY,QAAQ,SAAS,EAAE;AAC9C,WAAO;MACH,WAAW;MACX,YAAY,KAAK,YAAY,eAAe,QAAQ,SAAS;MAC7D,WAAW,gBAAgB;MAC3B,YAAY,gBAAgB;MAC5B,SAAS,kBAAkB,UAAU;MACrC,OAAO,SAAS,OAAO,gBAAgB,aAAa,MAAM;;EAElE;;;;AC9GE,IAAO,wBAAP,MAA4B;EAnClC,OAmCkC;;;EAAlC,cAAA;AACc,SAAA,mBAAmB;AACnB,SAAA,iBAAiB;EAuC/B;EArCI,eAAe,MAAa;AACxB,QAAI,KAAK,YAAY;AACjB,YAAM,gBAAgB,KAAK
,eAAe,KAAK,UAAU;AACzD,YAAM,aAAa,KAAK,eAAe,IAAI;AAC3C,YAAM,WAAW,gBAAgB,KAAK,mBAAmB;AACzD,aAAO;;AAEX,WAAO;EACX;EAEU,eAAe,EAAE,oBAAoB,gBAAe,GAAW;AACrE,QAAI,CAAC,oBAAoB;AACrB,YAAM,IAAI,MAAM,2CAA2C;;AAE/D,QAAI,oBAAoB,QAAW;AAC/B,aAAO,qBAAqB,KAAK,iBAAiB;;AAEtD,WAAO;EACX;EAEA,WAAwC,MAAe,MAAY;AAC/D,UAAM,WAAW,KAAK,MAAM,KAAK,gBAAgB;AACjD,WAAO,SAAS,OAAO,CAAC,eAAe,iBAAgB;AACnD,UAAI,CAAC,iBAAiB,aAAa,WAAW,GAAG;AAC7C,eAAO;;AAEX,YAAM,gBAAgB,aAAa,QAAQ,KAAK,cAAc;AAC9D,UAAI,gBAAgB,GAAG;AACnB,cAAM,WAAW,aAAa,UAAU,GAAG,aAAa;AACxD,cAAM,aAAa,SAAS,aAAa,UAAU,gBAAgB,CAAC,CAAC;AACrE,cAAM,QAAS,cAAuD,QAAQ;AAC9E,eAAO,UAAK,QAAL,UAAK,SAAA,SAAL,MAAQ,UAAU;;AAE7B,aAAQ,cAAqD,YAAY;IAC7E,GAAG,IAAI;EACX;;;;ACjBE,IAAO,+BAAP,MAAmC;EAzDzC,OAyDyC;;;EAOrC,YAAY,UAAmC;AAJ5B,SAAA,SAAS,IAAI,SAAQ;AAC9B,SAAA,WAAgD,CAAA;AAChD,SAAA,kBAAkB;AAGxB,SAAK,kBAAkB,SAAS;EACpC;EAEA,IAAI,QAAK;AACL,WAAO,KAAK,OAAO;EACvB;EAEA,WAAW,QAAwB;;AAC/B,SAAK,mBAAkB,MAAA,KAAA,OAAO,aAAa,eAAS,QAAA,OAAA,SAAA,SAAA,GAAE,mBAAa,QAAA,OAAA,SAAA,KAAI;EAC3E;EAEA,MAAM,YAAY,QAAsC;AACpD,QAAI,KAAK,iBAAiB;AACtB,UAAI,OAAO,UAAU;AAIjB,cAAM,YAAY,KAAK,gBAAgB;AACvC,eAAO,SAAS;;UAEZ,SAAS,UAAU,IAAI,UAAQ,KAAK,cAAc,KAAK,iBAAiB,UAAU,CAAC;SACtF;;AAGL,UAAI,OAAO,oBAAoB;AAG3B,cAAM,iBAAiB,KAAK,gBAAgB,IAAI,IAAI,WAA2B;;UAE3E,SAAS,KAAK,cAAc,KAAK,iBAAiB,UAAU;UAC/D;AAGD,cAAM,UAAU,MAAM,OAAO,mBAAmB,cAAc;AAC9D,uBAAe,QAAQ,CAAC,MAAM,QAAO;AACjC,eAAK,2BAA2B,KAAK,SAAU,QAAQ,GAAG,CAAC;QAC/D,CAAC;;;AAGT,SAAK,OAAO,QAAO;EACvB;;;;;;;EAQA,oBAAoB,QAAoC;AACpD,QAAI,CAAC,OAAO,UAAU;AAClB;;AAEJ,WAAO,KAAK,OAAO,QAAQ,EAAE,QAAQ,aAAU;AAC3C,WAAK,2BAA2B,SAAS,OAAO,SAAS,OAAO,CAAC;IACrE,CAAC;EACL;EAEU,2BAA2B,SAAiB,eAAkB;AACpE,SAAK,SAAS,OAAO,IAAI;EAC7B;;;;;;;EAQA,MAAM,iBAAiB,UAAkB,eAAqB;AAC1D,UAAM,KAAK;AAEX,UAAM,cAAc,KAAK,cAAc,QAAQ;AAC/C,QAAI,KAAK,SAAS,WAAW,GAAG;AAC5B,aAAO,KAAK,SAAS,WAAW,EAAE,aAAa;;EAEvD;EAEU,cAAc,YAAkB;AACtC,WAAO,GAAG,UAAU;EACxB;;;;AC3HE,IAAW;CAAjB,SAAiBC,aAAU;AAGvB,WAAgB,OAAO,UAAoC;AACvD,WAAO;MACH,SAAS,mCAAY,MAAM,SAAQ,GAA1B;;EAEjB;AAJgB;AAAA,EAAAA,YAAA,SAAM;AAK1B,GARiB,eAAA,aAAU,CAAA,EAAA;;;ACuFrB,IAAO,yBAAP,MAA6B;EA3GnC,OA2GmC;;;EAmB/B,YAAY,UAAmC;AAjB/C,SAAA,qBAAmC;;MAE/B,YAAY;QACR,YAAY,CAAC,YAAY,MAAM;;;AAQpB,SAAA,kBAA4C,CAAA;AAC5C,SAAA,sBAAsB,IAAI,SAAQ;AAClC,SAAA,aAAa,oBAAI,IAAG;AACpB,SAAA,uBAAuB,oBAAI,IAAG;AACvC,SAAA,eAAe,cAAc;AAGnC,SAAK,mBAAmB,SAAS,UAAU;AAC3C,SAAK,yBAAyB,SAAS,UAAU;AACjD,SAAK,eAAe,SAAS,UAAU;AACvC,SAAK,kBAAkB,SAAS;EACpC;EAEA,MAAM,MAAyB,WAAsC,UAAwB,CAAA,GAAI,cAAc,uCAAkB,MAAI;;AACjI,eAAW,YAAY,WAAW;AAC9B,YAAM,MAAM,SAAS,IAAI,SAAQ;AACjC,UAAI,SAAS,UAAU,cAAc,WAAW;AAC5C,YAAI,OAAO,QAAQ,eAAe,aAAa,QAAQ,YAAY;AAE/D,mBAAS,QAAQ,cAAc;AAC/B,mBAAS,cAAc;AACvB,eAAK,WAAW,OAAO,GAAG;mBACnB,OAAO,QAAQ,eAAe,UAAU;AAC/C,gBAAM,aAAa,KAAK,WAAW,IAAI,GAAG;AAC1C,gBAAM,sBAAqB,KAAA,eAAU,QAAV,eAAU,SAAA,SAAV,WAAY,YAAM,QAAA,OAAA,SAAA,SAAA,GAAE;AAC/C,cAAI,oBAAoB;AAGpB,kBAAM,iBAAgB,KAAA,QAAQ,WAAW,gBAAU,QAAA,OAAA,SAAA,KAAI,mBAAmB;AAC1E,kBAAM,aAAa,cAAc,OAAO,OAAK,CAAC,mBAAmB,SAAS,CAAC,CAAC;AAC5E,gBAAI,WAAW,SAAS,GAAG;AACvB,mBAAK,WAAW,IAAI,KAAK;gBACrB,WAAW;gBACX,SAAS;kBACL,YAAU,OAAA,OAAA,OAAA,OAAA,CAAA,GACH,QAAQ,UAAU,GAAA,EACrB,WAAU,CAAA;;gBAGlB,QAAQ,WAAW;eACtB;AACD,uBAAS,QAAQ,cAAc;;;;aAIxC;AAEH,aAAK,WAAW,OAAO,GAAG;;;AAGlC,SAAK,eAAe,cAAc;AAClC,UAAM,KAAK,WAAW,UAAU,IAAI,OAAK,EAAE,GAAG,GAAG,CAAA,CAAE;AACnD,UAAM,KAAK,eAAe,WAAW,SAAS,WAAW;EAC7D;EAEA,MAAM,OAAO,SAAgB,SAAgB,cAAc,uCAAkB,MAAI;AAC7E,SAAK,eAAe,cAAc;AAElC,eAAW,cAAc,SAAS;AAC9B,WAAK,iBAAiB,eAAe,UAAU;AAC/C,WAAK,WAAW,OAAO,WAAW,SAAQ,CAAE;AAC5C,WAAK,aAAa,OAAO,UAAU;;AAGvC,eAAW,cAAc,SAAS;AAC9B,YAAM,cAAc,KAAK,iBAAiB,mBAAmB,UAAU;AACvE,UAAI,CAAC,aAAa;AAId,cAAM,cAAc,KAAK,uBAAuB,UAAU,EAAE,OAAO,UAAS,GAAI,
UAAU;AAC1F,oBAAY,QAAQ,cAAc;AAClC,aAAK,iBAAiB,YAAY,WAAW;;AAEjD,WAAK,WAAW,OAAO,WAAW,SAAQ,CAAE;;AAGhD,UAAM,iBAAiB,OAAO,OAAO,EAAE,OAAO,OAAO,EAAE,IAAI,SAAO,IAAI,SAAQ,CAAE,EAAE,MAAK;AACvF,SAAK,iBAAiB,IACjB,OAAO,SAAO,CAAC,eAAe,IAAI,IAAI,IAAI,SAAQ,CAAE,KAAK,KAAK,aAAa,KAAK,cAAc,CAAC,EAC/F,QAAQ,SAAM;AACX,YAAM,SAAS,KAAK,gBAAgB,YAAY,IAAI,GAAG,EAAE,WAAW;AACpE,aAAO,OAAO,GAAG;AACjB,UAAI,QAAQ,KAAK,IAAI,IAAI,OAAO,cAAc,cAAc;AAC5D,UAAI,cAAc;IACtB,CAAC;AAEL,UAAM,KAAK,WAAW,SAAS,OAAO;AAEtC,UAAM,kBAAkB,WAAW;AAGnC,UAAM,mBAAmB,KAAK,iBAAiB,IAC1C,OAAO,SAAM;;AAEV,aAAA,IAAI,QAAQ,cAAc,UAEvB,GAAC,KAAA,KAAK,WAAW,IAAI,IAAI,IAAI,SAAQ,CAAE,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE;KAAS,EAEzD,QAAO;AACZ,UAAM,KAAK,eAAe,kBAAkB,KAAK,oBAAoB,WAAW;EACpF;EAEU,MAAM,WAAW,SAAgB,SAAc;AACrD,UAAM,QAAQ,IAAI,KAAK,gBAAgB,IAAI,cAAY,SAAS,SAAS,OAAO,CAAC,CAAC;EACtF;;;;EAKU,aAAa,UAA2B,aAAwB;AAEtE,QAAI,SAAS,WAAW,KAAK,SAAO,IAAI,UAAU,MAAS,GAAG;AAC1D,aAAO;;AAGX,WAAO,KAAK,aAAa,WAAW,UAAU,WAAW;EAC7D;EAEA,SAAS,UAAgC;AACrC,SAAK,gBAAgB,KAAK,QAAQ;AAClC,WAAO,WAAW,OAAO,MAAK;AAC1B,YAAM,QAAQ,KAAK,gBAAgB,QAAQ,QAAQ;AACnD,UAAI,SAAS,GAAG;AACZ,aAAK,gBAAgB,OAAO,OAAO,CAAC;;IAE5C,CAAC;EACL;;;;;EAMU,MAAM,eAAe,WAA8B,SAAuB,aAA8B;AAC9G,SAAK,aAAa,WAAW,OAAO;AAEpC,UAAM,KAAK,cAAc,WAAW,cAAc,QAAQ,aAAa,SACnE,KAAK,uBAAuB,OAAO,KAAK,WAAW,CAAC;AAGxD,UAAM,KAAK,cAAc,WAAW,cAAc,gBAAgB,aAAa,SAC3E,KAAK,aAAa,cAAc,KAAK,WAAW,CAAC;AAGrD,UAAM,KAAK,cAAc,WAAW,cAAc,gBAAgB,aAAa,OAAM,QAAM;AACvF,YAAM,mBAAmB,KAAK,gBAAgB,YAAY,IAAI,GAAG,EAAE,WAAW;AAC9E,UAAI,oBAAoB,MAAM,iBAAiB,mBAAmB,KAAK,WAAW;IACtF,CAAC;AAED,UAAM,KAAK,cAAc,WAAW,cAAc,QAAQ,aAAa,SAAM;AACzE,YAAM,SAAS,KAAK,gBAAgB,YAAY,IAAI,GAAG,EAAE,WAAW;AACpE,aAAO,OAAO,KAAK,KAAK,WAAW;IACvC,CAAC;AAED,UAAM,KAAK,cAAc,WAAW,cAAc,mBAAmB,aAAa,SAC9E,KAAK,aAAa,iBAAiB,KAAK,WAAW,CAAC;AAGxD,UAAM,gBAAgB,UAAU,OAAO,SAAO,KAAK,eAAe,GAAG,CAAC;AACtE,UAAM,KAAK,cAAc,eAAe,cAAc,WAAW,aAAa,SAC1E,KAAK,SAAS,KAAK,WAAW,CAAC;AAInC,eAAW,OAAO,WAAW;AACzB,YAAM,QAAQ,KAAK,WAAW,IAAI,IAAI,IAAI,SAAQ,CAAE;AACpD,UAAI,OAAO;AACP,cAAM,YAAY;;;EAG9B;EAEU,aAAa,WAA8B,SAAqB;AACtE,eAAW,OAAO,WAAW;AACzB,YAAM,MAAM,IAAI,IAAI,SAAQ;AAC5B,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AAIrC,UAAI,CAAC,SAAS,MAAM,WAAW;AAC3B,aAAK,WAAW,IAAI,KAAK;UACrB,WAAW;UACX;UACA,QAAQ,UAAK,QAAL,UAAK,SAAA,SAAL,MAAO;SAClB;;;EAGb;EAEU,MAAM,cAAc,WAA8B,aAA4B,aACpF,UAA8D;AAC9D,UAAM,WAAW,UAAU,OAAO,OAAK,EAAE,QAAQ,WAAW;AAC5D,eAAW,YAAY,UAAU;AAC7B,YAAM,kBAAkB,WAAW;AACnC,YAAM,SAAS,QAAQ;AACvB,eAAS,QAAQ;;AAErB,UAAM,KAAK,iBAAiB,UAAU,aAAa,WAAW;AAC9D,SAAK,eAAe;EACxB;EAEA,aAAa,aAA4B,UAA+B;AACpE,SAAK,oBAAoB,IAAI,aAAa,QAAQ;AAClD,WAAO,WAAW,OAAO,MAAK;AAC1B,WAAK,oBAAoB,OAAO,aAAa,QAAQ;IACzD,CAAC;EACL;EAIA,UAAU,OAAsB,YAAsC,aAA+B;AACjG,QAAI,MAAuB;AAC3B,QAAI,cAAc,UAAU,YAAY;AACpC,YAAM;WACH;AACH,oBAAc;;AAElB,oBAAW,QAAX,gBAAW,SAAX,cAAA,cAAgB,uCAAkB;AAClC,QAAI,KAAK;AACL,YAAM,WAAW,KAAK,iBAAiB,YAAY,GAAG;AACtD,UAAI,YAAY,SAAS,QAAQ,OAAO;AACpC,eAAO,QAAQ,QAAQ,GAAG;;;AAGlC,QAAI,KAAK,gBAAgB,OAAO;AAC5B,aAAO,QAAQ,QAAQ,MAAS;eACzB,YAAY,yBAAyB;AAC5C,aAAO,QAAQ,OAAO,kBAAkB;;AAE5C,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAU;AACnC,YAAM,kBAAkB,KAAK,aAAa,OAAO,MAAK;AAClD,wBAAgB,QAAO;AACvB,yBAAiB,QAAO;AACxB,YAAI,KAAK;AACL,gBAAM,WAAW,KAAK,iBAAiB,YAAY,GAAG;AACtD,kBAAQ,aAAQ,QAAR,aAAQ,SAAA,SAAR,SAAU,GAAG;eAClB;AACH,kBAAQ,MAAS;;MAEzB,CAAC;AACD,YAAM,mBAAmB,YAAa,wBAAwB,MAAK;AAC/D,wBAAgB,QAAO;AACvB,yBAAiB,QAAO;AACxB,eAAO,kBAAkB;MAC7B,CAAC;IACL,CAAC;EACL;EAEU,MAAM,iBAAiB,WAA8B,OAAsB,aAA8B;AAC/G,QAAI,UAAU,WAAW,GAAG;AAExB;;AAEJ,UAAM,YAAY,KAAK,oBAAoB,IAAI,KAAK;AACpD,eAAW,YAAY,WAAW;AAC9B,YAAM,kBAAkB,WAAW;AACnC,YAAM,SAAS,WAAW,WAAW;;EAE7C;;;;;;EAOU,eAAe,UAAyB;AAC9C,WAAO,QAAQ,KAAK,gBAAgB,QAAQ,EAAE,UAAU;EAC5D;;;;;EAMU,MAAM,SAAS,UAA2B,aAA8B;;AA
C9E,UAAM,YAAY,KAAK,gBAAgB,YAAY,SAAS,GAAG,EAAE,WAAW;AAC5E,UAAM,oBAAoB,KAAK,gBAAgB,QAAQ,EAAE;AACzD,UAAM,UAAU,OAAO,sBAAsB,WAAW,oBAAoB;AAC5E,UAAM,cAAc,MAAM,UAAU,iBAAiB,UAAU,SAAS,WAAW;AACnF,QAAI,SAAS,aAAa;AACtB,eAAS,YAAY,KAAK,GAAG,WAAW;WACrC;AACH,eAAS,cAAc;;AAI3B,UAAM,QAAQ,KAAK,WAAW,IAAI,SAAS,IAAI,SAAQ,CAAE;AACzD,QAAI,OAAO;AACP,OAAA,KAAA,MAAM,YAAM,QAAA,OAAA,SAAA,KAAZ,MAAM,SAAW,CAAA;AACjB,YAAM,iBAAgB,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,gBAAU,QAAA,OAAA,SAAA,KAAI,mBAAmB;AAChE,UAAI,MAAM,OAAO,kBAAkB;AAC/B,cAAM,OAAO,iBAAiB,KAAK,GAAG,aAAa;aAChD;AACH,cAAM,OAAO,mBAAmB,CAAC,GAAG,aAAa;;;EAG7D;EAEU,gBAAgB,UAAyB;;AAC/C,YAAO,MAAA,KAAA,KAAK,WAAW,IAAI,SAAS,IAAI,SAAQ,CAAE,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE,aAAO,QAAA,OAAA,SAAA,KAAI,CAAA;EACpE;;;;AChVE,IAAO,sBAAP,MAA0B;EAnFhC,OAmFgC;;;EAuB5B,YAAY,UAAmC;AAb5B,SAAA,cAAc,oBAAI,IAAG;AAKrB,SAAA,oBAAoB,IAAI,aAAY;AAMpC,SAAA,iBAAiB,oBAAI,IAAG;AAGvC,SAAK,YAAY,SAAS,UAAU;AACpC,SAAK,kBAAkB,SAAS;AAChC,SAAK,gBAAgB,SAAS;EAClC;EAEA,kBAAkB,YAAqB,aAAmB;AACtD,UAAM,eAAe,YAAY,UAAU,EAAE;AAC7C,UAAM,SAAiC,CAAA;AACvC,SAAK,eAAe,QAAQ,aAAU;AAClC,cAAQ,QAAQ,cAAW;AACvB,YAAI,SAAS,OAAO,SAAS,WAAW,YAAY,KAAK,SAAS,eAAe,aAAa;AAC1F,iBAAO,KAAK,QAAQ;;MAE5B,CAAC;IACL,CAAC;AACD,WAAO,OAAO,MAAM;EACxB;EAEA,YAAY,UAAmB,MAAkB;AAC7C,QAAI,eAAe,OAAO,KAAK,YAAY,KAAI,CAAE;AACjD,QAAI,MAAM;AACN,qBAAe,aAAa,OAAO,SAAO,CAAC,QAAQ,KAAK,IAAI,GAAG,CAAC;;AAEpE,WAAO,aACF,IAAI,SAAO,KAAK,oBAAoB,KAAK,QAAQ,CAAC,EAClD,KAAI;EACb;EAEU,oBAAoB,KAAa,UAAiB;;AACxD,QAAI,CAAC,UAAU;AACX,cAAO,KAAA,KAAK,YAAY,IAAI,GAAG,OAAC,QAAA,OAAA,SAAA,KAAI,CAAA;;AAExC,UAAM,eAAe,KAAK,kBAAkB,IAAI,KAAK,UAAU,MAAK;;AAChE,YAAM,uBAAsBC,MAAA,KAAK,YAAY,IAAI,GAAG,OAAC,QAAAA,QAAA,SAAAA,MAAI,CAAA;AACzD,aAAO,oBAAoB,OAAO,OAAK,KAAK,cAAc,UAAU,EAAE,MAAM,QAAQ,CAAC;IACzF,CAAC;AACD,WAAO;EACX;EAEA,OAAO,KAAQ;AACX,UAAM,YAAY,IAAI,SAAQ;AAC9B,SAAK,YAAY,OAAO,SAAS;AACjC,SAAK,kBAAkB,MAAM,SAAS;AACtC,SAAK,eAAe,OAAO,SAAS;EACxC;EAEA,MAAM,cAAc,UAA2B,cAAc,uCAAkB,MAAI;AAC/E,UAAM,WAAW,KAAK,gBAAgB,YAAY,SAAS,GAAG;AAC9D,UAAM,UAAU,MAAM,SAAS,WAAW,iBAAiB,eAAe,UAAU,WAAW;AAC/F,UAAM,MAAM,SAAS,IAAI,SAAQ;AACjC,SAAK,YAAY,IAAI,KAAK,OAAO;AACjC,SAAK,kBAAkB,MAAM,GAAG;EACpC;EAEA,MAAM,iBAAiB,UAA2B,cAAc,uCAAkB,MAAI;AAClF,UAAM,WAAW,KAAK,gBAAgB,YAAY,SAAS,GAAG;AAC9D,UAAM,YAAY,MAAM,SAAS,UAAU,6BAA6B,mBAAmB,UAAU,WAAW;AAChH,SAAK,eAAe,IAAI,SAAS,IAAI,SAAQ,GAAI,SAAS;EAC9D;EAEA,WAAW,UAA2B,aAAwB;AAC1D,UAAM,aAAa,KAAK,eAAe,IAAI,SAAS,IAAI,SAAQ,CAAE;AAClE,QAAI,CAAC,YAAY;AACb,aAAO;;AAEX,WAAO,WAAW,KAAK,SAAO,CAAC,IAAI,SAAS,YAAY,IAAI,IAAI,UAAU,SAAQ,CAAE,CAAC;EACzF;;;;ACjHE,IAAO,0BAAP,MAA8B;EA5DpC,OA4DoC;;;EAYhC,YAAY,UAAmC;AAV/C,SAAA,sBAAoC,CAAA;AAOjB,SAAA,SAAS,IAAI,SAAQ;AAIpC,SAAK,kBAAkB,SAAS;AAChC,SAAK,mBAAmB,SAAS,UAAU;AAC3C,SAAK,kBAAkB,SAAS,UAAU;AAC1C,SAAK,qBAAqB,SAAS,UAAU;AAC7C,SAAK,QAAQ,SAAS,UAAU;EACpC;EAEA,IAAI,QAAK;AACL,WAAO,KAAK,OAAO;EACvB;EAEA,WAAW,QAAwB;;AAC/B,SAAK,WAAU,KAAA,OAAO,sBAAgB,QAAA,OAAA,SAAA,KAAI;EAC9C;EAEA,YAAY,SAA0B;AAGlC,WAAO,KAAK,MAAM,MAAM,WAAQ;AAAA,UAAA;AAAC,aAAA,KAAK,qBAAoB,KAAA,KAAK,aAAO,QAAA,OAAA,SAAA,KAAI,CAAA,GAAI,KAAK;IAAC,CAAA;EACxF;EAEA,MAAM,oBAAoB,SAA4B,cAAc,uCAAkB,MAAI;AACtF,UAAM,YAAY,MAAM,KAAK,eAAe,OAAO;AAGnD,UAAM,kBAAkB,WAAW;AACnC,UAAM,KAAK,gBAAgB,MAAM,WAAW,KAAK,qBAAqB,WAAW;EACrF;;;;;EAMU,MAAM,eAAe,SAA0B;AACrD,UAAM,iBAAiB,KAAK,gBAAgB,IAAI,QAAQ,OAAK,EAAE,iBAAiB,cAAc;AAC9F,UAAM,YAA+B,CAAA;AACrC,UAAM,YAAY,wBAAC,aAA6B;AAC5C,gBAAU,KAAK,QAAQ;AACvB,UAAI,CAAC,KAAK,iBAAiB,YAAY,SAAS,GAAG,GAAG;AAClD,aAAK,iBAAiB,YAAY,QAAQ;;IAElD,GALkB;AASlB,UAAM,KAAK,wBAAwB,SAAS,SAAS;AACrD,UAAM,QAAQ,IACV,QAAQ,IAAI,QAAM,CAAC,IAAI,KAAK,cAAc,EAAE,CAAC,CAA2B,EACnE,IAAI,OAAM,UAAS,KAAK,eAAe,GAAG,OAAO,gBAAgB,SAAS,CAAC,CAAC;AAErF,SAAK,OAAO,Q
AAO;AACnB,WAAO;EACX;;;;;;EAOU,wBAAwB,UAA6B,YAA+C;AAC1G,WAAO,QAAQ,QAAO;EAC1B;;;;;;EAOU,cAAc,iBAAgC;AACpD,WAAOC,KAAI,MAAM,gBAAgB,GAAG;EACxC;;;;;EAMU,MAAM,eAAe,iBAAkC,YAAiB,gBAA0B,WAA8C;AACtJ,UAAM,UAAU,MAAM,KAAK,mBAAmB,cAAc,UAAU;AACtE,UAAM,QAAQ,IAAI,QAAQ,IAAI,OAAM,UAAQ;AACxC,UAAI,KAAK,aAAa,iBAAiB,OAAO,cAAc,GAAG;AAC3D,YAAI,MAAM,aAAa;AACnB,gBAAM,KAAK,eAAe,iBAAiB,MAAM,KAAK,gBAAgB,SAAS;mBACxE,MAAM,QAAQ;AACrB,gBAAM,WAAW,MAAM,KAAK,iBAAiB,oBAAoB,MAAM,GAAG;AAC1E,oBAAU,QAAQ;;;IAG9B,CAAC,CAAC;EACN;;;;EAKU,aAAa,kBAAmC,OAAuB,gBAAwB;AACrG,UAAM,OAAO,SAAS,SAAS,MAAM,GAAG;AACxC,QAAI,KAAK,WAAW,GAAG,GAAG;AACtB,aAAO;;AAEX,QAAI,MAAM,aAAa;AACnB,aAAO,SAAS,kBAAkB,SAAS;eACpC,MAAM,QAAQ;AACrB,YAAM,UAAU,SAAS,QAAQ,MAAM,GAAG;AAC1C,aAAO,eAAe,SAAS,OAAO;;AAE1C,WAAO;EACX;;;;ACpJE,IAAO,eAAP,MAAmB;EA9BzB,OA8ByB;;;EAKrB,YAAY,UAA6B;AACrC,UAAM,SAAS,SAAS,OAAO,aAAa,YAAY,SAAS,SAAS;MACtE,iBAAiB,SAAS,iBAAiB;KAC9C;AACD,SAAK,aAAa,KAAK,sBAAsB,MAAM;AACnD,UAAM,cAAc,sBAAsB,MAAM,IAAI,OAAO,OAAO,MAAM,IAAI;AAC5E,SAAK,kBAAkB,IAAI,MAAgB,aAAa;MACpD,kBAAkB;KACrB;EACL;EAEA,IAAI,aAAU;AACV,WAAO,KAAK;EAChB;EAEA,SAAS,MAAY;;AACjB,UAAM,mBAAmB,KAAK,gBAAgB,SAAS,IAAI;AAC3D,WAAO;MACH,QAAQ,iBAAiB;MACzB,QAAQ,iBAAiB;MACzB,SAAQ,KAAA,iBAAiB,OAAO,YAAM,QAAA,OAAA,SAAA,KAAI,CAAA;;EAElD;EAEU,sBAAsB,aAA4B;AACxD,QAAI,sBAAsB,WAAW;AAAG,aAAO;AAC/C,UAAM,SAAS,4BAA4B,WAAW,IAAI,OAAO,OAAO,YAAY,KAAK,EAAE,KAAI,IAAK;AACpG,UAAM,MAA2B,CAAA;AACjC,WAAO,QAAQ,WAAS,IAAI,MAAM,IAAI,IAAI,KAAK;AAC/C,WAAO;EACX;;AAME,SAAU,iBAAiB,iBAAgC;AAC7D,SAAO,MAAM,QAAQ,eAAe,MAAM,gBAAgB,WAAW,KAAK,UAAU,gBAAgB,CAAC;AACzG;AAFgB;AAOV,SAAU,4BAA4B,iBAAgC;AACxE,SAAO,mBAAmB,WAAW,mBAAmB,iBAAiB;AAC7E;AAFgB;AAOV,SAAU,sBAAsB,iBAAgC;AAClE,SAAO,CAAC,iBAAiB,eAAe,KAAK,CAAC,4BAA4B,eAAe;AAC7F;AAFgB;;;ACmBV,SAAU,WAAW,MAAwB,OAAsC,SAA2B;AAChH,MAAI;AACJ,MAAI;AACJ,MAAI,OAAO,SAAS,UAAU;AAC1B,eAAW;AACX,WAAO;SACJ;AACH,eAAW,KAAK,MAAM;AACtB,WAAO;;AAEX,MAAI,CAAC,UAAU;AACX,eAAW,SAAS,OAAO,GAAG,CAAC;;AAGnC,QAAM,QAAQ,SAAS,IAAI;AAC3B,QAAM,oBAAoB,iBAAiB,IAAI;AAE/C,QAAM,SAAS,SAAS;IACpB;IACA;IACA,SAAS;GACZ;AAED,SAAO,kBAAkB;IACrB,OAAO;IACP;IACA;GACH;AACL;AA5BgB;AA8BV,SAAU,QAAQ,MAAwB,SAA2B;AACvE,QAAM,oBAAoB,iBAAiB,OAAO;AAClD,QAAM,QAAQ,SAAS,IAAI;AAC3B,MAAI,MAAM,WAAW,GAAG;AACpB,WAAO;;AAGX,QAAMC,SAAQ,MAAM,CAAC;AACrB,QAAM,OAAO,MAAM,MAAM,SAAS,CAAC;AACnC,QAAM,aAAa,kBAAkB;AACrC,QAAM,YAAY,kBAAkB;AAEpC,SAAO,QAAQ,eAAU,QAAV,eAAU,SAAA,SAAV,WAAY,KAAKA,MAAK,CAAC,KAAK,QAAQ,cAAS,QAAT,cAAS,SAAA,SAAT,UAAW,KAAK,IAAI,CAAC;AAC5E;AAbgB;AAehB,SAAS,SAAS,MAAsB;AACpC,MAAI,UAAU;AACd,MAAI,OAAO,SAAS,UAAU;AAC1B,cAAU;SACP;AACH,cAAU,KAAK;;AAEnB,QAAM,QAAQ,QAAQ,MAAM,cAAc;AAC1C,SAAO;AACX;AATS;AAmBT,IAAM,WAAW;AACjB,IAAM,iBAAiB;AAEvB,SAAS,SAAS,SAA4B;;AAC1C,QAAM,SAAuB,CAAA;AAC7B,MAAI,cAAc,QAAQ,SAAS;AACnC,MAAI,mBAAmB,QAAQ,SAAS;AACxC,WAAS,IAAI,GAAG,IAAI,QAAQ,MAAM,QAAQ,KAAK;AAC3C,UAAMA,SAAQ,MAAM;AACpB,UAAM,OAAO,MAAM,QAAQ,MAAM,SAAS;AAC1C,QAAI,OAAO,QAAQ,MAAM,CAAC;AAC1B,QAAI,QAAQ;AAEZ,QAAIA,UAAS,QAAQ,QAAQ,OAAO;AAChC,YAAM,SAAQ,KAAA,QAAQ,QAAQ,WAAK,QAAA,OAAA,SAAA,SAAA,GAAE,KAAK,IAAI;AAC9C,UAAI,OAAO;AACP,gBAAQ,MAAM,QAAQ,MAAM,CAAC,EAAE;;WAEhC;AACH,YAAM,SAAQ,KAAA,QAAQ,QAAQ,UAAI,QAAA,OAAA,SAAA,SAAA,GAAE,KAAK,IAAI;AAC7C,UAAI,OAAO;AACP,gBAAQ,MAAM,QAAQ,MAAM,CAAC,EAAE;;;AAGvC,QAAI,MAAM;AACN,YAAM,SAAQ,KAAA,QAAQ,QAAQ,SAAG,QAAA,OAAA,SAAA,SAAA,GAAE,KAAK,IAAI;AAC5C,UAAI,OAAO;AACP,eAAO,KAAK,UAAU,GAAG,MAAM,KAAK;;;AAI5C,WAAO,KAAK,UAAU,GAAG,cAAc,IAAI,CAAC;AAC5C,UAAM,gBAAgB,eAAe,MAAM,KAAK;AAEhD,QAAI,iBAAiB,KAAK,QAAQ;AAE9B,UAAI,OAAO,SAAS,GAAG;AACnB,cAAM,WAAW,SAAS,OAAO,aAAa,gBAAgB;AAC9D,eAAO,KAAK;UACR,MAAM;UACN,SAAS;UACT,OAAO,MAAM,OAAO,UAAU,QAAQ;SACzC;;WAEF;AACH,eAAS,YAAY;AACrB,YAAM,WAAW,SAAS,KAAK,IAAI;AACnC,UAAI,U
AAU;AACV,cAAM,YAAY,SAAS,CAAC;AAC5B,cAAM,QAAQ,SAAS,CAAC;AACxB,cAAM,QAAQ,SAAS,OAAO,aAAa,mBAAmB,KAAK;AACnE,cAAM,MAAM,SAAS,OAAO,aAAa,mBAAmB,QAAQ,UAAU,MAAM;AACpF,eAAO,KAAK;UACR,MAAM;UACN,SAAS;UACT,OAAO,MAAM,OAAO,OAAO,GAAG;SACjC;AACD,iBAAS,UAAU;AACnB,gBAAQ,eAAe,MAAM,KAAK;;AAGtC,UAAI,QAAQ,KAAK,QAAQ;AACrB,cAAM,OAAO,KAAK,UAAU,KAAK;AACjC,cAAM,mBAAmB,MAAM,KAAK,KAAK,SAAS,cAAc,CAAC;AACjE,eAAO,KAAK,GAAG,kBAAkB,kBAAkB,MAAM,aAAa,mBAAmB,KAAK,CAAC;;;AAIvG;AACA,uBAAmB;;AAIvB,MAAI,OAAO,SAAS,KAAK,OAAO,OAAO,SAAS,CAAC,EAAE,SAAS,SAAS;AACjE,WAAO,OAAO,MAAM,GAAG,EAAE;;AAG7B,SAAO;AACX;AA3ES;AA6ET,SAAS,kBAAkB,MAA0B,MAAc,WAAmB,gBAAsB;AACxG,QAAM,SAAuB,CAAA;AAE7B,MAAI,KAAK,WAAW,GAAG;AACnB,UAAM,QAAQ,SAAS,OAAO,WAAW,cAAc;AACvD,UAAM,MAAM,SAAS,OAAO,WAAW,iBAAiB,KAAK,MAAM;AACnE,WAAO,KAAK;MACR,MAAM;MACN,SAAS;MACT,OAAO,MAAM,OAAO,OAAO,GAAG;KACjC;SACE;AACH,QAAI,YAAY;AAChB,eAAW,SAAS,MAAM;AACtB,YAAM,aAAa,MAAM;AACzB,YAAM,eAAe,KAAK,UAAU,WAAW,UAAU;AACzD,UAAI,aAAa,SAAS,GAAG;AACzB,eAAO,KAAK;UACR,MAAM;UACN,SAAS,KAAK,UAAU,WAAW,UAAU;UAC7C,OAAO,MAAM,OACT,SAAS,OAAO,WAAW,YAAY,cAAc,GACrD,SAAS,OAAO,WAAW,aAAa,cAAc,CAAC;SAE9D;;AAEL,UAAI,SAAS,aAAa,SAAS;AACnC,YAAM,UAAU,MAAM,CAAC;AACvB,aAAO,KAAK;QACR,MAAM;QACN,SAAS;QACT,OAAO,MAAM,OACT,SAAS,OAAO,WAAW,YAAY,SAAS,cAAc,GAC9D,SAAS,OAAO,WAAW,YAAY,SAAS,QAAQ,SAAS,cAAc,CAAC;OAEvF;AACD,gBAAU,QAAQ;AAClB,UAAI,MAAM,WAAW,GAAG;AACpB,kBAAU,MAAM,CAAC,EAAE;AACnB,cAAM,QAAQ,MAAM,CAAC;AACrB,eAAO,KAAK;UACR,MAAM;UACN,SAAS;UACT,OAAO,MAAM,OACT,SAAS,OAAO,WAAW,YAAY,SAAS,cAAc,GAC9D,SAAS,OAAO,WAAW,YAAY,SAAS,MAAM,SAAS,cAAc,CAAC;SAErF;aACE;AACH,eAAO,KAAK;UACR,MAAM;UACN,SAAS;UACT,OAAO,MAAM,OACT,SAAS,OAAO,WAAW,YAAY,SAAS,cAAc,GAC9D,SAAS,OAAO,WAAW,YAAY,SAAS,cAAc,CAAC;SAEtE;;AAEL,kBAAY,aAAa,MAAM,CAAC,EAAE;;AAEtC,UAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,QAAI,WAAW,SAAS,GAAG;AACvB,aAAO,KAAK;QACR,MAAM;QACN,SAAS;QACT,OAAO,MAAM,OACT,SAAS,OAAO,WAAW,YAAY,cAAc,GACrD,SAAS,OAAO,WAAW,YAAY,iBAAiB,WAAW,MAAM,CAAC;OAEjF;;;AAIT,SAAO;AACX;AA1ES;AA4ET,IAAM,qBAAqB;AAC3B,IAAM,qBAAqB;AAE3B,SAAS,eAAe,MAAc,OAAa;AAC/C,QAAM,QAAQ,KAAK,UAAU,KAAK,EAAE,MAAM,kBAAkB;AAC5D,MAAI,OAAO;AACP,WAAO,QAAQ,MAAM;SAClB;AACH,WAAO,KAAK;;AAEpB;AAPS;AAST,SAAS,cAAc,MAAY;AAC/B,QAAM,QAAQ,KAAK,MAAM,kBAAkB;AAC3C,MAAI,SAAS,OAAO,MAAM,UAAU,UAAU;AAC1C,WAAO,MAAM;;AAEjB,SAAO;AACX;AANS;AAUT,SAAS,kBAAkB,SAAqB;;AAC5C,QAAM,gBAA0B,SAAS,OAAO,QAAQ,SAAS,MAAM,QAAQ,SAAS,SAAS;AACjG,MAAI,QAAQ,OAAO,WAAW,GAAG;AAC7B,WAAO,IAAI,iBAAiB,CAAA,GAAI,MAAM,OAAO,eAAe,aAAa,CAAC;;AAE9E,QAAM,WAA2B,CAAA;AACjC,SAAO,QAAQ,QAAQ,QAAQ,OAAO,QAAQ;AAC1C,UAAM,UAAU,kBAAkB,SAAS,SAAS,SAAS,SAAS,CAAC,CAAC;AACxE,QAAI,SAAS;AACT,eAAS,KAAK,OAAO;;;AAG7B,QAAM,SAAQ,MAAA,KAAA,SAAS,CAAC,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE,MAAM,WAAK,QAAA,OAAA,SAAA,KAAI;AAC1C,QAAM,OAAM,MAAA,KAAA,SAAS,SAAS,SAAS,CAAC,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE,MAAM,SAAG,QAAA,OAAA,SAAA,KAAI;AACxD,SAAO,IAAI,iBAAiB,UAAU,MAAM,OAAO,OAAO,GAAG,CAAC;AAClE;AAfS;AAiBT,SAAS,kBAAkB,SAAuB,MAAmB;AACjE,QAAM,OAAO,QAAQ,OAAO,QAAQ,KAAK;AACzC,MAAI,KAAK,SAAS,OAAO;AACrB,WAAO,cAAc,SAAS,KAAK;aAC5B,KAAK,SAAS,UAAU,KAAK,SAAS,cAAc;AAC3D,WAAO,eAAe,OAAO;SAC1B;AACH,oBAAgB,MAAM,IAAI;AAC1B,YAAQ;AACR,WAAO;;AAEf;AAXS;AAaT,SAAS,gBAAgB,OAAmB,SAAsB;AAC9D,MAAI,SAAS;AACT,UAAM,OAAO,IAAI,cAAc,IAAI,MAAM,KAAK;AAC9C,QAAI,aAAa,SAAS;AACtB,cAAQ,QAAQ,KAAK,IAAI;WACtB;AACH,cAAQ,QAAQ,QAAQ,KAAK,IAAI;;;AAG7C;AATS;AAWT,SAAS,eAAe,SAAqB;AACzC,MAAI,QAAQ,QAAQ,OAAO,QAAQ,KAAK;AACxC,QAAM,aAAa;AACnB,MAAI,YAAY;AAChB,QAAM,QAAuB,CAAA;AAC7B,SAAO,SAAS,MAAM,SAAS,WAAW,MAAM,SAAS,OAAO;AAC5D,UAAM,KAAK,iBAAiB,OAAO,CAAC;AACpC,gBAAY;AACZ,YAAQ,QAAQ,OAAO,QAAQ,KAAK;;AAExC,SAAO,IAAI,cAAc,OAAO,MAAM,OAAO,WAAW,MAAM,OAAO,UAAU,MAAM,GAAG,CAAC;AAC7F;AAXS;AAaT,SAAS,iBAAiB,SAAqB;AAC3C,
QAAM,QAAQ,QAAQ,OAAO,QAAQ,KAAK;AAC1C,MAAI,MAAM,SAAS,cAAc;AAC7B,WAAO,cAAc,SAAS,IAAI;SAC/B;AACH,WAAO,eAAe,OAAO;;AAErC;AAPS;AAST,SAAS,cAAc,SAAuB,QAAe;AACzD,QAAM,WAAW,QAAQ,OAAO,QAAQ,OAAO;AAC/C,QAAM,OAAO,SAAS,QAAQ,UAAU,CAAC;AACzC,QAAM,YAAY,QAAQ,OAAO,QAAQ,KAAK;AAC9C,OAAI,cAAS,QAAT,cAAS,SAAA,SAAT,UAAW,UAAS,QAAQ;AAC5B,QAAI,QAAQ;AACR,YAAM,UAAU,eAAe,OAAO;AACtC,aAAO,IAAI,aACP,MACA,IAAI,cAAc,CAAC,OAAO,GAAG,QAAQ,KAAK,GAC1C,QACA,MAAM,OAAO,SAAS,MAAM,OAAO,QAAQ,MAAM,GAAG,CAAC;WAEtD;AACH,YAAM,UAAU,eAAe,OAAO;AACtC,aAAO,IAAI,aACP,MACA,SACA,QACA,MAAM,OAAO,SAAS,MAAM,OAAO,QAAQ,MAAM,GAAG,CAAC;;SAG1D;AACH,UAAM,QAAQ,SAAS;AACvB,WAAO,IAAI,aAAa,MAAM,IAAI,cAAc,CAAA,GAAI,KAAK,GAAG,QAAQ,KAAK;;AAEjF;AA1BS;AA4BT,SAAS,eAAe,SAAqB;AACzC,QAAM,QAAQ,QAAQ,OAAO,QAAQ,OAAO;AAC5C,SAAO,IAAI,cAAc,MAAM,SAAS,MAAM,KAAK;AACvD;AAHS;AAuBT,SAAS,iBAAiB,SAA2B;AACjD,MAAI,CAAC,SAAS;AACV,WAAO,iBAAiB;MACpB,OAAO;MACP,KAAK;MACL,MAAM;KACT;;AAEL,QAAM,EAAE,OAAO,KAAK,KAAI,IAAK;AAC7B,SAAO;IACH,OAAO,gBAAgB,OAAO,IAAI;IAClC,KAAK,gBAAgB,KAAK,KAAK;IAC/B,MAAM,gBAAgB,MAAM,IAAI;;AAExC;AAdS;AAgBT,SAAS,gBAAgBC,SAAqC,OAAc;AACxE,MAAI,OAAOA,YAAW,YAAY,OAAOA,YAAW,UAAU;AAC1D,UAAM,UAAU,OAAOA,YAAW,WAAW,aAAaA,OAAM,IAAIA,QAAO;AAC3E,QAAI,OAAO;AACP,aAAO,IAAI,OAAO,QAAQ,OAAO,EAAE;WAChC;AACH,aAAO,IAAI,OAAO,OAAO,OAAO,OAAO;;SAExC;AACH,WAAOA;;AAEf;AAXS;AAaT,IAAM,mBAAN,MAAsB;EAzetB,OAyesB;;;EAKlB,YAAY,UAA0B,OAAY;AAC9C,SAAK,WAAW;AAChB,SAAK,QAAQ;EACjB;EAEA,OAAO,MAAY;AACf,WAAO,KAAK,WAAU,EAAG,KAAK,OAAK,EAAE,SAAS,IAAI;EACtD;EAEA,QAAQ,MAAY;AAChB,WAAO,KAAK,WAAU,EAAG,OAAO,OAAK,EAAE,SAAS,IAAI;EACxD;EAEQ,aAAU;AACd,WAAO,KAAK,SAAS,OAAO,CAAC,MAAqB,UAAU,CAAC;EACjE;EAEA,WAAQ;AACJ,QAAI,QAAQ;AACZ,eAAW,WAAW,KAAK,UAAU;AACjC,UAAI,MAAM,WAAW,GAAG;AACpB,gBAAQ,QAAQ,SAAQ;aACrB;AACH,cAAM,OAAO,QAAQ,SAAQ;AAC7B,iBAAS,aAAa,KAAK,IAAI;;;AAGvC,WAAO,MAAM,KAAI;EACrB;EAEA,WAAW,SAA4B;AACnC,QAAI,QAAQ;AACZ,eAAW,WAAW,KAAK,UAAU;AACjC,UAAI,MAAM,WAAW,GAAG;AACpB,gBAAQ,QAAQ,WAAW,OAAO;aAC/B;AACH,cAAM,OAAO,QAAQ,WAAW,OAAO;AACvC,iBAAS,aAAa,KAAK,IAAI;;;AAGvC,WAAO,MAAM,KAAI;EACrB;;AAGJ,IAAM,eAAN,MAAkB;EA1hBlB,OA0hBkB;;;EAMd,YAAY,MAAc,SAAyB,QAAiB,OAAY;AAC5E,SAAK,OAAO;AACZ,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,QAAQ;EACjB;EAEA,WAAQ;AACJ,QAAI,OAAO,IAAI,KAAK,IAAI;AACxB,UAAM,UAAU,KAAK,QAAQ,SAAQ;AACrC,QAAI,KAAK,QAAQ,QAAQ,WAAW,GAAG;AACnC,aAAO,GAAG,IAAI,IAAI,OAAO;eAClB,KAAK,QAAQ,QAAQ,SAAS,GAAG;AACxC,aAAO,GAAG,IAAI;EAAK,OAAO;;AAE9B,QAAI,KAAK,QAAQ;AAEb,aAAO,IAAI,IAAI;WACZ;AACH,aAAO;;EAEf;EAEA,WAAW,SAA4B;;AACnC,YAAO,MAAA,KAAA,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,eAAS,QAAA,OAAA,SAAA,SAAA,GAAA,KAAA,SAAG,IAAI,OAAC,QAAA,OAAA,SAAA,KAAI,KAAK,kBAAkB,OAAO;EACvE;EAEQ,kBAAkB,SAA4B;AAClD,UAAM,UAAU,KAAK,QAAQ,WAAW,OAAO;AAC/C,QAAI,KAAK,QAAQ;AACb,YAAM,WAAW,gBAAgB,KAAK,MAAM,SAAS,YAAO,QAAP,YAAO,SAAP,UAAW,CAAA,CAAE;AAClE,UAAI,OAAO,aAAa,UAAU;AAC9B,eAAO;;;AAGf,QAAI,SAAS;AACb,SAAI,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,SAAQ,aAAY,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,SAAQ,QAAW;AACzD,eAAS;gBACF,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,SAAQ,QAAQ;AAChC,eAAS;gBACF,YAAO,QAAP,YAAO,SAAA,SAAP,QAAS,SAAQ,eAAe;AACvC,eAAS;;AAEb,QAAI,OAAO,GAAG,MAAM,IAAI,KAAK,IAAI,GAAG,MAAM;AAC1C,QAAI,KAAK,QAAQ,QAAQ,WAAW,GAAG;AACnC,aAAO,GAAG,IAAI,WAAM,OAAO;eACpB,KAAK,QAAQ,QAAQ,SAAS,GAAG;AACxC,aAAO,GAAG,IAAI;EAAK,OAAO;;AAE9B,QAAI,KAAK,QAAQ;AAEb,aAAO,IAAI,IAAI;WACZ;AACH,aAAO;;EAEf;;AAGJ,SAAS,gBAAgB,KAAa,SAAiB,SAA2B;;AAC9E,MAAI,QAAQ,eAAe,QAAQ,cAAc,QAAQ,QAAQ;AAC7D,UAAM,QAAQ,QAAQ,QAAQ,GAAG;AACjC,QAAI,UAAU;AACd,QAAI,QAAQ,GAAG;AACX,YAAM,eAAe,eAAe,SAAS,KAAK;AAClD,gBAAU,QAAQ,UAAU,YAAY;AACxC,gBAAU,QAAQ,UAAU,GAAG,KAAK;;AAExC,QAAI,QAAQ,cAAe,QAAQ,UAAU,QAAQ,SAAS,QAAS;AAEnE,gBAAU,KAAK,OAAO;;AAE1B,UAAM,gBAAe,MAAA,KAAA,QAAQ,gBAAU,QAAA,OAAA,SAAA,SAAA,GAA
A,KAAA,SAAG,SAAS,OAAO,OAAC,QAAA,OAAA,SAAA,KAAI,kBAAkB,SAAS,OAAO;AACjG,WAAO;;AAEX,SAAO;AACX;AAjBS;AAmBT,SAAS,kBAAkB,SAAiB,SAAe;AACvD,MAAI;AACA,IAAAC,KAAI,MAAM,SAAS,IAAI;AACvB,WAAO,IAAI,OAAO,KAAK,OAAO;WAChC,IAAM;AACJ,WAAO;;AAEf;AAPS;AAST,IAAM,gBAAN,MAAmB;EAtnBnB,OAsnBmB;;;EAIf,YAAY,OAAsB,OAAY;AAC1C,SAAK,UAAU;AACf,SAAK,QAAQ;EACjB;EAEA,WAAQ;AACJ,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK;AAC1C,YAAM,SAAS,KAAK,QAAQ,CAAC;AAC7B,YAAM,OAAO,KAAK,QAAQ,IAAI,CAAC;AAC/B,cAAQ,OAAO,SAAQ;AACvB,UAAI,QAAQ,KAAK,MAAM,MAAM,OAAO,OAAO,MAAM,MAAM,MAAM;AACzD,gBAAQ;;;AAGhB,WAAO;EACX;EAEA,WAAW,SAA4B;AACnC,QAAI,OAAO;AACX,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK;AAC1C,YAAM,SAAS,KAAK,QAAQ,CAAC;AAC7B,YAAM,OAAO,KAAK,QAAQ,IAAI,CAAC;AAC/B,cAAQ,OAAO,WAAW,OAAO;AACjC,UAAI,QAAQ,KAAK,MAAM,MAAM,OAAO,OAAO,MAAM,MAAM,MAAM;AACzD,gBAAQ;;;AAGhB,WAAO;EACX;;AAGJ,IAAM,gBAAN,MAAmB;EA1pBnB,OA0pBmB;;;EAIf,YAAY,MAAc,OAAY;AAClC,SAAK,OAAO;AACZ,SAAK,QAAQ;EACjB;EAEA,WAAQ;AACJ,WAAO,KAAK;EAChB;EACA,aAAU;AACN,WAAO,KAAK;EAChB;;AAIJ,SAAS,aAAa,MAAY;AAC9B,MAAI,KAAK,SAAS,IAAI,GAAG;AACrB,WAAO;SACJ;AACH,WAAO;;AAEf;AANS;;;AClpBH,IAAO,6BAAP,MAAiC;EA1BvC,OA0BuC;;;EAKnC,YAAY,UAA6B;AACrC,SAAK,eAAe,SAAS,OAAO,UAAU;AAC9C,SAAK,kBAAkB,SAAS,cAAc;EAClD;EAEA,iBAAiB,MAAa;AAC1B,UAAM,UAAU,KAAK,gBAAgB,WAAW,IAAI;AACpD,QAAI,WAAW,QAAQ,OAAO,GAAG;AAC7B,YAAM,cAAc,WAAW,OAAO;AACtC,aAAO,YAAY,WAAW;QAC1B,YAAY,wBAAC,MAAM,YAAW;AAC1B,iBAAO,KAAK,0BAA0B,MAAM,MAAM,OAAO;QAC7D,GAFY;QAGZ,WAAW,wBAAC,QAAO;AACf,iBAAO,KAAK,yBAAyB,MAAM,GAAG;QAClD,GAFW;OAGd;;AAEL,WAAO;EACX;EAEU,0BAA0B,MAAe,MAAc,SAAe;;AAC5E,UAAM,eAAc,KAAA,KAAK,4BAA4B,MAAM,IAAI,OAAC,QAAA,OAAA,SAAA,KAAI,KAAK,sBAAsB,MAAM,IAAI;AACzG,QAAI,eAAe,YAAY,aAAa;AACxC,YAAM,OAAO,YAAY,YAAY,MAAM,MAAM,OAAO;AACxD,YAAM,YAAY,YAAY,YAAY,MAAM,MAAM,YAAY;AAClE,YAAM,MAAM,YAAY,YAAY,KAAK,EAAE,UAAU,IAAI,IAAI,IAAI,SAAS,GAAE,CAAE;AAC9E,aAAO,IAAI,OAAO,KAAK,IAAI,SAAQ,CAAE;WAClC;AACH,aAAO;;EAEf;EAEU,yBAAyB,OAAgB,MAAc;AAE7D,WAAO;EACX;EAEU,4BAA4B,MAAe,MAAY;AAC7D,UAAM,WAAW,YAAY,IAAI;AACjC,UAAM,cAAc,SAAS;AAC7B,QAAI,CAAC,aAAa;AACd,aAAO;;AAEX,QAAI,cAAmC;AACvC,OAAG;AACC,YAAM,kBAAkB,YAAY,IAAI,WAAW;AACnD,YAAM,cAAc,gBAAgB,KAAK,OAAK,EAAE,SAAS,IAAI;AAC7D,UAAI,aAAa;AACb,eAAO;;AAEX,oBAAc,YAAY;aACrB;AAET,WAAO;EACX;EAEU,sBAAsB,MAAe,MAAY;AACvD,UAAM,cAAc,KAAK,aAAa,YAAW,EAAG,KAAK,OAAK,EAAE,SAAS,IAAI;AAC7E,WAAO;EACX;;;;ACnEE,IAAO,yBAAP,MAA6B;EAxBnC,OAwBmC;;;EAE/B,YAAY,UAA6B;AACrC,SAAK,gBAAgB,MAAM,SAAS,OAAO;EAC/C;EACA,WAAW,MAAa;;AACpB,QAAG,qBAAqB,IAAI,GAAG;AAC3B,aAAO,KAAK;;AAEhB,YAAO,KAAA,gBAAgB,KAAK,UAAU,KAAK,cAAa,EAAG,qBAAqB,OAAC,QAAA,OAAA,SAAA,SAAA,GAAE;EACvF;;;;AClCJ;AAOA,0BAAc;;;ACwBR,IAAO,qBAAP,MAAyB;EA/B/B,OA+B+B;;;EAI3B,YAAY,UAA6B;AACrC,SAAK,aAAa,SAAS,OAAO;EACtC;EAEA,MAAyB,MAAY;AACjC,WAAO,QAAQ,QAAQ,KAAK,WAAW,MAAS,IAAI,CAAC;EACzD;;AAGE,IAAgB,8BAAhB,MAA2C;EA5CjD,OA4CiD;;;EAiB7C,YAAY,UAA6B;AAX/B,SAAA,cAAc;AAKd,SAAA,mBAAmB;AACnB,SAAA,aAA6B,CAAA;AAC7B,SAAA,QAAuC,CAAA;AAK7C,SAAK,WAAW,SAAS,WAAW;EACxC;EAEU,oBAAiB;AACvB,WAAO,KAAK,WAAW,SAAS,KAAK,aAAa;AAC9C,YAAM,SAAS,KAAK,aAAY;AAChC,aAAO,QAAQ,MAAK;AAChB,YAAI,KAAK,MAAM,SAAS,GAAG;AACvB,gBAAM,WAAW,KAAK,MAAM,MAAK;AACjC,cAAI,UAAU;AACV,mBAAO,KAAI;AACX,qBAAS,QAAQ,MAAM;;;MAGnC,CAAC;AACD,WAAK,WAAW,KAAK,MAAM;;EAEnC;EAEA,MAAM,MAAyB,MAAc,aAA8B;AACvE,UAAM,SAAS,MAAM,KAAK,oBAAoB,WAAW;AACzD,UAAM,WAAW,IAAI,SAAQ;AAC7B,QAAI;AAIJ,UAAM,eAAe,YAAY,wBAAwB,MAAK;AAC1D,gBAAU,WAAW,MAAK;AACtB,aAAK,gBAAgB,MAAM;MAC/B,GAAG,KAAK,gBAAgB;IAC5B,CAAC;AACD,WAAO,MAAM,IAAI,EAAE,KAAK,YAAS;AAC7B,YAAM,WAAW,KAAK,SAAS,QAAW,MAAM;AAChD,eAAS,QAAQ,QAAQ;IAC7B,CAAC,EAAE,MAAM,SAAM;AACX,eAAS,OAAO,GAAG;IACvB,CAAC,EAAE,QAAQ,MAAK;AACZ,mBAAa,QAAO;AACpB,mBAAa,OAAO;IACxB,CAAC;AACD,WAAO,SAAS
;EACpB;EAEU,gBAAgB,QAAoB;AAC1C,WAAO,UAAS;AAChB,UAAM,QAAQ,KAAK,WAAW,QAAQ,MAAM;AAC5C,QAAI,SAAS,GAAG;AACZ,WAAK,WAAW,OAAO,OAAO,CAAC;;EAEvC;EAEU,MAAM,oBAAoB,aAA8B;AAC9D,SAAK,kBAAiB;AACtB,eAAW,UAAU,KAAK,YAAY;AAClC,UAAI,OAAO,OAAO;AACd,eAAO,KAAI;AACX,eAAO;;;AAGf,UAAM,WAAW,IAAI,SAAQ;AAC7B,gBAAY,wBAAwB,MAAK;AACrC,YAAM,QAAQ,KAAK,MAAM,QAAQ,QAAQ;AACzC,UAAI,SAAS,GAAG;AACZ,aAAK,MAAM,OAAO,OAAO,CAAC;;AAE9B,eAAS,OAAO,kBAAkB;IACtC,CAAC;AACD,SAAK,MAAM,KAAK,QAAQ;AACxB,WAAO,SAAS;EACpB;;AAQE,IAAO,eAAP,MAAmB;EA3IzB,OA2IyB;;;EAUrB,IAAI,QAAK;AACL,WAAO,KAAK;EAChB;EAEA,IAAI,UAAO;AACP,WAAO,KAAK,eAAe;EAC/B;EAEA,YAAY,aAAgC,WAAkC,SAAgC,WAAqB;AAdhH,SAAA,iBAAiB,IAAI,sBAAO;AAErC,SAAA,WAAW,IAAI,SAAQ;AACvB,SAAA,SAAS;AACT,SAAA,WAAW;AAWjB,SAAK,cAAc;AACnB,SAAK,aAAa;AAClB,cAAU,YAAS;AACf,YAAM,cAAc;AACpB,WAAK,SAAS,QAAQ,WAAW;AACjC,WAAK,OAAM;IACf,CAAC;AACD,YAAQ,WAAQ;AACZ,WAAK,SAAS,OAAO,KAAK;AAC1B,WAAK,OAAM;IACf,CAAC;EACL;EAEA,YAAS;AACL,SAAK,SAAS,OAAO,kBAAkB;AACvC,SAAK,WAAU;EACnB;EAEA,OAAI;AACA,SAAK,SAAS;EAClB;EAEA,SAAM;AACF,SAAK,WAAW;AAChB,SAAK,SAAS;AACd,SAAK,eAAe,KAAI;EAC5B;EAEA,MAAM,MAAY;AACd,QAAI,KAAK,UAAU;AACf,YAAM,IAAI,MAAM,uBAAuB;;AAE3C,SAAK,WAAW;AAChB,SAAK,WAAW,IAAI,SAAQ;AAC5B,SAAK,YAAY,IAAI;AACrB,WAAO,KAAK,SAAS;EACzB;;;;ACnJE,IAAO,uBAAP,MAA2B;EA/CjC,OA+CiC;;;EAAjC,cAAA;AAEY,SAAA,sBAAsB,IAAI,6CAAuB;AACjD,SAAA,aAA0B,CAAA;AAC1B,SAAA,YAAyB,CAAA;AACzB,SAAA,OAAO;EA6DnB;EA3DI,MAAM,QAAwD;AAC1D,SAAK,YAAW;AAChB,UAAM,cAAc,IAAI,6CAAuB;AAC/C,SAAK,sBAAsB;AAC3B,WAAO,KAAK,QAAQ,KAAK,YAAY,QAAQ,YAAY,KAAK;EAClE;EAEA,KAAQ,QAA6B;AACjC,WAAO,KAAK,QAAQ,KAAK,WAAW,MAAM;EAC9C;EAEQ,QAAkB,OAAoB,QAAuB,mBAAqC;AACtG,UAAM,WAAW,IAAI,SAAQ;AAC7B,UAAM,QAAmB;MACrB;MACA;MACA,mBAAmB,sBAAiB,QAAjB,sBAAiB,SAAjB,oBAAqB,uCAAkB;;AAE9D,UAAM,KAAK,KAAK;AAChB,SAAK,qBAAoB;AACzB,WAAO,SAAS;EACpB;EAEQ,MAAM,uBAAoB;AAC9B,QAAI,CAAC,KAAK,MAAM;AACZ;;AAEJ,UAAM,UAAuB,CAAA;AAC7B,QAAI,KAAK,WAAW,SAAS,GAAG;AAE5B,cAAQ,KAAK,KAAK,WAAW,MAAK,CAAG;eAC9B,KAAK,UAAU,SAAS,GAAG;AAElC,cAAQ,KAAK,GAAG,KAAK,UAAU,OAAO,GAAG,KAAK,UAAU,MAAM,CAAC;WAC5D;AACH;;AAEJ,SAAK,OAAO;AACZ,UAAM,QAAQ,IAAI,QAAQ,IAAI,OAAO,EAAE,QAAQ,UAAU,kBAAiB,MAAM;AAC5E,UAAI;AAEA,cAAM,SAAS,MAAM,QAAQ,QAAO,EAAG,KAAK,MAAM,OAAO,iBAAiB,CAAC;AAC3E,iBAAS,QAAQ,MAAM;eAClB,KAAK;AACV,YAAI,qBAAqB,GAAG,GAAG;AAE3B,mBAAS,QAAQ,MAAS;eACvB;AACH,mBAAS,OAAO,GAAG;;;IAG/B,CAAC,CAAC;AACF,SAAK,OAAO;AACZ,SAAK,qBAAoB;EAC7B;EAEA,cAAW;AACP,SAAK,oBAAoB,OAAM;EACnC;;;;AClEE,IAAO,kBAAP,MAAsB;EA9C5B,OA8C4B;;;EASxB,YAAY,UAA6B;AAHtB,SAAA,sBAAsB,IAAI,MAAK;AAC/B,SAAA,iBAAiB,IAAI,MAAK;AAGzC,SAAK,UAAU,SAAS;AACxB,SAAK,QAAQ,SAAS,OAAO;AAC7B,SAAK,SAAS,SAAS,WAAW;EACtC;EAEA,UAAU,QAA4B;AAClC,WAAO;;;MAGH,aAAa,OAAO,YAAY,IAAI,OAAK,OAAA,OAAA,CAAA,GAAM,CAAC,CAAG;MACnD,cAAc,OAAO,aAAa,IAAI,OAAK,OAAA,OAAA,CAAA,GAAM,CAAC,CAAG;MACrD,OAAO,KAAK,iBAAiB,OAAO,OAAO,KAAK,wBAAwB,OAAO,KAAK,CAAC;;EAE7F;EAEU,wBAAwB,MAAa;AAC3C,UAAM,WAAW,oBAAI,IAAG;AACxB,UAAM,WAAW,oBAAI,IAAG;AACxB,eAAW,WAAW,UAAU,IAAI,GAAG;AACnC,eAAS,IAAI,SAAS,CAAA,CAAE;;AAE5B,QAAI,KAAK,UAAU;AACf,iBAAW,WAAW,UAAU,KAAK,QAAQ,GAAG;AAC5C,iBAAS,IAAI,SAAS,CAAA,CAAE;;;AAGhC,WAAO;MACH;MACA;;EAER;EAEU,iBAAiB,MAAe,SAAyB;AAC/D,UAAM,MAAM,QAAQ,SAAS,IAAI,IAAI;AACrC,QAAI,QAAQ,KAAK;AACjB,QAAI,kBAAkB,KAAK;AAC3B,QAAI,qBAAqB,KAAK;AAC9B,QAAI,KAAK,aAAa,QAAW;AAC7B,UAAI,WAAW,KAAK,iBAAiB,KAAK,UAAU,OAAO;;AAE/D,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC9C,UAAI,KAAK,WAAW,GAAG,GAAG;AACtB;;AAEJ,UAAI,MAAM,QAAQ,KAAK,GAAG;AACtB,cAAM,MAAa,CAAA;AACnB,YAAI,IAAI,IAAI;AACZ,mBAAW,QAAQ,OAAO;AACtB,cAAI,UAAU,IAAI,GAAG;AACjB,gBAAI,KAAK,KAAK,iBAAiB,MAAM,OAAO,CAAC;qBACtC,YAAY,IAAI,GAAG;AAC1B,gBAAI,KAAK,KAAK,mBAAmB,MAAM,OAAO,CAAC;iBAC5C;AACH,gBAAI,KAAK,IAAI;;;iBAGd,UAAU,KAAK,GAAG;AA
CzB,YAAI,IAAI,IAAI,KAAK,iBAAiB,OAAO,OAAO;iBACzC,YAAY,KAAK,GAAG;AAC3B,YAAI,IAAI,IAAI,KAAK,mBAAmB,OAAO,OAAO;iBAC3C,UAAU,QAAW;AAC5B,YAAI,IAAI,IAAI;;;AAGpB,WAAO;EACX;EAEU,mBAAmB,WAAsB,SAAyB;AACxE,UAAM,MAA+B,CAAA;AACrC,QAAI,WAAW,UAAU;AACzB,QAAI,UAAU,UAAU;AACpB,UAAI,WAAW,QAAQ,SAAS,IAAI,UAAU,QAAQ;;AAE1D,WAAO;EACX;EAEU,iBAAiB,MAAe,SAAyB;AAC/D,UAAM,UAAU,QAAQ,SAAS,IAAI,IAAI;AACzC,QAAI,cAAc,IAAI,GAAG;AACrB,cAAQ,WAAW,KAAK;WACrB;AAEH,cAAQ,gBAAgB,KAAK,oBAAoB,KAAK,aAAa;;AAEvE,YAAQ,SAAS,KAAK;AACtB,YAAQ,UAAU,QAAQ,SAAS,IAAI,KAAK,OAAO;AACnD,QAAI,mBAAmB,IAAI,GAAG;AAC1B,cAAQ,UAAU,KAAK,QAAQ,IAAI,WAAS,KAAK,iBAAiB,OAAO,OAAO,CAAC;eAC1E,cAAc,IAAI,GAAG;AAC5B,cAAQ,YAAY,KAAK,UAAU;AACnC,cAAQ,SAAS,KAAK;AACtB,cAAQ,SAAS,KAAK;AACtB,cAAQ,YAAY,KAAK,MAAM,MAAM;AACrC,cAAQ,cAAc,KAAK,MAAM,MAAM;AACvC,cAAQ,UAAU,KAAK,MAAM,IAAI;AACjC,cAAQ,YAAY,KAAK,MAAM,IAAI;;AAEvC,WAAO;EACX;EAEA,QAAqC,QAA2B;AAC5D,UAAM,OAAO,OAAO;AACpB,UAAM,UAAU,KAAK,uBAAuB,IAAI;AAChD,QAAI,cAAc,MAAM;AACpB,WAAK,eAAe,KAAK,UAAU,OAAO;;AAE9C,WAAO;MACH,aAAa,OAAO;MACpB,cAAc,OAAO;MACrB,OAAO,KAAK,eAAe,MAAM,OAAO;;EAEhD;EAEU,uBAAuB,MAAS;AACtC,UAAM,WAAW,oBAAI,IAAG;AACxB,UAAM,WAAW,oBAAI,IAAG;AACxB,eAAW,WAAW,UAAU,IAAI,GAAG;AACnC,eAAS,IAAI,SAAS,CAAA,CAAa;;AAEvC,QAAI;AACJ,QAAI,KAAK,UAAU;AACf,iBAAW,WAAW,UAAU,KAAK,QAAQ,GAAG;AAC5C,YAAI;AACJ,YAAI,cAAc,SAAS;AACvB,gBAAM,IAAI,gBAAgB,QAAQ,QAAkB;AACpD,iBAAO;mBACA,aAAa,SAAS;AAC7B,gBAAM,IAAI,qBAAoB;mBACvB,eAAe,SAAS;AAC/B,gBAAM,KAAK,mBAAmB,OAAO;;AAEzC,YAAI,KAAK;AACL,mBAAS,IAAI,SAAS,GAAG;AACzB,cAAI,OAAO;;;;AAIvB,WAAO;MACH;MACA;;EAER;EAEU,eAAe,MAAW,SAAuB;AACvD,UAAM,UAAU,QAAQ,SAAS,IAAI,IAAI;AACzC,YAAQ,QAAQ,KAAK;AACrB,YAAQ,kBAAkB,KAAK;AAC/B,YAAQ,qBAAqB,KAAK;AAClC,QAAI,KAAK,UAAU;AACf,cAAQ,WAAW,QAAQ,SAAS,IAAI,KAAK,QAAQ;;AAEzD,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC9C,UAAI,KAAK,WAAW,GAAG,GAAG;AACtB;;AAEJ,UAAI,MAAM,QAAQ,KAAK,GAAG;AACtB,cAAM,MAAiB,CAAA;AACvB,gBAAQ,IAAI,IAAI;AAChB,mBAAW,QAAQ,OAAO;AACtB,cAAI,UAAU,IAAI,GAAG;AACjB,gBAAI,KAAK,KAAK,UAAU,KAAK,eAAe,MAAM,OAAO,GAAG,OAAO,CAAC;qBAC7D,YAAY,IAAI,GAAG;AAC1B,gBAAI,KAAK,KAAK,iBAAiB,MAAM,SAAS,MAAM,OAAO,CAAC;iBACzD;AACH,gBAAI,KAAK,IAAI;;;iBAGd,UAAU,KAAK,GAAG;AACzB,gBAAQ,IAAI,IAAI,KAAK,UAAU,KAAK,eAAe,OAAO,OAAO,GAAG,OAAO;iBACpE,YAAY,KAAK,GAAG;AAC3B,gBAAQ,IAAI,IAAI,KAAK,iBAAiB,OAAO,SAAS,MAAM,OAAO;iBAC5D,UAAU,QAAW;AAC5B,gBAAQ,IAAI,IAAI;;;AAGxB,WAAO;EACX;EAEU,UAAU,MAAW,QAAW;AACtC,SAAK,aAAa;AAClB,WAAO;EACX;EAEU,iBAAiB,WAAgB,MAAe,MAAc,SAAuB;AAC3F,WAAO,KAAK,OAAO,eAAe,MAAM,MAAM,QAAQ,SAAS,IAAI,UAAU,QAAQ,GAAI,UAAU,QAAQ;EAC/G;EAEU,eAAe,SAAc,SAAyB,MAAM,GAAC;AACnE,UAAM,aAAa,QAAQ,SAAS,IAAI,OAAO;AAC/C,QAAI,OAAO,QAAQ,kBAAkB,UAAU;AAC3C,iBAAW,gBAAgB,KAAK,kBAAkB,QAAQ,aAAa;;AAE3E,eAAW,UAAU,QAAQ,SAAS,IAAI,QAAQ,OAAO;AACzD,QAAI,mBAAmB,UAAU,GAAG;AAChC,iBAAW,SAAS,QAAQ,SAAS;AACjC,cAAM,WAAW,KAAK,eAAe,OAAO,SAAS,KAAK;AAC1D,mBAAW,QAAQ,KAAK,QAAQ;;;AAGxC,WAAO;EACX;EAEU,mBAAmB,SAAY;AACrC,UAAM,YAAY,KAAK,aAAa,QAAQ,SAAS;AACrD,UAAM,SAAS,QAAQ;AACvB,UAAM,SAAS,QAAQ;AACvB,UAAM,YAAY,QAAQ;AAC1B,UAAM,cAAc,QAAQ;AAC5B,UAAM,UAAU,QAAQ;AACxB,UAAM,YAAY,QAAQ;AAC1B,UAAM,SAAS,QAAQ;AACvB,UAAM,OAAO,IAAI,gBACb,QACA,QACA;MACI,OAAO;QACH,MAAM;QACN,WAAW;;MAEf,KAAK;QACD,MAAM;QACN,WAAW;;OAGnB,WACA,MAAM;AAEV,WAAO;EACX;EAEU,aAAa,MAAY;AAC/B,WAAO,KAAK,MAAM,WAAW,IAAI;EACrC;EAEU,oBAAoB,MAAqB;AAC/C,QAAI,KAAK,oBAAoB,SAAS,GAAG;AACrC,WAAK,0BAAyB;;AAElC,WAAO,KAAK,oBAAoB,IAAI,IAAI;EAC5C;EAEU,kBAAkB,IAAU;AAClC,QAAI,KAAK,oBAAoB,SAAS,GAAG;AACrC,WAAK,0BAAyB;;AAElC,UAAM,UAAU,KAAK,oBAAoB,OAAO,EAAE;AAClD,QAAI,SAAS;AACT,aAAO;WACJ;AACH,YAAM,IAAI,MAAM,iCAAiC,EAAE;;EAE3D;EAEU,4BAAyB;AAC/B,QAAI,KAAK;AACT,eAAW,WAAW,UAAU,KAAK,OAAO,GAAG;AAC3C,UAAI,kBAAkB,OAAO,GAAG;AAC5B,aAAK,oBAAoB,IAAI
,SAAS,IAAI;;;EAGtD;;;;AC3QE,SAAU,wBAAwB,SAAiC;AACrE,SAAO;IACH,eAAe;MACX,iBAAiB,wBAAC,aAAa,IAAI,uBAAuB,QAAQ,GAAjD;MACjB,uBAAuB,wBAAC,aAAa,IAAI,2BAA2B,QAAQ,GAArD;;IAE3B,QAAQ;MACJ,aAAa,wBAAC,aAAa,IAAI,mBAAmB,QAAQ,GAA7C;MACb,eAAe,wBAAC,aAAa,oBAAoB,QAAQ,GAA1C;MACf,eAAe,wBAAC,aAAa,oBAAoB,QAAQ,GAA1C;MACf,kBAAkB,wBAAC,aAAa,uBAAuB,QAAQ,GAA7C;MAClB,gBAAgB,6BAAM,IAAI,sBAAqB,GAA/B;MAChB,cAAc,6BAAM,IAAI,oBAAmB,GAA7B;MACd,OAAO,wBAAC,aAAa,IAAI,aAAa,QAAQ,GAAvC;MACP,4BAA4B,6BAAM,IAAI,kCAAiC,GAA3C;;IAEhC,WAAW;MACP,gBAAgB,6BAAM,IAAI,sBAAqB,GAA/B;MAChB,4BAA4B,wBAAC,aAAa,IAAI,kCAAkC,QAAQ,GAA5D;MAC5B,8BAA8B,wBAAC,aAAa,IAAI,oCAAoC,QAAQ,GAA9D;;IAElC,YAAY;MACR,QAAQ,wBAAC,aAAa,IAAI,cAAc,QAAQ,GAAxC;MACR,cAAc,6BAAM,IAAI,oBAAmB,GAA7B;MACd,eAAe,wBAAC,aAAa,IAAI,qBAAqB,QAAQ,GAA/C;MACf,kBAAkB,wBAAC,aAAa,IAAI,wBAAwB,QAAQ,GAAlD;MAClB,YAAY,wBAAC,aAAa,IAAI,kBAAkB,QAAQ,GAA5C;;IAEhB,YAAY;MACR,UAAU,wBAAC,aAAa,IAAI,gBAAgB,QAAQ,GAA1C;MACV,gBAAgB,wBAAC,aAAa,IAAI,sBAAsB,QAAQ,GAAhD;;IAEpB,YAAY;MACR,mBAAmB,wBAAC,aAAa,IAAI,yBAAyB,QAAQ,GAAnD;MACnB,oBAAoB,wBAAC,aAAa,IAAI,mBAAmB,QAAQ,GAA7C;;IAExB,QAAQ,6BAAM,QAAQ,QAAd;;AAEhB;AAtCgB;AA0DV,SAAU,8BAA8B,SAAuC;AACjF,SAAO;IACH,iBAAiB,6BAAM,IAAI,uBAAsB,GAAhC;IACjB,WAAW;MACP,kBAAkB,wBAAC,aAAa,IAAI,wBAAwB,QAAQ,GAAlD;MAClB,wBAAwB,wBAAC,aAAa,IAAI,8BAA8B,QAAQ,GAAxD;MACxB,iBAAiB,wBAAC,aAAa,IAAI,uBAAuB,QAAQ,GAAjD;MACjB,cAAc,wBAAC,aAAa,IAAI,oBAAoB,QAAQ,GAA9C;MACd,kBAAkB,wBAAC,aAAa,IAAI,wBAAwB,QAAQ,GAAlD;MAClB,oBAAoB,wBAAC,aAAa,QAAQ,mBAAmB,QAAQ,GAAjD;MACpB,eAAe,6BAAM,IAAI,qBAAoB,GAA9B;MACf,uBAAuB,wBAAC,aAAa,IAAI,6BAA6B,QAAQ,GAAvD;;;AAGnC;AAdgB;;;ACtFV,IAAW;CAAjB,SAAiBC,SAAM;AACN,EAAAA,QAAA,QAAQ,CAA4B,IAAmB,OAAuB,OAAO,OAAO,CAAA,GAAI,EAAE,GAAG,EAAE;AACxH,GAFiB,WAAA,SAAM,CAAA,EAAA;AA0BjB,SAAU,OACZ,SAAwB,SAAyB,SAAyB,SAAyB,SAAyB,SAAyB,SAAyB,SAAyB,SAAuB;AAE9N,QAAM,SAAS,CAAC,SAAS,SAAS,SAAS,SAAS,SAAS,SAAS,SAAS,SAAS,OAAO,EAAE,OAAO,QAAQ,CAAA,CAAE;AAClH,SAAO,QAAQ,MAAM;AACzB;AALgB;AAOhB,IAAM,UAAU,OAAO,SAAS;AAM1B,SAAU,UAAa,MAAO;AAChC,MAAI,QAAS,KAAa,OAAO,GAAG;AAChC,eAAW,SAAS,OAAO,OAAO,IAAI,GAAG;AACrC,gBAAU,KAAK;;;AAGvB,SAAO;AACX;AAPgB;AAahB,SAAS,QAAc,QAAsB,UAAc;AACvD,QAAM,QAAa,IAAI,MAAM,CAAA,GAAW;IACpC,gBAAgB,6BAAM,OAAN;IAChB,KAAK,wBAAC,KAAK,SAAS,SAAS,KAAK,MAAM,QAAQ,YAAY,KAAK,GAA5D;IACL,0BAA0B,wBAAC,KAAK,UAAU,SAAS,KAAK,MAAM,QAAQ,YAAY,KAAK,GAAG,OAAO,yBAAyB,KAAK,IAAI,IAAzG;IAC1B,KAAK,wBAAC,GAAG,SAAS,QAAQ,QAArB;IACL,SAAS,6BAAM,CAAC,GAAG,QAAQ,QAAQ,MAAM,GAAG,OAAO,GAA1C;;GACZ;AACD,QAAM,OAAO,IAAI;AACjB,SAAO;AACX;AAVS;AAgBT,IAAM,gBAAgB,OAAM;AAc5B,SAAS,SAAe,KAAU,MAAgC,QAAsB,UAAW;AAC/F,MAAI,QAAQ,KAAK;AACb,QAAI,IAAI,IAAI,aAAa,OAAO;AAC5B,YAAM,IAAI,MAAM,oFAAoF,EAAC,OAAO,IAAI,IAAI,EAAC,CAAC;;AAE1H,QAAI,IAAI,IAAI,MAAM,eAAe;AAC7B,YAAM,IAAI,MAAM,kCAAkC,OAAO,IAAI,IAAI,4FAA4F;;AAEjK,WAAO,IAAI,IAAI;aACR,QAAQ,QAAQ;AACvB,UAAM,QAA+D,OAAO,IAAe;AAC3F,QAAI,IAAI,IAAI;AACZ,QAAI;AACA,UAAI,IAAI,IAAK,OAAO,UAAU,aAAc,MAAM,QAAQ,IAAI,QAAQ,OAAO,QAAQ;aAChF,OAAO;AACZ,UAAI,IAAI,IAAI,iBAAiB,QAAQ,QAAQ;AAC7C,YAAM;;AAEV,WAAO,IAAI,IAAI;SACZ;AACH,WAAO;;AAEf;AAtBS;AA+BT,SAAS,OAAO,QAAqB,QAAoB;AACrD,MAAI,QAAQ;AACR,eAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,MAAM,GAAG;AAChD,UAAI,WAAW,QAAW;AACtB,cAAM,SAAS,OAAO,GAAG;AACzB,YAAI,WAAW,QAAQ,WAAW,QAAQ,OAAO,WAAW,YAAY,OAAO,WAAW,UAAU;AAChG,iBAAO,GAAG,IAAI,OAAO,QAAQ,MAAM;eAChC;AACH,iBAAO,GAAG,IAAI;;;;;AAK9B,SAAO;AACX;AAdS;;;ACtIT;;;;;;;;;;;;;;;;;;;;;;aAAAC;EAAA;;;;;;;;;;;AAOA,0BAAc;;;ACyBR,IAAO,0BAAP,MAA8B;EAhCpC,OAgCoC;;;EAEhC,WAAQ;AACJ,UAAM,IAAI,MAAM,8BAA8B;EAClD;EAEA,MAAM,gBAAa;AACf,WAAO,CAAA;EACX;;AAIG,IAAM,kBAAkB;EAC3B,oBAAoB,6BAAM,IAAI,wBAAuB,GAAjC;;;;AC9BxB,IAAM,uBAAgF;EAClF,SAAS,6BAAM,QAAN;EACT,kBAAkB,8BAAO;IACrB,iBAAiB;IACjB,gBA
AgB,CAAC,UAAU;IAC3B,YAAY;MAHE;;AAOtB,IAAM,6BAAkG;EACpG,eAAe,6BAAM,IAAQ,4BAA2B,GAAzC;;AAGnB,SAAS,+BAA4B;AACjC,QAAM,SAAS,OACX,8BAA8B,eAAe,GAC7C,0BAA0B;AAE9B,QAAM,UAAU,OACZ,wBAAwB,EAAE,OAAM,CAAE,GAClC,oBAAoB;AAExB,SAAO,gBAAgB,SAAS,OAAO;AACvC,SAAO;AACX;AAXS;AAiBH,SAAU,oBAAoB,MAAY;;AAC5C,QAAM,WAAW,6BAA4B;AAC7C,QAAM,UAAU,SAAS,WAAW,eAAe,YAAY,IAAI;AACnE,WAAS,OAAO,UAAU,uBAAuB,UAAU,SAASC,KAAI,MAAM,aAAY,KAAA,QAAQ,UAAI,QAAA,OAAA,SAAA,KAAI,SAAS,UAAU,CAAC;AAC9H,SAAO;AACX;AALgB;;;A9G7BhB,wBAAc;;;A+GhBd,IAAI,YAAY,OAAO;AACvB,IAAIC,UAAS,wBAAC,QAAQ,UAAU,UAAU,QAAQ,QAAQ,EAAE,OAAO,cAAc,KAAK,CAAC,GAA1E;AAIb,IAAI,YAAY;AAChB,IAAI,SAAS;AACb,SAAS,SAAS,MAAM;AACtB,SAAOC,YAAW,WAAW,MAAM,MAAM;AAC3C;AAFS;AAGTD,QAAO,UAAU,UAAU;AAC3B,IAAI,WAAW;AACf,IAAI,gBAAgB;AACpB,IAAI,SAAS;AACb,SAAS,SAAS,MAAM;AACtB,SAAOC,YAAW,WAAW,MAAM,MAAM;AAC3C;AAFS;AAGTD,QAAO,UAAU,UAAU;AAC3B,IAAI,SAAS;AACb,SAAS,SAAS,MAAM;AACtB,SAAOC,YAAW,WAAW,MAAM,MAAM;AAC3C;AAFS;AAGTD,QAAO,UAAU,UAAU;AAC3B,IAAI,WAAW;AACf,SAAS,WAAW,MAAM;AACxB,SAAOC,YAAW,WAAW,MAAM,QAAQ;AAC7C;AAFS;AAGTD,QAAO,YAAY,YAAY;AAC/B,IAAI,OAAO;AACX,SAAS,OAAO,MAAM;AACpB,SAAOC,YAAW,WAAW,MAAM,IAAI;AACzC;AAFS;AAGTD,QAAO,QAAQ,QAAQ;AACvB,IAAI,QAAQ;AACZ,SAAS,QAAQ,MAAM;AACrB,SAAOC,YAAW,WAAW,MAAM,KAAK;AAC1C;AAFS;AAGTD,QAAO,SAAS,SAAS;AACzB,IAAI,SAAS;AACb,SAAS,SAAS,MAAM;AACtB,SAAOC,YAAW,WAAW,MAAM,MAAM;AAC3C;AAFS;AAGTD,QAAO,UAAU,UAAU;AAC3B,IAAI,cAAc;AAClB,SAAS,cAAc,MAAM;AAC3B,SAAOC,YAAW,WAAW,MAAM,WAAW;AAChD;AAFS;AAGTD,QAAO,eAAe,eAAe;AACrC,IAAI,MAAM;AACV,SAAS,MAAM,MAAM;AACnB,SAAOC,YAAW,WAAW,MAAM,GAAG;AACxC;AAFS;AAGTD,QAAO,OAAO,OAAO;AACrB,IAAI,aAAa;AACjB,SAAS,aAAa,MAAM;AAC1B,SAAOC,YAAW,WAAW,MAAM,UAAU;AAC/C;AAFS;AAGTD,QAAO,cAAc,cAAc;AACnC,IAAI,YAAY;AAChB,IAAI,uBAAuB,cAAc,sBAAsB;AAAA,EA3D/D,OA2D+D;AAAA;AAAA;AAAA,EAC7D,OAAO;AACL,IAAAA,QAAO,MAAM,sBAAsB;AAAA,EACrC;AAAA,EACA,cAAc;AACZ,WAAO,CAAC,UAAU,YAAY,iBAAiB,UAAU,UAAU,aAAa,YAAY,QAAQ,SAAS,UAAU,eAAe,OAAO,cAAc,WAAW;AAAA,EACxK;AAAA,EACA,iBAAiB,SAAS,WAAW;AACnC,YAAQ,SAAS;AAAA,MACf,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,OAAO;AACV,eAAO,KAAK,UAAU,WAAW,SAAS;AAAA,MAC5C;AAAA,MACA,KAAK,WAAW;AACd,eAAO,KAAK,UAAU,UAAU,SAAS;AAAA,MAC3C;AAAA,MACA,SAAS;AACP,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AAAA,EACA,iBAAiB,SAAS;AACxB,UAAM,cAAc,GAAG,QAAQ,UAAU,KAAK,IAAI,QAAQ,QAAQ;AAClE,YAAQ,aAAa;AAAA,MACnB,SAAS;AACP,cAAM,IAAI,MAAM,GAAG,WAAW,+BAA+B;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AAAA,EACA,gBAAgB,MAAM;AACpB,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,OAAO;AAAA,YACf,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,YAAY;AACf,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,SAAS;AAAA,UACnB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,iBAAiB;AACpB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,KAAK;AAAA,YACb,EAAE,MAAM,SAAS;AAAA,YACjB,EAAE,MAAM,QAAQ,cAAc,CAAC,EAAE;AAAA,UACnC;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,KAAK;AAAA,YACb,EAAE,MAAM,UAAU;AAAA,YAClB,EAAE,MAAM,QAAQ,cAAc,CAAC,EAAE;AAAA,YACjC,EAAE,MAAM,OAAO;AAAA,UACjB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,YAAY;AACf,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,cAAc,cAAc,CAAC,EAAE;AAAA,YACvC,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF
;AAAA,MACF;AAAA,MACA,KAAK,SAAS;AACZ,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,SAAS;AAAA,YACjB,EAAE,MAAM,KAAK;AAAA,YACb,EAAE,MAAM,QAAQ,cAAc,CAAC,EAAE;AAAA,YACjC,EAAE,MAAM,OAAO;AAAA,UACjB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,UAAU,cAAc,CAAC,EAAE;AAAA,YACnC,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,MAAM;AAAA,YACd,EAAE,MAAM,QAAQ;AAAA,YAChB,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,OAAO;AACV,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,YAAY,cAAc,CAAC,EAAE;AAAA,YACrC,EAAE,MAAM,YAAY,cAAc,MAAM;AAAA,YACxC,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,cAAc;AACjB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,QAAQ;AAAA,YAChB,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,KAAK,aAAa;AAChB,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY;AAAA,YACV,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,WAAW;AAAA,YACnB,EAAE,MAAM,MAAM;AAAA,YACd,EAAE,MAAM,cAAc,cAAc,CAAC,EAAE;AAAA,YACvC,EAAE,MAAM,QAAQ;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAAA,MACA,SAAS;AACP,eAAO;AAAA,UACL,MAAM;AAAA,UACN,YAAY,CAAC;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AACA,IAAIC,cAAa,IAAI,qBAAqB;AAI1C,IAAI;AACJ,IAAI,cAA8B,gBAAAD,QAAO,MAAM,sBAAsB,oBAAoB,oBAAoB,ulHAAulH,IAAI,aAAa;AACrtH,IAAI;AACJ,IAAI,gBAAgC,gBAAAA,QAAO,MAAM,wBAAwB,sBAAsB,oBAAoB,2hKAA2hK,IAAI,eAAe;AACjqK,IAAI;AACJ,IAAI,aAA6B,gBAAAA,QAAO,MAAM,qBAAqB,mBAAmB,oBAAoB,+9JAA+9J,IAAI,YAAY;AACzlK,IAAI;AACJ,IAAI,kBAAkC,gBAAAA,QAAO,MAAM,0BAA0B,wBAAwB,oBAAoB,6sUAA6sU,IAAI,iBAAiB;AAG31U,IAAI,uBAAuB;AAAA,EACzB,YAAY;AAAA,EACZ,gBAAgB,CAAC,QAAQ,UAAU;AAAA,EACnC,iBAAiB;AACnB;AACA,IAAI,yBAAyB;AAAA,EAC3B,YAAY;AAAA,EACZ,gBAAgB,CAAC,QAAQ,UAAU;AAAA,EACnC,iBAAiB;AACnB;AACA,IAAI,sBAAsB;AAAA,EACxB,YAAY;AAAA,EACZ,gBAAgB,CAAC,QAAQ,UAAU;AAAA,EACnC,iBAAiB;AACnB;AACA,IAAI,2BAA2B;AAAA,EAC7B,YAAY;AAAA,EACZ,gBAAgB,CAAC,QAAQ,UAAU;AAAA,EACnC,iBAAiB;AACnB;AACA,IAAI,+BAA+B;AAAA,EACjC,eAA+B,gBAAAA,QAAO,MAAM,IAAI,qBAAqB,GAAG,eAAe;AACzF;AACA,IAAI,sBAAsB;AAAA,EACxB,SAAyB,gBAAAA,QAAO,MAAM,YAAY,GAAG,SAAS;AAAA,EAC9D,kBAAkC,gBAAAA,QAAO,MAAM,sBAAsB,kBAAkB;AAAA,EACvF,QAAQ,CAAC;AACX;AACA,IAAI,wBAAwB;AAAA,EAC1B,SAAyB,gBAAAA,QAAO,MAAM,cAAc,GAAG,SAAS;AAAA,EAChE,kBAAkC,gBAAAA,QAAO,MAAM,wBAAwB,kBAAkB;AAAA,EACzF,QAAQ,CAAC;AACX;AACA,IAAI,qBAAqB;AAAA,EACvB,SAAyB,gBAAAA,QAAO,MAAM,WAAW,GAAG,SAAS;AAAA,EAC7D,kBAAkC,gBAAAA,QAAO,MAAM,qBAAqB,kBAAkB;AAAA,EACtF,QAAQ,CAAC;AACX;AACA,IAAI,0BAA0B;AAAA,EAC5B,SAAyB,gBAAAA,QAAO,MAAM,gBAAgB,GAAG,SAAS;AAAA,EAClE,kBAAkC,gBAAAA,QAAO,MAAM,0BAA0B,kBAAkB;AAAA,EAC3F,QAAQ,CAAC;AACX;AAMA,IAAI,0BAA0B;AAC9B,IAAI,0BAA0B;AAC9B,IAAI,aAAa;AAGjB,IAAI,eAAe;AAAA,EACjB,WAAW;AAAA,EACX,WAAW;AAAA,EACX,OAAO;AACT;AACA,IAAI,gCAAgC,cAAc,sBAAsB;AAAA,EApTxE,OAoTwE;AAAA;AAAA;AAAA,EACtE,OAAO;AACL,IAAAA,QAAO,MAAM,+BAA+B;AAAA,EAC9C;AAAA,EACA,aAAa,MAAM,OAAO,SAAS;AACjC,QAAI,QAAQ,KAAK,mBAAmB,MAAM,OAAO,OAAO;AACxD,QAAI,UAAU,QAAQ;AACpB,cAAQ,KAAK,mBAAmB,MAAM,OAAO,OAAO;AAAA,IACtD;AACA,QAAI,UAAU,QAAQ;AACpB,aAAO,MAAM,aAAa,MAAM,OAAO,OAAO;AAAA,IAChD;AACA,WAAO;AAAA,EACT;AAAA,EACA,mBAAmB,MAAM,OAAO,UAAU;AACxC,UAAM,QAAQ,aAAa,KAAK,IAAI;AACpC,QAAI,UAAU,QAAQ;AACpB,aAAO;AAAA,IACT;AACA,UAAM,QAAQ,MAAM,KAAK,KAAK;AAC9B,QAAI,UAAU,MAAM;AAClB,aAAO;AAAA,IACT;AACA,QAAI,MAAM,CAAC,MAAM,QAAQ;AACvB,aAAO,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,eAAe,GAAG;AAAA,IACnD;AACA,QAAI,MAAM,CAAC,MAAM,QAAQ;AACvB,aAAO,MAAM,CAAC,EAAE,QAAQ,UAAU,EAAE,EAAE,QAAQ,UAAU,EAAE,EAAE,QAAQ,eAAe,GAAG,EAAE,QAAQ,gBAAgB
,IAAI;AAAA,IACtH;AACA,WAAO;AAAA,EACT;AACF;AACA,IAAI,uBAAuB,cAAc,8BAA8B;AAAA,EApVvE,OAoVuE;AAAA;AAAA;AAAA,EACrE,OAAO;AACL,IAAAA,QAAO,MAAM,sBAAsB;AAAA,EACrC;AAAA,EACA,mBAAmB,OAAO,QAAQ,UAAU;AAC1C,WAAO;AAAA,EACT;AACF;AAIA,IAAI,8BAA8B,cAAc,oBAAoB;AAAA,EA/VpE,OA+VoE;AAAA;AAAA;AAAA,EAClE,OAAO;AACL,IAAAA,QAAO,MAAM,6BAA6B;AAAA,EAC5C;AAAA,EACA,YAAY,UAAU;AACpB,UAAM;AACN,SAAK,WAAW,IAAI,IAAI,QAAQ;AAAA,EAClC;AAAA,EACA,mBAAmB,OAAO,gBAAgB,SAAS;AACjD,UAAM,aAAa,MAAM,mBAAmB,OAAO,gBAAgB,OAAO;AAC1E,eAAW,QAAQ,CAAC,cAAc;AAChC,UAAI,KAAK,SAAS,IAAI,UAAU,IAAI,KAAK,UAAU,YAAY,QAAQ;AACrE,kBAAU,UAAU,IAAI,OAAO,UAAU,QAAQ,SAAS,IAAI,oBAAoB;AAAA,MACpF;AAAA,IACF,CAAC;AACD,WAAO;AAAA,EACT;AACF;AACA,IAAI,qBAAqB,cAAc,4BAA4B;AAAA,EAjXnE,OAiXmE;AAAA;AAAA;AAAA,EACjE,OAAO;AACL,IAAAA,QAAO,MAAM,oBAAoB;AAAA,EACnC;AACF;",
  "names": ["RAL", "Event", "Emitter", "Is", "CancellationToken", "CancellationTokenSource", "TextDocument", "URI", "stream", "Reduction", "RangeComparison", "reflection", "newState", "atom", "process", "first", "visitor", "atom", "LexerDefinitionErrorType", "msg", "currConfig", "tokenLabel", "hasTokenLabel", "hasTokenLabel", "hasTokenLabel", "tokenLabel", "getExtraProductionArgument", "PROD_TYPE", "tokenMatcher", "prefixKeys", "visitor", "collectorVisitor", "option", "alternation", "resolveGrammar", "validateGrammar", "alternation", "repetition", "option", "repetitionMandatory", "CstVisitorDefinitionError", "newState", "allTokenTypes", "invokeRuleWithTry", "newState", "ParserDefinitionErrorType", "resolveGrammar", "validateGrammar", "repetition", "alternation", "option", "block", "plus", "star", "optional", "getProdType", "first", "closure", "newState", "tokenLabel", "getProductionDslName", "DocumentUri", "URI", "integer", "uinteger", "Position", "Range", "Location", "LocationLink", "Color", "ColorInformation", "ColorPresentation", "FoldingRangeKind", "FoldingRange", "DiagnosticRelatedInformation", "DiagnosticSeverity", "DiagnosticTag", "CodeDescription", "Diagnostic", "Command", "TextEdit", "ChangeAnnotation", "ChangeAnnotationIdentifier", "AnnotatedTextEdit", "TextDocumentEdit", "CreateFile", "RenameFile", "DeleteFile", "WorkspaceEdit", "TextDocumentIdentifier", "VersionedTextDocumentIdentifier", "OptionalVersionedTextDocumentIdentifier", "TextDocumentItem", "MarkupKind", "MarkupContent", "CompletionItemKind", "InsertTextFormat", "CompletionItemTag", "InsertReplaceEdit", "InsertTextMode", "CompletionItemLabelDetails", "CompletionItem", "CompletionList", "MarkedString", "Hover", "ParameterInformation", "SignatureInformation", "DocumentHighlightKind", "DocumentHighlight", "SymbolKind", "SymbolTag", "SymbolInformation", "WorkspaceSymbol", "DocumentSymbol", "CodeActionKind", "CodeActionTriggerKind", "CodeActionContext", "CodeAction", "CodeLens", "FormattingOptions", "DocumentLink", "SelectionRange", "SemanticTokenTypes", "SemanticTokenModifiers", "SemanticTokens", "InlineValueText", "InlineValueVariableLookup", "InlineValueEvaluatableExpression", "InlineValueContext", "InlayHintKind", "InlayHintLabelPart", "InlayHint", "StringValue", "InlineCompletionItem", "InlineCompletionList", "InlineCompletionTriggerKind", "SelectedCompletionInfo", "InlineCompletionContext", "WorkspaceFolder", "TextDocument", "mergeSort", "Is", "toString", "undefined", "integer", "uinteger", "ValueConverter", "FullTextDocument", "TextDocument", "assertPath", "path", "TypeError", "JSON", "stringify", "normalizeStringPosix", "allowAboveRoot", "code", "res", "lastSegmentLength", "lastSlash", "dots", "i", "length", "charCodeAt", "lastSlashIndex", "lastIndexOf", "slice", "posix", "resolve", "cwd", "resolvedPath", "resolvedAbsolute", "arguments", "process", "normalize", "isAbsolute", "trailingSeparator", "join", "joined", "arg", "relative", "from", "to", "fromStart", "fromEnd", "fromLen", "toStart", "toLen", "lastCommonSep", "fromCode", "out", "_makeLong", "dirname", "hasRoot", "end", "matchedSlash", "basename", "ext", "start", "extIdx", "firstNonSlashEnd", "extname", "startDot", "startPart", "preDotState", "format", "pathObject", "sep", "dir", "root", "base", "name", "parse", "ret", "delimiter", "win32", "module", "exports", "__webpack_module_cache__", "__webpack_require__", "moduleId", "cachedModule", "__webpack_modules__", "d", "definition", "key", "o", "Object", "defineProperty", "enumerable", "get", "obj", "prop", 
"prototype", "hasOwnProperty", "call", "r", "Symbol", "toStringTag", "value", "isWindows", "platform", "navigator", "userAgent", "indexOf", "_schemePattern", "_singleSlashStart", "_doubleSlashStart", "_validateUri", "_strict", "scheme", "Error", "authority", "query", "fragment", "test", "_empty", "_slash", "_regexp", "URI", "thing", "fsPath", "with", "toString", "schemeOrData", "this", "uriToFsPath", "change", "Uri", "match", "exec", "percentDecode", "replace", "idx", "substring", "components", "result", "skipEncoding", "_asFormatted", "toJSON", "data", "_formatted", "external", "_fsPath", "_sep", "_pathSepMarker", "$mid", "encodeTable", "encodeURIComponentFast", "uriComponent", "isPath", "isAuthority", "nativeEncodePos", "pos", "encodeURIComponent", "charAt", "substr", "escaped", "encodeURIComponentMinimal", "uri", "keepDriveLetterCasing", "toLowerCase", "encoder", "userinfo", "String", "fromCharCode", "decodeURIComponentGraceful", "str", "decodeURIComponent", "_rEncodedAsHex", "posixPath", "slash", "Utils", "t", "joinPath", "paths", "resolvePath", "slashAdded", "UriUtils", "DocumentState", "URI", "TextDocument", "node", "documentUri", "URI", "document", "ValidationCategory", "DocumentValidator", "Disposable", "_a", "URI", "first", "option", "URI", "Module", "URI", "URI", "__name", "reflection"]
}