
package.dist.umd.index.min.js.map

{"version":3,"file":"index.min.js","sources":["../../lib/regex.mjs","../../lib/strings.mjs","../../lib/index.mjs"],"sourcesContent":["\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {string[]} chars\n * @return {string}\n */\nexport const arrayToPattern = (chars) =>{\n\n\tchars = chars.filter( Boolean );\n\n\tif( chars.length < 2 ){\n\t\treturn chars[0] || '';\n\t}\n\n\treturn (maxValueLength(chars) == 1) ? '['+chars.join('')+']' : '(?:'+chars.join('|')+')';\n};\n\n/**\n * @param {string[]} array\n * @return {string}\n */\nexport const sequencePattern = (array)=>{\n\n\tif( !hasDuplicates(array) ){\n\t\treturn array.join('');\n\t}\n\n\tlet pattern = '';\n\tlet prev_char_count = 0;\n\n\tconst prev_pattern = ()=>{\n\t\tif( prev_char_count > 1 ){\n\t\t\tpattern += '{'+prev_char_count+'}';\n\t\t}\n\t}\n\n\tarray.forEach((char,i)=>{\n\n\t\tif( char === array[i-1] ){\n\t\t\tprev_char_count++;\n\t\t\treturn;\n\t\t}\n\n\t\tprev_pattern();\n\n\t\tpattern += char;\n\t\tprev_char_count = 1;\n\t});\n\n\tprev_pattern();\n\n\treturn pattern;\n\n}\n\n\n\n/**\n * Convert array of strings to a regular expression\n *\tex ['ab','a'] => (?:ab|a)\n * \tex ['a','b'] => [ab]\n * @param {Set} chars\n * @return {string}\n */\nexport const setToPattern = (chars)=>{\n\tlet array = toArray(chars);\n\treturn arrayToPattern(array);\n}\n\n\n\n/**\n *\n * https://stackoverflow.com/questions/7376598/in-javascript-how-do-i-check-if-an-array-has-duplicate-values\n * @param {any[]} array\n */\nexport const hasDuplicates = (array) => {\n    return (new Set(array)).size !== array.length;\n}\n\n\n/**\n * https://stackoverflow.com/questions/63006601/why-does-u-throw-an-invalid-escape-error\n * @param {string} str\n * @return {string}\n */\nexport const escape_regex = (str) => {\n\treturn (str + '').replace(/([\\$\\(\\)\\*\\+\\.\\?\\[\\]\\^\\{\\|\\}\\\\])/gu, '\\\\$1');\n};\n\n/**\n * Return the max length of array values\n * @param {string[]} array\n *\n */\nexport const maxValueLength = (array) => {\n\treturn array.reduce( (longest, value) => Math.max(longest,unicodeLength(value)),0);\n}\n\n\n/**\n * @param {string} str\n */\nexport const unicodeLength = (str) => {\n\treturn toArray(str).length;\n}\n\n/**\n * @param {any} p\n * @return {any[]}\n */\nexport const toArray = (p) => Array.from(p);\n","\n\n\n/**\n * Get all possible combinations of substrings that add up to the given string\n * https://stackoverflow.com/questions/30169587/find-all-the-combination-of-substrings-that-add-up-to-the-given-string\n * @param {string} input\n * @return {string[][]}\n */\nexport const allSubstrings = (input) => {\n\n    if( input.length === 1) return [[input]];\n\n\t/** @type {string[][]} */\n    let result = [];\n\n\tconst start = input.substring(1);\n    const suba = allSubstrings(start);\n\n\tsuba.forEach(function(subresult) {\n        let tmp = subresult.slice(0);\n        tmp[0] = input.charAt(0) + tmp[0];\n        result.push(tmp);\n\n        tmp = subresult.slice(0);\n        tmp.unshift(input.charAt(0));\n        result.push(tmp);\n    });\n\n    return result;\n}\n","\n/**\n * @typedef {{[key:string]:string}} TUnicodeMap\n * @typedef {{[key:string]:Set}} TUnicodeSets\n * @typedef {[[number,number]]} TCodePoints\n * @typedef {{folded:string,composed:string,code_point:number}} TCodePointObj\n * @typedef {{start:number,end:number,length:number,substr:string}} TSequencePart\n */\n\n\nimport { setToPattern, arrayToPattern, escape_regex, sequencePattern, 
toArray } from './regex.mjs';\nimport { allSubstrings } from './strings.mjs';\n\n\n/** @type {TCodePoints} */\nexport const code_points = [[ 0, 65535 ]];\n\nconst accent_pat = '[\\u0300-\\u036F\\u{b7}\\u{2be}\\u{2bc}]';\n\n/** @type {TUnicodeMap} */\nexport let unicode_map;\n\n/** @type {RegExp} */\nlet multi_char_reg;\n\nconst max_char_length = 3;\n\n/** @type {TUnicodeMap} */\nconst latin_convert = {}\n\n/** @type {TUnicodeMap} */\nconst latin_condensed = {\n\t'/': '⁄∕',\n\t'0': '߀',\n\t\"a\": \"ⱥɐɑ\",\n\t\"aa\": \"ꜳ\",\n\t\"ae\": \"æǽǣ\",\n\t\"ao\": \"ꜵ\",\n\t\"au\": \"ꜷ\",\n\t\"av\": \"ꜹꜻ\",\n\t\"ay\": \"ꜽ\",\n\t\"b\": \"ƀɓƃ\",\n\t\"c\": \"ꜿƈȼↄ\",\n\t\"d\": \"đɗɖᴅƌꮷԁɦ\",\n\t\"e\": \"ɛǝᴇɇ\",\n\t\"f\": \"ꝼƒ\",\n\t\"g\": \"ǥɠꞡᵹꝿɢ\",\n\t\"h\": \"ħⱨⱶɥ\",\n\t\"i\": \"ɨı\",\n\t\"j\": \"ɉȷ\",\n\t\"k\": \"ƙⱪꝁꝃꝅꞣ\",\n\t\"l\": \"łƚɫⱡꝉꝇꞁɭ\",\n\t\"m\": \"ɱɯϻ\",\n\t\"n\": \"ꞥƞɲꞑᴎлԉ\",\n\t\"o\": \"øǿɔɵꝋꝍᴑ\",\n\t\"oe\": \"œ\",\n\t\"oi\": \"ƣ\",\n\t\"oo\": \"ꝏ\",\n\t\"ou\": \"ȣ\",\n\t\"p\": \"ƥᵽꝑꝓꝕρ\",\n\t\"q\": \"ꝗꝙɋ\",\n\t\"r\": \"ɍɽꝛꞧꞃ\",\n\t\"s\": \"ßȿꞩꞅʂ\",\n\t\"t\": \"ŧƭʈⱦꞇ\",\n\t\"th\": \"þ\",\n\t\"tz\": \"ꜩ\",\n\t\"u\": \"ʉ\",\n\t\"v\": \"ʋꝟʌ\",\n\t\"vy\": \"ꝡ\",\n\t\"w\": \"ⱳ\",\n\t\"y\": \"ƴɏỿ\",\n\t\"z\": \"ƶȥɀⱬꝣ\",\n\t\"hv\": \"ƕ\"\n}\n\n\nfor( let latin in latin_condensed ){\n\tlet unicode = latin_condensed[latin] || '';\n\tfor( let i = 0; i < unicode.length; i++){\n\t\tlet char\t= unicode.substring(i,i+1);\n\t\tlatin_convert[char] = latin;\n\t}\n}\n\n\nconst convert_pat = new RegExp(Object.keys(latin_convert).join('|')+'|'+accent_pat,'gu');\n\n\n\n/**\n * Initialize the unicode_map from the give code point ranges\n *\n * @param {TCodePoints=} _code_points\n */\nexport const initialize = (_code_points) => {\n\tif( unicode_map !== undefined ) return;\n\tunicode_map = generateMap(_code_points || code_points );\n}\n\n\n/**\n * Helper method for normalize a string\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/normalize\n * @param {string} str\n * @param {string} form\n */\nexport const normalize = (str,form='NFKD') => str.normalize(form);\n\n\n\n/**\n * Remove accents without reordering string\n * calling str.normalize('NFKD') on \\u{594}\\u{595}\\u{596} becomes \\u{596}\\u{594}\\u{595}\n * via https://github.com/krisk/Fuse/issues/133#issuecomment-318692703\n * @param {string} str\n * @return {string}\n */\nexport const asciifold = (str) => {\n\n\treturn toArray(str).reduce(\n\t\t/**\n\t\t * @param {string} result\n\t\t * @param {string} char\n\t\t */\n\t\t(result, char) =>{\n\t\t\treturn result + _asciifold(char)\n\t\t},\n\t\t''\n\t);\n};\n\n/**\n * @param {string} str\n * @return {string}\n */\nexport const _asciifold = (str) => {\n\tstr = normalize(str)\n\t\t.toLowerCase()\n\t\t.replace(convert_pat,(/** @type {string} */ char) => {\n\t\t\treturn latin_convert[char] || '';\n\t\t})\n\n\t//return str;\n\treturn normalize(str,'NFC')\n};\n\n\n\n\n\n\n/**\n * Generate a list of unicode variants from the list of code points\n * @param {TCodePoints} code_points\n * @yield {TCodePointObj}\n */\nexport function* generator(code_points){\n\n\tfor(const [code_point_min, code_point_max] of code_points){\n\t\tfor(let i = code_point_min; i <= code_point_max; i++){\n\n\t\t\tlet composed\t\t= String.fromCharCode(i);\n\t\t\tlet folded\t\t\t= asciifold(composed);\n\n\n\t\t\tif( folded == composed.toLowerCase() ){\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// skip when folded is a string longer than 3 characters long\n\t\t\t// bc the resulting regex patterns will be 
long\n\t\t\t// eg:\n\t\t\t// folded صلى الله عليه وسلم length 18 code point 65018\n\t\t\t// folded جل جلاله length 8 code point 65019\n\t\t\tif( folded.length > max_char_length ){\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tif( folded.length == 0 ){\n\t\t\t\tcontinue\n\t\t\t}\n\n\n\t\t\tyield {folded:folded,composed:composed,code_point:i};\n\t\t}\n\t}\n}\n\n\n/**\n * Generate a unicode map from the list of code points\n * @param {TCodePoints} code_points\n * @return {TUnicodeSets}\n */\nexport const generateSets = (code_points) => {\n\n\t/** @type {{[key:string]:Set}} */\n\tconst unicode_sets = {};\n\n\n\t/**\n\t * @param {string} folded\n\t * @param {string} to_add\n\t */\n\tconst addMatching = (folded,to_add) => {\n\n\t\t/** @type {Set} */\n\t\tconst folded_set = unicode_sets[folded] || new Set();\n\n\t\tconst patt = new RegExp( '^'+setToPattern(folded_set)+'$','iu');\n\t\tif( to_add.match(patt) ){\n\t\t\treturn;\n\t\t}\n\n\t\tfolded_set.add(escape_regex(to_add));\n\t\tunicode_sets[folded] = folded_set;\n\t}\n\n\n\tfor( let value of generator(code_points) ){\n\t\taddMatching(value.folded,value.folded);\n\t\taddMatching(value.folded,value.composed);\n\t}\n\n\treturn unicode_sets;\n}\n\n/**\n * Generate a unicode map from the list of code points\n * ae => (?:(?:ae|Æ|Ǽ|Ǣ)|(?:A|Ⓐ|A...)(?:E|ɛ|Ⓔ...))\n *\n * @param {TCodePoints} code_points\n * @return {TUnicodeMap}\n */\nexport const generateMap = (code_points) => {\n\n\t/** @type {TUnicodeSets} */\n\tconst unicode_sets = generateSets(code_points);\n\n\t/** @type {TUnicodeMap} */\n\tconst unicode_map = {};\n\n\t/** @type {string[]} */\n\tlet multi_char = [];\n\n\tfor( let folded in unicode_sets ){\n\n\t\tlet set = unicode_sets[folded];\n\t\tif( set ){\n\t\t\tunicode_map[folded] = setToPattern(set);\n\t\t}\n\n\t\tif( folded.length > 1 ){\n\t\t\tmulti_char.push(escape_regex(folded));\n\t\t}\n\t}\n\n\tmulti_char.sort((a, b) => b.length - a.length );\n\tconst multi_char_patt = arrayToPattern(multi_char);\n\tmulti_char_reg = new RegExp('^'+multi_char_patt,'u');\n\n\treturn unicode_map;\n}\n\n\n/**\n * Map each element of an array from it's folded value to all possible unicode matches\n * @param {string[]} strings\n * @param {number} min_replacement\n * @return {string}\n */\nexport const mapSequence = (strings,min_replacement=1) =>{\n\tlet chars_replaced = 0;\n\n\n\tstrings = strings.map((str)=>{\n\t\tif( unicode_map[str] ){\n\t\t\tchars_replaced += str.length;\n\t\t}\n\t\treturn unicode_map[str] || str;\n\t});\n\n\tif( chars_replaced >= min_replacement ){\n\t\treturn sequencePattern(strings);\n\t}\n\n\treturn '';\n}\n\n/**\n * Convert a short string and split it into all possible patterns\n * Keep a pattern only if min_replacement is met\n *\n * 'abc'\n * \t\t=> [['abc'],['ab','c'],['a','bc'],['a','b','c']]\n *\t\t=> ['abc-pattern','ab-c-pattern'...]\n *\n *\n * @param {string} str\n * @param {number} min_replacement\n * @return {string}\n */\nexport const substringsToPattern = (str,min_replacement=1) => {\n\n\tmin_replacement = Math.max(min_replacement,str.length-1);\n\n\treturn arrayToPattern(\n\t\tallSubstrings(str).map( (sub_pat) =>{\n\t\t\treturn mapSequence(sub_pat,min_replacement)\n\t\t})\n\t);\n}\n\n/**\n * Convert an array of sequences into a pattern\n * [{start:0,end:3,length:3,substr:'iii'}...] => (?:iii...)\n *\n * @param {Sequence[]} sequences\n * @param {boolean} all\n */\nconst sequencesToPattern = (sequences,all=true) => {\n\n\tlet min_replacement = sequences.length > 1 ? 
1 : 0;\n\treturn arrayToPattern(\n\t\tsequences.map( (sequence) =>{\n\t\t\tlet seq = [];\n\t\t\tconst len = all ? sequence.length() : sequence.length() - 1;\n\t\t\tfor( let j = 0; j < len; j++){\n\t\t\t\tseq.push(substringsToPattern(sequence.substrs[j]||'',min_replacement));\n\t\t\t}\n\n\t\t\treturn sequencePattern(seq);\n\t\t})\n\t);\n}\n\n/**\n * Return true if the sequence is already in the sequences\n * @param {Sequence} needle_seq\n * @param {Sequence[]} sequences\n */\nconst inSequences = (needle_seq, sequences) => {\n\n\tfor(const seq of sequences){\n\n\t\tif( seq.start != needle_seq.start || seq.end != needle_seq.end ){\n\t\t\tcontinue;\n\t\t}\n\n\t\tif( seq.substrs.join('') !== needle_seq.substrs.join('') ){\n\t\t\tcontinue;\n\t\t}\n\n\n\t\tlet needle_parts\t= needle_seq.parts;\n\n\t\t/**\n\t\t * @param {TSequencePart} part\n\t\t */\n\t\tconst filter = (part) =>{\n\n\t\t\tfor(const needle_part of needle_parts){\n\n\t\t\t\tif( needle_part.start === part.start && needle_part.substr === part.substr ){\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\n\t\t\t\tif( part.length == 1 || needle_part.length == 1 ){\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\n\t\t\t\t// check for overlapping parts\n\t\t\t\t// a = ['::=','==']\n\t\t\t\t// b = ['::','===']\n\t\t\t\t// a = ['r','sm']\n\t\t\t\t// b = ['rs','m']\n\t\t\t\tif( part.start < needle_part.start && part.end > needle_part.start ){\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\n\t\t\t\tif( needle_part.start < part.start && needle_part.end > part.start ){\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\treturn false;\n\t\t};\n\n\t\tlet filtered = seq.parts.filter(filter);\n\n\t\tif( filtered.length > 0 ){\n\t\t\tcontinue;\n\t\t}\n\n\t\treturn true;\n\t}\n\n\treturn false;\n}\n\nclass Sequence{\n\n\tconstructor(){\n\n\t\t/** @type {TSequencePart[]} */\n\t\tthis.parts\t\t= [];\n\n\t\t/** @type {string[]} */\n\t\tthis.substrs\t= [];\n\t\tthis.start\t\t= 0;\n\t\tthis.end\t\t= 0;\n\t}\n\n\t/**\n\t * @param {TSequencePart|undefined} part\n\t */\n\tadd(part){\n\t\tif( part ){\n\t\t\tthis.parts.push(part);\n\t\t\tthis.substrs.push(part.substr);\n\t\t\tthis.start\t= Math.min(part.start,this.start);\n\t\t\tthis.end\t= Math.max(part.end,this.end);\n\t\t}\n\t}\n\n\tlast(){\n\t\treturn this.parts[this.parts.length-1];\n\t}\n\n\tlength(){\n\t\treturn this.parts.length;\n\t}\n\n\t/**\n\t * @param {number} position\n\t * @param {TSequencePart} last_piece\n\t */\n\tclone(position, last_piece){\n\t\tlet clone = new Sequence();\n\n\t\tlet parts = JSON.parse(JSON.stringify(this.parts));\n\t\tlet last_part = parts.pop();\n\t\tfor( const part of parts ){\n\t\t\tclone.add(part);\n\t\t}\n\n\t\tlet last_substr = last_piece.substr.substring(0,position-last_part.start);\n\t\tlet clone_last_len = last_substr.length;\n\t\tclone.add({start:last_part.start,end:last_part.start+clone_last_len,length:clone_last_len,substr:last_substr});\n\n\t\treturn clone;\n\t}\n\n}\n\n/**\n * Expand a regular expression pattern to include unicode variants\n * \teg /a/ becomes /aⓐaẚàáâầấẫẩãāăằắẵẳȧǡäǟảåǻǎȁȃạậặḁąⱥɐɑAⒶAÀÁÂẦẤẪẨÃĀĂẰẮẴẲȦǠÄǞẢÅǺǍȀȂẠẬẶḀĄȺⱯ/\n *\n * Issue:\n *  ﺊﺋ [ 'ﺊ = \\\\u{fe8a}', 'ﺋ = \\\\u{fe8b}' ]\n *\tbecomes:\tئئ [ 'ي = \\\\u{64a}', 'ٔ = \\\\u{654}', 'ي = \\\\u{64a}', 'ٔ = \\\\u{654}' ]\n *\n *\tİIJ = IIJ = ⅡJ\n *\n * \t1/2/4\n *\n * @param {string} str\n * @return {string|undefined}\n */\nexport const getPattern = (str) => {\n\tinitialize();\n\n\tstr\t\t\t\t\t= asciifold(str);\n\n\tlet pattern\t\t\t= '';\n\tlet sequences\t\t= [new Sequence()];\n\n\tfor( let i = 0; i < str.length; i++ 
){\n\n\t\tlet substr\t= str.substring(i);\n\t\tlet match\t= substr.match(multi_char_reg);\n\t\tconst char\t= str.substring(i,i+1);\n\t\tconst match_str = match ? match[0] : null;\n\n\n\t\t// loop through sequences\n\t\t// add either the char or multi_match\n\t\tlet overlapping\t\t= [];\n\t\tlet added_types\t\t= new Set();\n\t\tfor(const sequence of sequences){\n\n\t\t\tconst last_piece\t= sequence.last();\n\n\n\t\t\tif( !last_piece || last_piece.length == 1 || last_piece.end <= i ){\n\n\t\t\t\t// if we have a multi match\n\t\t\t\tif( match_str ){\n\t\t\t\t\tconst len = match_str.length;\n\t\t\t\t\tsequence.add({start:i,end:i+len,length:len,substr:match_str});\n\t\t\t\t\tadded_types.add('1');\n\t\t\t\t}else{\n\t\t\t\t\tsequence.add({start:i,end:i+1,length:1,substr:char});\n\t\t\t\t\tadded_types.add('2');\n\t\t\t\t}\n\n\t\t\t}else if( match_str ){\n\n\t\t\t\tlet clone = sequence.clone(i,last_piece);\n\n\t\t\t\tconst len = match_str.length;\n\t\t\t\tclone.add({start:i,end:i+len,length:len,substr:match_str});\n\n\t\t\t\toverlapping.push(clone);\n\n\t\t\t}else{\n\t\t\t\t// don't add char\n\t\t\t\t// adding would create invalid patterns: 234 => [2,34,4]\n\t\t\t\tadded_types.add('3');\n\t\t\t}\n\n\t\t}\n\n\n\t\t// if we have overlapping\n\t\tif( overlapping.length > 0 ){\n\n\t\t\t// ['ii','iii'] before ['i','i','iii']\n\t\t\toverlapping = overlapping.sort((a,b)=>{\n\t\t\t\treturn a.length() - b.length();\n\t\t\t});\n\n\t\t\tfor( let clone of overlapping){\n\n\t\t\t\t// don't add if we already have an equivalent sequence\n\t\t\t\tif( inSequences(clone, sequences) ){\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tsequences.push(clone);\n\t\t\t}\n\n\t\t\tcontinue;\n\t\t}\n\n\n\t\t// if we haven't done anything unique\n\t\t// clean up the patterns\n\t\t// helps keep patterns smaller\n\t\t// if str = 'r₨㎧aarss', pattern will be 446 instead of 655\n\t\tif( i > 0 && added_types.size == 1 && !added_types.has('3') ){\n\t\t\tpattern += sequencesToPattern(sequences,false);\n\t\t\tlet new_seq = new Sequence();\n\t\t\tconst old_seq = sequences[0];\n\t\t\tif( old_seq ){\n\t\t\t\tnew_seq.add(old_seq.last());\n\t\t\t}\n\t\t\tsequences = [new_seq];\n\t\t}\n\n\t}\n\n\tpattern += sequencesToPattern(sequences,true);\n\n\treturn pattern;\n}\n\n\nexport { escape_regex 
};\n"],"names":["arrayToPattern","chars","filter","Boolean","length","maxValueLength","join","sequencePattern","array","hasDuplicates","pattern","prev_char_count","prev_pattern","forEach","char","i","setToPattern","toArray","Set","size","escape_regex","str","replace","reduce","longest","value","Math","max","unicodeLength","p","Array","from","allSubstrings","input","result","start","substring","subresult","tmp","slice","charAt","push","unshift","code_points","multi_char_reg","exports","unicode_map","latin_convert","latin_condensed","a","aa","ae","ao","au","av","ay","b","c","d","e","f","g","h","j","k","l","m","n","o","oe","oi","oo","ou","q","r","s","t","th","tz","u","v","vy","w","y","z","hv","latin","unicode","convert_pat","RegExp","Object","keys","initialize","_code_points","undefined","generateMap","normalize","form","asciifold","_asciifold","toLowerCase","generator","code_point_min","code_point_max","composed","String","fromCharCode","folded","code_point","generateSets","unicode_sets","addMatching","to_add","folded_set","patt","match","add","multi_char","set","sort","multi_char_patt","mapSequence","strings","min_replacement","chars_replaced","map","substringsToPattern","sub_pat","sequencesToPattern","sequences","all","sequence","seq","len","substrs","inSequences","needle_seq","end","needle_parts","parts","part","needle_part","substr","Sequence","constructor","this","min","last","clone","position","last_piece","JSON","parse","stringify","last_part","pop","last_substr","clone_last_len","match_str","overlapping","added_types","has","new_seq","old_seq"],"mappings":";;AAQO,MAAMA,EAAkBC,IAE9BA,EAAQA,EAAMC,OAAQC,UAEZC,OAAS,EACXH,EAAM,IAAM,GAGa,GAAzBI,EAAeJ,GAAe,IAAIA,EAAMK,KAAK,IAAI,IAAM,MAAML,EAAMK,KAAK,KAAK,IAOzEC,EAAmBC,IAE/B,IAAKC,EAAcD,GAClB,OAAOA,EAAMF,KAAK;AAGnB,IAAII,EAAU,GACVC,EAAkB;AAEtB,MAAMC,EAAe,KAChBD,EAAkB,IACrBD,GAAW,IAAIC,EAAgB,IAC/B;AAkBF,OAfAH,EAAMK,SAAQ,CAACC,EAAKC,KAEfD,IAASN,EAAMO,EAAE,IAKrBH,IAEAF,GAAWI,EACXH,EAAkB,GAPjBA,GAOD,IAGDC,IAEOF,CAAP,EAaYM,EAAgBf,IAC5B,IAAIO,EAAQS,EAAQhB;AACpB,OAAOD,EAAeQ,EAAtB,EAUYC,EAAiBD,GAClB,IAAIU,IAAIV,GAAQW,OAASX,EAAMJ,OAS9BgB,EAAgBC,IACpBA,EAAM,IAAIC,QAAQ,qCAAsC,QAQpDjB,EAAkBG,GACvBA,EAAMe,QAAQ,CAACC,EAASC,IAAUC,KAAKC,IAAIH,EAAQI,EAAcH,KAAQ,GAOpEG,EAAiBP,GACtBJ,EAAQI,GAAKjB,OAORa,EAAWY,GAAMC,MAAMC,KAAKF,GCxG5BG,EAAiBC,IAE1B,GAAqB,IAAjBA,EAAM7B,OAAc,MAAO,CAAC,CAAC6B;AAGjC,IAAIC,EAAS;AAEhB,MAAMC,EAAQF,EAAMG,UAAU;AAa3B,OAZaJ,EAAcG,GAEzBtB,SAAQ,SAASwB,GACf,IAAIC,EAAMD,EAAUE,MAAM;AAC1BD,EAAI,GAAKL,EAAMO,OAAO,GAAKF,EAAI,GAC/BJ,EAAOO,KAAKH,GAEZA,EAAMD,EAAUE,MAAM,GACtBD,EAAII,QAAQT,EAAMO,OAAO,IACzBN,EAAOO,KAAKH,MAGTJ,CAAP,ECdSS,EAAc,CAAC,CAAE,EAAG;AAQjC,IAAIC;AAHGC,EAAAC,iBAAA;AAKP,MAGMC,EAAgB,CAAA,EAGhBC,EAAkB,CACvB,IAAK,KACL,EAAK,IACLC,EAAK,MACLC,GAAM,IACNC,GAAM,MACNC,GAAM,IACNC,GAAM,IACNC,GAAM,KACNC,GAAM,IACNC,EAAK,MACLC,EAAK,OACLC,EAAK,WACLC,EAAK,OACLC,EAAK,KACLC,EAAK,SACLC,EAAK,OACL/C,EAAK,KACLgD,EAAK,KACLC,EAAK,SACLC,EAAK,WACLC,EAAK,MACLC,EAAK,UACLC,EAAK,UACLC,GAAM,IACNC,GAAM,IACNC,GAAM,IACNC,GAAM,IACN3C,EAAK,SACL4C,EAAK,MACLC,EAAK,QACLC,EAAK,QACLC,EAAK,QACLC,GAAM,IACNC,GAAM,IACNC,EAAK,IACLC,EAAK,MACLC,GAAM,IACNC,EAAK,IACLC,EAAK,MACLC,EAAK,QACLC,GAAM;AAIP,IAAK,IAAIC,KAAStC,EAAiB,CAClC,IAAIuC,EAAUvC,EAAgBsC,IAAU;AACxC,IAAK,IAAIvE,EAAI,EAAGA,EAAIwE,EAAQnF,OAAQW,IAAI,CACvC,IAAID,EAAOyE,EAAQnD,UAAUrB,EAAEA,EAAE;AACjCgC,EAAcjC,GAAQwE,CACtB,CACD,CAGD,MAAME,EAAc,IAAIC,OAAOC,OAAOC,KAAK5C,GAAezC,KAAK,KAAhCoF,YAAoD,MAStEE,EAAcC,SACNC,IAAhBhD,EAAAA,cACJA,EAAAA,YAAciD,EAAYF,GAAgBlD,GAA1C,EAUYqD,EAAY,CAAC3E,EAAI4E,EAAK,SAAW5E,EAAI2E,UAAUC,GAW/CC,EAAa7E,G
AElBJ,EAAQI,GAAKE,QAKnB,CAACW,EAAQpB,IACDoB,EAASiE,EAAWrF,IAE5B,IAQWqF,EAAc9E,IAC1BA,EAAM2E,EAAU3E,GACd+E,cACA9E,QAAQkE,GAAmC1E,GACpCiC,EAAcjC,IAAS,KAIzBkF,EAAU3E,EAAI;AAaf,SAAUgF,EAAU1D,GAE1B,IAAI,MAAO2D,EAAgBC,KAAmB5D,EAC7C,IAAI,IAAI5B,EAAIuF,EAAgBvF,GAAKwF,EAAgBxF,IAAI,CAEpD,IAAIyF,EAAYC,OAAOC,aAAa3F,GAChC4F,EAAWT,EAAUM;AAGrBG,GAAUH,EAASJ,gBASnBO,EAAOvG,OArJU,GAyJA,GAAjBuG,EAAOvG,cAKL,CAACuG,OAAOA,EAAOH,SAASA,EAASI,WAAW7F,IAClD,CAEF,CAQY8F,MAAAA,EAAgBlE,IAG5B,MAAMmE,EAAe,CAAA,EAOfC,EAAc,CAACJ,EAAOK,KAG3B,MAAMC,EAAaH,EAAaH,IAAW,IAAIzF,IAEzCgG,EAAO,IAAIzB,OAAQ,IAAIzE,EAAaiG,GAAY,IAAI;AACtDD,EAAOG,MAAMD,KAIjBD,EAAWG,IAAIhG,EAAa4F,IAC5BF,EAAaH,GAAUM,EAAvB;AAID,IAAK,IAAIxF,KAAS4E,EAAU1D,GAC3BoE,EAAYtF,EAAMkF,OAAOlF,EAAMkF,QAC/BI,EAAYtF,EAAMkF,OAAOlF,EAAM+E;AAGhC,OAAOM,CAAP,EAUYf,EAAepD,IAG3B,MAAMmE,EAAeD,EAAalE,GAG5BG,EAAc,CAAA;AAGpB,IAAIuE,EAAa;AAEjB,IAAK,IAAIV,KAAUG,EAAc,CAEhC,IAAIQ,EAAMR,EAAaH;AACnBW,IACHxE,EAAY6D,GAAU3F,EAAasG,IAGhCX,EAAOvG,OAAS,GACnBiH,EAAW5E,KAAKrB,EAAauF,GAE9B,CAEDU,EAAWE,MAAK,CAACtE,EAAGO,IAAMA,EAAEpD,OAAS6C,EAAE7C;AACvC,MAAMoH,EAAkBxH,EAAeqH;AAGvC,OAFAzE,EAAiB,IAAI6C,OAAO,IAAI+B,EAAgB,KAEzC1E,CAAP,EAUY2E,EAAc,CAACC,EAAQC,EAAgB,KACnD,IAAIC,EAAiB;AAUrB,OAPAF,EAAUA,EAAQG,KAAKxG,IAClByB,EAAAA,YAAYzB,KACfuG,GAAkBvG,EAAIjB,QAEhB0C,EAAWA,YAACzB,IAAQA,KAGxBuG,GAAkBD,EACdpH,EAAgBmH,GAGjB,EAAP,EAgBYI,EAAsB,CAACzG,EAAIsG,EAAgB,KAEvDA,EAAkBjG,KAAKC,IAAIgG,EAAgBtG,EAAIjB,OAAO,GAE/CJ,EACNgC,EAAcX,GAAKwG,KAAME,GACjBN,EAAYM,EAAQJ,OAYxBK,EAAqB,CAACC,EAAUC,GAAI,KAEzC,IAAIP,EAAkBM,EAAU7H,OAAS,EAAI,EAAI;AACjD,OAAOJ,EACNiI,EAAUJ,KAAMM,IACf,IAAIC,EAAM;AACV,MAAMC,EAAMH,EAAMC,EAAS/H,SAAW+H,EAAS/H,SAAW;AAC1D,IAAK,IAAI2D,EAAI,EAAGA,EAAIsE,EAAKtE,IACxBqE,EAAI3F,KAAKqF,EAAoBK,EAASG,QAAQvE,IAAI,GAAG4D;AAGtD,OAAOpH,EAAgB6H,EAAvB,IARF,EAkBKG,EAAc,CAACC,EAAYP,KAEhC,IAAI,MAAMG,KAAOH,EAAU,CAE1B,GAAIG,EAAIjG,OAASqG,EAAWrG,OAASiG,EAAIK,KAAOD,EAAWC,IAC1D;AAGD,GAAIL,EAAIE,QAAQhI,KAAK,MAAQkI,EAAWF,QAAQhI,KAAK,IACpD;AAID,IAAIoI,EAAeF,EAAWG;AAK9B,MAAMzI,EAAU0I,IAEf,IAAI,MAAMC,KAAeH,EAAa,CAErC,GAAIG,EAAY1G,QAAUyG,EAAKzG,OAAS0G,EAAYC,SAAWF,EAAKE,OACnE,OAAO;AAGR,GAAmB,GAAfF,EAAKxI,QAAqC,GAAtByI,EAAYzI,OAApC,CAUA,GAAIwI,EAAKzG,MAAQ0G,EAAY1G,OAASyG,EAAKH,IAAMI,EAAY1G,MAC5D,OAAO;AAGR,GAAI0G,EAAY1G,MAAQyG,EAAKzG,OAAS0G,EAAYJ,IAAMG,EAAKzG,MAC5D,OAAO,CArB6B,CAwBrC,CAED,OAAO,CAAP;AAKD,KAFeiG,EAAIO,MAAMzI,OAAOA,GAEnBE,OAAS,GAItB,OAAO,CACP,CAED,OAAO,CAAP;AAGD,MAAM2I,EAELC,cAGCC,KAAKN,MAAS,GAGdM,KAAKX,QAAU,GACfW,KAAK9G,MAAS,EACd8G,KAAKR,IAAO,CACZ,CAKDrB,IAAIwB,GACCA,IACHK,KAAKN,MAAMlG,KAAKmG,GAChBK,KAAKX,QAAQ7F,KAAKmG,EAAKE,QACvBG,KAAK9G,MAAQT,KAAKwH,IAAIN,EAAKzG,MAAM8G,KAAK9G,OACtC8G,KAAKR,IAAM/G,KAAKC,IAAIiH,EAAKH,IAAIQ,KAAKR,KAEnC,CAEDU,OACC,OAAOF,KAAKN,MAAMM,KAAKN,MAAMvI,OAAO,EACpC,CAEDA,SACC,OAAO6I,KAAKN,MAAMvI,MAClB,CAMDgJ,MAAMC,EAAUC,GACf,IAAIF,EAAQ,IAAIL,EAEZJ,EAAQY,KAAKC,MAAMD,KAAKE,UAAUR,KAAKN,QACvCe,EAAYf,EAAMgB;AACtB,IAAK,MAAMf,KAAQD,EAClBS,EAAMhC,IAAIwB;AAGX,IAAIgB,EAAcN,EAAWR,OAAO1G,UAAU,EAAEiH,EAASK,EAAUvH,OAC/D0H,EAAiBD,EAAYxJ;AAGjC,OAFAgJ,EAAMhC,IAAI,CAACjF,MAAMuH,EAAUvH,MAAMsG,IAAIiB,EAAUvH,MAAM0H,EAAezJ,OAAOyJ,EAAef,OAAOc,IAE1FR,CACP,4HAmByB/H,IAC1BuE,IAEAvE,EAAU6E,EAAU7E;AAEpB,IAAIX,EAAY,GACZuH,EAAa,CAAC,IAAIc;AAEtB,IAAK,IAAIhI,EAAI,EAAGA,EAAIM,EAAIjB,OAAQW,IAAK,CAEpC,IACIoG,EADS9F,EAAIe,UAAUrB,GACRoG,MAAMvE;AACzB,MAAM9B,EAAOO,EAAIe,UAAUrB,EAAEA,EAAE,GACzB+I,EAAY3C,EAAQA,EAAM,GAAK;AAKrC,IAAI4C,EAAe,GACfC,EAAe,IAAI9I;AACvB,IAAI,MAAMiH,KAAYF,EAAU,CAE/B,MAAMqB,EAAanB,EAASgB;AAG5B,IAAKG,GAAmC,GAArBA,EAAWlJ,QAAekJ,EAAWb,KAAO1H,EAG9D,GAAI+I,EAAW,CACd,MAAMzB,EAAMyB,EAAU1J;AACtB+H,EAASf,IAAI,CAACjF,MAAMpB,EA
AE0H,IAAI1H,EAAEsH,EAAIjI,OAAOiI,EAAIS,OAAOgB,IAClDE,EAAY5C,IAAI,IAChB,MACAe,EAASf,IAAI,CAACjF,MAAMpB,EAAE0H,IAAI1H,EAAE,EAAEX,OAAO,EAAE0I,OAAOhI,IAC9CkJ,EAAY5C,IAAI;KAGZ,GAAI0C,EAAW,CAEpB,IAAIV,EAAQjB,EAASiB,MAAMrI,EAAEuI;AAE7B,MAAMjB,EAAMyB,EAAU1J;AACtBgJ,EAAMhC,IAAI,CAACjF,MAAMpB,EAAE0H,IAAI1H,EAAEsH,EAAIjI,OAAOiI,EAAIS,OAAOgB,IAE/CC,EAAYtH,KAAK2G,EAEjB,MAGAY,EAAY5C,IAAI,IAzCkB,CAgDpC,GAAI2C,EAAY3J,OAAS,EAAzB,CAGC2J,EAAcA,EAAYxC,MAAK,CAACtE,EAAEO,IAC1BP,EAAE7C,SAAWoD,EAAEpD;AAGvB,IAAK,IAAIgJ,KAASW,EAGbxB,EAAYa,EAAOnB,IAIvBA,EAAUxF,KAAK2G,EA9DmB,MAyEpC,GAAIrI,EAAI,GAAyB,GAApBiJ,EAAY7I,OAAc6I,EAAYC,IAAI,KAAM,CAC5DvJ,GAAWsH,EAAmBC,GAAU;AACxC,IAAIiC,EAAU,IAAInB;AAClB,MAAMoB,EAAUlC,EAAU;AACtBkC,GACHD,EAAQ9C,IAAI+C,EAAQhB,QAErBlB,EAAY,CAACiC,EACb,CAED,CAID,OAFAxJ,GAAWsH,EAAmBC,GAAU,GAEjCvH,CAAP"}
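
For reference, the file above is a version-3 source map for the minified UMD bundle index.min.js. Its "sources" array points at the three original ES modules (lib/regex.mjs, lib/strings.mjs, lib/index.mjs), their full text is embedded in "sourcesContent", and the "mappings" field carries the base64-VLQ position data that maps minified positions back to those sources. A minimal Node.js sketch (ES module, built-ins only; the local file path is an assumption) for inspecting those top-level fields:

import { readFile } from 'node:fs/promises';

// Load the raw source map and print its top-level fields.
const map = JSON.parse(await readFile('./index.min.js.map', 'utf8'));
console.log(map.version);                  // 3
console.log(map.file);                     // "index.min.js"
console.log(map.sources);                  // ["../../lib/regex.mjs", "../../lib/strings.mjs", "../../lib/index.mjs"]
console.log(map.sourcesContent[2].length); // size of the embedded lib/index.mjs source

The embedded lib/index.mjs implements the unicode-variant matching that the bundle ships: getPattern(str) folds the input with asciifold(), then expands each character (or multi-character sequence such as "ae") into a regex alternation covering its Unicode variants, as described in the generateMap and getPattern JSDoc blocks above. A short usage sketch, assuming the exports shown in sourcesContent (the import path is hypothetical; point it at wherever the built module lives):

// Build a case- and diacritic-insensitive matcher from a plain query string.
import { getPattern } from './lib/index.mjs'; // hypothetical path

const regex = new RegExp(getPattern('ae'), 'iu');
console.log(regex.test('Ǣon')); // expected true: 'Ǣ' folds to "ae" per the embedded docs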



