webapp.flowDiagram-b222e15a.f77f79b1.js.map
From the langgraph4j-studio-jetty artifact.
An **embedded playground webapp** to run a Langgraph4j workflow in a visual way. This listing shows the newest version of the artifact.
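To pull the artifact into a build, a Gradle (Kotlin DSL) dependency might look like the sketch below. The group id `org.bsc.langgraph4j` is an assumption not stated on this page, and the version is a placeholder; check the coordinates shown in the download tabs before using them.

```kotlin
// Sketch only: the group id is assumed (org.bsc.langgraph4j) and <version> is a
// placeholder -- substitute the coordinates and version shown on this listing.
dependencies {
    implementation("org.bsc.langgraph4j:langgraph4j-studio-jetty:<version>")
}
```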
{"mappings":"A,S,E,C,C,C,C,C,C,C,E,O,c,C,E,E,C,I,E,I,E,W,C,E,a,C,C,E,C,I,E,A,W,iB,C,E,E,Q,C,E,Q,S,C,C,C,E,E,E,O,C,I,I,G,E,E,O,C,I,I,G,I,E,E,S,E,E,S,E,E,SCiKA,SAAS,EAAY,CAAW,CAAE,CAAS,CAAE,CAAU,EACrD,OAAO,EAAY,MAAM,CAAC,SAAS,IAAI,CAAC,QAAS,oBAAoB,IAAI,CAAC,IAAK,GAAG,IAAI,CAAC,IAAK,EAAY,EAAa,GAAM,MAAM,IAAI,CAAC,KAAM,EAAa,KAC3J,CASA,SAAS,EAAuB,CAAU,CAAE,CAAU,CAAE,CAAI,EAC1D,IAAI,EACJ,IAAM,EAAc,EAAW,MAAM,CAAC,QAChC,EAAW,EAAY,EAAa,EAAG,GAC7C,EAA2B,EAAU,CAAC,CAAE,QAAS,EAAM,KAAM,QAAS,EAAE,EACxE,IAAM,EAAiB,AAAyB,MAAzB,CAAA,EAAK,EAAS,IAAI,EAAA,EAAc,KAAK,EAAI,EAAG,qBAAqB,GAIxF,OAHI,GACF,EAAY,MAAM,GAEb,CACT,CAyBA,SAAS,EAA2B,CAAK,CAAE,CAAW,EACpD,EAAM,IAAI,CAAC,IACX,EAAY,OAAO,CAAC,CAAC,EAAM,KACzB,IAAM,EAAa,EAAM,MAAM,CAAC,SAAS,IAAI,CAAC,aAAc,AAAc,aAAd,EAAK,IAAI,CAAkB,SAAW,UAAU,IAAI,CAAC,QAAS,oBAAoB,IAAI,CAAC,cAAe,AAAc,WAAd,EAAK,IAAI,CAAgB,OAAS,SAChM,AAAU,CAAA,IAAV,EACF,EAAW,IAAI,CAAC,EAAK,OAAO,EAE5B,EAAW,IAAI,CAAC,IAAM,EAAK,OAAO,CAEtC,EACF,CACA,IAAM,EAAa,CAAC,EAAI,EAAO,EAAE,CAAE,CAAA,MACjC,EAAQ,EAAA,CAAA,QACR,EAAU,CAAA,CAAA,CAAA,QACV,EAAU,EAAA,CAAA,cACV,EAAgB,CAAA,CAAA,CAAA,OAChB,EAAS,CAAA,CAAA,CAAA,MACT,EAAQ,GAAA,CAAA,iBACR,EAAmB,CAAA,CAAA,CACpB,CAAG,CAAC,CAAC,IAEJ,GADA,AAAA,CAAA,EAAA,EAAA,CAAE,AAAF,EAAI,IAAI,CAAC,aAAc,EAAM,EAAO,EAAS,EAAS,EAAe,EAAQ,IACzE,EAgBF,OADiB,AA5DrB,SAA6B,CAAK,CAAE,CAAC,CAAE,CAAc,CAAE,EAAgB,CAAA,CAAK,EAE1E,IAAM,EAAa,EAAE,MAAM,CAAC,KACtB,EAAM,EAAW,MAAM,CAAC,QAAQ,IAAI,CAAC,QAAS,cAC9C,EAAc,EAAW,MAAM,CAAC,QAAQ,IAAI,CAAC,IAAK,SACpD,EAAY,EAChB,IAAK,IAAM,KAAQ,EAAgB,CACjC,IAAM,EAAa,AAAC,GAAU,AA1BlC,CAAA,SAA4B,CAAU,CAAE,CAAU,CAAE,CAAI,EACtD,IAAM,EAAc,EAAW,MAAM,CAAC,QAChC,EAAW,EAAY,EAAa,EAkBvB,KAjBnB,EAA2B,EAAU,GACrC,IAAM,EAAa,EAAS,IAAI,GAAG,qBAAqB,GAExD,OADA,EAAY,MAAM,GACX,CACT,CAAA,EAmBqD,EANhC,EAMwD,IAAU,EAEnF,IAAK,IAAM,KADa,EAAW,GAAQ,CAAC,EAAK,CAAG,AAtGxD,SAA6B,CAAI,CAAE,CAAQ,EACzC,GAAI,EAAK,IAAI,CAAC,CAAC,CAAA,QAAE,CAAO,CAAE,GAAK,EAAQ,QAAQ,CAAC,OAC9C,MAAM,AAAI,MAAM,6DAElB,OAAO,AAET,SAAS,EAA6B,CAAK,CAAE,CAAQ,CAAE,EAAQ,EAAE,CAAE,EAAU,EAAE,EAC7E,GAAI,AAAiB,IAAjB,EAAM,MAAM,CAId,OAHI,EAAQ,MAAM,CAAG,GACnB,EAAM,IAAI,CAAC,GAEN,EAAM,MAAM,CAAG,EAAI,EAAQ,EAAE,CAEtC,IAAI,EAAS,EACY,CAAA,MAArB,CAAK,CAAC,EAAE,CAAC,OAAO,GAClB,EAAS,IACT,EAAM,KAAK,IAEb,IAAM,EAAW,EAAM,KAAK,IAAM,CAAE,QAAS,IAAK,KAAM,QAAS,EAC3D,EAAmB,IAAI,EAAQ,CAKrC,GAJe,KAAX,GACF,EAAiB,IAAI,CAAC,CAAE,QAAS,EAAQ,KAAM,QAAS,GAE1D,EAAiB,IAAI,CAAC,GAClB,EAAS,GACX,OAAO,EAA6B,EAAO,EAAU,EAAO,GAE9D,GAAI,EAAQ,MAAM,CAAG,EACnB,EAAM,IAAI,CAAC,GACX,EAAM,OAAO,CAAC,QACT,GAAI,EAAS,OAAO,CAAE,KA7DL,EA8DtB,GAAM,CAAC,EAAM,EAAK,CAtDb,AAET,SAAS,EAA6B,CAAQ,CAAE,CAAS,CAAE,CAAc,CAAE,CAAI,EAC7E,GAAI,AAA0B,IAA1B,EAAe,MAAM,CACvB,MAAO,CACL,CAAE,QAAS,EAAU,IAAI,CAAC,IAAK,KAAA,CAAK,EACpC,CAAE,QAAS,GAAI,KAAA,CAAK,EACrB,CAEH,GAAM,CAAC,EAAU,GAAG,EAAK,CAAG,EACtB,EAAU,IAAI,EAAW,EAAS,QACxC,AAAI,EAAS,CAAC,CAAE,QAAS,EAAQ,IAAI,CAAC,IAAK,KAAA,CAAK,EAAE,EACzC,EAA6B,EAAU,EAAS,EAAM,IAEtC,IAArB,EAAU,MAAM,EAAU,IAC5B,EAAU,IAAI,CAAC,GACf,EAAe,KAAK,IAEf,CACL,CAAE,QAAS,EAAU,IAAI,CAAC,IAAK,KAAA,CAAK,EACpC,CAAE,QAAS,EAAe,IAAI,CAAC,IAAK,KAAA,CAAK,EAC1C,CACH,EAgC6C,EAtDG,EAAE,EARxB,EAOY,AAuDiB,EAvDZ,OAAO,CANhD,AAAI,KAAK,SAAS,CACT,IAAI,IAAI,KAAK,SAAS,GAAG,OAAO,CAAC,GAAM,CAAC,GAAG,CAAC,AAAC,GAAM,EAAE,OAAO,EAE9D,IAAI,EAAK,EAI8C,AAsDT,EAtDc,IAAI,EAuDrE,EAAM,IAAI,CAAC,CAAC,EAAK,EACb,EAAK,OAAO,EACd,EAAM,OAAO,CAAC,EAElB,CACA,OAAO,EAA6B,EAAO,EAAU,EACvD,EAlCsC,EAAM,EAC5C,EAiG4E,EAAM,GAG5E,EADc,EAAY,EAAa,EATxB,KAUmB,GAClC,GAEJ,CACA,IAAI,EAMF,OAAO,EAAY,IAAI,EANN,EACjB,IAAM,EAAO,EAAY,IAAI,GAAG,OAAO,GAGvC,OADA,EAAI,IAAI,CAAC,IAAK,IAAU,IAAI,CAAC,IAAK,IAAU,IAAI,CAAC,QAAS,EAAK,KAAK,CAAG,GAAa,IAAI,CAAC,SAAU,EAA
K,MAAM,CAAG,GAC1G,EAAW,IAAI,EACxB,CAGF,EAqCyC,EAAO,EADrB,AA1O3B,SAAyB,CAAQ,EAC/B,IAAM,EAAuB,AAN/B,SAA4B,CAAQ,EAClC,IAAM,EAA0B,EAAS,OAAO,CAAC,UAAW,MAE5D,MAD2B,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAEpC,EAEkD,GAC1C,CAAA,SAAE,CAAQ,CAAE,CAAG,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GAC5B,EAAQ,CAAC,EAAE,CAAC,CACd,EAAc,EA4BlB,OAPA,EAAS,OAAO,CAAC,AAAC,IACM,cAAlB,EAAS,IAAI,EACf,EAAS,QAAQ,CAAC,OAAO,CAAC,AAAC,KACzB,AAvBN,SAAS,EAAY,CAAI,CAAE,EAAa,QAAQ,EAC1C,AAAc,SAAd,EAAK,IAAI,CAEX,AADkB,EAAK,KAAK,CAAC,KAAK,CAAC,MACzB,OAAO,CAAC,CAAC,EAAU,KACb,IAAV,IACF,IACA,EAAM,IAAI,CAAC,EAAE,GAEf,EAAS,KAAK,CAAC,KAAK,OAAO,CAAC,AAAC,IACvB,GACF,CAAK,CAAC,EAAY,CAAC,IAAI,CAAC,CAAE,QAAS,EAAM,KAAM,CAAW,EAE9D,EACF,GACS,CAAA,AAAc,WAAd,EAAK,IAAI,EAAiB,AAAc,aAAd,EAAK,IAAI,AAAK,GACjD,EAAK,QAAQ,CAAC,OAAO,CAAC,AAAC,IACrB,EAAY,EAAa,EAAK,IAAI,CACpC,EAEJ,EAIkB,EACd,EAEJ,GACO,CACT,EAyM2C,GACyB,EAf/C,EACjB,IAAM,EAAW,AA3LrB,SAAwB,CAAQ,EAC9B,GAAM,CAAA,SAAE,CAAQ,CAAE,CAAG,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GAalC,OAAO,EAAS,GAAG,CAZnB,SAAS,EAAO,CAAI,QAClB,AAAI,AAAc,SAAd,EAAK,IAAI,CACJ,EAAK,KAAK,CAAC,OAAO,CAAC,MAAO,SACxB,AAAc,WAAd,EAAK,IAAI,CACX,CAAC,QAAQ,EAAE,EAAK,QAAQ,CAAC,GAAG,CAAC,GAAQ,IAAI,CAAC,IAAI,SAAS,CAAC,CACtD,AAAc,aAAd,EAAK,IAAI,CACX,CAAC,IAAI,EAAE,EAAK,QAAQ,CAAC,GAAG,CAAC,GAAQ,IAAI,CAAC,IAAI,KAAK,CAAC,CAC9C,AAAc,cAAd,EAAK,IAAI,CACX,CAAC,GAAG,EAAE,EAAK,QAAQ,CAAC,GAAG,CAAC,GAAQ,IAAI,CAAC,IAAI,IAAI,CAAC,CAEhD,CAAC,sBAAsB,EAAE,EAAK,IAAI,CAAC,CAAC,AAC7C,GAC4B,IAAI,CAAC,GACnC,EA4KoC,GAWhC,OADmB,AA1GvB,SAAqB,CAAO,CAAE,CAAI,CAAE,CAAK,CAAE,CAAO,CAAE,EAAgB,CAAA,CAAK,MALhD,EAMvB,IAAM,EAAK,EAAQ,MAAM,CAAC,iBACpB,EAAM,EAAG,MAAM,CAAC,aAChB,EAAQ,EAAK,KAAK,CAClB,EAAa,EAAK,MAAM,CAAG,YAAc,YAC/C,EAAI,IAAI,CACN,CAAC;iBACY,EAAE,EAAW,CAAC,EAAE,EAAQ,EAAE,CAAC,CAAI,CAAA,EAAK,UAAU,CAAG,UAAY,EAAK,UAAU,CAAG,IAAM,EAAA,EAAM,IAAM,EAAQ,YAZjG,EAcP,EAAK,UAAU,GAZ7B,AAYS,EAZL,IAAI,CAAC,QAAS,GAapB,EAAI,KAAK,CAAC,UAAW,cACrB,EAAI,KAAK,CAAC,cAAe,UACzB,EAAI,KAAK,CAAC,YAAa,EAAQ,MAC/B,EAAI,IAAI,CAAC,QAAS,gCACd,GACF,EAAI,IAAI,CAAC,QAAS,YAEpB,IAAI,EAAO,EAAI,IAAI,GAAG,qBAAqB,GAS3C,OARI,EAAK,KAAK,GAAK,IACjB,EAAI,KAAK,CAAC,UAAW,SACrB,EAAI,KAAK,CAAC,cAAe,gBACzB,EAAI,KAAK,CAAC,QAAS,EAAQ,MAC3B,EAAO,EAAI,IAAI,GAAG,qBAAqB,IAEzC,EAAG,KAAK,CAAC,QAAS,EAAK,KAAK,EAC5B,EAAG,KAAK,CAAC,SAAU,EAAK,MAAM,EACvB,EAAG,IAAI,EAChB,EA+EmC,EATlB,CACX,OAAA,EACA,MAAO,AAAA,CAAA,EAAA,EAAA,CAAa,AAAb,EAAe,GAAU,OAAO,CACrC,uBAEA,AAAC,GAAM,CAAC,UAAU,EAAE,EAAE,OAAO,CAAC,IAAK,KAAK,MAAM,CAAC,EAEjD,WAAY,EAAM,OAAO,CAAC,QAAS,SACrC,EACyC,EAAO,EAAS,EAE3D,CAKF,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,G,I,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,SC3FA,IAAM,EAAM,CAAC,EAAE,cAAc,CAYhB,EAcX,SAAU,CAAK,CAAE,CAAQ,CAAE,CAAO,EAKhC,MAJwB,UAApB,OAAO,IACT,EAAU,EACV,EAAW,KAAA,GAEN,AAYX,CAAA,SAAkB,CAAO,EAEvB,IAAM,EAAS,CACb,WAAY,EAAE,CACd,eAAgB,CAAC,WAAY,WAAY,UAAW,YAAa,SAAS,CAC1E,MAAO,CACL,SAAU,EAAO,GACjB,iBAAkB,EAClB,cAAe,EACf,WAAY,EAAO,GACnB,WAAY,EAm6BhB,WACE,MAAO,CACL,KAAM,aACN,SAAU,EAAE,AACd,CACF,GAv6BI,gBAAiB,EACjB,mBAAoB,EACpB,WAAY,EAAO,GACnB,oBAAqB,EACrB,oBAAqB,EACrB,aAAc,EAAO,EAAU,GAC/B,SAAU,EA86Bd,WACE,MAAO,CACL,KAAM,aACN,MAAO,EACT,CACF,EAn7B+B,GAC3B,aAAc,EACd,KAAM,EACN,cAAe,EACf,WAAY,EAk7BhB,WACE,MAAO,CACL,KAAM,aACN,WAAY,GACZ,MAAO,KACP,MAAO,KACP,IAAK,EACP,CACF,GAz7BI,4BAA6B,EAC7B,sBAAuB,EACvB,sBAAuB,EACvB,SAAU,EAy7Bd,WACE,MAAO,CACL,KAAM,WACN,SAAU,EAAE,AACd,CACF,GA77BI,gBAAiB,EAAO,GACxB,kBAAmB,EAAO,GAC1B,SAAU,EAAO,EAAM,GACvB,aAAc,EACd,SAAU,EAAO,EAAM,GACvB,aAAc,EACd,MAAO,EAm9BX,WACE,MAAO,CACL,KAAM,QACN,MAAO,KACP,IAAK,GACL,IAAK,IACP,CACF,GAz9BI,MAAO,EACP,KAAM,EAAO,GACb,SAAU,EAq/Bd,SAAkB,CAAK,EACrB,MAAO,CACL,KAAM,WACN,OAAQ,EAAM,OAAO,C
ACrB,QAAS,KACT,SAAU,EAAE,AACd,CACF,GA3/BI,cAsdJ,SAA8B,CAAK,EAnJ1B,EAoJK,2BApJI,GAsJd,AADiB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,OAAO,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,GAAQ,IApK/D,EAqKU,2BArKD,CADW,KAAA,EAwKtB,EA3dI,YAAa,EAAO,EA6cxB,WAzJE,EA0JQ,2BA1JC,CA0J8B,CAAA,CACzC,GA9cI,cAAe,EAAO,GACtB,UAAW,EA2/Bf,WACE,MAAO,CACL,KAAM,YACN,SAAU,EAAE,AACd,CACF,GA//BI,UAqzBJ,WApgBE,EAqgBQ,aArgBC,CAqgBgB,WAC3B,EAtzBI,gBAAiB,EACjB,0BAA2B,EAC3B,oBAAqB,EACrB,cAAe,EAAO,GACtB,OAAQ,EA6/BZ,WACE,MAAO,CACL,KAAM,SACN,SAAU,EAAE,AACd,CACF,GAjgCI,cAAe,EA4gCnB,WACE,MAAO,CACL,KAAM,eACR,CACF,EA/gCE,EACA,KAAM,CACJ,WAAY,IACZ,mBAyiBJ,SAAkC,CAAK,EACrC,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAC9C,GAAI,CAAC,EAAK,KAAK,CAAE,CACf,IAAM,EAAQ,IAAI,CAAC,cAAc,CAAC,GAAO,MAAM,AAC/C,CAAA,EAAK,KAAK,CAAG,CACf,CACF,EA9iBI,SAAU,IACV,cA82BJ,SAA6B,CAAK,EAChC,EAAW,IAAI,CAAC,IAAI,CAAE,GAEtB,AADa,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,GAAG,CAAG,UAAY,IAAI,CAAC,cAAc,CAAC,EAC7C,EAj3BI,iBAm2BJ,SAAgC,CAAK,EACnC,EAAW,IAAI,CAAC,IAAI,CAAE,GAEtB,AADa,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,GAAG,CAAG,IAAI,CAAC,cAAc,CAAC,EACjC,EAt2BI,WAAY,IACZ,qBAAsB,EACtB,oCAAqC,EACrC,gCAAiC,EACjC,wBAs0BJ,SAAuC,CAAK,MAItC,EAHJ,IAAM,EAAO,IAAI,CAAC,cAAc,CAAC,GAC3B,EA3hBC,EA2hBc,sBA3hBL,CA8hBZ,GACF,EAAQ,AAAA,CAAA,EAAA,EAAA,+BAA8B,AAA9B,EACN,EACA,AAAS,oCAAT,EAA6C,GAAK,IA/iBtD,EAijBU,sBAjjBD,CADW,KAAA,GAqjBlB,EADe,AAAA,CAAA,EAAA,EAAA,6BAA4B,AAA5B,EAA8B,GAG/C,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,GAAG,EAC3B,CAAA,EAAK,KAAK,EAAI,EACd,EAAK,QAAQ,CAAC,GAAG,CAAG,EAAM,EAAM,GAAG,CACrC,EAv1BI,WAAY,EA0ehB,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,EAAK,OAAO,CAAC,2BAA4B,IA/MtD,EAgNQ,cAhNC,CADW,KAAA,CAkNtB,GA9eI,gBA8dJ,YAnLS,EAqLK,cArLI,GAsLhB,IAAI,CAAC,MAAM,GApMX,EAqMQ,cArMC,CAqMiB,CAAA,EAC5B,EAleI,oBAycJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,IAAI,CAAG,CACd,EA5cI,oBAkdJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,IAAI,CAAG,CACd,EArdI,cAAe,EACf,aAAc,EAgflB,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,EAAK,OAAO,CAAC,eAAgB,GAC5C,GAnfI,SAAU,EAspBd,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,CACf,GAzpBI,aAAc,EACd,KAAM,EACN,WAAY,IACZ,4BA4gBJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,GAAG,CAAG,CACb,EA/gBI,sBAofJ,SAAqC,CAAK,EACxC,IAAM,EAAQ,IAAI,CAAC,MAAM,GACnB,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,AAC9C,CAAA,EAAK,KAAK,CAAG,EACb,EAAK,UAAU,CAAG,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EAChB,IAAI,CAAC,cAAc,CAAC,IACpB,WAAW,EACf,EA1fI,sBAggBJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,CACf,EAngBI,SAAU,IACV,gBAAiB,EAAO,GACxB,kBAAmB,EAAO,GAC1B,SAAU,EAsnBd,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,CACf,GAznBI,aAAc,EACd,SAAU,EA+nBd,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,CACf,GAloBI,aAAc,EACd,MAAO,EA+qBX,WACE,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAI9C,GA5ZO,EA4ZK,WA5ZI,CA4ZY,CAE1B,IAAM,EAAgB,AA9ZjB,EA8ZyB,aA9ZhB,EA8ZoC,UAClD,CAAA,EAAK,IAAI,EAAI,YAE
b,EAAK,aAAa,CAAG,EAErB,OAAO,EAAK,GAAG,CACf,OAAO,EAAK,KAAK,AACnB,MAEE,OAAO,EAAK,UAAU,CAEtB,OAAO,EAAK,KAAK,AAvbnB,CAAA,EAybQ,aAzbC,CADW,KAAA,CA2btB,GAnsBI,MAytBJ,WACE,IAAM,EAAW,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAC5C,EAAQ,IAAI,CAAC,MAAM,GACnB,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAG9C,GAtdA,EAqdQ,WArdC,CAqdc,CAAA,EACnB,AAAc,SAAd,EAAK,IAAI,CAAa,CAGxB,IAAM,EAAW,EAAS,QAAQ,AAClC,CAAA,EAAK,QAAQ,CAAG,CAClB,MACE,EAAK,GAAG,CAAG,CAEf,EAtuBI,UAysBJ,SAAyB,CAAK,EAC5B,IAAM,EAAS,IAAI,CAAC,cAAc,CAAC,GAC7B,EAAW,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,AAGlD,CAAA,EAAS,KAAK,CAAG,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GAE9B,EAAS,UAAU,CAAG,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EAAoB,GAAQ,WAAW,EAC/D,EAhtBI,WA+kBJ,SAA0B,CAAK,EAC7B,IAAM,EAAU,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAEjD,GA7TO,EA6TK,WA7TI,CA6TY,CAE1B,AADa,EAAQ,QAAQ,CAAC,EAAQ,QAAQ,CAAC,MAAM,CAAG,EAAE,CACrD,QAAQ,CAAC,GAAG,CAAG,EAAM,EAAM,GAAG,EA7UrC,EA8UU,WA9UD,CADW,KAAA,EAgVlB,MACF,CAEE,CApUK,EAoUI,4BApUK,EAqUd,EAAO,cAAc,CAAC,QAAQ,CAAC,EAAQ,IAAI,IAE3C,EAAY,IAAI,CAAC,IAAI,CAAE,GACvB,EAAW,IAAI,CAAC,IAAI,CAAE,GAE1B,EA9lBI,KAAM,EA+oBV,WACE,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAI9C,GAhYO,EAgYK,WAhYI,CAgYY,CAE1B,IAAM,EAAgB,AAlYjB,EAkYyB,aAlYhB,EAkYoC,UAClD,CAAA,EAAK,IAAI,EAAI,YAEb,EAAK,aAAa,CAAG,EAErB,OAAO,EAAK,GAAG,CACf,OAAO,EAAK,KAAK,AACnB,MAEE,OAAO,EAAK,UAAU,CAEtB,OAAO,EAAK,KAAK,AA3ZnB,CAAA,EA6ZQ,aA7ZC,CADW,KAAA,CA+ZtB,GAnqBI,SAAU,IACV,YAAa,IACb,cAAe,IACf,UAAW,IACX,gBA8wBJ,SAA+B,CAAK,EAClC,IAAM,EAAQ,IAAI,CAAC,MAAM,GACnB,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,AAG9C,CAAA,EAAK,KAAK,CAAG,EAEb,EAAK,UAAU,CAAG,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EAChB,IAAI,CAAC,cAAc,CAAC,IACpB,WAAW,GAthBb,EAuhBQ,aAvhBC,CAuhBgB,MAC3B,EAxxBI,0BAquBJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,GAAG,CAAG,CACb,EAxuBI,oBA+uBJ,WACE,IAAM,EAAO,IAAI,CAAC,MAAM,EAExB,AADa,CAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,CACf,EAlvBI,SAyvBJ,WA3fE,EA4fQ,WA5fC,CADW,KAAA,CA8ftB,EA1vBI,cAAe,EA4hBnB,WA/RE,EAgSQ,4BAhSC,CADW,KAAA,CAkStB,GA7hBI,0BAkhBJ,SAAyC,CAAK,EAE5C,AADa,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CACzC,KAAK,CAAG,AAA6C,KAA7C,IAAI,CAAC,cAAc,CAAC,GAAO,UAAU,CAAC,GAAY,EAAI,CACrE,EAphBI,kBAygBJ,WA9QE,EA+QQ,4BA/QC,CA+Q+B,CAAA,CAC1C,EA1gBI,OAAQ,IACR,cAAe,GACjB,CACF,EACA,AAm/BF,CAAA,SAAS,EAAU,CAAQ,CAAE,CAAU,EACrC,IAAI,EAAQ,GACZ,KAAO,EAAE,EAAQ,EAAW,MAAM,EAAE,CAClC,IAAM,EAAQ,CAAU,CAAC,EAAM,CAC3B,MAAM,OAAO,CAAC,GAChB,EAAU,EAAU,GAEpB,AAUN,SAAmB,CAAQ,CAAE,CAAS,EAEpC,IAAI,EACJ,IAAK,KAAO,EACV,GAAI,EAAI,IAAI,CAAC,EAAW,IACtB,GAAI,AAAQ,mBAAR,EAA0B,CAC5B,IAAM,EAAQ,CAAS,CAAC,EAAI,CACxB,GACF,CAAQ,CAAC,EAAI,CAAC,IAAI,IAAI,EAE1B,MAAO,GAAI,AAAQ,eAAR,EAAsB,CAC/B,IAAM,EAAQ,CAAS,CAAC,EAAI,CACxB,GACF,CAAQ,CAAC,EAAI,CAAC,IAAI,IAAI,EAE1B,MAAO,GAAI,AAAQ,UAAR,GAAmB,AAAQ,SAAR,EAAgB,CAC5C,IAAM,EAAQ,CAAS,CAAC,EAAI,CACxB,GACF,OAAO,MAAM,CAAC,CAAQ,CAAC,EAAI,CAAE,EAEjC,EAGN,EAjCgB,EAAU,EAExB,CACF,CAAA,EA7/BY,EAAS,AAAA,CAAA,GAAW,CAAC,CAAA,EAAG,eAAe,EAAI,EAAE,EAGvD,IAAM,EAAO,CAAC,EACd,OAUA,SAAiB,CAAM,EAErB,IAAI,EAAO,CACT,KAAM,OACN,SAAU,EAAE,AACd,EAEM,EAAU,CACd,MAAO,CAAC,EAAK,CACb,WAAY,EAAE,CACd,OAAA,EACA,MAAA,EACA,KAAA,EACA,OAAA,EACA,OAAA,EACA,QAAA,EACA,QAAA,CACF,EAEM,EAAY,EAAE,CAChB,EAAQ,GACZ,KAAO,EAAE,EAAQ,EAAO,MAAM,EAI1B,CAAA,AAA0B,gBAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACrB,AAA0B,kBAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,AAFvB,IAIM,AAAqB,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,CAClB,EAAU,IAAI,CAAC,GAGf,EAAQ,AA+Dh
B,SAAqB,CAAM,CAAE,CAAK,CAAE,CAAM,EACxC,IAII,EAEA,EAEA,EAEA,EAVA,EAAQ,EAAQ,EAChB,EAAmB,GACnB,EAAa,CAAA,EASjB,KAAO,EAAE,GAAS,GAAQ,CACxB,IAAM,EAAQ,CAAM,CAAC,EAAM,CAmC3B,GAjCE,AAAkB,kBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,gBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,eAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EAET,AAAa,UAAb,CAAK,CAAC,EAAE,CACV,IAEA,IAEF,EAAW,KAAA,GACF,AAAkB,oBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,CACL,UAAb,CAAK,CAAC,EAAE,IAER,GACC,GACA,GACA,GAED,CAAA,EAAsB,CANxB,EAQA,EAAW,KAAA,GAGK,eAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,kBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,mBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,mBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAAkB,6BAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EAIb,CAAA,EAAW,KAAA,CAAX,EAGC,CAAC,GACA,AAAa,UAAb,CAAK,CAAC,EAAE,EACR,AAAkB,mBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACd,AAAqB,KAArB,GACC,AAAa,SAAb,CAAK,CAAC,EAAE,EACP,CAAA,AAAkB,kBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACZ,AAAkB,gBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,AAAK,EACtB,CACA,GAAI,EAAU,CACZ,IAAI,EAAY,EAEhB,IADA,EAAY,KAAA,EACL,KAAa,CAClB,IAAM,EAAY,CAAM,CAAC,EAAU,CACnC,GACE,AAAsB,eAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,EACjB,AAAsB,oBAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,CACjB,CACA,GAAI,AAAiB,SAAjB,CAAS,CAAC,EAAE,CAAa,SACzB,IACF,CAAM,CAAC,EAAU,CAAC,EAAE,CAAC,IAAI,CAAG,kBAC5B,EAAa,CAAA,GAEf,CAAS,CAAC,EAAE,CAAC,IAAI,CAAG,aACpB,EAAY,CACd,MAAO,GACL,AAAsB,eAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,EACjB,AAAsB,qBAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,EACjB,AAAsB,+BAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,EACjB,AAAsB,qBAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,EACjB,AAAsB,mBAAtB,CAAS,CAAC,EAAE,CAAC,IAAI,OAIjB,KAEJ,CAEE,GACC,CAAA,CAAC,GAAa,EAAsB,CAAA,GAErC,CAAA,EAAS,OAAO,CAAG,CAAA,CAJrB,EAQA,EAAS,GAAG,CAAG,OAAO,MAAM,CAC1B,CAAC,EACD,EAAY,CAAM,CAAC,EAAU,CAAC,EAAE,CAAC,KAAK,CAAG,CAAK,CAAC,EAAE,CAAC,GAAG,EAEvD,EAAO,MAAM,CAAC,GAAa,EAAO,EAAG,CAAC,OAAQ,EAAU,CAAK,CAAC,EAAE,CAAC,EACjE,IACA,GACF,CAGsB,mBAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,GACf,EAAW,CACT,KAAM,WACN,QAAS,CAAA,EACT,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAK,CAAC,EAAE,CAAC,KAAK,EAEvC,IAAK,KAAA,CACP,EAEA,EAAO,MAAM,CAAC,EAAO,EAAG,CAAC,QAAS,EAAU,CAAK,CAAC,EAAE,CAAC,EACrD,IACA,IACA,EAAsB,KAAA,EACtB,EAAW,CAAA,EAEf,CACF,CAEA,OADA,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,OAAO,CAAG,EACpB,CACT,EAzL4B,EADP,EAAU,GAAG,GACQ,IAKxC,IADA,EAAQ,GACD,EAAE,EAAQ,EAAO,MAAM,EAAE,CAC9B,IAAM,EAAU,CAAM,CAAC,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,CACpC,EAAI,IAAI,CAAC,EAAS,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,GACzC,CAAO,CAAC,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,IAAI,CACjC,OAAO,MAAM,CACX,CACE,eAAgB,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,cAAc,AACjD,EACA,GAEF,CAAM,CAAC,EAAM,CAAC,EAAE,CAGtB,CAGA,GAAI,EAAQ,UAAU,CAAC,MAAM,CAAG,EAAG,CACjC,IAAM,EAAO,EAAQ,UAAU,CAAC,EAAQ,UAAU,CAAC,MAAM,CAAG,EAAE,CAE9D,AADgB,CAAA,CAAI,CAAC,EAAE,EAAI,CAA3B,EACQ,IAAI,CAAC,EAAS,KAAA,EAAW,CAAI,CAAC,EAAE,CAC1C,CA0BA,IAvBA,EAAK,QAAQ,CAAG,CACd,MAAO,EACL,EAAO,MAAM,CAAG,EACZ,CAAM,CAAC,EAAE,CAAC,EAAE,CAAC,KAAK,CAClB,CACE,KAAM,EACN,OAAQ,EACR,OAAQ,CACV,GAEN,IAAK,EACH,EAAO,MAAM,CAAG,EACZ,CAAM,CAAC,EAAO,MAAM,CAAG,EAAE,CAAC,EAAE,CAAC,GAAG,CAChC,CACE,KAAM,EACN,OAAQ,EACR,OAAQ,CACV,EAER,EAGA,EAAQ,GACD,EAAE,EAAQ,EAAO,UAAU,CAAC,MAAM,EACvC,EAAO,EAAO,UAAU,CAAC,EAAM,CAAC,IAAS,EAE3C,OAAO,CACT,EAgJA,SAAS,EAAQ,CAAG,CAAE,CAAK,EACzB,CAAI,CAAC,EAAI,CAAG,CACd,CAYA,SAAS,EAAQ,CAAG,EAClB,OAAO,CAAI,CAAC,EAAI,AAClB,CAYA,SAAS,EAAO,CAAM,CAAE,CAAG,EACzB,OAOA,SAAc,CAAK,EACjB,EAAM,IAAI,CAAC,IAAI,CAAE,EAAO,GAAQ,GAC5B,GAAK,EAAI,IAAI,CAAC,IAAI,CAAE,EAC1B,CACF,CAMA,SAAS,IACP,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CACd,KAAM,WACN,SAAU,EAAE,AACd,EACF,CAgBA,SAAS,EAAM,CAAI,CAAE,CAAK,CAAE,CAAY,EAUtC,OAPA,AAFe,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAEzC,QAAQ,CAAC,IAAI,CAAC,GACrB,IAAI,CAA
C,KAAK,CAAC,IAAI,CAAC,GAChB,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,EAAO,EAAa,EAE1C,EAAK,QAAQ,CAAG,CACd,MAAO,EAAM,EAAM,KAAK,CAC1B,EACO,CACT,CAUA,SAAS,EAAO,CAAG,EACjB,OAOA,SAAe,CAAK,EACd,GAAK,EAAI,IAAI,CAAC,IAAI,CAAE,GACxB,EAAK,IAAI,CAAC,IAAI,CAAE,EAClB,CACF,CAYA,SAAS,EAAK,CAAK,CAAE,CAAW,EAC9B,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,GAAG,GACrB,EAAO,IAAI,CAAC,UAAU,CAAC,GAAG,GAChC,GAAK,EAWM,CAAI,CAAC,EAAE,CAAC,IAAI,GAAK,EAAM,IAAI,GAChC,EACF,EAAY,IAAI,CAAC,IAAI,CAAE,EAAO,CAAI,CAAC,EAAE,EAGrC,AADgB,CAAA,CAAI,CAAC,EAAE,EAAI,CAA3B,EACQ,IAAI,CAAC,IAAI,CAAE,EAAO,CAAI,CAAC,EAAE,QAfnC,MAAM,AAAI,MACR,iBACE,EAAM,IAAI,CACV,MACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,CAChB,MAAO,EAAM,KAAK,CAClB,IAAK,EAAM,GAAG,AAChB,GACA,oBAWN,OADA,EAAK,QAAQ,CAAC,GAAG,CAAG,EAAM,EAAM,GAAG,EAC5B,CACT,CAMA,SAAS,IACP,MAAO,AAAA,CAAA,EAAA,EAAA,QAAO,AAAP,EAAS,IAAI,CAAC,KAAK,CAAC,GAAG,GAChC,CAyJA,SAAS,EAAY,CAAK,EACxB,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAG,EAAE,CAC1C,EAAO,EAAK,QAAQ,CAAC,EAAK,QAAQ,CAAC,MAAM,CAAG,EAAE,CAC7C,GAAQ,AAAc,SAAd,EAAK,IAAI,GAIpB,AAFA,CAAA,EA6aK,CACL,KAAM,OACN,MAAO,EACT,CAhbE,EAEK,QAAQ,CAAG,CACd,MAAO,EAAM,EAAM,KAAK,CAC1B,EAEA,EAAK,QAAQ,CAAC,IAAI,CAAC,IAErB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,EAClB,CAOA,SAAS,EAAW,CAAK,EACvB,IAAM,EAAO,IAAI,CAAC,KAAK,CAAC,GAAG,EAC3B,CAAA,EAAK,KAAK,EAAI,IAAI,CAAC,cAAc,CAAC,GAClC,EAAK,QAAQ,CAAC,GAAG,CAAG,EAAM,EAAM,GAAG,CACrC,CA8BA,SAAS,IA/VP,EAgWQ,WAhWC,CAgWc,CAAA,CACzB,CA8LA,SAAS,EAA+B,CAAK,MAhiBvB,EAAA,EAiiBc,EAAM,IAAI,CAhiB5C,EAgiBQ,sBAhiBC,CAAG,CAiiBd,CA2DA,SAAS,IACP,MAAO,CACL,KAAM,OACN,KAAM,KACN,KAAM,KACN,MAAO,EACT,CACF,CA8BA,SAAS,IAEP,MAAO,CACL,KAAM,UACN,MAAO,KAAA,EACP,SAAU,EAAE,AACd,CACF,CAGA,SAAS,IACP,MAAO,CACL,KAAM,OACR,CACF,CAGA,SAAS,IACP,MAAO,CACL,KAAM,OACN,MAAO,EACT,CACF,CAaA,SAAS,IACP,MAAO,CACL,KAAM,OACN,MAAO,KACP,IAAK,GACL,SAAU,EAAE,AACd,CACF,CAMA,SAAS,EAAK,CAAK,EACjB,MAAO,CACL,KAAM,OACN,QAAS,AAAe,gBAAf,EAAM,IAAI,CACnB,MAAO,KACP,OAAQ,EAAM,OAAO,CACrB,SAAU,EAAE,AACd,CACF,CA6CF,CAAA,EA3kCoB,GACd,AAAA,CAAA,EAAA,EAAA,WAAU,AAAV,EACE,AAAA,CAAA,EAAA,EAAA,KAAI,AAAJ,EAAM,GAAS,QAAQ,GAAG,KAAK,CAAC,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,IAAa,EAAO,EAAU,CAAA,KAGpE,EAglCF,SAAS,EAAM,CAAC,EACd,MAAO,CACL,KAAM,EAAE,IAAI,CACZ,OAAQ,EAAE,MAAM,CAChB,OAAQ,EAAE,MAAM,AAClB,CACF,CAkDA,SAAS,EAAe,CAAI,CAAE,CAAK,EACjC,GAAI,EACF,MAAM,AAAI,MACR,iBACE,EAAK,IAAI,CACT,MACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,CAChB,MAAO,EAAK,KAAK,CACjB,IAAK,EAAK,GAAG,AACf,GACA,0BACA,EAAM,IAAI,CACV,MACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,CAChB,MAAO,EAAM,KAAK,CAClB,IAAK,EAAM,GAAG,AAChB,GACA,YAGJ,OAAM,AAAI,MACR,oCACE,EAAM,IAAI,CACV,MACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,CAChB,MAAO,EAAM,KAAK,CAClB,IAAK,EAAM,GAAG,AAChB,GACA,kBAGR,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GC11CA,IAAM,EAAe,CAAC,EAef,SAAS,EAAS,CAAK,CAAE,CAAO,EACrC,IAAM,EAAW,GAAW,EAQ5B,OAAO,EAAI,EANT,AAAoC,WAApC,OAAO,EAAS,eAAe,EAC3B,EAAS,eAAe,CAG5B,AAAgC,WAAhC,OAAO,EAAS,WAAW,EAAiB,EAAS,WAAW,CAGpE,CAcA,SAAS,EAAI,CAAK,CAAE,CAAe,CAAE,CAAW,EAC9C,GAsDe,AAtDN,GAsDe,AAAiB,UAAjB,OAtDf,EAAQ,CACf,GAAI,UAAW,EACb,MAAO,AAAe,SAAf,EAAM,IAAI,EAAgB,EAAmB,EAAM,KAAK,CAAhB,GAGjD,GAAI,GAAmB,QAAS,GAAS,EAAM,GAAG,CAChD,OAAO,EAAM,GAAG,CAGlB,GAAI,aAAc,EAChB,OAAO,EAAI,EAAM,QAAQ,CAAE,EAAiB,EAEhD,QAEA,AAAI,MAAM,OAAO,CAAC,GACT,EAAI,EAAO,EAAiB,GAG9B,EACT,CAcA,SAAS,EAAI,CAAM,CAAE,CAAe,CAAE,CAAW,EAE/C,IAAM,EAAS,EAAE,CACb,EAAQ,GAEZ,KAAO,EAAE,EAAQ,EAAO,MAAM,EAC5B,CAAM,CAAC,EAAM,CAAG,EAAI,CAAM,CAAC,EAAM,CAAE,EAAiB,GAGtD,OAAO,EAAO,IAAI,CAAC,GACrB,C,G,E,Q,S,C,C,C,E,E,E,O,C,Q,I,GCzFC,IAAA,EAAA,EAAA,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,SAcM,SAAS,EAAM,CAAO,EAO3B,IAAM,EAAS,CACb,QAAS,EAAE,CACX,KAAM,CAAC,EACP,WANA,A
AAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,CAAC,KAAuB,AAH3B,CAAA,GAAW,CAAC,CAAA,EAGwB,UAAU,EAAI,EAAE,CAAE,EAOrE,QAAS,EAAO,EAAA,OAAM,EACtB,SAAU,EAAO,EAAA,QAAO,EACxB,KAAM,EAAO,EAAA,IAAG,EAChB,OAAQ,EAAO,EAAA,MAAK,EACpB,KAAM,EAAO,EAAA,IAAG,CAClB,EACA,OAAO,EAKP,SAAS,EAAO,CAAO,EACrB,OAEA,SAAiB,CAAI,EACnB,MAAO,AAAA,CAAA,EAAA,EAAA,eAAc,AAAd,EAAgB,EAAQ,EAAS,EAC1C,CACF,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,oB,I,GC5CC,IAAA,EAAA,EAAA,SAID,IAAM,EAAiB,CAAC,EAAE,cAAc,CAUjC,SAAS,EAAkB,CAAU,EAE1C,IAAM,EAAM,CAAC,EACT,EAAQ,GAEZ,KAAO,EAAE,EAAQ,EAAW,MAAM,EAChC,AAeJ,CAAA,SAAyB,CAAG,CAAE,CAAS,EAErC,IAAI,EAEJ,IAAK,KAAQ,EAAW,KAOlB,EAJJ,IAAM,EAAO,AAFC,CAAA,EAAe,IAAI,CAAC,EAAK,GAAQ,CAAG,CAAC,EAAK,CAAG,KAAA,CAA3D,GAEuB,CAAA,CAAG,CAAC,EAAK,CAAG,CAAC,CAAA,EAE9B,EAAQ,CAAS,CAAC,EAAK,CAI7B,GAAI,EACF,IAAK,KAAQ,EAAO,CACb,EAAe,IAAI,CAAC,EAAM,IAAO,CAAA,CAAI,CAAC,EAAK,CAAG,EAAE,AAAF,EACnD,IAAM,EAAQ,CAAK,CAAC,EAAK,CACzB,AAkBR,CAAA,SAAoB,CAAQ,CAAE,CAAI,EAChC,IAAI,EAAQ,GAEN,EAAS,EAAE,CAEjB,KAAO,EAAE,EAAQ,EAAK,MAAM,EAEzB,AAAC,CAAA,AAAoB,UAApB,CAAI,CAAC,EAAM,CAAC,GAAG,CAAe,EAAW,CAAA,EAAQ,IAAI,CAAC,CAAI,CAAC,EAAM,EAGrE,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAU,EAAG,EAAG,EACzB,CAAA,EA3BU,CAAI,CAAC,EAAK,CACV,MAAM,OAAO,CAAC,GAAS,EAAQ,EAAQ,CAAC,EAAM,CAAG,EAAE,CAEvD,CAEJ,CACF,CAAA,EAxCoB,EAAK,CAAU,CAAC,EAAM,EAGxC,OAAO,CACT,C,G,E,Q,S,C,C,C,ECPO,SAAS,EAAO,CAAI,CAAE,CAAK,CAAE,CAAM,CAAE,CAAK,MAI3C,EAHJ,IAAM,EAAM,EAAK,MAAM,CACnB,EAAa,EAajB,GAPE,EADE,EAAQ,EACF,CAAC,EAAQ,EAAM,EAAI,EAAM,EAEzB,EAAQ,EAAM,EAAM,EAE9B,EAAS,EAAS,EAAI,EAAS,EAG3B,EAAM,MAAM,CAAG,IAEjB,AADA,CAAA,EAAa,MAAM,IAAI,CAAC,EAAxB,EACW,OAAO,CAAC,EAAO,GAE1B,EAAK,MAAM,IAAI,QAMf,IAHI,GAAQ,EAAK,MAAM,CAAC,EAAO,GAGxB,EAAa,EAAM,MAAM,EAE9B,AADA,CAAA,EAAa,EAAM,KAAK,CAAC,EAAY,EAAa,IAAlD,EACW,OAAO,CAAC,EAAO,GAE1B,EAAK,MAAM,IAAI,GACf,GAAc,IACd,GAAS,GAGf,CAkBO,SAAS,EAAK,CAAI,CAAE,CAAK,SAC9B,AAAI,EAAK,MAAM,CAAG,GAChB,EAAO,EAAM,EAAK,MAAM,CAAE,EAAG,GACtB,GAEF,CACT,C,E,E,O,C,S,I,G,E,E,O,C,O,I,E,G,E,Q,S,C,C,C,E,E,E,O,C,U,I,GC1EC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAU,CACrB,SAOF,SAA2B,CAAO,MAO5B,EANJ,IAAM,EAAe,EAAQ,OAAO,CAClC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,cAAc,CASvC,SAAoC,CAAI,EACtC,GAAI,AAAS,OAAT,EAAe,CACjB,EAAQ,OAAO,CAAC,GAChB,MACF,CAIA,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAc,aAC7C,EAGA,SAA0B,CAAI,EAE5B,OADA,EAAQ,KAAK,CAAC,aACP,AAIT,SAAS,EAAU,CAAI,EACrB,IAAM,EAAQ,EAAQ,KAAK,CAAC,YAAa,CACvC,YAAa,OACb,SAAA,CACF,GAKA,OAJI,GACF,CAAA,EAAS,IAAI,CAAG,CADlB,EAGA,EAAW,EACJ,AAIT,SAAS,EAAK,CAAI,EAChB,GAAI,AAAS,OAAT,EAAe,CACjB,EAAQ,IAAI,CAAC,aACb,EAAQ,IAAI,CAAC,aACb,EAAQ,OAAO,CAAC,GAChB,MACF,OACA,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,aACN,IAIT,EAAQ,OAAO,CAAC,GACT,EACT,EApBc,EACd,EAdmB,EACnB,GAlBA,OAAO,CAmDT,CAjEA,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,GCTC,IAAA,EAAA,EAAA,SAsCM,SAAS,EAAa,CAAO,CAAE,CAAE,CAAE,CAAI,CAAE,CAAG,EACjD,IAAM,EAAQ,EAAM,EAAM,EAAI,OAAO,iBAAiB,CAClD,EAAO,EACX,OAGA,SAAe,CAAI,QACjB,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,KAAK,CAAC,GACP,AAMX,SAAS,EAAO,CAAI,QAClB,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAAS,IAAS,GAClC,EAAQ,OAAO,CAAC,GACT,IAET,EAAQ,IAAI,CAAC,GACN,EAAG,GACZ,EAbkB,IAET,EAAG,EACZ,CAWF,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,G,E,E,O,C,oB,I,G,E,E,O,C,a,I,G,E,E,O,C,e,I,G,E,E,O,C,a,I,G,E,E,O,C,gB,I,G,E,E,O,C,mB,I,G,E,E,O,C,qB,I,G,E,E,O,C,4B,I,G,E,E,O,C,gB,I,G,E,E,O,C,qB,I,G,E,E,O,C,oB,I,GC/DC,IAAA,EAAA,EAAA,SAqBM,IAAM,EAAa,EAAW,YAcxB,EAAoB,EAAW,cAuB/B,EAAa,EAAW,uBAa9B,SAAS,EAAa,CAAI,EAC/B,OAGE,AAAS,OAAT,GAAkB,CAAA,EAAO,IAAM,AAAS,MAAT,CAAS,CAE5C,CAaO,IAAM,EAAa,EAAW,MAoBxB,EAAgB,EAAW,cAe3B,EAAmB,E
AAW,kBAiBpC,SAAS,EAAmB,CAAI,EACrC,OAAO,AAAS,OAAT,GAAiB,EAAO,EACjC,CAWO,SAAS,EAA0B,CAAI,EAC5C,OAAO,AAAS,OAAT,GAAkB,CAAA,EAAO,GAAK,AAAS,KAAT,CAAS,CAChD,CAiBO,SAAS,EAAc,CAAI,EAChC,OAAO,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,CACvC,CAuBO,IAAM,EAAqB,EAAW,EAAA,uBAAsB,EAsBtD,EAAoB,EAAW,MAQ5C,SAAS,EAAW,CAAK,EACvB,OAUA,SAAe,CAAI,EACjB,OAAO,AAAS,OAAT,GAAiB,EAAM,IAAI,CAAC,OAAO,YAAY,CAAC,GACzD,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,0B,I,GC3OO,IAAM,EACX,swC,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GCGD,IAAA,EAAA,EAAA,S,E,E,S,E,E,SAMM,IAAM,EAAW,CACtB,SAYF,SAA4B,CAAO,MAM7B,EAEA,EAEA,EATJ,IAAM,EAAO,IAAI,CAEX,EAAQ,EAAE,CACZ,EAAY,EAOhB,OAAO,EAGP,SAAS,EAAM,CAAI,EAWjB,GAAI,EAAY,EAAM,MAAM,CAAE,CAC5B,IAAM,EAAO,CAAK,CAAC,EAAU,CAE7B,OADA,EAAK,cAAc,CAAG,CAAI,CAAC,EAAE,CACtB,EAAQ,OAAO,CACpB,CAAI,CAAC,EAAE,CAAC,YAAY,CACpB,EACA,GACA,EACJ,CAGA,OAAO,EAAmB,EAC5B,CAGA,SAAS,EAAiB,CAAI,EAM5B,GALA,IAKI,EAAK,cAAc,CAAC,UAAU,CAAE,KAW9B,CAVJ,CAAA,EAAK,cAAc,CAAC,UAAU,CAAG,KAAA,EAC7B,GACF,IAKF,IAAM,EAAmB,EAAK,MAAM,CAAC,MAAM,CACvC,EAAkB,EAKtB,KAAO,KACL,GACE,AAAoC,SAApC,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,EAC/B,AAAyC,cAAzC,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,CAAC,IAAI,CACpC,CACA,EAAQ,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,CAAC,GAAG,CAC3C,KACF,CAEF,EAAe,GAGf,IAAI,EAAQ,EACZ,KAAO,EAAQ,EAAK,MAAM,CAAC,MAAM,EAC/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,GAC9C,IAaF,MATA,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EACE,EAAK,MAAM,CACX,EAAkB,EAClB,EACA,EAAK,MAAM,CAAC,KAAK,CAAC,IAIpB,EAAK,MAAM,CAAC,MAAM,CAAG,EACd,EAAmB,EAC5B,CACA,OAAO,EAAM,EACf,CAGA,SAAS,EAAmB,CAAI,EAM9B,GAAI,IAAc,EAAM,MAAM,CAAE,CAI9B,GAAI,CAAC,EACH,OAAO,EAAkB,GAM3B,GAAI,EAAU,gBAAgB,EAAI,EAAU,gBAAgB,CAAC,QAAQ,CACnE,OAAO,EAAU,EAQnB,CAAA,EAAK,SAAS,CAAG,CAAA,CACf,CAAA,EAAU,gBAAgB,EAAI,CAAC,EAAU,6BAA6B,AAA7B,CAE7C,CAIA,OADA,EAAK,cAAc,CAAG,CAAC,EAChB,EAAQ,KAAK,CAClB,EACA,EACA,GACA,EACJ,CAGA,SAAS,EAAqB,CAAI,EAGhC,OAFI,GAAW,IACf,EAAe,GACR,EAAkB,EAC3B,CAGA,SAAS,EAAsB,CAAI,EAGjC,OAFA,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,CAAG,IAAc,EAAM,MAAM,CAC9D,EAAkB,EAAK,GAAG,GAAG,MAAM,CAC5B,EAAU,EACnB,CAGA,SAAS,EAAkB,CAAI,EAG7B,OADA,EAAK,cAAc,CAAG,CAAC,EAChB,EAAQ,OAAO,CACpB,EACA,EACA,GACA,EACJ,CAGA,SAAS,EAAkB,CAAI,EAI7B,OAHA,IACA,EAAM,IAAI,CAAC,CAAC,EAAK,gBAAgB,CAAE,EAAK,cAAc,CAAC,EAEhD,EAAkB,EAC3B,CAGA,SAAS,EAAU,CAAI,EACrB,GAAI,AAAS,OAAT,EAAe,CACb,GAAW,IACf,EAAe,GACf,EAAQ,OAAO,CAAC,GAChB,MACF,CAOA,OANA,EAAY,GAAa,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,IAClD,EAAQ,KAAK,CAAC,YAAa,CACzB,YAAa,OACb,SAAU,EACV,WAAY,CACd,GACO,AAIT,SAAS,EAAa,CAAI,EACxB,GAAI,AAAS,OAAT,EAAe,CACjB,EAAa,EAAQ,IAAI,CAAC,aAAc,CAAA,GACxC,EAAe,GACf,EAAQ,OAAO,CAAC,GAChB,MACF,OACA,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,OAAO,CAAC,GAChB,EAAa,EAAQ,IAAI,CAAC,cAE1B,EAAY,EACZ,EAAK,SAAS,CAAG,KAAA,EACV,IAET,EAAQ,OAAO,CAAC,GACT,EACT,EArBsB,EACtB,CA2BA,SAAS,EAAa,CAAK,CAAE,CAAG,EAC9B,IAAM,EAAS,EAAK,WAAW,CAAC,GAyChC,GAxCI,GAAK,EAAO,IAAI,CAAC,MACrB,EAAM,QAAQ,CAAG,EACb,GAAY,CAAA,EAAW,IAAI,CAAG,CAAlC,EACA,EAAa,EACb,EAAU,UAAU,CAAC,EAAM,KAAK,EAChC,EAAU,KAAK,CAAC,GAmCZ,EAAK,MAAM,CAAC,IAAI,CAAC,EAAM,KAAK,CAAC,IAAI,CAAC,CAAE,CACtC,IAqBI,EAEA,EAvBA,EAAQ,EAAU,MAAM,CAAC,MAAM,CACnC,KAAO,KACL,GAEE,EAAU,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAG,GAEzC,CAAA,CAAC,EAAU,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,EAE9B,EAAU,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAA,EAI1C,OAMJ,IAAM,EAAmB,EAAK,MAAM,CAAC,MAAM,CACvC,EAAkB,EAOtB,KAAO,KACL,GACE,AAAoC,SAApC,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,EAC/B,AAAyC,cAAzC,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,CAAC,IAAI,CACpC,CACA,GAAI,EAAM,CACR,EAAQ,EAAK,MAAM,CAAC,EAAgB,CAAC,EAAE,CAAC,GAAG,CAC3C,KACF,CACA,EAAO,CAAA,CACT,CAMF,IAJA,EAAe
,GAGf,EAAQ,EACD,EAAQ,EAAK,MAAM,CAAC,MAAM,EAC/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,GAC9C,IAIF,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EACE,EAAK,MAAM,CACX,EAAkB,EAClB,EACA,EAAK,MAAM,CAAC,KAAK,CAAC,IAIpB,EAAK,MAAM,CAAC,MAAM,CAAG,CACvB,CACF,CAMA,SAAS,EAAe,CAAI,EAC1B,IAAI,EAAQ,EAAM,MAAM,CAGxB,KAAO,KAAU,GAAM,CACrB,IAAM,EAAQ,CAAK,CAAC,EAAM,AAC1B,CAAA,EAAK,cAAc,CAAG,CAAK,CAAC,EAAE,CAC9B,CAAK,CAAC,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,EAAM,EAC3B,CACA,EAAM,MAAM,CAAG,CACjB,CACA,SAAS,IACP,EAAU,KAAK,CAAC,CAAC,KAAK,EACtB,EAAa,KAAA,EACb,EAAY,KAAA,EACZ,EAAK,cAAc,CAAC,UAAU,CAAG,KAAA,CACnC,CACF,CAxVA,EAGM,EAAqB,CACzB,SA0VF,SAA2B,CAAO,CAAE,CAAE,CAAE,CAAG,EAGzC,MAAO,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EAAQ,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAE,EAAI,GACrD,aACA,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBAAkB,KAAA,EAAY,EAE/E,CAlWA,C,G,E,Q,S,C,C,C,E,E,E,O,C,O,I,G,I,E,E,SCtBC,EAAA,EAAA,S,E,E,SAMM,IAAM,EAAO,CAClB,SAOF,SAAwB,CAAO,EAC7B,IAAM,EAAO,IAAI,CACX,EAAU,EAAQ,OAAO,CAE7B,EAAA,SAAQ,CAoBV,SAAuB,CAAI,EACzB,GAAI,AAAS,OAAT,EAAe,CACjB,EAAQ,OAAO,CAAC,GAChB,MACF,CAKA,OAJA,EAAQ,KAAK,CAAC,mBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,mBACb,EAAK,gBAAgB,CAAG,KAAA,EACjB,CACT,EA3BE,EAAQ,OAAO,CACb,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,WAAW,CAClC,EACA,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACE,EACA,EAAQ,OAAO,CACb,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAC3B,EACA,EAAQ,OAAO,CAAC,EAAA,OAAM,CAAG,IAE3B,gBAIN,OAAO,EAgBP,SAAS,EAAe,CAAI,EAC1B,GAAI,AAAS,OAAT,EAAe,CACjB,EAAQ,OAAO,CAAC,GAChB,MACF,CAKA,OAJA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACb,EAAK,gBAAgB,CAAG,KAAA,EACjB,CACT,CACF,CAtDA,C,G,E,Q,S,C,C,C,E,E,E,O,C,Y,I,GCRC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAY,CACvB,SAQF,SAA2B,CAAO,CAAE,CAAE,CAAE,CAAG,EACzC,OAgBA,SAAe,CAAI,EACjB,MAAO,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAO,cAAc,GAC3C,EAAM,EACZ,EAgBA,SAAS,EAAM,CAAI,EACjB,OAAO,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAAQ,EAAG,GAAQ,EAAI,EACpE,CACF,EA/CE,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,U,I,GCNC,IAAA,EAAA,EAAA,S,E,E,S,E,E,SASM,IAAM,EAAU,CACrB,SAyBF,SAAyB,CAAO,CAAE,CAAE,EAElC,IAAI,EACJ,OAYA,SAAoB,CAAI,EAKtB,OAJA,EAAQ,KAAK,CAAC,WACd,EAAW,EAAQ,KAAK,CAAC,eAAgB,CACvC,YAAa,SACf,GACO,EAAY,EACrB,EAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,OAAT,EACK,EAAW,GAKhB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GACd,EAAQ,KAAK,CAClB,EACA,EACA,GACA,IAIJ,EAAQ,OAAO,CAAC,GACT,EACT,CAOA,SAAS,EAAW,CAAI,EAGtB,OAFA,EAAQ,IAAI,CAAC,gBACb,EAAQ,IAAI,CAAC,WACN,EAAG,EACZ,CAOA,SAAS,EAAgB,CAAI,EAQ3B,OAPA,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,gBACb,EAAS,IAAI,CAAG,EAAQ,KAAK,CAAC,eAAgB,CAC5C,YAAa,UACb,SAAA,CACF,GACA,EAAW,EAAS,IAAI,CACjB,CACT,CACF,EAvGE,QAeF,SAAwB,CAAM,EAE5B,MADA,AAAA,CAAA,EAAA,EAAA,WAAU,AAAV,EAAY,GACL,CACT,CAjBA,EAGM,EAAwB,CAC5B,SAwGF,SAA8B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC5C,IAAM,EAAO,IAAI,CACjB,OAOA,SAAwB,CAAI,EAK1B,OAJA,EAAQ,IAAI,CAAC,gBACb,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAU,aACzC,EAOA,SAAS,EAAS,CAAI,EACpB,GAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GACtC,OAAO,EAAI,GAKb,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,OAChD,AACE,CAAC,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,iBAC9C,GACA,AAAiB,eAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,EACZ,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAAM,MAAM,EAAI,EAEzC,EAAG,GAEL,EAAQ,SAAS,CAAC,EAAK,MAAM,CAAC,UAAU,CAAC,IAAI,CAAE,EAAK,GAAI,EACjE,CACF,EA/IE,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,c,I,GCrBC,IAAA,EAAA,EAAA,SAWM,SAAS,EAAY,CAAM,MAK5B,EAEA,EAEA,EAEA,EAEA,EAEA,EAEA,EAf
J,IAAM,EAAQ,CAAC,EACX,EAAQ,GAeZ,KAAO,EAAE,EAAQ,EAAO,MAAM,EAAE,CAC9B,KAAO,CAAA,KAAS,CAAA,GACd,EAAQ,CAAK,CAAC,EAAM,CAMtB,GAJA,EAAQ,CAAM,CAAC,EAAM,CAKnB,GACA,AAAkB,cAAlB,CAAK,CAAC,EAAE,CAAC,IAAI,EACb,AAA8B,mBAA9B,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,IAAI,GAGzB,CAAA,EAAa,CAAA,EAEE,AAHf,CAAA,EAAY,CAAK,CAAC,EAAE,CAAC,UAAU,CAAC,MAAM,AAAN,EAGP,MAAM,EAC7B,AAAkC,oBAAlC,CAAS,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,EAE7B,CAAA,GAAc,CAAA,EAGd,EAAa,EAAU,MAAM,EAC7B,AAAkC,YAAlC,CAAS,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,EAE7B,KACE,AADK,EAAE,EAAa,EAAU,MAAM,EAChC,AAAkC,YAAlC,CAAS,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,EAGK,cAAlC,CAAS,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,GAC/B,CAAS,CAAC,EAAW,CAAC,EAAE,CAAC,2BAA2B,CAAG,CAAA,EACvD,KAOR,GAAI,AAAa,UAAb,CAAK,CAAC,EAAE,CACN,CAAK,CAAC,EAAE,CAAC,WAAW,GACtB,OAAO,MAAM,CAAC,EAAO,AA+C7B,SAAoB,CAAM,CAAE,CAAU,MAchC,EAEA,EAfJ,IAAM,EAAQ,CAAM,CAAC,EAAW,CAAC,EAAE,CAC7B,EAAU,CAAM,CAAC,EAAW,CAAC,EAAE,CACjC,EAAgB,EAAa,EAE3B,EAAiB,EAAE,CACnB,EACJ,EAAM,UAAU,EAAI,EAAQ,MAAM,CAAC,EAAM,WAAW,CAAC,CAAC,EAAM,KAAK,EAC7D,EAAc,EAAU,MAAM,CAE9B,EAAQ,EAAE,CAEV,EAAO,CAAC,EAKV,EAAQ,GAER,EAAU,EACV,EAAS,EACT,EAAQ,EACN,EAAS,CADH,EACU,CAItB,KAAO,GAAS,CAEd,KAAO,CAAM,CAAC,EAAE,EAAc,CAAC,EAAE,GAAK,IAGtC,EAAe,IAAI,CAAC,GAChB,CAAC,EAAQ,UAAU,GACrB,EAAS,EAAQ,WAAW,CAAC,GACxB,EAAQ,IAAI,EACf,EAAO,IAAI,CAAC,MAEV,GACF,EAAU,UAAU,CAAC,EAAQ,KAAK,EAEhC,EAAQ,2BAA2B,EACrC,CAAA,EAAU,kCAAkC,CAAG,CAAA,CADjD,EAGA,EAAU,KAAK,CAAC,GACZ,EAAQ,2BAA2B,EACrC,CAAA,EAAU,kCAAkC,CAAG,KAAA,CADjD,GAMF,EAAW,EACX,EAAU,EAAQ,IAAI,AACxB,CAKA,IADA,EAAU,EACH,EAAE,EAAQ,EAAY,MAAM,EAGL,SAA1B,CAAW,CAAC,EAAM,CAAC,EAAE,EACrB,AAA8B,UAA9B,CAAW,CAAC,EAAQ,EAAE,CAAC,EAAE,EACzB,CAAW,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,GAAK,CAAW,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,IAAI,EAC7D,CAAW,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,IAAI,GAAK,CAAW,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,GAEnE,EAAQ,EAAQ,EAChB,EAAO,IAAI,CAAC,GAEZ,EAAQ,UAAU,CAAG,KAAA,EACrB,EAAQ,QAAQ,CAAG,KAAA,EACnB,EAAU,EAAQ,IAAI,EAqB1B,IAhBA,EAAU,MAAM,CAAG,EAAE,CAKjB,GAEF,EAAQ,UAAU,CAAG,KAAA,EACrB,EAAQ,QAAQ,CAAG,KAAA,GAEnB,EAAO,GAAG,GAKZ,EAAQ,EAAO,MAAM,CACd,KAAS,CACd,IAAM,EAAQ,EAAY,KAAK,CAAC,CAAM,CAAC,EAAM,CAAE,CAAM,CAAC,EAAQ,EAAE,EAC1D,EAAQ,EAAe,GAAG,GAChC,EAAM,OAAO,CAAC,CAAC,EAAO,EAAQ,EAAM,MAAM,CAAG,EAAE,EAC/C,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAQ,EAAO,EAAG,EAC3B,CAEA,IADA,EAAQ,GACD,EAAE,EAAQ,EAAM,MAAM,EAC3B,CAAI,CAAC,EAAS,CAAK,CAAC,EAAM,CAAC,EAAE,CAAC,CAAG,EAAS,CAAK,CAAC,EAAM,CAAC,EAAE,CACzD,GAAU,CAAK,CAAC,EAAM,CAAC,EAAE,CAAG,CAAK,CAAC,EAAM,CAAC,EAAE,CAAG,EAEhD,OAAO,CACT,EAtJwC,EAAQ,IACxC,EAAQ,CAAK,CAAC,EAAM,CACpB,EAAO,CAAA,QAIN,GAAI,CAAK,CAAC,EAAE,CAAC,UAAU,CAAE,CAG5B,IAFA,EAAa,EACb,EAAY,KAAA,EACL,KAEL,GACE,AAAuB,eAAvB,AAFF,CAAA,EAAa,CAAM,CAAC,EAAW,AAAX,CAER,CAAC,EAAE,CAAC,IAAI,EAClB,AAAuB,oBAAvB,CAAU,CAAC,EAAE,CAAC,IAAI,CAEI,UAAlB,CAAU,CAAC,EAAE,GACX,GACF,CAAA,CAAM,CAAC,EAAU,CAAC,EAAE,CAAC,IAAI,CAAG,iBAD9B,EAGA,CAAU,CAAC,EAAE,CAAC,IAAI,CAAG,aACrB,EAAY,QAGd,MAGA,IAEF,CAAK,CAAC,EAAE,CAAC,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAU,CAAC,EAAE,CAAC,KAAK,EAI3D,AADA,CAAA,EAAa,EAAO,KAAK,CAAC,EAAW,EAArC,EACW,OAAO,CAAC,GACnB,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAQ,EAAW,EAAQ,EAAY,EAAG,GAErD,CACF,CACA,MAAO,CAAC,CACV,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,G,E,E,O,C,S,I,G,E,E,O,C,O,I,GCtGO,IAAM,EAAW,CACtB,WAAY,GACd,EACa,EAAS,EAAkB,UAC3B,EAAO,EAAkB,QAMtC,SAAS,EAAkB,CAAK,EAC9B,MAAO,CACL,SAUF,SAAwB,CAAO,EAC7B,IAAM,EAAO,IAAI,CACX,EAAa,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,EAAM,CAC1C,EAAO,EAAQ,OAAO,CAAC,EAAY,EAAO,GAChD,OAAO,EAGP,SAAS,EAAM,CAAI,EACjB,OAAO,EAAQ,GAAQ,EAAK,GAAQ,EAAQ,EAC9C,CAGA,SAAS,EAAQ,CAAI,EACnB,GAAI,AAAS,OAAT,EAAe,CACjB,EAAQ,OAAO,CAAC,GAChB,MACF,CAGA,OAFA,EAAQ,KAA
K,CAAC,QACd,EAAQ,OAAO,CAAC,GACT,CACT,CAGA,SAAS,EAAK,CAAI,SAChB,AAAI,EAAQ,IACV,EAAQ,IAAI,CAAC,QACN,EAAK,KAId,EAAQ,OAAO,CAAC,GACT,EACT,CAMA,SAAS,EAAQ,CAAI,EACnB,GAAI,AAAS,OAAT,EACF,MAAO,CAAA,EAET,IAAM,EAAO,CAAU,CAAC,EAAK,CACzB,EAAQ,GACZ,GAAI,EAGF,KAAO,EAAE,EAAQ,EAAK,MAAM,EAAE,CAC5B,IAAM,EAAO,CAAI,CAAC,EAAM,CACxB,GAAI,CAAC,EAAK,QAAQ,EAAI,EAAK,QAAQ,CAAC,IAAI,CAAC,EAAM,EAAK,QAAQ,EAC1D,MAAO,CAAA,CAEX,CAEF,MAAO,CAAA,CACT,CACF,EAjEE,WAAY,EACV,AAAU,SAAV,EAAmB,EAAyB,KAAA,EAEhD,CA+DF,CAMA,SAAS,EAAe,CAAa,EACnC,OAGA,SAAwB,CAAM,CAAE,CAAO,EACrC,IAEI,EAFA,EAAQ,GAMZ,KAAO,EAAE,GAAS,EAAO,MAAM,EACzB,AAAU,KAAA,IAAV,EACE,CAAM,CAAC,EAAM,EAAI,AAA0B,SAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,GACxC,EAAQ,EACR,KAEQ,CAAM,CAAC,EAAM,EAAI,AAA0B,SAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,GAE5C,IAAU,EAAQ,IACpB,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAG,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,GAAG,CAC/C,EAAO,MAAM,CAAC,EAAQ,EAAG,EAAQ,EAAQ,GACzC,EAAQ,EAAQ,GAElB,EAAQ,KAAA,GAGZ,OAAO,EAAgB,EAAc,EAAQ,GAAW,CAC1D,CACF,CAaA,SAAS,EAAuB,CAAM,CAAE,CAAO,EAC7C,IAAI,EAAa,EAEjB,KAAO,EAAE,GAAc,EAAO,MAAM,EAClC,GACG,AAAA,CAAA,IAAe,EAAO,MAAM,EAC3B,AAA+B,eAA/B,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,AAAK,GACjC,AAAmC,SAAnC,CAAM,CAAC,EAAa,EAAE,CAAC,EAAE,CAAC,IAAI,CAC9B,KAOI,EANJ,IAAM,EAAO,CAAM,CAAC,EAAa,EAAE,CAAC,EAAE,CAChC,EAAS,EAAQ,WAAW,CAAC,GAC/B,EAAQ,EAAO,MAAM,CACrB,EAAc,GACd,EAAO,EAGX,KAAO,KAAS,CACd,IAAM,EAAQ,CAAM,CAAC,EAAM,CAC3B,GAAI,AAAiB,UAAjB,OAAO,EAAoB,CAE7B,IADA,EAAc,EAAM,MAAM,CACnB,AAAsC,KAAtC,EAAM,UAAU,CAAC,EAAc,IACpC,IACA,IAEF,GAAI,EAAa,MACjB,EAAc,EAChB,MAEK,GAAI,AAAU,KAAV,EACP,EAAO,CAAA,EACP,SACK,GAAI,AAAU,KAAV,OAEJ,CAEL,IACA,KACF,CACF,CACA,GAAI,EAAM,CACR,IAAM,EAAQ,CACZ,KACE,IAAe,EAAO,MAAM,EAAI,GAAQ,EAAO,EAC3C,aACA,oBACN,MAAO,CACL,KAAM,EAAK,GAAG,CAAC,IAAI,CACnB,OAAQ,EAAK,GAAG,CAAC,MAAM,CAAG,EAC1B,OAAQ,EAAK,GAAG,CAAC,MAAM,CAAG,EAC1B,OAAQ,EAAK,KAAK,CAAC,MAAM,CAAG,EAC5B,aAAc,EACV,EACA,EAAK,KAAK,CAAC,YAAY,CAAG,CAChC,EACA,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,EAAK,GAAG,CACjC,CACA,CAAA,EAAK,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,EAAM,KAAK,EACpC,EAAK,KAAK,CAAC,MAAM,GAAK,EAAK,GAAG,CAAC,MAAM,CACvC,OAAO,MAAM,CAAC,EAAM,IAEpB,EAAO,MAAM,CACX,EACA,EACA,CAAC,QAAS,EAAO,EAAQ,CACzB,CAAC,OAAQ,EAAO,EAAQ,EAE1B,GAAc,EAElB,CACA,GACF,CAEF,OAAO,CACT,C,G,E,Q,S,C,C,C,E,E,E,O,C,kB,I,GCrLC,IAAA,EAAA,EAAA,S,E,E,S,E,E,SAmBM,SAAS,EAAgB,CAAM,CAAE,CAAU,CAAE,CAAI,EAEtD,IAAI,EAAQ,OAAO,MAAM,CACvB,EACI,OAAO,MAAM,CAAC,CAAC,EAAG,GAClB,CACE,KAAM,EACN,OAAQ,EACR,OAAQ,CACV,EACJ,CACE,OAAQ,EACR,aAAc,EAChB,GAGI,EAAc,CAAC,EAEf,EAAuB,EAAE,CAE3B,EAAS,EAAE,CAEX,EAAQ,EAAE,CASR,EAAU,CACd,QAkJF,SAAiB,CAAI,EACf,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAM,IAAI,GACV,EAAM,MAAM,CAAG,EACf,EAAM,MAAM,EAAI,AAAS,KAAT,EAAc,EAAI,EAClC,KACkB,KAAT,IACT,EAAM,MAAM,GACZ,EAAM,MAAM,IAIV,EAAM,YAAY,CAAG,EACvB,EAAM,MAAM,IAEZ,EAAM,YAAY,GAKd,EAAM,YAAY,GAAK,CAAM,CAAC,EAAM,MAAM,CAAC,CAAC,MAAM,GACpD,EAAM,YAAY,CAAG,GACrB,EAAM,MAAM,KAKhB,EAAQ,QAAQ,CAAG,CAIrB,EAhLE,MAmLF,SAAe,CAAI,CAAE,CAAM,EAGzB,IAAM,EAAQ,GAAU,CAAC,EAKzB,OAJA,EAAM,IAAI,CAAG,EACb,EAAM,KAAK,CAAG,IACd,EAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,QAAS,EAAO,EAAQ,EAC7C,EAAM,IAAI,CAAC,GACJ,CACT,EA3LE,KA8LF,SAAc,CAAI,EAChB,IAAM,EAAQ,EAAM,GAAG,GAGvB,OAFA,EAAM,GAAG,CAAG,IACZ,EAAQ,MAAM,CAAC,IAAI,CAAC,CAAC,OAAQ,EAAO,EAAQ,EACrC,CACT,EAlME,QAAS,EAyMX,SAA+B,CAAS,CAAE,CAAI,EAC5C,EAAU,EAAW,EAAK,IAAI,CAChC,GA1ME,MAAO,EAAiB,GACxB,UAAW,EAAiB,EAAmB,CAC7C,UAAW,CAAA,CACb,EACF,EAOM,EAAU,CACd,SAAU,KACV,KAAM,KACN,eAAgB,CAAC,EACjB,OAAQ,EAAE,CACV,OAAA,EACA,YAAA,EACA,eA6CF,SAAwB,CAAK,CAAE,CAAU,EACvC,OAAO,AAsYX,SAAyB,CAAM,CAAE,CAAU,EACzC,IAII,EAJA,EAAQ,GAEN,EAAS,EAAE,CAGjB,KAAO,EAAE,EAAQ,EAAO,MAAM,EAAE,KAG1B,EAFJ,
IAAM,EAAQ,CAAM,CAAC,EAAM,CAG3B,GAAI,AAAiB,UAAjB,OAAO,EACT,EAAQ,OAER,OAAQ,GACN,KAAK,GACH,EAAQ,KACR,KAEF,MAAK,GACH,EAAQ,KACR,KAEF,MAAK,GACH,EAAQ,OACR,KAEF,MAAK,GACH,EAAQ,EAAa,IAAM,IAC3B,KAEF,MAAK,GACH,GAAI,CAAC,GAAc,EAAO,SAC1B,EAAQ,IACR,KAEF,SAEE,EAAQ,OAAO,YAAY,CAAC,EAEhC,CACF,EAAQ,AAAU,KAAV,EACR,EAAO,IAAI,CAAC,EACd,CACA,OAAO,EAAO,IAAI,CAAC,GACrB,EAlb2B,EAAY,GAAQ,EAC7C,EA9CE,IAAA,EACA,WAkEF,SAAoB,CAAK,EACvB,CAAW,CAAC,EAAM,IAAI,CAAC,CAAG,EAAM,MAAM,CACtC,GACF,EApEE,MAsBF,SAAe,CAAK,QAKlB,CAJA,EAAS,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAQ,GACtB,AA4DF,WAEE,IAAI,EACJ,KAAO,EAAM,MAAM,CAAG,EAAO,MAAM,EAAE,CACnC,IAAM,EAAQ,CAAM,CAAC,EAAM,MAAM,CAAC,CAGlC,GAAI,AAAiB,UAAjB,OAAO,EAKT,IAJA,EAAa,EAAM,MAAM,CACrB,EAAM,YAAY,CAAG,GACvB,CAAA,EAAM,YAAY,CAAG,CAAA,EAGrB,EAAM,MAAM,GAAK,GACjB,EAAM,YAAY,CAAG,EAAM,MAAM,EAEjC,KAcI,EAAA,EAdD,EAAM,UAAU,CAAC,EAAM,YAAY,EAiB5C,EAAQ,EAAM,EArBV,MAqBJ,EAAQ,EAdD,EAEP,CACF,IA/EM,AAA8B,OAA9B,CAAM,CAAC,EAAO,MAAM,CAAG,EAAE,EACpB,EAAE,EAEX,EAAU,EAAY,GAGtB,EAAQ,MAAM,CAAG,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,EAAsB,EAAQ,MAAM,CAAE,GAC3D,EAAQ,MAAM,CACvB,CAlCA,EAOI,EAAQ,EAAW,QAAQ,CAAC,IAAI,CAAC,EAAS,GAW9C,OAHI,EAAW,UAAU,EACvB,EAAqB,IAAI,CAAC,GAErB,EA4BP,SAAS,EAAY,CAAK,EACxB,OAAO,AA8VX,SAAqB,CAAM,CAAE,CAAK,MAM5B,EALJ,IAAM,EAAa,EAAM,KAAK,CAAC,MAAM,CAC/B,EAAmB,EAAM,KAAK,CAAC,YAAY,CAC3C,EAAW,EAAM,GAAG,CAAC,MAAM,CAC3B,EAAiB,EAAM,GAAG,CAAC,YAAY,CAG7C,GAAI,IAAe,EAEjB,EAAO,CAAC,CAAM,CAAC,EAAW,CAAC,KAAK,CAAC,EAAkB,GAAgB,KAC9D,CAEL,GADA,EAAO,EAAO,KAAK,CAAC,EAAY,GAC5B,EAAmB,GAAI,CACzB,IAAM,EAAO,CAAI,CAAC,EAAE,AAChB,AAAgB,CAAA,UAAhB,OAAO,EACT,CAAI,CAAC,EAAE,CAAG,EAAK,KAAK,CAAC,GAErB,EAAK,KAAK,EAEd,CACI,EAAiB,GAEnB,EAAK,IAAI,CAAC,CAAM,CAAC,EAAS,CAAC,KAAK,CAAC,EAAG,GAExC,CACA,OAAO,CACT,EAxXuB,EAAQ,EAC7B,CAGA,SAAS,IAEP,GAAM,CAAA,KAAC,CAAI,CAAA,OAAE,CAAM,CAAA,OAAE,CAAM,CAAA,OAAE,CAAM,CAAA,aAAE,CAAY,CAAC,CAAG,EACrD,MAAO,CACL,KAAA,EACA,OAAA,EACA,OAAA,EACA,OAAA,EACA,aAAA,CACF,CACF,CA8HA,SAAS,EAAkB,CAAC,CAAE,CAAI,EAChC,EAAK,OAAO,EACd,CAQA,SAAS,EAAiB,CAAQ,CAAE,CAAM,EACxC,OAWA,SAAc,CAAU,CAAE,CAAW,CAAE,CAAU,EAE/C,IAAI,EAEA,EAEA,EAEA,EACJ,OAAO,MAAM,OAAO,CAAC,GACjB,EAAuB,GACvB,aAAc,EAEd,EAAuB,CAAC,EAAW,EAarC,SAAe,CAAI,EACjB,IAAM,EAAM,AAAS,OAAT,GAAiB,AAbP,CAaU,CAAC,EAAK,CAChC,EAAM,AAAS,OAAT,GAAiB,AAdP,EAcW,IAAI,CAOrC,OAAO,EANM,IAGP,MAAM,OAAO,CAAC,GAAO,EAAM,EAAM,CAAC,EAAI,CAAG,EAAE,IAC3C,MAAM,OAAO,CAAC,GAAO,EAAM,EAAM,CAAC,EAAI,CAAG,EAAE,CAChD,EACmC,EACtC,EASF,SAAS,EAAuB,CAAI,QAGlC,CAFA,EAAmB,EACnB,EAAiB,EACb,AAAgB,IAAhB,EAAK,MAAM,EACN,EAEF,EAAgB,CAAI,CAAC,EAAe,CAC7C,CAQA,SAAS,EAAgB,CAAS,EAChC,OAGA,SAAe,CAAI,QAajB,CARA,EAAO,AAwEf,WACE,IAAM,EAAa,IACb,EAAgB,EAAQ,QAAQ,CAChC,EAAwB,EAAQ,gBAAgB,CAChD,EAAmB,EAAQ,MAAM,CAAC,MAAM,CACxC,EAAa,MAAM,IAAI,CAAC,GAC9B,MAAO,CACL,QASF,WACE,EAAQ,EACR,EAAQ,QAAQ,CAAG,EACnB,EAAQ,gBAAgB,CAAG,EAC3B,EAAQ,MAAM,CAAC,MAAM,CAAG,EACxB,EAAQ,EACR,GACF,EAfE,KAAM,CACR,CAeF,IA/FQ,EAAmB,EACd,EAAU,OAAO,EACpB,CAAA,EAAQ,gBAAgB,CAAG,CAD7B,EAOE,EAAU,IAAI,EACd,EAAQ,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAU,IAAI,GAEvD,EAAI,GAEN,EAAU,QAAQ,CAAC,IAAI,CAI5B,EAAS,OAAO,MAAM,CAAC,OAAO,MAAM,CAAC,GAAU,GAAU,EACzD,EACA,EACA,GACA,EACJ,CACF,CAGA,SAAS,EAAG,CAAI,EAGd,OADA,EAAS,EAAkB,GACpB,CACT,CAGA,SAAS,EAAI,CAAI,QAGf,CADA,EAAK,OAAO,GACR,EAAE,EAAiB,EAAiB,MAAM,EACrC,EAAgB,CAAgB,CAAC,EAAe,EAElD,CACT,CACF,CACF,CAOA,SAAS,EAAU,CAAS,CAAE,CAAI,EAC5B,EAAU,UAAU,EAAI,CAAC,EAAqB,QAAQ,CAAC,IACzD,EAAqB,IAAI,CAAC,GAExB,EAAU,OAAO,EACnB,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EACE,EAAQ,MAAM,CACd,EACA,EAAQ,MAAM,CAAC,MAAM,CAAG,EACxB,EAAU,OAAO,CAAC,EAAQ,MAAM,CAAC,KAAK,CAAC,GAAO,IAG9C,EAAU,SAAS,EACrB,CAAA,EAAQ,MAAM,CAAG,EAAU,SAAS,CAAC,EAAQ,MAAM,CA
AE,EADvD,CAGF,CAuCA,SAAS,IACH,EAAM,IAAI,IAAI,GAAe,EAAM,MAAM,CAAG,IAC9C,EAAM,MAAM,CAAG,CAAW,CAAC,EAAM,IAAI,CAAC,CACtC,EAAM,MAAM,EAAI,CAAW,CAAC,EAAM,IAAI,CAAC,CAAG,EAE9C,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GC3dO,SAAS,EAAW,CAAU,CAAE,CAAM,CAAE,CAAO,EAEpD,IAAM,EAAS,EAAE,CACb,EAAQ,GAEZ,KAAO,EAAE,EAAQ,EAAW,MAAM,EAAE,CAClC,IAAM,EAAU,CAAU,CAAC,EAAM,CAAC,UAAU,CAExC,GAAW,CAAC,EAAO,QAAQ,CAAC,KAC9B,EAAS,EAAQ,EAAQ,GACzB,EAAO,IAAI,CAAC,GAEhB,CAEA,OAAO,CACT,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,G,E,E,O,C,iB,I,G,E,E,O,C,c,I,G,E,E,O,C,O,I,G,E,E,O,C,S,I,G,E,E,O,C,O,I,G,E,E,O,C,a,I,G,E,E,O,C,mB,I,G,E,E,O,C,U,I,G,I,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,SC/BC,EAAA,EAAA,S,E,E,SA2BM,IAAM,EAAW,CACrB,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,IAAG,CACR,GAAK,EAAA,UAAS,AACjB,EAGa,EAAiB,CAC3B,GAAK,EAAA,UAAS,AACjB,EAGa,EAAc,CACzB,CAAC,GAAG,CAAE,EAAA,YAAW,CACjB,CAAC,GAAG,CAAE,EAAA,YAAW,CAChB,GAAK,EAAA,YAAW,AACnB,EAGa,EAAO,CACjB,GAAK,EAAA,UAAS,CACd,GAAK,EAAA,aAAY,CACjB,GAAK,CAAC,EAAA,eAAc,CAAG,EAAA,aAAY,CAAE,CACrC,GAAK,EAAA,QAAO,CACZ,GAAK,EAAA,eAAc,CACnB,GAAK,EAAA,aAAY,CACjB,GAAK,EAAA,UAAS,CACd,IAAM,EAAA,UAAS,AAClB,EAGa,EAAS,CACnB,GAAK,EAAA,kBAAiB,CACtB,GAAK,EAAA,eAAc,AACtB,EAGa,EAAO,CAClB,CAAC,GAAG,CAAE,EAAA,UAAS,CACf,CAAC,GAAG,CAAE,EAAA,UAAS,CACf,CAAC,GAAG,CAAE,EAAA,UAAS,CACd,GAAK,EAAA,eAAc,CACnB,GAAK,EAAA,kBAAiB,CACtB,GAAK,EAAA,SAAQ,CACb,GAAK,CAAC,EAAA,QAAO,CAAG,EAAA,QAAO,CAAE,CACzB,GAAK,EAAA,cAAa,CAClB,GAAK,CAAC,EAAA,eAAc,CAAG,EAAA,eAAc,CAAE,CACvC,GAAK,EAAA,QAAO,CACZ,GAAK,EAAA,SAAQ,CACb,GAAK,EAAA,QAAO,AACf,EAGa,EAAa,CACxB,KAAM,CAAC,EAAA,SAAQ,CAAG,EAAA,QAAU,CAAE,AAChC,EAGa,EAAmB,CAC9B,KAAM,CAAC,GAAI,GAAG,AAChB,EAGa,EAAU,CACrB,KAAM,EAAE,AACV,C,G,E,Q,S,C,C,C,E,E,E,O,C,Y,I,GC/FC,IAAA,EAAA,EAAA,S,E,E,S,E,E,SAMM,IAAM,EAAY,CACvB,KAAM,YACN,SA4KF,SAA2B,CAAO,CAAE,CAAE,MAMhC,EALJ,IAAM,EAAmB,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,gBAAgB,CAAC,IAAI,CAC/D,EAAW,IAAI,CAAC,QAAQ,CACxB,EAAS,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,GAIjC,OAYA,SAAe,CAAI,EAGjB,OAFA,EAAS,EACT,EAAQ,KAAK,CAAC,qBACP,AAaT,SAAS,EAAO,CAAI,EAClB,GAAI,IAAS,EAEX,OADA,EAAQ,OAAO,CAAC,GACT,EAET,IAAM,EAAQ,EAAQ,IAAI,CAAC,qBAGrB,EAAQ,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,GAI1B,EACJ,CAAC,GAAU,AAAU,IAAV,GAAe,GAAW,EAAiB,QAAQ,CAAC,GAC3D,EACJ,CAAC,GAAW,AAAW,IAAX,GAAgB,GAAU,EAAiB,QAAQ,CAAC,GAGlE,OAFA,EAAM,KAAK,CAAG,CAAA,CAAQ,CAAA,AAAW,KAAX,EAAgB,EAAO,GAAS,CAAA,GAAU,CAAC,CAAA,CAAI,EACrE,EAAM,MAAM,CAAG,CAAA,CAAQ,CAAA,AAAW,KAAX,EAAgB,EAAQ,GAAU,CAAA,GAAS,CAAC,CAAA,CAAG,EAC/D,EAAG,EACZ,EAhCgB,EAChB,CAgCF,EAlOE,WAQF,SAA6B,CAAM,CAAE,CAAO,EAC1C,IAEI,EAEA,EAEA,EAEA,EAEA,EAEA,EAEA,EAEA,EAhBA,EAAQ,GAsBZ,KAAO,EAAE,EAAQ,EAAO,MAAM,EAE5B,GACE,AAAqB,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,EAChB,AAA0B,sBAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACrB,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,MAAM,CAKvB,CAAA,IAHA,EAAO,EAGA,KAEL,GACE,AAAoB,SAApB,CAAM,CAAC,EAAK,CAAC,EAAE,EACf,AAAyB,sBAAzB,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,IAAI,EACpB,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,EAErB,EAAQ,cAAc,CAAC,CAAM,CAAC,EAAK,CAAC,EAAE,EAAE,UAAU,CAAC,KACjD,EAAQ,cAAc,CAAC,CAAM,CAAC,EAAM,CAAC,EAAE,EAAE,UAAU,CAAC,GACtD,CAKA,GACG,AAAA,CAAA,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,MAAM,EAAI,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAI,AAAJ,GAC5C,AAAC,CAAA,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,AAAN,EAAU,GAChE,CACE,CAAA,AAAC,CAAA,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAA
G,CAAC,MAAM,CACzB,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAC5B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAC3B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,AAAN,EACzB,CAAA,EAGF,SAIF,EACE,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAG,GAC5D,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,CAAG,EAC1D,EACA,EACN,IAAM,EAAQ,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,EAC7C,EAAM,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,EACpD,EAAU,EAAO,CAAC,GAClB,EAAU,EAAK,GACf,EAAkB,CAChB,KAAM,EAAM,EAAI,iBAAmB,mBACnC,MAAA,EACA,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,CAC5C,EACA,EAAkB,CAChB,KAAM,EAAM,EAAI,iBAAmB,mBACnC,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,EAC/C,IAAA,CACF,EACA,EAAO,CACL,KAAM,EAAM,EAAI,aAAe,eAC/B,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,EAC5C,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAC/C,EACA,EAAQ,CACN,KAAM,EAAM,EAAI,SAAW,WAC3B,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,EAAgB,KAAK,EAC9C,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,EAAgB,GAAG,CAC5C,EACA,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,EAAgB,KAAK,EAC7D,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,EAAgB,GAAG,EAC9D,EAAa,EAAE,CAGX,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAC3D,CAAA,EAAa,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAY,CAC5B,CAAC,QAAS,CAAM,CAAC,EAAK,CAAC,EAAE,CAAE,EAAQ,CACnC,CAAC,OAAQ,CAAM,CAAC,EAAK,CAAC,EAAE,CAAE,EAAQ,CACnC,CAAA,EAIH,EAAa,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAY,CAC5B,CAAC,QAAS,EAAO,EAAQ,CACzB,CAAC,QAAS,EAAiB,EAAQ,CACnC,CAAC,OAAQ,EAAiB,EAAQ,CAClC,CAAC,QAAS,EAAM,EAAQ,CACzB,EAKD,EAAa,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EACX,EACA,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EACE,EAAQ,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,IAAI,CACzC,EAAO,KAAK,CAAC,EAAO,EAAG,GACvB,IAKJ,EAAa,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAY,CAC5B,CAAC,OAAQ,EAAM,EAAQ,CACvB,CAAC,QAAS,EAAiB,EAAQ,CACnC,CAAC,OAAQ,EAAiB,EAAQ,CAClC,CAAC,OAAQ,EAAO,EAAQ,CACzB,EAGG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,EAC7D,EAAS,EACT,EAAa,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAY,CAC5B,CAAC,QAAS,CAAM,CAAC,EAAM,CAAC,EAAE,CAAE,EAAQ,CACpC,CAAC,OAAQ,CAAM,CAAC,EAAM,CAAC,EAAE,CAAE,EAAQ,CACpC,GAED,EAAS,EAEX,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAQ,EAAO,EAAG,EAAQ,EAAO,EAAG,GAC3C,EAAQ,EAAO,EAAW,MAAM,CAAG,EAAS,EAC5C,KACF,CAAA,CAON,IADA,EAAQ,GACD,EAAE,EAAQ,EAAO,MAAM,EACE,sBAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACvB,CAAA,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAAG,MAF5B,EAKA,OAAO,CACT,CApKA,EA6OA,SAAS,EAAU,CAAK,CAAE,CAAM,EAC9B,EAAM,MAAM,EAAI,EAChB,EAAM,MAAM,EAAI,EAChB,EAAM,YAAY,EAAI,CACxB,C,G,E,Q,S,C,C,C,E,E,E,O,C,oB,I,GCnQC,IAAA,EAAA,EAAA,SAqBM,SAAS,EAAkB,CAAI,SACpC,AACE,AAAS,OAAT,GACA,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,IAC1B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,GAEX,EAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GACd,QAEX,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GC7BC,IAAA,EAAA,EAAA,SASM,IAAM,EAAW,CACtB,KAAM,WACN,SAOF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,EACxC,IAAI,EAAO,EACX,OAcA,SAAe,CAAI,EAMjB,OALA,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACb,EAAQ,KAAK,CAAC,oBACP,CACT,EAcA,SAAS,EAAK,CAAI,QAChB,AAAI,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GACT,GAEF,EAAW,EACpB,CAcA,SAAS,EAAmB,CAAI,SAE9B,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IAEjE,EAAO,EACA,AAiBX,SAAS,EAAyB,CAAI,SACpC,AAA
I,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAO,EACA,GAKN,AAAA,CAAA,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAA,GAChE,IAAS,IAET,EAAQ,OAAO,CAAC,GACT,IAET,EAAO,EACA,EAAW,GACpB,EAlCoC,IAE3B,EAAW,EACpB,CA2CA,SAAS,EAAU,CAAI,SACrB,AAAI,AAAS,KAAT,GACF,EAAQ,IAAI,CAAC,oBACb,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACb,EAAQ,IAAI,CAAC,YACN,GAIL,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GACvD,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAW,CAAI,SACtB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAiB,CAAI,EAC5B,MAAO,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,GAAQ,AAanC,SAAS,EAAW,CAAI,SACtB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAO,EACA,GAEL,AAAS,KAAT,GAEF,EAAQ,IAAI,CAAC,oBAAoB,IAAI,CAAG,gBACxC,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACb,EAAQ,IAAI,CAAC,YACN,GAEF,AAeT,SAAS,EAAW,CAAI,EAEtB,GAAI,AAAC,CAAA,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAA,GAAU,IAAS,GAAI,CAC3D,IAAM,EAAO,AAAS,KAAT,EAAc,EAAa,EAExC,OADA,EAAQ,OAAO,CAAC,GACT,CACT,CACA,OAAO,EAAI,EACb,EAvBoB,EACpB,EA7B8C,GAAQ,EAAI,EAC1D,CAmDF,CAzNA,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GCXC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAa,CACxB,KAAM,aACN,SAWF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC/C,IAAM,EAAO,IAAI,CACjB,OAYA,SAAe,CAAI,EACjB,GAAI,AAAS,KAAT,EAAa,CACf,IAAM,EAAQ,EAAK,cAAc,CAWjC,OAVK,EAAM,IAAI,GACb,EAAQ,KAAK,CAAC,aAAc,CAC1B,WAAY,CAAA,CACd,GACA,EAAM,IAAI,CAAG,CAAA,GAEf,EAAQ,KAAK,CAAC,oBACd,EAAQ,KAAK,CAAC,oBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,oBACN,CACT,CACA,OAAO,EAAI,EACb,EAYA,SAAS,EAAM,CAAI,QACjB,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,KAAK,CAAC,8BACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,8BACb,EAAQ,IAAI,CAAC,oBACN,IAET,EAAQ,IAAI,CAAC,oBACN,EAAG,GACZ,CACF,EA/DE,aAAc,CACZ,SA4EJ,SAAwC,CAAO,CAAE,CAAE,CAAE,CAAG,EACtD,IAAM,EAAO,IAAI,CACjB,OAeA,SAAmB,CAAI,QACrB,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GAGT,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EACA,aACA,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBACzC,KAAA,EACA,GACJ,GAEG,EAAW,EACpB,EAeA,SAAS,EAAW,CAAI,EACtB,OAAO,EAAQ,OAAO,CAAC,EAAY,EAAI,GAAK,EAC9C,CACF,CA5HE,EACA,KA8HF,SAAc,CAAO,EACnB,EAAQ,IAAI,CAAC,aACf,CA/HA,C,G,E,Q,S,C,C,C,E,E,E,O,C,kB,I,GCbC,IAAA,EAAA,EAAA,SAIM,IAAM,EAAkB,CAC7B,KAAM,kBACN,SAOF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC/C,OAYA,SAAe,CAAI,EAKjB,OAJA,EAAQ,KAAK,CAAC,mBACd,EAAQ,KAAK,CAAC,gBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,gBACN,CACT,EAYA,SAAS,EAAO,CAAI,QAElB,AAAI,AAAA,CAAA,EAAA,EAAA,gBAAe,AAAf,EAAiB,IACnB,EAAQ,KAAK,CAAC,wBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,wBACb,EAAQ,IAAI,CAAC,mBACN,GAEF,EAAI,EACb,CACF,CAhDA,C,G,E,Q,S,C,C,C,E,E,E,O,C,qB,I,GCNC,IAAA,EAAA,EAAA,S,E,E,SASM,IAAM,EAAqB,CAChC,KAAM,qBACN,SAOF,SAAoC,CAAO,CAAE,CAAE,CAAE,CAAG,MAI9C,EAEA,EALJ,IAAM,EAAO,IAAI,CACb,EAAO,EAKX,OAgBA,SAAe,CAAI,EAKjB,OAJA,EAAQ,KAAK,CAAC,sBACd,EAAQ,KAAK,CAAC,4BACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,4BACN,CACT,EAiBA,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,mCACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,mCACN,IAET,EAAQ,KAAK,CAAC,2BACd,EAAM,GACN,EAAO,EAAA,iBAAgB,CAChB,EAAM,GACf,CAcA,SAAS,EAAQ,CAAI,SACnB,AAAI,AAAS,KAAT,GAAe,AAAS,MAAT,GACjB,EAAQ,KAAK,CAAC,uCACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,uCACb,EAAQ,KAAK,CAAC,2BACd,EAAM,EACN,EAAO,EAAA,aAAY,CACZ,IAET,EAAQ,KAAK,CAAC,2BACd,EAAM,EACN,EAAO,EAAA,UAAS,CACT,EAAM,GACf,CAmBA,SAAS,EAAM,CAAI,EACjB,GAAI,AAAS,KAAT,GAAe,EAAM,CACvB,IAAM,EAAQ,EAAQ,IAAI,CAAC,kCAC3B,AACE,IAAS,EAAA,iBAAgB,EACxB,AAAA,CAAA,EAAA,EAAA,6BAA4B,AAA5B,EAA8B,EAAK,c
AAc,CAAC,KAOrD,EAAQ,KAAK,CAAC,4BACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,4BACb,EAAQ,IAAI,CAAC,sBACN,GATE,EAAI,EAUf,QACA,AAAI,EAAK,IAAS,IAAS,GACzB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CACF,CAtIA,C,G,E,Q,S,C,C,C,E,E,E,O,C,gC,I,G,I,E,E,SChBA,IAAM,EAAM,CAAC,EAAE,cAAc,CAatB,SAAS,EAA8B,CAAK,EACjD,MAAO,EAAA,EAAI,IAAI,CAAC,EAAA,iBAAgB,CAAG,IAAS,AAAA,EAAA,iBAAgB,AAAC,CAAC,EAAM,AACtE,C,G,E,Q,S,C,C,C,E,E,E,O,C,oB,I,GCZO,IAAM,EAAoB,CAC/B,MAAO,IACP,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,IAAK,IACL,IAAK,eACL,OAAQ,IACR,MAAO,IACP,MAAO,IACP,IAAK,IACL,MAAO,IACP,KAAM,eACN,cAAe,IACf,MAAO,IACP,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,UAAW,IACX,KAAM,IACN,OAAQ,IACR,IAAK,IACL,QAAS,IACT,WAAY,IACZ,KAAM,IACN,IAAK,eACL,KAAM,eACN,MAAO,IACP,KAAM,IACN,OAAQ,IACR,KAAM,IACN,KAAM,IACN,OAAQ,IACR,IAAK,IACL,qBAAsB,IACtB,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,QAAS,IACT,KAAM,IACN,QAAS,IACT,UAAW,IACX,IAAK,IACL,IAAK,IACL,UAAW,IACX,YAAa,IACb,WAAY,IACZ,YAAa,IACb,yBAA0B,IAC1B,sBAAuB,IACvB,gBAAiB,IACjB,MAAO,IACP,OAAQ,IACR,UAAW,IACX,OAAQ,IACR,gBAAiB,IACjB,KAAM,IACN,UAAW,IACX,gCAAiC,IACjC,MAAO,IACP,KAAM,eACN,IAAK,IACL,OAAQ,IACR,GAAI,IACJ,SAAU,IACV,KAAM,IACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,KAAM,IACN,MAAO,IACP,OAAQ,IACR,IAAK,IACL,IAAK,IACL,MAAO,IACP,IAAK,eACL,iBAAkB,IAClB,eAAgB,IAChB,uBAAwB,IACxB,iBAAkB,IAClB,iBAAkB,IAClB,QAAS,IACT,cAAe,IACf,KAAM,eACN,IAAK,IACL,OAAQ,IACR,SAAU,IACV,sBAAuB,IACvB,UAAW,IACX,gBAAiB,IACjB,gBAAiB,IACjB,qBAAsB,IACtB,cAAe,IACf,oBAAqB,IACrB,yBAA0B,IAC1B,qBAAsB,IACtB,iBAAkB,IAClB,eAAgB,IAChB,cAAe,IACf,kBAAmB,IACnB,kBAAmB,IACnB,UAAW,IACX,aAAc,IACd,iBAAkB,IAClB,UAAW,IACX,oBAAqB,IACrB,kBAAmB,IACnB,eAAgB,IAChB,kBAAmB,IACnB,mBAAoB,IACpB,gBAAiB,IACjB,mBAAoB,IACpB,QAAS,IACT,aAAc,IACd,UAAW,IACX,KAAM,eACN,OAAQ,IACR,IAAK,IACL,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,IAAK,IACL,KAAM,IACN,IAAK,eACL,OAAQ,IACR,QAAS,IACT,MAAO,IACP,iBAAkB,IAClB,qBAAsB,IACtB,MAAO,IACP,KAAM,eACN,QAAS,IACT,MAAO,IACP,WAAY,IACZ,YAAa,IACb,KAAM,IACN,KAAM,IACN,IAAK,IACL,KAAM,IACN,OAAQ,IACR,aAAc,IACd,IAAK,IACL,IAAK,eACL,kBAAmB,IACnB,sBAAuB,IACvB,KAAM,eACN,OAAQ,IACR,WAAY,IACZ,KAAM,IACN,KAAM,IACN,GAAI,IACJ,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,IAAK,IACL,KAAM,IACN,IAAK,eACL,GAAI,IACJ,KAAM,eACN,aAAc,IACd,iBAAkB,IAClB,iBAAkB,IAClB,eAAgB,IAChB,YAAa,IACb,kBAAmB,IACnB,aAAc,IACd,KAAM,eACN,GAAI,IACJ,OAAQ,IACR,MAAO,IACP,IAAK,IACL,MAAO,IACP,IAAK,IACL,aAAc,IACd,KAAM,IACN,eAAgB,IAChB,KAAM,IACN,OAAQ,IACR,aAAc,IACd,UAAW,IACX,KAAM,IACN,MAAO,IACP,KAAM,IACN,OAAQ,IACR,MAAO,IACP,IAAK,IACL,KAAM,IACN,IAAK,IACL,OAAQ,IACR,GAAI,IACJ,MAAO,IACP,WAAY,IACZ,QAAS,IACT,IAAK,IACL,SAAU,IACV,aAAc,IACd,eAAgB,IAChB,eAAgB,IAChB,MAAO,IACP,KAAM,eACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,KAAM,IACN,MAAO,IACP,IAAK,IACL,IAAK,eACL,KAAM,eACN,KAAM,eACN,OAAQ,IACR,MAAO,IACP,KAAM,IACN,KAAM,IACN,MAAO,IACP,OAAQ,IACR,IAAK,IACL,IAAK,eACL,KAAM,eACN,KAAM,eACN,KAAM,IACN,GAAI,IACJ,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,WAAY,IACZ,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,iBAAkB,IAClB,UAAW,IACX,aAAc,IACd,oBAAqB,IACrB,YAAa,IACb,kBAAmB,IACnB,kBAAmB,IACnB,eAAgB,IAChB,kBAAmB,IACnB,UAAW,IACX,eAAgB,IAChB,gBAAiB,IACjB,QAAS,IACT,aAAc,IACd,cAAe,IACf,aAAc,IACd,gBAAiB,IACjB,kBAAmB,IACnB,iBAAkB,IAClB,gBAAiB,IACjB,aAAc,IACd,gBAAiB,IACjB,WAAY,IACZ,cAAe,IACf,UAAW,IACX,eAAgB,IAChB,iBAAkB,IAClB,cAAe,IACf,YAAa,IACb,SAAU,IACV,eAAgB,IAChB,UAAW,IACX,IAAK,eACL,GAAI,IACJ,WAAY,IACZ,OAAQ,IACR,cAAe,IACf,mBAAoB,IACpB,eAAgB,IAChB,cAAe,IACf,mBAAoB,IACpB,eAAgB,IAChB,KAAM,eACN,eAAgB,IAChB,gBAAiB,IACjB,KAAM,IACN,IAAK,IACL,OAAQ,IACR,GAAI,IACJ,IAAK,IACL,IAAK,IACL,YAAa,IACb,UAAW,IACX,IAAK,eACL,UAAW,IACX,KAAM,eACN,KAAM,IACN,GAAI,IACJ,KAAM,IACN,OAAQ,IACR,OAA
Q,IACR,OAAQ,IACR,IAAK,IACL,oBAAqB,IACrB,mBAAoB,IACpB,kBAAmB,IACnB,sBAAuB,IACvB,qBAAsB,IACtB,eAAgB,IAChB,QAAS,KACT,IAAK,eACL,QAAS,IACT,iBAAkB,IAClB,KAAM,IACN,IAAK,IACL,aAAc,IACd,UAAW,IACX,qBAAsB,IACtB,WAAY,IACZ,SAAU,IACV,cAAe,KACf,UAAW,IACX,WAAY,IACZ,gBAAiB,IACjB,oBAAqB,KACrB,kBAAmB,KACnB,eAAgB,IAChB,qBAAsB,KACtB,gBAAiB,IACjB,gBAAiB,KACjB,aAAc,KACd,gBAAiB,IACjB,mBAAoB,KACpB,qBAAsB,IACtB,QAAS,IACT,aAAc,IACd,eAAgB,IAChB,YAAa,KACb,kBAAmB,KACnB,aAAc,IACd,wBAAyB,KACzB,kBAAmB,KACnB,YAAa,IACb,iBAAkB,KAClB,sBAAuB,IACvB,kBAAmB,IACnB,iBAAkB,IAClB,oBAAqB,KACrB,sBAAuB,IACvB,gBAAiB,KACjB,qBAAsB,IACtB,kBAAmB,KACnB,uBAAwB,IACxB,UAAW,KACX,eAAgB,IAChB,YAAa,IACb,iBAAkB,KAClB,sBAAuB,IACvB,iBAAkB,KAClB,YAAa,KACb,iBAAkB,IAClB,SAAU,IACV,cAAe,IACf,kBAAmB,IACnB,cAAe,IACf,eAAgB,IAChB,KAAM,eACN,OAAQ,IACR,GAAI,IACJ,MAAO,IACP,OAAQ,IACR,MAAO,IACP,IAAK,IACL,OAAQ,IACR,IAAK,eACL,OAAQ,IACR,MAAO,IACP,MAAO,IACP,QAAS,IACT,KAAM,eACN,qBAAsB,IACtB,eAAgB,IAChB,GAAI,IACJ,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,QAAS,IACT,UAAW,IACX,YAAa,IACb,gBAAiB,IACjB,SAAU,IACV,IAAK,IACL,IAAK,eACL,IAAK,IACL,GAAI,IACJ,UAAW,IACX,cAAe,IACf,KAAM,IACN,GAAI,IACJ,SAAU,IACV,cAAe,IACf,mBAAoB,IACpB,cAAe,IACf,MAAO,IACP,QAAS,IACT,WAAY,IACZ,aAAc,IACd,KAAM,eACN,IAAK,IACL,KAAM,IACN,IAAK,eACL,KAAM,IACN,KAAM,eACN,MAAO,IACP,IAAK,IACL,OAAQ,IACR,KAAM,IACN,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,GAAI,IACJ,eAAgB,IAChB,mBAAoB,IACpB,qBAAsB,IACtB,IAAK,IACL,IAAK,IACL,kBAAmB,IACnB,WAAY,IACZ,cAAe,IACf,oBAAqB,IACrB,aAAc,IACd,mBAAoB,IACpB,mBAAoB,IACpB,gBAAiB,IACjB,mBAAoB,IACpB,WAAY,IACZ,SAAU,IACV,cAAe,IACf,eAAgB,IAChB,cAAe,IACf,iBAAkB,IAClB,mBAAoB,IACpB,kBAAmB,IACnB,iBAAkB,IAClB,cAAe,IACf,iBAAkB,IAClB,YAAa,IACb,eAAgB,IAChB,WAAY,IACZ,KAAM,IACN,aAAc,IACd,YAAa,IACb,KAAM,IACN,IAAK,IACL,YAAa,IACb,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,GAAI,IACJ,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,IAAK,IACL,IAAK,eACL,eAAgB,IAChB,eAAgB,IAChB,gBAAiB,IACjB,aAAc,IACd,MAAO,IACP,YAAa,IACb,KAAM,eACN,KAAM,IACN,OAAQ,IACR,mBAAoB,IACpB,aAAc,IACd,kBAAmB,IACnB,eAAgB,IAChB,oBAAqB,IACrB,YAAa,IACb,KAAM,eACN,KAAM,IACN,IAAK,IACL,OAAQ,IACR,YAAa,IACb,SAAU,IACV,cAAe,IACf,mBAAoB,IACpB,cAAe,IACf,SAAU,IACV,IAAK,IACL,IAAK,IACL,SAAU,IACV,cAAe,IACf,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,IACN,IAAK,IACL,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,IAAK,eACL,UAAW,IACX,MAAO,IACP,WAAY,KACZ,UAAW,IACX,MAAO,IACP,WAAY,IACZ,eAAgB,IAChB,WAAY,IACZ,KAAM,eACN,UAAW,IACX,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,SAAU,IACV,MAAO,IACP,OAAQ,IACR,MAAO,IACP,IAAK,IACL,OAAQ,IACR,IAAK,eACL,OAAQ,IACR,MAAO,IACP,SAAU,IACV,WAAY,IACZ,aAAc,IACd,iBAAkB,IAClB,MAAO,IACP,UAAW,IACX,MAAO,IACP,KAAM,eACN,QAAS,IACT,WAAY,IACZ,iBAAkB,IAClB,YAAa,IACb,cAAe,IACf,MAAO,IACP,WAAY,IACZ,QAAS,IACT,YAAa,IACb,eAAgB,IAChB,gBAAiB,IACjB,KAAM,IACN,QAAS,IACT,MAAO,IACP,KAAM,eACN,OAAQ,IACR,KAAM,IACN,MAAO,IACP,KAAM,IACN,IAAK,IACL,MAAO,IACP,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,KAAM,IACN,YAAa,IACb,aAAc,IACd,kBAAmB,IACnB,cAAe,IACf,cAAe,IACf,IAAK,eACL,KAAM,eACN,KAAM,eACN,OAAQ,IACR,MAAO,IACP,MAAO,IACP,IAAK,eACL,KAAM,eACN,KAAM,eACN,IAAK,eACL,GAAI,IACJ,KAAM,eACN,KAAM,eACN,KAAM,IACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,IAAK,IACL,IAAK,eACL,KAAM,eACN,KAAM,eACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,KAAM,IACN,eAAgB,IAChB,KAAM,IACN,IAAK,IACL,KAAM,IACN,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,GAAI,IACJ,IAAK,KACL,IAAK,IACL,MAAO,IACP,MAAO,IACP,IAAK,IACL,MAAO,IACP,GAAI,IACJ,IAAK,eACL,OAAQ,IACR,QAAS,IACT,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,IAAK,IACL,IAAK,IACL,OAAQ,IACR,KAAM,IACN,SAAU,IACV,KAAM,IACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,OAAQ,IACR,SAAU,IACV,SAAU,IACV,SAAU,IACV,SAAU,IACV,SAAU,IACV,S
AAU,IACV,SAAU,IACV,SAAU,IACV,MAAO,IACP,QAAS,IACT,SAAU,IACV,OAAQ,IACR,MAAO,IACP,QAAS,IACT,MAAO,IACP,KAAM,eACN,GAAI,IACJ,IAAK,IACL,OAAQ,IACR,IAAK,IACL,KAAM,IACN,KAAM,IACN,OAAQ,IACR,SAAU,IACV,MAAO,IACP,KAAM,eACN,IAAK,IACL,MAAO,IACP,QAAS,IACT,OAAQ,IACR,KAAM,IACN,SAAU,IACV,MAAO,IACP,KAAM,IACN,SAAU,IACV,YAAa,IACb,UAAW,IACX,QAAS,IACT,UAAW,IACX,OAAQ,IACR,OAAQ,IACR,SAAU,IACV,KAAM,IACN,SAAU,IACV,MAAO,IACP,IAAK,IACL,MAAO,IACP,OAAQ,IACR,QAAS,IACT,QAAS,IACT,MAAO,IACP,OAAQ,IACR,KAAM,IACN,KAAM,IACN,QAAS,IACT,IAAK,eACL,OAAQ,IACR,QAAS,IACT,OAAQ,IACR,QAAS,IACT,SAAU,IACV,UAAW,IACX,SAAU,IACV,QAAS,IACT,gBAAiB,IACjB,cAAe,IACf,SAAU,IACV,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,aAAc,IACd,YAAa,IACb,cAAe,IACf,kBAAmB,IACnB,kBAAmB,IACnB,mBAAoB,IACpB,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,IAAK,KACL,QAAS,KACT,KAAM,IACN,KAAM,eACN,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,SAAU,IACV,QAAS,IACT,SAAU,IACV,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,KAAM,eACN,MAAO,IACP,KAAM,IACN,MAAO,IACP,KAAM,KACN,MAAO,IACP,SAAU,IACV,KAAM,IACN,OAAQ,IACR,KAAM,IACN,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,KACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,MAAO,IACP,QAAS,IACT,KAAM,IACN,MAAO,IACP,QAAS,IACT,KAAM,IACN,UAAW,IACX,IAAK,eACL,KAAM,IACN,MAAO,IACP,UAAW,IACX,IAAK,IACL,IAAK,IACL,KAAM,IACN,KAAM,IACN,OAAQ,IACR,gBAAiB,IACjB,iBAAkB,IAClB,SAAU,IACV,SAAU,IACV,WAAY,IACZ,YAAa,IACb,YAAa,IACb,KAAM,IACN,SAAU,IACV,OAAQ,IACR,QAAS,IACT,MAAO,IACP,SAAU,IACV,MAAO,IACP,OAAQ,IACR,QAAS,IACT,MAAO,IACP,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,WAAY,IACZ,UAAW,IACX,KAAM,IACN,QAAS,IACT,OAAQ,IACR,KAAM,eACN,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,MAAO,IACP,MAAO,IACP,KAAM,eACN,KAAM,IACN,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,QAAS,IACT,QAAS,IACT,MAAO,IACP,MAAO,IACP,OAAQ,IACR,QAAS,IACT,IAAK,IACL,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,KAAM,KACN,OAAQ,IACR,QAAS,IACT,YAAa,IACb,YAAa,IACb,SAAU,IACV,WAAY,IACZ,OAAQ,IACR,eAAgB,IAChB,gBAAiB,IACjB,MAAO,IACP,MAAO,IACP,SAAU,IACV,MAAO,IACP,OAAQ,IACR,KAAM,IACN,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,KAAM,IACN,MAAO,IACP,QAAS,IACT,MAAO,IACP,OAAQ,IACR,IAAK,IACL,GAAI,IACJ,QAAS,IACT,MAAO,IACP,QAAS,IACT,IAAK,IACL,MAAO,IACP,QAAS,IACT,OAAQ,IACR,IAAK,eACL,MAAO,IACP,MAAO,IACP,KAAM,IACN,QAAS,IACT,YAAa,IACb,MAAO,IACP,IAAK,IACL,QAAS,IACT,MAAO,IACP,IAAK,IACL,OAAQ,IACR,cAAe,IACf,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,eACN,IAAK,IACL,MAAO,IACP,SAAU,IACV,SAAU,IACV,QAAS,IACT,UAAW,IACX,eAAgB,IAChB,UAAW,IACX,eAAgB,IAChB,gBAAiB,IACjB,iBAAkB,IAClB,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,KAAM,eACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,QAAS,IACT,KAAM,IACN,SAAU,IACV,MAAO,IACP,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,OAAQ,IACR,IAAK,IACL,KAAM,IACN,GAAI,IACJ,MAAO,IACP,IAAK,eACL,GAAI,IACJ,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,GAAI,IACJ,SAAU,IACV,IAAK,IACL,IAAK,IACL,OAAQ,IACR,MAAO,IACP,MAAO,IACP,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,KAAM,eACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,KAAM,IACN,QAAS,IACT,MAAO,IACP,OAAQ,IACR,QAAS,IACT,MAAO,IACP,WAAY,IACZ,YAAa,IACb,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,QAAS,IACT,SAAU,IACV,MAAO,IACP,MAAO,IACP,KAA
M,IACN,MAAO,IACP,KAAM,IACN,IAAK,IACL,IAAK,IACL,KAAM,IACN,KAAM,IACN,KAAM,IACN,MAAO,IACP,YAAa,IACb,aAAc,IACd,cAAe,IACf,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,IAAK,eACL,MAAO,IACP,MAAO,KACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,KAAM,IACN,KAAM,eACN,OAAQ,IACR,KAAM,IACN,MAAO,IACP,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,MAAO,IACP,KAAM,eACN,GAAI,IACJ,IAAK,IACL,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,MAAO,IACP,IAAK,IACL,KAAM,IACN,GAAI,IACJ,IAAK,IACL,IAAK,IACL,KAAM,IACN,SAAU,IACV,IAAK,IACL,MAAO,IACP,OAAQ,IACR,QAAS,IACT,SAAU,IACV,KAAM,KACN,OAAQ,IACR,IAAK,eACL,GAAI,IACJ,IAAK,IACL,MAAO,IACP,KAAM,IACN,GAAI,IACJ,IAAK,IACL,IAAK,IACL,IAAK,IACL,IAAK,IACL,KAAM,IACN,SAAU,IACV,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,KAAM,eACN,MAAO,IACP,KAAM,IACN,KAAM,IACN,MAAO,IACP,MAAO,IACP,GAAI,IACJ,KAAM,IACN,MAAO,IACP,MAAO,IACP,OAAQ,IACR,QAAS,IACT,UAAW,IACX,OAAQ,IACR,OAAQ,IACR,UAAW,IACX,WAAY,IACZ,QAAS,IACT,OAAQ,IACR,UAAW,KACX,KAAM,KACN,KAAM,IACN,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,QAAS,IACT,MAAO,IACP,KAAM,IACN,MAAO,IACP,OAAQ,IACR,UAAW,IACX,OAAQ,IACR,OAAQ,IACR,IAAK,eACL,SAAU,IACV,SAAU,IACV,MAAO,IACP,OAAQ,IACR,cAAe,IACf,eAAgB,IAChB,KAAM,eACN,OAAQ,IACR,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,GAAI,IACJ,MAAO,IACP,IAAK,IACL,KAAM,IACN,MAAO,IACP,IAAK,IACL,IAAK,eACL,OAAQ,IACR,GAAI,IACJ,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,SAAU,IACV,SAAU,IACV,MAAO,IACP,KAAM,IACN,MAAO,IACP,GAAI,IACJ,OAAQ,IACR,MAAO,IACP,SAAU,IACV,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,SAAU,IACV,SAAU,IACV,SAAU,IACV,QAAS,IACT,KAAM,IACN,MAAO,IACP,KAAM,eACN,KAAM,IACN,MAAO,IACP,OAAQ,IACR,KAAM,eACN,KAAM,IACN,MAAO,IACP,QAAS,IACT,MAAO,IACP,OAAQ,IACR,MAAO,IACP,GAAI,IACJ,OAAQ,IACR,MAAO,IACP,KAAM,IACN,MAAO,IACP,IAAK,IACL,IAAK,eACL,MAAO,IACP,KAAM,eACN,KAAM,eACN,OAAQ,IACR,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,IAAK,eACL,OAAQ,IACR,KAAM,IACN,KAAM,IACN,KAAM,eACN,KAAM,eACN,MAAO,IACP,KAAM,IACN,OAAQ,IACR,MAAO,IACP,GAAI,IACJ,IAAK,IACL,KAAM,IACN,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,OAAQ,IACR,IAAK,IACL,MAAO,IACP,KAAM,IACN,MAAO,IACP,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,QAAS,IACT,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,KAAM,IACN,MAAO,KACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,QAAS,IACT,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,KAAM,IACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,OAAQ,IACR,QAAS,IACT,SAAU,IACV,KAAM,IACN,GAAI,IACJ,UAAW,IACX,cAAe,IACf,gBAAiB,IACjB,cAAe,IACf,eAAgB,IAChB,eAAgB,IAChB,gBAAiB,IACjB,kBAAmB,IACnB,oBAAqB,IACrB,eAAgB,IAChB,IAAK,IACL,IAAK,IACL,KAAM,IACN,SAAU,IACV,IAAK,IACL,MAAO,IACP,OAAQ,IACR,QAAS,IACT,SAAU,IACV,KAAM,KACN,OAAQ,IACR,WAAY,IACZ,QAAS,IACT,UAAW,IACX,WAAY,IACZ,QAAS,IACT,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,IAAK,eACL,GAAI,IACJ,IAAK,IACL,MAAO,IACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,KAAM,IACN,GAAI,IACJ,MAAO,IACP,SAAU,IACV,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,WAAY,IACZ,IAAK,IACL,KAAM,IACN,SAAU,IACV,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,cAAe,IACf,mBAAoB,IACpB,WAAY,IACZ,eAAgB,IAChB,cAAe,IACf,eAAgB,IAChB,MAAO,IACP,KAAM,eACN,OAAQ,IACR,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,QAAS,IACT,KAAM,IACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,SAAU,IACV,MAAO,IACP,OAAQ,IACR,IAAK,IACL,MAAO,IACP,OAAQ,IACR,KAAM,eACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,KAAM,IACN,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,GAAI,IACJ,KAAM,IACN,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,QAAS,IACT,OAAQ,IACR,KAAM,IACN,MAAO,IACP,MAAO,IACP,SAAU,IACV,QAAS,IACT,UAAW,KACX,K
AAM,KACN,MAAO,IACP,KAAM,IACN,KAAM,IACN,KAAM,IACN,QAAS,IACT,IAAK,IACL,OAAQ,IACR,WAAY,IACZ,WAAY,IACZ,SAAU,IACV,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,MAAO,IACP,cAAe,IACf,IAAK,eACL,IAAK,IACL,MAAO,IACP,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,QAAS,IACT,KAAM,IACN,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,KAAM,eACN,GAAI,IACJ,KAAM,eACN,OAAQ,IACR,GAAI,IACJ,SAAU,IACV,MAAO,IACP,IAAK,KACL,IAAK,KACL,KAAM,KACN,WAAY,IACZ,gBAAiB,IACjB,IAAK,KACL,IAAK,KACL,KAAM,KACN,YAAa,IACb,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,KAAM,KACN,IAAK,IACL,KAAM,KACN,MAAO,KACP,MAAO,IACP,QAAS,IACT,MAAO,IACP,QAAS,IACT,SAAU,IACV,KAAM,IACN,MAAO,KACP,OAAQ,KACR,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,SAAU,KACV,KAAM,IACN,IAAK,IACL,MAAO,IACP,GAAI,IACJ,MAAO,IACP,OAAQ,IACR,MAAO,IACP,QAAS,IACT,MAAO,KACP,OAAQ,IACR,OAAQ,IACR,MAAO,KACP,OAAQ,IACR,QAAS,IACT,IAAK,eACL,IAAK,KACL,IAAK,IACL,KAAM,IACN,MAAO,KACP,UAAW,KACX,KAAM,KACN,MAAO,IACP,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,GAAI,IACJ,IAAK,IACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,IAAK,KACL,MAAO,IACP,KAAM,IACN,IAAK,IACL,WAAY,IACZ,gBAAiB,IACjB,KAAM,IACN,MAAO,KACP,UAAW,KACX,KAAM,KACN,MAAO,IACP,MAAO,IACP,IAAK,IACL,MAAO,IACP,OAAQ,IACR,KAAM,IACN,KAAM,eACN,IAAK,IACL,MAAO,IACP,OAAQ,KACR,SAAU,KACV,QAAS,IACT,QAAS,IACT,QAAS,IACT,MAAO,IACP,QAAS,IACT,QAAS,IACT,QAAS,IACT,KAAM,IACN,UAAW,IACX,OAAQ,KACR,MAAO,KACP,QAAS,IACT,IAAK,IACL,OAAQ,IACR,KAAM,KACN,MAAO,IACP,QAAS,KACT,MAAO,IACP,MAAO,IACP,OAAQ,KACR,OAAQ,KACR,YAAa,IACb,MAAO,IACP,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,KAAM,KACN,KAAM,eACN,UAAW,IACX,eAAgB,IAChB,KAAM,IACN,MAAO,IACP,OAAQ,IACR,MAAO,IACP,MAAO,IACP,QAAS,IACT,QAAS,IACT,KAAM,IACN,MAAO,KACP,MAAO,IACP,QAAS,KACT,UAAW,IACX,WAAY,KACZ,MAAO,IACP,QAAS,KACT,KAAM,IACN,MAAO,KACP,MAAO,IACP,QAAS,KACT,UAAW,IACX,WAAY,KACZ,KAAM,IACN,OAAQ,IACR,KAAM,IACN,cAAe,IACf,gBAAiB,IACjB,eAAgB,IAChB,iBAAkB,IAClB,GAAI,IACJ,IAAK,IACL,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,KAAM,KACN,OAAQ,IACR,KAAM,KACN,KAAM,KACN,QAAS,IACT,OAAQ,IACR,KAAM,KACN,KAAM,KACN,QAAS,KACT,OAAQ,IACR,QAAS,KACT,MAAO,KACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,QAAS,IACT,OAAQ,IACR,GAAI,IACJ,OAAQ,IACR,KAAM,IACN,KAAM,IACN,MAAO,IACP,IAAK,IACL,MAAO,IACP,OAAQ,IACR,KAAM,IACN,KAAM,IACN,OAAQ,IACR,MAAO,IACP,MAAO,IACP,IAAK,eACL,KAAM,IACN,OAAQ,IACR,IAAK,IACL,MAAO,IACP,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,QAAS,IACT,MAAO,IACP,IAAK,IACL,MAAO,IACP,MAAO,IACP,QAAS,IACT,KAAM,IACN,OAAQ,IACR,KAAM,eACN,KAAM,IACN,MAAO,IACP,MAAO,IACP,GAAI,IACJ,MAAO,IACP,IAAK,IACL,MAAO,IACP,QAAS,IACT,KAAM,IACN,KAAM,IACN,OAAQ,IACR,KAAM,IACN,QAAS,IACT,IAAK,IACL,KAAM,IACN,OAAQ,IACR,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,SAAU,IACV,KAAM,IACN,MAAO,IACP,IAAK,IACL,KAAM,IACN,SAAU,IACV,OAAQ,IACR,MAAO,IACP,KAAM,IACN,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,QAAS,IACT,IAAK,eACL,IAAK,IACL,KAAM,IACN,OAAQ,IACR,MAAO,IACP,GAAI,IACJ,UAAW,IACX,IAAK,IACL,OAAQ,IACR,QAAS,IACT,OAAQ,IACR,KAAM,IACN,SAAU,IACV,MAAO,IACP,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,QAAS,IACT,QAAS,IACT,GAAI,IACJ,SAAU,IACV,KAAM,eACN,MAAO,IACP,GAAI,IACJ,IAAK,IACL,KAAM,IACN,MAAO,IACP,IAAK,IACL,KAAM,IACN,WAAY,IACZ,YAAa,IACb,OAAQ,IACR,YAAa,IACb,SAAU,IACV,SAAU,IACV,QAAS,IACT,MAAO,IACP,OAAQ,IACR,KAAM,IACN,MAAO,IACP,OAAQ,IACR,KAAM,IACN,SAAU,IACV,SAAU,IACV,SAAU,IACV,KAAM,IACN,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,KAAM,eACN,IAAK,IACL,OAAQ,IACR,IAAK,eACL,KAAM,IACN,KAAM,eACN,OAAQ,IACR,KAAM,eACN,YAAa,IACb,QAAS,IACT,MAAO,IACP,QAAS,IACT,KAAM,IACN,MAAO,IACP,KAAM,IACN,OAAQ,IACR,MAAO,IACP,KAAM,IACN,KAAM,KACN,OAAQ,IACR,MAAO,IACP,SAAU,IACV,KAAM,IACN,MAAO,IACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,KAAM,IACN,OAAQ,IACR,MAAO,IACP,QAAS,IACT,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,
OAAQ,IACR,QAAS,IACT,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,MAAO,IACP,UAAW,IACX,MAAO,IACP,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,QAAS,IACT,QAAS,IACT,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,KAAM,IACN,IAAK,IACL,KAAM,IACN,QAAS,IACT,MAAO,IACP,OAAQ,IACR,KAAM,IACN,KAAM,IACN,QAAS,IACT,SAAU,IACV,MAAO,IACP,KAAM,IACN,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,IAAK,eACL,MAAO,IACP,MAAO,IACP,OAAQ,IACR,IAAK,IACL,KAAM,IACN,WAAY,IACZ,eAAgB,IAChB,iBAAkB,IAClB,eAAgB,IAChB,gBAAiB,IACjB,kBAAmB,IACnB,iBAAkB,IAClB,gBAAiB,IACjB,gBAAiB,IACjB,KAAM,IACN,aAAc,IACd,MAAO,IACP,MAAO,IACP,IAAK,IACL,OAAQ,IACR,WAAY,IACZ,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,MAAO,IACP,KAAM,eACN,OAAQ,IACR,QAAS,IACT,KAAM,IACN,OAAQ,IACR,SAAU,IACV,MAAO,IACP,OAAQ,IACR,KAAM,eACN,IAAK,IACL,KAAM,IACN,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,MAAO,IACP,SAAU,IACV,QAAS,IACT,GAAI,IACJ,OAAQ,IACR,MAAO,IACP,GAAI,IACJ,IAAK,IACL,KAAM,IACN,OAAQ,IACR,MAAO,IACP,IAAK,IACL,OAAQ,IACR,MAAO,IACP,KAAM,IACN,MAAO,IACP,OAAQ,IACR,SAAU,IACV,MAAO,IACP,IAAK,IACL,KAAM,IACN,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,MAAO,IACP,QAAS,IACT,KAAM,IACN,KAAM,IACN,OAAQ,IACR,SAAU,IACV,MAAO,IACP,KAAM,IACN,IAAK,eACL,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,KAAM,IACN,SAAU,IACV,cAAe,IACf,IAAK,IACL,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,OAAQ,IACR,KAAM,IACN,MAAO,IACP,KAAM,IACN,MAAO,IACP,KAAM,IACN,MAAO,IACP,MAAO,IACP,QAAS,IACT,QAAS,IACT,MAAO,IACP,cAAe,IACf,OAAQ,IACR,SAAU,IACV,KAAM,IACN,MAAO,IACP,IAAK,IACL,KAAM,IACN,MAAO,KACP,OAAQ,IACR,IAAK,IACL,KAAM,IACN,OAAQ,IACR,KAAM,eACN,OAAQ,IACR,UAAW,IACX,KAAM,IACN,MAAO,IACP,OAAQ,KACR,MAAO,IACP,OAAQ,KACR,MAAO,IACP,OAAQ,IACR,SAAU,IACV,WAAY,IACZ,MAAO,IACP,OAAQ,IACR,SAAU,IACV,WAAY,IACZ,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,gBAAiB,IACjB,YAAa,IACb,MAAO,IACP,IAAK,IACL,KAAM,IACN,OAAQ,IACR,KAAM,IACN,QAAS,IACT,QAAS,IACT,MAAO,IACP,MAAO,IACP,QAAS,IACT,QAAS,IACT,OAAQ,IACR,SAAU,IACV,UAAW,IACX,UAAW,IACX,WAAY,IACZ,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,WAAY,IACZ,YAAa,IACb,OAAQ,IACR,YAAa,IACb,SAAU,IACV,SAAU,IACV,QAAS,IACT,IAAK,IACL,KAAM,IACN,KAAM,IACN,KAAM,IACN,KAAM,IACN,IAAK,IACL,KAAM,IACN,OAAQ,IACR,QAAS,IACT,KAAM,IACN,QAAS,IACT,QAAS,IACT,QAAS,IACT,QAAS,IACT,QAAS,IACT,MAAO,IACP,MAAO,IACP,QAAS,IACT,OAAQ,IACR,SAAU,IACV,UAAW,IACX,UAAW,IACX,WAAY,IACZ,OAAQ,IACR,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,MAAO,IACP,QAAS,IACT,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,IAAK,IACL,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,KAAM,IACN,OAAQ,IACR,IAAK,eACL,OAAQ,IACR,UAAW,IACX,MAAO,IACP,SAAU,IACV,OAAQ,IACR,YAAa,IACb,SAAU,IACV,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,KAAM,IACN,KAAM,IACN,IAAK,IACL,OAAQ,IACR,OAAQ,IACR,KAAM,eACN,QAAS,IACT,KAAM,IACN,OAAQ,IACR,MAAO,IACP,SAAU,IACV,aAAc,IACd,aAAc,IACd,eAAgB,IAChB,UAAW,IACX,cAAe,IACf,gBAAiB,IACjB,OAAQ,IACR,KAAM,IACN,SAAU,IACV,QAAS,IACT,MAAO,IACP,QAAS,IACT,SAAU,IACV,KAAM,eACN,KAAM,IACN,MAAO,IACP,OAAQ,IACR,MAAO,IACP,iBAAkB,IAClB,kBAAmB,IACnB,KAAM,IACN,KAAM,IACN,OAAQ,IACR,KAAM,IACN,MAAO,IACP,OAAQ,IACR,MAAO,IACP,IAAK,IACL,MAAO,IACP,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,IAAK,eACL,OAAQ,IACR,MAAO,IACP,MAAO,IACP,MAAO,IACP,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,MAAO,IACP,MAAO,IACP,IAAK,IACL,MAAO,IACP,KAAM,eACN,QAAS,IACT,YAAa,IACb,cAAe,IACf,eAAgB,IAChB,MAAO,IACP,KAAM,IACN,MAAO,IACP,QAAS,IACT,WAAY,IACZ,OAAQ,IACR,SAAU,IACV,OAAQ,IACR,MAAO,IACP,MAAO,IACP,KAAM,eACN,MAAO,IACP,OAAQ,IACR,KAAM,IACN,MAAO,IACP,MAAO,IACP,KAAM,IACN,QAAS,IACT,KAAM,IACN,KAAM,IACN,MAAO,IACP,MAAO,IACP,OAAQ,IACR,WAAY,IACZ,SAAU,IACV,WAAY,IACZ,OAAQ,IACR,MAAO,IACP,UAAW,IACX,KAAM,IACN,OAAQ,IACR,SAAU,IACV,aAAc,KACd,cAAe,KACf,aAAc,KA
Cd,cAAe,KACf,SAAU,IACV,gBAAiB,IACjB,iBAAkB,IAClB,IAAK,IACL,MAAO,IACP,IAAK,IACL,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,IAAK,eACL,MAAO,IACP,MAAO,KACP,MAAO,KACP,KAAM,eACN,MAAO,IACP,MAAO,IACP,KAAM,eACN,OAAQ,KACR,OAAQ,KACR,OAAQ,KACR,OAAQ,KACR,QAAS,IACT,MAAO,IACP,OAAQ,IACR,MAAO,IACP,OAAQ,IACR,OAAQ,IACR,IAAK,eACL,KAAM,eACN,GAAI,IACJ,GAAI,IACJ,OAAQ,IACR,KAAM,eACN,KAAM,IACN,MAAO,IACP,KAAM,IACN,MAAO,IACP,IAAK,eACL,MAAO,IACP,MAAO,IACP,GAAI,IACJ,MAAO,IACP,MAAO,IACP,KAAM,IACN,KAAM,IACN,MAAO,IACP,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,MAAO,IACP,KAAM,eACN,OAAQ,IACR,OAAQ,IACR,MAAO,IACP,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,KAAM,IACN,MAAO,IACP,IAAK,IACL,IAAK,IACL,IAAK,eACL,KAAM,IACN,KAAM,eACN,KAAM,eACN,KAAM,IACN,KAAM,IACN,OAAQ,IACR,OAAQ,IACR,IAAK,IACL,KAAM,IACN,OAAQ,IACR,KAAM,IACN,IAAK,eACL,KAAM,IACN,QAAS,IACT,KAAM,eACN,KAAM,eACN,IAAK,IACL,KAAM,GACR,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GC7kEC,IAAA,EAAA,EAAA,S,E,E,SAKD,IAAM,EAAsB,CAC1B,SAwbF,SAAqC,CAAO,CAAE,CAAE,CAAE,CAAG,EACnD,IAAM,EAAO,IAAI,CACjB,OAOA,SAAe,CAAI,SACjB,AAAI,AAAS,OAAT,EACK,EAAI,IAEb,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,EACT,EAOA,SAAS,EAAU,CAAI,EACrB,OAAO,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,CAAG,EAAI,GAAQ,EAAG,EAC5D,CACF,EAldE,QAAS,CAAA,CACX,EAGa,EAAa,CACxB,KAAM,aACN,SAQF,SAA4B,CAAO,CAAE,CAAE,CAAE,CAAG,MAUtC,EATJ,IAAM,EAAO,IAAI,CAEX,EAAa,CACjB,SA+SF,SAA4B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC1C,IAAI,EAAO,EACX,OAOA,SAAqB,CAAI,EAIvB,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,CACT,EAcA,SAAS,EAAM,CAAI,EAKjB,OADA,EAAQ,KAAK,CAAC,mBACP,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACE,EACA,EACA,aACA,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBACzC,KAAA,EACA,GACJ,GACF,EAAoB,EAC1B,CAcA,SAAS,EAAoB,CAAI,SAC/B,AAAI,IAAS,GACX,EAAQ,KAAK,CAAC,2BACP,AAiBX,SAAS,EAAc,CAAI,SACzB,AAAI,IAAS,GACX,IACA,EAAQ,OAAO,CAAC,GACT,GAEL,GAAQ,GACV,EAAQ,IAAI,CAAC,2BACN,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAoB,cAAc,GACxD,EAAmB,IAElB,EAAI,EACb,EA9ByB,IAEhB,EAAI,EACb,CAyCA,SAAS,EAAmB,CAAI,SAC9B,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,mBACN,EAAG,IAEL,EAAI,EACb,CACF,EA7ZE,QAAS,CAAA,CACX,EACI,EAAgB,EAChB,EAAW,EAGf,OAcA,SAAe,CAAI,EAEjB,OAAO,AAeT,SAA4B,CAAI,EAC9B,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAShD,OARA,EACE,GAAQ,AAAiB,eAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,CAChB,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAAM,MAAM,CAC5C,EACN,EAAS,EACT,EAAQ,KAAK,CAAC,cACd,EAAQ,KAAK,CAAC,mBACd,EAAQ,KAAK,CAAC,2BACP,AAeT,SAAS,EAAa,CAAI,SACxB,AAAI,IAAS,GACX,IACA,EAAQ,OAAO,CAAC,GACT,GAEL,EAAW,EACN,EAAI,IAEb,EAAQ,IAAI,CAAC,2BACN,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAY,cAAc,GAChD,EAAW,GACjB,EA5BsB,EACtB,EA1B4B,EAC5B,EAkEA,SAAS,EAAW,CAAI,SACtB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,mBACN,EAAK,SAAS,CACjB,EAAG,GACH,EAAQ,KAAK,CAAC,EAAqB,EAAgB,GAAO,KAEhE,EAAQ,KAAK,CAAC,uBACd,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,AAeT,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,uBACN,EAAW,IAEhB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,uBACN,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAY,cAAc,IAErD,AAAS,KAAT,GAAe,IAAS,EACnB,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,EACT,EA/Bc,GACd,CA4CA,SAAS,EAAW,CAAI,SACtB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAC/B,EAAW,IAEpB,EAAQ,KAAK,CAAC,uBACd,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,AA
eT,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,uBACN,EAAW,IAEhB,AAAS,KAAT,GAAe,IAAS,EACnB,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,EACT,EA1Bc,GACd,CAwCA,SAAS,EAAe,CAAI,EAC1B,OAAO,EAAQ,OAAO,CAAC,EAAY,EAAO,GAAe,EAC3D,CAcA,SAAS,EAAc,CAAI,EAIzB,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,CACT,CAcA,SAAS,EAAa,CAAI,EACxB,OAAO,EAAgB,GAAK,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACtC,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACE,EACA,EACA,aACA,EAAgB,GAChB,GACF,EAAmB,EACzB,CAcA,SAAS,EAAmB,CAAI,SAC9B,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAC/B,EAAQ,KAAK,CAAC,EAAqB,EAAgB,GAAO,IAEnE,EAAQ,KAAK,CAAC,iBACP,AAeT,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,iBACN,EAAmB,KAE5B,EAAQ,OAAO,CAAC,GACT,EACT,EAtBsB,GACtB,CAmCA,SAAS,EAAM,CAAI,EAEjB,OADA,EAAQ,IAAI,CAAC,cACN,EAAG,EACZ,CAsHF,EA1aE,SAAU,CAAA,CACZ,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,GChBC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAe,CAC1B,KAAM,eACN,SAaF,SAA8B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC5C,IAAM,EAAO,IAAI,CACjB,OAgBA,SAAe,CAAI,EAMjB,OAHA,EAAQ,KAAK,CAAC,gBAGP,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAa,aAAc,GAAO,EACjE,EAYA,SAAS,EAAY,CAAI,EACvB,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAChD,OAAO,GACL,AAAiB,eAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,EACZ,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAAM,MAAM,EAAI,EAC9C,AAcN,SAAS,EAAQ,CAAI,SACnB,AAAI,AAAS,OAAT,EACK,EAAM,GAEX,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GACd,EAAQ,OAAO,CAAC,EAAc,EAAS,GAAO,IAEvD,EAAQ,KAAK,CAAC,iBACP,AAaT,SAAS,EAAO,CAAI,SAClB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,iBACN,EAAQ,KAEjB,EAAQ,OAAO,CAAC,GACT,EACT,EApBgB,GAChB,EAvBc,GACR,EAAI,EACV,CA2CA,SAAS,EAAM,CAAI,EAKjB,OAJA,EAAQ,IAAI,CAAC,gBAIN,EAAG,EACZ,CACF,CA1GA,EAGM,EAAe,CACnB,SA4GF,SAA8B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC5C,IAAM,EAAO,IAAI,CACjB,OAAO,EAaP,SAAS,EAAa,CAAI,SAGxB,AAAI,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,CAC5B,EAAI,GAET,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,GASF,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAa,aAAc,GAAO,EACjE,CAYA,SAAS,EAAY,CAAI,EACvB,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAChD,OAAO,GACL,AAAiB,eAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,EACZ,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAAM,MAAM,EAAI,EAC9C,EAAG,GACH,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GACnB,EAAa,GACb,EAAI,EACV,CACF,EApKE,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GCXC,IAAA,EAAA,EAAA,SAIM,IAAM,EAAW,CACtB,KAAM,WACN,SA8EF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,EAExC,IAEI,EAEA,EAJA,EAAW,EAKf,OAcA,SAAe,CAAI,EAGjB,OAFA,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,oBACP,AAaT,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,IACO,IAET,EAAQ,IAAI,CAAC,oBACN,EAAQ,GACjB,EArBsB,EACtB,EAgCA,SAAS,EAAQ,CAAI,SAEnB,AAAI,AAAS,OAAT,EACK,EAAI,GAMT,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,SACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,SACN,GAIL,AAAS,KAAT,GACF,EAAQ,EAAQ,KAAK,CAAC,oBACtB,EAAO,EACA,AAgDX,SAAS,EAAc,CAAI,SAEzB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,IACO,GAIL,IAAS,GACX,EAAQ,IAAI,CAAC,oBACb,EAAQ,IAAI,CAAC,YACN,EAAG,KAIZ,EAAM,IAAI,CAAG,eACN,EAAK,GACd,EAlEyB,IAEnB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,IAIT,EAAQ,KAAK,CAAC,gBACP,EAAK,GACd,CAYA,SAAS,EAAK,CAAI,SAChB,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IAEnB,EAAQ,IAAI,CAAC,gBACN,EAAQ,KAEjB,EAAQ,OAAO,CAAC,GACT,EACT,CA+BF,EA7NE,QAMF,SAAy
B,CAAM,EAC7B,IAGI,EAEA,EALA,EAAgB,EAAO,MAAM,CAAG,EAChC,EAAiB,EAOrB,GACG,AAAA,CAAA,AAAmC,eAAnC,CAAM,CARY,EAQI,CAAC,EAAE,CAAC,IAAI,EAC7B,AAAmC,UAAnC,CAAM,CAAC,EAAe,CAAC,EAAE,CAAC,IAAI,AAAK,GACpC,CAAA,AAAkC,eAAlC,CAAM,CAAC,EAAc,CAAC,EAAE,CAAC,IAAI,EAC5B,AAAkC,UAAlC,CAAM,CAAC,EAAc,CAAC,EAAE,CAAC,IAAI,AAAK,EAKpC,CAAA,IAHA,EAAQ,EAGD,EAAE,EAAQ,GACf,GAAI,AAA0B,iBAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAAqB,CAE5C,CAAM,CAAC,EAAe,CAAC,EAAE,CAAC,IAAI,CAAG,kBACjC,CAAM,CAAC,EAAc,CAAC,EAAE,CAAC,IAAI,CAAG,kBAChC,GAAkB,EAClB,GAAiB,EACjB,KACF,CAAA,CAOJ,IAFA,EAAQ,EAAiB,EACzB,IACO,EAAE,GAAS,GACZ,AAAU,KAAA,IAAV,EACE,IAAU,GAAiB,AAA0B,eAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EAClD,CAAA,EAAQ,CADV,EAIA,CAAA,IAAU,GACV,AAA0B,eAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,AAAK,IAE1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAAG,eACpB,IAAU,EAAQ,IACpB,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAAG,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,GAAG,CAC/C,EAAO,MAAM,CAAC,EAAQ,EAAG,EAAQ,EAAQ,GACzC,GAAiB,EAAQ,EAAQ,EACjC,EAAQ,EAAQ,GAElB,EAAQ,KAAA,GAGZ,OAAO,CACT,EA1DE,SAgEF,SAAkB,CAAI,EAEpB,OACE,AAAS,KAAT,GACA,AAAgD,oBAAhD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAG,EAAE,CAAC,EAAE,CAAC,IAAI,AAE/C,CArEA,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GCZC,IAAA,EAAA,EAAA,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,SAcM,IAAM,EAAa,CACxB,KAAM,aACN,SAaF,SAA4B,CAAO,CAAE,CAAE,CAAE,CAAG,MAGtC,EAFJ,IAAM,EAAO,IAAI,CAGjB,OAYA,SAAe,CAAI,EAKjB,OADA,EAAQ,KAAK,CAAC,cAiBP,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,IAAI,CACtB,EACA,EACA,EAEA,EACA,kBACA,wBACA,yBAxBY,EAChB,EAqCA,SAAS,EAAW,CAAI,QAItB,CAHA,EAAa,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EACX,EAAK,cAAc,CAAC,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAAC,EAAE,EAAE,KAAK,CAAC,EAAG,KAEnE,AAAS,KAAT,IACF,EAAQ,KAAK,CAAC,oBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,oBACN,GAEF,EAAI,EACb,CAYA,SAAS,EAAY,CAAI,EAEvB,MAAO,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC7B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAS,GAAmB,GAC9C,EAAkB,EACxB,CAYA,SAAS,EAAkB,CAAI,EAC7B,MAAO,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EACL,EACA,EAEA,EACA,wBACA,+BACA,qCACA,2BACA,+BACA,EACJ,CAYA,SAAS,EAAiB,CAAI,EAC5B,OAAO,EAAQ,OAAO,CAAC,EAAa,EAAO,GAAO,EACpD,CAcA,SAAS,EAAM,CAAI,EACjB,MAAO,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAiB,cAAc,GACrD,EAAgB,EACtB,CAcA,SAAS,EAAgB,CAAI,SAC3B,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,cAKb,EAAK,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,GAKlB,EAAG,IAEL,EAAI,EACb,CACF,CAzLA,EAGM,EAAc,CAClB,SA2LF,SAA6B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC3C,OAcA,SAAqB,CAAI,EACvB,MAAO,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC7B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAS,GAAc,GACzC,EAAI,EACV,EAaA,SAAS,EAAa,CAAI,EACxB,MAAO,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EACA,EACA,kBACA,wBACA,yBACA,EACJ,CAYA,SAAS,EAAW,CAAI,EACtB,MAAO,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAA8B,cAAc,GAClE,EAA6B,EACnC,CAYA,SAAS,EAA6B,CAAI,EACxC,OAAO,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAAQ,EAAG,GAAQ,EAAI,EACpE,CACF,EAlQE,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,qB,I,GCxBC,IAAA,EAAA,EAAA,SA4CM,SAAS,EACd,CAAO,CACP,CAAE,CACF,CAAG,CACH,CAAI,CACJ,CAAW,CACX,CAAiB,CACjB,CAAO,CACP,CAAU,CACV,CAAG,EAEH,IAAM,EAAQ,GAAO,OAAO,iBAAiB,CACzC,EAAU,EACd,OAcA,SAAe,CAAI,SACjB,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACN,GAIL,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GACvD,EAAI,IAEb,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,EAAI,GACb,EAYA,SAAS,
EAAe,CAAI,SAC1B,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACN,IAET,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,EAAS,GAClB,CAYA,SAAS,EAAS,CAAI,SACpB,AAAI,AAAS,KAAT,GACF,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,GACN,EAAe,IAEpB,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAC9C,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,AAAS,KAAT,EAAc,EAAiB,EACxC,CAYA,SAAS,EAAe,CAAI,SAC1B,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAChC,EAAQ,OAAO,CAAC,GACT,GAEF,EAAS,EAClB,CAYA,SAAS,EAAI,CAAI,QACf,AACE,CAAC,GACA,CAAA,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,EAAA,GAE3D,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACN,EAAG,IAER,EAAU,GAAS,AAAS,KAAT,GACrB,EAAQ,OAAO,CAAC,GAChB,IACO,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,IACO,GAML,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,GACvD,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,AAAS,KAAT,EAAc,EAAY,EACnC,CAYA,SAAS,EAAU,CAAI,SACrB,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAChC,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,GC3NC,IAAA,EAAA,EAAA,SAkCM,SAAS,EAAa,CAAO,CAAE,CAAE,CAAE,CAAG,CAAE,CAAI,CAAE,CAAU,CAAE,CAAU,MAIrE,EAHJ,IAAM,EAAO,IAAI,CACb,EAAO,EAGX,OAYA,SAAe,CAAI,EAMjB,OALA,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACb,EAAQ,KAAK,CAAC,GACP,CACT,EAYA,SAAS,EAAQ,CAAI,SACnB,AACE,EAAO,KACP,AAAS,OAAT,GACA,AAAS,KAAT,GACC,AAAS,KAAT,GAAe,CAAC,GAMhB,AAAS,KAAT,GACC,CAAC,GACD,2BAA4B,EAAK,MAAM,CAAC,UAAU,CAE7C,EAAI,GAET,AAAS,KAAT,GACF,EAAQ,IAAI,CAAC,GACb,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACN,GAIL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,IAET,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,EAAY,GACrB,CAYA,SAAS,EAAY,CAAI,SACvB,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACnB,IAAS,KAET,EAAQ,IAAI,CAAC,eACN,EAAQ,KAEjB,EAAQ,OAAO,CAAC,GACX,GAAM,CAAA,EAAO,CAAC,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,EAAjC,EACO,AAAS,KAAT,EAAc,EAAc,EACrC,CAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAChC,EAAQ,OAAO,CAAC,GAChB,IACO,GAEF,EAAY,EACrB,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,GCzJC,IAAA,EAAA,EAAA,S,E,E,SAmCM,SAAS,EAAa,CAAO,CAAE,CAAE,CAAE,CAAG,CAAE,CAAI,CAAE,CAAU,CAAE,CAAU,EAEzE,IAAI,EACJ,OAYA,SAAe,CAAI,SACjB,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,GAChC,EAAQ,KAAK,CAAC,GACd,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACb,EAAS,AAAS,KAAT,EAAc,GAAK,EACrB,GAEF,EAAI,EACb,EAcA,SAAS,EAAM,CAAI,SACjB,AAAI,IAAS,GACX,EAAQ,KAAK,CAAC,GACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,GACb,EAAQ,IAAI,CAAC,GACN,IAET,EAAQ,KAAK,CAAC,GACP,EAAQ,GACjB,CAYA,SAAS,EAAQ,CAAI,SACnB,AAAI,IAAS,GACX,EAAQ,IAAI,CAAC,GACN,EAAM,IAEX,AAAS,OAAT,EACK,EAAI,GAIT,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IAErB,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAS,gBAExC,EAAQ,KAAK,CAAC,cAAe,CAC3B,YAAa,QACf,GACO,EAAO,GAChB,CAOA,SAAS,EAAO,CAAI,SAClB,AAAI,IAAS,GAAU,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACzD,EAAQ,IAAI,CAAC,eACN,EAAQ,KAEjB,EAAQ,OAAO,CAAC,GACT,AAAS,KAAT,EAAc,EAAS,EAChC,CAYA,SAAS,EAAO,CAAI,SAClB,AAAI,IAAS,GAAU,AAAS,KAAT,GACrB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAO,EAChB,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,oB,I,GCxJC,IAAA,EAAA,EAAA,S,E,E,SAsBM,SAAS,EAAkB,CAAO,CAAE,CAAE,EAE3C,IAAI,EACJ,OAGA,SAAS,EAAM,CAAI,QACjB,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,KAAK,C
AAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACb,EAAO,CAAA,EACA,GAEL,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACT,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EACA,EAAO,aAAe,cACtB,GAEG,EAAG,EACZ,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,sB,I,GC7BO,SAAS,EAAoB,CAAK,EACvC,OACE,EAEG,OAAO,CAAC,cAAe,KAEvB,OAAO,CAAC,SAAU,IAOlB,WAAW,GACX,WAAW,EAElB,C,G,E,Q,S,C,C,C,E,E,E,O,C,kB,I,GC9BC,IAAA,EAAA,EAAA,SAIM,IAAM,EAAkB,CAC7B,KAAM,kBACN,SAOF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC/C,OAaA,SAAe,CAAI,EAGjB,OAFA,EAAQ,KAAK,CAAC,mBACd,EAAQ,OAAO,CAAC,GACT,CACT,EAaA,SAAS,EAAM,CAAI,QACjB,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,IAAI,CAAC,mBACN,EAAG,IAEL,EAAI,EACb,CACF,CA5CA,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GCLC,IAAA,EAAA,EAAA,S,E,E,S,E,E,SAUM,IAAM,EAAa,CACxB,KAAM,aACN,SA2DF,SAA4B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC1C,IAAI,EAAO,EACX,OAYA,SAAe,CAAI,EAGjB,OADA,EAAQ,KAAK,CAAC,cAed,EAAQ,KAAK,CAAC,sBACP,AAaT,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,KAAT,GAAe,IAAS,GAC1B,EAAQ,OAAO,CAAC,GACT,GAIL,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,IAC7C,EAAQ,IAAI,CAAC,sBACN,AAeX,SAAS,EAAQ,CAAI,SACnB,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,sBACP,AA+BX,SAAS,EAAgB,CAAI,SAC3B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,IAET,EAAQ,IAAI,CAAC,sBACN,EAAQ,GACjB,EAtC2B,IAErB,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,cAIN,EAAG,IAER,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACT,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAS,cAAc,IAKtD,EAAQ,KAAK,CAAC,kBACP,AAkCT,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,OAAT,GAAiB,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,IAC5D,EAAQ,IAAI,CAAC,kBACN,EAAQ,KAEjB,EAAQ,OAAO,CAAC,GACT,EACT,EAzCc,GACd,EAnCmB,IAEV,EAAI,EACb,EAxCgB,EAChB,CAgHF,EA5LE,QAIF,SAA2B,CAAM,CAAE,CAAO,EACxC,IAGI,EAEA,EALA,EAAa,EAAO,MAAM,CAAG,EAC7B,EAAe,EA6CnB,MAtCqC,eAAjC,CAAM,CAPS,EAOK,CAAC,EAAE,CAAC,IAAI,EAC9B,CAAA,GAAgB,CAAA,EAKhB,EAAa,EAAI,GACjB,AAA+B,eAA/B,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,EAE1B,CAAA,GAAc,CAAA,EAGiB,uBAA/B,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,IAAI,EACzB,CAAA,IAAiB,EAAa,GAC5B,EAAa,EAAI,GAChB,AAAmC,eAAnC,CAAM,CAAC,EAAa,EAAE,CAAC,EAAE,CAAC,IAAI,AAAK,GAEvC,CAAA,GAAc,EAAe,IAAM,EAAa,EAAI,CAAA,EAElD,EAAa,IACf,EAAU,CACR,KAAM,iBACN,MAAO,CAAM,CAAC,EAAa,CAAC,EAAE,CAAC,KAAK,CACpC,IAAK,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,GAAG,AAChC,EACA,EAAO,CACL,KAAM,YACN,MAAO,CAAM,CAAC,EAAa,CAAC,EAAE,CAAC,KAAK,CACpC,IAAK,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,GAAG,CAC9B,YAAa,MACf,EACA,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAQ,EAAc,EAAa,EAAe,EAAG,CAC1D,CAAC,QAAS,EAAS,EAAQ,CAC3B,CAAC,QAAS,EAAM,EAAQ,CACxB,CAAC,OAAQ,EAAM,EAAQ,CACvB,CAAC,OAAQ,EAAS,EAAQ,CAC3B,GAEI,CACT,CAnDA,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GCdC,IAAA,EAAA,EAAA,S,E,E,S,E,E,SAaM,IAAM,EAAW,CACtB,KAAM,WACN,SAsCF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,MAGpC,EAEA,EAEA,EAEA,EAEA,EAVJ,IAAM,EAAO,IAAI,CAWjB,OAYA,SAAe,CAAI,EAEjB,OAcA,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,gBACd,EAAQ,OAAO,CAhBD,GAiBP,CAhBT,EAiCA,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAa,CAAA,EACN,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAS,EAMF,EAAK,SAAS,CAAG,EAAK,GAI3B,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GAEhB,EAAS,OAAO,YAAY,CAAC,GACtB,GAEF,EAAI,EACb,CAgBA,SAAS,EAAgB,CAAI,SAC3B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAS,EACF,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAS,EACT,EAAQ,EACD,GAIL,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GAChB,EAAS,EAGF,EAAK,SAAS,CAAG,EAAK,GAExB,EAAI,EACb,CAYA,SAAS,EAAkB,CAAI,SAC7B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAGT,EAAK,SAAS,CAAG,EAAK,GAExB,EAAI,EACb,CAYA,SAAS,EAAgB,CAAI,EAC3B,IAAM,EAAQ,gBACd,AAAI,IAAS,EAAM,UAAU,CAAC,KAE5B,CADA,EAAQ,OAAO,CA
AC,GACZ,IAAU,EAAM,MAAM,EAGjB,EAAK,SAAS,CAAG,EAAK,EAExB,EAEF,EAAI,EACb,CAYA,SAAS,EAAc,CAAI,QACzB,AAAI,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GAEhB,EAAS,OAAO,YAAY,CAAC,GACtB,GAEF,EAAI,EACb,CAcA,SAAS,EAAQ,CAAI,EACnB,GACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC1B,CACA,IAAM,EAAQ,AAAS,KAAT,EACR,EAAO,EAAO,WAAW,SAC/B,AAAI,CAAC,GAAS,CAAC,GAAc,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,QAAQ,CAAC,IACjD,EAAS,EAGF,EAAK,SAAS,CAAG,EAAG,GAAQ,EAAa,IAE9C,AAAA,CAAA,EAAA,EAAA,cAAa,AAAb,EAAe,QAAQ,CAAC,EAAO,WAAW,IAE5C,CADA,EAAS,EACL,IACF,EAAQ,OAAO,CAAC,GACT,GAKF,EAAK,SAAS,CAAG,EAAG,GAAQ,EAAa,IAElD,EAAS,EAEF,EAAK,SAAS,EAAI,CAAC,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,CACvD,EAAI,GACJ,EACA,AA2CR,SAAS,EAAwB,CAAI,QACnC,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAY,EACrB,EAjDgC,GACxB,EAA4B,GAClC,QAGA,AAAI,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IACnC,EAAQ,OAAO,CAAC,GAChB,GAAU,OAAO,YAAY,CAAC,GACvB,GAEF,EAAI,EACb,CAYA,SAAS,EAAiB,CAAI,SAC5B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAGT,EAAK,SAAS,CAAG,EAAK,GAExB,EAAI,EACb,CA2CA,SAAS,EAA4B,CAAI,SACvC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAIL,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IAC3C,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAY,EACrB,CAgBA,SAAS,EAAsB,CAAI,SAEjC,AACE,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IAElB,EAAQ,OAAO,CAAC,GACT,GAEF,EAA2B,EACpC,CAeA,SAAS,EAA2B,CAAI,SACtC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAA4B,EACrC,CAeA,SAAS,EAA6B,CAAI,SACxC,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,EAEO,EAAI,GAET,AAAS,KAAT,GAAe,AAAS,KAAT,GACjB,EAAQ,OAAO,CAAC,GAChB,EAAU,EACH,GAEL,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,AAsCT,SAAS,EAA+B,CAAI,SAC1C,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAEnB,EAA2B,IAEpC,EAAQ,OAAO,CAAC,GACT,EACT,EAtDwC,EACxC,CAcA,SAAS,EAA6B,CAAI,SACxC,AAAI,IAAS,GACX,EAAQ,OAAO,CAAC,GAChB,EAAU,KACH,GAEL,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAC/B,EAAI,IAEb,EAAQ,OAAO,CAAC,GACT,EACT,CAyCA,SAAS,EAAkC,CAAI,SAC7C,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACvC,EAA4B,GAE9B,EAAI,EACb,CAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAc,CAAI,SACzB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,GAG/B,EAAa,GAElB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IAAU,CAAA,AAAW,IAAX,GAAgB,AAAW,IAAX,CAAW,GAC1D,EAAQ,IAAI,CAAC,gBACN,EAAQ,KAAK,CAClB,EACA,EACA,GACA,IAEA,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,gBACN,EAAkB,KAE3B,EAAQ,OAAO,CAAC,GACT,EACT,CAaA,SAAS,EAAkB,CAAI,EAC7B,OAAO,EAAQ,KAAK,CAClB,EACA,EACA,GACA,EACJ,CAaA,SAAS,EAAyB,CAAI,EAIpC,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,CACT,CAaA,SAAS,EAAmB,CAAI,SAC9B,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AA
AjB,EAAmB,GAC/B,EAAkB,IAE3B,EAAQ,KAAK,CAAC,gBACP,EAAa,GACtB,CAYA,SAAS,EAA0B,CAAI,SACrC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAa,EACtB,CAYA,SAAS,EAAuB,CAAI,SAClC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAS,GACF,GAEF,EAAa,EACtB,CAYA,SAAS,EAAsB,CAAI,EACjC,GAAI,AAAS,KAAT,EAAa,CACf,IAAM,EAAO,EAAO,WAAW,SAC/B,AAAI,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,QAAQ,CAAC,IACxB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAa,EACtB,OACA,AAAI,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IAAS,EAAO,MAAM,CAAG,GACtC,EAAQ,OAAO,CAAC,GAEhB,GAAU,OAAO,YAAY,CAAC,GACvB,GAEF,EAAa,EACtB,CAYA,SAAS,EAAwB,CAAI,SACnC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAa,EACtB,CAoBA,SAAS,EAA8B,CAAI,SACzC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAIL,AAAS,KAAT,GAAe,AAAW,IAAX,GACjB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAa,EACtB,CAYA,SAAS,EAAkB,CAAI,SAC7B,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,gBACN,EAAkB,KAE3B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAkB,CAAI,EAM7B,OALA,EAAQ,IAAI,CAAC,YAKN,EAAG,EACZ,CACF,EArzBE,UAeF,SAA2B,CAAM,EAC/B,IAAI,EAAQ,EAAO,MAAM,CACzB,KAAO,KACD,CAAA,AAAqB,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,EAAgB,AAA0B,aAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,AAAK,IAYhE,OARI,EAAQ,GAAK,AAA8B,eAA9B,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,IAAI,GAExC,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,KAAK,CAAG,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,KAAK,CAEnD,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,KAAK,CAAG,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,KAAK,CAEvD,EAAO,MAAM,CAAC,EAAQ,EAAG,IAEpB,CACT,EA9BE,SAAU,CAAA,CACZ,EAGM,EAAkB,CACtB,SAk2BF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC/C,OAaA,SAAe,CAAI,EAIjB,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,EAAQ,OAAO,CAAC,EAAA,SAAQ,CAAG,EAAI,EACxC,CACF,EAr3BE,QAAS,CAAA,CACX,EACM,EAA2B,CAC/B,SAizBF,SAA0C,CAAO,CAAE,CAAE,CAAE,CAAG,EACxD,IAAM,EAAO,IAAI,CACjB,OAaA,SAAe,CAAI,QACjB,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,GAEF,EAAI,EACb,EAaA,SAAS,EAAM,CAAI,EACjB,OAAO,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,CAAG,EAAI,GAAQ,EAAG,EAC5D,CACF,EAv1BE,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,iB,I,G,E,E,O,C,e,I,GCpBO,IAAM,EAAiB,CAC5B,UACA,UACA,QACA,OACA,WACA,aACA,OACA,UACA,SACA,MACA,WACA,KACA,UACA,SACA,MACA,MACA,KACA,KACA,WACA,aACA,SACA,SACA,OACA,QACA,WACA,KACA,KACA,KACA,KACA,KACA,KACA,OACA,SACA,KACA,OACA,SACA,SACA,KACA,OACA,OACA,OACA,WACA,MACA,WACA,KACA,WACA,SACA,IACA,QACA,SACA,UACA,UACA,QACA,QACA,KACA,QACA,KACA,QACA,QACA,KACA,QACA,KACD,CAcY,EAAe,CAAC,MAAO,SAAU,QAAS,WAAW,A,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GCtFjE,IAAA,EAAA,EAAA,S,E,E,SAWM,IAAM,EAAW,CACtB,KAAM,WACN,SAOF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,MAGpC,EAEA,EAEA,EANJ,IAAM,EAAO,IAAI,CAOjB,OAYA,SAAe,CAAI,EAIjB,OAHA,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,gBACd,EAAQ,OAAO,CAAC,GACT,CACT,EAgBA,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAIL,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAgBA,SAAS,EAAgB,CAAI,SAC3B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAQ,EACD,GAEL,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAkB,CAAI,SAC7B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAQ,CAAI,SACnB,AAAI,AAAS,OAAT,EACK,EAAI,GAET,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,KAE1B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAQ,EACjB,CAYA,SAAS,EAAW,CAAI,EACtB,OAAO,AAAS,KAAT,EACH,EAAI,GACJ,AAAS,KAAT,
EACA,EAAa,GACb,EAAQ,EACd,CAYA,SAAS,EAAgB,CAAI,EAC3B,IAAM,EAAQ,gBACd,AAAI,IAAS,EAAM,UAAU,CAAC,MAC5B,EAAQ,OAAO,CAAC,GACT,IAAU,EAAM,MAAM,CAAG,EAAQ,GAEnC,EAAI,EACb,CAYA,SAAS,EAAM,CAAI,SACjB,AAAI,AAAS,OAAT,EACK,EAAI,GAET,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,KAE1B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAW,CAAI,SACtB,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAM,EACf,CAYA,SAAS,EAAS,CAAI,SACpB,AAAI,AAAS,KAAT,EACK,EAAI,GAET,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEF,EAAM,EACf,CAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,OAAT,GAAiB,AAAS,KAAT,EACZ,EAAI,GAET,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,KAE1B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,OAAT,EACK,EAAI,GAET,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,KAE1B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAAiB,CAAI,EAC5B,OAAO,AAAS,KAAT,EAAc,EAAI,GAAQ,EAAY,EAC/C,CAYA,SAAS,EAAc,CAAI,QAEzB,AAAI,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IACb,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAS,CAAI,SAEpB,AAAI,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IACnC,EAAQ,OAAO,CAAC,GACT,GAEF,AAaT,SAAS,EAAgB,CAAI,QAC3B,AAAI,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,IAEtB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,EAvByB,EACzB,CAkCA,SAAS,EAAQ,CAAI,SAEnB,AAAI,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IACnC,EAAQ,OAAO,CAAC,GACT,GAEL,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GACnD,EAAe,GAEjB,EAAI,EACb,CAYA,SAAS,EAAe,CAAI,SAC1B,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAIL,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IAC3C,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,IAEtB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAI,EACb,CAYA,SAAS,EAAqB,CAAI,SAEhC,AACE,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,IAElB,EAAQ,OAAO,CAAC,GACT,GAEF,AAcT,SAAS,EAA0B,CAAI,SACrC,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GACT,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,IAEtB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,GAEF,EAAe,EACxB,EA5BmC,EACnC,CAwCA,SAAS,EAA4B,CAAI,SACvC,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,EAEO,EAAI,GAET,AAAS,KAAT,GAAe,AAAS,KAAT,GACjB,EAAQ,OAAO,CAAC,GAChB,EAAS,EACF,GAEL,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,IAEtB,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,OAAO,CAAC,GACT,IAET,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAA4B,CAAI,SACvC,AAAI,IAAS,GACX,EAAQ,OAAO,CAAC,GAChB,EAAS,KAAA,EACF,GAEL,AAAS,OAAT,EACK,EAAI,GAET,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACrB,EAAc,EACP,EAAiB,KAE1B,EAAQ,OAAO,CAAC,GACT,EACT,CAYA,SAAS,EAA8B,CAAI,SACzC,AACE,AAAS,OAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,GACA,AAAS,KAAT,EAEO,EAAI,GAET,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GACnD,EAAe,IAExB,EAAQ,OAAO,CAAC,GACT,EACT,CAaA,SAAS,EAAiC,CAAI,SAC5C,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GACnD,EAAe,GAEjB,EAAI,EACb,CAYA,SAAS,EAAI,CAAI,SACf,AAAI,AAAS,KAAT,GACF,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,gBACb,EAAQ,IAAI,CAAC,YACN,GAEF,EAAI,EACb,CAgBA,SAAS,EAAiB,CAAI,EAK5B,OAJA,EAAQ,IAAI,CAAC,gBACb,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,CACT,CAgBA,SAAS,EAAgB,CAAI,EAG3B,MAAO,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACE,EACA,EACA,a
ACA,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBACzC,KAAA,EACA,GACJ,GACF,EAAsB,EAC5B,CAgBA,SAAS,EAAsB,CAAI,EAEjC,OADA,EAAQ,KAAK,CAAC,gBACP,EAAY,EACrB,CACF,CAprBA,C,G,E,Q,S,C,C,C,E,E,E,O,C,W,I,GCZC,IAAA,EAAA,EAAA,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,S,E,E,SAWM,IAAM,EAAW,CACtB,KAAM,WACN,SA8IF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,MAIpC,EAEA,EALJ,IAAM,EAAO,IAAI,CACb,EAAQ,EAAK,MAAM,CAAC,MAAM,CAO9B,KAAO,KACL,GACG,AAAA,CAAA,AAA+B,eAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACzB,AAA+B,cAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,AAAK,GACjC,CAAC,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,SAAS,CAChC,CACA,EAAa,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAClC,KACF,CAEF,OAiBA,SAAe,CAAI,SAEjB,AAAK,EAaD,EAAW,SAAS,CACf,EAAY,IAErB,EAAU,EAAK,MAAM,CAAC,OAAO,CAAC,QAAQ,CACpC,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EACE,EAAK,cAAc,CAAC,CAClB,MAAO,EAAW,GAAG,CACrB,IAAK,EAAK,GAAG,EACf,KAGJ,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,eACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,YACN,GA5BE,EAAI,EA6Bf,EAkBA,SAAS,EAAM,CAAI,SAKjB,AAAI,AAAS,KAAT,EACK,EAAQ,OAAO,CACpB,EACA,EACA,EAAU,EAAa,GACvB,GAIA,AAAS,KAAT,EACK,EAAQ,OAAO,CACpB,EACA,EACA,EAAU,EAAmB,GAC7B,GAIG,EAAU,EAAW,GAAQ,EAAY,EAClD,CAgBA,SAAS,EAAiB,CAAI,EAC5B,OAAO,EAAQ,OAAO,CACpB,EACA,EACA,GACA,EACJ,CAkBA,SAAS,EAAW,CAAI,EAEtB,OAAO,EAAG,EACZ,CAkBA,SAAS,EAAY,CAAI,EAEvB,OADA,EAAW,SAAS,CAAG,CAAA,EAChB,EAAI,EACb,CACF,EA5TE,UAqCF,SAA2B,CAAM,CAAE,CAAO,EACxC,IAGI,EAEA,EAEA,EAEA,EATA,EAAQ,EAAO,MAAM,CACrB,EAAS,EAWb,KAAO,KAEL,GADA,EAAQ,CAAM,CAAC,EAAM,CAAC,EAAE,CACpB,EAAM,CAER,GACE,AAAe,SAAf,EAAM,IAAI,EACT,AAAe,cAAf,EAAM,IAAI,EAAoB,EAAM,SAAS,CAE9C,KAKuB,CAAA,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,EAAgB,AAAe,cAAf,EAAM,IAAI,EAC5C,CAAA,EAAM,SAAS,CAAG,CAAA,CADpB,CAGF,MAAO,GAAI,EACT,CAAA,GACE,AAAqB,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,EACf,CAAA,AAAe,eAAf,EAAM,IAAI,EAAqB,AAAe,cAAf,EAAM,IAAI,AAAK,GAC/C,CAAC,EAAM,SAAS,GAEhB,EAAO,EACH,AAAe,cAAf,EAAM,IAAI,EAAkB,CAC9B,EAAS,EACT,KACF,CACF,KACwB,aAAf,EAAM,IAAI,EACnB,CAAA,EAAQ,CAFR,EAKJ,IAAM,EAAQ,CACZ,KAAM,AAAyB,cAAzB,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,IAAI,CAAmB,OAAS,QACtD,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,EAC9C,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAO,MAAM,CAAG,EAAE,CAAC,EAAE,CAAC,GAAG,CACzD,EACM,EAAQ,CACZ,KAAM,QACN,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,EAC9C,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,GAAG,CAC7C,EACM,EAAO,CACX,KAAM,YACN,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAO,EAAS,EAAE,CAAC,EAAE,CAAC,GAAG,EACzD,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAQ,EAAE,CAAC,EAAE,CAAC,KAAK,CACnD,EAsCA,OArCA,EAAQ,CACN,CAAC,QAAS,EAAO,EAAQ,CACzB,CAAC,QAAS,EAAO,EAAQ,CAC1B,CAGD,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAO,EAAO,KAAK,CAAC,EAAO,EAAG,EAAO,EAAS,IAG3D,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAO,CAAC,CAAC,QAAS,EAAM,EAAQ,CAAC,EAK9C,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EACN,EACA,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EACE,EAAQ,MAAM,CAAC,UAAU,CAAC,UAAU,CAAC,IAAI,CACzC,EAAO,KAAK,CAAC,EAAO,EAAS,EAAG,EAAQ,GACxC,IAKJ,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAO,CAClB,CAAC,OAAQ,EAAM,EAAQ,CACvB,CAAM,CAAC,EAAQ,EAAE,CACjB,CAAM,CAAC,EAAQ,EAAE,CACjB,CAAC,OAAQ,EAAO,EAAQ,CACzB,EAGD,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAO,EAAO,KAAK,CAAC,EAAQ,IAGzC,EAAQ,AAAA,CAAA,EAAA,EAAA,IAAG,AAAH,EAAK,EAAO,CAAC,CAAC,OAAQ,EAAO,EAAQ,CAAC,EAC9C,AAAA,CAAA,EAAA,EAAA,MAAK,AAAL,EAAO,EAAQ,EAAM,EAAO,MAAM,CAAE,GAC7B,CACT,EAtIE,WAiBF,SAA4B,CAAM,EAChC,IAAI,EAAQ,GACZ,KAAO,EAAE,EAAQ,EAAO,MAAM,EAAE,CAC9B,IAAM,EAAQ,CAAM,CAAC,EAAM,CAAC,EAAE,CAE5B,CAAA,AAAe,eAAf,EAAM,IAAI,EACV,AAAe,cAAf,EAAM,IAAI,EACV,AAAe,aAAf,EAAM,IAAI,AAAK,IAGf,EAAO,MAAM,CAAC,EA
AQ,EAAG,AAAe,eAAf,EAAM,IAAI,CAAoB,EAAI,GAC3D,EAAM,IAAI,CAAG,OACb,IAEJ,CACA,OAAO,CACT,CAhCA,EAGM,EAAoB,CACxB,SA4TF,SAA0B,CAAO,CAAE,CAAE,CAAE,CAAG,EACxC,OAYA,SAAuB,CAAI,EAKzB,OAJA,EAAQ,KAAK,CAAC,YACd,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACN,CACT,EAYA,SAAS,EAAe,CAAI,EAC1B,MAAO,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC7B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAS,GAAc,GACzC,EAAa,EACnB,CAYA,SAAS,EAAa,CAAI,SACxB,AAAI,AAAS,KAAT,EACK,EAAY,GAEd,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EACL,EACA,EACA,EACA,sBACA,6BACA,mCACA,yBACA,4BACA,IACA,EACJ,CAYA,SAAS,EAAyB,CAAI,EACpC,MAAO,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC7B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAS,GAAiB,GAC5C,EAAY,EAClB,CAYA,SAAS,EAA2B,CAAI,EACtC,OAAO,EAAI,EACb,CAYA,SAAS,EAAgB,CAAI,SAC3B,AAAI,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,EACzB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EACA,EACA,gBACA,sBACA,uBACA,GAEG,EAAY,EACrB,CAYA,SAAS,EAAmB,CAAI,EAC9B,MAAO,AAAA,CAAA,EAAA,EAAA,yBAAwB,AAAxB,EAA0B,GAC7B,AAAA,CAAA,EAAA,EAAA,iBAAgB,AAAhB,EAAkB,EAAS,GAAa,GACxC,EAAY,EAClB,CAYA,SAAS,EAAY,CAAI,SACvB,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACb,EAAQ,IAAI,CAAC,YACN,GAEF,EAAI,EACb,CACF,CArdA,EAEM,EAAyB,CAC7B,SAwdF,SAA+B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC7C,IAAM,EAAO,IAAI,CACjB,OAYA,SAAuB,CAAI,EACzB,MAAO,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,IAAI,CACtB,EACA,EACA,EACA,EACA,YACA,kBACA,mBACA,EACJ,EAYA,SAAS,EAAmB,CAAI,EAC9B,OAAO,EAAK,MAAM,CAAC,OAAO,CAAC,QAAQ,CACjC,AAAA,CAAA,EAAA,EAAA,mBAAkB,AAAlB,EACE,EAAK,cAAc,CAAC,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAAC,EAAE,EAAE,KAAK,CAAC,EAAG,MAGrE,EAAG,GACH,EAAI,EACV,CAYA,SAAS,EAAqB,CAAI,EAChC,OAAO,EAAI,EACb,CACF,CAlhBA,EAEM,EAA8B,CAClC,SAqhBF,SAAoC,CAAO,CAAE,CAAE,CAAE,CAAG,EAClD,OAcA,SAAiC,CAAI,EAOnC,OAJA,EAAQ,KAAK,CAAC,aACd,EAAQ,KAAK,CAAC,mBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,mBACN,CACT,EAcA,SAAS,EAAuB,CAAI,SAClC,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,mBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,mBACb,EAAQ,IAAI,CAAC,aACN,GAEF,EAAI,EACb,CACF,CAnkBA,C,G,E,Q,S,C,C,C,E,E,E,O,C,kB,I,GC3BO,IAAM,EAAkB,CAC7B,KAAM,kBACN,SAQF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC/C,IAAM,EAAO,IAAI,CACjB,OAYA,SAAe,CAAI,EAKjB,OAJA,EAAQ,KAAK,CAAC,cACd,EAAQ,KAAK,CAAC,oBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,oBACN,CACT,EAYA,SAAS,EAAK,CAAI,SAChB,AAAI,AAAS,KAAT,GACF,EAAQ,KAAK,CAAC,eACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,cACN,GAEF,EAAI,EACb,CA6BA,SAAS,EAAM,CAAI,EAMjB,OAAO,AAAS,KAAT,GAAe,2BAA4B,EAAK,MAAM,CAAC,UAAU,CACpE,EAAI,GACJ,EAAG,EACT,CACF,EAvFE,WAAY,AAAA,AARb,EAAA,SAQa,QAAO,CAAE,UAAU,AACjC,C,G,E,Q,S,C,C,C,E,E,E,O,C,iB,I,GCJO,IAAM,EAAiB,CAC5B,KAAM,iBACN,SAQF,SAAgC,CAAO,CAAE,CAAE,CAAE,CAAG,EAC9C,IAAM,EAAO,IAAI,CACjB,OAYA,SAAe,CAAI,EAMjB,OALA,EAAQ,KAAK,CAAC,aACd,EAAQ,KAAK,CAAC,eACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,eACb,EAAQ,IAAI,CAAC,aACN,CACT,EAGA,SAAS,EAAM,CAAI,EAKjB,OAAO,AAAS,KAAT,GAAe,2BAA4B,EAAK,MAAM,CAAC,UAAU,CACpE,EAAI,GACJ,EAAG,EACT,CACF,EAxCE,WAAY,AAAA,AARb,EAAA,SAQa,QAAO,CAAE,UAAU,AACjC,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GCTC,IAAA,EAAA,EAAA,SAKM,IAAM,EAAa,CACxB,KAAM,aACN,SAOF,SAA4B,CAAO,CAAE,CAAE,EACrC,OAGA,SAAe,CAAI,EAIjB,OAHA,EAAQ,KAAK,CAAC,cACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,cACN,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAI,aACnC,CACF,CAhBA,C,G,E,Q,S,C,C,C,E,E,E,O,C,O,I,GCLC,IAAA,EAAA,EAAA,S,E,E,S,E,E,S,E,E,SAQM,IAAM,EAAO,CAClB,KAAM,OACN,SA0BF,SAA2B,CAAO,CAAE,CAAE,CAAE,CAAG,EACzC,IAAM,EAAO,IAAI,CACX,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAC5C,EACF,GAAQ,AAAiB,eAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,CAChB,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAA
M,MAAM,CAC5C,EACF,EAAO,EACX,OAGA,SAAe,CAAI,EACjB,IAAM,EACJ,EAAK,cAAc,CAAC,IAAI,EACvB,CAAA,AAAS,KAAT,GAAe,AAAS,KAAT,GAAe,AAAS,KAAT,EAC3B,gBACA,aAAA,EACN,GACE,AAAS,kBAAT,EACI,CAAC,EAAK,cAAc,CAAC,MAAM,EAAI,IAAS,EAAK,cAAc,CAAC,MAAM,CAClE,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,GACf,CAOA,GANK,EAAK,cAAc,CAAC,IAAI,GAC3B,EAAK,cAAc,CAAC,IAAI,CAAG,EAC3B,EAAQ,KAAK,CAAC,EAAM,CAClB,WAAY,CAAA,CACd,IAEE,AAAS,kBAAT,EAEF,OADA,EAAQ,KAAK,CAAC,kBACP,AAAS,KAAT,GAAe,AAAS,KAAT,EAClB,EAAQ,KAAK,CAAC,EAAA,aAAY,CAAG,EAAK,GAAU,GAC5C,EAAS,GAEf,GAAI,CAAC,EAAK,SAAS,EAAI,AAAS,KAAT,EAGrB,OAFA,EAAQ,KAAK,CAAC,kBACd,EAAQ,KAAK,CAAC,iBACP,AAOb,SAAS,EAAO,CAAI,QAClB,AAAI,AAAA,CAAA,EAAA,EAAA,UAAS,AAAT,EAAW,IAAS,EAAE,EAAO,IAC/B,EAAQ,OAAO,CAAC,GACT,GAGP,AAAC,CAAA,CAAC,EAAK,SAAS,EAAI,EAAO,CAAA,GAC1B,CAAA,EAAK,cAAc,CAAC,MAAM,CACvB,IAAS,EAAK,cAAc,CAAC,MAAM,CACnC,AAAS,KAAT,GAAe,AAAS,KAAT,CAAS,GAE5B,EAAQ,IAAI,CAAC,iBACN,EAAS,IAEX,EAAI,EACb,EAtBoB,EAElB,CACA,OAAO,EAAI,EACb,EAuBA,SAAS,EAAS,CAAI,EAKpB,OAJA,EAAQ,KAAK,CAAC,kBACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,kBACb,EAAK,cAAc,CAAC,MAAM,CAAG,EAAK,cAAc,CAAC,MAAM,EAAI,EACpD,EAAQ,KAAK,CAClB,EAAA,SAAQ,CAER,EAAK,SAAS,CAAG,EAAM,EACvB,EAAQ,OAAO,CACb,EACA,EACA,GAGN,CAGA,SAAS,EAAQ,CAAI,EAGnB,OAFA,EAAK,cAAc,CAAC,gBAAgB,CAAG,CAAA,EACvC,IACO,EAAY,EACrB,CAGA,SAAS,EAAY,CAAI,QACvB,AAAI,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAChB,EAAQ,KAAK,CAAC,4BACd,EAAQ,OAAO,CAAC,GAChB,EAAQ,IAAI,CAAC,4BACN,GAEF,EAAI,EACb,CAGA,SAAS,EAAY,CAAI,EAIvB,OAHA,EAAK,cAAc,CAAC,IAAI,CACtB,EACA,EAAK,cAAc,CAAC,EAAQ,IAAI,CAAC,kBAAmB,CAAA,GAAM,MAAM,CAC3D,EAAG,EACZ,CACF,EAnIE,aAAc,CACZ,SAwIJ,SAAkC,CAAO,CAAE,CAAE,CAAE,CAAG,EAChD,IAAM,EAAO,IAAI,CAEjB,OADA,EAAK,cAAc,CAAC,UAAU,CAAG,KAAA,EAC1B,EAAQ,KAAK,CAAC,EAAA,SAAQ,CAG7B,SAAiB,CAAI,EAOnB,OANA,EAAK,cAAc,CAAC,iBAAiB,CACnC,EAAK,cAAc,CAAC,iBAAiB,EACrC,EAAK,cAAc,CAAC,gBAAgB,CAI/B,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EACA,iBACA,EAAK,cAAc,CAAC,IAAI,CAAG,GAC3B,EACJ,EAGA,SAAkB,CAAI,SACpB,AAAI,EAAK,cAAc,CAAC,iBAAiB,EAAI,CAAC,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IAC1D,EAAK,cAAc,CAAC,iBAAiB,CAAG,KAAA,EACxC,EAAK,cAAc,CAAC,gBAAgB,CAAG,KAAA,EAChC,EAAiB,KAE1B,EAAK,cAAc,CAAC,iBAAiB,CAAG,KAAA,EACxC,EAAK,cAAc,CAAC,gBAAgB,CAAG,KAAA,EAChC,EAAQ,OAAO,CAAC,EAAiB,EAAI,GAAkB,GAChE,GAGA,SAAS,EAAiB,CAAI,EAO5B,OALA,EAAK,cAAc,CAAC,UAAU,CAAG,CAAA,EAEjC,EAAK,SAAS,CAAG,KAAA,EAGV,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EACA,EAAQ,OAAO,CAAC,EAAM,EAAI,GAC1B,aACA,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBACzC,KAAA,EACA,GACJ,EACJ,CACF,CAzLE,EACA,KAsNF,SAAyB,CAAO,EAC9B,EAAQ,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,CACvC,CAvNA,EAGM,EAAoC,CACxC,SAyNF,SAA0C,CAAO,CAAE,CAAE,CAAE,CAAG,EACxD,IAAM,EAAO,IAAI,CAIjB,MAAO,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EASF,SAAqB,CAAI,EACvB,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAChD,MAAO,CAAC,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,IACpB,GACA,AAAiB,6BAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,CACV,EAAG,GACH,EAAI,EACV,EAdE,2BACA,EAAK,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,gBACzC,KAAA,EACA,EAYR,EA/OE,QAAS,CAAA,CACX,EAGM,EAAkB,CACtB,SAmLF,SAAwB,CAAO,CAAE,CAAE,CAAE,CAAG,EACtC,IAAM,EAAO,IAAI,CACjB,MAAO,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EACL,EAOF,SAAqB,CAAI,EACvB,IAAM,EAAO,EAAK,MAAM,CAAC,EAAK,MAAM,CAAC,MAAM,CAAG,EAAE,CAChD,OAAO,GACL,AAAiB,mBAAjB,CAAI,CAAC,EAAE,CAAC,IAAI,EACZ,CAAI,CAAC,EAAE,CAAC,cAAc,CAAC,CAAI,CAAC,EAAE,CAAE,CAAA,GAAM,MAAM,GAAK,EAAK,cAAc,CAAC,IAAI,CACvE,EAAG,GACH,EAAI,EACV,EAZE,iBACA,EAAK,cAAc,CAAC,IAAI,CAAG,EAY/B,EApME,QAAS,CAAA,CACX,C,G,E,Q,S,C,C,C,E,E,E,O,C,gB,I,GC7BC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAgB,CAC3B,KAAM,gBACN,SAOF,SAA+B,CAAO,CAAE,CAAE,CAAE,CAAG,EAC7C,IAEI,EAFA,EAAO,EAGX,OAY
A,SAAe,CAAI,EAGjB,OAFA,EAAQ,KAAK,CAAC,iBAgBd,EAdc,EAeP,AAaT,SAAS,EAAQ,CAAI,SACnB,AAAI,IAAS,GACX,EAAQ,KAAK,CAAC,yBACP,AAmBX,SAAS,EAAS,CAAI,SACpB,AAAI,IAAS,GACX,EAAQ,OAAO,CAAC,GAChB,IACO,IAET,EAAQ,IAAI,CAAC,yBACN,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAS,cAAc,GAC7C,EAAQ,GACd,EA7BoB,IAEd,GAAQ,GAAM,CAAA,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,EAAA,GACpD,EAAQ,IAAI,CAAC,iBACN,EAAG,IAEL,EAAI,EACb,EAtCgB,EAChB,CA4DF,CAtFA,C,G,E,Q,S,C,C,C,E,E,E,O,C,kB,I,GCPC,IAAA,EAAA,EAAA,S,E,E,SAKM,IAAM,EAAkB,CAC7B,KAAM,kBACN,SAkEF,SAAiC,CAAO,CAAE,CAAE,CAAE,CAAG,MAG3C,EAFJ,IAAM,EAAO,IAAI,CAGjB,OAaA,SAAe,CAAI,EACjB,IAEI,EAFA,EAAQ,EAAK,MAAM,CAAC,MAAM,CAI9B,KAAO,KAGL,GACE,AAA+B,eAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EAC1B,AAA+B,eAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EAC1B,AAA+B,YAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAC1B,CACA,EAAY,AAA+B,cAA/B,EAAK,MAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CACtC,KACF,OAKF,AAAI,CAAC,EAAK,MAAM,CAAC,IAAI,CAAC,EAAK,GAAG,GAAG,IAAI,CAAC,EAAK,CAAA,EAAK,SAAS,EAAI,CAAA,GAC3D,EAAQ,KAAK,CAAC,qBACd,EAAS,EAkBX,EAAQ,KAAK,CAAC,6BACP,AAcT,SAAS,EAAO,CAAI,SAClB,AAAI,IAAS,GACX,EAAQ,OAAO,CAAC,GACT,IAET,EAAQ,IAAI,CAAC,6BACN,AAAA,CAAA,EAAA,EAAA,aAAY,AAAZ,EAAc,GACjB,AAAA,CAAA,EAAA,EAAA,YAAW,AAAX,EAAa,EAAS,EAAO,cAAc,GAC3C,EAAM,GACZ,EAzCkB,IAET,EAAI,EACb,EAmDA,SAAS,EAAM,CAAI,SACjB,AAAI,AAAS,OAAT,GAAiB,AAAA,CAAA,EAAA,EAAA,kBAAiB,AAAjB,EAAmB,IACtC,EAAQ,IAAI,CAAC,qBACN,EAAG,IAEL,EAAI,EACb,CACF,EAtKE,UAIF,SAAkC,CAAM,CAAE,CAAO,EAE/C,IAEI,EAEA,EAEA,EANA,EAAQ,EAAO,MAAM,CAUzB,KAAO,KACL,GAAI,AAAqB,UAArB,CAAM,CAAC,EAAM,CAAC,EAAE,CAAc,CAChC,GAAI,AAA0B,YAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,CAAgB,CACvC,EAAU,EACV,KACF,CAC8B,cAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACvB,CAAA,EAAO,CADT,CAGF,KAGgC,YAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EAEvB,EAAO,MAAM,CAAC,EAAO,GAElB,GAAc,AAA0B,eAA1B,CAAM,CAAC,EAAM,CAAC,EAAE,CAAC,IAAI,EACtC,CAAA,EAAa,CADf,EAKJ,IAAM,EAAU,CACd,KAAM,gBACN,MAAO,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,KAAK,EAC9C,IAAK,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAO,MAAM,CAAG,EAAE,CAAC,EAAE,CAAC,GAAG,CACzD,EAiBA,OAdA,CAAM,CAAC,EAAK,CAAC,EAAE,CAAC,IAAI,CAAG,oBAInB,GACF,EAAO,MAAM,CAAC,EAAM,EAAG,CAAC,QAAS,EAAS,EAAQ,EAClD,EAAO,MAAM,CAAC,EAAa,EAAG,EAAG,CAAC,OAAQ,CAAM,CAAC,EAAQ,CAAC,EAAE,CAAE,EAAQ,EACtE,CAAM,CAAC,EAAQ,CAAC,EAAE,CAAC,GAAG,CAAG,OAAO,MAAM,CAAC,CAAC,EAAG,CAAM,CAAC,EAAW,CAAC,EAAE,CAAC,GAAG,GAEpE,CAAM,CAAC,EAAQ,CAAC,EAAE,CAAG,EAIvB,EAAO,IAAI,CAAC,CAAC,OAAQ,EAAS,EAAQ,EAC/B,CACT,CA1DA,C,G,E,Q,S,C,C,C,E,E,E,O,C,a,I,GCDA,IAAM,EAAS,cAKR,SAAS,IACd,IAKI,EALA,EAAS,EACT,EAAS,GAET,EAAQ,CAAA,EAGZ,OAGA,SAAsB,CAAK,CAAE,CAAQ,CAAE,CAAG,MAIpC,EAEA,EAEA,EAEA,EAEA,EAVJ,IAAM,EAAS,EAAE,CAuBjB,IAVA,EAAQ,EAAS,EAAM,QAAQ,CAAC,GAChC,EAAgB,EAChB,EAAS,GACL,IAE0B,QAAxB,EAAM,UAAU,CAAC,IACnB,IAEF,EAAQ,KAAA,GAEH,EAAgB,EAAM,MAAM,EAAE,CAMnC,GALA,EAAO,SAAS,CAAG,EAEnB,EACE,AAFF,CAAA,EAAQ,EAAO,IAAI,CAAC,EAApB,GAEW,AAAgB,KAAA,IAAhB,EAAM,KAAK,CAAiB,EAAM,KAAK,CAAG,EAAM,MAAM,CACjE,EAAO,EAAM,UAAU,CAAC,GACpB,CAAC,EAAO,CACV,EAAS,EAAM,KAAK,CAAC,GACrB,KACF,CACA,GAAI,AAAS,KAAT,GAAe,IAAkB,GAAe,EAClD,EAAO,IAAI,CAAC,IACZ,EAAmB,KAAA,OAUnB,OARI,IACF,EAAO,IAAI,CAAC,IACZ,EAAmB,KAAA,GAEjB,EAAgB,IAClB,EAAO,IAAI,CAAC,EAAM,KAAK,CAAC,EAAe,IACvC,GAAU,EAAc,GAElB,GACN,KAAK,EACH,EAAO,IAAI,CAAC,OACZ,IACA,KAEF,MAAK,EAGH,IAFA,EAAO,AAAwB,EAAxB,KAAK,IAAI,CAAC,EAAS,GAC1B,EAAO,IAAI,CAAC,IACL,IAAW,GAAM,EAAO,IAAI,CAAC,IACpC,KAEF,MAAK,GACH,EAAO,IAAI,CAAC,IACZ,EAAS,EACT,KAEF,SACE,EAAmB,CAAA,EACnB,EAAS,CAEb,CAEF,EAAgB,EAAc,CAChC,CAMA,OALI,IACE,GAAkB,EAAO,IAAI,CAAC,IAC9B,GAAQ,EAAO,IAAI,CAAC,GACxB,EA
AO,IAAI,CAAC,OAEP,CACT,CACF,C,G,E,Q,S,C,C,C,E,E,E,O,C,c,I,GC3GC,IAAA,EAAA,EAAA,SAQM,SAAS,EAAY,CAAM,EAChC,KAAO,CAAC,AAAA,CAAA,EAAA,EAAA,WAAU,AAAV,EAAY,KAGpB,OAAO,CACT,C,G,E,Q,S,C,C,C,E,E,E,O,C,kC,I,GCDO,SAAS,EAAgC,CAAK,CAAE,CAAI,EACzD,IAAM,EAAO,OAAO,QAAQ,CAAC,EAAO,UACpC,AAEE,EAAO,GACP,AAAS,KAAT,GACC,EAAO,IAAM,EAAO,IAEpB,EAAO,KAAO,EAAO,KAErB,EAAO,OAAS,EAAO,OAEvB,EAAO,OAAS,EAAO,OACvB,AAAA,CAAA,AAAO,MAAP,CAAO,GAAW,OACnB,AAAC,CAAA,AAAO,MAAP,CAAO,GAAW,OAEnB,EAAO,QAEA,IAEF,OAAO,YAAY,CAAC,EAC7B,C,G,E,Q,S,C,C,C,E,E,E,O,C,e,I,G,I,E,E,S,E,E,SCjCA,IAAM,EACJ,oEAcK,SAAS,EAAa,CAAK,EAChC,OAAO,EAAM,OAAO,CAAC,EAA4B,EACnD,CAQA,SAAS,EAAO,CAAE,CAAE,CAAE,CAAE,CAAE,EACxB,GAAI,EAEF,OAAO,EAKT,GAAI,AAAS,KADA,EAAG,UAAU,CAAC,GACV,CACf,IAAM,EAAO,EAAG,UAAU,CAAC,GACrB,EAAM,AAAS,MAAT,GAAgB,AAAS,KAAT,EAC5B,MAAO,AAAA,CAAA,EAAA,EAAA,+BAA8B,AAA9B,EAAgC,EAAG,KAAK,CAAC,EAAM,EAAI,GAAI,EAAM,GAAK,GAC3E,CACA,MAAO,AAAA,CAAA,EAAA,EAAA,6BAA4B,AAA5B,EAA8B,IAAO,CAC9C,C,G,E,Q,S,C,C,C,ECLO,SAAS,EAAkB,CAAK,SAErC,AAAI,AAAC,GAAS,AAAiB,UAAjB,OAAO,EAKjB,aAAc,GAAS,SAAU,EAC5B,EAAS,EAAM,QAAQ,EAI5B,UAAW,GAAS,QAAS,EACxB,EAAS,GAId,SAAU,GAAS,WAAY,EAC1B,EAAM,GAIR,GAnBE,EAoBX,CAMA,SAAS,EAAM,CAAK,EAClB,OAAO,EAAM,GAAS,EAAM,IAAI,EAAI,IAAM,EAAM,GAAS,EAAM,MAAM,CACvE,CAMA,SAAS,EAAS,CAAG,EACnB,OAAO,EAAM,GAAO,EAAI,KAAK,EAAI,IAAM,EAAM,GAAO,EAAI,GAAG,CAC7D,CAMA,SAAS,EAAM,CAAK,EAClB,OAAO,GAAS,AAAiB,UAAjB,OAAO,EAAqB,EAAQ,CACtD,C,E,E,O,C,oB,I,E","sources":["","node_modules/mermaid/dist/createText-ca0c5216.js","node_modules/mdast-util-from-markdown/lib/index.js","node_modules/mdast-util-to-string/lib/index.js","node_modules/micromark/lib/parse.js","node_modules/micromark-util-combine-extensions/index.js","node_modules/micromark-util-chunked/index.js","node_modules/micromark/lib/initialize/content.js","node_modules/micromark-factory-space/index.js","node_modules/micromark-util-character/index.js","node_modules/micromark-util-character/lib/unicode-punctuation-regex.js","node_modules/micromark/lib/initialize/document.js","node_modules/micromark/lib/initialize/flow.js","node_modules/micromark-core-commonmark/lib/blank-line.js","node_modules/micromark-core-commonmark/lib/content.js","node_modules/micromark-util-subtokenize/index.js","node_modules/micromark/lib/initialize/text.js","node_modules/micromark/lib/create-tokenizer.js","node_modules/micromark-util-resolve-all/index.js","node_modules/micromark/lib/constructs.js","node_modules/micromark-core-commonmark/lib/attention.js","node_modules/micromark-util-classify-character/index.js","node_modules/micromark-core-commonmark/lib/autolink.js","node_modules/micromark-core-commonmark/lib/block-quote.js","node_modules/micromark-core-commonmark/lib/character-escape.js","node_modules/micromark-core-commonmark/lib/character-reference.js","node_modules/decode-named-character-reference/index.js","node_modules/character-entities/index.js","node_modules/micromark-core-commonmark/lib/code-fenced.js","node_modules/micromark-core-commonmark/lib/code-indented.js","node_modules/micromark-core-commonmark/lib/code-text.js","node_modules/micromark-core-commonmark/lib/definition.js","node_modules/micromark-factory-destination/index.js","node_modules/micromark-factory-label/index.js","node_modules/micromark-factory-title/index.js","node_modules/micromark-factory-whitespace/index.js","node_modules/micromark-util-normalize-identifier/index.js","node_modules/micromark-core-commonmark/lib/hard-break-escape.js","node_modules/micromark-core-commonmark/lib/heading-atx.js","node_modules/micromark-core-commonmark/lib/html-flow.js","node_modules/microma
rk-util-html-tag-name/index.js","node_modules/micromark-core-commonmark/lib/html-text.js","node_modules/micromark-core-commonmark/lib/label-end.js","node_modules/micromark-core-commonmark/lib/label-start-image.js","node_modules/micromark-core-commonmark/lib/label-start-link.js","node_modules/micromark-core-commonmark/lib/line-ending.js","node_modules/micromark-core-commonmark/lib/list.js","node_modules/micromark-core-commonmark/lib/thematic-break.js","node_modules/micromark-core-commonmark/lib/setext-underline.js","node_modules/micromark/lib/preprocess.js","node_modules/micromark/lib/postprocess.js","node_modules/micromark-util-decode-numeric-character-reference/index.js","node_modules/micromark-util-decode-string/index.js","node_modules/unist-util-stringify-position/lib/index.js"],"sourcesContent":["\nfunction $parcel$export(e, n, v, s) {\n Object.defineProperty(e, n, {get: v, set: s, enumerable: true, configurable: true});\n}\n\n var $parcel$global = globalThis;\n var parcelRequire = $parcel$global[\"parcelRequire0031\"];\nvar parcelRegister = parcelRequire.register;\nparcelRegister(\"fmBVZ\", function(module, exports) {\n\n$parcel$export(module.exports, \"c\", () => $b2f6560a6becbb8b$export$db3b6bfb95261072);\n$parcel$export(module.exports, \"a\", () => $b2f6560a6becbb8b$export$407448d2b89b1813);\n\nvar $4jcZX = parcelRequire(\"4jcZX\");\n\nvar $32C5u = parcelRequire(\"32C5u\");\n\nvar $eJNXH = parcelRequire(\"eJNXH\");\nfunction $b2f6560a6becbb8b$var$preprocessMarkdown(markdown) {\n const withoutMultipleNewlines = markdown.replace(/\\n{2,}/g, \"\\n\");\n const withoutExtraSpaces = (0, $eJNXH.dedent)(withoutMultipleNewlines);\n return withoutExtraSpaces;\n}\nfunction $b2f6560a6becbb8b$var$markdownToLines(markdown) {\n const preprocessedMarkdown = $b2f6560a6becbb8b$var$preprocessMarkdown(markdown);\n const { children: children } = (0, $32C5u.fromMarkdown)(preprocessedMarkdown);\n const lines = [\n []\n ];\n let currentLine = 0;\n function processNode(node, parentType = \"normal\") {\n if (node.type === \"text\") {\n const textLines = node.value.split(\"\\n\");\n textLines.forEach((textLine, index)=>{\n if (index !== 0) {\n currentLine++;\n lines.push([]);\n }\n textLine.split(\" \").forEach((word)=>{\n if (word) lines[currentLine].push({\n content: word,\n type: parentType\n });\n });\n });\n } else if (node.type === \"strong\" || node.type === \"emphasis\") node.children.forEach((contentNode)=>{\n processNode(contentNode, node.type);\n });\n }\n children.forEach((treeNode)=>{\n if (treeNode.type === \"paragraph\") treeNode.children.forEach((contentNode)=>{\n processNode(contentNode);\n });\n });\n return lines;\n}\nfunction $b2f6560a6becbb8b$var$markdownToHTML(markdown) {\n const { children: children } = (0, $32C5u.fromMarkdown)(markdown);\n function output(node) {\n if (node.type === \"text\") return node.value.replace(/\\n/g, \"
\");\n else if (node.type === \"strong\") return `${node.children.map(output).join(\"\")}`;\n else if (node.type === \"emphasis\") return `${node.children.map(output).join(\"\")}`;\n else if (node.type === \"paragraph\") return `${node.children.map(output).join(\"\")}
`;\n return `Unsupported markdown: ${node.type}`;\n }\n return children.map(output).join(\"\");\n}\nfunction $b2f6560a6becbb8b$var$splitTextToChars(text) {\n if (Intl.Segmenter) return [\n ...new Intl.Segmenter().segment(text)\n ].map((s)=>s.segment);\n return [\n ...text\n ];\n}\nfunction $b2f6560a6becbb8b$var$splitWordToFitWidth(checkFit, word) {\n const characters = $b2f6560a6becbb8b$var$splitTextToChars(word.content);\n return $b2f6560a6becbb8b$var$splitWordToFitWidthRecursion(checkFit, [], characters, word.type);\n}\nfunction $b2f6560a6becbb8b$var$splitWordToFitWidthRecursion(checkFit, usedChars, remainingChars, type) {\n if (remainingChars.length === 0) return [\n {\n content: usedChars.join(\"\"),\n type: type\n },\n {\n content: \"\",\n type: type\n }\n ];\n const [nextChar, ...rest] = remainingChars;\n const newWord = [\n ...usedChars,\n nextChar\n ];\n if (checkFit([\n {\n content: newWord.join(\"\"),\n type: type\n }\n ])) return $b2f6560a6becbb8b$var$splitWordToFitWidthRecursion(checkFit, newWord, rest, type);\n if (usedChars.length === 0 && nextChar) {\n usedChars.push(nextChar);\n remainingChars.shift();\n }\n return [\n {\n content: usedChars.join(\"\"),\n type: type\n },\n {\n content: remainingChars.join(\"\"),\n type: type\n }\n ];\n}\nfunction $b2f6560a6becbb8b$var$splitLineToFitWidth(line, checkFit) {\n if (line.some(({ content: content })=>content.includes(\"\\n\"))) throw new Error(\"splitLineToFitWidth does not support newlines in the line\");\n return $b2f6560a6becbb8b$var$splitLineToFitWidthRecursion(line, checkFit);\n}\nfunction $b2f6560a6becbb8b$var$splitLineToFitWidthRecursion(words, checkFit, lines = [], newLine = []) {\n if (words.length === 0) {\n if (newLine.length > 0) lines.push(newLine);\n return lines.length > 0 ? lines : [];\n }\n let joiner = \"\";\n if (words[0].content === \" \") {\n joiner = \" \";\n words.shift();\n }\n const nextWord = words.shift() ?? {\n content: \" \",\n type: \"normal\"\n };\n const lineWithNextWord = [\n ...newLine\n ];\n if (joiner !== \"\") lineWithNextWord.push({\n content: joiner,\n type: \"normal\"\n });\n lineWithNextWord.push(nextWord);\n if (checkFit(lineWithNextWord)) return $b2f6560a6becbb8b$var$splitLineToFitWidthRecursion(words, checkFit, lines, lineWithNextWord);\n if (newLine.length > 0) {\n lines.push(newLine);\n words.unshift(nextWord);\n } else if (nextWord.content) {\n const [line, rest] = $b2f6560a6becbb8b$var$splitWordToFitWidth(checkFit, nextWord);\n lines.push([\n line\n ]);\n if (rest.content) words.unshift(rest);\n }\n return $b2f6560a6becbb8b$var$splitLineToFitWidthRecursion(words, checkFit, lines);\n}\nfunction $b2f6560a6becbb8b$var$applyStyle(dom, styleFn) {\n if (styleFn) dom.attr(\"style\", styleFn);\n}\nfunction $b2f6560a6becbb8b$var$addHtmlSpan(element, node, width, classes, addBackground = false) {\n const fo = element.append(\"foreignObject\");\n const div = fo.append(\"xhtml:div\");\n const label = node.label;\n const labelClass = node.isNode ? 
\"nodeLabel\" : \"edgeLabel\";\n div.html(`\n \" + label + \"\");\n $b2f6560a6becbb8b$var$applyStyle(div, node.labelStyle);\n div.style(\"display\", \"table-cell\");\n div.style(\"white-space\", \"nowrap\");\n div.style(\"max-width\", width + \"px\");\n div.attr(\"xmlns\", \"http://www.w3.org/1999/xhtml\");\n if (addBackground) div.attr(\"class\", \"labelBkg\");\n let bbox = div.node().getBoundingClientRect();\n if (bbox.width === width) {\n div.style(\"display\", \"table\");\n div.style(\"white-space\", \"break-spaces\");\n div.style(\"width\", width + \"px\");\n bbox = div.node().getBoundingClientRect();\n }\n fo.style(\"width\", bbox.width);\n fo.style(\"height\", bbox.height);\n return fo.node();\n}\nfunction $b2f6560a6becbb8b$var$createTspan(textElement, lineIndex, lineHeight) {\n return textElement.append(\"tspan\").attr(\"class\", \"text-outer-tspan\").attr(\"x\", 0).attr(\"y\", lineIndex * lineHeight - 0.1 + \"em\").attr(\"dy\", lineHeight + \"em\");\n}\nfunction $b2f6560a6becbb8b$var$computeWidthOfText(parentNode, lineHeight, line) {\n const testElement = parentNode.append(\"text\");\n const testSpan = $b2f6560a6becbb8b$var$createTspan(testElement, 1, lineHeight);\n $b2f6560a6becbb8b$var$updateTextContentAndStyles(testSpan, line);\n const textLength = testSpan.node().getComputedTextLength();\n testElement.remove();\n return textLength;\n}\nfunction $b2f6560a6becbb8b$export$db3b6bfb95261072(parentNode, lineHeight, text) {\n var _a;\n const testElement = parentNode.append(\"text\");\n const testSpan = $b2f6560a6becbb8b$var$createTspan(testElement, 1, lineHeight);\n $b2f6560a6becbb8b$var$updateTextContentAndStyles(testSpan, [\n {\n content: text,\n type: \"normal\"\n }\n ]);\n const textDimension = (_a = testSpan.node()) == null ? void 0 : _a.getBoundingClientRect();\n if (textDimension) testElement.remove();\n return textDimension;\n}\nfunction $b2f6560a6becbb8b$var$createFormattedText(width, g, structuredText, addBackground = false) {\n const lineHeight = 1.1;\n const labelGroup = g.append(\"g\");\n const bkg = labelGroup.insert(\"rect\").attr(\"class\", \"background\");\n const textElement = labelGroup.append(\"text\").attr(\"y\", \"-10.1\");\n let lineIndex = 0;\n for (const line of structuredText){\n const checkWidth = (line2)=>$b2f6560a6becbb8b$var$computeWidthOfText(labelGroup, lineHeight, line2) <= width;\n const linesUnderWidth = checkWidth(line) ? [\n line\n ] : $b2f6560a6becbb8b$var$splitLineToFitWidth(line, checkWidth);\n for (const preparedLine of linesUnderWidth){\n const tspan = $b2f6560a6becbb8b$var$createTspan(textElement, lineIndex, lineHeight);\n $b2f6560a6becbb8b$var$updateTextContentAndStyles(tspan, preparedLine);\n lineIndex++;\n }\n }\n if (addBackground) {\n const bbox = textElement.node().getBBox();\n const padding = 2;\n bkg.attr(\"x\", -padding).attr(\"y\", -padding).attr(\"width\", bbox.width + 2 * padding).attr(\"height\", bbox.height + 2 * padding);\n return labelGroup.node();\n } else return textElement.node();\n}\nfunction $b2f6560a6becbb8b$var$updateTextContentAndStyles(tspan, wrappedLine) {\n tspan.text(\"\");\n wrappedLine.forEach((word, index)=>{\n const innerTspan = tspan.append(\"tspan\").attr(\"font-style\", word.type === \"emphasis\" ? \"italic\" : \"normal\").attr(\"class\", \"text-inner-tspan\").attr(\"font-weight\", word.type === \"strong\" ? 
\"bold\" : \"normal\");\n if (index === 0) innerTspan.text(word.content);\n else innerTspan.text(\" \" + word.content);\n });\n}\nconst $b2f6560a6becbb8b$export$407448d2b89b1813 = (el, text = \"\", { style: style = \"\", isTitle: isTitle = false, classes: classes = \"\", useHtmlLabels: useHtmlLabels = true, isNode: isNode = true, width: width = 200, addSvgBackground: addSvgBackground = false } = {})=>{\n (0, $4jcZX.l).info(\"createText\", text, style, isTitle, classes, useHtmlLabels, isNode, addSvgBackground);\n if (useHtmlLabels) {\n const htmlText = $b2f6560a6becbb8b$var$markdownToHTML(text);\n const node = {\n isNode: isNode,\n label: (0, $4jcZX.M)(htmlText).replace(/fa[blrs]?:fa-[\\w-]+/g, // cspell: disable-line\n (s)=>``),\n labelStyle: style.replace(\"fill:\", \"color:\")\n };\n const vertexNode = $b2f6560a6becbb8b$var$addHtmlSpan(el, node, width, classes, addSvgBackground);\n return vertexNode;\n } else {\n const structuredText = $b2f6560a6becbb8b$var$markdownToLines(text);\n const svgLabel = $b2f6560a6becbb8b$var$createFormattedText(width, el, structuredText, addSvgBackground);\n return svgLabel;\n }\n};\n\n});\nparcelRegister(\"32C5u\", function(module, exports) {\n\n$parcel$export(module.exports, \"fromMarkdown\", () => $236f1a66ed1dd212$export$d744d789c09bfde6);\n/**\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Value} Value\n *\n * @typedef {import('unist').Parent} UnistParent\n * @typedef {import('unist').Point} Point\n *\n * @typedef {import('mdast').PhrasingContent} PhrasingContent\n * @typedef {import('mdast').StaticPhrasingContent} StaticPhrasingContent\n * @typedef {import('mdast').Content} Content\n * @typedef {import('mdast').Break} Break\n * @typedef {import('mdast').Blockquote} Blockquote\n * @typedef {import('mdast').Code} Code\n * @typedef {import('mdast').Definition} Definition\n * @typedef {import('mdast').Emphasis} Emphasis\n * @typedef {import('mdast').Heading} Heading\n * @typedef {import('mdast').HTML} HTML\n * @typedef {import('mdast').Image} Image\n * @typedef {import('mdast').ImageReference} ImageReference\n * @typedef {import('mdast').InlineCode} InlineCode\n * @typedef {import('mdast').Link} Link\n * @typedef {import('mdast').LinkReference} LinkReference\n * @typedef {import('mdast').List} List\n * @typedef {import('mdast').ListItem} ListItem\n * @typedef {import('mdast').Paragraph} Paragraph\n * @typedef {import('mdast').Root} Root\n * @typedef {import('mdast').Strong} Strong\n * @typedef {import('mdast').Text} Text\n * @typedef {import('mdast').ThematicBreak} ThematicBreak\n * @typedef {import('mdast').ReferenceType} ReferenceType\n * @typedef {import('../index.js').CompileData} CompileData\n */ /**\n * @typedef {Root | Content} Node\n * @typedef {Extract} Parent\n *\n * @typedef {Omit & {type: 'fragment', children: Array}} Fragment\n */ /**\n * @callback Transform\n * Extra transform, to change the AST afterwards.\n * @param {Root} tree\n * Tree to transform.\n * @returns {Root | undefined | null | void}\n * New tree or nothing (in which case the current tree is used).\n *\n * @callback Handle\n * Handle a token.\n * @param {CompileContext} this\n * Context.\n * @param {Token} token\n * Current token.\n * @returns {void}\n * 
Nothing.\n *\n * @typedef {Record} Handles\n * Token types mapping to handles\n *\n * @callback OnEnterError\n * Handle the case where the `right` token is open, but it is closed (by the\n * `left` token) or because we reached the end of the document.\n * @param {Omit} this\n * Context.\n * @param {Token | undefined} left\n * Left token.\n * @param {Token} right\n * Right token.\n * @returns {void}\n * Nothing.\n *\n * @callback OnExitError\n * Handle the case where the `right` token is open but it is closed by\n * exiting the `left` token.\n * @param {Omit} this\n * Context.\n * @param {Token} left\n * Left token.\n * @param {Token} right\n * Right token.\n * @returns {void}\n * Nothing.\n *\n * @typedef {[Token, OnEnterError | undefined]} TokenTuple\n * Open token on the stack, with an optional error handler for when\n * that token isn’t closed properly.\n */ /**\n * @typedef Config\n * Configuration.\n *\n * We have our defaults, but extensions will add more.\n * @property {Array} canContainEols\n * Token types where line endings are used.\n * @property {Handles} enter\n * Opening handles.\n * @property {Handles} exit\n * Closing handles.\n * @property {Array} transforms\n * Tree transforms.\n *\n * @typedef {Partial} Extension\n * Change how markdown tokens from micromark are turned into mdast.\n *\n * @typedef CompileContext\n * mdast compiler context.\n * @property {Array} stack\n * Stack of nodes.\n * @property {Array} tokenStack\n * Stack of tokens.\n * @property {(key: Key) => CompileData[Key]} getData\n * Get data from the key/value store.\n * @property {(key: Key, value?: CompileData[Key]) => void} setData\n * Set data into the key/value store.\n * @property {(this: CompileContext) => void} buffer\n * Capture some of the output data.\n * @property {(this: CompileContext) => string} resume\n * Stop capturing and access the output data.\n * @property {(this: CompileContext, node: Kind, token: Token, onError?: OnEnterError) => Kind} enter\n * Enter a token.\n * @property {(this: CompileContext, token: Token, onError?: OnExitError) => Node} exit\n * Exit a token.\n * @property {TokenizeContext['sliceSerialize']} sliceSerialize\n * Get the string value of a token.\n * @property {Config} config\n * Configuration.\n *\n * @typedef FromMarkdownOptions\n * Configuration for how to build mdast.\n * @property {Array> | null | undefined} [mdastExtensions]\n * Extensions for this utility to change how tokens are turned into a tree.\n *\n * @typedef {ParseOptions & FromMarkdownOptions} Options\n * Configuration.\n */ // To do: micromark: create a registry of tokens?\n// To do: next major: don’t return given `Node` from `enter`.\n// To do: next major: remove setter/getter.\n\nvar $apdnY = parcelRequire(\"apdnY\");\n\nvar $2JaIP = parcelRequire(\"2JaIP\");\n\nvar $hFzKD = parcelRequire(\"hFzKD\");\n\nvar $6Jw5n = parcelRequire(\"6Jw5n\");\n\nvar $34rZa = parcelRequire(\"34rZa\");\n\nvar $1lC1m = parcelRequire(\"1lC1m\");\n\nvar $9HErY = parcelRequire(\"9HErY\");\n\nvar $gxs7E = parcelRequire(\"gxs7E\");\n\nvar $f9WaX = parcelRequire(\"f9WaX\");\nconst $236f1a66ed1dd212$var$own = {}.hasOwnProperty;\nconst $236f1a66ed1dd212$export$d744d789c09bfde6 = /**\n * @type {(\n * ((value: Value, encoding: Encoding, options?: Options | null | undefined) => Root) &\n * ((value: Value, options?: Options | null | undefined) => Root)\n * )}\n */ /**\n * @param {Value} value\n * @param {Encoding | Options | null | undefined} [encoding]\n * @param {Options | null | undefined} [options]\n * @returns {Root}\n */ 
function(value, encoding, options) {\n if (typeof encoding !== \"string\") {\n options = encoding;\n encoding = undefined;\n }\n return $236f1a66ed1dd212$var$compiler(options)((0, $6Jw5n.postprocess)((0, $2JaIP.parse)(options).document().write((0, $hFzKD.preprocess)()(value, encoding, true))));\n};\n/**\n * Note this compiler only understand complete buffering, not streaming.\n *\n * @param {Options | null | undefined} [options]\n */ function $236f1a66ed1dd212$var$compiler(options) {\n /** @type {Config} */ const config = {\n transforms: [],\n canContainEols: [\n \"emphasis\",\n \"fragment\",\n \"heading\",\n \"paragraph\",\n \"strong\"\n ],\n enter: {\n autolink: opener(link),\n autolinkProtocol: onenterdata,\n autolinkEmail: onenterdata,\n atxHeading: opener(heading),\n blockQuote: opener(blockQuote),\n characterEscape: onenterdata,\n characterReference: onenterdata,\n codeFenced: opener(codeFlow),\n codeFencedFenceInfo: buffer,\n codeFencedFenceMeta: buffer,\n codeIndented: opener(codeFlow, buffer),\n codeText: opener(codeText, buffer),\n codeTextData: onenterdata,\n data: onenterdata,\n codeFlowValue: onenterdata,\n definition: opener(definition),\n definitionDestinationString: buffer,\n definitionLabelString: buffer,\n definitionTitleString: buffer,\n emphasis: opener(emphasis),\n hardBreakEscape: opener(hardBreak),\n hardBreakTrailing: opener(hardBreak),\n htmlFlow: opener(html, buffer),\n htmlFlowData: onenterdata,\n htmlText: opener(html, buffer),\n htmlTextData: onenterdata,\n image: opener(image),\n label: buffer,\n link: opener(link),\n listItem: opener(listItem),\n listItemValue: onenterlistitemvalue,\n listOrdered: opener(list, onenterlistordered),\n listUnordered: opener(list),\n paragraph: opener(paragraph),\n reference: onenterreference,\n referenceString: buffer,\n resourceDestinationString: buffer,\n resourceTitleString: buffer,\n setextHeading: opener(heading),\n strong: opener(strong),\n thematicBreak: opener(thematicBreak)\n },\n exit: {\n atxHeading: closer(),\n atxHeadingSequence: onexitatxheadingsequence,\n autolink: closer(),\n autolinkEmail: onexitautolinkemail,\n autolinkProtocol: onexitautolinkprotocol,\n blockQuote: closer(),\n characterEscapeValue: onexitdata,\n characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,\n characterReferenceMarkerNumeric: onexitcharacterreferencemarker,\n characterReferenceValue: onexitcharacterreferencevalue,\n codeFenced: closer(onexitcodefenced),\n codeFencedFence: onexitcodefencedfence,\n codeFencedFenceInfo: onexitcodefencedfenceinfo,\n codeFencedFenceMeta: onexitcodefencedfencemeta,\n codeFlowValue: onexitdata,\n codeIndented: closer(onexitcodeindented),\n codeText: closer(onexitcodetext),\n codeTextData: onexitdata,\n data: onexitdata,\n definition: closer(),\n definitionDestinationString: onexitdefinitiondestinationstring,\n definitionLabelString: onexitdefinitionlabelstring,\n definitionTitleString: onexitdefinitiontitlestring,\n emphasis: closer(),\n hardBreakEscape: closer(onexithardbreak),\n hardBreakTrailing: closer(onexithardbreak),\n htmlFlow: closer(onexithtmlflow),\n htmlFlowData: onexitdata,\n htmlText: closer(onexithtmltext),\n htmlTextData: onexitdata,\n image: closer(onexitimage),\n label: onexitlabel,\n labelText: onexitlabeltext,\n lineEnding: onexitlineending,\n link: closer(onexitlink),\n listItem: closer(),\n listOrdered: closer(),\n listUnordered: closer(),\n paragraph: closer(),\n referenceString: onexitreferencestring,\n resourceDestinationString: onexitresourcedestinationstring,\n 
resourceTitleString: onexitresourcetitlestring,\n resource: onexitresource,\n setextHeading: closer(onexitsetextheading),\n setextHeadingLineSequence: onexitsetextheadinglinesequence,\n setextHeadingText: onexitsetextheadingtext,\n strong: closer(),\n thematicBreak: closer()\n }\n };\n $236f1a66ed1dd212$var$configure(config, (options || {}).mdastExtensions || []);\n /** @type {CompileData} */ const data = {};\n return compile;\n /**\n * Turn micromark events into an mdast tree.\n *\n * @param {Array} events\n * Events.\n * @returns {Root}\n * mdast tree.\n */ function compile(events) {\n /** @type {Root} */ let tree = {\n type: \"root\",\n children: []\n };\n /** @type {Omit} */ const context = {\n stack: [\n tree\n ],\n tokenStack: [],\n config: config,\n enter: enter,\n exit: exit,\n buffer: buffer,\n resume: resume,\n setData: setData,\n getData: getData\n };\n /** @type {Array} */ const listStack = [];\n let index = -1;\n while(++index < events.length)// We preprocess lists to add `listItem` tokens, and to infer whether\n // items the list itself are spread out.\n if (events[index][1].type === \"listOrdered\" || events[index][1].type === \"listUnordered\") {\n if (events[index][0] === \"enter\") listStack.push(index);\n else {\n const tail = listStack.pop();\n index = prepareList(events, tail, index);\n }\n }\n index = -1;\n while(++index < events.length){\n const handler = config[events[index][0]];\n if ($236f1a66ed1dd212$var$own.call(handler, events[index][1].type)) handler[events[index][1].type].call(Object.assign({\n sliceSerialize: events[index][2].sliceSerialize\n }, context), events[index][1]);\n }\n // Handle tokens still being open.\n if (context.tokenStack.length > 0) {\n const tail = context.tokenStack[context.tokenStack.length - 1];\n const handler = tail[1] || $236f1a66ed1dd212$var$defaultOnError;\n handler.call(context, undefined, tail[0]);\n }\n // Figure out `root` position.\n tree.position = {\n start: $236f1a66ed1dd212$var$point(events.length > 0 ? events[0][1].start : {\n line: 1,\n column: 1,\n offset: 0\n }),\n end: $236f1a66ed1dd212$var$point(events.length > 0 ? 
events[events.length - 2][1].end : {\n line: 1,\n column: 1,\n offset: 0\n })\n };\n // Call transforms.\n index = -1;\n while(++index < config.transforms.length)tree = config.transforms[index](tree) || tree;\n return tree;\n }\n /**\n * @param {Array} events\n * @param {number} start\n * @param {number} length\n * @returns {number}\n */ function prepareList(events, start, length) {\n let index = start - 1;\n let containerBalance = -1;\n let listSpread = false;\n /** @type {Token | undefined} */ let listItem;\n /** @type {number | undefined} */ let lineIndex;\n /** @type {number | undefined} */ let firstBlankLineIndex;\n /** @type {boolean | undefined} */ let atMarker;\n while(++index <= length){\n const event = events[index];\n if (event[1].type === \"listUnordered\" || event[1].type === \"listOrdered\" || event[1].type === \"blockQuote\") {\n if (event[0] === \"enter\") containerBalance++;\n else containerBalance--;\n atMarker = undefined;\n } else if (event[1].type === \"lineEndingBlank\") {\n if (event[0] === \"enter\") {\n if (listItem && !atMarker && !containerBalance && !firstBlankLineIndex) firstBlankLineIndex = index;\n atMarker = undefined;\n }\n } else if (event[1].type === \"linePrefix\" || event[1].type === \"listItemValue\" || event[1].type === \"listItemMarker\" || event[1].type === \"listItemPrefix\" || event[1].type === \"listItemPrefixWhitespace\") ;\n else atMarker = undefined;\n if (!containerBalance && event[0] === \"enter\" && event[1].type === \"listItemPrefix\" || containerBalance === -1 && event[0] === \"exit\" && (event[1].type === \"listUnordered\" || event[1].type === \"listOrdered\")) {\n if (listItem) {\n let tailIndex = index;\n lineIndex = undefined;\n while(tailIndex--){\n const tailEvent = events[tailIndex];\n if (tailEvent[1].type === \"lineEnding\" || tailEvent[1].type === \"lineEndingBlank\") {\n if (tailEvent[0] === \"exit\") continue;\n if (lineIndex) {\n events[lineIndex][1].type = \"lineEndingBlank\";\n listSpread = true;\n }\n tailEvent[1].type = \"lineEnding\";\n lineIndex = tailIndex;\n } else if (tailEvent[1].type === \"linePrefix\" || tailEvent[1].type === \"blockQuotePrefix\" || tailEvent[1].type === \"blockQuotePrefixWhitespace\" || tailEvent[1].type === \"blockQuoteMarker\" || tailEvent[1].type === \"listItemIndent\") ;\n else break;\n }\n if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) listItem._spread = true;\n // Fix position.\n listItem.end = Object.assign({}, lineIndex ? 
events[lineIndex][1].start : event[1].end);\n events.splice(lineIndex || index, 0, [\n \"exit\",\n listItem,\n event[2]\n ]);\n index++;\n length++;\n }\n // Create a new list item.\n if (event[1].type === \"listItemPrefix\") {\n listItem = {\n type: \"listItem\",\n _spread: false,\n start: Object.assign({}, event[1].start),\n // @ts-expect-error: we’ll add `end` in a second.\n end: undefined\n };\n // @ts-expect-error: `listItem` is most definitely defined, TS...\n events.splice(index, 0, [\n \"enter\",\n listItem,\n event[2]\n ]);\n index++;\n length++;\n firstBlankLineIndex = undefined;\n atMarker = true;\n }\n }\n }\n events[start][1]._spread = listSpread;\n return length;\n }\n /**\n * Set data.\n *\n * @template {keyof CompileData} Key\n * Field type.\n * @param {Key} key\n * Key of field.\n * @param {CompileData[Key]} [value]\n * New value.\n * @returns {void}\n * Nothing.\n */ function setData(key, value) {\n data[key] = value;\n }\n /**\n * Get data.\n *\n * @template {keyof CompileData} Key\n * Field type.\n * @param {Key} key\n * Key of field.\n * @returns {CompileData[Key]}\n * Value.\n */ function getData(key) {\n return data[key];\n }\n /**\n * Create an opener handle.\n *\n * @param {(token: Token) => Node} create\n * Create a node.\n * @param {Handle} [and]\n * Optional function to also run.\n * @returns {Handle}\n * Handle.\n */ function opener(create, and) {\n return open;\n /**\n * @this {CompileContext}\n * @param {Token} token\n * @returns {void}\n */ function open(token) {\n enter.call(this, create(token), token);\n if (and) and.call(this, token);\n }\n }\n /**\n * @this {CompileContext}\n * @returns {void}\n */ function buffer() {\n this.stack.push({\n type: \"fragment\",\n children: []\n });\n }\n /**\n * @template {Node} Kind\n * Node type.\n * @this {CompileContext}\n * Context.\n * @param {Kind} node\n * Node to enter.\n * @param {Token} token\n * Corresponding token.\n * @param {OnEnterError | undefined} [errorHandler]\n * Handle the case where this token is open, but it is closed by something else.\n * @returns {Kind}\n * The given node.\n */ function enter(node, token, errorHandler) {\n const parent = this.stack[this.stack.length - 1];\n // @ts-expect-error: Assume `Node` can exist as a child of `parent`.\n parent.children.push(node);\n this.stack.push(node);\n this.tokenStack.push([\n token,\n errorHandler\n ]);\n // @ts-expect-error: `end` will be patched later.\n node.position = {\n start: $236f1a66ed1dd212$var$point(token.start)\n };\n return node;\n }\n /**\n * Create a closer handle.\n *\n * @param {Handle} [and]\n * Optional function to also run.\n * @returns {Handle}\n * Handle.\n */ function closer(and) {\n return close;\n /**\n * @this {CompileContext}\n * @param {Token} token\n * @returns {void}\n */ function close(token) {\n if (and) and.call(this, token);\n exit.call(this, token);\n }\n }\n /**\n * @this {CompileContext}\n * Context.\n * @param {Token} token\n * Corresponding token.\n * @param {OnExitError | undefined} [onExitError]\n * Handle the case where another token is open.\n * @returns {Node}\n * The closed node.\n */ function exit(token, onExitError) {\n const node = this.stack.pop();\n const open = this.tokenStack.pop();\n if (!open) throw new Error(\"Cannot close `\" + token.type + \"` (\" + (0, $f9WaX.stringifyPosition)({\n start: token.start,\n end: token.end\n }) + \"): it\\u2019s not open\");\n else if (open[0].type !== token.type) {\n if (onExitError) onExitError.call(this, token, open[0]);\n else {\n const handler = open[1] || 
$236f1a66ed1dd212$var$defaultOnError;\n handler.call(this, token, open[0]);\n }\n }\n node.position.end = $236f1a66ed1dd212$var$point(token.end);\n return node;\n }\n /**\n * @this {CompileContext}\n * @returns {string}\n */ function resume() {\n return (0, $apdnY.toString)(this.stack.pop());\n }\n //\n // Handlers.\n //\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onenterlistordered() {\n setData(\"expectingFirstListItemValue\", true);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onenterlistitemvalue(token) {\n if (getData(\"expectingFirstListItemValue\")) {\n const ancestor = this.stack[this.stack.length - 2];\n ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);\n setData(\"expectingFirstListItemValue\");\n }\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodefencedfenceinfo() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.lang = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodefencedfencemeta() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.meta = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodefencedfence() {\n // Exit if this is the closing fence.\n if (getData(\"flowCodeInside\")) return;\n this.buffer();\n setData(\"flowCodeInside\", true);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodefenced() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.value = data.replace(/^(\\r?\\n|\\r)|(\\r?\\n|\\r)$/g, \"\");\n setData(\"flowCodeInside\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodeindented() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.value = data.replace(/(\\r?\\n|\\r)$/g, \"\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitdefinitionlabelstring(token) {\n const label = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.label = label;\n node.identifier = (0, $9HErY.normalizeIdentifier)(this.sliceSerialize(token)).toLowerCase();\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitdefinitiontitlestring() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.title = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitdefinitiondestinationstring() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.url = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitatxheadingsequence(token) {\n const node = this.stack[this.stack.length - 1];\n if (!node.depth) {\n const depth = this.sliceSerialize(token).length;\n node.depth = depth;\n }\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitsetextheadingtext() {\n setData(\"setextHeadingSlurpLineEnding\", true);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitsetextheadinglinesequence(token) {\n const node = this.stack[this.stack.length - 1];\n node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 
1 : 2;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitsetextheading() {\n setData(\"setextHeadingSlurpLineEnding\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onenterdata(token) {\n const node = this.stack[this.stack.length - 1];\n let tail = node.children[node.children.length - 1];\n if (!tail || tail.type !== \"text\") {\n // Add a new text node.\n tail = text();\n // @ts-expect-error: we’ll add `end` later.\n tail.position = {\n start: $236f1a66ed1dd212$var$point(token.start)\n };\n // @ts-expect-error: Assume `parent` accepts `text`.\n node.children.push(tail);\n }\n this.stack.push(tail);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitdata(token) {\n const tail = this.stack.pop();\n tail.value += this.sliceSerialize(token);\n tail.position.end = $236f1a66ed1dd212$var$point(token.end);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitlineending(token) {\n const context = this.stack[this.stack.length - 1];\n // If we’re at a hard break, include the line ending in there.\n if (getData(\"atHardBreak\")) {\n const tail = context.children[context.children.length - 1];\n tail.position.end = $236f1a66ed1dd212$var$point(token.end);\n setData(\"atHardBreak\");\n return;\n }\n if (!getData(\"setextHeadingSlurpLineEnding\") && config.canContainEols.includes(context.type)) {\n onenterdata.call(this, token);\n onexitdata.call(this, token);\n }\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexithardbreak() {\n setData(\"atHardBreak\", true);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexithtmlflow() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.value = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexithtmltext() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.value = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcodetext() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.value = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitlink() {\n const node = this.stack[this.stack.length - 1];\n // Note: there are also `identifier` and `label` fields on this link node!\n // These are used / cleaned here.\n // To do: clean.\n if (getData(\"inReference\")) {\n /** @type {ReferenceType} */ const referenceType = getData(\"referenceType\") || \"shortcut\";\n node.type += \"Reference\";\n // @ts-expect-error: mutate.\n node.referenceType = referenceType;\n // @ts-expect-error: mutate.\n delete node.url;\n delete node.title;\n } else {\n // @ts-expect-error: mutate.\n delete node.identifier;\n // @ts-expect-error: mutate.\n delete node.label;\n }\n setData(\"referenceType\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitimage() {\n const node = this.stack[this.stack.length - 1];\n // Note: there are also `identifier` and `label` fields on this link node!\n // These are used / cleaned here.\n // To do: clean.\n if (getData(\"inReference\")) {\n /** @type {ReferenceType} */ const referenceType = getData(\"referenceType\") || \"shortcut\";\n node.type += \"Reference\";\n // @ts-expect-error: mutate.\n node.referenceType = referenceType;\n // @ts-expect-error: mutate.\n delete node.url;\n delete node.title;\n } else {\n // @ts-expect-error: mutate.\n delete node.identifier;\n // @ts-expect-error: 
mutate.\n delete node.label;\n }\n setData(\"referenceType\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitlabeltext(token) {\n const string = this.sliceSerialize(token);\n const ancestor = this.stack[this.stack.length - 2];\n // @ts-expect-error: stash this on the node, as it might become a reference\n // later.\n ancestor.label = (0, $1lC1m.decodeString)(string);\n // @ts-expect-error: same as above.\n ancestor.identifier = (0, $9HErY.normalizeIdentifier)(string).toLowerCase();\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitlabel() {\n const fragment = this.stack[this.stack.length - 1];\n const value = this.resume();\n const node = this.stack[this.stack.length - 1];\n // Assume a reference.\n setData(\"inReference\", true);\n if (node.type === \"link\") {\n /** @type {Array} */ // @ts-expect-error: Assume static phrasing content.\n const children = fragment.children;\n node.children = children;\n } else node.alt = value;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitresourcedestinationstring() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.url = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitresourcetitlestring() {\n const data = this.resume();\n const node = this.stack[this.stack.length - 1];\n node.title = data;\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitresource() {\n setData(\"inReference\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onenterreference() {\n setData(\"referenceType\", \"collapsed\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitreferencestring(token) {\n const label = this.resume();\n const node = this.stack[this.stack.length - 1];\n // @ts-expect-error: stash this on the node, as it might become a reference\n // later.\n node.label = label;\n // @ts-expect-error: same as above.\n node.identifier = (0, $9HErY.normalizeIdentifier)(this.sliceSerialize(token)).toLowerCase();\n setData(\"referenceType\", \"full\");\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcharacterreferencemarker(token) {\n setData(\"characterReferenceType\", token.type);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitcharacterreferencevalue(token) {\n const data = this.sliceSerialize(token);\n const type = getData(\"characterReferenceType\");\n /** @type {string} */ let value;\n if (type) {\n value = (0, $34rZa.decodeNumericCharacterReference)(data, type === \"characterReferenceMarkerNumeric\" ? 
10 : 16);\n setData(\"characterReferenceType\");\n } else {\n const result = (0, $gxs7E.decodeNamedCharacterReference)(data);\n value = result;\n }\n const tail = this.stack.pop();\n tail.value += value;\n tail.position.end = $236f1a66ed1dd212$var$point(token.end);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitautolinkprotocol(token) {\n onexitdata.call(this, token);\n const node = this.stack[this.stack.length - 1];\n node.url = this.sliceSerialize(token);\n }\n /**\n * @this {CompileContext}\n * @type {Handle}\n */ function onexitautolinkemail(token) {\n onexitdata.call(this, token);\n const node = this.stack[this.stack.length - 1];\n node.url = \"mailto:\" + this.sliceSerialize(token);\n }\n //\n // Creaters.\n //\n /** @returns {Blockquote} */ function blockQuote() {\n return {\n type: \"blockquote\",\n children: []\n };\n }\n /** @returns {Code} */ function codeFlow() {\n return {\n type: \"code\",\n lang: null,\n meta: null,\n value: \"\"\n };\n }\n /** @returns {InlineCode} */ function codeText() {\n return {\n type: \"inlineCode\",\n value: \"\"\n };\n }\n /** @returns {Definition} */ function definition() {\n return {\n type: \"definition\",\n identifier: \"\",\n label: null,\n title: null,\n url: \"\"\n };\n }\n /** @returns {Emphasis} */ function emphasis() {\n return {\n type: \"emphasis\",\n children: []\n };\n }\n /** @returns {Heading} */ function heading() {\n // @ts-expect-error `depth` will be set later.\n return {\n type: \"heading\",\n depth: undefined,\n children: []\n };\n }\n /** @returns {Break} */ function hardBreak() {\n return {\n type: \"break\"\n };\n }\n /** @returns {HTML} */ function html() {\n return {\n type: \"html\",\n value: \"\"\n };\n }\n /** @returns {Image} */ function image() {\n return {\n type: \"image\",\n title: null,\n url: \"\",\n alt: null\n };\n }\n /** @returns {Link} */ function link() {\n return {\n type: \"link\",\n title: null,\n url: \"\",\n children: []\n };\n }\n /**\n * @param {Token} token\n * @returns {List}\n */ function list(token) {\n return {\n type: \"list\",\n ordered: token.type === \"listOrdered\",\n start: null,\n spread: token._spread,\n children: []\n };\n }\n /**\n * @param {Token} token\n * @returns {ListItem}\n */ function listItem(token) {\n return {\n type: \"listItem\",\n spread: token._spread,\n checked: null,\n children: []\n };\n }\n /** @returns {Paragraph} */ function paragraph() {\n return {\n type: \"paragraph\",\n children: []\n };\n }\n /** @returns {Strong} */ function strong() {\n return {\n type: \"strong\",\n children: []\n };\n }\n /** @returns {Text} */ function text() {\n return {\n type: \"text\",\n value: \"\"\n };\n }\n /** @returns {ThematicBreak} */ function thematicBreak() {\n return {\n type: \"thematicBreak\"\n };\n }\n}\n/**\n * Copy a point-like value.\n *\n * @param {Point} d\n * Point-like value.\n * @returns {Point}\n * unist point.\n */ function $236f1a66ed1dd212$var$point(d) {\n return {\n line: d.line,\n column: d.column,\n offset: d.offset\n };\n}\n/**\n * @param {Config} combined\n * @param {Array>} extensions\n * @returns {void}\n */ function $236f1a66ed1dd212$var$configure(combined, extensions) {\n let index = -1;\n while(++index < extensions.length){\n const value = extensions[index];\n if (Array.isArray(value)) $236f1a66ed1dd212$var$configure(combined, value);\n else $236f1a66ed1dd212$var$extension(combined, value);\n }\n}\n/**\n * @param {Config} combined\n * @param {Extension} extension\n * @returns {void}\n */ function 
$236f1a66ed1dd212$var$extension(combined, extension) {\n /** @type {keyof Extension} */ let key;\n for(key in extension)if ($236f1a66ed1dd212$var$own.call(extension, key)) {\n if (key === \"canContainEols\") {\n const right = extension[key];\n if (right) combined[key].push(...right);\n } else if (key === \"transforms\") {\n const right = extension[key];\n if (right) combined[key].push(...right);\n } else if (key === \"enter\" || key === \"exit\") {\n const right = extension[key];\n if (right) Object.assign(combined[key], right);\n }\n }\n}\n/** @type {OnEnterError} */ function $236f1a66ed1dd212$var$defaultOnError(left, right) {\n if (left) throw new Error(\"Cannot close `\" + left.type + \"` (\" + (0, $f9WaX.stringifyPosition)({\n start: left.start,\n end: left.end\n }) + \"): a different token (`\" + right.type + \"`, \" + (0, $f9WaX.stringifyPosition)({\n start: right.start,\n end: right.end\n }) + \") is open\");\n else throw new Error(\"Cannot close document, a token (`\" + right.type + \"`, \" + (0, $f9WaX.stringifyPosition)({\n start: right.start,\n end: right.end\n }) + \") is still open\");\n}\n\n});\nparcelRegister(\"apdnY\", function(module, exports) {\n\n$parcel$export(module.exports, \"toString\", () => $79369f5237095e86$export$f84e8e69fd4488a5);\n/**\n * @typedef {import('mdast').Root|import('mdast').Content} Node\n *\n * @typedef Options\n * Configuration (optional).\n * @property {boolean | null | undefined} [includeImageAlt=true]\n * Whether to use `alt` for `image`s.\n * @property {boolean | null | undefined} [includeHtml=true]\n * Whether to use `value` of HTML.\n */ /** @type {Options} */ const $79369f5237095e86$var$emptyOptions = {};\nfunction $79369f5237095e86$export$f84e8e69fd4488a5(value, options) {\n const settings = options || $79369f5237095e86$var$emptyOptions;\n const includeImageAlt = typeof settings.includeImageAlt === \"boolean\" ? settings.includeImageAlt : true;\n const includeHtml = typeof settings.includeHtml === \"boolean\" ? settings.includeHtml : true;\n return $79369f5237095e86$var$one(value, includeImageAlt, includeHtml);\n}\n/**\n * One node or several nodes.\n *\n * @param {unknown} value\n * Thing to serialize.\n * @param {boolean} includeImageAlt\n * Include image `alt`s.\n * @param {boolean} includeHtml\n * Include HTML.\n * @returns {string}\n * Serialized node.\n */ function $79369f5237095e86$var$one(value, includeImageAlt, includeHtml) {\n if ($79369f5237095e86$var$node(value)) {\n if (\"value\" in value) return value.type === \"html\" && !includeHtml ? 
\"\" : value.value;\n if (includeImageAlt && \"alt\" in value && value.alt) return value.alt;\n if (\"children\" in value) return $79369f5237095e86$var$all(value.children, includeImageAlt, includeHtml);\n }\n if (Array.isArray(value)) return $79369f5237095e86$var$all(value, includeImageAlt, includeHtml);\n return \"\";\n}\n/**\n * Serialize a list of nodes.\n *\n * @param {Array} values\n * Thing to serialize.\n * @param {boolean} includeImageAlt\n * Include image `alt`s.\n * @param {boolean} includeHtml\n * Include HTML.\n * @returns {string}\n * Serialized nodes.\n */ function $79369f5237095e86$var$all(values, includeImageAlt, includeHtml) {\n /** @type {Array} */ const result = [];\n let index = -1;\n while(++index < values.length)result[index] = $79369f5237095e86$var$one(values[index], includeImageAlt, includeHtml);\n return result.join(\"\");\n}\n/**\n * Check if `value` looks like a node.\n *\n * @param {unknown} value\n * Thing.\n * @returns {value is Node}\n * Whether `value` is a node.\n */ function $79369f5237095e86$var$node(value) {\n return Boolean(value && typeof value === \"object\");\n}\n\n});\n\nparcelRegister(\"2JaIP\", function(module, exports) {\n\n$parcel$export(module.exports, \"parse\", () => $1fc819ea965a3f81$export$98e6a39c04603d36);\n/**\n * @typedef {import('micromark-util-types').Create} Create\n * @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n */ \nvar $1BzS9 = parcelRequire(\"1BzS9\");\n\nvar $1YYHd = parcelRequire(\"1YYHd\");\n\nvar $4vIRz = parcelRequire(\"4vIRz\");\n\nvar $jugKX = parcelRequire(\"jugKX\");\n\nvar $2JBld = parcelRequire(\"2JBld\");\n\nvar $6oDSH = parcelRequire(\"6oDSH\");\n\nvar $8tRNW = parcelRequire(\"8tRNW\");\nfunction $1fc819ea965a3f81$export$98e6a39c04603d36(options) {\n const settings = options || {};\n const constructs = /** @type {FullNormalizedExtension} */ (0, $1BzS9.combineExtensions)([\n $8tRNW,\n ...settings.extensions || []\n ]);\n /** @type {ParseContext} */ const parser = {\n defined: [],\n lazy: {},\n constructs: constructs,\n content: create((0, $1YYHd.content)),\n document: create((0, $4vIRz.document)),\n flow: create((0, $jugKX.flow)),\n string: create((0, $2JBld.string)),\n text: create((0, $2JBld.text))\n };\n return parser;\n /**\n * @param {InitialConstruct} initial\n */ function create(initial) {\n return creator;\n /** @type {Create} */ function creator(from) {\n return (0, $6oDSH.createTokenizer)(parser, initial, from);\n }\n }\n}\n\n});\nparcelRegister(\"1BzS9\", function(module, exports) {\n\n$parcel$export(module.exports, \"combineExtensions\", () => $12b512a5203a7cce$export$86a865d89ef3c690);\n/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').Handles} Handles\n * @typedef {import('micromark-util-types').HtmlExtension} HtmlExtension\n * @typedef {import('micromark-util-types').NormalizedExtension} NormalizedExtension\n */ \nvar $Ux2lp = parcelRequire(\"Ux2lp\");\nconst $12b512a5203a7cce$var$hasOwnProperty = {}.hasOwnProperty;\nfunction $12b512a5203a7cce$export$86a865d89ef3c690(extensions) {\n /** @type {NormalizedExtension} */ const all = {};\n let index = -1;\n while(++index < extensions.length)$12b512a5203a7cce$var$syntaxExtension(all, extensions[index]);\n return all;\n}\n/**\n * 
Merge `extension` into `all`.\n *\n * @param {NormalizedExtension} all\n * Extension to merge into.\n * @param {Extension} extension\n * Extension to merge.\n * @returns {void}\n */ function $12b512a5203a7cce$var$syntaxExtension(all, extension) {\n /** @type {keyof Extension} */ let hook;\n for(hook in extension){\n const maybe = $12b512a5203a7cce$var$hasOwnProperty.call(all, hook) ? all[hook] : undefined;\n /** @type {Record} */ const left = maybe || (all[hook] = {});\n /** @type {Record | undefined} */ const right = extension[hook];\n /** @type {string} */ let code;\n if (right) for(code in right){\n if (!$12b512a5203a7cce$var$hasOwnProperty.call(left, code)) left[code] = [];\n const value = right[code];\n $12b512a5203a7cce$var$constructs(// @ts-expect-error Looks like a list.\n left[code], Array.isArray(value) ? value : value ? [\n value\n ] : []);\n }\n }\n}\n/**\n * Merge `list` into `existing` (both lists of constructs).\n * Mutates `existing`.\n *\n * @param {Array} existing\n * @param {Array} list\n * @returns {void}\n */ function $12b512a5203a7cce$var$constructs(existing, list) {\n let index = -1;\n /** @type {Array} */ const before = [];\n while(++index < list.length)(list[index].add === \"after\" ? existing : before).push(list[index]);\n (0, $Ux2lp.splice)(existing, 0, 0, before);\n}\nfunction $12b512a5203a7cce$export$eaf8c406dfb0a620(htmlExtensions) {\n /** @type {HtmlExtension} */ const handlers = {};\n let index = -1;\n while(++index < htmlExtensions.length)$12b512a5203a7cce$var$htmlExtension(handlers, htmlExtensions[index]);\n return handlers;\n}\n/**\n * Merge `extension` into `all`.\n *\n * @param {HtmlExtension} all\n * Extension to merge into.\n * @param {HtmlExtension} extension\n * Extension to merge.\n * @returns {void}\n */ function $12b512a5203a7cce$var$htmlExtension(all, extension) {\n /** @type {keyof HtmlExtension} */ let hook;\n for(hook in extension){\n const maybe = $12b512a5203a7cce$var$hasOwnProperty.call(all, hook) ? all[hook] : undefined;\n const left = maybe || (all[hook] = {});\n const right = extension[hook];\n /** @type {keyof Handles} */ let type;\n if (right) for(type in right)// @ts-expect-error assume document vs regular handler are managed correctly.\n left[type] = right[type];\n }\n}\n\n});\nparcelRegister(\"Ux2lp\", function(module, exports) {\n\n$parcel$export(module.exports, \"splice\", () => $0a9edb7ff3ef159f$export$869882364835d202);\n$parcel$export(module.exports, \"push\", () => $0a9edb7ff3ef159f$export$4cbf152802aa238);\n/**\n * Like `Array#splice`, but smarter for giant arrays.\n *\n * `Array#splice` takes all items to be inserted as individual argument which\n * causes a stack overflow in V8 when trying to insert 100k items for instance.\n *\n * Otherwise, this does not return the removed items, and takes `items` as an\n * array instead of rest parameters.\n *\n * @template {unknown} T\n * Item type.\n * @param {Array} list\n * List to operate on.\n * @param {number} start\n * Index to remove/insert at (can be negative).\n * @param {number} remove\n * Number of items to remove.\n * @param {Array} items\n * Items to inject into `list`.\n * @returns {void}\n * Nothing.\n */ function $0a9edb7ff3ef159f$export$869882364835d202(list, start, remove, items) {\n const end = list.length;\n let chunkStart = 0;\n /** @type {Array} */ let parameters;\n // Make start between zero and `end` (included).\n if (start < 0) start = -start > end ? 0 : end + start;\n else start = start > end ? end : start;\n remove = remove > 0 ? 
remove : 0;\n // No need to chunk the items if there’s only a couple (10k) items.\n if (items.length < 10000) {\n parameters = Array.from(items);\n parameters.unshift(start, remove);\n // @ts-expect-error Hush, it’s fine.\n list.splice(...parameters);\n } else {\n // Delete `remove` items starting from `start`\n if (remove) list.splice(start, remove);\n // Insert the items in chunks to not cause stack overflows.\n while(chunkStart < items.length){\n parameters = items.slice(chunkStart, chunkStart + 10000);\n parameters.unshift(start, 0);\n // @ts-expect-error Hush, it’s fine.\n list.splice(...parameters);\n chunkStart += 10000;\n start += 10000;\n }\n }\n}\nfunction $0a9edb7ff3ef159f$export$4cbf152802aa238(list, items) {\n if (list.length > 0) {\n $0a9edb7ff3ef159f$export$869882364835d202(list, list.length, 0, items);\n return list;\n }\n return items;\n}\n\n});\n\n\nparcelRegister(\"1YYHd\", function(module, exports) {\n\n$parcel$export(module.exports, \"content\", () => $171a79fbb1402b9a$export$a7db06668cad9adb);\n/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $171a79fbb1402b9a$export$a7db06668cad9adb = {\n tokenize: $171a79fbb1402b9a$var$initializeContent\n};\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */ function $171a79fbb1402b9a$var$initializeContent(effects) {\n const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);\n /** @type {Token} */ let previous;\n return contentStart;\n /** @type {State} */ function afterContentStartConstruct(code) {\n if (code === null) {\n effects.consume(code);\n return;\n }\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return (0, $8GWoH.factorySpace)(effects, contentStart, \"linePrefix\");\n }\n /** @type {State} */ function paragraphInitial(code) {\n effects.enter(\"paragraph\");\n return lineStart(code);\n }\n /** @type {State} */ function lineStart(code) {\n const token = effects.enter(\"chunkText\", {\n contentType: \"text\",\n previous: previous\n });\n if (previous) previous.next = token;\n previous = token;\n return data(code);\n }\n /** @type {State} */ function data(code) {\n if (code === null) {\n effects.exit(\"chunkText\");\n effects.exit(\"paragraph\");\n effects.consume(code);\n return;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.consume(code);\n effects.exit(\"chunkText\");\n return lineStart;\n }\n // Data.\n effects.consume(code);\n return data;\n }\n}\n\n});\nparcelRegister(\"8GWoH\", function(module, exports) {\n\n$parcel$export(module.exports, \"factorySpace\", () => $653f8b80711d97d0$export$ae105c1eb063a0a2);\n/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $653f8b80711d97d0$export$ae105c1eb063a0a2(effects, ok, type, max) {\n const limit = max ? 
max - 1 : Number.POSITIVE_INFINITY;\n let size = 0;\n return start;\n /** @type {State} */ function start(code) {\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.enter(type);\n return prefix(code);\n }\n return ok(code);\n }\n /** @type {State} */ function prefix(code) {\n if ((0, $5Lprs.markdownSpace)(code) && size++ < limit) {\n effects.consume(code);\n return prefix;\n }\n effects.exit(type);\n return ok(code);\n }\n}\n\n});\nparcelRegister(\"5Lprs\", function(module, exports) {\n\n$parcel$export(module.exports, \"asciiAlpha\", () => $432513c48b12fdfc$export$d65d6b62c24d5436);\n$parcel$export(module.exports, \"asciiAlphanumeric\", () => $432513c48b12fdfc$export$75c76db11865a9f4);\n$parcel$export(module.exports, \"asciiAtext\", () => $432513c48b12fdfc$export$4397998b34fe597d);\n$parcel$export(module.exports, \"asciiControl\", () => $432513c48b12fdfc$export$67dbf494fc8394df);\n$parcel$export(module.exports, \"asciiDigit\", () => $432513c48b12fdfc$export$ca8b5b1a6c320e6e);\n$parcel$export(module.exports, \"asciiHexDigit\", () => $432513c48b12fdfc$export$eca2752363989806);\n$parcel$export(module.exports, \"asciiPunctuation\", () => $432513c48b12fdfc$export$35794a7d1db99380);\n$parcel$export(module.exports, \"markdownLineEnding\", () => $432513c48b12fdfc$export$34a1dff1c0936953);\n$parcel$export(module.exports, \"markdownLineEndingOrSpace\", () => $432513c48b12fdfc$export$a30284361b3814b7);\n$parcel$export(module.exports, \"markdownSpace\", () => $432513c48b12fdfc$export$2c6cf65c1127992a);\n$parcel$export(module.exports, \"unicodePunctuation\", () => $432513c48b12fdfc$export$aa04114dd888a7a0);\n$parcel$export(module.exports, \"unicodeWhitespace\", () => $432513c48b12fdfc$export$a0ff789c034ffdf4);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n */ \nvar $1Vlly = parcelRequire(\"1Vlly\");\nconst $432513c48b12fdfc$export$d65d6b62c24d5436 = $432513c48b12fdfc$var$regexCheck(/[A-Za-z]/);\nconst $432513c48b12fdfc$export$75c76db11865a9f4 = $432513c48b12fdfc$var$regexCheck(/[\\dA-Za-z]/);\nconst $432513c48b12fdfc$export$4397998b34fe597d = $432513c48b12fdfc$var$regexCheck(/[#-'*+\\--9=?A-Z^-~]/);\nfunction $432513c48b12fdfc$export$67dbf494fc8394df(code) {\n return(// Special whitespace codes (which have negative values), C0 and Control\n // character DEL\n code !== null && (code < 32 || code === 127));\n}\nconst $432513c48b12fdfc$export$ca8b5b1a6c320e6e = $432513c48b12fdfc$var$regexCheck(/\\d/);\nconst $432513c48b12fdfc$export$eca2752363989806 = $432513c48b12fdfc$var$regexCheck(/[\\dA-Fa-f]/);\nconst $432513c48b12fdfc$export$35794a7d1db99380 = $432513c48b12fdfc$var$regexCheck(/[!-/:-@[-`{-~]/);\nfunction $432513c48b12fdfc$export$34a1dff1c0936953(code) {\n return code !== null && code < -2;\n}\nfunction $432513c48b12fdfc$export$a30284361b3814b7(code) {\n return code !== null && (code < 0 || code === 32);\n}\nfunction $432513c48b12fdfc$export$2c6cf65c1127992a(code) {\n return code === -2 || code === -1 || code === 32;\n}\nconst $432513c48b12fdfc$export$aa04114dd888a7a0 = $432513c48b12fdfc$var$regexCheck((0, $1Vlly.unicodePunctuationRegex));\nconst $432513c48b12fdfc$export$a0ff789c034ffdf4 = $432513c48b12fdfc$var$regexCheck(/\\s/);\n/**\n * Create a code check from a regex.\n *\n * @param {RegExp} regex\n * @returns {(code: Code) => boolean}\n */ function $432513c48b12fdfc$var$regexCheck(regex) {\n return check;\n /**\n * Check whether a code matches the bound regex.\n *\n * @param {Code} code\n * Character code.\n * @returns {boolean}\n * Whether the character code matches the bound 
regex.\n */ function check(code) {\n return code !== null && regex.test(String.fromCharCode(code));\n }\n}\n\n});\nparcelRegister(\"1Vlly\", function(module, exports) {\n\n$parcel$export(module.exports, \"unicodePunctuationRegex\", () => $166bac51b6e2a8af$export$85b5101f24802e8c);\n// This module is generated by `script/`.\n//\n// CommonMark handles attention (emphasis, strong) markers based on what comes\n// before or after them.\n// One such difference is if those characters are Unicode punctuation.\n// This script is generated from the Unicode data.\n/**\n * Regular expression that matches a unicode punctuation character.\n */ const $166bac51b6e2a8af$export$85b5101f24802e8c = /[!-\\/:-@\\[-`\\{-~\\xA1\\xA7\\xAB\\xB6\\xB7\\xBB\\xBF\\u037E\\u0387\\u055A-\\u055F\\u0589\\u058A\\u05BE\\u05C0\\u05C3\\u05C6\\u05F3\\u05F4\\u0609\\u060A\\u060C\\u060D\\u061B\\u061D-\\u061F\\u066A-\\u066D\\u06D4\\u0700-\\u070D\\u07F7-\\u07F9\\u0830-\\u083E\\u085E\\u0964\\u0965\\u0970\\u09FD\\u0A76\\u0AF0\\u0C77\\u0C84\\u0DF4\\u0E4F\\u0E5A\\u0E5B\\u0F04-\\u0F12\\u0F14\\u0F3A-\\u0F3D\\u0F85\\u0FD0-\\u0FD4\\u0FD9\\u0FDA\\u104A-\\u104F\\u10FB\\u1360-\\u1368\\u1400\\u166E\\u169B\\u169C\\u16EB-\\u16ED\\u1735\\u1736\\u17D4-\\u17D6\\u17D8-\\u17DA\\u1800-\\u180A\\u1944\\u1945\\u1A1E\\u1A1F\\u1AA0-\\u1AA6\\u1AA8-\\u1AAD\\u1B5A-\\u1B60\\u1B7D\\u1B7E\\u1BFC-\\u1BFF\\u1C3B-\\u1C3F\\u1C7E\\u1C7F\\u1CC0-\\u1CC7\\u1CD3\\u2010-\\u2027\\u2030-\\u2043\\u2045-\\u2051\\u2053-\\u205E\\u207D\\u207E\\u208D\\u208E\\u2308-\\u230B\\u2329\\u232A\\u2768-\\u2775\\u27C5\\u27C6\\u27E6-\\u27EF\\u2983-\\u2998\\u29D8-\\u29DB\\u29FC\\u29FD\\u2CF9-\\u2CFC\\u2CFE\\u2CFF\\u2D70\\u2E00-\\u2E2E\\u2E30-\\u2E4F\\u2E52-\\u2E5D\\u3001-\\u3003\\u3008-\\u3011\\u3014-\\u301F\\u3030\\u303D\\u30A0\\u30FB\\uA4FE\\uA4FF\\uA60D-\\uA60F\\uA673\\uA67E\\uA6F2-\\uA6F7\\uA874-\\uA877\\uA8CE\\uA8CF\\uA8F8-\\uA8FA\\uA8FC\\uA92E\\uA92F\\uA95F\\uA9C1-\\uA9CD\\uA9DE\\uA9DF\\uAA5C-\\uAA5F\\uAADE\\uAADF\\uAAF0\\uAAF1\\uABEB\\uFD3E\\uFD3F\\uFE10-\\uFE19\\uFE30-\\uFE52\\uFE54-\\uFE61\\uFE63\\uFE68\\uFE6A\\uFE6B\\uFF01-\\uFF03\\uFF05-\\uFF0A\\uFF0C-\\uFF0F\\uFF1A\\uFF1B\\uFF1F\\uFF20\\uFF3B-\\uFF3D\\uFF3F\\uFF5B\\uFF5D\\uFF5F-\\uFF65]/;\n\n});\n\n\n\n\nparcelRegister(\"4vIRz\", function(module, exports) {\n\n$parcel$export(module.exports, \"document\", () => $348cdb85d1aeadd1$export$5a7bfc01df82fcd1);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ /**\n * @typedef {[Construct, ContainerState]} StackItem\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $Ux2lp = parcelRequire(\"Ux2lp\");\nconst $348cdb85d1aeadd1$export$5a7bfc01df82fcd1 = {\n tokenize: $348cdb85d1aeadd1$var$initializeDocument\n};\n/** @type {Construct} */ const $348cdb85d1aeadd1$var$containerConstruct = {\n tokenize: $348cdb85d1aeadd1$var$tokenizeContainer\n};\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */ function $348cdb85d1aeadd1$var$initializeDocument(effects) {\n const self = this;\n /** 
@type {Array} */ const stack = [];\n let continued = 0;\n /** @type {TokenizeContext | undefined} */ let childFlow;\n /** @type {Token | undefined} */ let childToken;\n /** @type {number} */ let lineStartOffset;\n return start;\n /** @type {State} */ function start(code) {\n // First we iterate through the open blocks, starting with the root\n // document, and descending through last children down to the last open\n // block.\n // Each block imposes a condition that the line must satisfy if the block is\n // to remain open.\n // For example, a block quote requires a `>` character.\n // A paragraph requires a non-blank line.\n // In this phase we may match all or just some of the open blocks.\n // But we cannot close unmatched blocks yet, because we may have a lazy\n // continuation line.\n if (continued < stack.length) {\n const item = stack[continued];\n self.containerState = item[1];\n return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code);\n }\n // Done.\n return checkNewContainers(code);\n }\n /** @type {State} */ function documentContinue(code) {\n continued++;\n // Note: this field is called `_closeFlow` but it also closes containers.\n // Perhaps a good idea to rename it but it’s already used in the wild by\n // extensions.\n if (self.containerState._closeFlow) {\n self.containerState._closeFlow = undefined;\n if (childFlow) closeFlow();\n // Note: this algorithm for moving events around is similar to the\n // algorithm when dealing with lazy lines in `writeToChild`.\n const indexBeforeExits = self.events.length;\n let indexBeforeFlow = indexBeforeExits;\n /** @type {Point | undefined} */ let point;\n // Find the flow chunk.\n while(indexBeforeFlow--)if (self.events[indexBeforeFlow][0] === \"exit\" && self.events[indexBeforeFlow][1].type === \"chunkFlow\") {\n point = self.events[indexBeforeFlow][1].end;\n break;\n }\n exitContainers(continued);\n // Fix positions.\n let index = indexBeforeExits;\n while(index < self.events.length){\n self.events[index][1].end = Object.assign({}, point);\n index++;\n }\n // Inject the exits earlier (they’re still also at the end).\n (0, $Ux2lp.splice)(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));\n // Discard the duplicate exits.\n self.events.length = index;\n return checkNewContainers(code);\n }\n return start(code);\n }\n /** @type {State} */ function checkNewContainers(code) {\n // Next, after consuming the continuation markers for existing blocks, we\n // look for new block starts (e.g. 
`>` for a block quote).\n // If we encounter a new block start, we close any blocks unmatched in\n // step 1 before creating the new block as a child of the last matched\n // block.\n if (continued === stack.length) {\n // No need to `check` whether there’s a container, of `exitContainers`\n // would be moot.\n // We can instead immediately `attempt` to parse one.\n if (!childFlow) return documentContinued(code);\n // If we have concrete content, such as block HTML or fenced code,\n // we can’t have containers “pierce” into them, so we can immediately\n // start.\n if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) return flowStart(code);\n // If we do have flow, it could still be a blank line,\n // but we’d be interrupting it w/ a new container if there’s a current\n // construct.\n // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer\n // needed in [email protected]).\n self.interrupt = Boolean(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);\n }\n // Check if there is a new container.\n self.containerState = {};\n return effects.check($348cdb85d1aeadd1$var$containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code);\n }\n /** @type {State} */ function thereIsANewContainer(code) {\n if (childFlow) closeFlow();\n exitContainers(continued);\n return documentContinued(code);\n }\n /** @type {State} */ function thereIsNoNewContainer(code) {\n self.parser.lazy[self.now().line] = continued !== stack.length;\n lineStartOffset = self.now().offset;\n return flowStart(code);\n }\n /** @type {State} */ function documentContinued(code) {\n // Try new containers.\n self.containerState = {};\n return effects.attempt($348cdb85d1aeadd1$var$containerConstruct, containerContinue, flowStart)(code);\n }\n /** @type {State} */ function containerContinue(code) {\n continued++;\n stack.push([\n self.currentConstruct,\n self.containerState\n ]);\n // Try another.\n return documentContinued(code);\n }\n /** @type {State} */ function flowStart(code) {\n if (code === null) {\n if (childFlow) closeFlow();\n exitContainers(0);\n effects.consume(code);\n return;\n }\n childFlow = childFlow || self.parser.flow(self.now());\n effects.enter(\"chunkFlow\", {\n contentType: \"flow\",\n previous: childToken,\n _tokenizer: childFlow\n });\n return flowContinue(code);\n }\n /** @type {State} */ function flowContinue(code) {\n if (code === null) {\n writeToChild(effects.exit(\"chunkFlow\"), true);\n exitContainers(0);\n effects.consume(code);\n return;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.consume(code);\n writeToChild(effects.exit(\"chunkFlow\"));\n // Get ready for the next line.\n continued = 0;\n self.interrupt = undefined;\n return start;\n }\n effects.consume(code);\n return flowContinue;\n }\n /**\n * @param {Token} token\n * @param {boolean | undefined} [eof]\n * @returns {void}\n */ function writeToChild(token, eof) {\n const stream = self.sliceStream(token);\n if (eof) stream.push(null);\n token.previous = childToken;\n if (childToken) childToken.next = token;\n childToken = token;\n childFlow.defineSkip(token.start);\n childFlow.write(stream);\n // Alright, so we just added a lazy line:\n //\n // ```markdown\n // > a\n // b.\n //\n // Or:\n //\n // > ~~~c\n // d\n //\n // Or:\n //\n // > | e |\n // f\n // ```\n //\n // The construct in the second example (fenced code) does not accept lazy\n // lines, so it marked itself as done at the end of its first line, and\n // then the content construct parses `d`.\n // Most 
constructs in markdown match on the first line: if the first line\n // forms a construct, a non-lazy line can’t “unmake” it.\n //\n // The construct in the third example is potentially a GFM table, and\n // those are *weird*.\n // It *could* be a table, from the first line, if the following line\n // matches a condition.\n // In this case, that second line is lazy, which “unmakes” the first line\n // and turns the whole into one content block.\n //\n // We’ve now parsed the non-lazy and the lazy line, and can figure out\n // whether the lazy line started a new flow block.\n // If it did, we exit the current containers between the two flow blocks.\n if (self.parser.lazy[token.start.line]) {\n let index = childFlow.events.length;\n while(index--){\n if (// The token starts before the line ending…\n childFlow.events[index][1].start.offset < lineStartOffset && // …and either is not ended yet…\n (!childFlow.events[index][1].end || // …or ends after it.\n childFlow.events[index][1].end.offset > lineStartOffset)) // Exit: there’s still something open, which means it’s a lazy line\n // part of something.\n return;\n }\n // Note: this algorithm for moving events around is similar to the\n // algorithm when closing flow in `documentContinue`.\n const indexBeforeExits = self.events.length;\n let indexBeforeFlow = indexBeforeExits;\n /** @type {boolean | undefined} */ let seen;\n /** @type {Point | undefined} */ let point;\n // Find the previous chunk (the one before the lazy line).\n while(indexBeforeFlow--)if (self.events[indexBeforeFlow][0] === \"exit\" && self.events[indexBeforeFlow][1].type === \"chunkFlow\") {\n if (seen) {\n point = self.events[indexBeforeFlow][1].end;\n break;\n }\n seen = true;\n }\n exitContainers(continued);\n // Fix positions.\n index = indexBeforeExits;\n while(index < self.events.length){\n self.events[index][1].end = Object.assign({}, point);\n index++;\n }\n // Inject the exits earlier (they’re still also at the end).\n (0, $Ux2lp.splice)(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));\n // Discard the duplicate exits.\n self.events.length = index;\n }\n }\n /**\n * @param {number} size\n * @returns {void}\n */ function exitContainers(size) {\n let index = stack.length;\n // Exit open containers.\n while(index-- > size){\n const entry = stack[index];\n self.containerState = entry[1];\n entry[0].exit.call(self, effects);\n }\n stack.length = size;\n }\n function closeFlow() {\n childFlow.write([\n null\n ]);\n childToken = undefined;\n childFlow = undefined;\n self.containerState._closeFlow = undefined;\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $348cdb85d1aeadd1$var$tokenizeContainer(effects, ok, nok) {\n // Always populated by defaults.\n return (0, $8GWoH.factorySpace)(effects, effects.attempt(this.parser.constructs.document, ok, nok), \"linePrefix\", this.parser.constructs.disable.null.includes(\"codeIndented\") ? 
undefined : 4);\n}\n\n});\n\nparcelRegister(\"jugKX\", function(module, exports) {\n\n$parcel$export(module.exports, \"flow\", () => $e2fdd11cbe455c51$export$ccc7b0636abaffc3);\n/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */ \nvar $eRfeJ = parcelRequire(\"eRfeJ\");\nvar $j7la5 = parcelRequire(\"j7la5\");\n\nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nconst $e2fdd11cbe455c51$export$ccc7b0636abaffc3 = {\n tokenize: $e2fdd11cbe455c51$var$initializeFlow\n};\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */ function $e2fdd11cbe455c51$var$initializeFlow(effects) {\n const self = this;\n const initial = effects.attempt(// Try to parse a blank line.\n (0, $eRfeJ.blankLine), atBlankEnding, // Try to parse initial flow (essentially, only code).\n effects.attempt(this.parser.constructs.flowInitial, afterConstruct, (0, $8GWoH.factorySpace)(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt((0, $j7la5.content), afterConstruct)), \"linePrefix\")));\n return initial;\n /** @type {State} */ function atBlankEnding(code) {\n if (code === null) {\n effects.consume(code);\n return;\n }\n effects.enter(\"lineEndingBlank\");\n effects.consume(code);\n effects.exit(\"lineEndingBlank\");\n self.currentConstruct = undefined;\n return initial;\n }\n /** @type {State} */ function afterConstruct(code) {\n if (code === null) {\n effects.consume(code);\n return;\n }\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n self.currentConstruct = undefined;\n return initial;\n }\n}\n\n});\nparcelRegister(\"eRfeJ\", function(module, exports) {\n\n$parcel$export(module.exports, \"blankLine\", () => $ad11d598ddddec2e$export$d50d28ce3ab2a612);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $ad11d598ddddec2e$export$d50d28ce3ab2a612 = {\n tokenize: $ad11d598ddddec2e$var$tokenizeBlankLine,\n partial: true\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $ad11d598ddddec2e$var$tokenizeBlankLine(effects, ok, nok) {\n return start;\n /**\n * Start of blank line.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, after, \"linePrefix\")(code) : after(code);\n }\n /**\n * At eof/eol, after optional whitespace.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n return code === null || (0, $5Lprs.markdownLineEnding)(code) ? 
ok(code) : nok(code);\n }\n}\n\n});\n\nparcelRegister(\"j7la5\", function(module, exports) {\n\n$parcel$export(module.exports, \"content\", () => $deaf16079a1807fb$export$a7db06668cad9adb);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $iNbDn = parcelRequire(\"iNbDn\");\nconst $deaf16079a1807fb$export$a7db06668cad9adb = {\n tokenize: $deaf16079a1807fb$var$tokenizeContent,\n resolve: $deaf16079a1807fb$var$resolveContent\n};\n/** @type {Construct} */ const $deaf16079a1807fb$var$continuationConstruct = {\n tokenize: $deaf16079a1807fb$var$tokenizeContinuation,\n partial: true\n};\n/**\n * Content is transparent: it’s parsed right now. That way, definitions are also\n * parsed right now: before text in paragraphs (specifically, media) are parsed.\n *\n * @type {Resolver}\n */ function $deaf16079a1807fb$var$resolveContent(events) {\n (0, $iNbDn.subtokenize)(events);\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $deaf16079a1807fb$var$tokenizeContent(effects, ok) {\n /** @type {Token | undefined} */ let previous;\n return chunkStart;\n /**\n * Before a content chunk.\n *\n * ```markdown\n * > | abc\n * ^\n * ```\n *\n * @type {State}\n */ function chunkStart(code) {\n effects.enter(\"content\");\n previous = effects.enter(\"chunkContent\", {\n contentType: \"content\"\n });\n return chunkInside(code);\n }\n /**\n * In a content chunk.\n *\n * ```markdown\n * > | abc\n * ^^^\n * ```\n *\n * @type {State}\n */ function chunkInside(code) {\n if (code === null) return contentEnd(code);\n // To do: in `markdown-rs`, each line is parsed on its own, and everything\n // is stitched together resolving.\n if ((0, $5Lprs.markdownLineEnding)(code)) return effects.check($deaf16079a1807fb$var$continuationConstruct, contentContinue, contentEnd)(code);\n // Data.\n effects.consume(code);\n return chunkInside;\n }\n /**\n *\n *\n * @type {State}\n */ function contentEnd(code) {\n effects.exit(\"chunkContent\");\n effects.exit(\"content\");\n return ok(code);\n }\n /**\n *\n *\n * @type {State}\n */ function contentContinue(code) {\n effects.consume(code);\n effects.exit(\"chunkContent\");\n previous.next = effects.enter(\"chunkContent\", {\n contentType: \"content\",\n previous: previous\n });\n previous = previous.next;\n return chunkInside;\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $deaf16079a1807fb$var$tokenizeContinuation(effects, ok, nok) {\n const self = this;\n return startLookahead;\n /**\n *\n *\n * @type {State}\n */ function startLookahead(code) {\n effects.exit(\"chunkContent\");\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return (0, $8GWoH.factorySpace)(effects, prefixed, \"linePrefix\");\n }\n /**\n *\n *\n * @type {State}\n */ function prefixed(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) return nok(code);\n // Always populated by defaults.\n const tail = self.events[self.events.length - 1];\n if (!self.parser.constructs.disable.null.includes(\"codeIndented\") && tail && tail[1].type === \"linePrefix\" && 
tail[2].sliceSerialize(tail[1], true).length >= 4) return ok(code);\n return effects.interrupt(self.parser.constructs.flow, nok, ok)(code);\n }\n}\n\n});\nparcelRegister(\"iNbDn\", function(module, exports) {\n\n$parcel$export(module.exports, \"subtokenize\", () => $dae5d3de466990fa$export$12949d1dd00fddf4);\n/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Token} Token\n */ \nvar $Ux2lp = parcelRequire(\"Ux2lp\");\nfunction $dae5d3de466990fa$export$12949d1dd00fddf4(events) {\n /** @type {Record} */ const jumps = {};\n let index = -1;\n /** @type {Event} */ let event;\n /** @type {number | undefined} */ let lineIndex;\n /** @type {number} */ let otherIndex;\n /** @type {Event} */ let otherEvent;\n /** @type {Array} */ let parameters;\n /** @type {Array} */ let subevents;\n /** @type {boolean | undefined} */ let more;\n while(++index < events.length){\n while(index in jumps)index = jumps[index];\n event = events[index];\n // Add a hook for the GFM tasklist extension, which needs to know if text\n // is in the first content of a list item.\n if (index && event[1].type === \"chunkFlow\" && events[index - 1][1].type === \"listItemPrefix\") {\n subevents = event[1]._tokenizer.events;\n otherIndex = 0;\n if (otherIndex < subevents.length && subevents[otherIndex][1].type === \"lineEndingBlank\") otherIndex += 2;\n if (otherIndex < subevents.length && subevents[otherIndex][1].type === \"content\") while(++otherIndex < subevents.length){\n if (subevents[otherIndex][1].type === \"content\") break;\n if (subevents[otherIndex][1].type === \"chunkText\") {\n subevents[otherIndex][1]._isInFirstContentOfListItem = true;\n otherIndex++;\n }\n }\n }\n // Enter.\n if (event[0] === \"enter\") {\n if (event[1].contentType) {\n Object.assign(jumps, $dae5d3de466990fa$var$subcontent(events, index));\n index = jumps[index];\n more = true;\n }\n } else if (event[1]._container) {\n otherIndex = index;\n lineIndex = undefined;\n while(otherIndex--){\n otherEvent = events[otherIndex];\n if (otherEvent[1].type === \"lineEnding\" || otherEvent[1].type === \"lineEndingBlank\") {\n if (otherEvent[0] === \"enter\") {\n if (lineIndex) events[lineIndex][1].type = \"lineEndingBlank\";\n otherEvent[1].type = \"lineEnding\";\n lineIndex = otherIndex;\n }\n } else break;\n }\n if (lineIndex) {\n // Fix position.\n event[1].end = Object.assign({}, events[lineIndex][1].start);\n // Switch container exit w/ line endings.\n parameters = events.slice(lineIndex, index);\n parameters.unshift(event);\n (0, $Ux2lp.splice)(events, lineIndex, index - lineIndex + 1, parameters);\n }\n }\n }\n return !more;\n}\n/**\n * Tokenize embedded tokens.\n *\n * @param {Array} events\n * @param {number} eventIndex\n * @returns {Record}\n */ function $dae5d3de466990fa$var$subcontent(events, eventIndex) {\n const token = events[eventIndex][1];\n const context = events[eventIndex][2];\n let startPosition = eventIndex - 1;\n /** @type {Array} */ const startPositions = [];\n const tokenizer = token._tokenizer || context.parser[token.contentType](token.start);\n const childEvents = tokenizer.events;\n /** @type {Array<[number, number]>} */ const jumps = [];\n /** @type {Record} */ const gaps = {};\n /** @type {Array} */ let stream;\n /** @type {Token | undefined} */ let previous;\n let index = -1;\n /** @type {Token | undefined} */ let current = token;\n let adjust = 0;\n let start = 0;\n const breaks = [\n start\n ];\n // Loop forward 
through the linked tokens to pass them in order to the\n // subtokenizer.\n while(current){\n // Find the position of the event for this token.\n while(events[++startPosition][1] !== current);\n startPositions.push(startPosition);\n if (!current._tokenizer) {\n stream = context.sliceStream(current);\n if (!current.next) stream.push(null);\n if (previous) tokenizer.defineSkip(current.start);\n if (current._isInFirstContentOfListItem) tokenizer._gfmTasklistFirstContentOfListItem = true;\n tokenizer.write(stream);\n if (current._isInFirstContentOfListItem) tokenizer._gfmTasklistFirstContentOfListItem = undefined;\n }\n // Unravel the next token.\n previous = current;\n current = current.next;\n }\n // Now, loop back through all events (and linked tokens), to figure out which\n // parts belong where.\n current = token;\n while(++index < childEvents.length)if (// Find a void token that includes a break.\n childEvents[index][0] === \"exit\" && childEvents[index - 1][0] === \"enter\" && childEvents[index][1].type === childEvents[index - 1][1].type && childEvents[index][1].start.line !== childEvents[index][1].end.line) {\n start = index + 1;\n breaks.push(start);\n // Help GC.\n current._tokenizer = undefined;\n current.previous = undefined;\n current = current.next;\n }\n // Help GC.\n tokenizer.events = [];\n // If there’s one more token (which is the cases for lines that end in an\n // EOF), that’s perfect: the last point we found starts it.\n // If there isn’t then make sure any remaining content is added to it.\n if (current) {\n // Help GC.\n current._tokenizer = undefined;\n current.previous = undefined;\n } else breaks.pop();\n // Now splice the events from the subtokenizer into the current events,\n // moving back to front so that splice indices aren’t affected.\n index = breaks.length;\n while(index--){\n const slice = childEvents.slice(breaks[index], breaks[index + 1]);\n const start = startPositions.pop();\n jumps.unshift([\n start,\n start + slice.length - 1\n ]);\n (0, $Ux2lp.splice)(events, start, 2, slice);\n }\n index = -1;\n while(++index < jumps.length){\n gaps[adjust + jumps[index][0]] = adjust + jumps[index][1];\n adjust += jumps[index][1] - jumps[index][0] - 1;\n }\n return gaps;\n}\n\n});\n\n\n\nparcelRegister(\"2JBld\", function(module, exports) {\n\n$parcel$export(module.exports, \"resolver\", () => $1fdcbfee479a4092$export$50397835cbfdbc24);\n$parcel$export(module.exports, \"string\", () => $1fdcbfee479a4092$export$22b082955e083ec3);\n$parcel$export(module.exports, \"text\", () => $1fdcbfee479a4092$export$6f093cfa640b7166);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */ const $1fdcbfee479a4092$export$50397835cbfdbc24 = {\n resolveAll: $1fdcbfee479a4092$var$createResolver()\n};\nconst $1fdcbfee479a4092$export$22b082955e083ec3 = $1fdcbfee479a4092$var$initializeFactory(\"string\");\nconst $1fdcbfee479a4092$export$6f093cfa640b7166 = $1fdcbfee479a4092$var$initializeFactory(\"text\");\n/**\n * @param {'string' | 'text'} field\n * @returns {InitialConstruct}\n */ function $1fdcbfee479a4092$var$initializeFactory(field) {\n return {\n tokenize: initializeText,\n resolveAll: $1fdcbfee479a4092$var$createResolver(field 
=== \"text\" ? $1fdcbfee479a4092$var$resolveAllLineSuffixes : undefined)\n };\n /**\n * @this {TokenizeContext}\n * @type {Initializer}\n */ function initializeText(effects) {\n const self = this;\n const constructs = this.parser.constructs[field];\n const text = effects.attempt(constructs, start, notText);\n return start;\n /** @type {State} */ function start(code) {\n return atBreak(code) ? text(code) : notText(code);\n }\n /** @type {State} */ function notText(code) {\n if (code === null) {\n effects.consume(code);\n return;\n }\n effects.enter(\"data\");\n effects.consume(code);\n return data;\n }\n /** @type {State} */ function data(code) {\n if (atBreak(code)) {\n effects.exit(\"data\");\n return text(code);\n }\n // Data.\n effects.consume(code);\n return data;\n }\n /**\n * @param {Code} code\n * @returns {boolean}\n */ function atBreak(code) {\n if (code === null) return true;\n const list = constructs[code];\n let index = -1;\n if (list) // Always populated by defaults.\n while(++index < list.length){\n const item = list[index];\n if (!item.previous || item.previous.call(self, self.previous)) return true;\n }\n return false;\n }\n }\n}\n/**\n * @param {Resolver | undefined} [extraResolver]\n * @returns {Resolver}\n */ function $1fdcbfee479a4092$var$createResolver(extraResolver) {\n return resolveAllText;\n /** @type {Resolver} */ function resolveAllText(events, context) {\n let index = -1;\n /** @type {number | undefined} */ let enter;\n // A rather boring computation (to merge adjacent `data` events) which\n // improves mm performance by 29%.\n while(++index <= events.length){\n if (enter === undefined) {\n if (events[index] && events[index][1].type === \"data\") {\n enter = index;\n index++;\n }\n } else if (!events[index] || events[index][1].type !== \"data\") {\n // Don’t do anything if there is one data token.\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end;\n events.splice(enter + 2, index - enter - 2);\n index = enter + 2;\n }\n enter = undefined;\n }\n }\n return extraResolver ? extraResolver(events, context) : events;\n }\n}\n/**\n * A rather ugly set of instructions which again looks at chunks in the input\n * stream.\n * The reason to do this here is that it is *much* faster to parse in reverse.\n * And that we can’t hook into `null` to split the line suffix before an EOF.\n * To do: figure out if we can make this into a clean utility, or even in core.\n * As it will be useful for GFMs literal autolink extension (and maybe even\n * tables?)\n *\n * @type {Resolver}\n */ function $1fdcbfee479a4092$var$resolveAllLineSuffixes(events, context) {\n let eventIndex = 0 // Skip first.\n ;\n while(++eventIndex <= events.length)if ((eventIndex === events.length || events[eventIndex][1].type === \"lineEnding\") && events[eventIndex - 1][1].type === \"data\") {\n const data = events[eventIndex - 1][1];\n const chunks = context.sliceStream(data);\n let index = chunks.length;\n let bufferIndex = -1;\n let size = 0;\n /** @type {boolean | undefined} */ let tabs;\n while(index--){\n const chunk = chunks[index];\n if (typeof chunk === \"string\") {\n bufferIndex = chunk.length;\n while(chunk.charCodeAt(bufferIndex - 1) === 32){\n size++;\n bufferIndex--;\n }\n if (bufferIndex) break;\n bufferIndex = -1;\n } else if (chunk === -2) {\n tabs = true;\n size++;\n } else if (chunk === -1) ;\n else {\n // Replacement character, exit.\n index++;\n break;\n }\n }\n if (size) {\n const token = {\n type: eventIndex === events.length || tabs || size < 2 ? 
\"lineSuffix\" : \"hardBreakTrailing\",\n start: {\n line: data.end.line,\n column: data.end.column - size,\n offset: data.end.offset - size,\n _index: data.start._index + index,\n _bufferIndex: index ? bufferIndex : data.start._bufferIndex + bufferIndex\n },\n end: Object.assign({}, data.end)\n };\n data.end = Object.assign({}, token.start);\n if (data.start.offset === data.end.offset) Object.assign(data, token);\n else {\n events.splice(eventIndex, 0, [\n \"enter\",\n token,\n context\n ], [\n \"exit\",\n token,\n context\n ]);\n eventIndex += 2;\n }\n }\n eventIndex++;\n }\n return events;\n}\n\n});\n\nparcelRegister(\"6oDSH\", function(module, exports) {\n\n$parcel$export(module.exports, \"createTokenizer\", () => $4a83eb91ec5d6b95$export$ae34f10ee4b29837);\n/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenType} TokenType\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */ /**\n * @callback Restore\n * @returns {void}\n *\n * @typedef Info\n * @property {Restore} restore\n * @property {number} from\n *\n * @callback ReturnHandle\n * Handle a successful run.\n * @param {Construct} construct\n * @param {Info} info\n * @returns {void}\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $Ux2lp = parcelRequire(\"Ux2lp\");\n\nvar $8f02Z = parcelRequire(\"8f02Z\");\nfunction $4a83eb91ec5d6b95$export$ae34f10ee4b29837(parser, initialize, from) {\n /** @type {Point} */ let point = Object.assign(from ? 
Object.assign({}, from) : {\n line: 1,\n column: 1,\n offset: 0\n }, {\n _index: 0,\n _bufferIndex: -1\n });\n /** @type {Record} */ const columnStart = {};\n /** @type {Array} */ const resolveAllConstructs = [];\n /** @type {Array} */ let chunks = [];\n /** @type {Array} */ let stack = [];\n /** @type {boolean | undefined} */ let consumed = true;\n /**\n * Tools used for tokenizing.\n *\n * @type {Effects}\n */ const effects = {\n consume: consume,\n enter: enter,\n exit: exit,\n attempt: constructFactory(onsuccessfulconstruct),\n check: constructFactory(onsuccessfulcheck),\n interrupt: constructFactory(onsuccessfulcheck, {\n interrupt: true\n })\n };\n /**\n * State and tools for resolving and serializing.\n *\n * @type {TokenizeContext}\n */ const context = {\n previous: null,\n code: null,\n containerState: {},\n events: [],\n parser: parser,\n sliceStream: sliceStream,\n sliceSerialize: sliceSerialize,\n now: now,\n defineSkip: defineSkip,\n write: write\n };\n /**\n * The state function.\n *\n * @type {State | void}\n */ let state = initialize.tokenize.call(context, effects);\n /**\n * Track which character we expect to be consumed, to catch bugs.\n *\n * @type {Code}\n */ let expectedCode;\n if (initialize.resolveAll) resolveAllConstructs.push(initialize);\n return context;\n /** @type {TokenizeContext['write']} */ function write(slice) {\n chunks = (0, $Ux2lp.push)(chunks, slice);\n main();\n // Exit if we’re not done, resolve might change stuff.\n if (chunks[chunks.length - 1] !== null) return [];\n addResult(initialize, 0);\n // Otherwise, resolve, and exit.\n context.events = (0, $8f02Z.resolveAll)(resolveAllConstructs, context.events, context);\n return context.events;\n }\n //\n // Tools.\n //\n /** @type {TokenizeContext['sliceSerialize']} */ function sliceSerialize(token, expandTabs) {\n return $4a83eb91ec5d6b95$var$serializeChunks(sliceStream(token), expandTabs);\n }\n /** @type {TokenizeContext['sliceStream']} */ function sliceStream(token) {\n return $4a83eb91ec5d6b95$var$sliceChunks(chunks, token);\n }\n /** @type {TokenizeContext['now']} */ function now() {\n // This is a hot path, so we clone manually instead of `Object.assign({}, point)`\n const { line: line, column: column, offset: offset, _index: _index, _bufferIndex: _bufferIndex } = point;\n return {\n line: line,\n column: column,\n offset: offset,\n _index: _index,\n _bufferIndex: _bufferIndex\n };\n }\n /** @type {TokenizeContext['defineSkip']} */ function defineSkip(value) {\n columnStart[value.line] = value.column;\n accountForPotentialSkip();\n }\n //\n // State management.\n //\n /**\n * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by\n * `consume`).\n * Here is where we walk through the chunks, which either include strings of\n * several characters, or numerical character codes.\n * The reason to do this in a loop instead of a call is so the stack can\n * drain.\n *\n * @returns {void}\n */ function main() {\n /** @type {number} */ let chunkIndex;\n while(point._index < chunks.length){\n const chunk = chunks[point._index];\n // If we’re in a buffer chunk, loop through it.\n if (typeof chunk === \"string\") {\n chunkIndex = point._index;\n if (point._bufferIndex < 0) point._bufferIndex = 0;\n while(point._index === chunkIndex && point._bufferIndex < chunk.length)go(chunk.charCodeAt(point._bufferIndex));\n } else go(chunk);\n }\n }\n /**\n * Deal with one code.\n *\n * @param {Code} code\n * @returns {void}\n */ function go(code) {\n consumed = undefined;\n expectedCode = code;\n 
state = state(code);\n }\n /** @type {Effects['consume']} */ function consume(code) {\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n point.line++;\n point.column = 1;\n point.offset += code === -3 ? 2 : 1;\n accountForPotentialSkip();\n } else if (code !== -1) {\n point.column++;\n point.offset++;\n }\n // Not in a string chunk.\n if (point._bufferIndex < 0) point._index++;\n else {\n point._bufferIndex++;\n // At end of string chunk.\n // @ts-expect-error Points w/ non-negative `_bufferIndex` reference\n // strings.\n if (point._bufferIndex === chunks[point._index].length) {\n point._bufferIndex = -1;\n point._index++;\n }\n }\n // Expose the previous character.\n context.previous = code;\n // Mark as consumed.\n consumed = true;\n }\n /** @type {Effects['enter']} */ function enter(type, fields) {\n /** @type {Token} */ // @ts-expect-error Patch instead of assign required fields to help GC.\n const token = fields || {};\n token.type = type;\n token.start = now();\n context.events.push([\n \"enter\",\n token,\n context\n ]);\n stack.push(token);\n return token;\n }\n /** @type {Effects['exit']} */ function exit(type) {\n const token = stack.pop();\n token.end = now();\n context.events.push([\n \"exit\",\n token,\n context\n ]);\n return token;\n }\n /**\n * Use results.\n *\n * @type {ReturnHandle}\n */ function onsuccessfulconstruct(construct, info) {\n addResult(construct, info.from);\n }\n /**\n * Discard results.\n *\n * @type {ReturnHandle}\n */ function onsuccessfulcheck(_, info) {\n info.restore();\n }\n /**\n * Factory to attempt/check/interrupt.\n *\n * @param {ReturnHandle} onreturn\n * @param {{interrupt?: boolean | undefined} | undefined} [fields]\n */ function constructFactory(onreturn, fields) {\n return hook;\n /**\n * Handle either an object mapping codes to constructs, a list of\n * constructs, or a single construct.\n *\n * @param {Array | Construct | ConstructRecord} constructs\n * @param {State} returnState\n * @param {State | undefined} [bogusState]\n * @returns {State}\n */ function hook(constructs, returnState, bogusState) {\n /** @type {Array} */ let listOfConstructs;\n /** @type {number} */ let constructIndex;\n /** @type {Construct} */ let currentConstruct;\n /** @type {Info} */ let info;\n return Array.isArray(constructs) ? handleListOfConstructs(constructs) : \"tokenize\" in constructs ? handleListOfConstructs([\n constructs\n ]) : handleMapOfConstructs(constructs);\n /**\n * Handle a list of construct.\n *\n * @param {ConstructRecord} map\n * @returns {State}\n */ function handleMapOfConstructs(map) {\n return start;\n /** @type {State} */ function start(code) {\n const def = code !== null && map[code];\n const all = code !== null && map.null;\n const list = [\n // To do: add more extension tests.\n /* c8 ignore next 2 */ ...Array.isArray(def) ? def : def ? [\n def\n ] : [],\n ...Array.isArray(all) ? all : all ? 
[\n all\n ] : []\n ];\n return handleListOfConstructs(list)(code);\n }\n }\n /**\n * Handle a list of construct.\n *\n * @param {Array} list\n * @returns {State}\n */ function handleListOfConstructs(list) {\n listOfConstructs = list;\n constructIndex = 0;\n if (list.length === 0) return bogusState;\n return handleConstruct(list[constructIndex]);\n }\n /**\n * Handle a single construct.\n *\n * @param {Construct} construct\n * @returns {State}\n */ function handleConstruct(construct) {\n return start;\n /** @type {State} */ function start(code) {\n // To do: not needed to store if there is no bogus state, probably?\n // Currently doesn’t work because `inspect` in document does a check\n // w/o a bogus, which doesn’t make sense. But it does seem to help perf\n // by not storing.\n info = store();\n currentConstruct = construct;\n if (!construct.partial) context.currentConstruct = construct;\n // Always populated by defaults.\n if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) return nok(code);\n return construct.tokenize.call(// If we do have fields, create an object w/ `context` as its\n // prototype.\n // This allows a “live binding”, which is needed for `interrupt`.\n fields ? Object.assign(Object.create(context), fields) : context, effects, ok, nok)(code);\n }\n }\n /** @type {State} */ function ok(code) {\n consumed = true;\n onreturn(currentConstruct, info);\n return returnState;\n }\n /** @type {State} */ function nok(code) {\n consumed = true;\n info.restore();\n if (++constructIndex < listOfConstructs.length) return handleConstruct(listOfConstructs[constructIndex]);\n return bogusState;\n }\n }\n }\n /**\n * @param {Construct} construct\n * @param {number} from\n * @returns {void}\n */ function addResult(construct, from) {\n if (construct.resolveAll && !resolveAllConstructs.includes(construct)) resolveAllConstructs.push(construct);\n if (construct.resolve) (0, $Ux2lp.splice)(context.events, from, context.events.length - from, construct.resolve(context.events.slice(from), context));\n if (construct.resolveTo) context.events = construct.resolveTo(context.events, context);\n }\n /**\n * Store state.\n *\n * @returns {Info}\n */ function store() {\n const startPoint = now();\n const startPrevious = context.previous;\n const startCurrentConstruct = context.currentConstruct;\n const startEventsIndex = context.events.length;\n const startStack = Array.from(stack);\n return {\n restore: restore,\n from: startEventsIndex\n };\n /**\n * Restore state.\n *\n * @returns {void}\n */ function restore() {\n point = startPoint;\n context.previous = startPrevious;\n context.currentConstruct = startCurrentConstruct;\n context.events.length = startEventsIndex;\n stack = startStack;\n accountForPotentialSkip();\n }\n }\n /**\n * Move the current point a bit forward in the line when it’s on a column\n * skip.\n *\n * @returns {void}\n */ function accountForPotentialSkip() {\n if (point.line in columnStart && point.column < 2) {\n point.column = columnStart[point.line];\n point.offset += columnStart[point.line] - 1;\n }\n }\n}\n/**\n * Get the chunks from a slice of chunks in the range of a token.\n *\n * @param {Array} chunks\n * @param {Pick} token\n * @returns {Array}\n */ function $4a83eb91ec5d6b95$var$sliceChunks(chunks, token) {\n const startIndex = token.start._index;\n const startBufferIndex = token.start._bufferIndex;\n const endIndex = token.end._index;\n const endBufferIndex = token.end._bufferIndex;\n /** @type {Array} */ let view;\n if (startIndex === 
endIndex) // @ts-expect-error `_bufferIndex` is used on string chunks.\n view = [\n chunks[startIndex].slice(startBufferIndex, endBufferIndex)\n ];\n else {\n view = chunks.slice(startIndex, endIndex);\n if (startBufferIndex > -1) {\n const head = view[0];\n if (typeof head === \"string\") view[0] = head.slice(startBufferIndex);\n else view.shift();\n }\n if (endBufferIndex > 0) // @ts-expect-error `_bufferIndex` is used on string chunks.\n view.push(chunks[endIndex].slice(0, endBufferIndex));\n }\n return view;\n}\n/**\n * Get the string value of a slice of chunks.\n *\n * @param {Array} chunks\n * @param {boolean | undefined} [expandTabs=false]\n * @returns {string}\n */ function $4a83eb91ec5d6b95$var$serializeChunks(chunks, expandTabs) {\n let index = -1;\n /** @type {Array} */ const result = [];\n /** @type {boolean | undefined} */ let atTab;\n while(++index < chunks.length){\n const chunk = chunks[index];\n /** @type {string} */ let value;\n if (typeof chunk === \"string\") value = chunk;\n else switch(chunk){\n case -5:\n value = \"\\r\";\n break;\n case -4:\n value = \"\\n\";\n break;\n case -3:\n value = \"\\r\\n\";\n break;\n case -2:\n value = expandTabs ? \" \" : \"\t\";\n break;\n case -1:\n if (!expandTabs && atTab) continue;\n value = \" \";\n break;\n default:\n // Currently only replacement character.\n value = String.fromCharCode(chunk);\n }\n atTab = chunk === -2;\n result.push(value);\n }\n return result.join(\"\");\n}\n\n});\nparcelRegister(\"8f02Z\", function(module, exports) {\n\n$parcel$export(module.exports, \"resolveAll\", () => $5fffc2d235733349$export$3ff61ec196ff408b);\n/**\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */ /**\n * Call all `resolveAll`s.\n *\n * @param {Array<{resolveAll?: Resolver | undefined}>} constructs\n * List of constructs, optionally with `resolveAll`s.\n * @param {Array} events\n * List of events.\n * @param {TokenizeContext} context\n * Context used by `tokenize`.\n * @returns {Array}\n * Changed events.\n */ function $5fffc2d235733349$export$3ff61ec196ff408b(constructs, events, context) {\n /** @type {Array} */ const called = [];\n let index = -1;\n while(++index < constructs.length){\n const resolve = constructs[index].resolveAll;\n if (resolve && !called.includes(resolve)) {\n events = resolve(events, context);\n called.push(resolve);\n }\n }\n return events;\n}\n\n});\n\n\nparcelRegister(\"8tRNW\", function(module, exports) {\n\n$parcel$export(module.exports, \"document\", () => $62cac499df3c25a0$export$5a7bfc01df82fcd1);\n$parcel$export(module.exports, \"contentInitial\", () => $62cac499df3c25a0$export$5a2181fb44b58173);\n$parcel$export(module.exports, \"flowInitial\", () => $62cac499df3c25a0$export$cf8bead395eff824);\n$parcel$export(module.exports, \"flow\", () => $62cac499df3c25a0$export$ccc7b0636abaffc3);\n$parcel$export(module.exports, \"string\", () => $62cac499df3c25a0$export$22b082955e083ec3);\n$parcel$export(module.exports, \"text\", () => $62cac499df3c25a0$export$6f093cfa640b7166);\n$parcel$export(module.exports, \"insideSpan\", () => $62cac499df3c25a0$export$d44f260a3f9b69f5);\n$parcel$export(module.exports, \"attentionMarkers\", () => $62cac499df3c25a0$export$b9c0b60d74426aea);\n$parcel$export(module.exports, \"disable\", () => $62cac499df3c25a0$export$e20fbacbb41798b);\n/**\n * @typedef {import('micromark-util-types').Extension} Extension\n */ \nvar $eIIGZ = 
parcelRequire(\"eIIGZ\");\nvar $807ia = parcelRequire(\"807ia\");\nvar $acnAX = parcelRequire(\"acnAX\");\nvar $7ITXU = parcelRequire(\"7ITXU\");\nvar $hIELq = parcelRequire(\"hIELq\");\nvar $2qoAc = parcelRequire(\"2qoAc\");\nvar $eHPOy = parcelRequire(\"eHPOy\");\nvar $1XgoW = parcelRequire(\"1XgoW\");\nvar $ix9Ua = parcelRequire(\"ix9Ua\");\nvar $1d2Fb = parcelRequire(\"1d2Fb\");\nvar $e7bPJ = parcelRequire(\"e7bPJ\");\nvar $aMDJK = parcelRequire(\"aMDJK\");\nvar $fAOFU = parcelRequire(\"fAOFU\");\nvar $2yvw6 = parcelRequire(\"2yvw6\");\nvar $l3YSx = parcelRequire(\"l3YSx\");\nvar $ifBjz = parcelRequire(\"ifBjz\");\nvar $9shNq = parcelRequire(\"9shNq\");\nvar $7twjg = parcelRequire(\"7twjg\");\nvar $cfK44 = parcelRequire(\"cfK44\");\nvar $eBJ0T = parcelRequire(\"eBJ0T\");\n\nvar $2JBld = parcelRequire(\"2JBld\");\nconst $62cac499df3c25a0$export$5a7bfc01df82fcd1 = {\n [42]: (0, $7twjg.list),\n [43]: (0, $7twjg.list),\n [45]: (0, $7twjg.list),\n [48]: (0, $7twjg.list),\n [49]: (0, $7twjg.list),\n [50]: (0, $7twjg.list),\n [51]: (0, $7twjg.list),\n [52]: (0, $7twjg.list),\n [53]: (0, $7twjg.list),\n [54]: (0, $7twjg.list),\n [55]: (0, $7twjg.list),\n [56]: (0, $7twjg.list),\n [57]: (0, $7twjg.list),\n [62]: (0, $acnAX.blockQuote)\n};\nconst $62cac499df3c25a0$export$5a2181fb44b58173 = {\n [91]: (0, $ix9Ua.definition)\n};\nconst $62cac499df3c25a0$export$cf8bead395eff824 = {\n [-2]: (0, $eHPOy.codeIndented),\n [-1]: (0, $eHPOy.codeIndented),\n [32]: (0, $eHPOy.codeIndented)\n};\nconst $62cac499df3c25a0$export$ccc7b0636abaffc3 = {\n [35]: (0, $e7bPJ.headingAtx),\n [42]: (0, $eBJ0T.thematicBreak),\n [45]: [\n (0, $cfK44.setextUnderline),\n (0, $eBJ0T.thematicBreak)\n ],\n [60]: (0, $aMDJK.htmlFlow),\n [61]: (0, $cfK44.setextUnderline),\n [95]: (0, $eBJ0T.thematicBreak),\n [96]: (0, $2qoAc.codeFenced),\n [126]: (0, $2qoAc.codeFenced)\n};\nconst $62cac499df3c25a0$export$22b082955e083ec3 = {\n [38]: (0, $hIELq.characterReference),\n [92]: (0, $7ITXU.characterEscape)\n};\nconst $62cac499df3c25a0$export$6f093cfa640b7166 = {\n [-5]: (0, $9shNq.lineEnding),\n [-4]: (0, $9shNq.lineEnding),\n [-3]: (0, $9shNq.lineEnding),\n [33]: (0, $l3YSx.labelStartImage),\n [38]: (0, $hIELq.characterReference),\n [42]: (0, $eIIGZ.attention),\n [60]: [\n (0, $807ia.autolink),\n (0, $fAOFU.htmlText)\n ],\n [91]: (0, $ifBjz.labelStartLink),\n [92]: [\n (0, $1d2Fb.hardBreakEscape),\n (0, $7ITXU.characterEscape)\n ],\n [93]: (0, $2yvw6.labelEnd),\n [95]: (0, $eIIGZ.attention),\n [96]: (0, $1XgoW.codeText)\n};\nconst $62cac499df3c25a0$export$d44f260a3f9b69f5 = {\n null: [\n (0, $eIIGZ.attention),\n (0, $2JBld.resolver)\n ]\n};\nconst $62cac499df3c25a0$export$b9c0b60d74426aea = {\n null: [\n 42,\n 95\n ]\n};\nconst $62cac499df3c25a0$export$e20fbacbb41798b = {\n null: []\n};\n\n});\nparcelRegister(\"eIIGZ\", function(module, exports) {\n\n$parcel$export(module.exports, \"attention\", () => $ab77d7b684ccca7a$export$45b92471da762af7);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $Ux2lp = 
parcelRequire(\"Ux2lp\");\n\nvar $i5TSH = parcelRequire(\"i5TSH\");\n\nvar $8f02Z = parcelRequire(\"8f02Z\");\nconst $ab77d7b684ccca7a$export$45b92471da762af7 = {\n name: \"attention\",\n tokenize: $ab77d7b684ccca7a$var$tokenizeAttention,\n resolveAll: $ab77d7b684ccca7a$var$resolveAllAttention\n};\n/**\n * Take all events and resolve attention to emphasis or strong.\n *\n * @type {Resolver}\n */ function $ab77d7b684ccca7a$var$resolveAllAttention(events, context) {\n let index = -1;\n /** @type {number} */ let open;\n /** @type {Token} */ let group;\n /** @type {Token} */ let text;\n /** @type {Token} */ let openingSequence;\n /** @type {Token} */ let closingSequence;\n /** @type {number} */ let use;\n /** @type {Array} */ let nextEvents;\n /** @type {number} */ let offset;\n // Walk through all events.\n //\n // Note: performance of this is fine on an mb of normal markdown, but it’s\n // a bottleneck for malicious stuff.\n while(++index < events.length)// Find a token that can close.\n if (events[index][0] === \"enter\" && events[index][1].type === \"attentionSequence\" && events[index][1]._close) {\n open = index;\n // Now walk back to find an opener.\n while(open--)// Find a token that can open the closer.\n if (events[open][0] === \"exit\" && events[open][1].type === \"attentionSequence\" && events[open][1]._open && // If the markers are the same:\n context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index][1]).charCodeAt(0)) {\n // If the opening can close or the closing can open,\n // and the close size *is not* a multiple of three,\n // but the sum of the opening and closing size *is* multiple of three,\n // then don’t match.\n if ((events[open][1]._close || events[index][1]._open) && (events[index][1].end.offset - events[index][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index][1].end.offset - events[index][1].start.offset) % 3)) continue;\n // Number of markers to use from the sequence.\n use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index][1].end.offset - events[index][1].start.offset > 1 ? 2 : 1;\n const start = Object.assign({}, events[open][1].end);\n const end = Object.assign({}, events[index][1].start);\n $ab77d7b684ccca7a$var$movePoint(start, -use);\n $ab77d7b684ccca7a$var$movePoint(end, use);\n openingSequence = {\n type: use > 1 ? \"strongSequence\" : \"emphasisSequence\",\n start: start,\n end: Object.assign({}, events[open][1].end)\n };\n closingSequence = {\n type: use > 1 ? \"strongSequence\" : \"emphasisSequence\",\n start: Object.assign({}, events[index][1].start),\n end: end\n };\n text = {\n type: use > 1 ? \"strongText\" : \"emphasisText\",\n start: Object.assign({}, events[open][1].end),\n end: Object.assign({}, events[index][1].start)\n };\n group = {\n type: use > 1 ? 
\"strong\" : \"emphasis\",\n start: Object.assign({}, openingSequence.start),\n end: Object.assign({}, closingSequence.end)\n };\n events[open][1].end = Object.assign({}, openingSequence.start);\n events[index][1].start = Object.assign({}, closingSequence.end);\n nextEvents = [];\n // If there are more markers in the opening, add them before.\n if (events[open][1].end.offset - events[open][1].start.offset) nextEvents = (0, $Ux2lp.push)(nextEvents, [\n [\n \"enter\",\n events[open][1],\n context\n ],\n [\n \"exit\",\n events[open][1],\n context\n ]\n ]);\n // Opening.\n nextEvents = (0, $Ux2lp.push)(nextEvents, [\n [\n \"enter\",\n group,\n context\n ],\n [\n \"enter\",\n openingSequence,\n context\n ],\n [\n \"exit\",\n openingSequence,\n context\n ],\n [\n \"enter\",\n text,\n context\n ]\n ]);\n // Always populated by defaults.\n // Between.\n nextEvents = (0, $Ux2lp.push)(nextEvents, (0, $8f02Z.resolveAll)(context.parser.constructs.insideSpan.null, events.slice(open + 1, index), context));\n // Closing.\n nextEvents = (0, $Ux2lp.push)(nextEvents, [\n [\n \"exit\",\n text,\n context\n ],\n [\n \"enter\",\n closingSequence,\n context\n ],\n [\n \"exit\",\n closingSequence,\n context\n ],\n [\n \"exit\",\n group,\n context\n ]\n ]);\n // If there are more markers in the closing, add them after.\n if (events[index][1].end.offset - events[index][1].start.offset) {\n offset = 2;\n nextEvents = (0, $Ux2lp.push)(nextEvents, [\n [\n \"enter\",\n events[index][1],\n context\n ],\n [\n \"exit\",\n events[index][1],\n context\n ]\n ]);\n } else offset = 0;\n (0, $Ux2lp.splice)(events, open - 1, index - open + 3, nextEvents);\n index = open + nextEvents.length - offset - 2;\n break;\n }\n }\n // Remove remaining sequences.\n index = -1;\n while(++index < events.length)if (events[index][1].type === \"attentionSequence\") events[index][1].type = \"data\";\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $ab77d7b684ccca7a$var$tokenizeAttention(effects, ok) {\n const attentionMarkers = this.parser.constructs.attentionMarkers.null;\n const previous = this.previous;\n const before = (0, $i5TSH.classifyCharacter)(previous);\n /** @type {NonNullable} */ let marker;\n return start;\n /**\n * Before a sequence.\n *\n * ```markdown\n * > | **\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n marker = code;\n effects.enter(\"attentionSequence\");\n return inside(code);\n }\n /**\n * In a sequence.\n *\n * ```markdown\n * > | **\n * ^^\n * ```\n *\n * @type {State}\n */ function inside(code) {\n if (code === marker) {\n effects.consume(code);\n return inside;\n }\n const token = effects.exit(\"attentionSequence\");\n // To do: next major: move this to resolver, just like `markdown-rs`.\n const after = (0, $i5TSH.classifyCharacter)(code);\n // Always populated by defaults.\n const open = !after || after === 2 && before || attentionMarkers.includes(code);\n const close = !before || before === 2 && after || attentionMarkers.includes(previous);\n token._open = Boolean(marker === 42 ? open : open && (before || !close));\n token._close = Boolean(marker === 42 ? close : close && (after || !open));\n return ok(code);\n }\n}\n/**\n * Move a point a bit.\n *\n * Note: `move` only works inside lines! 
It’s not possible to move past other\n * chunks (replacement characters, tabs, or line endings).\n *\n * @param {Point} point\n * @param {number} offset\n * @returns {void}\n */ function $ab77d7b684ccca7a$var$movePoint(point, offset) {\n point.column += offset;\n point.offset += offset;\n point._bufferIndex += offset;\n}\n\n});\nparcelRegister(\"i5TSH\", function(module, exports) {\n\n$parcel$export(module.exports, \"classifyCharacter\", () => $d2c40a75b3c1060a$export$e3902bc0d835cad0);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $d2c40a75b3c1060a$export$e3902bc0d835cad0(code) {\n if (code === null || (0, $5Lprs.markdownLineEndingOrSpace)(code) || (0, $5Lprs.unicodeWhitespace)(code)) return 1;\n if ((0, $5Lprs.unicodePunctuation)(code)) return 2;\n}\n\n});\n\n\nparcelRegister(\"807ia\", function(module, exports) {\n\n$parcel$export(module.exports, \"autolink\", () => $5d33fad6991aa1ad$export$17ddf85e4c916ad6);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $5d33fad6991aa1ad$export$17ddf85e4c916ad6 = {\n name: \"autolink\",\n tokenize: $5d33fad6991aa1ad$var$tokenizeAutolink\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $5d33fad6991aa1ad$var$tokenizeAutolink(effects, ok, nok) {\n let size = 0;\n return start;\n /**\n * Start of an autolink.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"autolink\");\n effects.enter(\"autolinkMarker\");\n effects.consume(code);\n effects.exit(\"autolinkMarker\");\n effects.enter(\"autolinkProtocol\");\n return open;\n }\n /**\n * After `<`, at protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function open(code) {\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return schemeOrEmailAtext;\n }\n return emailAtext(code);\n }\n /**\n * At second byte of protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function schemeOrEmailAtext(code) {\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (code === 43 || code === 45 || code === 46 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n // Count the previous alphabetical from `open` too.\n size = 1;\n return schemeInsideOrEmailAtext(code);\n }\n return emailAtext(code);\n }\n /**\n * In ambiguous protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function schemeInsideOrEmailAtext(code) {\n if (code === 58) {\n effects.consume(code);\n size = 0;\n return urlInside;\n }\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if ((code === 43 || code === 45 || code === 46 || (0, $5Lprs.asciiAlphanumeric)(code)) && size++ < 32) {\n effects.consume(code);\n return schemeInsideOrEmailAtext;\n }\n size = 0;\n return emailAtext(code);\n }\n /**\n * After protocol, in URL.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function urlInside(code) {\n if (code === 62) {\n effects.exit(\"autolinkProtocol\");\n effects.enter(\"autolinkMarker\");\n effects.consume(code);\n effects.exit(\"autolinkMarker\");\n effects.exit(\"autolink\");\n return ok;\n }\n // ASCII control, space, or 
`<`.\n if (code === null || code === 32 || code === 60 || (0, $5Lprs.asciiControl)(code)) return nok(code);\n effects.consume(code);\n return urlInside;\n }\n /**\n * In email atext.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function emailAtext(code) {\n if (code === 64) {\n effects.consume(code);\n return emailAtSignOrDot;\n }\n if ((0, $5Lprs.asciiAtext)(code)) {\n effects.consume(code);\n return emailAtext;\n }\n return nok(code);\n }\n /**\n * In label, after at-sign or dot.\n *\n * ```markdown\n * > | ab\n * ^ ^\n * ```\n *\n * @type {State}\n */ function emailAtSignOrDot(code) {\n return (0, $5Lprs.asciiAlphanumeric)(code) ? emailLabel(code) : nok(code);\n }\n /**\n * In label, where `.` and `>` are allowed.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function emailLabel(code) {\n if (code === 46) {\n effects.consume(code);\n size = 0;\n return emailAtSignOrDot;\n }\n if (code === 62) {\n // Exit, then change the token type.\n effects.exit(\"autolinkProtocol\").type = \"autolinkEmail\";\n effects.enter(\"autolinkMarker\");\n effects.consume(code);\n effects.exit(\"autolinkMarker\");\n effects.exit(\"autolink\");\n return ok;\n }\n return emailValue(code);\n }\n /**\n * In label, where `.` and `>` are *not* allowed.\n *\n * Though, this is also used in `emailLabel` to parse other values.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */ function emailValue(code) {\n // ASCII alphanumeric or `-`.\n if ((code === 45 || (0, $5Lprs.asciiAlphanumeric)(code)) && size++ < 63) {\n const next = code === 45 ? emailValue : emailLabel;\n effects.consume(code);\n return next;\n }\n return nok(code);\n }\n}\n\n});\n\nparcelRegister(\"acnAX\", function(module, exports) {\n\n$parcel$export(module.exports, \"blockQuote\", () => $76cd53d8c5b717c4$export$200dcd0a5903c968);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $76cd53d8c5b717c4$export$200dcd0a5903c968 = {\n name: \"blockQuote\",\n tokenize: $76cd53d8c5b717c4$var$tokenizeBlockQuoteStart,\n continuation: {\n tokenize: $76cd53d8c5b717c4$var$tokenizeBlockQuoteContinuation\n },\n exit: $76cd53d8c5b717c4$var$exit\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $76cd53d8c5b717c4$var$tokenizeBlockQuoteStart(effects, ok, nok) {\n const self = this;\n return start;\n /**\n * Start of block quote.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n if (code === 62) {\n const state = self.containerState;\n if (!state.open) {\n effects.enter(\"blockQuote\", {\n _container: true\n });\n state.open = true;\n }\n effects.enter(\"blockQuotePrefix\");\n effects.enter(\"blockQuoteMarker\");\n effects.consume(code);\n effects.exit(\"blockQuoteMarker\");\n return after;\n }\n return nok(code);\n }\n /**\n * After `>`, before optional whitespace.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.enter(\"blockQuotePrefixWhitespace\");\n effects.consume(code);\n effects.exit(\"blockQuotePrefixWhitespace\");\n effects.exit(\"blockQuotePrefix\");\n 
return ok;\n }\n effects.exit(\"blockQuotePrefix\");\n return ok(code);\n }\n}\n/**\n * Start of block quote continuation.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $76cd53d8c5b717c4$var$tokenizeBlockQuoteContinuation(effects, ok, nok) {\n const self = this;\n return contStart;\n /**\n * Start of block quote continuation.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */ function contStart(code) {\n if ((0, $5Lprs.markdownSpace)(code)) // Always populated by defaults.\n return (0, $8GWoH.factorySpace)(effects, contBefore, \"linePrefix\", self.parser.constructs.disable.null.includes(\"codeIndented\") ? undefined : 4)(code);\n return contBefore(code);\n }\n /**\n * At `>`, after optional whitespace.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */ function contBefore(code) {\n return effects.attempt($76cd53d8c5b717c4$export$200dcd0a5903c968, ok, nok)(code);\n }\n}\n/** @type {Exiter} */ function $76cd53d8c5b717c4$var$exit(effects) {\n effects.exit(\"blockQuote\");\n}\n\n});\n\nparcelRegister(\"7ITXU\", function(module, exports) {\n\n$parcel$export(module.exports, \"characterEscape\", () => $59f80dfce403f5de$export$2005478564e78d96);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $59f80dfce403f5de$export$2005478564e78d96 = {\n name: \"characterEscape\",\n tokenize: $59f80dfce403f5de$var$tokenizeCharacterEscape\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $59f80dfce403f5de$var$tokenizeCharacterEscape(effects, ok, nok) {\n return start;\n /**\n * Start of character escape.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"characterEscape\");\n effects.enter(\"escapeMarker\");\n effects.consume(code);\n effects.exit(\"escapeMarker\");\n return inside;\n }\n /**\n * After `\\`, at punctuation.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */ function inside(code) {\n // ASCII punctuation.\n if ((0, $5Lprs.asciiPunctuation)(code)) {\n effects.enter(\"characterEscapeValue\");\n effects.consume(code);\n effects.exit(\"characterEscapeValue\");\n effects.exit(\"characterEscape\");\n return ok;\n }\n return nok(code);\n }\n}\n\n});\n\nparcelRegister(\"hIELq\", function(module, exports) {\n\n$parcel$export(module.exports, \"characterReference\", () => $ce662a9402db28e3$export$e31905600aaf3d8e);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $gxs7E = parcelRequire(\"gxs7E\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $ce662a9402db28e3$export$e31905600aaf3d8e = {\n name: \"characterReference\",\n tokenize: $ce662a9402db28e3$var$tokenizeCharacterReference\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function 
$ce662a9402db28e3$var$tokenizeCharacterReference(effects, ok, nok) {\n const self = this;\n let size = 0;\n /** @type {number} */ let max;\n /** @type {(code: Code) => boolean} */ let test;\n return start;\n /**\n * Start of character reference.\n *\n * ```markdown\n * > | a&b\n * ^\n * > | a{b\n * ^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"characterReference\");\n effects.enter(\"characterReferenceMarker\");\n effects.consume(code);\n effects.exit(\"characterReferenceMarker\");\n return open;\n }\n /**\n * After `&`, at `#` for numeric references or alphanumeric for named\n * references.\n *\n * ```markdown\n * > | a&b\n * ^\n * > | a{b\n * ^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */ function open(code) {\n if (code === 35) {\n effects.enter(\"characterReferenceMarkerNumeric\");\n effects.consume(code);\n effects.exit(\"characterReferenceMarkerNumeric\");\n return numeric;\n }\n effects.enter(\"characterReferenceValue\");\n max = 31;\n test = (0, $5Lprs.asciiAlphanumeric);\n return value(code);\n }\n /**\n * After `#`, at `x` for hexadecimals or digit for decimals.\n *\n * ```markdown\n * > | a{b\n * ^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */ function numeric(code) {\n if (code === 88 || code === 120) {\n effects.enter(\"characterReferenceMarkerHexadecimal\");\n effects.consume(code);\n effects.exit(\"characterReferenceMarkerHexadecimal\");\n effects.enter(\"characterReferenceValue\");\n max = 6;\n test = (0, $5Lprs.asciiHexDigit);\n return value;\n }\n effects.enter(\"characterReferenceValue\");\n max = 7;\n test = (0, $5Lprs.asciiDigit);\n return value(code);\n }\n /**\n * After markers (``, ``, or `&`), in value, before `;`.\n *\n * The character reference kind defines what and how many characters are\n * allowed.\n *\n * ```markdown\n * > | a&b\n * ^^^\n * > | a{b\n * ^^^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */ function value(code) {\n if (code === 59 && size) {\n const token = effects.exit(\"characterReferenceValue\");\n if (test === (0, $5Lprs.asciiAlphanumeric) && !(0, $gxs7E.decodeNamedCharacterReference)(self.sliceSerialize(token))) return nok(code);\n // To do: `markdown-rs` uses a different name:\n // `CharacterReferenceMarkerSemi`.\n effects.enter(\"characterReferenceMarker\");\n effects.consume(code);\n effects.exit(\"characterReferenceMarker\");\n effects.exit(\"characterReference\");\n return ok;\n }\n if (test(code) && size++ < max) {\n effects.consume(code);\n return value;\n }\n return nok(code);\n }\n}\n\n});\nparcelRegister(\"gxs7E\", function(module, exports) {\n\n$parcel$export(module.exports, \"decodeNamedCharacterReference\", () => $c0a58af43f6129ac$export$289b6a6320f709b4);\n\nvar $kmPXE = parcelRequire(\"kmPXE\");\nconst $c0a58af43f6129ac$var$own = {}.hasOwnProperty;\nfunction $c0a58af43f6129ac$export$289b6a6320f709b4(value) {\n return $c0a58af43f6129ac$var$own.call((0, $kmPXE.characterEntities), value) ? 
(0, $kmPXE.characterEntities)[value] : false;\n}\n\n});\nparcelRegister(\"kmPXE\", function(module, exports) {\n\n$parcel$export(module.exports, \"characterEntities\", () => $ed3e2b2347fac27f$export$ec810d1aafce79a7);\n/**\n * Map of named character references.\n *\n * @type {Record}\n */ const $ed3e2b2347fac27f$export$ec810d1aafce79a7 = {\n AElig: \"\\xc6\",\n AMP: \"&\",\n Aacute: \"\\xc1\",\n Abreve: \"\\u0102\",\n Acirc: \"\\xc2\",\n Acy: \"\\u0410\",\n Afr: \"\\uD835\\uDD04\",\n Agrave: \"\\xc0\",\n Alpha: \"\\u0391\",\n Amacr: \"\\u0100\",\n And: \"\\u2A53\",\n Aogon: \"\\u0104\",\n Aopf: \"\\uD835\\uDD38\",\n ApplyFunction: \"\\u2061\",\n Aring: \"\\xc5\",\n Ascr: \"\\uD835\\uDC9C\",\n Assign: \"\\u2254\",\n Atilde: \"\\xc3\",\n Auml: \"\\xc4\",\n Backslash: \"\\u2216\",\n Barv: \"\\u2AE7\",\n Barwed: \"\\u2306\",\n Bcy: \"\\u0411\",\n Because: \"\\u2235\",\n Bernoullis: \"\\u212C\",\n Beta: \"\\u0392\",\n Bfr: \"\\uD835\\uDD05\",\n Bopf: \"\\uD835\\uDD39\",\n Breve: \"\\u02D8\",\n Bscr: \"\\u212C\",\n Bumpeq: \"\\u224E\",\n CHcy: \"\\u0427\",\n COPY: \"\\xa9\",\n Cacute: \"\\u0106\",\n Cap: \"\\u22D2\",\n CapitalDifferentialD: \"\\u2145\",\n Cayleys: \"\\u212D\",\n Ccaron: \"\\u010C\",\n Ccedil: \"\\xc7\",\n Ccirc: \"\\u0108\",\n Cconint: \"\\u2230\",\n Cdot: \"\\u010A\",\n Cedilla: \"\\xb8\",\n CenterDot: \"\\xb7\",\n Cfr: \"\\u212D\",\n Chi: \"\\u03A7\",\n CircleDot: \"\\u2299\",\n CircleMinus: \"\\u2296\",\n CirclePlus: \"\\u2295\",\n CircleTimes: \"\\u2297\",\n ClockwiseContourIntegral: \"\\u2232\",\n CloseCurlyDoubleQuote: \"\\u201D\",\n CloseCurlyQuote: \"\\u2019\",\n Colon: \"\\u2237\",\n Colone: \"\\u2A74\",\n Congruent: \"\\u2261\",\n Conint: \"\\u222F\",\n ContourIntegral: \"\\u222E\",\n Copf: \"\\u2102\",\n Coproduct: \"\\u2210\",\n CounterClockwiseContourIntegral: \"\\u2233\",\n Cross: \"\\u2A2F\",\n Cscr: \"\\uD835\\uDC9E\",\n Cup: \"\\u22D3\",\n CupCap: \"\\u224D\",\n DD: \"\\u2145\",\n DDotrahd: \"\\u2911\",\n DJcy: \"\\u0402\",\n DScy: \"\\u0405\",\n DZcy: \"\\u040F\",\n Dagger: \"\\u2021\",\n Darr: \"\\u21A1\",\n Dashv: \"\\u2AE4\",\n Dcaron: \"\\u010E\",\n Dcy: \"\\u0414\",\n Del: \"\\u2207\",\n Delta: \"\\u0394\",\n Dfr: \"\\uD835\\uDD07\",\n DiacriticalAcute: \"\\xb4\",\n DiacriticalDot: \"\\u02D9\",\n DiacriticalDoubleAcute: \"\\u02DD\",\n DiacriticalGrave: \"`\",\n DiacriticalTilde: \"\\u02DC\",\n Diamond: \"\\u22C4\",\n DifferentialD: \"\\u2146\",\n Dopf: \"\\uD835\\uDD3B\",\n Dot: \"\\xa8\",\n DotDot: \"\\u20DC\",\n DotEqual: \"\\u2250\",\n DoubleContourIntegral: \"\\u222F\",\n DoubleDot: \"\\xa8\",\n DoubleDownArrow: \"\\u21D3\",\n DoubleLeftArrow: \"\\u21D0\",\n DoubleLeftRightArrow: \"\\u21D4\",\n DoubleLeftTee: \"\\u2AE4\",\n DoubleLongLeftArrow: \"\\u27F8\",\n DoubleLongLeftRightArrow: \"\\u27FA\",\n DoubleLongRightArrow: \"\\u27F9\",\n DoubleRightArrow: \"\\u21D2\",\n DoubleRightTee: \"\\u22A8\",\n DoubleUpArrow: \"\\u21D1\",\n DoubleUpDownArrow: \"\\u21D5\",\n DoubleVerticalBar: \"\\u2225\",\n DownArrow: \"\\u2193\",\n DownArrowBar: \"\\u2913\",\n DownArrowUpArrow: \"\\u21F5\",\n DownBreve: \"\\u0311\",\n DownLeftRightVector: \"\\u2950\",\n DownLeftTeeVector: \"\\u295E\",\n DownLeftVector: \"\\u21BD\",\n DownLeftVectorBar: \"\\u2956\",\n DownRightTeeVector: \"\\u295F\",\n DownRightVector: \"\\u21C1\",\n DownRightVectorBar: \"\\u2957\",\n DownTee: \"\\u22A4\",\n DownTeeArrow: \"\\u21A7\",\n Downarrow: \"\\u21D3\",\n Dscr: \"\\uD835\\uDC9F\",\n Dstrok: \"\\u0110\",\n ENG: \"\\u014A\",\n ETH: \"\\xd0\",\n Eacute: \"\\xc9\",\n Ecaron: \"\\u011A\",\n 
Ecirc: \"\\xca\",\n Ecy: \"\\u042D\",\n Edot: \"\\u0116\",\n Efr: \"\\uD835\\uDD08\",\n Egrave: \"\\xc8\",\n Element: \"\\u2208\",\n Emacr: \"\\u0112\",\n EmptySmallSquare: \"\\u25FB\",\n EmptyVerySmallSquare: \"\\u25AB\",\n Eogon: \"\\u0118\",\n Eopf: \"\\uD835\\uDD3C\",\n Epsilon: \"\\u0395\",\n Equal: \"\\u2A75\",\n EqualTilde: \"\\u2242\",\n Equilibrium: \"\\u21CC\",\n Escr: \"\\u2130\",\n Esim: \"\\u2A73\",\n Eta: \"\\u0397\",\n Euml: \"\\xcb\",\n Exists: \"\\u2203\",\n ExponentialE: \"\\u2147\",\n Fcy: \"\\u0424\",\n Ffr: \"\\uD835\\uDD09\",\n FilledSmallSquare: \"\\u25FC\",\n FilledVerySmallSquare: \"\\u25AA\",\n Fopf: \"\\uD835\\uDD3D\",\n ForAll: \"\\u2200\",\n Fouriertrf: \"\\u2131\",\n Fscr: \"\\u2131\",\n GJcy: \"\\u0403\",\n GT: \">\",\n Gamma: \"\\u0393\",\n Gammad: \"\\u03DC\",\n Gbreve: \"\\u011E\",\n Gcedil: \"\\u0122\",\n Gcirc: \"\\u011C\",\n Gcy: \"\\u0413\",\n Gdot: \"\\u0120\",\n Gfr: \"\\uD835\\uDD0A\",\n Gg: \"\\u22D9\",\n Gopf: \"\\uD835\\uDD3E\",\n GreaterEqual: \"\\u2265\",\n GreaterEqualLess: \"\\u22DB\",\n GreaterFullEqual: \"\\u2267\",\n GreaterGreater: \"\\u2AA2\",\n GreaterLess: \"\\u2277\",\n GreaterSlantEqual: \"\\u2A7E\",\n GreaterTilde: \"\\u2273\",\n Gscr: \"\\uD835\\uDCA2\",\n Gt: \"\\u226B\",\n HARDcy: \"\\u042A\",\n Hacek: \"\\u02C7\",\n Hat: \"^\",\n Hcirc: \"\\u0124\",\n Hfr: \"\\u210C\",\n HilbertSpace: \"\\u210B\",\n Hopf: \"\\u210D\",\n HorizontalLine: \"\\u2500\",\n Hscr: \"\\u210B\",\n Hstrok: \"\\u0126\",\n HumpDownHump: \"\\u224E\",\n HumpEqual: \"\\u224F\",\n IEcy: \"\\u0415\",\n IJlig: \"\\u0132\",\n IOcy: \"\\u0401\",\n Iacute: \"\\xcd\",\n Icirc: \"\\xce\",\n Icy: \"\\u0418\",\n Idot: \"\\u0130\",\n Ifr: \"\\u2111\",\n Igrave: \"\\xcc\",\n Im: \"\\u2111\",\n Imacr: \"\\u012A\",\n ImaginaryI: \"\\u2148\",\n Implies: \"\\u21D2\",\n Int: \"\\u222C\",\n Integral: \"\\u222B\",\n Intersection: \"\\u22C2\",\n InvisibleComma: \"\\u2063\",\n InvisibleTimes: \"\\u2062\",\n Iogon: \"\\u012E\",\n Iopf: \"\\uD835\\uDD40\",\n Iota: \"\\u0399\",\n Iscr: \"\\u2110\",\n Itilde: \"\\u0128\",\n Iukcy: \"\\u0406\",\n Iuml: \"\\xcf\",\n Jcirc: \"\\u0134\",\n Jcy: \"\\u0419\",\n Jfr: \"\\uD835\\uDD0D\",\n Jopf: \"\\uD835\\uDD41\",\n Jscr: \"\\uD835\\uDCA5\",\n Jsercy: \"\\u0408\",\n Jukcy: \"\\u0404\",\n KHcy: \"\\u0425\",\n KJcy: \"\\u040C\",\n Kappa: \"\\u039A\",\n Kcedil: \"\\u0136\",\n Kcy: \"\\u041A\",\n Kfr: \"\\uD835\\uDD0E\",\n Kopf: \"\\uD835\\uDD42\",\n Kscr: \"\\uD835\\uDCA6\",\n LJcy: \"\\u0409\",\n LT: \"<\",\n Lacute: \"\\u0139\",\n Lambda: \"\\u039B\",\n Lang: \"\\u27EA\",\n Laplacetrf: \"\\u2112\",\n Larr: \"\\u219E\",\n Lcaron: \"\\u013D\",\n Lcedil: \"\\u013B\",\n Lcy: \"\\u041B\",\n LeftAngleBracket: \"\\u27E8\",\n LeftArrow: \"\\u2190\",\n LeftArrowBar: \"\\u21E4\",\n LeftArrowRightArrow: \"\\u21C6\",\n LeftCeiling: \"\\u2308\",\n LeftDoubleBracket: \"\\u27E6\",\n LeftDownTeeVector: \"\\u2961\",\n LeftDownVector: \"\\u21C3\",\n LeftDownVectorBar: \"\\u2959\",\n LeftFloor: \"\\u230A\",\n LeftRightArrow: \"\\u2194\",\n LeftRightVector: \"\\u294E\",\n LeftTee: \"\\u22A3\",\n LeftTeeArrow: \"\\u21A4\",\n LeftTeeVector: \"\\u295A\",\n LeftTriangle: \"\\u22B2\",\n LeftTriangleBar: \"\\u29CF\",\n LeftTriangleEqual: \"\\u22B4\",\n LeftUpDownVector: \"\\u2951\",\n LeftUpTeeVector: \"\\u2960\",\n LeftUpVector: \"\\u21BF\",\n LeftUpVectorBar: \"\\u2958\",\n LeftVector: \"\\u21BC\",\n LeftVectorBar: \"\\u2952\",\n Leftarrow: \"\\u21D0\",\n Leftrightarrow: \"\\u21D4\",\n LessEqualGreater: \"\\u22DA\",\n LessFullEqual: \"\\u2266\",\n LessGreater: 
\"\\u2276\",\n LessLess: \"\\u2AA1\",\n LessSlantEqual: \"\\u2A7D\",\n LessTilde: \"\\u2272\",\n Lfr: \"\\uD835\\uDD0F\",\n Ll: \"\\u22D8\",\n Lleftarrow: \"\\u21DA\",\n Lmidot: \"\\u013F\",\n LongLeftArrow: \"\\u27F5\",\n LongLeftRightArrow: \"\\u27F7\",\n LongRightArrow: \"\\u27F6\",\n Longleftarrow: \"\\u27F8\",\n Longleftrightarrow: \"\\u27FA\",\n Longrightarrow: \"\\u27F9\",\n Lopf: \"\\uD835\\uDD43\",\n LowerLeftArrow: \"\\u2199\",\n LowerRightArrow: \"\\u2198\",\n Lscr: \"\\u2112\",\n Lsh: \"\\u21B0\",\n Lstrok: \"\\u0141\",\n Lt: \"\\u226A\",\n Map: \"\\u2905\",\n Mcy: \"\\u041C\",\n MediumSpace: \"\\u205F\",\n Mellintrf: \"\\u2133\",\n Mfr: \"\\uD835\\uDD10\",\n MinusPlus: \"\\u2213\",\n Mopf: \"\\uD835\\uDD44\",\n Mscr: \"\\u2133\",\n Mu: \"\\u039C\",\n NJcy: \"\\u040A\",\n Nacute: \"\\u0143\",\n Ncaron: \"\\u0147\",\n Ncedil: \"\\u0145\",\n Ncy: \"\\u041D\",\n NegativeMediumSpace: \"\\u200B\",\n NegativeThickSpace: \"\\u200B\",\n NegativeThinSpace: \"\\u200B\",\n NegativeVeryThinSpace: \"\\u200B\",\n NestedGreaterGreater: \"\\u226B\",\n NestedLessLess: \"\\u226A\",\n NewLine: \"\\n\",\n Nfr: \"\\uD835\\uDD11\",\n NoBreak: \"\\u2060\",\n NonBreakingSpace: \"\\xa0\",\n Nopf: \"\\u2115\",\n Not: \"\\u2AEC\",\n NotCongruent: \"\\u2262\",\n NotCupCap: \"\\u226D\",\n NotDoubleVerticalBar: \"\\u2226\",\n NotElement: \"\\u2209\",\n NotEqual: \"\\u2260\",\n NotEqualTilde: \"\\u2242\\u0338\",\n NotExists: \"\\u2204\",\n NotGreater: \"\\u226F\",\n NotGreaterEqual: \"\\u2271\",\n NotGreaterFullEqual: \"\\u2267\\u0338\",\n NotGreaterGreater: \"\\u226B\\u0338\",\n NotGreaterLess: \"\\u2279\",\n NotGreaterSlantEqual: \"\\u2A7E\\u0338\",\n NotGreaterTilde: \"\\u2275\",\n NotHumpDownHump: \"\\u224E\\u0338\",\n NotHumpEqual: \"\\u224F\\u0338\",\n NotLeftTriangle: \"\\u22EA\",\n NotLeftTriangleBar: \"\\u29CF\\u0338\",\n NotLeftTriangleEqual: \"\\u22EC\",\n NotLess: \"\\u226E\",\n NotLessEqual: \"\\u2270\",\n NotLessGreater: \"\\u2278\",\n NotLessLess: \"\\u226A\\u0338\",\n NotLessSlantEqual: \"\\u2A7D\\u0338\",\n NotLessTilde: \"\\u2274\",\n NotNestedGreaterGreater: \"\\u2AA2\\u0338\",\n NotNestedLessLess: \"\\u2AA1\\u0338\",\n NotPrecedes: \"\\u2280\",\n NotPrecedesEqual: \"\\u2AAF\\u0338\",\n NotPrecedesSlantEqual: \"\\u22E0\",\n NotReverseElement: \"\\u220C\",\n NotRightTriangle: \"\\u22EB\",\n NotRightTriangleBar: \"\\u29D0\\u0338\",\n NotRightTriangleEqual: \"\\u22ED\",\n NotSquareSubset: \"\\u228F\\u0338\",\n NotSquareSubsetEqual: \"\\u22E2\",\n NotSquareSuperset: \"\\u2290\\u0338\",\n NotSquareSupersetEqual: \"\\u22E3\",\n NotSubset: \"\\u2282\\u20D2\",\n NotSubsetEqual: \"\\u2288\",\n NotSucceeds: \"\\u2281\",\n NotSucceedsEqual: \"\\u2AB0\\u0338\",\n NotSucceedsSlantEqual: \"\\u22E1\",\n NotSucceedsTilde: \"\\u227F\\u0338\",\n NotSuperset: \"\\u2283\\u20D2\",\n NotSupersetEqual: \"\\u2289\",\n NotTilde: \"\\u2241\",\n NotTildeEqual: \"\\u2244\",\n NotTildeFullEqual: \"\\u2247\",\n NotTildeTilde: \"\\u2249\",\n NotVerticalBar: \"\\u2224\",\n Nscr: \"\\uD835\\uDCA9\",\n Ntilde: \"\\xd1\",\n Nu: \"\\u039D\",\n OElig: \"\\u0152\",\n Oacute: \"\\xd3\",\n Ocirc: \"\\xd4\",\n Ocy: \"\\u041E\",\n Odblac: \"\\u0150\",\n Ofr: \"\\uD835\\uDD12\",\n Ograve: \"\\xd2\",\n Omacr: \"\\u014C\",\n Omega: \"\\u03A9\",\n Omicron: \"\\u039F\",\n Oopf: \"\\uD835\\uDD46\",\n OpenCurlyDoubleQuote: \"\\u201C\",\n OpenCurlyQuote: \"\\u2018\",\n Or: \"\\u2A54\",\n Oscr: \"\\uD835\\uDCAA\",\n Oslash: \"\\xd8\",\n Otilde: \"\\xd5\",\n Otimes: \"\\u2A37\",\n Ouml: \"\\xd6\",\n OverBar: \"\\u203E\",\n OverBrace: 
\"\\u23DE\",\n OverBracket: \"\\u23B4\",\n OverParenthesis: \"\\u23DC\",\n PartialD: \"\\u2202\",\n Pcy: \"\\u041F\",\n Pfr: \"\\uD835\\uDD13\",\n Phi: \"\\u03A6\",\n Pi: \"\\u03A0\",\n PlusMinus: \"\\xb1\",\n Poincareplane: \"\\u210C\",\n Popf: \"\\u2119\",\n Pr: \"\\u2ABB\",\n Precedes: \"\\u227A\",\n PrecedesEqual: \"\\u2AAF\",\n PrecedesSlantEqual: \"\\u227C\",\n PrecedesTilde: \"\\u227E\",\n Prime: \"\\u2033\",\n Product: \"\\u220F\",\n Proportion: \"\\u2237\",\n Proportional: \"\\u221D\",\n Pscr: \"\\uD835\\uDCAB\",\n Psi: \"\\u03A8\",\n QUOT: '\"',\n Qfr: \"\\uD835\\uDD14\",\n Qopf: \"\\u211A\",\n Qscr: \"\\uD835\\uDCAC\",\n RBarr: \"\\u2910\",\n REG: \"\\xae\",\n Racute: \"\\u0154\",\n Rang: \"\\u27EB\",\n Rarr: \"\\u21A0\",\n Rarrtl: \"\\u2916\",\n Rcaron: \"\\u0158\",\n Rcedil: \"\\u0156\",\n Rcy: \"\\u0420\",\n Re: \"\\u211C\",\n ReverseElement: \"\\u220B\",\n ReverseEquilibrium: \"\\u21CB\",\n ReverseUpEquilibrium: \"\\u296F\",\n Rfr: \"\\u211C\",\n Rho: \"\\u03A1\",\n RightAngleBracket: \"\\u27E9\",\n RightArrow: \"\\u2192\",\n RightArrowBar: \"\\u21E5\",\n RightArrowLeftArrow: \"\\u21C4\",\n RightCeiling: \"\\u2309\",\n RightDoubleBracket: \"\\u27E7\",\n RightDownTeeVector: \"\\u295D\",\n RightDownVector: \"\\u21C2\",\n RightDownVectorBar: \"\\u2955\",\n RightFloor: \"\\u230B\",\n RightTee: \"\\u22A2\",\n RightTeeArrow: \"\\u21A6\",\n RightTeeVector: \"\\u295B\",\n RightTriangle: \"\\u22B3\",\n RightTriangleBar: \"\\u29D0\",\n RightTriangleEqual: \"\\u22B5\",\n RightUpDownVector: \"\\u294F\",\n RightUpTeeVector: \"\\u295C\",\n RightUpVector: \"\\u21BE\",\n RightUpVectorBar: \"\\u2954\",\n RightVector: \"\\u21C0\",\n RightVectorBar: \"\\u2953\",\n Rightarrow: \"\\u21D2\",\n Ropf: \"\\u211D\",\n RoundImplies: \"\\u2970\",\n Rrightarrow: \"\\u21DB\",\n Rscr: \"\\u211B\",\n Rsh: \"\\u21B1\",\n RuleDelayed: \"\\u29F4\",\n SHCHcy: \"\\u0429\",\n SHcy: \"\\u0428\",\n SOFTcy: \"\\u042C\",\n Sacute: \"\\u015A\",\n Sc: \"\\u2ABC\",\n Scaron: \"\\u0160\",\n Scedil: \"\\u015E\",\n Scirc: \"\\u015C\",\n Scy: \"\\u0421\",\n Sfr: \"\\uD835\\uDD16\",\n ShortDownArrow: \"\\u2193\",\n ShortLeftArrow: \"\\u2190\",\n ShortRightArrow: \"\\u2192\",\n ShortUpArrow: \"\\u2191\",\n Sigma: \"\\u03A3\",\n SmallCircle: \"\\u2218\",\n Sopf: \"\\uD835\\uDD4A\",\n Sqrt: \"\\u221A\",\n Square: \"\\u25A1\",\n SquareIntersection: \"\\u2293\",\n SquareSubset: \"\\u228F\",\n SquareSubsetEqual: \"\\u2291\",\n SquareSuperset: \"\\u2290\",\n SquareSupersetEqual: \"\\u2292\",\n SquareUnion: \"\\u2294\",\n Sscr: \"\\uD835\\uDCAE\",\n Star: \"\\u22C6\",\n Sub: \"\\u22D0\",\n Subset: \"\\u22D0\",\n SubsetEqual: \"\\u2286\",\n Succeeds: \"\\u227B\",\n SucceedsEqual: \"\\u2AB0\",\n SucceedsSlantEqual: \"\\u227D\",\n SucceedsTilde: \"\\u227F\",\n SuchThat: \"\\u220B\",\n Sum: \"\\u2211\",\n Sup: \"\\u22D1\",\n Superset: \"\\u2283\",\n SupersetEqual: \"\\u2287\",\n Supset: \"\\u22D1\",\n THORN: \"\\xde\",\n TRADE: \"\\u2122\",\n TSHcy: \"\\u040B\",\n TScy: \"\\u0426\",\n Tab: \"\t\",\n Tau: \"\\u03A4\",\n Tcaron: \"\\u0164\",\n Tcedil: \"\\u0162\",\n Tcy: \"\\u0422\",\n Tfr: \"\\uD835\\uDD17\",\n Therefore: \"\\u2234\",\n Theta: \"\\u0398\",\n ThickSpace: \"\\u205F\\u200A\",\n ThinSpace: \"\\u2009\",\n Tilde: \"\\u223C\",\n TildeEqual: \"\\u2243\",\n TildeFullEqual: \"\\u2245\",\n TildeTilde: \"\\u2248\",\n Topf: \"\\uD835\\uDD4B\",\n TripleDot: \"\\u20DB\",\n Tscr: \"\\uD835\\uDCAF\",\n Tstrok: \"\\u0166\",\n Uacute: \"\\xda\",\n Uarr: \"\\u219F\",\n Uarrocir: \"\\u2949\",\n Ubrcy: \"\\u040E\",\n Ubreve: \"\\u016C\",\n 
Ucirc: \"\\xdb\",\n Ucy: \"\\u0423\",\n Udblac: \"\\u0170\",\n Ufr: \"\\uD835\\uDD18\",\n Ugrave: \"\\xd9\",\n Umacr: \"\\u016A\",\n UnderBar: \"_\",\n UnderBrace: \"\\u23DF\",\n UnderBracket: \"\\u23B5\",\n UnderParenthesis: \"\\u23DD\",\n Union: \"\\u22C3\",\n UnionPlus: \"\\u228E\",\n Uogon: \"\\u0172\",\n Uopf: \"\\uD835\\uDD4C\",\n UpArrow: \"\\u2191\",\n UpArrowBar: \"\\u2912\",\n UpArrowDownArrow: \"\\u21C5\",\n UpDownArrow: \"\\u2195\",\n UpEquilibrium: \"\\u296E\",\n UpTee: \"\\u22A5\",\n UpTeeArrow: \"\\u21A5\",\n Uparrow: \"\\u21D1\",\n Updownarrow: \"\\u21D5\",\n UpperLeftArrow: \"\\u2196\",\n UpperRightArrow: \"\\u2197\",\n Upsi: \"\\u03D2\",\n Upsilon: \"\\u03A5\",\n Uring: \"\\u016E\",\n Uscr: \"\\uD835\\uDCB0\",\n Utilde: \"\\u0168\",\n Uuml: \"\\xdc\",\n VDash: \"\\u22AB\",\n Vbar: \"\\u2AEB\",\n Vcy: \"\\u0412\",\n Vdash: \"\\u22A9\",\n Vdashl: \"\\u2AE6\",\n Vee: \"\\u22C1\",\n Verbar: \"\\u2016\",\n Vert: \"\\u2016\",\n VerticalBar: \"\\u2223\",\n VerticalLine: \"|\",\n VerticalSeparator: \"\\u2758\",\n VerticalTilde: \"\\u2240\",\n VeryThinSpace: \"\\u200A\",\n Vfr: \"\\uD835\\uDD19\",\n Vopf: \"\\uD835\\uDD4D\",\n Vscr: \"\\uD835\\uDCB1\",\n Vvdash: \"\\u22AA\",\n Wcirc: \"\\u0174\",\n Wedge: \"\\u22C0\",\n Wfr: \"\\uD835\\uDD1A\",\n Wopf: \"\\uD835\\uDD4E\",\n Wscr: \"\\uD835\\uDCB2\",\n Xfr: \"\\uD835\\uDD1B\",\n Xi: \"\\u039E\",\n Xopf: \"\\uD835\\uDD4F\",\n Xscr: \"\\uD835\\uDCB3\",\n YAcy: \"\\u042F\",\n YIcy: \"\\u0407\",\n YUcy: \"\\u042E\",\n Yacute: \"\\xdd\",\n Ycirc: \"\\u0176\",\n Ycy: \"\\u042B\",\n Yfr: \"\\uD835\\uDD1C\",\n Yopf: \"\\uD835\\uDD50\",\n Yscr: \"\\uD835\\uDCB4\",\n Yuml: \"\\u0178\",\n ZHcy: \"\\u0416\",\n Zacute: \"\\u0179\",\n Zcaron: \"\\u017D\",\n Zcy: \"\\u0417\",\n Zdot: \"\\u017B\",\n ZeroWidthSpace: \"\\u200B\",\n Zeta: \"\\u0396\",\n Zfr: \"\\u2128\",\n Zopf: \"\\u2124\",\n Zscr: \"\\uD835\\uDCB5\",\n aacute: \"\\xe1\",\n abreve: \"\\u0103\",\n ac: \"\\u223E\",\n acE: \"\\u223E\\u0333\",\n acd: \"\\u223F\",\n acirc: \"\\xe2\",\n acute: \"\\xb4\",\n acy: \"\\u0430\",\n aelig: \"\\xe6\",\n af: \"\\u2061\",\n afr: \"\\uD835\\uDD1E\",\n agrave: \"\\xe0\",\n alefsym: \"\\u2135\",\n aleph: \"\\u2135\",\n alpha: \"\\u03B1\",\n amacr: \"\\u0101\",\n amalg: \"\\u2A3F\",\n amp: \"&\",\n and: \"\\u2227\",\n andand: \"\\u2A55\",\n andd: \"\\u2A5C\",\n andslope: \"\\u2A58\",\n andv: \"\\u2A5A\",\n ang: \"\\u2220\",\n ange: \"\\u29A4\",\n angle: \"\\u2220\",\n angmsd: \"\\u2221\",\n angmsdaa: \"\\u29A8\",\n angmsdab: \"\\u29A9\",\n angmsdac: \"\\u29AA\",\n angmsdad: \"\\u29AB\",\n angmsdae: \"\\u29AC\",\n angmsdaf: \"\\u29AD\",\n angmsdag: \"\\u29AE\",\n angmsdah: \"\\u29AF\",\n angrt: \"\\u221F\",\n angrtvb: \"\\u22BE\",\n angrtvbd: \"\\u299D\",\n angsph: \"\\u2222\",\n angst: \"\\xc5\",\n angzarr: \"\\u237C\",\n aogon: \"\\u0105\",\n aopf: \"\\uD835\\uDD52\",\n ap: \"\\u2248\",\n apE: \"\\u2A70\",\n apacir: \"\\u2A6F\",\n ape: \"\\u224A\",\n apid: \"\\u224B\",\n apos: \"'\",\n approx: \"\\u2248\",\n approxeq: \"\\u224A\",\n aring: \"\\xe5\",\n ascr: \"\\uD835\\uDCB6\",\n ast: \"*\",\n asymp: \"\\u2248\",\n asympeq: \"\\u224D\",\n atilde: \"\\xe3\",\n auml: \"\\xe4\",\n awconint: \"\\u2233\",\n awint: \"\\u2A11\",\n bNot: \"\\u2AED\",\n backcong: \"\\u224C\",\n backepsilon: \"\\u03F6\",\n backprime: \"\\u2035\",\n backsim: \"\\u223D\",\n backsimeq: \"\\u22CD\",\n barvee: \"\\u22BD\",\n barwed: \"\\u2305\",\n barwedge: \"\\u2305\",\n bbrk: \"\\u23B5\",\n bbrktbrk: \"\\u23B6\",\n bcong: \"\\u224C\",\n bcy: \"\\u0431\",\n bdquo: \"\\u201E\",\n 
becaus: \"\\u2235\",\n because: \"\\u2235\",\n bemptyv: \"\\u29B0\",\n bepsi: \"\\u03F6\",\n bernou: \"\\u212C\",\n beta: \"\\u03B2\",\n beth: \"\\u2136\",\n between: \"\\u226C\",\n bfr: \"\\uD835\\uDD1F\",\n bigcap: \"\\u22C2\",\n bigcirc: \"\\u25EF\",\n bigcup: \"\\u22C3\",\n bigodot: \"\\u2A00\",\n bigoplus: \"\\u2A01\",\n bigotimes: \"\\u2A02\",\n bigsqcup: \"\\u2A06\",\n bigstar: \"\\u2605\",\n bigtriangledown: \"\\u25BD\",\n bigtriangleup: \"\\u25B3\",\n biguplus: \"\\u2A04\",\n bigvee: \"\\u22C1\",\n bigwedge: \"\\u22C0\",\n bkarow: \"\\u290D\",\n blacklozenge: \"\\u29EB\",\n blacksquare: \"\\u25AA\",\n blacktriangle: \"\\u25B4\",\n blacktriangledown: \"\\u25BE\",\n blacktriangleleft: \"\\u25C2\",\n blacktriangleright: \"\\u25B8\",\n blank: \"\\u2423\",\n blk12: \"\\u2592\",\n blk14: \"\\u2591\",\n blk34: \"\\u2593\",\n block: \"\\u2588\",\n bne: \"=\\u20E5\",\n bnequiv: \"\\u2261\\u20E5\",\n bnot: \"\\u2310\",\n bopf: \"\\uD835\\uDD53\",\n bot: \"\\u22A5\",\n bottom: \"\\u22A5\",\n bowtie: \"\\u22C8\",\n boxDL: \"\\u2557\",\n boxDR: \"\\u2554\",\n boxDl: \"\\u2556\",\n boxDr: \"\\u2553\",\n boxH: \"\\u2550\",\n boxHD: \"\\u2566\",\n boxHU: \"\\u2569\",\n boxHd: \"\\u2564\",\n boxHu: \"\\u2567\",\n boxUL: \"\\u255D\",\n boxUR: \"\\u255A\",\n boxUl: \"\\u255C\",\n boxUr: \"\\u2559\",\n boxV: \"\\u2551\",\n boxVH: \"\\u256C\",\n boxVL: \"\\u2563\",\n boxVR: \"\\u2560\",\n boxVh: \"\\u256B\",\n boxVl: \"\\u2562\",\n boxVr: \"\\u255F\",\n boxbox: \"\\u29C9\",\n boxdL: \"\\u2555\",\n boxdR: \"\\u2552\",\n boxdl: \"\\u2510\",\n boxdr: \"\\u250C\",\n boxh: \"\\u2500\",\n boxhD: \"\\u2565\",\n boxhU: \"\\u2568\",\n boxhd: \"\\u252C\",\n boxhu: \"\\u2534\",\n boxminus: \"\\u229F\",\n boxplus: \"\\u229E\",\n boxtimes: \"\\u22A0\",\n boxuL: \"\\u255B\",\n boxuR: \"\\u2558\",\n boxul: \"\\u2518\",\n boxur: \"\\u2514\",\n boxv: \"\\u2502\",\n boxvH: \"\\u256A\",\n boxvL: \"\\u2561\",\n boxvR: \"\\u255E\",\n boxvh: \"\\u253C\",\n boxvl: \"\\u2524\",\n boxvr: \"\\u251C\",\n bprime: \"\\u2035\",\n breve: \"\\u02D8\",\n brvbar: \"\\xa6\",\n bscr: \"\\uD835\\uDCB7\",\n bsemi: \"\\u204F\",\n bsim: \"\\u223D\",\n bsime: \"\\u22CD\",\n bsol: \"\\\\\",\n bsolb: \"\\u29C5\",\n bsolhsub: \"\\u27C8\",\n bull: \"\\u2022\",\n bullet: \"\\u2022\",\n bump: \"\\u224E\",\n bumpE: \"\\u2AAE\",\n bumpe: \"\\u224F\",\n bumpeq: \"\\u224F\",\n cacute: \"\\u0107\",\n cap: \"\\u2229\",\n capand: \"\\u2A44\",\n capbrcup: \"\\u2A49\",\n capcap: \"\\u2A4B\",\n capcup: \"\\u2A47\",\n capdot: \"\\u2A40\",\n caps: \"\\u2229\\uFE00\",\n caret: \"\\u2041\",\n caron: \"\\u02C7\",\n ccaps: \"\\u2A4D\",\n ccaron: \"\\u010D\",\n ccedil: \"\\xe7\",\n ccirc: \"\\u0109\",\n ccups: \"\\u2A4C\",\n ccupssm: \"\\u2A50\",\n cdot: \"\\u010B\",\n cedil: \"\\xb8\",\n cemptyv: \"\\u29B2\",\n cent: \"\\xa2\",\n centerdot: \"\\xb7\",\n cfr: \"\\uD835\\uDD20\",\n chcy: \"\\u0447\",\n check: \"\\u2713\",\n checkmark: \"\\u2713\",\n chi: \"\\u03C7\",\n cir: \"\\u25CB\",\n cirE: \"\\u29C3\",\n circ: \"\\u02C6\",\n circeq: \"\\u2257\",\n circlearrowleft: \"\\u21BA\",\n circlearrowright: \"\\u21BB\",\n circledR: \"\\xae\",\n circledS: \"\\u24C8\",\n circledast: \"\\u229B\",\n circledcirc: \"\\u229A\",\n circleddash: \"\\u229D\",\n cire: \"\\u2257\",\n cirfnint: \"\\u2A10\",\n cirmid: \"\\u2AEF\",\n cirscir: \"\\u29C2\",\n clubs: \"\\u2663\",\n clubsuit: \"\\u2663\",\n colon: \":\",\n colone: \"\\u2254\",\n coloneq: \"\\u2254\",\n comma: \",\",\n commat: \"@\",\n comp: \"\\u2201\",\n compfn: \"\\u2218\",\n complement: \"\\u2201\",\n complexes: 
\"\\u2102\",\n cong: \"\\u2245\",\n congdot: \"\\u2A6D\",\n conint: \"\\u222E\",\n copf: \"\\uD835\\uDD54\",\n coprod: \"\\u2210\",\n copy: \"\\xa9\",\n copysr: \"\\u2117\",\n crarr: \"\\u21B5\",\n cross: \"\\u2717\",\n cscr: \"\\uD835\\uDCB8\",\n csub: \"\\u2ACF\",\n csube: \"\\u2AD1\",\n csup: \"\\u2AD0\",\n csupe: \"\\u2AD2\",\n ctdot: \"\\u22EF\",\n cudarrl: \"\\u2938\",\n cudarrr: \"\\u2935\",\n cuepr: \"\\u22DE\",\n cuesc: \"\\u22DF\",\n cularr: \"\\u21B6\",\n cularrp: \"\\u293D\",\n cup: \"\\u222A\",\n cupbrcap: \"\\u2A48\",\n cupcap: \"\\u2A46\",\n cupcup: \"\\u2A4A\",\n cupdot: \"\\u228D\",\n cupor: \"\\u2A45\",\n cups: \"\\u222A\\uFE00\",\n curarr: \"\\u21B7\",\n curarrm: \"\\u293C\",\n curlyeqprec: \"\\u22DE\",\n curlyeqsucc: \"\\u22DF\",\n curlyvee: \"\\u22CE\",\n curlywedge: \"\\u22CF\",\n curren: \"\\xa4\",\n curvearrowleft: \"\\u21B6\",\n curvearrowright: \"\\u21B7\",\n cuvee: \"\\u22CE\",\n cuwed: \"\\u22CF\",\n cwconint: \"\\u2232\",\n cwint: \"\\u2231\",\n cylcty: \"\\u232D\",\n dArr: \"\\u21D3\",\n dHar: \"\\u2965\",\n dagger: \"\\u2020\",\n daleth: \"\\u2138\",\n darr: \"\\u2193\",\n dash: \"\\u2010\",\n dashv: \"\\u22A3\",\n dbkarow: \"\\u290F\",\n dblac: \"\\u02DD\",\n dcaron: \"\\u010F\",\n dcy: \"\\u0434\",\n dd: \"\\u2146\",\n ddagger: \"\\u2021\",\n ddarr: \"\\u21CA\",\n ddotseq: \"\\u2A77\",\n deg: \"\\xb0\",\n delta: \"\\u03B4\",\n demptyv: \"\\u29B1\",\n dfisht: \"\\u297F\",\n dfr: \"\\uD835\\uDD21\",\n dharl: \"\\u21C3\",\n dharr: \"\\u21C2\",\n diam: \"\\u22C4\",\n diamond: \"\\u22C4\",\n diamondsuit: \"\\u2666\",\n diams: \"\\u2666\",\n die: \"\\xa8\",\n digamma: \"\\u03DD\",\n disin: \"\\u22F2\",\n div: \"\\xf7\",\n divide: \"\\xf7\",\n divideontimes: \"\\u22C7\",\n divonx: \"\\u22C7\",\n djcy: \"\\u0452\",\n dlcorn: \"\\u231E\",\n dlcrop: \"\\u230D\",\n dollar: \"$\",\n dopf: \"\\uD835\\uDD55\",\n dot: \"\\u02D9\",\n doteq: \"\\u2250\",\n doteqdot: \"\\u2251\",\n dotminus: \"\\u2238\",\n dotplus: \"\\u2214\",\n dotsquare: \"\\u22A1\",\n doublebarwedge: \"\\u2306\",\n downarrow: \"\\u2193\",\n downdownarrows: \"\\u21CA\",\n downharpoonleft: \"\\u21C3\",\n downharpoonright: \"\\u21C2\",\n drbkarow: \"\\u2910\",\n drcorn: \"\\u231F\",\n drcrop: \"\\u230C\",\n dscr: \"\\uD835\\uDCB9\",\n dscy: \"\\u0455\",\n dsol: \"\\u29F6\",\n dstrok: \"\\u0111\",\n dtdot: \"\\u22F1\",\n dtri: \"\\u25BF\",\n dtrif: \"\\u25BE\",\n duarr: \"\\u21F5\",\n duhar: \"\\u296F\",\n dwangle: \"\\u29A6\",\n dzcy: \"\\u045F\",\n dzigrarr: \"\\u27FF\",\n eDDot: \"\\u2A77\",\n eDot: \"\\u2251\",\n eacute: \"\\xe9\",\n easter: \"\\u2A6E\",\n ecaron: \"\\u011B\",\n ecir: \"\\u2256\",\n ecirc: \"\\xea\",\n ecolon: \"\\u2255\",\n ecy: \"\\u044D\",\n edot: \"\\u0117\",\n ee: \"\\u2147\",\n efDot: \"\\u2252\",\n efr: \"\\uD835\\uDD22\",\n eg: \"\\u2A9A\",\n egrave: \"\\xe8\",\n egs: \"\\u2A96\",\n egsdot: \"\\u2A98\",\n el: \"\\u2A99\",\n elinters: \"\\u23E7\",\n ell: \"\\u2113\",\n els: \"\\u2A95\",\n elsdot: \"\\u2A97\",\n emacr: \"\\u0113\",\n empty: \"\\u2205\",\n emptyset: \"\\u2205\",\n emptyv: \"\\u2205\",\n emsp13: \"\\u2004\",\n emsp14: \"\\u2005\",\n emsp: \"\\u2003\",\n eng: \"\\u014B\",\n ensp: \"\\u2002\",\n eogon: \"\\u0119\",\n eopf: \"\\uD835\\uDD56\",\n epar: \"\\u22D5\",\n eparsl: \"\\u29E3\",\n eplus: \"\\u2A71\",\n epsi: \"\\u03B5\",\n epsilon: \"\\u03B5\",\n epsiv: \"\\u03F5\",\n eqcirc: \"\\u2256\",\n eqcolon: \"\\u2255\",\n eqsim: \"\\u2242\",\n eqslantgtr: \"\\u2A96\",\n eqslantless: \"\\u2A95\",\n equals: \"=\",\n equest: \"\\u225F\",\n equiv: \"\\u2261\",\n equivDD: 
\"\\u2A78\",\n eqvparsl: \"\\u29E5\",\n erDot: \"\\u2253\",\n erarr: \"\\u2971\",\n escr: \"\\u212F\",\n esdot: \"\\u2250\",\n esim: \"\\u2242\",\n eta: \"\\u03B7\",\n eth: \"\\xf0\",\n euml: \"\\xeb\",\n euro: \"\\u20AC\",\n excl: \"!\",\n exist: \"\\u2203\",\n expectation: \"\\u2130\",\n exponentiale: \"\\u2147\",\n fallingdotseq: \"\\u2252\",\n fcy: \"\\u0444\",\n female: \"\\u2640\",\n ffilig: \"\\uFB03\",\n fflig: \"\\uFB00\",\n ffllig: \"\\uFB04\",\n ffr: \"\\uD835\\uDD23\",\n filig: \"\\uFB01\",\n fjlig: \"fj\",\n flat: \"\\u266D\",\n fllig: \"\\uFB02\",\n fltns: \"\\u25B1\",\n fnof: \"\\u0192\",\n fopf: \"\\uD835\\uDD57\",\n forall: \"\\u2200\",\n fork: \"\\u22D4\",\n forkv: \"\\u2AD9\",\n fpartint: \"\\u2A0D\",\n frac12: \"\\xbd\",\n frac13: \"\\u2153\",\n frac14: \"\\xbc\",\n frac15: \"\\u2155\",\n frac16: \"\\u2159\",\n frac18: \"\\u215B\",\n frac23: \"\\u2154\",\n frac25: \"\\u2156\",\n frac34: \"\\xbe\",\n frac35: \"\\u2157\",\n frac38: \"\\u215C\",\n frac45: \"\\u2158\",\n frac56: \"\\u215A\",\n frac58: \"\\u215D\",\n frac78: \"\\u215E\",\n frasl: \"\\u2044\",\n frown: \"\\u2322\",\n fscr: \"\\uD835\\uDCBB\",\n gE: \"\\u2267\",\n gEl: \"\\u2A8C\",\n gacute: \"\\u01F5\",\n gamma: \"\\u03B3\",\n gammad: \"\\u03DD\",\n gap: \"\\u2A86\",\n gbreve: \"\\u011F\",\n gcirc: \"\\u011D\",\n gcy: \"\\u0433\",\n gdot: \"\\u0121\",\n ge: \"\\u2265\",\n gel: \"\\u22DB\",\n geq: \"\\u2265\",\n geqq: \"\\u2267\",\n geqslant: \"\\u2A7E\",\n ges: \"\\u2A7E\",\n gescc: \"\\u2AA9\",\n gesdot: \"\\u2A80\",\n gesdoto: \"\\u2A82\",\n gesdotol: \"\\u2A84\",\n gesl: \"\\u22DB\\uFE00\",\n gesles: \"\\u2A94\",\n gfr: \"\\uD835\\uDD24\",\n gg: \"\\u226B\",\n ggg: \"\\u22D9\",\n gimel: \"\\u2137\",\n gjcy: \"\\u0453\",\n gl: \"\\u2277\",\n glE: \"\\u2A92\",\n gla: \"\\u2AA5\",\n glj: \"\\u2AA4\",\n gnE: \"\\u2269\",\n gnap: \"\\u2A8A\",\n gnapprox: \"\\u2A8A\",\n gne: \"\\u2A88\",\n gneq: \"\\u2A88\",\n gneqq: \"\\u2269\",\n gnsim: \"\\u22E7\",\n gopf: \"\\uD835\\uDD58\",\n grave: \"`\",\n gscr: \"\\u210A\",\n gsim: \"\\u2273\",\n gsime: \"\\u2A8E\",\n gsiml: \"\\u2A90\",\n gt: \">\",\n gtcc: \"\\u2AA7\",\n gtcir: \"\\u2A7A\",\n gtdot: \"\\u22D7\",\n gtlPar: \"\\u2995\",\n gtquest: \"\\u2A7C\",\n gtrapprox: \"\\u2A86\",\n gtrarr: \"\\u2978\",\n gtrdot: \"\\u22D7\",\n gtreqless: \"\\u22DB\",\n gtreqqless: \"\\u2A8C\",\n gtrless: \"\\u2277\",\n gtrsim: \"\\u2273\",\n gvertneqq: \"\\u2269\\uFE00\",\n gvnE: \"\\u2269\\uFE00\",\n hArr: \"\\u21D4\",\n hairsp: \"\\u200A\",\n half: \"\\xbd\",\n hamilt: \"\\u210B\",\n hardcy: \"\\u044A\",\n harr: \"\\u2194\",\n harrcir: \"\\u2948\",\n harrw: \"\\u21AD\",\n hbar: \"\\u210F\",\n hcirc: \"\\u0125\",\n hearts: \"\\u2665\",\n heartsuit: \"\\u2665\",\n hellip: \"\\u2026\",\n hercon: \"\\u22B9\",\n hfr: \"\\uD835\\uDD25\",\n hksearow: \"\\u2925\",\n hkswarow: \"\\u2926\",\n hoarr: \"\\u21FF\",\n homtht: \"\\u223B\",\n hookleftarrow: \"\\u21A9\",\n hookrightarrow: \"\\u21AA\",\n hopf: \"\\uD835\\uDD59\",\n horbar: \"\\u2015\",\n hscr: \"\\uD835\\uDCBD\",\n hslash: \"\\u210F\",\n hstrok: \"\\u0127\",\n hybull: \"\\u2043\",\n hyphen: \"\\u2010\",\n iacute: \"\\xed\",\n ic: \"\\u2063\",\n icirc: \"\\xee\",\n icy: \"\\u0438\",\n iecy: \"\\u0435\",\n iexcl: \"\\xa1\",\n iff: \"\\u21D4\",\n ifr: \"\\uD835\\uDD26\",\n igrave: \"\\xec\",\n ii: \"\\u2148\",\n iiiint: \"\\u2A0C\",\n iiint: \"\\u222D\",\n iinfin: \"\\u29DC\",\n iiota: \"\\u2129\",\n ijlig: \"\\u0133\",\n imacr: \"\\u012B\",\n image: \"\\u2111\",\n imagline: \"\\u2110\",\n imagpart: \"\\u2111\",\n imath: 
\"\\u0131\",\n imof: \"\\u22B7\",\n imped: \"\\u01B5\",\n in: \"\\u2208\",\n incare: \"\\u2105\",\n infin: \"\\u221E\",\n infintie: \"\\u29DD\",\n inodot: \"\\u0131\",\n int: \"\\u222B\",\n intcal: \"\\u22BA\",\n integers: \"\\u2124\",\n intercal: \"\\u22BA\",\n intlarhk: \"\\u2A17\",\n intprod: \"\\u2A3C\",\n iocy: \"\\u0451\",\n iogon: \"\\u012F\",\n iopf: \"\\uD835\\uDD5A\",\n iota: \"\\u03B9\",\n iprod: \"\\u2A3C\",\n iquest: \"\\xbf\",\n iscr: \"\\uD835\\uDCBE\",\n isin: \"\\u2208\",\n isinE: \"\\u22F9\",\n isindot: \"\\u22F5\",\n isins: \"\\u22F4\",\n isinsv: \"\\u22F3\",\n isinv: \"\\u2208\",\n it: \"\\u2062\",\n itilde: \"\\u0129\",\n iukcy: \"\\u0456\",\n iuml: \"\\xef\",\n jcirc: \"\\u0135\",\n jcy: \"\\u0439\",\n jfr: \"\\uD835\\uDD27\",\n jmath: \"\\u0237\",\n jopf: \"\\uD835\\uDD5B\",\n jscr: \"\\uD835\\uDCBF\",\n jsercy: \"\\u0458\",\n jukcy: \"\\u0454\",\n kappa: \"\\u03BA\",\n kappav: \"\\u03F0\",\n kcedil: \"\\u0137\",\n kcy: \"\\u043A\",\n kfr: \"\\uD835\\uDD28\",\n kgreen: \"\\u0138\",\n khcy: \"\\u0445\",\n kjcy: \"\\u045C\",\n kopf: \"\\uD835\\uDD5C\",\n kscr: \"\\uD835\\uDCC0\",\n lAarr: \"\\u21DA\",\n lArr: \"\\u21D0\",\n lAtail: \"\\u291B\",\n lBarr: \"\\u290E\",\n lE: \"\\u2266\",\n lEg: \"\\u2A8B\",\n lHar: \"\\u2962\",\n lacute: \"\\u013A\",\n laemptyv: \"\\u29B4\",\n lagran: \"\\u2112\",\n lambda: \"\\u03BB\",\n lang: \"\\u27E8\",\n langd: \"\\u2991\",\n langle: \"\\u27E8\",\n lap: \"\\u2A85\",\n laquo: \"\\xab\",\n larr: \"\\u2190\",\n larrb: \"\\u21E4\",\n larrbfs: \"\\u291F\",\n larrfs: \"\\u291D\",\n larrhk: \"\\u21A9\",\n larrlp: \"\\u21AB\",\n larrpl: \"\\u2939\",\n larrsim: \"\\u2973\",\n larrtl: \"\\u21A2\",\n lat: \"\\u2AAB\",\n latail: \"\\u2919\",\n late: \"\\u2AAD\",\n lates: \"\\u2AAD\\uFE00\",\n lbarr: \"\\u290C\",\n lbbrk: \"\\u2772\",\n lbrace: \"{\",\n lbrack: \"[\",\n lbrke: \"\\u298B\",\n lbrksld: \"\\u298F\",\n lbrkslu: \"\\u298D\",\n lcaron: \"\\u013E\",\n lcedil: \"\\u013C\",\n lceil: \"\\u2308\",\n lcub: \"{\",\n lcy: \"\\u043B\",\n ldca: \"\\u2936\",\n ldquo: \"\\u201C\",\n ldquor: \"\\u201E\",\n ldrdhar: \"\\u2967\",\n ldrushar: \"\\u294B\",\n ldsh: \"\\u21B2\",\n le: \"\\u2264\",\n leftarrow: \"\\u2190\",\n leftarrowtail: \"\\u21A2\",\n leftharpoondown: \"\\u21BD\",\n leftharpoonup: \"\\u21BC\",\n leftleftarrows: \"\\u21C7\",\n leftrightarrow: \"\\u2194\",\n leftrightarrows: \"\\u21C6\",\n leftrightharpoons: \"\\u21CB\",\n leftrightsquigarrow: \"\\u21AD\",\n leftthreetimes: \"\\u22CB\",\n leg: \"\\u22DA\",\n leq: \"\\u2264\",\n leqq: \"\\u2266\",\n leqslant: \"\\u2A7D\",\n les: \"\\u2A7D\",\n lescc: \"\\u2AA8\",\n lesdot: \"\\u2A7F\",\n lesdoto: \"\\u2A81\",\n lesdotor: \"\\u2A83\",\n lesg: \"\\u22DA\\uFE00\",\n lesges: \"\\u2A93\",\n lessapprox: \"\\u2A85\",\n lessdot: \"\\u22D6\",\n lesseqgtr: \"\\u22DA\",\n lesseqqgtr: \"\\u2A8B\",\n lessgtr: \"\\u2276\",\n lesssim: \"\\u2272\",\n lfisht: \"\\u297C\",\n lfloor: \"\\u230A\",\n lfr: \"\\uD835\\uDD29\",\n lg: \"\\u2276\",\n lgE: \"\\u2A91\",\n lhard: \"\\u21BD\",\n lharu: \"\\u21BC\",\n lharul: \"\\u296A\",\n lhblk: \"\\u2584\",\n ljcy: \"\\u0459\",\n ll: \"\\u226A\",\n llarr: \"\\u21C7\",\n llcorner: \"\\u231E\",\n llhard: \"\\u296B\",\n lltri: \"\\u25FA\",\n lmidot: \"\\u0140\",\n lmoust: \"\\u23B0\",\n lmoustache: \"\\u23B0\",\n lnE: \"\\u2268\",\n lnap: \"\\u2A89\",\n lnapprox: \"\\u2A89\",\n lne: \"\\u2A87\",\n lneq: \"\\u2A87\",\n lneqq: \"\\u2268\",\n lnsim: \"\\u22E6\",\n loang: \"\\u27EC\",\n loarr: \"\\u21FD\",\n lobrk: \"\\u27E6\",\n longleftarrow: \"\\u27F5\",\n 
longleftrightarrow: \"\\u27F7\",\n longmapsto: \"\\u27FC\",\n longrightarrow: \"\\u27F6\",\n looparrowleft: \"\\u21AB\",\n looparrowright: \"\\u21AC\",\n lopar: \"\\u2985\",\n lopf: \"\\uD835\\uDD5D\",\n loplus: \"\\u2A2D\",\n lotimes: \"\\u2A34\",\n lowast: \"\\u2217\",\n lowbar: \"_\",\n loz: \"\\u25CA\",\n lozenge: \"\\u25CA\",\n lozf: \"\\u29EB\",\n lpar: \"(\",\n lparlt: \"\\u2993\",\n lrarr: \"\\u21C6\",\n lrcorner: \"\\u231F\",\n lrhar: \"\\u21CB\",\n lrhard: \"\\u296D\",\n lrm: \"\\u200E\",\n lrtri: \"\\u22BF\",\n lsaquo: \"\\u2039\",\n lscr: \"\\uD835\\uDCC1\",\n lsh: \"\\u21B0\",\n lsim: \"\\u2272\",\n lsime: \"\\u2A8D\",\n lsimg: \"\\u2A8F\",\n lsqb: \"[\",\n lsquo: \"\\u2018\",\n lsquor: \"\\u201A\",\n lstrok: \"\\u0142\",\n lt: \"<\",\n ltcc: \"\\u2AA6\",\n ltcir: \"\\u2A79\",\n ltdot: \"\\u22D6\",\n lthree: \"\\u22CB\",\n ltimes: \"\\u22C9\",\n ltlarr: \"\\u2976\",\n ltquest: \"\\u2A7B\",\n ltrPar: \"\\u2996\",\n ltri: \"\\u25C3\",\n ltrie: \"\\u22B4\",\n ltrif: \"\\u25C2\",\n lurdshar: \"\\u294A\",\n luruhar: \"\\u2966\",\n lvertneqq: \"\\u2268\\uFE00\",\n lvnE: \"\\u2268\\uFE00\",\n mDDot: \"\\u223A\",\n macr: \"\\xaf\",\n male: \"\\u2642\",\n malt: \"\\u2720\",\n maltese: \"\\u2720\",\n map: \"\\u21A6\",\n mapsto: \"\\u21A6\",\n mapstodown: \"\\u21A7\",\n mapstoleft: \"\\u21A4\",\n mapstoup: \"\\u21A5\",\n marker: \"\\u25AE\",\n mcomma: \"\\u2A29\",\n mcy: \"\\u043C\",\n mdash: \"\\u2014\",\n measuredangle: \"\\u2221\",\n mfr: \"\\uD835\\uDD2A\",\n mho: \"\\u2127\",\n micro: \"\\xb5\",\n mid: \"\\u2223\",\n midast: \"*\",\n midcir: \"\\u2AF0\",\n middot: \"\\xb7\",\n minus: \"\\u2212\",\n minusb: \"\\u229F\",\n minusd: \"\\u2238\",\n minusdu: \"\\u2A2A\",\n mlcp: \"\\u2ADB\",\n mldr: \"\\u2026\",\n mnplus: \"\\u2213\",\n models: \"\\u22A7\",\n mopf: \"\\uD835\\uDD5E\",\n mp: \"\\u2213\",\n mscr: \"\\uD835\\uDCC2\",\n mstpos: \"\\u223E\",\n mu: \"\\u03BC\",\n multimap: \"\\u22B8\",\n mumap: \"\\u22B8\",\n nGg: \"\\u22D9\\u0338\",\n nGt: \"\\u226B\\u20D2\",\n nGtv: \"\\u226B\\u0338\",\n nLeftarrow: \"\\u21CD\",\n nLeftrightarrow: \"\\u21CE\",\n nLl: \"\\u22D8\\u0338\",\n nLt: \"\\u226A\\u20D2\",\n nLtv: \"\\u226A\\u0338\",\n nRightarrow: \"\\u21CF\",\n nVDash: \"\\u22AF\",\n nVdash: \"\\u22AE\",\n nabla: \"\\u2207\",\n nacute: \"\\u0144\",\n nang: \"\\u2220\\u20D2\",\n nap: \"\\u2249\",\n napE: \"\\u2A70\\u0338\",\n napid: \"\\u224B\\u0338\",\n napos: \"\\u0149\",\n napprox: \"\\u2249\",\n natur: \"\\u266E\",\n natural: \"\\u266E\",\n naturals: \"\\u2115\",\n nbsp: \"\\xa0\",\n nbump: \"\\u224E\\u0338\",\n nbumpe: \"\\u224F\\u0338\",\n ncap: \"\\u2A43\",\n ncaron: \"\\u0148\",\n ncedil: \"\\u0146\",\n ncong: \"\\u2247\",\n ncongdot: \"\\u2A6D\\u0338\",\n ncup: \"\\u2A42\",\n ncy: \"\\u043D\",\n ndash: \"\\u2013\",\n ne: \"\\u2260\",\n neArr: \"\\u21D7\",\n nearhk: \"\\u2924\",\n nearr: \"\\u2197\",\n nearrow: \"\\u2197\",\n nedot: \"\\u2250\\u0338\",\n nequiv: \"\\u2262\",\n nesear: \"\\u2928\",\n nesim: \"\\u2242\\u0338\",\n nexist: \"\\u2204\",\n nexists: \"\\u2204\",\n nfr: \"\\uD835\\uDD2B\",\n ngE: \"\\u2267\\u0338\",\n nge: \"\\u2271\",\n ngeq: \"\\u2271\",\n ngeqq: \"\\u2267\\u0338\",\n ngeqslant: \"\\u2A7E\\u0338\",\n nges: \"\\u2A7E\\u0338\",\n ngsim: \"\\u2275\",\n ngt: \"\\u226F\",\n ngtr: \"\\u226F\",\n nhArr: \"\\u21CE\",\n nharr: \"\\u21AE\",\n nhpar: \"\\u2AF2\",\n ni: \"\\u220B\",\n nis: \"\\u22FC\",\n nisd: \"\\u22FA\",\n niv: \"\\u220B\",\n njcy: \"\\u045A\",\n nlArr: \"\\u21CD\",\n nlE: \"\\u2266\\u0338\",\n nlarr: \"\\u219A\",\n nldr: \"\\u2025\",\n nle: 
\"\\u2270\",\n nleftarrow: \"\\u219A\",\n nleftrightarrow: \"\\u21AE\",\n nleq: \"\\u2270\",\n nleqq: \"\\u2266\\u0338\",\n nleqslant: \"\\u2A7D\\u0338\",\n nles: \"\\u2A7D\\u0338\",\n nless: \"\\u226E\",\n nlsim: \"\\u2274\",\n nlt: \"\\u226E\",\n nltri: \"\\u22EA\",\n nltrie: \"\\u22EC\",\n nmid: \"\\u2224\",\n nopf: \"\\uD835\\uDD5F\",\n not: \"\\xac\",\n notin: \"\\u2209\",\n notinE: \"\\u22F9\\u0338\",\n notindot: \"\\u22F5\\u0338\",\n notinva: \"\\u2209\",\n notinvb: \"\\u22F7\",\n notinvc: \"\\u22F6\",\n notni: \"\\u220C\",\n notniva: \"\\u220C\",\n notnivb: \"\\u22FE\",\n notnivc: \"\\u22FD\",\n npar: \"\\u2226\",\n nparallel: \"\\u2226\",\n nparsl: \"\\u2AFD\\u20E5\",\n npart: \"\\u2202\\u0338\",\n npolint: \"\\u2A14\",\n npr: \"\\u2280\",\n nprcue: \"\\u22E0\",\n npre: \"\\u2AAF\\u0338\",\n nprec: \"\\u2280\",\n npreceq: \"\\u2AAF\\u0338\",\n nrArr: \"\\u21CF\",\n nrarr: \"\\u219B\",\n nrarrc: \"\\u2933\\u0338\",\n nrarrw: \"\\u219D\\u0338\",\n nrightarrow: \"\\u219B\",\n nrtri: \"\\u22EB\",\n nrtrie: \"\\u22ED\",\n nsc: \"\\u2281\",\n nsccue: \"\\u22E1\",\n nsce: \"\\u2AB0\\u0338\",\n nscr: \"\\uD835\\uDCC3\",\n nshortmid: \"\\u2224\",\n nshortparallel: \"\\u2226\",\n nsim: \"\\u2241\",\n nsime: \"\\u2244\",\n nsimeq: \"\\u2244\",\n nsmid: \"\\u2224\",\n nspar: \"\\u2226\",\n nsqsube: \"\\u22E2\",\n nsqsupe: \"\\u22E3\",\n nsub: \"\\u2284\",\n nsubE: \"\\u2AC5\\u0338\",\n nsube: \"\\u2288\",\n nsubset: \"\\u2282\\u20D2\",\n nsubseteq: \"\\u2288\",\n nsubseteqq: \"\\u2AC5\\u0338\",\n nsucc: \"\\u2281\",\n nsucceq: \"\\u2AB0\\u0338\",\n nsup: \"\\u2285\",\n nsupE: \"\\u2AC6\\u0338\",\n nsupe: \"\\u2289\",\n nsupset: \"\\u2283\\u20D2\",\n nsupseteq: \"\\u2289\",\n nsupseteqq: \"\\u2AC6\\u0338\",\n ntgl: \"\\u2279\",\n ntilde: \"\\xf1\",\n ntlg: \"\\u2278\",\n ntriangleleft: \"\\u22EA\",\n ntrianglelefteq: \"\\u22EC\",\n ntriangleright: \"\\u22EB\",\n ntrianglerighteq: \"\\u22ED\",\n nu: \"\\u03BD\",\n num: \"#\",\n numero: \"\\u2116\",\n numsp: \"\\u2007\",\n nvDash: \"\\u22AD\",\n nvHarr: \"\\u2904\",\n nvap: \"\\u224D\\u20D2\",\n nvdash: \"\\u22AC\",\n nvge: \"\\u2265\\u20D2\",\n nvgt: \">\\u20D2\",\n nvinfin: \"\\u29DE\",\n nvlArr: \"\\u2902\",\n nvle: \"\\u2264\\u20D2\",\n nvlt: \"<\\u20D2\",\n nvltrie: \"\\u22B4\\u20D2\",\n nvrArr: \"\\u2903\",\n nvrtrie: \"\\u22B5\\u20D2\",\n nvsim: \"\\u223C\\u20D2\",\n nwArr: \"\\u21D6\",\n nwarhk: \"\\u2923\",\n nwarr: \"\\u2196\",\n nwarrow: \"\\u2196\",\n nwnear: \"\\u2927\",\n oS: \"\\u24C8\",\n oacute: \"\\xf3\",\n oast: \"\\u229B\",\n ocir: \"\\u229A\",\n ocirc: \"\\xf4\",\n ocy: \"\\u043E\",\n odash: \"\\u229D\",\n odblac: \"\\u0151\",\n odiv: \"\\u2A38\",\n odot: \"\\u2299\",\n odsold: \"\\u29BC\",\n oelig: \"\\u0153\",\n ofcir: \"\\u29BF\",\n ofr: \"\\uD835\\uDD2C\",\n ogon: \"\\u02DB\",\n ograve: \"\\xf2\",\n ogt: \"\\u29C1\",\n ohbar: \"\\u29B5\",\n ohm: \"\\u03A9\",\n oint: \"\\u222E\",\n olarr: \"\\u21BA\",\n olcir: \"\\u29BE\",\n olcross: \"\\u29BB\",\n oline: \"\\u203E\",\n olt: \"\\u29C0\",\n omacr: \"\\u014D\",\n omega: \"\\u03C9\",\n omicron: \"\\u03BF\",\n omid: \"\\u29B6\",\n ominus: \"\\u2296\",\n oopf: \"\\uD835\\uDD60\",\n opar: \"\\u29B7\",\n operp: \"\\u29B9\",\n oplus: \"\\u2295\",\n or: \"\\u2228\",\n orarr: \"\\u21BB\",\n ord: \"\\u2A5D\",\n order: \"\\u2134\",\n orderof: \"\\u2134\",\n ordf: \"\\xaa\",\n ordm: \"\\xba\",\n origof: \"\\u22B6\",\n oror: \"\\u2A56\",\n orslope: \"\\u2A57\",\n orv: \"\\u2A5B\",\n oscr: \"\\u2134\",\n oslash: \"\\xf8\",\n osol: \"\\u2298\",\n otilde: \"\\xf5\",\n otimes: 
\"\\u2297\",\n otimesas: \"\\u2A36\",\n ouml: \"\\xf6\",\n ovbar: \"\\u233D\",\n par: \"\\u2225\",\n para: \"\\xb6\",\n parallel: \"\\u2225\",\n parsim: \"\\u2AF3\",\n parsl: \"\\u2AFD\",\n part: \"\\u2202\",\n pcy: \"\\u043F\",\n percnt: \"%\",\n period: \".\",\n permil: \"\\u2030\",\n perp: \"\\u22A5\",\n pertenk: \"\\u2031\",\n pfr: \"\\uD835\\uDD2D\",\n phi: \"\\u03C6\",\n phiv: \"\\u03D5\",\n phmmat: \"\\u2133\",\n phone: \"\\u260E\",\n pi: \"\\u03C0\",\n pitchfork: \"\\u22D4\",\n piv: \"\\u03D6\",\n planck: \"\\u210F\",\n planckh: \"\\u210E\",\n plankv: \"\\u210F\",\n plus: \"+\",\n plusacir: \"\\u2A23\",\n plusb: \"\\u229E\",\n pluscir: \"\\u2A22\",\n plusdo: \"\\u2214\",\n plusdu: \"\\u2A25\",\n pluse: \"\\u2A72\",\n plusmn: \"\\xb1\",\n plussim: \"\\u2A26\",\n plustwo: \"\\u2A27\",\n pm: \"\\xb1\",\n pointint: \"\\u2A15\",\n popf: \"\\uD835\\uDD61\",\n pound: \"\\xa3\",\n pr: \"\\u227A\",\n prE: \"\\u2AB3\",\n prap: \"\\u2AB7\",\n prcue: \"\\u227C\",\n pre: \"\\u2AAF\",\n prec: \"\\u227A\",\n precapprox: \"\\u2AB7\",\n preccurlyeq: \"\\u227C\",\n preceq: \"\\u2AAF\",\n precnapprox: \"\\u2AB9\",\n precneqq: \"\\u2AB5\",\n precnsim: \"\\u22E8\",\n precsim: \"\\u227E\",\n prime: \"\\u2032\",\n primes: \"\\u2119\",\n prnE: \"\\u2AB5\",\n prnap: \"\\u2AB9\",\n prnsim: \"\\u22E8\",\n prod: \"\\u220F\",\n profalar: \"\\u232E\",\n profline: \"\\u2312\",\n profsurf: \"\\u2313\",\n prop: \"\\u221D\",\n propto: \"\\u221D\",\n prsim: \"\\u227E\",\n prurel: \"\\u22B0\",\n pscr: \"\\uD835\\uDCC5\",\n psi: \"\\u03C8\",\n puncsp: \"\\u2008\",\n qfr: \"\\uD835\\uDD2E\",\n qint: \"\\u2A0C\",\n qopf: \"\\uD835\\uDD62\",\n qprime: \"\\u2057\",\n qscr: \"\\uD835\\uDCC6\",\n quaternions: \"\\u210D\",\n quatint: \"\\u2A16\",\n quest: \"?\",\n questeq: \"\\u225F\",\n quot: '\"',\n rAarr: \"\\u21DB\",\n rArr: \"\\u21D2\",\n rAtail: \"\\u291C\",\n rBarr: \"\\u290F\",\n rHar: \"\\u2964\",\n race: \"\\u223D\\u0331\",\n racute: \"\\u0155\",\n radic: \"\\u221A\",\n raemptyv: \"\\u29B3\",\n rang: \"\\u27E9\",\n rangd: \"\\u2992\",\n range: \"\\u29A5\",\n rangle: \"\\u27E9\",\n raquo: \"\\xbb\",\n rarr: \"\\u2192\",\n rarrap: \"\\u2975\",\n rarrb: \"\\u21E5\",\n rarrbfs: \"\\u2920\",\n rarrc: \"\\u2933\",\n rarrfs: \"\\u291E\",\n rarrhk: \"\\u21AA\",\n rarrlp: \"\\u21AC\",\n rarrpl: \"\\u2945\",\n rarrsim: \"\\u2974\",\n rarrtl: \"\\u21A3\",\n rarrw: \"\\u219D\",\n ratail: \"\\u291A\",\n ratio: \"\\u2236\",\n rationals: \"\\u211A\",\n rbarr: \"\\u290D\",\n rbbrk: \"\\u2773\",\n rbrace: \"}\",\n rbrack: \"]\",\n rbrke: \"\\u298C\",\n rbrksld: \"\\u298E\",\n rbrkslu: \"\\u2990\",\n rcaron: \"\\u0159\",\n rcedil: \"\\u0157\",\n rceil: \"\\u2309\",\n rcub: \"}\",\n rcy: \"\\u0440\",\n rdca: \"\\u2937\",\n rdldhar: \"\\u2969\",\n rdquo: \"\\u201D\",\n rdquor: \"\\u201D\",\n rdsh: \"\\u21B3\",\n real: \"\\u211C\",\n realine: \"\\u211B\",\n realpart: \"\\u211C\",\n reals: \"\\u211D\",\n rect: \"\\u25AD\",\n reg: \"\\xae\",\n rfisht: \"\\u297D\",\n rfloor: \"\\u230B\",\n rfr: \"\\uD835\\uDD2F\",\n rhard: \"\\u21C1\",\n rharu: \"\\u21C0\",\n rharul: \"\\u296C\",\n rho: \"\\u03C1\",\n rhov: \"\\u03F1\",\n rightarrow: \"\\u2192\",\n rightarrowtail: \"\\u21A3\",\n rightharpoondown: \"\\u21C1\",\n rightharpoonup: \"\\u21C0\",\n rightleftarrows: \"\\u21C4\",\n rightleftharpoons: \"\\u21CC\",\n rightrightarrows: \"\\u21C9\",\n rightsquigarrow: \"\\u219D\",\n rightthreetimes: \"\\u22CC\",\n ring: \"\\u02DA\",\n risingdotseq: \"\\u2253\",\n rlarr: \"\\u21C4\",\n rlhar: \"\\u21CC\",\n rlm: \"\\u200F\",\n rmoust: \"\\u23B1\",\n 
rmoustache: \"\\u23B1\",\n rnmid: \"\\u2AEE\",\n roang: \"\\u27ED\",\n roarr: \"\\u21FE\",\n robrk: \"\\u27E7\",\n ropar: \"\\u2986\",\n ropf: \"\\uD835\\uDD63\",\n roplus: \"\\u2A2E\",\n rotimes: \"\\u2A35\",\n rpar: \")\",\n rpargt: \"\\u2994\",\n rppolint: \"\\u2A12\",\n rrarr: \"\\u21C9\",\n rsaquo: \"\\u203A\",\n rscr: \"\\uD835\\uDCC7\",\n rsh: \"\\u21B1\",\n rsqb: \"]\",\n rsquo: \"\\u2019\",\n rsquor: \"\\u2019\",\n rthree: \"\\u22CC\",\n rtimes: \"\\u22CA\",\n rtri: \"\\u25B9\",\n rtrie: \"\\u22B5\",\n rtrif: \"\\u25B8\",\n rtriltri: \"\\u29CE\",\n ruluhar: \"\\u2968\",\n rx: \"\\u211E\",\n sacute: \"\\u015B\",\n sbquo: \"\\u201A\",\n sc: \"\\u227B\",\n scE: \"\\u2AB4\",\n scap: \"\\u2AB8\",\n scaron: \"\\u0161\",\n sccue: \"\\u227D\",\n sce: \"\\u2AB0\",\n scedil: \"\\u015F\",\n scirc: \"\\u015D\",\n scnE: \"\\u2AB6\",\n scnap: \"\\u2ABA\",\n scnsim: \"\\u22E9\",\n scpolint: \"\\u2A13\",\n scsim: \"\\u227F\",\n scy: \"\\u0441\",\n sdot: \"\\u22C5\",\n sdotb: \"\\u22A1\",\n sdote: \"\\u2A66\",\n seArr: \"\\u21D8\",\n searhk: \"\\u2925\",\n searr: \"\\u2198\",\n searrow: \"\\u2198\",\n sect: \"\\xa7\",\n semi: \";\",\n seswar: \"\\u2929\",\n setminus: \"\\u2216\",\n setmn: \"\\u2216\",\n sext: \"\\u2736\",\n sfr: \"\\uD835\\uDD30\",\n sfrown: \"\\u2322\",\n sharp: \"\\u266F\",\n shchcy: \"\\u0449\",\n shcy: \"\\u0448\",\n shortmid: \"\\u2223\",\n shortparallel: \"\\u2225\",\n shy: \"\\xad\",\n sigma: \"\\u03C3\",\n sigmaf: \"\\u03C2\",\n sigmav: \"\\u03C2\",\n sim: \"\\u223C\",\n simdot: \"\\u2A6A\",\n sime: \"\\u2243\",\n simeq: \"\\u2243\",\n simg: \"\\u2A9E\",\n simgE: \"\\u2AA0\",\n siml: \"\\u2A9D\",\n simlE: \"\\u2A9F\",\n simne: \"\\u2246\",\n simplus: \"\\u2A24\",\n simrarr: \"\\u2972\",\n slarr: \"\\u2190\",\n smallsetminus: \"\\u2216\",\n smashp: \"\\u2A33\",\n smeparsl: \"\\u29E4\",\n smid: \"\\u2223\",\n smile: \"\\u2323\",\n smt: \"\\u2AAA\",\n smte: \"\\u2AAC\",\n smtes: \"\\u2AAC\\uFE00\",\n softcy: \"\\u044C\",\n sol: \"/\",\n solb: \"\\u29C4\",\n solbar: \"\\u233F\",\n sopf: \"\\uD835\\uDD64\",\n spades: \"\\u2660\",\n spadesuit: \"\\u2660\",\n spar: \"\\u2225\",\n sqcap: \"\\u2293\",\n sqcaps: \"\\u2293\\uFE00\",\n sqcup: \"\\u2294\",\n sqcups: \"\\u2294\\uFE00\",\n sqsub: \"\\u228F\",\n sqsube: \"\\u2291\",\n sqsubset: \"\\u228F\",\n sqsubseteq: \"\\u2291\",\n sqsup: \"\\u2290\",\n sqsupe: \"\\u2292\",\n sqsupset: \"\\u2290\",\n sqsupseteq: \"\\u2292\",\n squ: \"\\u25A1\",\n square: \"\\u25A1\",\n squarf: \"\\u25AA\",\n squf: \"\\u25AA\",\n srarr: \"\\u2192\",\n sscr: \"\\uD835\\uDCC8\",\n ssetmn: \"\\u2216\",\n ssmile: \"\\u2323\",\n sstarf: \"\\u22C6\",\n star: \"\\u2606\",\n starf: \"\\u2605\",\n straightepsilon: \"\\u03F5\",\n straightphi: \"\\u03D5\",\n strns: \"\\xaf\",\n sub: \"\\u2282\",\n subE: \"\\u2AC5\",\n subdot: \"\\u2ABD\",\n sube: \"\\u2286\",\n subedot: \"\\u2AC3\",\n submult: \"\\u2AC1\",\n subnE: \"\\u2ACB\",\n subne: \"\\u228A\",\n subplus: \"\\u2ABF\",\n subrarr: \"\\u2979\",\n subset: \"\\u2282\",\n subseteq: \"\\u2286\",\n subseteqq: \"\\u2AC5\",\n subsetneq: \"\\u228A\",\n subsetneqq: \"\\u2ACB\",\n subsim: \"\\u2AC7\",\n subsub: \"\\u2AD5\",\n subsup: \"\\u2AD3\",\n succ: \"\\u227B\",\n succapprox: \"\\u2AB8\",\n succcurlyeq: \"\\u227D\",\n succeq: \"\\u2AB0\",\n succnapprox: \"\\u2ABA\",\n succneqq: \"\\u2AB6\",\n succnsim: \"\\u22E9\",\n succsim: \"\\u227F\",\n sum: \"\\u2211\",\n sung: \"\\u266A\",\n sup1: \"\\xb9\",\n sup2: \"\\xb2\",\n sup3: \"\\xb3\",\n sup: \"\\u2283\",\n supE: \"\\u2AC6\",\n supdot: \"\\u2ABE\",\n supdsub: 
\"\\u2AD8\",\n supe: \"\\u2287\",\n supedot: \"\\u2AC4\",\n suphsol: \"\\u27C9\",\n suphsub: \"\\u2AD7\",\n suplarr: \"\\u297B\",\n supmult: \"\\u2AC2\",\n supnE: \"\\u2ACC\",\n supne: \"\\u228B\",\n supplus: \"\\u2AC0\",\n supset: \"\\u2283\",\n supseteq: \"\\u2287\",\n supseteqq: \"\\u2AC6\",\n supsetneq: \"\\u228B\",\n supsetneqq: \"\\u2ACC\",\n supsim: \"\\u2AC8\",\n supsub: \"\\u2AD4\",\n supsup: \"\\u2AD6\",\n swArr: \"\\u21D9\",\n swarhk: \"\\u2926\",\n swarr: \"\\u2199\",\n swarrow: \"\\u2199\",\n swnwar: \"\\u292A\",\n szlig: \"\\xdf\",\n target: \"\\u2316\",\n tau: \"\\u03C4\",\n tbrk: \"\\u23B4\",\n tcaron: \"\\u0165\",\n tcedil: \"\\u0163\",\n tcy: \"\\u0442\",\n tdot: \"\\u20DB\",\n telrec: \"\\u2315\",\n tfr: \"\\uD835\\uDD31\",\n there4: \"\\u2234\",\n therefore: \"\\u2234\",\n theta: \"\\u03B8\",\n thetasym: \"\\u03D1\",\n thetav: \"\\u03D1\",\n thickapprox: \"\\u2248\",\n thicksim: \"\\u223C\",\n thinsp: \"\\u2009\",\n thkap: \"\\u2248\",\n thksim: \"\\u223C\",\n thorn: \"\\xfe\",\n tilde: \"\\u02DC\",\n times: \"\\xd7\",\n timesb: \"\\u22A0\",\n timesbar: \"\\u2A31\",\n timesd: \"\\u2A30\",\n tint: \"\\u222D\",\n toea: \"\\u2928\",\n top: \"\\u22A4\",\n topbot: \"\\u2336\",\n topcir: \"\\u2AF1\",\n topf: \"\\uD835\\uDD65\",\n topfork: \"\\u2ADA\",\n tosa: \"\\u2929\",\n tprime: \"\\u2034\",\n trade: \"\\u2122\",\n triangle: \"\\u25B5\",\n triangledown: \"\\u25BF\",\n triangleleft: \"\\u25C3\",\n trianglelefteq: \"\\u22B4\",\n triangleq: \"\\u225C\",\n triangleright: \"\\u25B9\",\n trianglerighteq: \"\\u22B5\",\n tridot: \"\\u25EC\",\n trie: \"\\u225C\",\n triminus: \"\\u2A3A\",\n triplus: \"\\u2A39\",\n trisb: \"\\u29CD\",\n tritime: \"\\u2A3B\",\n trpezium: \"\\u23E2\",\n tscr: \"\\uD835\\uDCC9\",\n tscy: \"\\u0446\",\n tshcy: \"\\u045B\",\n tstrok: \"\\u0167\",\n twixt: \"\\u226C\",\n twoheadleftarrow: \"\\u219E\",\n twoheadrightarrow: \"\\u21A0\",\n uArr: \"\\u21D1\",\n uHar: \"\\u2963\",\n uacute: \"\\xfa\",\n uarr: \"\\u2191\",\n ubrcy: \"\\u045E\",\n ubreve: \"\\u016D\",\n ucirc: \"\\xfb\",\n ucy: \"\\u0443\",\n udarr: \"\\u21C5\",\n udblac: \"\\u0171\",\n udhar: \"\\u296E\",\n ufisht: \"\\u297E\",\n ufr: \"\\uD835\\uDD32\",\n ugrave: \"\\xf9\",\n uharl: \"\\u21BF\",\n uharr: \"\\u21BE\",\n uhblk: \"\\u2580\",\n ulcorn: \"\\u231C\",\n ulcorner: \"\\u231C\",\n ulcrop: \"\\u230F\",\n ultri: \"\\u25F8\",\n umacr: \"\\u016B\",\n uml: \"\\xa8\",\n uogon: \"\\u0173\",\n uopf: \"\\uD835\\uDD66\",\n uparrow: \"\\u2191\",\n updownarrow: \"\\u2195\",\n upharpoonleft: \"\\u21BF\",\n upharpoonright: \"\\u21BE\",\n uplus: \"\\u228E\",\n upsi: \"\\u03C5\",\n upsih: \"\\u03D2\",\n upsilon: \"\\u03C5\",\n upuparrows: \"\\u21C8\",\n urcorn: \"\\u231D\",\n urcorner: \"\\u231D\",\n urcrop: \"\\u230E\",\n uring: \"\\u016F\",\n urtri: \"\\u25F9\",\n uscr: \"\\uD835\\uDCCA\",\n utdot: \"\\u22F0\",\n utilde: \"\\u0169\",\n utri: \"\\u25B5\",\n utrif: \"\\u25B4\",\n uuarr: \"\\u21C8\",\n uuml: \"\\xfc\",\n uwangle: \"\\u29A7\",\n vArr: \"\\u21D5\",\n vBar: \"\\u2AE8\",\n vBarv: \"\\u2AE9\",\n vDash: \"\\u22A8\",\n vangrt: \"\\u299C\",\n varepsilon: \"\\u03F5\",\n varkappa: \"\\u03F0\",\n varnothing: \"\\u2205\",\n varphi: \"\\u03D5\",\n varpi: \"\\u03D6\",\n varpropto: \"\\u221D\",\n varr: \"\\u2195\",\n varrho: \"\\u03F1\",\n varsigma: \"\\u03C2\",\n varsubsetneq: \"\\u228A\\uFE00\",\n varsubsetneqq: \"\\u2ACB\\uFE00\",\n varsupsetneq: \"\\u228B\\uFE00\",\n varsupsetneqq: \"\\u2ACC\\uFE00\",\n vartheta: \"\\u03D1\",\n vartriangleleft: \"\\u22B2\",\n vartriangleright: \"\\u22B3\",\n vcy: 
\"\\u0432\",\n vdash: \"\\u22A2\",\n vee: \"\\u2228\",\n veebar: \"\\u22BB\",\n veeeq: \"\\u225A\",\n vellip: \"\\u22EE\",\n verbar: \"|\",\n vert: \"|\",\n vfr: \"\\uD835\\uDD33\",\n vltri: \"\\u22B2\",\n vnsub: \"\\u2282\\u20D2\",\n vnsup: \"\\u2283\\u20D2\",\n vopf: \"\\uD835\\uDD67\",\n vprop: \"\\u221D\",\n vrtri: \"\\u22B3\",\n vscr: \"\\uD835\\uDCCB\",\n vsubnE: \"\\u2ACB\\uFE00\",\n vsubne: \"\\u228A\\uFE00\",\n vsupnE: \"\\u2ACC\\uFE00\",\n vsupne: \"\\u228B\\uFE00\",\n vzigzag: \"\\u299A\",\n wcirc: \"\\u0175\",\n wedbar: \"\\u2A5F\",\n wedge: \"\\u2227\",\n wedgeq: \"\\u2259\",\n weierp: \"\\u2118\",\n wfr: \"\\uD835\\uDD34\",\n wopf: \"\\uD835\\uDD68\",\n wp: \"\\u2118\",\n wr: \"\\u2240\",\n wreath: \"\\u2240\",\n wscr: \"\\uD835\\uDCCC\",\n xcap: \"\\u22C2\",\n xcirc: \"\\u25EF\",\n xcup: \"\\u22C3\",\n xdtri: \"\\u25BD\",\n xfr: \"\\uD835\\uDD35\",\n xhArr: \"\\u27FA\",\n xharr: \"\\u27F7\",\n xi: \"\\u03BE\",\n xlArr: \"\\u27F8\",\n xlarr: \"\\u27F5\",\n xmap: \"\\u27FC\",\n xnis: \"\\u22FB\",\n xodot: \"\\u2A00\",\n xopf: \"\\uD835\\uDD69\",\n xoplus: \"\\u2A01\",\n xotime: \"\\u2A02\",\n xrArr: \"\\u27F9\",\n xrarr: \"\\u27F6\",\n xscr: \"\\uD835\\uDCCD\",\n xsqcup: \"\\u2A06\",\n xuplus: \"\\u2A04\",\n xutri: \"\\u25B3\",\n xvee: \"\\u22C1\",\n xwedge: \"\\u22C0\",\n yacute: \"\\xfd\",\n yacy: \"\\u044F\",\n ycirc: \"\\u0177\",\n ycy: \"\\u044B\",\n yen: \"\\xa5\",\n yfr: \"\\uD835\\uDD36\",\n yicy: \"\\u0457\",\n yopf: \"\\uD835\\uDD6A\",\n yscr: \"\\uD835\\uDCCE\",\n yucy: \"\\u044E\",\n yuml: \"\\xff\",\n zacute: \"\\u017A\",\n zcaron: \"\\u017E\",\n zcy: \"\\u0437\",\n zdot: \"\\u017C\",\n zeetrf: \"\\u2128\",\n zeta: \"\\u03B6\",\n zfr: \"\\uD835\\uDD37\",\n zhcy: \"\\u0436\",\n zigrarr: \"\\u21DD\",\n zopf: \"\\uD835\\uDD6B\",\n zscr: \"\\uD835\\uDCCF\",\n zwj: \"\\u200D\",\n zwnj: \"\\u200C\"\n};\n\n});\n\n\n\nparcelRegister(\"2qoAc\", function(module, exports) {\n\n$parcel$export(module.exports, \"codeFenced\", () => $1c411422dc205d23$export$c23e4921f8d87e7c);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n/** @type {Construct} */ const $1c411422dc205d23$var$nonLazyContinuation = {\n tokenize: $1c411422dc205d23$var$tokenizeNonLazyContinuation,\n partial: true\n};\nconst $1c411422dc205d23$export$c23e4921f8d87e7c = {\n name: \"codeFenced\",\n tokenize: $1c411422dc205d23$var$tokenizeCodeFenced,\n concrete: true\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1c411422dc205d23$var$tokenizeCodeFenced(effects, ok, nok) {\n const self = this;\n /** @type {Construct} */ const closeStart = {\n tokenize: tokenizeCloseStart,\n partial: true\n };\n let initialPrefix = 0;\n let sizeOpen = 0;\n /** @type {NonNullable} */ let marker;\n return start;\n /**\n * Start of code.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function start(code) {\n // To do: parse whitespace like `markdown-rs`.\n return beforeSequenceOpen(code);\n }\n /**\n * In opening fence, after prefix, at sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function beforeSequenceOpen(code) {\n const tail = 
self.events[self.events.length - 1];\n initialPrefix = tail && tail[1].type === \"linePrefix\" ? tail[2].sliceSerialize(tail[1], true).length : 0;\n marker = code;\n effects.enter(\"codeFenced\");\n effects.enter(\"codeFencedFence\");\n effects.enter(\"codeFencedFenceSequence\");\n return sequenceOpen(code);\n }\n /**\n * In opening fence sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function sequenceOpen(code) {\n if (code === marker) {\n sizeOpen++;\n effects.consume(code);\n return sequenceOpen;\n }\n if (sizeOpen < 3) return nok(code);\n effects.exit(\"codeFencedFenceSequence\");\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, infoBefore, \"whitespace\")(code) : infoBefore(code);\n }\n /**\n * In opening fence, after the sequence (and optional whitespace), before info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function infoBefore(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"codeFencedFence\");\n return self.interrupt ? ok(code) : effects.check($1c411422dc205d23$var$nonLazyContinuation, atNonLazyBreak, after)(code);\n }\n effects.enter(\"codeFencedFenceInfo\");\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return info(code);\n }\n /**\n * In info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function info(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"chunkString\");\n effects.exit(\"codeFencedFenceInfo\");\n return infoBefore(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.exit(\"chunkString\");\n effects.exit(\"codeFencedFenceInfo\");\n return (0, $8GWoH.factorySpace)(effects, metaBefore, \"whitespace\")(code);\n }\n if (code === 96 && code === marker) return nok(code);\n effects.consume(code);\n return info;\n }\n /**\n * In opening fence, after info and whitespace, before meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function metaBefore(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) return infoBefore(code);\n effects.enter(\"codeFencedFenceMeta\");\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return meta(code);\n }\n /**\n * In meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */ function meta(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"chunkString\");\n effects.exit(\"codeFencedFenceMeta\");\n return infoBefore(code);\n }\n if (code === 96 && code === marker) return nok(code);\n effects.consume(code);\n return meta;\n }\n /**\n * At eol/eof in code, before a non-lazy closing fence or content.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */ function atNonLazyBreak(code) {\n return effects.attempt(closeStart, after, contentBefore)(code);\n }\n /**\n * Before code content, not a closing fence, at eol.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */ function contentBefore(code) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return contentStart;\n }\n /**\n * Before code content, not a closing fence.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */ 
function contentStart(code) {\n return initialPrefix > 0 && (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, beforeContentChunk, \"linePrefix\", initialPrefix + 1)(code) : beforeContentChunk(code);\n }\n /**\n * Before code content, after optional prefix.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */ function beforeContentChunk(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) return effects.check($1c411422dc205d23$var$nonLazyContinuation, atNonLazyBreak, after)(code);\n effects.enter(\"codeFlowValue\");\n return contentChunk(code);\n }\n /**\n * In code content.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^^^^^^^^\n * | ~~~\n * ```\n *\n * @type {State}\n */ function contentChunk(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"codeFlowValue\");\n return beforeContentChunk(code);\n }\n effects.consume(code);\n return contentChunk;\n }\n /**\n * After code.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n effects.exit(\"codeFenced\");\n return ok(code);\n }\n /**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function tokenizeCloseStart(effects, ok, nok) {\n let size = 0;\n return startBefore;\n /**\n *\n *\n * @type {State}\n */ function startBefore(code) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return start;\n }\n /**\n * Before closing fence, at optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n // Always populated by defaults.\n // To do: `enter` here or in next state?\n effects.enter(\"codeFencedFence\");\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, beforeSequenceClose, \"linePrefix\", self.parser.constructs.disable.null.includes(\"codeIndented\") ? undefined : 4)(code) : beforeSequenceClose(code);\n }\n /**\n * In closing fence, after optional whitespace, at sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */ function beforeSequenceClose(code) {\n if (code === marker) {\n effects.enter(\"codeFencedFenceSequence\");\n return sequenceClose(code);\n }\n return nok(code);\n }\n /**\n * In closing fence sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceClose(code) {\n if (code === marker) {\n size++;\n effects.consume(code);\n return sequenceClose;\n }\n if (size >= sizeOpen) {\n effects.exit(\"codeFencedFenceSequence\");\n return (0, $5Lprs.markdownSpace)(code) ? 
(0, $8GWoH.factorySpace)(effects, sequenceCloseAfter, \"whitespace\")(code) : sequenceCloseAfter(code);\n }\n return nok(code);\n }\n /**\n * After closing fence sequence, after optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceCloseAfter(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"codeFencedFence\");\n return ok(code);\n }\n return nok(code);\n }\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1c411422dc205d23$var$tokenizeNonLazyContinuation(effects, ok, nok) {\n const self = this;\n return start;\n /**\n *\n *\n * @type {State}\n */ function start(code) {\n if (code === null) return nok(code);\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return lineStart;\n }\n /**\n *\n *\n * @type {State}\n */ function lineStart(code) {\n return self.parser.lazy[self.now().line] ? nok(code) : ok(code);\n }\n}\n\n});\n\nparcelRegister(\"eHPOy\", function(module, exports) {\n\n$parcel$export(module.exports, \"codeIndented\", () => $ab4d46130bbbc8bd$export$47910b7ab28d1853);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $ab4d46130bbbc8bd$export$47910b7ab28d1853 = {\n name: \"codeIndented\",\n tokenize: $ab4d46130bbbc8bd$var$tokenizeCodeIndented\n};\n/** @type {Construct} */ const $ab4d46130bbbc8bd$var$furtherStart = {\n tokenize: $ab4d46130bbbc8bd$var$tokenizeFurtherStart,\n partial: true\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $ab4d46130bbbc8bd$var$tokenizeCodeIndented(effects, ok, nok) {\n const self = this;\n return start;\n /**\n * Start of code (indented).\n *\n * > **Parsing note**: it is not needed to check if this first line is a\n * > filled line (that it has a non-whitespace character), because blank lines\n * > are parsed already, so we never run into that.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n // To do: manually check if interrupting like `markdown-rs`.\n effects.enter(\"codeIndented\");\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return (0, $8GWoH.factorySpace)(effects, afterPrefix, \"linePrefix\", 5)(code);\n }\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */ function afterPrefix(code) {\n const tail = self.events[self.events.length - 1];\n return tail && tail[1].type === \"linePrefix\" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? 
atBreak(code) : nok(code);\n }\n /**\n * At a break.\n *\n * ```markdown\n * > | aaa\n * ^ ^\n * ```\n *\n * @type {State}\n */ function atBreak(code) {\n if (code === null) return after(code);\n if ((0, $5Lprs.markdownLineEnding)(code)) return effects.attempt($ab4d46130bbbc8bd$var$furtherStart, atBreak, after)(code);\n effects.enter(\"codeFlowValue\");\n return inside(code);\n }\n /**\n * In code content.\n *\n * ```markdown\n * > | aaa\n * ^^^^\n * ```\n *\n * @type {State}\n */ function inside(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"codeFlowValue\");\n return atBreak(code);\n }\n effects.consume(code);\n return inside;\n }\n /** @type {State} */ function after(code) {\n effects.exit(\"codeIndented\");\n // To do: allow interrupting like `markdown-rs`.\n // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $ab4d46130bbbc8bd$var$tokenizeFurtherStart(effects, ok, nok) {\n const self = this;\n return furtherStart;\n /**\n * At eol, trying to parse another indent.\n *\n * ```markdown\n * > | aaa\n * ^\n * | bbb\n * ```\n *\n * @type {State}\n */ function furtherStart(code) {\n // To do: improve `lazy` / `pierce` handling.\n // If this is a lazy line, it can’t be code.\n if (self.parser.lazy[self.now().line]) return nok(code);\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return furtherStart;\n }\n // To do: the code here in `micromark-js` is a bit different from\n // `markdown-rs` because there it can attempt spaces.\n // We can’t yet.\n //\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return (0, $8GWoH.factorySpace)(effects, afterPrefix, \"linePrefix\", 5)(code);\n }\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */ function afterPrefix(code) {\n const tail = self.events[self.events.length - 1];\n return tail && tail[1].type === \"linePrefix\" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? ok(code) : (0, $5Lprs.markdownLineEnding)(code) ? 
furtherStart(code) : nok(code);\n }\n}\n\n});\n\nparcelRegister(\"1XgoW\", function(module, exports) {\n\n$parcel$export(module.exports, \"codeText\", () => $16c806108f267846$export$d24f93e715f9df88);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $16c806108f267846$export$d24f93e715f9df88 = {\n name: \"codeText\",\n tokenize: $16c806108f267846$var$tokenizeCodeText,\n resolve: $16c806108f267846$var$resolveCodeText,\n previous: $16c806108f267846$var$previous\n};\n// To do: next major: don’t resolve, like `markdown-rs`.\n/** @type {Resolver} */ function $16c806108f267846$var$resolveCodeText(events) {\n let tailExitIndex = events.length - 4;\n let headEnterIndex = 3;\n /** @type {number} */ let index;\n /** @type {number | undefined} */ let enter;\n // If we start and end with an EOL or a space.\n if ((events[headEnterIndex][1].type === \"lineEnding\" || events[headEnterIndex][1].type === \"space\") && (events[tailExitIndex][1].type === \"lineEnding\" || events[tailExitIndex][1].type === \"space\")) {\n index = headEnterIndex;\n // And we have data.\n while(++index < tailExitIndex)if (events[index][1].type === \"codeTextData\") {\n // Then we have padding.\n events[headEnterIndex][1].type = \"codeTextPadding\";\n events[tailExitIndex][1].type = \"codeTextPadding\";\n headEnterIndex += 2;\n tailExitIndex -= 2;\n break;\n }\n }\n // Merge adjacent spaces and data.\n index = headEnterIndex - 1;\n tailExitIndex++;\n while(++index <= tailExitIndex){\n if (enter === undefined) {\n if (index !== tailExitIndex && events[index][1].type !== \"lineEnding\") enter = index;\n } else if (index === tailExitIndex || events[index][1].type === \"lineEnding\") {\n events[enter][1].type = \"codeTextData\";\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end;\n events.splice(enter + 2, index - enter - 2);\n tailExitIndex -= index - enter - 2;\n index = enter + 2;\n }\n enter = undefined;\n }\n }\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Previous}\n */ function $16c806108f267846$var$previous(code) {\n // If there is a previous code, there will always be a tail.\n return code !== 96 || this.events[this.events.length - 1][1].type === \"characterEscape\";\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $16c806108f267846$var$tokenizeCodeText(effects, ok, nok) {\n const self = this;\n let sizeOpen = 0;\n /** @type {number} */ let size;\n /** @type {Token} */ let token;\n return start;\n /**\n * Start of code (text).\n *\n * ```markdown\n * > | `a`\n * ^\n * > | \\`a`\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"codeText\");\n effects.enter(\"codeTextSequence\");\n return sequenceOpen(code);\n }\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceOpen(code) {\n if (code === 96) {\n effects.consume(code);\n sizeOpen++;\n return sequenceOpen;\n }\n effects.exit(\"codeTextSequence\");\n return between(code);\n }\n /**\n * Between something and something else.\n *\n * ```markdown\n * > | `a`\n 
* ^^\n * ```\n *\n * @type {State}\n */ function between(code) {\n // EOF.\n if (code === null) return nok(code);\n // To do: next major: don’t do spaces in resolve, but when compiling,\n // like `markdown-rs`.\n // Tabs don’t work, and virtual spaces don’t make sense.\n if (code === 32) {\n effects.enter(\"space\");\n effects.consume(code);\n effects.exit(\"space\");\n return between;\n }\n // Closing fence? Could also be data.\n if (code === 96) {\n token = effects.enter(\"codeTextSequence\");\n size = 0;\n return sequenceClose(code);\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return between;\n }\n // Data.\n effects.enter(\"codeTextData\");\n return data(code);\n }\n /**\n * In data.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */ function data(code) {\n if (code === null || code === 32 || code === 96 || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"codeTextData\");\n return between(code);\n }\n effects.consume(code);\n return data;\n }\n /**\n * In closing sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceClose(code) {\n // More.\n if (code === 96) {\n effects.consume(code);\n size++;\n return sequenceClose;\n }\n // Done!\n if (size === sizeOpen) {\n effects.exit(\"codeTextSequence\");\n effects.exit(\"codeText\");\n return ok(code);\n }\n // More or less accents: mark as data.\n token.type = \"codeTextData\";\n return data(code);\n }\n}\n\n});\n\nparcelRegister(\"ix9Ua\", function(module, exports) {\n\n$parcel$export(module.exports, \"definition\", () => $d7e2fdfd86ff4dd4$export$69f215ed977cdb73);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $flE5d = parcelRequire(\"flE5d\");\n\nvar $i8rQF = parcelRequire(\"i8rQF\");\n\nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $kXx8i = parcelRequire(\"kXx8i\");\n\nvar $5Jc1R = parcelRequire(\"5Jc1R\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $9HErY = parcelRequire(\"9HErY\");\nconst $d7e2fdfd86ff4dd4$export$69f215ed977cdb73 = {\n name: \"definition\",\n tokenize: $d7e2fdfd86ff4dd4$var$tokenizeDefinition\n};\n/** @type {Construct} */ const $d7e2fdfd86ff4dd4$var$titleBefore = {\n tokenize: $d7e2fdfd86ff4dd4$var$tokenizeTitleBefore,\n partial: true\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $d7e2fdfd86ff4dd4$var$tokenizeDefinition(effects, ok, nok) {\n const self = this;\n /** @type {string} */ let identifier;\n return start;\n /**\n * At start of a definition.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n // Do not interrupt paragraphs (but do follow definitions).\n // To do: do `interrupt` the way `markdown-rs` does.\n // To do: parse whitespace the way `markdown-rs` does.\n effects.enter(\"definition\");\n return before(code);\n }\n /**\n * After optional whitespace, at `[`.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function before(code) {\n // To do: parse whitespace the way `markdown-rs` does.\n return (0, $i8rQF.factoryLabel).call(self, effects, labelAfter, // Note: we don’t need to reset the way `markdown-rs` does.\n nok, \"definitionLabel\", \"definitionLabelMarker\", 
\"definitionLabelString\")(code);\n }\n /**\n * After label.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function labelAfter(code) {\n identifier = (0, $9HErY.normalizeIdentifier)(self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1));\n if (code === 58) {\n effects.enter(\"definitionMarker\");\n effects.consume(code);\n effects.exit(\"definitionMarker\");\n return markerAfter;\n }\n return nok(code);\n }\n /**\n * After marker.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function markerAfter(code) {\n // Note: whitespace is optional.\n return (0, $5Lprs.markdownLineEndingOrSpace)(code) ? (0, $5Jc1R.factoryWhitespace)(effects, destinationBefore)(code) : destinationBefore(code);\n }\n /**\n * Before destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function destinationBefore(code) {\n return (0, $flE5d.factoryDestination)(effects, destinationAfter, // Note: we don’t need to reset the way `markdown-rs` does.\n nok, \"definitionDestination\", \"definitionDestinationLiteral\", \"definitionDestinationLiteralMarker\", \"definitionDestinationRaw\", \"definitionDestinationString\")(code);\n }\n /**\n * After destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function destinationAfter(code) {\n return effects.attempt($d7e2fdfd86ff4dd4$var$titleBefore, after, after)(code);\n }\n /**\n * After definition.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, afterWhitespace, \"whitespace\")(code) : afterWhitespace(code);\n }\n /**\n * After definition, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function afterWhitespace(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"definition\");\n // Note: we don’t care about uniqueness.\n // It’s likely that that doesn’t happen very frequently.\n // It is more likely that it wastes precious time.\n self.parser.defined.push(identifier);\n // To do: `markdown-rs` interrupt.\n // // You’d be interrupting.\n // tokenizer.interrupt = true\n return ok(code);\n }\n return nok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $d7e2fdfd86ff4dd4$var$tokenizeTitleBefore(effects, ok, nok) {\n return titleBefore;\n /**\n * After destination, at whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function titleBefore(code) {\n return (0, $5Lprs.markdownLineEndingOrSpace)(code) ? (0, $5Jc1R.factoryWhitespace)(effects, beforeMarker)(code) : nok(code);\n }\n /**\n * At title.\n *\n * ```markdown\n * | [a]: b\n * > | \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function beforeMarker(code) {\n return (0, $kXx8i.factoryTitle)(effects, titleAfter, nok, \"definitionTitle\", \"definitionTitleMarker\", \"definitionTitleString\")(code);\n }\n /**\n * After title.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function titleAfter(code) {\n return (0, $5Lprs.markdownSpace)(code) ? 
(0, $8GWoH.factorySpace)(effects, titleAfterOptionalWhitespace, \"whitespace\")(code) : titleAfterOptionalWhitespace(code);\n }\n /**\n * After title, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */ function titleAfterOptionalWhitespace(code) {\n return code === null || (0, $5Lprs.markdownLineEnding)(code) ? ok(code) : nok(code);\n }\n}\n\n});\nparcelRegister(\"flE5d\", function(module, exports) {\n\n$parcel$export(module.exports, \"factoryDestination\", () => $b2c7e8deac525d60$export$2e6c8deaa96af245);\n/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $b2c7e8deac525d60$export$2e6c8deaa96af245(effects, ok, nok, type, literalType, literalMarkerType, rawType, stringType, max) {\n const limit = max || Number.POSITIVE_INFINITY;\n let balance = 0;\n return start;\n /**\n * Start of destination.\n *\n * ```markdown\n * > | \n * ^\n * > | aa\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n if (code === 60) {\n effects.enter(type);\n effects.enter(literalType);\n effects.enter(literalMarkerType);\n effects.consume(code);\n effects.exit(literalMarkerType);\n return enclosedBefore;\n }\n // ASCII control, space, closing paren.\n if (code === null || code === 32 || code === 41 || (0, $5Lprs.asciiControl)(code)) return nok(code);\n effects.enter(type);\n effects.enter(rawType);\n effects.enter(stringType);\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return raw(code);\n }\n /**\n * After `<`, at an enclosed destination.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function enclosedBefore(code) {\n if (code === 62) {\n effects.enter(literalMarkerType);\n effects.consume(code);\n effects.exit(literalMarkerType);\n effects.exit(literalType);\n effects.exit(type);\n return ok;\n }\n effects.enter(stringType);\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return enclosed(code);\n }\n /**\n * In enclosed destination.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function enclosed(code) {\n if (code === 62) {\n effects.exit(\"chunkString\");\n effects.exit(stringType);\n return enclosedBefore(code);\n }\n if (code === null || code === 60 || (0, $5Lprs.markdownLineEnding)(code)) return nok(code);\n effects.consume(code);\n return code === 92 ? 
enclosedEscape : enclosed;\n }\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function enclosedEscape(code) {\n if (code === 60 || code === 62 || code === 92) {\n effects.consume(code);\n return enclosed;\n }\n return enclosed(code);\n }\n /**\n * In raw destination.\n *\n * ```markdown\n * > | aa\n * ^\n * ```\n *\n * @type {State}\n */ function raw(code) {\n if (!balance && (code === null || code === 41 || (0, $5Lprs.markdownLineEndingOrSpace)(code))) {\n effects.exit(\"chunkString\");\n effects.exit(stringType);\n effects.exit(rawType);\n effects.exit(type);\n return ok(code);\n }\n if (balance < limit && code === 40) {\n effects.consume(code);\n balance++;\n return raw;\n }\n if (code === 41) {\n effects.consume(code);\n balance--;\n return raw;\n }\n // ASCII control (but *not* `\\0`) and space and `(`.\n // Note: in `markdown-rs`, `\\0` exists in codes, in `micromark-js` it\n // doesn’t.\n if (code === null || code === 32 || code === 40 || (0, $5Lprs.asciiControl)(code)) return nok(code);\n effects.consume(code);\n return code === 92 ? rawEscape : raw;\n }\n /**\n * After `\\`, at special character.\n *\n * ```markdown\n * > | a\\*a\n * ^\n * ```\n *\n * @type {State}\n */ function rawEscape(code) {\n if (code === 40 || code === 41 || code === 92) {\n effects.consume(code);\n return raw;\n }\n return raw(code);\n }\n}\n\n});\n\nparcelRegister(\"i8rQF\", function(module, exports) {\n\n$parcel$export(module.exports, \"factoryLabel\", () => $d33e937ae1ea4ed4$export$7b768614d8ba97a7);\n/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $d33e937ae1ea4ed4$export$7b768614d8ba97a7(effects, ok, nok, type, markerType, stringType) {\n const self = this;\n let size = 0;\n /** @type {boolean} */ let seen;\n return start;\n /**\n * Start of label.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(type);\n effects.enter(markerType);\n effects.consume(code);\n effects.exit(markerType);\n effects.enter(stringType);\n return atBreak;\n }\n /**\n * In label, at something, before something else.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */ function atBreak(code) {\n if (size > 999 || code === null || code === 91 || code === 93 && !seen || // To do: remove in the future once we’ve switched from\n // `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,\n // which doesn’t need this.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */ code === 94 && !size && \"_hiddenFootnoteSupport\" in self.parser.constructs) return nok(code);\n if (code === 93) {\n effects.exit(stringType);\n effects.enter(markerType);\n effects.consume(code);\n effects.exit(markerType);\n effects.exit(type);\n return ok;\n }\n // To do: indent? 
Link chunks and EOLs together?\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return atBreak;\n }\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return labelInside(code);\n }\n /**\n * In label, in text.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */ function labelInside(code) {\n if (code === null || code === 91 || code === 93 || (0, $5Lprs.markdownLineEnding)(code) || size++ > 999) {\n effects.exit(\"chunkString\");\n return atBreak(code);\n }\n effects.consume(code);\n if (!seen) seen = !(0, $5Lprs.markdownSpace)(code);\n return code === 92 ? labelEscape : labelInside;\n }\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | [a\\*a]\n * ^\n * ```\n *\n * @type {State}\n */ function labelEscape(code) {\n if (code === 91 || code === 92 || code === 93) {\n effects.consume(code);\n size++;\n return labelInside;\n }\n return labelInside(code);\n }\n}\n\n});\n\nparcelRegister(\"kXx8i\", function(module, exports) {\n\n$parcel$export(module.exports, \"factoryTitle\", () => $f42304cc65ff93d0$export$f970569cc855e483);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $f42304cc65ff93d0$export$f970569cc855e483(effects, ok, nok, type, markerType, stringType) {\n /** @type {NonNullable} */ let marker;\n return start;\n /**\n * Start of title.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n if (code === 34 || code === 39 || code === 40) {\n effects.enter(type);\n effects.enter(markerType);\n effects.consume(code);\n effects.exit(markerType);\n marker = code === 40 ? 41 : code;\n return begin;\n }\n return nok(code);\n }\n /**\n * After opening marker.\n *\n * This is also used at the closing marker.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */ function begin(code) {\n if (code === marker) {\n effects.enter(markerType);\n effects.consume(code);\n effects.exit(markerType);\n effects.exit(type);\n return ok;\n }\n effects.enter(stringType);\n return atBreak(code);\n }\n /**\n * At something, before something else.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */ function atBreak(code) {\n if (code === marker) {\n effects.exit(stringType);\n return begin(marker);\n }\n if (code === null) return nok(code);\n // Note: blank lines can’t exist in content.\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n // To do: use `space_or_tab_eol_with_options`, connect.\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return (0, $8GWoH.factorySpace)(effects, atBreak, \"linePrefix\");\n }\n effects.enter(\"chunkString\", {\n contentType: \"string\"\n });\n return inside(code);\n }\n /**\n *\n *\n * @type {State}\n */ function inside(code) {\n if (code === marker || code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"chunkString\");\n return atBreak(code);\n }\n effects.consume(code);\n return code === 92 ? 
escape : inside;\n }\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | \"a\\*b\"\n * ^\n * ```\n *\n * @type {State}\n */ function escape(code) {\n if (code === marker || code === 92) {\n effects.consume(code);\n return inside;\n }\n return inside(code);\n }\n}\n\n});\n\nparcelRegister(\"5Jc1R\", function(module, exports) {\n\n$parcel$export(module.exports, \"factoryWhitespace\", () => $42ba7c725dbf102d$export$1f27bd1aa33ce173);\n/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nfunction $42ba7c725dbf102d$export$1f27bd1aa33ce173(effects, ok) {\n /** @type {boolean} */ let seen;\n return start;\n /** @type {State} */ function start(code) {\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n seen = true;\n return start;\n }\n if ((0, $5Lprs.markdownSpace)(code)) return (0, $8GWoH.factorySpace)(effects, start, seen ? \"linePrefix\" : \"lineSuffix\")(code);\n return ok(code);\n }\n}\n\n});\n\nparcelRegister(\"9HErY\", function(module, exports) {\n\n$parcel$export(module.exports, \"normalizeIdentifier\", () => $710781c0cdec9f6d$export$806d55e226cfcd08);\n/**\n * Normalize an identifier (as found in references, definitions).\n *\n * Collapses markdown whitespace, trim, and then lower- and uppercase.\n *\n * Some characters are considered “uppercase”, such as U+03F4 (`ϴ`), but if their\n * lowercase counterpart (U+03B8 (`θ`)) is uppercased will result in a different\n * uppercase character (U+0398 (`Θ`)).\n * So, to get a canonical form, we perform both lower- and uppercase.\n *\n * Using uppercase last makes sure keys will never interact with default\n * prototypal values (such as `constructor`): nothing in the prototype of\n * `Object` is uppercase.\n *\n * @param {string} value\n * Identifier to normalize.\n * @returns {string}\n * Normalized identifier.\n */ function $710781c0cdec9f6d$export$806d55e226cfcd08(value) {\n return value// Collapse markdown whitespace.\n .replace(/[\\t\\n\\r ]+/g, \" \")// Trim.\n .replace(/^ | $/g, \"\")// Some characters are considered “uppercase”, but if their lowercase\n // counterpart is uppercased will result in a different uppercase\n // character.\n // Hence, to get that form, we perform both lower- and uppercase.\n // Upper case makes sure keys will not interact with default prototypal\n // methods: no method is uppercase.\n .toLowerCase().toUpperCase();\n}\n\n});\n\n\nparcelRegister(\"1d2Fb\", function(module, exports) {\n\n$parcel$export(module.exports, \"hardBreakEscape\", () => $0e19123aa1bdeff9$export$86c573ab9e06f418);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $0e19123aa1bdeff9$export$86c573ab9e06f418 = {\n name: \"hardBreakEscape\",\n tokenize: $0e19123aa1bdeff9$var$tokenizeHardBreakEscape\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $0e19123aa1bdeff9$var$tokenizeHardBreakEscape(effects, ok, nok) {\n return start;\n /**\n * Start of a hard break (escape).\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */ function start(code) {\n 
effects.enter(\"hardBreakEscape\");\n effects.consume(code);\n return after;\n }\n /**\n * After `\\`, at eol.\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */ function after(code) {\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"hardBreakEscape\");\n return ok(code);\n }\n return nok(code);\n }\n}\n\n});\n\nparcelRegister(\"e7bPJ\", function(module, exports) {\n\n$parcel$export(module.exports, \"headingAtx\", () => $a46ae5f97ae33c5d$export$3871e9deb360695c);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $Ux2lp = parcelRequire(\"Ux2lp\");\nconst $a46ae5f97ae33c5d$export$3871e9deb360695c = {\n name: \"headingAtx\",\n tokenize: $a46ae5f97ae33c5d$var$tokenizeHeadingAtx,\n resolve: $a46ae5f97ae33c5d$var$resolveHeadingAtx\n};\n/** @type {Resolver} */ function $a46ae5f97ae33c5d$var$resolveHeadingAtx(events, context) {\n let contentEnd = events.length - 2;\n let contentStart = 3;\n /** @type {Token} */ let content;\n /** @type {Token} */ let text;\n // Prefix whitespace, part of the opening.\n if (events[contentStart][1].type === \"whitespace\") contentStart += 2;\n // Suffix whitespace, part of the closing.\n if (contentEnd - 2 > contentStart && events[contentEnd][1].type === \"whitespace\") contentEnd -= 2;\n if (events[contentEnd][1].type === \"atxHeadingSequence\" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === \"whitespace\")) contentEnd -= contentStart + 1 === contentEnd ? 
2 : 4;\n if (contentEnd > contentStart) {\n content = {\n type: \"atxHeadingText\",\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end\n };\n text = {\n type: \"chunkText\",\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end,\n contentType: \"text\"\n };\n (0, $Ux2lp.splice)(events, contentStart, contentEnd - contentStart + 1, [\n [\n \"enter\",\n content,\n context\n ],\n [\n \"enter\",\n text,\n context\n ],\n [\n \"exit\",\n text,\n context\n ],\n [\n \"exit\",\n content,\n context\n ]\n ]);\n }\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $a46ae5f97ae33c5d$var$tokenizeHeadingAtx(effects, ok, nok) {\n let size = 0;\n return start;\n /**\n * Start of a heading (atx).\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n // To do: parse indent like `markdown-rs`.\n effects.enter(\"atxHeading\");\n return before(code);\n }\n /**\n * After optional whitespace, at `#`.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */ function before(code) {\n effects.enter(\"atxHeadingSequence\");\n return sequenceOpen(code);\n }\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceOpen(code) {\n if (code === 35 && size++ < 6) {\n effects.consume(code);\n return sequenceOpen;\n }\n // Always at least one `#`.\n if (code === null || (0, $5Lprs.markdownLineEndingOrSpace)(code)) {\n effects.exit(\"atxHeadingSequence\");\n return atBreak(code);\n }\n return nok(code);\n }\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */ function atBreak(code) {\n if (code === 35) {\n effects.enter(\"atxHeadingSequence\");\n return sequenceFurther(code);\n }\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"atxHeading\");\n // To do: interrupt like `markdown-rs`.\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) return (0, $8GWoH.factorySpace)(effects, atBreak, \"whitespace\")(code);\n // To do: generate `data` tokens, add the `text` token later.\n // Needs edit map, see: `markdown.rs`.\n effects.enter(\"atxHeadingText\");\n return data(code);\n }\n /**\n * In further sequence (after whitespace).\n *\n * Could be normal “visible” hashes in the heading or a final sequence.\n *\n * ```markdown\n * > | ## aa ##\n * ^\n * ```\n *\n * @type {State}\n */ function sequenceFurther(code) {\n if (code === 35) {\n effects.consume(code);\n return sequenceFurther;\n }\n effects.exit(\"atxHeadingSequence\");\n return atBreak(code);\n }\n /**\n * In text.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */ function data(code) {\n if (code === null || code === 35 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) {\n effects.exit(\"atxHeadingText\");\n return atBreak(code);\n }\n effects.consume(code);\n return data;\n }\n}\n\n});\n\nparcelRegister(\"aMDJK\", function(module, exports) {\n\n$parcel$export(module.exports, \"htmlFlow\", () => $7d9d35fac2b6ebcc$export$476ac411cb7d0d8f);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef 
{import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $9Lzi7 = parcelRequire(\"9Lzi7\");\n\nvar $eRfeJ = parcelRequire(\"eRfeJ\");\nconst $7d9d35fac2b6ebcc$export$476ac411cb7d0d8f = {\n name: \"htmlFlow\",\n tokenize: $7d9d35fac2b6ebcc$var$tokenizeHtmlFlow,\n resolveTo: $7d9d35fac2b6ebcc$var$resolveToHtmlFlow,\n concrete: true\n};\n/** @type {Construct} */ const $7d9d35fac2b6ebcc$var$blankLineBefore = {\n tokenize: $7d9d35fac2b6ebcc$var$tokenizeBlankLineBefore,\n partial: true\n};\nconst $7d9d35fac2b6ebcc$var$nonLazyContinuationStart = {\n tokenize: $7d9d35fac2b6ebcc$var$tokenizeNonLazyContinuationStart,\n partial: true\n};\n/** @type {Resolver} */ function $7d9d35fac2b6ebcc$var$resolveToHtmlFlow(events) {\n let index = events.length;\n while(index--){\n if (events[index][0] === \"enter\" && events[index][1].type === \"htmlFlow\") break;\n }\n if (index > 1 && events[index - 2][1].type === \"linePrefix\") {\n // Add the prefix start to the HTML token.\n events[index][1].start = events[index - 2][1].start;\n // Add the prefix start to the HTML line token.\n events[index + 1][1].start = events[index - 2][1].start;\n // Remove the line prefix.\n events.splice(index - 2, 2);\n }\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $7d9d35fac2b6ebcc$var$tokenizeHtmlFlow(effects, ok, nok) {\n const self = this;\n /** @type {number} */ let marker;\n /** @type {boolean} */ let closingTag;\n /** @type {string} */ let buffer;\n /** @type {number} */ let index;\n /** @type {Code} */ let markerB;\n return start;\n /**\n * Start of HTML (flow).\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n // To do: parse indent like `markdown-rs`.\n return before(code);\n }\n /**\n * At `<`, after optional whitespace.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function before(code) {\n effects.enter(\"htmlFlow\");\n effects.enter(\"htmlFlowData\");\n effects.consume(code);\n return open;\n }\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function open(code) {\n if (code === 33) {\n effects.consume(code);\n return declarationOpen;\n }\n if (code === 47) {\n effects.consume(code);\n closingTag = true;\n return tagCloseStart;\n }\n if (code === 63) {\n effects.consume(code);\n marker = 3;\n // To do:\n // tokenizer.concrete = true\n // To do: use `markdown-rs` style interrupt.\n // While we’re in an instruction instead of a declaration, we’re on a `?`\n // right now, so we do need to search for `>`, similar to declarations.\n return self.interrupt ? ok : continuationDeclarationInside;\n }\n // ASCII alphabetical.\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code);\n return tagName;\n }\n return nok(code);\n }\n /**\n * After ` | \n * ^\n * > | \n * ^\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */ function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code);\n marker = 2;\n return commentOpenInside;\n }\n if (code === 91) {\n effects.consume(code);\n marker = 5;\n index = 0;\n return cdataOpenInside;\n }\n // ASCII alphabetical.\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n marker = 4;\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? 
ok : continuationDeclarationInside;\n }\n return nok(code);\n }\n /**\n * After ` | \n * ^\n * ```\n *\n * @type {State}\n */ function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code);\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuationDeclarationInside;\n }\n return nok(code);\n }\n /**\n * After ` | &<]]>\n * ^^^^^^\n * ```\n *\n * @type {State}\n */ function cdataOpenInside(code) {\n const value = \"CDATA[\";\n if (code === value.charCodeAt(index++)) {\n effects.consume(code);\n if (index === value.length) // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuation;\n return cdataOpenInside;\n }\n return nok(code);\n }\n /**\n * After ``, in closing tag, at tag name.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function tagCloseStart(code) {\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code);\n return tagName;\n }\n return nok(code);\n }\n /**\n * In tag name.\n *\n * ```markdown\n * > | \n * ^^\n * > | \n * ^^\n * ```\n *\n * @type {State}\n */ function tagName(code) {\n if (code === null || code === 47 || code === 62 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) {\n const slash = code === 47;\n const name = buffer.toLowerCase();\n if (!slash && !closingTag && (0, $9Lzi7.htmlRawNames).includes(name)) {\n marker = 1;\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code);\n }\n if ((0, $9Lzi7.htmlBlockNames).includes(buffer.toLowerCase())) {\n marker = 6;\n if (slash) {\n effects.consume(code);\n return basicSelfClosing;\n }\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code);\n }\n marker = 7;\n // Do not support complete HTML when interrupting.\n return self.interrupt && !self.parser.lazy[self.now().line] ? nok(code) : closingTag ? completeClosingTagAfter(code) : completeAttributeNameBefore(code);\n }\n // ASCII alphanumerical and `-`.\n if (code === 45 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n effects.consume(code);\n buffer += String.fromCharCode(code);\n return tagName;\n }\n return nok(code);\n }\n /**\n * After closing slash of a basic tag name.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function basicSelfClosing(code) {\n if (code === 62) {\n effects.consume(code);\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? 
ok : continuation;\n }\n return nok(code);\n }\n /**\n * After closing slash of a complete tag name.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeClosingTagAfter(code) {\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return completeClosingTagAfter;\n }\n return completeEnd(code);\n }\n /**\n * At an attribute name.\n *\n * At first, this state is used after a complete tag name, after whitespace,\n * where it expects optional attributes or the end of the tag.\n * It is also reused after attributes, when expecting more optional\n * attributes.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeNameBefore(code) {\n if (code === 47) {\n effects.consume(code);\n return completeEnd;\n }\n // ASCII alphanumerical and `:` and `_`.\n if (code === 58 || code === 95 || (0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return completeAttributeName;\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return completeAttributeNameBefore;\n }\n return completeEnd(code);\n }\n /**\n * In attribute name.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeName(code) {\n // ASCII alphanumerical and `-`, `.`, `:`, and `_`.\n if (code === 45 || code === 46 || code === 58 || code === 95 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n effects.consume(code);\n return completeAttributeName;\n }\n return completeAttributeNameAfter(code);\n }\n /**\n * After attribute name, at an optional initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeNameAfter(code) {\n if (code === 61) {\n effects.consume(code);\n return completeAttributeValueBefore;\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return completeAttributeNameAfter;\n }\n return completeAttributeNameBefore(code);\n }\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeValueBefore(code) {\n if (code === null || code === 60 || code === 61 || code === 62 || code === 96) return nok(code);\n if (code === 34 || code === 39) {\n effects.consume(code);\n markerB = code;\n return completeAttributeValueQuoted;\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return completeAttributeValueBefore;\n }\n return completeAttributeValueUnquoted(code);\n }\n /**\n * In double or single quoted attribute value.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeValueQuoted(code) {\n if (code === markerB) {\n effects.consume(code);\n markerB = null;\n return completeAttributeValueQuotedAfter;\n }\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) return nok(code);\n effects.consume(code);\n return completeAttributeValueQuoted;\n }\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeValueUnquoted(code) {\n if (code === null || code === 34 || code === 39 || code === 47 || code === 60 || code === 61 || code === 62 || code === 96 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) return completeAttributeNameAfter(code);\n effects.consume(code);\n return 
completeAttributeValueUnquoted;\n }\n /**\n * After double or single quoted attribute value, before whitespace or the\n * end of the tag.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || (0, $5Lprs.markdownSpace)(code)) return completeAttributeNameBefore(code);\n return nok(code);\n }\n /**\n * In certain circumstances of a complete tag where only an `>` is allowed.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeEnd(code) {\n if (code === 62) {\n effects.consume(code);\n return completeAfter;\n }\n return nok(code);\n }\n /**\n * After `>` in a complete tag.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function completeAfter(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) // // Do not form containers.\n // tokenizer.concrete = true\n return continuation(code);\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return completeAfter;\n }\n return nok(code);\n }\n /**\n * In continuation of any HTML kind.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function continuation(code) {\n if (code === 45 && marker === 2) {\n effects.consume(code);\n return continuationCommentInside;\n }\n if (code === 60 && marker === 1) {\n effects.consume(code);\n return continuationRawTagOpen;\n }\n if (code === 62 && marker === 4) {\n effects.consume(code);\n return continuationClose;\n }\n if (code === 63 && marker === 3) {\n effects.consume(code);\n return continuationDeclarationInside;\n }\n if (code === 93 && marker === 5) {\n effects.consume(code);\n return continuationCdataInside;\n }\n if ((0, $5Lprs.markdownLineEnding)(code) && (marker === 6 || marker === 7)) {\n effects.exit(\"htmlFlowData\");\n return effects.check($7d9d35fac2b6ebcc$var$blankLineBefore, continuationAfter, continuationStart)(code);\n }\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"htmlFlowData\");\n return continuationStart(code);\n }\n effects.consume(code);\n return continuation;\n }\n /**\n * In continuation, at eol.\n *\n * ```markdown\n * > | \n * ^\n * | asd\n * ```\n *\n * @type {State}\n */ function continuationStart(code) {\n return effects.check($7d9d35fac2b6ebcc$var$nonLazyContinuationStart, continuationStartNonLazy, continuationAfter)(code);\n }\n /**\n * In continuation, at eol, before non-lazy content.\n *\n * ```markdown\n * > | \n * ^\n * | asd\n * ```\n *\n * @type {State}\n */ function continuationStartNonLazy(code) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return continuationBefore;\n }\n /**\n * In continuation, before non-lazy content.\n *\n * ```markdown\n * | \n * > | asd\n * ^\n * ```\n *\n * @type {State}\n */ function continuationBefore(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) return continuationStart(code);\n effects.enter(\"htmlFlowData\");\n return continuation(code);\n }\n /**\n * In comment continuation, after one `-`, expecting another.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function continuationCommentInside(code) {\n if (code === 45) {\n effects.consume(code);\n return continuationDeclarationInside;\n }\n return continuation(code);\n }\n /**\n * In raw continuation, after `<`, at `/`.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function continuationRawTagOpen(code) {\n if (code === 47) {\n 
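// The tokenizers in this bundle advance one character code at a time; every
// state is a function that inspects a code and returns the next state. A
// minimal standalone sketch of that pattern (assuming nothing about
// micromark's real `effects` API) is a scanner for the comment close `-->`
// that `continuationCommentInside`/`continuationDeclarationInside` above look
// for (45 is `-`, 62 is `>`):
function makeCommentCloseScanner() {
  let state = start;
  function start(code) { return code === 45 ? oneDash : start; }
  function oneDash(code) { return code === 45 ? twoDashes : start; }
  function twoDashes(code) { return code === 62 ? done : code === 45 ? twoDashes : start; }
  function done() { return done; }
  return {
    write(text) {
      for (const ch of text) state = state(ch.charCodeAt(0));
      return state === done; // true once `-->` has been seen
    }
  };
}
const commentScanner = makeCommentCloseScanner();
console.log(commentScanner.write("x --"), commentScanner.write("-> y")); // false true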
effects.consume(code);\n buffer = \"\";\n return continuationRawEndTag;\n }\n return continuation(code);\n }\n /**\n * In raw continuation, after ``, in a raw tag name.\n *\n * ```markdown\n * > | \n * ^^^^^^\n * ```\n *\n * @type {State}\n */ function continuationRawEndTag(code) {\n if (code === 62) {\n const name = buffer.toLowerCase();\n if ((0, $9Lzi7.htmlRawNames).includes(name)) {\n effects.consume(code);\n return continuationClose;\n }\n return continuation(code);\n }\n if ((0, $5Lprs.asciiAlpha)(code) && buffer.length < 8) {\n effects.consume(code);\n // @ts-expect-error: not null.\n buffer += String.fromCharCode(code);\n return continuationRawEndTag;\n }\n return continuation(code);\n }\n /**\n * In cdata continuation, after `]`, expecting `]>`.\n *\n * ```markdown\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */ function continuationCdataInside(code) {\n if (code === 93) {\n effects.consume(code);\n return continuationDeclarationInside;\n }\n return continuation(code);\n }\n /**\n * In declaration or instruction continuation, at `>`.\n *\n * ```markdown\n * > | \n * ^\n * > | >\n * ^\n * > | \n * ^\n * > | \n * ^\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */ function continuationDeclarationInside(code) {\n if (code === 62) {\n effects.consume(code);\n return continuationClose;\n }\n // More dashes.\n if (code === 45 && marker === 2) {\n effects.consume(code);\n return continuationDeclarationInside;\n }\n return continuation(code);\n }\n /**\n * In closed continuation: everything we get until the eol/eof is part of it.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function continuationClose(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"htmlFlowData\");\n return continuationAfter(code);\n }\n effects.consume(code);\n return continuationClose;\n }\n /**\n * Done.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */ function continuationAfter(code) {\n effects.exit(\"htmlFlow\");\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n // // No longer concrete.\n // tokenizer.concrete = false\n return ok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $7d9d35fac2b6ebcc$var$tokenizeNonLazyContinuationStart(effects, ok, nok) {\n const self = this;\n return start;\n /**\n * At eol, before continuation.\n *\n * ```markdown\n * > | * ```js\n * ^\n * | b\n * ```\n *\n * @type {State}\n */ function start(code) {\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return after;\n }\n return nok(code);\n }\n /**\n * A continuation.\n *\n * ```markdown\n * | * ```js\n * > | b\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n return self.parser.lazy[self.now().line] ? 
nok(code) : ok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $7d9d35fac2b6ebcc$var$tokenizeBlankLineBefore(effects, ok, nok) {\n return start;\n /**\n * Before eol, expecting blank line.\n *\n * ```markdown\n * > | \n * ^\n * |\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return effects.attempt((0, $eRfeJ.blankLine), ok, nok);\n }\n}\n\n});\nparcelRegister(\"9Lzi7\", function(module, exports) {\n\n$parcel$export(module.exports, \"htmlBlockNames\", () => $71c3e0f388301604$export$7364aee1c59d1879);\n$parcel$export(module.exports, \"htmlRawNames\", () => $71c3e0f388301604$export$948e66da505d080);\n/**\n * List of lowercase HTML “block” tag names.\n *\n * The list, when parsing HTML (flow), results in more relaxed rules (condition\n * 6).\n * Because they are known blocks, the HTML-like syntax doesn’t have to be\n * strictly parsed.\n * For tag names not in this list, a more strict algorithm (condition 7) is used\n * to detect whether the HTML-like syntax is seen as HTML (flow) or not.\n *\n * This is copied from:\n * .\n *\n * > 👉 **Note**: `search` was added in `[email protected]`.\n */ const $71c3e0f388301604$export$7364aee1c59d1879 = [\n \"address\",\n \"article\",\n \"aside\",\n \"base\",\n \"basefont\",\n \"blockquote\",\n \"body\",\n \"caption\",\n \"center\",\n \"col\",\n \"colgroup\",\n \"dd\",\n \"details\",\n \"dialog\",\n \"dir\",\n \"div\",\n \"dl\",\n \"dt\",\n \"fieldset\",\n \"figcaption\",\n \"figure\",\n \"footer\",\n \"form\",\n \"frame\",\n \"frameset\",\n \"h1\",\n \"h2\",\n \"h3\",\n \"h4\",\n \"h5\",\n \"h6\",\n \"head\",\n \"header\",\n \"hr\",\n \"html\",\n \"iframe\",\n \"legend\",\n \"li\",\n \"link\",\n \"main\",\n \"menu\",\n \"menuitem\",\n \"nav\",\n \"noframes\",\n \"ol\",\n \"optgroup\",\n \"option\",\n \"p\",\n \"param\",\n \"search\",\n \"section\",\n \"summary\",\n \"table\",\n \"tbody\",\n \"td\",\n \"tfoot\",\n \"th\",\n \"thead\",\n \"title\",\n \"tr\",\n \"track\",\n \"ul\"\n];\nconst $71c3e0f388301604$export$948e66da505d080 = [\n \"pre\",\n \"script\",\n \"style\",\n \"textarea\"\n];\n\n});\n\n\nparcelRegister(\"fAOFU\", function(module, exports) {\n\n$parcel$export(module.exports, \"htmlText\", () => $b5a186cf1fccfac7$export$398af27f284914fe);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $b5a186cf1fccfac7$export$398af27f284914fe = {\n name: \"htmlText\",\n tokenize: $b5a186cf1fccfac7$var$tokenizeHtmlText\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $b5a186cf1fccfac7$var$tokenizeHtmlText(effects, ok, nok) {\n const self = this;\n /** @type {NonNullable | undefined} */ let marker;\n /** @type {number} */ let index;\n /** @type {State} */ let returnState;\n return start;\n /**\n * Start of HTML (text).\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"htmlText\");\n effects.enter(\"htmlTextData\");\n effects.consume(code);\n return open;\n }\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | a c\n * ^\n * > | a c\n * ^\n * > | a 
c\n * ^\n * ```\n *\n * @type {State}\n */ function open(code) {\n if (code === 33) {\n effects.consume(code);\n return declarationOpen;\n }\n if (code === 47) {\n effects.consume(code);\n return tagCloseStart;\n }\n if (code === 63) {\n effects.consume(code);\n return instruction;\n }\n // ASCII alphabetical.\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return tagOpen;\n }\n return nok(code);\n }\n /**\n * After ` | a c\n * ^\n * > | a c\n * ^\n * > | a &<]]> c\n * ^\n * ```\n *\n * @type {State}\n */ function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code);\n return commentOpenInside;\n }\n if (code === 91) {\n effects.consume(code);\n index = 0;\n return cdataOpenInside;\n }\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return declaration;\n }\n return nok(code);\n }\n /**\n * In a comment, after ` | a c\n * ^\n * ```\n *\n * @type {State}\n */ function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code);\n return commentEnd;\n }\n return nok(code);\n }\n /**\n * In comment.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function comment(code) {\n if (code === null) return nok(code);\n if (code === 45) {\n effects.consume(code);\n return commentClose;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = comment;\n return lineEndingBefore(code);\n }\n effects.consume(code);\n return comment;\n }\n /**\n * In comment, after `-`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function commentClose(code) {\n if (code === 45) {\n effects.consume(code);\n return commentEnd;\n }\n return comment(code);\n }\n /**\n * In comment, after `--`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function commentEnd(code) {\n return code === 62 ? end(code) : code === 45 ? commentClose(code) : comment(code);\n }\n /**\n * After ` | a &<]]> b\n * ^^^^^^\n * ```\n *\n * @type {State}\n */ function cdataOpenInside(code) {\n const value = \"CDATA[\";\n if (code === value.charCodeAt(index++)) {\n effects.consume(code);\n return index === value.length ? 
cdata : cdataOpenInside;\n }\n return nok(code);\n }\n /**\n * In CDATA.\n *\n * ```markdown\n * > | a &<]]> b\n * ^^^\n * ```\n *\n * @type {State}\n */ function cdata(code) {\n if (code === null) return nok(code);\n if (code === 93) {\n effects.consume(code);\n return cdataClose;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = cdata;\n return lineEndingBefore(code);\n }\n effects.consume(code);\n return cdata;\n }\n /**\n * In CDATA, after `]`, at another `]`.\n *\n * ```markdown\n * > | a &<]]> b\n * ^\n * ```\n *\n * @type {State}\n */ function cdataClose(code) {\n if (code === 93) {\n effects.consume(code);\n return cdataEnd;\n }\n return cdata(code);\n }\n /**\n * In CDATA, after `]]`, at `>`.\n *\n * ```markdown\n * > | a &<]]> b\n * ^\n * ```\n *\n * @type {State}\n */ function cdataEnd(code) {\n if (code === 62) return end(code);\n if (code === 93) {\n effects.consume(code);\n return cdataEnd;\n }\n return cdata(code);\n }\n /**\n * In declaration.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function declaration(code) {\n if (code === null || code === 62) return end(code);\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = declaration;\n return lineEndingBefore(code);\n }\n effects.consume(code);\n return declaration;\n }\n /**\n * In instruction.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function instruction(code) {\n if (code === null) return nok(code);\n if (code === 63) {\n effects.consume(code);\n return instructionClose;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = instruction;\n return lineEndingBefore(code);\n }\n effects.consume(code);\n return instruction;\n }\n /**\n * In instruction, after `?`, at `>`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function instructionClose(code) {\n return code === 62 ? 
end(code) : instruction(code);\n }\n /**\n * After ``, in closing tag, at tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function tagCloseStart(code) {\n // ASCII alphabetical.\n if ((0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return tagClose;\n }\n return nok(code);\n }\n /**\n * After ` | a c\n * ^\n * ```\n *\n * @type {State}\n */ function tagClose(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n effects.consume(code);\n return tagClose;\n }\n return tagCloseBetween(code);\n }\n /**\n * In closing tag, after tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function tagCloseBetween(code) {\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = tagCloseBetween;\n return lineEndingBefore(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return tagCloseBetween;\n }\n return end(code);\n }\n /**\n * After ` | a c\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpen(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n effects.consume(code);\n return tagOpen;\n }\n if (code === 47 || code === 62 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) return tagOpenBetween(code);\n return nok(code);\n }\n /**\n * In opening tag, after tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenBetween(code) {\n if (code === 47) {\n effects.consume(code);\n return end;\n }\n // ASCII alphabetical and `:` and `_`.\n if (code === 58 || code === 95 || (0, $5Lprs.asciiAlpha)(code)) {\n effects.consume(code);\n return tagOpenAttributeName;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = tagOpenBetween;\n return lineEndingBefore(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return tagOpenBetween;\n }\n return end(code);\n }\n /**\n * In attribute name.\n *\n * ```markdown\n * > | a d\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeName(code) {\n // ASCII alphabetical and `-`, `.`, `:`, and `_`.\n if (code === 45 || code === 46 || code === 58 || code === 95 || (0, $5Lprs.asciiAlphanumeric)(code)) {\n effects.consume(code);\n return tagOpenAttributeName;\n }\n return tagOpenAttributeNameAfter(code);\n }\n /**\n * After attribute name, before initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | a d\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeNameAfter(code) {\n if (code === 61) {\n effects.consume(code);\n return tagOpenAttributeValueBefore;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = tagOpenAttributeNameAfter;\n return lineEndingBefore(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.consume(code);\n return tagOpenAttributeNameAfter;\n }\n return tagOpenBetween(code);\n }\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeValueBefore(code) {\n if (code === null || code === 60 || code === 61 || code === 62 || code === 96) return nok(code);\n if (code === 34 || code === 39) {\n effects.consume(code);\n marker = code;\n return tagOpenAttributeValueQuoted;\n }\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = tagOpenAttributeValueBefore;\n return lineEndingBefore(code);\n }\n if ((0, $5Lprs.markdownSpace)(code)) {\n 
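// The numeric literals compared against `code` in states like
// `tagOpenAttributeValueBefore` above are plain UTF-16 character codes.
// A quick runnable reference for the ones used most often in these tag and
// attribute states:
const charCodeMeanings = Object.fromEntries(
  [33, 34, 39, 45, 47, 58, 60, 61, 62, 63, 91, 93, 95, 96].map((c) => [c, String.fromCharCode(c)])
);
console.log(charCodeMeanings);
// { '33': '!', '34': '"', '39': "'", '45': '-', '47': '/', '58': ':', '60': '<',
//   '61': '=', '62': '>', '63': '?', '91': '[', '93': ']', '95': '_', '96': '`' }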
effects.consume(code);\n return tagOpenAttributeValueBefore;\n }\n effects.consume(code);\n return tagOpenAttributeValueUnquoted;\n }\n /**\n * In double or single quoted attribute value.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeValueQuoted(code) {\n if (code === marker) {\n effects.consume(code);\n marker = undefined;\n return tagOpenAttributeValueQuotedAfter;\n }\n if (code === null) return nok(code);\n if ((0, $5Lprs.markdownLineEnding)(code)) {\n returnState = tagOpenAttributeValueQuoted;\n return lineEndingBefore(code);\n }\n effects.consume(code);\n return tagOpenAttributeValueQuoted;\n }\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeValueUnquoted(code) {\n if (code === null || code === 34 || code === 39 || code === 60 || code === 61 || code === 96) return nok(code);\n if (code === 47 || code === 62 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) return tagOpenBetween(code);\n effects.consume(code);\n return tagOpenAttributeValueUnquoted;\n }\n /**\n * After double or single quoted attribute value, before whitespace or the end\n * of the tag.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */ function tagOpenAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || (0, $5Lprs.markdownLineEndingOrSpace)(code)) return tagOpenBetween(code);\n return nok(code);\n }\n /**\n * In certain circumstances of a tag where only an `>` is allowed.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */ function end(code) {\n if (code === 62) {\n effects.consume(code);\n effects.exit(\"htmlTextData\");\n effects.exit(\"htmlText\");\n return ok;\n }\n return nok(code);\n }\n /**\n * At eol.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * > | a \n * ```\n *\n * @type {State}\n */ function lineEndingBefore(code) {\n effects.exit(\"htmlTextData\");\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return lineEndingAfter;\n }\n /**\n * After eol, at optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a \n * ^\n * ```\n *\n * @type {State}\n */ function lineEndingAfter(code) {\n // Always populated by defaults.\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, lineEndingAfterPrefix, \"linePrefix\", self.parser.constructs.disable.null.includes(\"codeIndented\") ? 
undefined : 4)(code) : lineEndingAfterPrefix(code);\n }\n /**\n * After eol, after optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a \n * ^\n * ```\n *\n * @type {State}\n */ function lineEndingAfterPrefix(code) {\n effects.enter(\"htmlTextData\");\n return returnState(code);\n }\n}\n\n});\n\nparcelRegister(\"2yvw6\", function(module, exports) {\n\n$parcel$export(module.exports, \"labelEnd\", () => $1dc73455d0df3be9$export$470a5dafbbf62654);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $flE5d = parcelRequire(\"flE5d\");\n\nvar $i8rQF = parcelRequire(\"i8rQF\");\n\nvar $kXx8i = parcelRequire(\"kXx8i\");\n\nvar $5Jc1R = parcelRequire(\"5Jc1R\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $Ux2lp = parcelRequire(\"Ux2lp\");\n\nvar $9HErY = parcelRequire(\"9HErY\");\n\nvar $8f02Z = parcelRequire(\"8f02Z\");\nconst $1dc73455d0df3be9$export$470a5dafbbf62654 = {\n name: \"labelEnd\",\n tokenize: $1dc73455d0df3be9$var$tokenizeLabelEnd,\n resolveTo: $1dc73455d0df3be9$var$resolveToLabelEnd,\n resolveAll: $1dc73455d0df3be9$var$resolveAllLabelEnd\n};\n/** @type {Construct} */ const $1dc73455d0df3be9$var$resourceConstruct = {\n tokenize: $1dc73455d0df3be9$var$tokenizeResource\n};\n/** @type {Construct} */ const $1dc73455d0df3be9$var$referenceFullConstruct = {\n tokenize: $1dc73455d0df3be9$var$tokenizeReferenceFull\n};\n/** @type {Construct} */ const $1dc73455d0df3be9$var$referenceCollapsedConstruct = {\n tokenize: $1dc73455d0df3be9$var$tokenizeReferenceCollapsed\n};\n/** @type {Resolver} */ function $1dc73455d0df3be9$var$resolveAllLabelEnd(events) {\n let index = -1;\n while(++index < events.length){\n const token = events[index][1];\n if (token.type === \"labelImage\" || token.type === \"labelLink\" || token.type === \"labelEnd\") {\n // Remove the marker.\n events.splice(index + 1, token.type === \"labelImage\" ? 4 : 2);\n token.type = \"data\";\n index++;\n }\n }\n return events;\n}\n/** @type {Resolver} */ function $1dc73455d0df3be9$var$resolveToLabelEnd(events, context) {\n let index = events.length;\n let offset = 0;\n /** @type {Token} */ let token;\n /** @type {number | undefined} */ let open;\n /** @type {number | undefined} */ let close;\n /** @type {Array} */ let media;\n // Find an opening.\n while(index--){\n token = events[index][1];\n if (open) {\n // If we see another link, or inactive link label, we’ve been here before.\n if (token.type === \"link\" || token.type === \"labelLink\" && token._inactive) break;\n // Mark other link openings as inactive, as we can’t have links in\n // links.\n if (events[index][0] === \"enter\" && token.type === \"labelLink\") token._inactive = true;\n } else if (close) {\n if (events[index][0] === \"enter\" && (token.type === \"labelImage\" || token.type === \"labelLink\") && !token._balanced) {\n open = index;\n if (token.type !== \"labelLink\") {\n offset = 2;\n break;\n }\n }\n } else if (token.type === \"labelEnd\") close = index;\n }\n const group = {\n type: events[open][1].type === \"labelLink\" ? 
\"link\" : \"image\",\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n };\n const label = {\n type: \"label\",\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[close][1].end)\n };\n const text = {\n type: \"labelText\",\n start: Object.assign({}, events[open + offset + 2][1].end),\n end: Object.assign({}, events[close - 2][1].start)\n };\n media = [\n [\n \"enter\",\n group,\n context\n ],\n [\n \"enter\",\n label,\n context\n ]\n ];\n // Opening marker.\n media = (0, $Ux2lp.push)(media, events.slice(open + 1, open + offset + 3));\n // Text open.\n media = (0, $Ux2lp.push)(media, [\n [\n \"enter\",\n text,\n context\n ]\n ]);\n // Always populated by defaults.\n // Between.\n media = (0, $Ux2lp.push)(media, (0, $8f02Z.resolveAll)(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close - 3), context));\n // Text close, marker close, label close.\n media = (0, $Ux2lp.push)(media, [\n [\n \"exit\",\n text,\n context\n ],\n events[close - 2],\n events[close - 1],\n [\n \"exit\",\n label,\n context\n ]\n ]);\n // Reference, resource, or so.\n media = (0, $Ux2lp.push)(media, events.slice(close + 1));\n // Media close.\n media = (0, $Ux2lp.push)(media, [\n [\n \"exit\",\n group,\n context\n ]\n ]);\n (0, $Ux2lp.splice)(events, open, events.length, media);\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1dc73455d0df3be9$var$tokenizeLabelEnd(effects, ok, nok) {\n const self = this;\n let index = self.events.length;\n /** @type {Token} */ let labelStart;\n /** @type {boolean} */ let defined;\n // Find an opening.\n while(index--)if ((self.events[index][1].type === \"labelImage\" || self.events[index][1].type === \"labelLink\") && !self.events[index][1]._balanced) {\n labelStart = self.events[index][1];\n break;\n }\n return start;\n /**\n * Start of label end.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ```\n *\n * @type {State}\n */ function start(code) {\n // If there is not an okay opening.\n if (!labelStart) return nok(code);\n // If the corresponding label (link) start is marked as inactive,\n // it means we’d be wrapping a link, like this:\n //\n // ```markdown\n // > | a [b [c](d) e](f) g.\n // ^\n // ```\n //\n // We can’t have that, so it’s just balanced brackets.\n if (labelStart._inactive) return labelEndNok(code);\n defined = self.parser.defined.includes((0, $9HErY.normalizeIdentifier)(self.sliceSerialize({\n start: labelStart.end,\n end: self.now()\n })));\n effects.enter(\"labelEnd\");\n effects.enter(\"labelMarker\");\n effects.consume(code);\n effects.exit(\"labelMarker\");\n effects.exit(\"labelEnd\");\n return after;\n }\n /**\n * After `]`.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n // Note: `markdown-rs` also parses GFM footnotes here, which for us is in\n // an extension.\n // Resource (`[asd](fgh)`)?\n if (code === 40) return effects.attempt($1dc73455d0df3be9$var$resourceConstruct, labelEndOk, defined ? labelEndOk : labelEndNok)(code);\n // Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?\n if (code === 91) return effects.attempt($1dc73455d0df3be9$var$referenceFullConstruct, labelEndOk, defined ? referenceNotFull : labelEndNok)(code);\n // Shortcut (`[asd]`) reference?\n return defined ? 
labelEndOk(code) : labelEndNok(code);\n }\n /**\n * After `]`, at `[`, but not at a full reference.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */ function referenceNotFull(code) {\n return effects.attempt($1dc73455d0df3be9$var$referenceCollapsedConstruct, labelEndOk, labelEndNok)(code);\n }\n /**\n * Done, we found something.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */ function labelEndOk(code) {\n // Note: `markdown-rs` does a bunch of stuff here.\n return ok(code);\n }\n /**\n * Done, it’s nothing.\n *\n * There was an okay opening, but we didn’t match anything.\n *\n * ```markdown\n * > | [a](b c\n * ^\n * > | [a][b c\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */ function labelEndNok(code) {\n labelStart._balanced = true;\n return nok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1dc73455d0df3be9$var$tokenizeResource(effects, ok, nok) {\n return resourceStart;\n /**\n * At a resource.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */ function resourceStart(code) {\n effects.enter(\"resource\");\n effects.enter(\"resourceMarker\");\n effects.consume(code);\n effects.exit(\"resourceMarker\");\n return resourceBefore;\n }\n /**\n * In resource, after `(`, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */ function resourceBefore(code) {\n return (0, $5Lprs.markdownLineEndingOrSpace)(code) ? (0, $5Jc1R.factoryWhitespace)(effects, resourceOpen)(code) : resourceOpen(code);\n }\n /**\n * In resource, after optional whitespace, at `)` or a destination.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */ function resourceOpen(code) {\n if (code === 41) return resourceEnd(code);\n return (0, $flE5d.factoryDestination)(effects, resourceDestinationAfter, resourceDestinationMissing, \"resourceDestination\", \"resourceDestinationLiteral\", \"resourceDestinationLiteralMarker\", \"resourceDestinationRaw\", \"resourceDestinationString\", 32)(code);\n }\n /**\n * In resource, after destination, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */ function resourceDestinationAfter(code) {\n return (0, $5Lprs.markdownLineEndingOrSpace)(code) ? (0, $5Jc1R.factoryWhitespace)(effects, resourceBetween)(code) : resourceEnd(code);\n }\n /**\n * At invalid destination.\n *\n * ```markdown\n * > | [a](<<) b\n * ^\n * ```\n *\n * @type {State}\n */ function resourceDestinationMissing(code) {\n return nok(code);\n }\n /**\n * In resource, after destination and whitespace, at `(` or title.\n *\n * ```markdown\n * > | [a](b ) c\n * ^\n * ```\n *\n * @type {State}\n */ function resourceBetween(code) {\n if (code === 34 || code === 39 || code === 40) return (0, $kXx8i.factoryTitle)(effects, resourceTitleAfter, nok, \"resourceTitle\", \"resourceTitleMarker\", \"resourceTitleString\")(code);\n return resourceEnd(code);\n }\n /**\n * In resource, after title, at optional whitespace.\n *\n * ```markdown\n * > | [a](b \"c\") d\n * ^\n * ```\n *\n * @type {State}\n */ function resourceTitleAfter(code) {\n return (0, $5Lprs.markdownLineEndingOrSpace)(code) ? 
(0, $5Jc1R.factoryWhitespace)(effects, resourceEnd)(code) : resourceEnd(code);\n }\n /**\n * In resource, at `)`.\n *\n * ```markdown\n * > | [a](b) d\n * ^\n * ```\n *\n * @type {State}\n */ function resourceEnd(code) {\n if (code === 41) {\n effects.enter(\"resourceMarker\");\n effects.consume(code);\n effects.exit(\"resourceMarker\");\n effects.exit(\"resource\");\n return ok;\n }\n return nok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1dc73455d0df3be9$var$tokenizeReferenceFull(effects, ok, nok) {\n const self = this;\n return referenceFull;\n /**\n * In a reference (full), at the `[`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */ function referenceFull(code) {\n return (0, $i8rQF.factoryLabel).call(self, effects, referenceFullAfter, referenceFullMissing, \"reference\", \"referenceMarker\", \"referenceString\")(code);\n }\n /**\n * In a reference (full), after `]`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */ function referenceFullAfter(code) {\n return self.parser.defined.includes((0, $9HErY.normalizeIdentifier)(self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1))) ? ok(code) : nok(code);\n }\n /**\n * In reference (full) that was missing.\n *\n * ```markdown\n * > | [a][b d\n * ^\n * ```\n *\n * @type {State}\n */ function referenceFullMissing(code) {\n return nok(code);\n }\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $1dc73455d0df3be9$var$tokenizeReferenceCollapsed(effects, ok, nok) {\n return referenceCollapsedStart;\n /**\n * In reference (collapsed), at `[`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */ function referenceCollapsedStart(code) {\n // We only attempt a collapsed label if there’s a `[`.\n effects.enter(\"reference\");\n effects.enter(\"referenceMarker\");\n effects.consume(code);\n effects.exit(\"referenceMarker\");\n return referenceCollapsedOpen;\n }\n /**\n * In reference (collapsed), at `]`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */ function referenceCollapsedOpen(code) {\n if (code === 93) {\n effects.enter(\"referenceMarker\");\n effects.consume(code);\n effects.exit(\"referenceMarker\");\n effects.exit(\"reference\");\n return ok;\n }\n return nok(code);\n }\n}\n\n});\n\nparcelRegister(\"l3YSx\", function(module, exports) {\n\n$parcel$export(module.exports, \"labelStartImage\", () => $f5591a0a77ea393a$export$3d754936e25aa5f5);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $2yvw6 = parcelRequire(\"2yvw6\");\nconst $f5591a0a77ea393a$export$3d754936e25aa5f5 = {\n name: \"labelStartImage\",\n tokenize: $f5591a0a77ea393a$var$tokenizeLabelStartImage,\n resolveAll: (0, $2yvw6.labelEnd).resolveAll\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $f5591a0a77ea393a$var$tokenizeLabelStartImage(effects, ok, nok) {\n const self = this;\n return start;\n /**\n * Start of label (image) start.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"labelImage\");\n effects.enter(\"labelImageMarker\");\n 
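// Roughly what `labelEnd`'s `after` state above decides once `]` has been
// seen: `(` attempts a resource, `[` attempts a full (then collapsed)
// reference, and anything else is a shortcut reference that only works when
// the label is defined. A simplified standalone sketch (the real states also
// fall back to a shortcut when a resource/reference attempt fails and the
// label is defined):
function classifyAfterLabelEnd(nextChar, labelIsDefined) {
  if (nextChar === "(") return "resource";
  if (nextChar === "[") return labelIsDefined ? "full-or-collapsed-reference" : "full-reference-only";
  return labelIsDefined ? "shortcut-reference" : "not-a-link";
}
console.log(classifyAfterLabelEnd("(", false)); // "resource"
console.log(classifyAfterLabelEnd("[", true)); // "full-or-collapsed-reference"
console.log(classifyAfterLabelEnd("x", false)); // "not-a-link"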
effects.consume(code);\n effects.exit(\"labelImageMarker\");\n return open;\n }\n /**\n * After `!`, at `[`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */ function open(code) {\n if (code === 91) {\n effects.enter(\"labelMarker\");\n effects.consume(code);\n effects.exit(\"labelMarker\");\n effects.exit(\"labelImage\");\n return after;\n }\n return nok(code);\n }\n /**\n * After `![`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * This is needed in because, when GFM footnotes are enabled, images never\n * form when started with a `^`.\n * Instead, links form:\n *\n * ```markdown\n * ![^a](b)\n *\n * ![^a][b]\n *\n * [b]: c\n * ```\n *\n * ```html\n * !^a
\n * !^a
</a></p>\n * <p>!<a href=\"c\">^a</a></p>
\n * ```\n *\n * @type {State}\n */ function after(code) {\n // To do: use a new field to do this, this is still needed for\n // `micromark-extension-gfm-footnote`, but the `label-start-link`\n // behavior isn’t.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */ return code === 94 && \"_hiddenFootnoteSupport\" in self.parser.constructs ? nok(code) : ok(code);\n }\n}\n\n});\n\nparcelRegister(\"ifBjz\", function(module, exports) {\n\n$parcel$export(module.exports, \"labelStartLink\", () => $d496929de41504d8$export$5c0cee0701a3b584);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $2yvw6 = parcelRequire(\"2yvw6\");\nconst $d496929de41504d8$export$5c0cee0701a3b584 = {\n name: \"labelStartLink\",\n tokenize: $d496929de41504d8$var$tokenizeLabelStartLink,\n resolveAll: (0, $2yvw6.labelEnd).resolveAll\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $d496929de41504d8$var$tokenizeLabelStartLink(effects, ok, nok) {\n const self = this;\n return start;\n /**\n * Start of label (link) start.\n *\n * ```markdown\n * > | a [b] c\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"labelLink\");\n effects.enter(\"labelMarker\");\n effects.consume(code);\n effects.exit(\"labelMarker\");\n effects.exit(\"labelLink\");\n return after;\n }\n /** @type {State} */ function after(code) {\n // To do: this isn’t needed in `micromark-extension-gfm-footnote`,\n // remove.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */ return code === 94 && \"_hiddenFootnoteSupport\" in self.parser.constructs ? 
nok(code) : ok(code);\n }\n}\n\n});\n\nparcelRegister(\"9shNq\", function(module, exports) {\n\n$parcel$export(module.exports, \"lineEnding\", () => $6e24888a23d546e6$export$8e62e0ad51c97b2);\n/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nconst $6e24888a23d546e6$export$8e62e0ad51c97b2 = {\n name: \"lineEnding\",\n tokenize: $6e24888a23d546e6$var$tokenizeLineEnding\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $6e24888a23d546e6$var$tokenizeLineEnding(effects, ok) {\n return start;\n /** @type {State} */ function start(code) {\n effects.enter(\"lineEnding\");\n effects.consume(code);\n effects.exit(\"lineEnding\");\n return (0, $8GWoH.factorySpace)(effects, ok, \"linePrefix\");\n }\n}\n\n});\n\nparcelRegister(\"7twjg\", function(module, exports) {\n\n$parcel$export(module.exports, \"list\", () => $57144dde063f82cb$export$8837f4fc672e936d);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\n\nvar $eRfeJ = parcelRequire(\"eRfeJ\");\n\nvar $eBJ0T = parcelRequire(\"eBJ0T\");\nconst $57144dde063f82cb$export$8837f4fc672e936d = {\n name: \"list\",\n tokenize: $57144dde063f82cb$var$tokenizeListStart,\n continuation: {\n tokenize: $57144dde063f82cb$var$tokenizeListContinuation\n },\n exit: $57144dde063f82cb$var$tokenizeListEnd\n};\n/** @type {Construct} */ const $57144dde063f82cb$var$listItemPrefixWhitespaceConstruct = {\n tokenize: $57144dde063f82cb$var$tokenizeListItemPrefixWhitespace,\n partial: true\n};\n/** @type {Construct} */ const $57144dde063f82cb$var$indentConstruct = {\n tokenize: $57144dde063f82cb$var$tokenizeIndent,\n partial: true\n};\n// To do: `markdown-rs` parses list items on their own and later stitches them\n// together.\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */ function $57144dde063f82cb$var$tokenizeListStart(effects, ok, nok) {\n const self = this;\n const tail = self.events[self.events.length - 1];\n let initialSize = tail && tail[1].type === \"linePrefix\" ? tail[2].sliceSerialize(tail[1], true).length : 0;\n let size = 0;\n return start;\n /** @type {State} */ function start(code) {\n const kind = self.containerState.type || (code === 42 || code === 43 || code === 45 ? \"listUnordered\" : \"listOrdered\");\n if (kind === \"listUnordered\" ? !self.containerState.marker || code === self.containerState.marker : (0, $5Lprs.asciiDigit)(code)) {\n if (!self.containerState.type) {\n self.containerState.type = kind;\n effects.enter(kind, {\n _container: true\n });\n }\n if (kind === \"listUnordered\") {\n effects.enter(\"listItemPrefix\");\n return code === 42 || code === 45 ? 
effects.check((0, $eBJ0T.thematicBreak), nok, atMarker)(code) : atMarker(code);\n }\n if (!self.interrupt || code === 49) {\n effects.enter(\"listItemPrefix\");\n effects.enter(\"listItemValue\");\n return inside(code);\n }\n }\n return nok(code);\n }\n /** @type {State} */ function inside(code) {\n if ((0, $5Lprs.asciiDigit)(code) && ++size < 10) {\n effects.consume(code);\n return inside;\n }\n if ((!self.interrupt || size < 2) && (self.containerState.marker ? code === self.containerState.marker : code === 41 || code === 46)) {\n effects.exit(\"listItemValue\");\n return atMarker(code);\n }\n return nok(code);\n }\n /**\n * @type {State}\n **/ function atMarker(code) {\n effects.enter(\"listItemMarker\");\n effects.consume(code);\n effects.exit(\"listItemMarker\");\n self.containerState.marker = self.containerState.marker || code;\n return effects.check((0, $eRfeJ.blankLine), // Can’t be empty when interrupting.\n self.interrupt ? nok : onBlank, effects.attempt($57144dde063f82cb$var$listItemPrefixWhitespaceConstruct, endOfPrefix, otherPrefix));\n }\n /** @type {State} */ function onBlank(code) {\n self.containerState.initialBlankLine = true;\n initialSize++;\n return endOfPrefix(code);\n }\n /** @type {State} */ function otherPrefix(code) {\n if ((0, $5Lprs.markdownSpace)(code)) {\n effects.enter(\"listItemPrefixWhitespace\");\n effects.consume(code);\n effects.exit(\"listItemPrefixWhitespace\");\n return endOfPrefix;\n }\n return nok(code);\n }\n /** @type {State} */ function endOfPrefix(code) {\n self.containerState.size = initialSize + self.sliceSerialize(effects.exit(\"listItemPrefix\"), true).length;\n return ok(code);\n }\n}\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */ function $57144dde063f82cb$var$tokenizeListContinuation(effects, ok, nok) {\n const self = this;\n self.containerState._closeFlow = undefined;\n return effects.check((0, $eRfeJ.blankLine), onBlank, notBlank);\n /** @type {State} */ function onBlank(code) {\n self.containerState.furtherBlankLines = self.containerState.furtherBlankLines || self.containerState.initialBlankLine;\n // We have a blank line.\n // Still, try to consume at most the items size.\n return (0, $8GWoH.factorySpace)(effects, ok, \"listItemIndent\", self.containerState.size + 1)(code);\n }\n /** @type {State} */ function notBlank(code) {\n if (self.containerState.furtherBlankLines || !(0, $5Lprs.markdownSpace)(code)) {\n self.containerState.furtherBlankLines = undefined;\n self.containerState.initialBlankLine = undefined;\n return notInCurrentItem(code);\n }\n self.containerState.furtherBlankLines = undefined;\n self.containerState.initialBlankLine = undefined;\n return effects.attempt($57144dde063f82cb$var$indentConstruct, ok, notInCurrentItem)(code);\n }\n /** @type {State} */ function notInCurrentItem(code) {\n // While we do continue, we signal that the flow should be closed.\n self.containerState._closeFlow = true;\n // As we’re closing flow, we’re no longer interrupting.\n self.interrupt = undefined;\n // Always populated by defaults.\n return (0, $8GWoH.factorySpace)(effects, effects.attempt($57144dde063f82cb$export$8837f4fc672e936d, ok, nok), \"linePrefix\", self.parser.constructs.disable.null.includes(\"codeIndented\") ? 
undefined : 4)(code);\n }\n}\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */ function $57144dde063f82cb$var$tokenizeIndent(effects, ok, nok) {\n const self = this;\n return (0, $8GWoH.factorySpace)(effects, afterPrefix, \"listItemIndent\", self.containerState.size + 1);\n /** @type {State} */ function afterPrefix(code) {\n const tail = self.events[self.events.length - 1];\n return tail && tail[1].type === \"listItemIndent\" && tail[2].sliceSerialize(tail[1], true).length === self.containerState.size ? ok(code) : nok(code);\n }\n}\n/**\n * @type {Exiter}\n * @this {TokenizeContext}\n */ function $57144dde063f82cb$var$tokenizeListEnd(effects) {\n effects.exit(this.containerState.type);\n}\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */ function $57144dde063f82cb$var$tokenizeListItemPrefixWhitespace(effects, ok, nok) {\n const self = this;\n // Always populated by defaults.\n return (0, $8GWoH.factorySpace)(effects, afterPrefix, \"listItemPrefixWhitespace\", self.parser.constructs.disable.null.includes(\"codeIndented\") ? undefined : 5);\n /** @type {State} */ function afterPrefix(code) {\n const tail = self.events[self.events.length - 1];\n return !(0, $5Lprs.markdownSpace)(code) && tail && tail[1].type === \"listItemPrefixWhitespace\" ? ok(code) : nok(code);\n }\n}\n\n});\nparcelRegister(\"eBJ0T\", function(module, exports) {\n\n$parcel$export(module.exports, \"thematicBreak\", () => $aa27701f1509407c$export$ba7b13e047416c03);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $aa27701f1509407c$export$ba7b13e047416c03 = {\n name: \"thematicBreak\",\n tokenize: $aa27701f1509407c$var$tokenizeThematicBreak\n};\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $aa27701f1509407c$var$tokenizeThematicBreak(effects, ok, nok) {\n let size = 0;\n /** @type {NonNullable} */ let marker;\n return start;\n /**\n * Start of thematic break.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n effects.enter(\"thematicBreak\");\n // To do: parse indent like `markdown-rs`.\n return before(code);\n }\n /**\n * After optional whitespace, at marker.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */ function before(code) {\n marker = code;\n return atBreak(code);\n }\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */ function atBreak(code) {\n if (code === marker) {\n effects.enter(\"thematicBreakSequence\");\n return sequence(code);\n }\n if (size >= 3 && (code === null || (0, $5Lprs.markdownLineEnding)(code))) {\n effects.exit(\"thematicBreak\");\n return ok(code);\n }\n return nok(code);\n }\n /**\n * In sequence.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */ function sequence(code) {\n if (code === marker) {\n effects.consume(code);\n size++;\n return sequence;\n }\n effects.exit(\"thematicBreakSequence\");\n return (0, $5Lprs.markdownSpace)(code) ? 
(0, $8GWoH.factorySpace)(effects, atBreak, \"whitespace\")(code) : atBreak(code);\n }\n}\n\n});\n\n\nparcelRegister(\"cfK44\", function(module, exports) {\n\n$parcel$export(module.exports, \"setextUnderline\", () => $8eba9e38644cd781$export$e104e2de391dfde9);\n/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */ \nvar $8GWoH = parcelRequire(\"8GWoH\");\n\nvar $5Lprs = parcelRequire(\"5Lprs\");\nconst $8eba9e38644cd781$export$e104e2de391dfde9 = {\n name: \"setextUnderline\",\n tokenize: $8eba9e38644cd781$var$tokenizeSetextUnderline,\n resolveTo: $8eba9e38644cd781$var$resolveToSetextUnderline\n};\n/** @type {Resolver} */ function $8eba9e38644cd781$var$resolveToSetextUnderline(events, context) {\n // To do: resolve like `markdown-rs`.\n let index = events.length;\n /** @type {number | undefined} */ let content;\n /** @type {number | undefined} */ let text;\n /** @type {number | undefined} */ let definition;\n // Find the opening of the content.\n // It’ll always exist: we don’t tokenize if it isn’t there.\n while(index--)if (events[index][0] === \"enter\") {\n if (events[index][1].type === \"content\") {\n content = index;\n break;\n }\n if (events[index][1].type === \"paragraph\") text = index;\n } else {\n if (events[index][1].type === \"content\") // Remove the content end (if needed we’ll add it later)\n events.splice(index, 1);\n if (!definition && events[index][1].type === \"definition\") definition = index;\n }\n const heading = {\n type: \"setextHeading\",\n start: Object.assign({}, events[text][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n };\n // Change the paragraph to setext heading text.\n events[text][1].type = \"setextHeadingText\";\n // If we have definitions in the content, we’ll keep on having content,\n // but we need move it.\n if (definition) {\n events.splice(text, 0, [\n \"enter\",\n heading,\n context\n ]);\n events.splice(definition + 1, 0, [\n \"exit\",\n events[content][1],\n context\n ]);\n events[content][1].end = Object.assign({}, events[definition][1].end);\n } else events[content][1] = heading;\n // Add the heading exit at the end.\n events.push([\n \"exit\",\n heading,\n context\n ]);\n return events;\n}\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */ function $8eba9e38644cd781$var$tokenizeSetextUnderline(effects, ok, nok) {\n const self = this;\n /** @type {NonNullable} */ let marker;\n return start;\n /**\n * At start of heading (setext) underline.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */ function start(code) {\n let index = self.events.length;\n /** @type {boolean | undefined} */ let paragraph;\n // Find an opening.\n while(index--)// Skip enter/exit of line ending, line prefix, and content.\n // We can now either have a definition or a paragraph.\n if (self.events[index][1].type !== \"lineEnding\" && self.events[index][1].type !== \"linePrefix\" && self.events[index][1].type !== \"content\") {\n paragraph = self.events[index][1].type === \"paragraph\";\n break;\n }\n // To do: handle lazy/pierce like `markdown-rs`.\n // To do: parse indent like `markdown-rs`.\n if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph)) {\n 
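// The `thematicBreak` tokenizer above accepts a run of three or more of one
// marker (`*`, `-`, or `_`), with optional whitespace between markers. A
// standalone restatement of that rule (an illustration only; it skips the
// indentation handling the real construct gets from `factorySpace`):
function isThematicBreakLine(line) {
  let marker;
  let count = 0;
  for (const ch of line.trim()) {
    if (ch === " " || ch === "\t") continue;
    if (marker === undefined) {
      if (ch !== "*" && ch !== "-" && ch !== "_") return false;
      marker = ch;
    }
    if (ch !== marker) return false;
    count++;
  }
  return count >= 3;
}
console.log(isThematicBreakLine("***")); // true
console.log(isThematicBreakLine("- - -")); // true
console.log(isThematicBreakLine("**")); // false
console.log(isThematicBreakLine("*-*")); // false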
effects.enter(\"setextHeadingLine\");\n marker = code;\n return before(code);\n }\n return nok(code);\n }\n /**\n * After optional whitespace, at `-` or `=`.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */ function before(code) {\n effects.enter(\"setextHeadingLineSequence\");\n return inside(code);\n }\n /**\n * In sequence.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */ function inside(code) {\n if (code === marker) {\n effects.consume(code);\n return inside;\n }\n effects.exit(\"setextHeadingLineSequence\");\n return (0, $5Lprs.markdownSpace)(code) ? (0, $8GWoH.factorySpace)(effects, after, \"lineSuffix\")(code) : after(code);\n }\n /**\n * After sequence, after optional whitespace.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */ function after(code) {\n if (code === null || (0, $5Lprs.markdownLineEnding)(code)) {\n effects.exit(\"setextHeadingLine\");\n return ok(code);\n }\n return nok(code);\n }\n}\n\n});\n\n\n\nparcelRegister(\"hFzKD\", function(module, exports) {\n\n$parcel$export(module.exports, \"preprocess\", () => $cdd1ff6afd8515e5$export$fc37fe19dfda43ee);\n/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Value} Value\n */ /**\n * @callback Preprocessor\n * @param {Value} value\n * @param {Encoding | null | undefined} [encoding]\n * @param {boolean | null | undefined} [end=false]\n * @returns {Array}\n */ const $cdd1ff6afd8515e5$var$search = /[\\0\\t\\n\\r]/g;\nfunction $cdd1ff6afd8515e5$export$fc37fe19dfda43ee() {\n let column = 1;\n let buffer = \"\";\n /** @type {boolean | undefined} */ let start = true;\n /** @type {boolean | undefined} */ let atCarriageReturn;\n return preprocessor;\n /** @type {Preprocessor} */ function preprocessor(value, encoding, end) {\n /** @type {Array} */ const chunks = [];\n /** @type {RegExpMatchArray | null} */ let match;\n /** @type {number} */ let next;\n /** @type {number} */ let startPosition;\n /** @type {number} */ let endPosition;\n /** @type {Code} */ let code;\n // @ts-expect-error `Buffer` does allow an encoding.\n value = buffer + value.toString(encoding);\n startPosition = 0;\n buffer = \"\";\n if (start) {\n // To do: `markdown-rs` actually parses BOMs (byte order mark).\n if (value.charCodeAt(0) === 65279) startPosition++;\n start = undefined;\n }\n while(startPosition < value.length){\n $cdd1ff6afd8515e5$var$search.lastIndex = startPosition;\n match = $cdd1ff6afd8515e5$var$search.exec(value);\n endPosition = match && match.index !== undefined ? 
match.index : value.length;\n code = value.charCodeAt(endPosition);\n if (!match) {\n buffer = value.slice(startPosition);\n break;\n }\n if (code === 10 && startPosition === endPosition && atCarriageReturn) {\n chunks.push(-3);\n atCarriageReturn = undefined;\n } else {\n if (atCarriageReturn) {\n chunks.push(-5);\n atCarriageReturn = undefined;\n }\n if (startPosition < endPosition) {\n chunks.push(value.slice(startPosition, endPosition));\n column += endPosition - startPosition;\n }\n switch(code){\n case 0:\n chunks.push(65533);\n column++;\n break;\n case 9:\n next = Math.ceil(column / 4) * 4;\n chunks.push(-2);\n while(column++ < next)chunks.push(-1);\n break;\n case 10:\n chunks.push(-4);\n column = 1;\n break;\n default:\n atCarriageReturn = true;\n column = 1;\n }\n }\n startPosition = endPosition + 1;\n }\n if (end) {\n if (atCarriageReturn) chunks.push(-5);\n if (buffer) chunks.push(buffer);\n chunks.push(null);\n }\n return chunks;\n }\n}\n\n});\n\nparcelRegister(\"6Jw5n\", function(module, exports) {\n\n$parcel$export(module.exports, \"postprocess\", () => $4e6fd59d488c6494$export$bd0e6e1378a871d7);\n/**\n * @typedef {import('micromark-util-types').Event} Event\n */ \nvar $iNbDn = parcelRequire(\"iNbDn\");\nfunction $4e6fd59d488c6494$export$bd0e6e1378a871d7(events) {\n while(!(0, $iNbDn.subtokenize)(events));\n return events;\n}\n\n});\n\nparcelRegister(\"34rZa\", function(module, exports) {\n\n$parcel$export(module.exports, \"decodeNumericCharacterReference\", () => $23c7742930bb7617$export$15a69557afac2c20);\n/**\n * Turn the number (in string form as either hexa- or plain decimal) coming from\n * a numeric character reference into a character.\n *\n * Sort of like `String.fromCharCode(Number.parseInt(value, base))`, but makes\n * non-characters and control characters safe.\n *\n * @param {string} value\n * Value to decode.\n * @param {number} base\n * Numeric base.\n * @returns {string}\n * Character.\n */ function $23c7742930bb7617$export$15a69557afac2c20(value, base) {\n const code = Number.parseInt(value, base);\n if (// C0 except for HT, LF, FF, CR, space.\n code < 9 || code === 11 || code > 13 && code < 32 || // Control character (DEL) of C0, and C1 controls.\n code > 126 && code < 160 || // Lone high surrogates and low surrogates.\n code > 55295 && code < 57344 || // Noncharacters.\n code > 64975 && code < 65008 || (code & 65535) === 65535 || (code & 65535) === 65534 /* eslint-enable no-bitwise */ || // Out of range\n code > 1114111) return \"\\uFFFD\";\n return String.fromCharCode(code);\n}\n\n});\n\nparcelRegister(\"1lC1m\", function(module, exports) {\n\n$parcel$export(module.exports, \"decodeString\", () => $0fb53ec7385928f9$export$a0fb664af7d0cc44);\n\nvar $gxs7E = parcelRequire(\"gxs7E\");\n\nvar $34rZa = parcelRequire(\"34rZa\");\nconst $0fb53ec7385928f9$var$characterEscapeOrReference = /\\\\([!-/:-@[-`{-~])|&(#(?:\\d{1,7}|x[\\da-f]{1,6})|[\\da-z]{1,31});/gi;\nfunction $0fb53ec7385928f9$export$a0fb664af7d0cc44(value) {\n return value.replace($0fb53ec7385928f9$var$characterEscapeOrReference, $0fb53ec7385928f9$var$decode);\n}\n/**\n * @param {string} $0\n * @param {string} $1\n * @param {string} $2\n * @returns {string}\n */ function $0fb53ec7385928f9$var$decode($0, $1, $2) {\n if ($1) // Escape.\n return $1;\n // Reference.\n const head = $2.charCodeAt(0);\n if (head === 35) {\n const head = $2.charCodeAt(1);\n const hex = head === 120 || head === 88;\n return (0, $34rZa.decodeNumericCharacterReference)($2.slice(hex ? 2 : 1), hex ? 
16 : 10);\n }\n return (0, $gxs7E.decodeNamedCharacterReference)($2) || $0;\n}\n\n});\n\nparcelRegister(\"f9WaX\", function(module, exports) {\n\n$parcel$export(module.exports, \"stringifyPosition\", () => $b094d2c4d032c594$export$c304dd45fe166145);\n/**\n * @typedef {import('unist').Node} Node\n * @typedef {import('unist').Point} Point\n * @typedef {import('unist').Position} Position\n */ /**\n * @typedef NodeLike\n * @property {string} type\n * @property {PositionLike | null | undefined} [position]\n *\n * @typedef PositionLike\n * @property {PointLike | null | undefined} [start]\n * @property {PointLike | null | undefined} [end]\n *\n * @typedef PointLike\n * @property {number | null | undefined} [line]\n * @property {number | null | undefined} [column]\n * @property {number | null | undefined} [offset]\n */ /**\n * Serialize the positional info of a point, position (start and end points),\n * or node.\n *\n * @param {Node | NodeLike | Position | PositionLike | Point | PointLike | null | undefined} [value]\n * Node, position, or point.\n * @returns {string}\n * Pretty printed positional info of a node (`string`).\n *\n * In the format of a range `ls:cs-le:ce` (when given `node` or `position`)\n * or a point `l:c` (when given `point`), where `l` stands for line, `c` for\n * column, `s` for `start`, and `e` for end.\n * An empty string (`''`) is returned if the given value is neither `node`,\n * `position`, nor `point`.\n */ function $b094d2c4d032c594$export$c304dd45fe166145(value) {\n // Nothing.\n if (!value || typeof value !== \"object\") return \"\";\n // Node.\n if (\"position\" in value || \"type\" in value) return $b094d2c4d032c594$var$position(value.position);\n // Position.\n if (\"start\" in value || \"end\" in value) return $b094d2c4d032c594$var$position(value);\n // Point.\n if (\"line\" in value || \"column\" in value) return $b094d2c4d032c594$var$point(value);\n // ?\n return \"\";\n}\n/**\n * @param {Point | PointLike | null | undefined} point\n * @returns {string}\n */ function $b094d2c4d032c594$var$point(point) {\n return $b094d2c4d032c594$var$index(point && point.line) + \":\" + $b094d2c4d032c594$var$index(point && point.column);\n}\n/**\n * @param {Position | PositionLike | null | undefined} pos\n * @returns {string}\n */ function $b094d2c4d032c594$var$position(pos) {\n return $b094d2c4d032c594$var$point(pos && pos.start) + \"-\" + $b094d2c4d032c594$var$point(pos && pos.end);\n}\n/**\n * @param {number | null | undefined} value\n * @returns {number}\n */ function $b094d2c4d032c594$var$index(value) {\n return value && typeof value === \"number\" ? 
value : 1;\n}\n\n});\n\n\n\n\n//# sourceMappingURL=flowDiagram-b222e15a.f77f79b1.js.map\n","import { l as log, M as decodeEntities } from \"./mermaid-6dc72991.js\";\nimport { fromMarkdown } from \"mdast-util-from-markdown\";\nimport { dedent } from \"ts-dedent\";\nfunction preprocessMarkdown(markdown) {\n const withoutMultipleNewlines = markdown.replace(/\\n{2,}/g, \"\\n\");\n const withoutExtraSpaces = dedent(withoutMultipleNewlines);\n return withoutExtraSpaces;\n}\nfunction markdownToLines(markdown) {\n const preprocessedMarkdown = preprocessMarkdown(markdown);\n const { children } = fromMarkdown(preprocessedMarkdown);\n const lines = [[]];\n let currentLine = 0;\n function processNode(node, parentType = \"normal\") {\n if (node.type === \"text\") {\n const textLines = node.value.split(\"\\n\");\n textLines.forEach((textLine, index) => {\n if (index !== 0) {\n currentLine++;\n lines.push([]);\n }\n textLine.split(\" \").forEach((word) => {\n if (word) {\n lines[currentLine].push({ content: word, type: parentType });\n }\n });\n });\n } else if (node.type === \"strong\" || node.type === \"emphasis\") {\n node.children.forEach((contentNode) => {\n processNode(contentNode, node.type);\n });\n }\n }\n children.forEach((treeNode) => {\n if (treeNode.type === \"paragraph\") {\n treeNode.children.forEach((contentNode) => {\n processNode(contentNode);\n });\n }\n });\n return lines;\n}\nfunction markdownToHTML(markdown) {\n const { children } = fromMarkdown(markdown);\n function output(node) {\n if (node.type === \"text\") {\n return node.value.replace(/\\n/g, \"
\");\n } else if (node.type === \"strong\") {\n return `${node.children.map(output).join(\"\")}`;\n } else if (node.type === \"emphasis\") {\n return `${node.children.map(output).join(\"\")}`;\n } else if (node.type === \"paragraph\") {\n return `${node.children.map(output).join(\"\")}
`;\n }\n return `Unsupported markdown: ${node.type}`;\n }\n return children.map(output).join(\"\");\n}\nfunction splitTextToChars(text) {\n if (Intl.Segmenter) {\n return [...new Intl.Segmenter().segment(text)].map((s) => s.segment);\n }\n return [...text];\n}\nfunction splitWordToFitWidth(checkFit, word) {\n const characters = splitTextToChars(word.content);\n return splitWordToFitWidthRecursion(checkFit, [], characters, word.type);\n}\nfunction splitWordToFitWidthRecursion(checkFit, usedChars, remainingChars, type) {\n if (remainingChars.length === 0) {\n return [\n { content: usedChars.join(\"\"), type },\n { content: \"\", type }\n ];\n }\n const [nextChar, ...rest] = remainingChars;\n const newWord = [...usedChars, nextChar];\n if (checkFit([{ content: newWord.join(\"\"), type }])) {\n return splitWordToFitWidthRecursion(checkFit, newWord, rest, type);\n }\n if (usedChars.length === 0 && nextChar) {\n usedChars.push(nextChar);\n remainingChars.shift();\n }\n return [\n { content: usedChars.join(\"\"), type },\n { content: remainingChars.join(\"\"), type }\n ];\n}\nfunction splitLineToFitWidth(line, checkFit) {\n if (line.some(({ content }) => content.includes(\"\\n\"))) {\n throw new Error(\"splitLineToFitWidth does not support newlines in the line\");\n }\n return splitLineToFitWidthRecursion(line, checkFit);\n}\nfunction splitLineToFitWidthRecursion(words, checkFit, lines = [], newLine = []) {\n if (words.length === 0) {\n if (newLine.length > 0) {\n lines.push(newLine);\n }\n return lines.length > 0 ? lines : [];\n }\n let joiner = \"\";\n if (words[0].content === \" \") {\n joiner = \" \";\n words.shift();\n }\n const nextWord = words.shift() ?? { content: \" \", type: \"normal\" };\n const lineWithNextWord = [...newLine];\n if (joiner !== \"\") {\n lineWithNextWord.push({ content: joiner, type: \"normal\" });\n }\n lineWithNextWord.push(nextWord);\n if (checkFit(lineWithNextWord)) {\n return splitLineToFitWidthRecursion(words, checkFit, lines, lineWithNextWord);\n }\n if (newLine.length > 0) {\n lines.push(newLine);\n words.unshift(nextWord);\n } else if (nextWord.content) {\n const [line, rest] = splitWordToFitWidth(checkFit, nextWord);\n lines.push([line]);\n if (rest.content) {\n words.unshift(rest);\n }\n }\n return splitLineToFitWidthRecursion(words, checkFit, lines);\n}\nfunction applyStyle(dom, styleFn) {\n if (styleFn) {\n dom.attr(\"style\", styleFn);\n }\n}\nfunction addHtmlSpan(element, node, width, classes, addBackground = false) {\n const fo = element.append(\"foreignObject\");\n const div = fo.append(\"xhtml:div\");\n const label = node.label;\n const labelClass = node.isNode ? 
\"nodeLabel\" : \"edgeLabel\";\n div.html(\n `\n \" + label + \"\"\n );\n applyStyle(div, node.labelStyle);\n div.style(\"display\", \"table-cell\");\n div.style(\"white-space\", \"nowrap\");\n div.style(\"max-width\", width + \"px\");\n div.attr(\"xmlns\", \"http://www.w3.org/1999/xhtml\");\n if (addBackground) {\n div.attr(\"class\", \"labelBkg\");\n }\n let bbox = div.node().getBoundingClientRect();\n if (bbox.width === width) {\n div.style(\"display\", \"table\");\n div.style(\"white-space\", \"break-spaces\");\n div.style(\"width\", width + \"px\");\n bbox = div.node().getBoundingClientRect();\n }\n fo.style(\"width\", bbox.width);\n fo.style(\"height\", bbox.height);\n return fo.node();\n}\nfunction createTspan(textElement, lineIndex, lineHeight) {\n return textElement.append(\"tspan\").attr(\"class\", \"text-outer-tspan\").attr(\"x\", 0).attr(\"y\", lineIndex * lineHeight - 0.1 + \"em\").attr(\"dy\", lineHeight + \"em\");\n}\nfunction computeWidthOfText(parentNode, lineHeight, line) {\n const testElement = parentNode.append(\"text\");\n const testSpan = createTspan(testElement, 1, lineHeight);\n updateTextContentAndStyles(testSpan, line);\n const textLength = testSpan.node().getComputedTextLength();\n testElement.remove();\n return textLength;\n}\nfunction computeDimensionOfText(parentNode, lineHeight, text) {\n var _a;\n const testElement = parentNode.append(\"text\");\n const testSpan = createTspan(testElement, 1, lineHeight);\n updateTextContentAndStyles(testSpan, [{ content: text, type: \"normal\" }]);\n const textDimension = (_a = testSpan.node()) == null ? void 0 : _a.getBoundingClientRect();\n if (textDimension) {\n testElement.remove();\n }\n return textDimension;\n}\nfunction createFormattedText(width, g, structuredText, addBackground = false) {\n const lineHeight = 1.1;\n const labelGroup = g.append(\"g\");\n const bkg = labelGroup.insert(\"rect\").attr(\"class\", \"background\");\n const textElement = labelGroup.append(\"text\").attr(\"y\", \"-10.1\");\n let lineIndex = 0;\n for (const line of structuredText) {\n const checkWidth = (line2) => computeWidthOfText(labelGroup, lineHeight, line2) <= width;\n const linesUnderWidth = checkWidth(line) ? [line] : splitLineToFitWidth(line, checkWidth);\n for (const preparedLine of linesUnderWidth) {\n const tspan = createTspan(textElement, lineIndex, lineHeight);\n updateTextContentAndStyles(tspan, preparedLine);\n lineIndex++;\n }\n }\n if (addBackground) {\n const bbox = textElement.node().getBBox();\n const padding = 2;\n bkg.attr(\"x\", -padding).attr(\"y\", -padding).attr(\"width\", bbox.width + 2 * padding).attr(\"height\", bbox.height + 2 * padding);\n return labelGroup.node();\n } else {\n return textElement.node();\n }\n}\nfunction updateTextContentAndStyles(tspan, wrappedLine) {\n tspan.text(\"\");\n wrappedLine.forEach((word, index) => {\n const innerTspan = tspan.append(\"tspan\").attr(\"font-style\", word.type === \"emphasis\" ? \"italic\" : \"normal\").attr(\"class\", \"text-inner-tspan\").attr(\"font-weight\", word.type === \"strong\" ? 
\"bold\" : \"normal\");\n if (index === 0) {\n innerTspan.text(word.content);\n } else {\n innerTspan.text(\" \" + word.content);\n }\n });\n}\nconst createText = (el, text = \"\", {\n style = \"\",\n isTitle = false,\n classes = \"\",\n useHtmlLabels = true,\n isNode = true,\n width = 200,\n addSvgBackground = false\n} = {}) => {\n log.info(\"createText\", text, style, isTitle, classes, useHtmlLabels, isNode, addSvgBackground);\n if (useHtmlLabels) {\n const htmlText = markdownToHTML(text);\n const node = {\n isNode,\n label: decodeEntities(htmlText).replace(\n /fa[blrs]?:fa-[\\w-]+/g,\n // cspell: disable-line\n (s) => ``\n ),\n labelStyle: style.replace(\"fill:\", \"color:\")\n };\n const vertexNode = addHtmlSpan(el, node, width, classes, addSvgBackground);\n return vertexNode;\n } else {\n const structuredText = markdownToLines(text);\n const svgLabel = createFormattedText(width, el, structuredText, addSvgBackground);\n return svgLabel;\n }\n};\nexport {\n createText as a,\n computeDimensionOfText as c\n};\n","/**\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Value} Value\n *\n * @typedef {import('unist').Parent} UnistParent\n * @typedef {import('unist').Point} Point\n *\n * @typedef {import('mdast').PhrasingContent} PhrasingContent\n * @typedef {import('mdast').StaticPhrasingContent} StaticPhrasingContent\n * @typedef {import('mdast').Content} Content\n * @typedef {import('mdast').Break} Break\n * @typedef {import('mdast').Blockquote} Blockquote\n * @typedef {import('mdast').Code} Code\n * @typedef {import('mdast').Definition} Definition\n * @typedef {import('mdast').Emphasis} Emphasis\n * @typedef {import('mdast').Heading} Heading\n * @typedef {import('mdast').HTML} HTML\n * @typedef {import('mdast').Image} Image\n * @typedef {import('mdast').ImageReference} ImageReference\n * @typedef {import('mdast').InlineCode} InlineCode\n * @typedef {import('mdast').Link} Link\n * @typedef {import('mdast').LinkReference} LinkReference\n * @typedef {import('mdast').List} List\n * @typedef {import('mdast').ListItem} ListItem\n * @typedef {import('mdast').Paragraph} Paragraph\n * @typedef {import('mdast').Root} Root\n * @typedef {import('mdast').Strong} Strong\n * @typedef {import('mdast').Text} Text\n * @typedef {import('mdast').ThematicBreak} ThematicBreak\n * @typedef {import('mdast').ReferenceType} ReferenceType\n * @typedef {import('../index.js').CompileData} CompileData\n */\n\n/**\n * @typedef {Root | Content} Node\n * @typedef {Extract} Parent\n *\n * @typedef {Omit & {type: 'fragment', children: Array}} Fragment\n */\n\n/**\n * @callback Transform\n * Extra transform, to change the AST afterwards.\n * @param {Root} tree\n * Tree to transform.\n * @returns {Root | undefined | null | void}\n * New tree or nothing (in which case the current tree is used).\n *\n * @callback Handle\n * Handle a token.\n * @param {CompileContext} this\n * Context.\n * @param {Token} token\n * Current token.\n * @returns {void}\n * Nothing.\n *\n * @typedef {Record} Handles\n * Token types mapping to handles\n *\n * @callback OnEnterError\n * Handle the case where the `right` token is open, but it is closed (by the\n * `left` token) or because we reached the end of the document.\n * @param 
{Omit} this\n * Context.\n * @param {Token | undefined} left\n * Left token.\n * @param {Token} right\n * Right token.\n * @returns {void}\n * Nothing.\n *\n * @callback OnExitError\n * Handle the case where the `right` token is open but it is closed by\n * exiting the `left` token.\n * @param {Omit} this\n * Context.\n * @param {Token} left\n * Left token.\n * @param {Token} right\n * Right token.\n * @returns {void}\n * Nothing.\n *\n * @typedef {[Token, OnEnterError | undefined]} TokenTuple\n * Open token on the stack, with an optional error handler for when\n * that token isn’t closed properly.\n */\n\n/**\n * @typedef Config\n * Configuration.\n *\n * We have our defaults, but extensions will add more.\n * @property {Array} canContainEols\n * Token types where line endings are used.\n * @property {Handles} enter\n * Opening handles.\n * @property {Handles} exit\n * Closing handles.\n * @property {Array} transforms\n * Tree transforms.\n *\n * @typedef {Partial} Extension\n * Change how markdown tokens from micromark are turned into mdast.\n *\n * @typedef CompileContext\n * mdast compiler context.\n * @property {Array} stack\n * Stack of nodes.\n * @property {Array} tokenStack\n * Stack of tokens.\n * @property {(key: Key) => CompileData[Key]} getData\n * Get data from the key/value store.\n * @property {(key: Key, value?: CompileData[Key]) => void} setData\n * Set data into the key/value store.\n * @property {(this: CompileContext) => void} buffer\n * Capture some of the output data.\n * @property {(this: CompileContext) => string} resume\n * Stop capturing and access the output data.\n * @property {(this: CompileContext, node: Kind, token: Token, onError?: OnEnterError) => Kind} enter\n * Enter a token.\n * @property {(this: CompileContext, token: Token, onError?: OnExitError) => Node} exit\n * Exit a token.\n * @property {TokenizeContext['sliceSerialize']} sliceSerialize\n * Get the string value of a token.\n * @property {Config} config\n * Configuration.\n *\n * @typedef FromMarkdownOptions\n * Configuration for how to build mdast.\n * @property {Array> | null | undefined} [mdastExtensions]\n * Extensions for this utility to change how tokens are turned into a tree.\n *\n * @typedef {ParseOptions & FromMarkdownOptions} Options\n * Configuration.\n */\n\n// To do: micromark: create a registry of tokens?\n// To do: next major: don’t return given `Node` from `enter`.\n// To do: next major: remove setter/getter.\n\nimport {toString} from 'mdast-util-to-string'\nimport {parse} from 'micromark/lib/parse.js'\nimport {preprocess} from 'micromark/lib/preprocess.js'\nimport {postprocess} from 'micromark/lib/postprocess.js'\nimport {decodeNumericCharacterReference} from 'micromark-util-decode-numeric-character-reference'\nimport {decodeString} from 'micromark-util-decode-string'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\nimport {decodeNamedCharacterReference} from 'decode-named-character-reference'\nimport {stringifyPosition} from 'unist-util-stringify-position'\nconst own = {}.hasOwnProperty\n\n/**\n * @param value\n * Markdown to parse.\n * @param encoding\n * Character encoding for when `value` is `Buffer`.\n * @param options\n * Configuration.\n * @returns\n * mdast tree.\n */\nexport const fromMarkdown =\n /**\n * @type {(\n * ((value: Value, encoding: Encoding, options?: Options | null | undefined) => Root) &\n * ((value: Value, options?: Options | null | undefined) => Root)\n * )}\n */\n\n /**\n * @param {Value} value\n * @param {Encoding | Options | 
null | undefined} [encoding]\n * @param {Options | null | undefined} [options]\n * @returns {Root}\n */\n function (value, encoding, options) {\n if (typeof encoding !== 'string') {\n options = encoding\n encoding = undefined\n }\n return compiler(options)(\n postprocess(\n parse(options).document().write(preprocess()(value, encoding, true))\n )\n )\n }\n\n/**\n * Note this compiler only understand complete buffering, not streaming.\n *\n * @param {Options | null | undefined} [options]\n */\nfunction compiler(options) {\n /** @type {Config} */\n const config = {\n transforms: [],\n canContainEols: ['emphasis', 'fragment', 'heading', 'paragraph', 'strong'],\n enter: {\n autolink: opener(link),\n autolinkProtocol: onenterdata,\n autolinkEmail: onenterdata,\n atxHeading: opener(heading),\n blockQuote: opener(blockQuote),\n characterEscape: onenterdata,\n characterReference: onenterdata,\n codeFenced: opener(codeFlow),\n codeFencedFenceInfo: buffer,\n codeFencedFenceMeta: buffer,\n codeIndented: opener(codeFlow, buffer),\n codeText: opener(codeText, buffer),\n codeTextData: onenterdata,\n data: onenterdata,\n codeFlowValue: onenterdata,\n definition: opener(definition),\n definitionDestinationString: buffer,\n definitionLabelString: buffer,\n definitionTitleString: buffer,\n emphasis: opener(emphasis),\n hardBreakEscape: opener(hardBreak),\n hardBreakTrailing: opener(hardBreak),\n htmlFlow: opener(html, buffer),\n htmlFlowData: onenterdata,\n htmlText: opener(html, buffer),\n htmlTextData: onenterdata,\n image: opener(image),\n label: buffer,\n link: opener(link),\n listItem: opener(listItem),\n listItemValue: onenterlistitemvalue,\n listOrdered: opener(list, onenterlistordered),\n listUnordered: opener(list),\n paragraph: opener(paragraph),\n reference: onenterreference,\n referenceString: buffer,\n resourceDestinationString: buffer,\n resourceTitleString: buffer,\n setextHeading: opener(heading),\n strong: opener(strong),\n thematicBreak: opener(thematicBreak)\n },\n exit: {\n atxHeading: closer(),\n atxHeadingSequence: onexitatxheadingsequence,\n autolink: closer(),\n autolinkEmail: onexitautolinkemail,\n autolinkProtocol: onexitautolinkprotocol,\n blockQuote: closer(),\n characterEscapeValue: onexitdata,\n characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,\n characterReferenceMarkerNumeric: onexitcharacterreferencemarker,\n characterReferenceValue: onexitcharacterreferencevalue,\n codeFenced: closer(onexitcodefenced),\n codeFencedFence: onexitcodefencedfence,\n codeFencedFenceInfo: onexitcodefencedfenceinfo,\n codeFencedFenceMeta: onexitcodefencedfencemeta,\n codeFlowValue: onexitdata,\n codeIndented: closer(onexitcodeindented),\n codeText: closer(onexitcodetext),\n codeTextData: onexitdata,\n data: onexitdata,\n definition: closer(),\n definitionDestinationString: onexitdefinitiondestinationstring,\n definitionLabelString: onexitdefinitionlabelstring,\n definitionTitleString: onexitdefinitiontitlestring,\n emphasis: closer(),\n hardBreakEscape: closer(onexithardbreak),\n hardBreakTrailing: closer(onexithardbreak),\n htmlFlow: closer(onexithtmlflow),\n htmlFlowData: onexitdata,\n htmlText: closer(onexithtmltext),\n htmlTextData: onexitdata,\n image: closer(onexitimage),\n label: onexitlabel,\n labelText: onexitlabeltext,\n lineEnding: onexitlineending,\n link: closer(onexitlink),\n listItem: closer(),\n listOrdered: closer(),\n listUnordered: closer(),\n paragraph: closer(),\n referenceString: onexitreferencestring,\n resourceDestinationString: 
onexitresourcedestinationstring,\n resourceTitleString: onexitresourcetitlestring,\n resource: onexitresource,\n setextHeading: closer(onexitsetextheading),\n setextHeadingLineSequence: onexitsetextheadinglinesequence,\n setextHeadingText: onexitsetextheadingtext,\n strong: closer(),\n thematicBreak: closer()\n }\n }\n configure(config, (options || {}).mdastExtensions || [])\n\n /** @type {CompileData} */\n const data = {}\n return compile\n\n /**\n * Turn micromark events into an mdast tree.\n *\n * @param {Array} events\n * Events.\n * @returns {Root}\n * mdast tree.\n */\n function compile(events) {\n /** @type {Root} */\n let tree = {\n type: 'root',\n children: []\n }\n /** @type {Omit} */\n const context = {\n stack: [tree],\n tokenStack: [],\n config,\n enter,\n exit,\n buffer,\n resume,\n setData,\n getData\n }\n /** @type {Array} */\n const listStack = []\n let index = -1\n while (++index < events.length) {\n // We preprocess lists to add `listItem` tokens, and to infer whether\n // items the list itself are spread out.\n if (\n events[index][1].type === 'listOrdered' ||\n events[index][1].type === 'listUnordered'\n ) {\n if (events[index][0] === 'enter') {\n listStack.push(index)\n } else {\n const tail = listStack.pop()\n index = prepareList(events, tail, index)\n }\n }\n }\n index = -1\n while (++index < events.length) {\n const handler = config[events[index][0]]\n if (own.call(handler, events[index][1].type)) {\n handler[events[index][1].type].call(\n Object.assign(\n {\n sliceSerialize: events[index][2].sliceSerialize\n },\n context\n ),\n events[index][1]\n )\n }\n }\n\n // Handle tokens still being open.\n if (context.tokenStack.length > 0) {\n const tail = context.tokenStack[context.tokenStack.length - 1]\n const handler = tail[1] || defaultOnError\n handler.call(context, undefined, tail[0])\n }\n\n // Figure out `root` position.\n tree.position = {\n start: point(\n events.length > 0\n ? events[0][1].start\n : {\n line: 1,\n column: 1,\n offset: 0\n }\n ),\n end: point(\n events.length > 0\n ? 
events[events.length - 2][1].end\n : {\n line: 1,\n column: 1,\n offset: 0\n }\n )\n }\n\n // Call transforms.\n index = -1\n while (++index < config.transforms.length) {\n tree = config.transforms[index](tree) || tree\n }\n return tree\n }\n\n /**\n * @param {Array} events\n * @param {number} start\n * @param {number} length\n * @returns {number}\n */\n function prepareList(events, start, length) {\n let index = start - 1\n let containerBalance = -1\n let listSpread = false\n /** @type {Token | undefined} */\n let listItem\n /** @type {number | undefined} */\n let lineIndex\n /** @type {number | undefined} */\n let firstBlankLineIndex\n /** @type {boolean | undefined} */\n let atMarker\n while (++index <= length) {\n const event = events[index]\n if (\n event[1].type === 'listUnordered' ||\n event[1].type === 'listOrdered' ||\n event[1].type === 'blockQuote'\n ) {\n if (event[0] === 'enter') {\n containerBalance++\n } else {\n containerBalance--\n }\n atMarker = undefined\n } else if (event[1].type === 'lineEndingBlank') {\n if (event[0] === 'enter') {\n if (\n listItem &&\n !atMarker &&\n !containerBalance &&\n !firstBlankLineIndex\n ) {\n firstBlankLineIndex = index\n }\n atMarker = undefined\n }\n } else if (\n event[1].type === 'linePrefix' ||\n event[1].type === 'listItemValue' ||\n event[1].type === 'listItemMarker' ||\n event[1].type === 'listItemPrefix' ||\n event[1].type === 'listItemPrefixWhitespace'\n ) {\n // Empty.\n } else {\n atMarker = undefined\n }\n if (\n (!containerBalance &&\n event[0] === 'enter' &&\n event[1].type === 'listItemPrefix') ||\n (containerBalance === -1 &&\n event[0] === 'exit' &&\n (event[1].type === 'listUnordered' ||\n event[1].type === 'listOrdered'))\n ) {\n if (listItem) {\n let tailIndex = index\n lineIndex = undefined\n while (tailIndex--) {\n const tailEvent = events[tailIndex]\n if (\n tailEvent[1].type === 'lineEnding' ||\n tailEvent[1].type === 'lineEndingBlank'\n ) {\n if (tailEvent[0] === 'exit') continue\n if (lineIndex) {\n events[lineIndex][1].type = 'lineEndingBlank'\n listSpread = true\n }\n tailEvent[1].type = 'lineEnding'\n lineIndex = tailIndex\n } else if (\n tailEvent[1].type === 'linePrefix' ||\n tailEvent[1].type === 'blockQuotePrefix' ||\n tailEvent[1].type === 'blockQuotePrefixWhitespace' ||\n tailEvent[1].type === 'blockQuoteMarker' ||\n tailEvent[1].type === 'listItemIndent'\n ) {\n // Empty\n } else {\n break\n }\n }\n if (\n firstBlankLineIndex &&\n (!lineIndex || firstBlankLineIndex < lineIndex)\n ) {\n listItem._spread = true\n }\n\n // Fix position.\n listItem.end = Object.assign(\n {},\n lineIndex ? 
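/*
 * Illustrative sketch only, not part of the bundled sources: the shape of the tree
 * the compiler above produces for a small input. Node types come from the
 * "creaters" defined further down (paragraph, emphasis, strong, text).
 */
const exampleTree = fromMarkdown("*hello* **world**");
// exampleTree.type === "root"
// exampleTree.children[0].type === "paragraph"
// exampleTree.children[0].children.map((n) => n.type)
//   => ["emphasis", "text", "strong"]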
events[lineIndex][1].start : event[1].end\n )\n events.splice(lineIndex || index, 0, ['exit', listItem, event[2]])\n index++\n length++\n }\n\n // Create a new list item.\n if (event[1].type === 'listItemPrefix') {\n listItem = {\n type: 'listItem',\n _spread: false,\n start: Object.assign({}, event[1].start),\n // @ts-expect-error: we’ll add `end` in a second.\n end: undefined\n }\n // @ts-expect-error: `listItem` is most definitely defined, TS...\n events.splice(index, 0, ['enter', listItem, event[2]])\n index++\n length++\n firstBlankLineIndex = undefined\n atMarker = true\n }\n }\n }\n events[start][1]._spread = listSpread\n return length\n }\n\n /**\n * Set data.\n *\n * @template {keyof CompileData} Key\n * Field type.\n * @param {Key} key\n * Key of field.\n * @param {CompileData[Key]} [value]\n * New value.\n * @returns {void}\n * Nothing.\n */\n function setData(key, value) {\n data[key] = value\n }\n\n /**\n * Get data.\n *\n * @template {keyof CompileData} Key\n * Field type.\n * @param {Key} key\n * Key of field.\n * @returns {CompileData[Key]}\n * Value.\n */\n function getData(key) {\n return data[key]\n }\n\n /**\n * Create an opener handle.\n *\n * @param {(token: Token) => Node} create\n * Create a node.\n * @param {Handle} [and]\n * Optional function to also run.\n * @returns {Handle}\n * Handle.\n */\n function opener(create, and) {\n return open\n\n /**\n * @this {CompileContext}\n * @param {Token} token\n * @returns {void}\n */\n function open(token) {\n enter.call(this, create(token), token)\n if (and) and.call(this, token)\n }\n }\n\n /**\n * @this {CompileContext}\n * @returns {void}\n */\n function buffer() {\n this.stack.push({\n type: 'fragment',\n children: []\n })\n }\n\n /**\n * @template {Node} Kind\n * Node type.\n * @this {CompileContext}\n * Context.\n * @param {Kind} node\n * Node to enter.\n * @param {Token} token\n * Corresponding token.\n * @param {OnEnterError | undefined} [errorHandler]\n * Handle the case where this token is open, but it is closed by something else.\n * @returns {Kind}\n * The given node.\n */\n function enter(node, token, errorHandler) {\n const parent = this.stack[this.stack.length - 1]\n // @ts-expect-error: Assume `Node` can exist as a child of `parent`.\n parent.children.push(node)\n this.stack.push(node)\n this.tokenStack.push([token, errorHandler])\n // @ts-expect-error: `end` will be patched later.\n node.position = {\n start: point(token.start)\n }\n return node\n }\n\n /**\n * Create a closer handle.\n *\n * @param {Handle} [and]\n * Optional function to also run.\n * @returns {Handle}\n * Handle.\n */\n function closer(and) {\n return close\n\n /**\n * @this {CompileContext}\n * @param {Token} token\n * @returns {void}\n */\n function close(token) {\n if (and) and.call(this, token)\n exit.call(this, token)\n }\n }\n\n /**\n * @this {CompileContext}\n * Context.\n * @param {Token} token\n * Corresponding token.\n * @param {OnExitError | undefined} [onExitError]\n * Handle the case where another token is open.\n * @returns {Node}\n * The closed node.\n */\n function exit(token, onExitError) {\n const node = this.stack.pop()\n const open = this.tokenStack.pop()\n if (!open) {\n throw new Error(\n 'Cannot close `' +\n token.type +\n '` (' +\n stringifyPosition({\n start: token.start,\n end: token.end\n }) +\n '): it’s not open'\n )\n } else if (open[0].type !== token.type) {\n if (onExitError) {\n onExitError.call(this, token, open[0])\n } else {\n const handler = open[1] || defaultOnError\n handler.call(this, token, 
open[0])\n }\n }\n node.position.end = point(token.end)\n return node\n }\n\n /**\n * @this {CompileContext}\n * @returns {string}\n */\n function resume() {\n return toString(this.stack.pop())\n }\n\n //\n // Handlers.\n //\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onenterlistordered() {\n setData('expectingFirstListItemValue', true)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onenterlistitemvalue(token) {\n if (getData('expectingFirstListItemValue')) {\n const ancestor = this.stack[this.stack.length - 2]\n ancestor.start = Number.parseInt(this.sliceSerialize(token), 10)\n setData('expectingFirstListItemValue')\n }\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcodefencedfenceinfo() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.lang = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcodefencedfencemeta() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.meta = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcodefencedfence() {\n // Exit if this is the closing fence.\n if (getData('flowCodeInside')) return\n this.buffer()\n setData('flowCodeInside', true)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcodefenced() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.value = data.replace(/^(\\r?\\n|\\r)|(\\r?\\n|\\r)$/g, '')\n setData('flowCodeInside')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcodeindented() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.value = data.replace(/(\\r?\\n|\\r)$/g, '')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitdefinitionlabelstring(token) {\n const label = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.label = label\n node.identifier = normalizeIdentifier(\n this.sliceSerialize(token)\n ).toLowerCase()\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitdefinitiontitlestring() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.title = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitdefinitiondestinationstring() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.url = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitatxheadingsequence(token) {\n const node = this.stack[this.stack.length - 1]\n if (!node.depth) {\n const depth = this.sliceSerialize(token).length\n node.depth = depth\n }\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitsetextheadingtext() {\n setData('setextHeadingSlurpLineEnding', true)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitsetextheadinglinesequence(token) {\n const node = this.stack[this.stack.length - 1]\n node.depth = this.sliceSerialize(token).charCodeAt(0) === 61 ? 
1 : 2\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitsetextheading() {\n setData('setextHeadingSlurpLineEnding')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onenterdata(token) {\n const node = this.stack[this.stack.length - 1]\n let tail = node.children[node.children.length - 1]\n if (!tail || tail.type !== 'text') {\n // Add a new text node.\n tail = text()\n // @ts-expect-error: we’ll add `end` later.\n tail.position = {\n start: point(token.start)\n }\n // @ts-expect-error: Assume `parent` accepts `text`.\n node.children.push(tail)\n }\n this.stack.push(tail)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitdata(token) {\n const tail = this.stack.pop()\n tail.value += this.sliceSerialize(token)\n tail.position.end = point(token.end)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitlineending(token) {\n const context = this.stack[this.stack.length - 1]\n // If we’re at a hard break, include the line ending in there.\n if (getData('atHardBreak')) {\n const tail = context.children[context.children.length - 1]\n tail.position.end = point(token.end)\n setData('atHardBreak')\n return\n }\n if (\n !getData('setextHeadingSlurpLineEnding') &&\n config.canContainEols.includes(context.type)\n ) {\n onenterdata.call(this, token)\n onexitdata.call(this, token)\n }\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexithardbreak() {\n setData('atHardBreak', true)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexithtmlflow() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.value = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexithtmltext() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.value = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitcodetext() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.value = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitlink() {\n const node = this.stack[this.stack.length - 1]\n // Note: there are also `identifier` and `label` fields on this link node!\n // These are used / cleaned here.\n // To do: clean.\n if (getData('inReference')) {\n /** @type {ReferenceType} */\n const referenceType = getData('referenceType') || 'shortcut'\n node.type += 'Reference'\n // @ts-expect-error: mutate.\n node.referenceType = referenceType\n // @ts-expect-error: mutate.\n delete node.url\n delete node.title\n } else {\n // @ts-expect-error: mutate.\n delete node.identifier\n // @ts-expect-error: mutate.\n delete node.label\n }\n setData('referenceType')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitimage() {\n const node = this.stack[this.stack.length - 1]\n // Note: there are also `identifier` and `label` fields on this link node!\n // These are used / cleaned here.\n // To do: clean.\n if (getData('inReference')) {\n /** @type {ReferenceType} */\n const referenceType = getData('referenceType') || 'shortcut'\n node.type += 'Reference'\n // @ts-expect-error: mutate.\n node.referenceType = referenceType\n // @ts-expect-error: mutate.\n delete node.url\n delete node.title\n } else {\n // @ts-expect-error: mutate.\n delete node.identifier\n // @ts-expect-error: mutate.\n delete node.label\n }\n setData('referenceType')\n }\n\n /**\n * 
@this {CompileContext}\n * @type {Handle}\n */\n\n function onexitlabeltext(token) {\n const string = this.sliceSerialize(token)\n const ancestor = this.stack[this.stack.length - 2]\n // @ts-expect-error: stash this on the node, as it might become a reference\n // later.\n ancestor.label = decodeString(string)\n // @ts-expect-error: same as above.\n ancestor.identifier = normalizeIdentifier(string).toLowerCase()\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitlabel() {\n const fragment = this.stack[this.stack.length - 1]\n const value = this.resume()\n const node = this.stack[this.stack.length - 1]\n // Assume a reference.\n setData('inReference', true)\n if (node.type === 'link') {\n /** @type {Array} */\n // @ts-expect-error: Assume static phrasing content.\n const children = fragment.children\n node.children = children\n } else {\n node.alt = value\n }\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitresourcedestinationstring() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.url = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitresourcetitlestring() {\n const data = this.resume()\n const node = this.stack[this.stack.length - 1]\n node.title = data\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitresource() {\n setData('inReference')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onenterreference() {\n setData('referenceType', 'collapsed')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitreferencestring(token) {\n const label = this.resume()\n const node = this.stack[this.stack.length - 1]\n // @ts-expect-error: stash this on the node, as it might become a reference\n // later.\n node.label = label\n // @ts-expect-error: same as above.\n node.identifier = normalizeIdentifier(\n this.sliceSerialize(token)\n ).toLowerCase()\n setData('referenceType', 'full')\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n\n function onexitcharacterreferencemarker(token) {\n setData('characterReferenceType', token.type)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitcharacterreferencevalue(token) {\n const data = this.sliceSerialize(token)\n const type = getData('characterReferenceType')\n /** @type {string} */\n let value\n if (type) {\n value = decodeNumericCharacterReference(\n data,\n type === 'characterReferenceMarkerNumeric' ? 
10 : 16\n )\n setData('characterReferenceType')\n } else {\n const result = decodeNamedCharacterReference(data)\n value = result\n }\n const tail = this.stack.pop()\n tail.value += value\n tail.position.end = point(token.end)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitautolinkprotocol(token) {\n onexitdata.call(this, token)\n const node = this.stack[this.stack.length - 1]\n node.url = this.sliceSerialize(token)\n }\n\n /**\n * @this {CompileContext}\n * @type {Handle}\n */\n function onexitautolinkemail(token) {\n onexitdata.call(this, token)\n const node = this.stack[this.stack.length - 1]\n node.url = 'mailto:' + this.sliceSerialize(token)\n }\n\n //\n // Creaters.\n //\n\n /** @returns {Blockquote} */\n function blockQuote() {\n return {\n type: 'blockquote',\n children: []\n }\n }\n\n /** @returns {Code} */\n function codeFlow() {\n return {\n type: 'code',\n lang: null,\n meta: null,\n value: ''\n }\n }\n\n /** @returns {InlineCode} */\n function codeText() {\n return {\n type: 'inlineCode',\n value: ''\n }\n }\n\n /** @returns {Definition} */\n function definition() {\n return {\n type: 'definition',\n identifier: '',\n label: null,\n title: null,\n url: ''\n }\n }\n\n /** @returns {Emphasis} */\n function emphasis() {\n return {\n type: 'emphasis',\n children: []\n }\n }\n\n /** @returns {Heading} */\n function heading() {\n // @ts-expect-error `depth` will be set later.\n return {\n type: 'heading',\n depth: undefined,\n children: []\n }\n }\n\n /** @returns {Break} */\n function hardBreak() {\n return {\n type: 'break'\n }\n }\n\n /** @returns {HTML} */\n function html() {\n return {\n type: 'html',\n value: ''\n }\n }\n\n /** @returns {Image} */\n function image() {\n return {\n type: 'image',\n title: null,\n url: '',\n alt: null\n }\n }\n\n /** @returns {Link} */\n function link() {\n return {\n type: 'link',\n title: null,\n url: '',\n children: []\n }\n }\n\n /**\n * @param {Token} token\n * @returns {List}\n */\n function list(token) {\n return {\n type: 'list',\n ordered: token.type === 'listOrdered',\n start: null,\n spread: token._spread,\n children: []\n }\n }\n\n /**\n * @param {Token} token\n * @returns {ListItem}\n */\n function listItem(token) {\n return {\n type: 'listItem',\n spread: token._spread,\n checked: null,\n children: []\n }\n }\n\n /** @returns {Paragraph} */\n function paragraph() {\n return {\n type: 'paragraph',\n children: []\n }\n }\n\n /** @returns {Strong} */\n function strong() {\n return {\n type: 'strong',\n children: []\n }\n }\n\n /** @returns {Text} */\n function text() {\n return {\n type: 'text',\n value: ''\n }\n }\n\n /** @returns {ThematicBreak} */\n function thematicBreak() {\n return {\n type: 'thematicBreak'\n }\n }\n}\n\n/**\n * Copy a point-like value.\n *\n * @param {Point} d\n * Point-like value.\n * @returns {Point}\n * unist point.\n */\nfunction point(d) {\n return {\n line: d.line,\n column: d.column,\n offset: d.offset\n }\n}\n\n/**\n * @param {Config} combined\n * @param {Array>} extensions\n * @returns {void}\n */\nfunction configure(combined, extensions) {\n let index = -1\n while (++index < extensions.length) {\n const value = extensions[index]\n if (Array.isArray(value)) {\n configure(combined, value)\n } else {\n extension(combined, value)\n }\n }\n}\n\n/**\n * @param {Config} combined\n * @param {Extension} extension\n * @returns {void}\n */\nfunction extension(combined, extension) {\n /** @type {keyof Extension} */\n let key\n for (key in extension) {\n if (own.call(extension, 
key)) {\n if (key === 'canContainEols') {\n const right = extension[key]\n if (right) {\n combined[key].push(...right)\n }\n } else if (key === 'transforms') {\n const right = extension[key]\n if (right) {\n combined[key].push(...right)\n }\n } else if (key === 'enter' || key === 'exit') {\n const right = extension[key]\n if (right) {\n Object.assign(combined[key], right)\n }\n }\n }\n }\n}\n\n/** @type {OnEnterError} */\nfunction defaultOnError(left, right) {\n if (left) {\n throw new Error(\n 'Cannot close `' +\n left.type +\n '` (' +\n stringifyPosition({\n start: left.start,\n end: left.end\n }) +\n '): a different token (`' +\n right.type +\n '`, ' +\n stringifyPosition({\n start: right.start,\n end: right.end\n }) +\n ') is open'\n )\n } else {\n throw new Error(\n 'Cannot close document, a token (`' +\n right.type +\n '`, ' +\n stringifyPosition({\n start: right.start,\n end: right.end\n }) +\n ') is still open'\n )\n }\n}\n","/**\n * @typedef {import('mdast').Root|import('mdast').Content} Node\n *\n * @typedef Options\n * Configuration (optional).\n * @property {boolean | null | undefined} [includeImageAlt=true]\n * Whether to use `alt` for `image`s.\n * @property {boolean | null | undefined} [includeHtml=true]\n * Whether to use `value` of HTML.\n */\n\n/** @type {Options} */\nconst emptyOptions = {}\n\n/**\n * Get the text content of a node or list of nodes.\n *\n * Prefers the node’s plain-text fields, otherwise serializes its children,\n * and if the given value is an array, serialize the nodes in it.\n *\n * @param {unknown} value\n * Thing to serialize, typically `Node`.\n * @param {Options | null | undefined} [options]\n * Configuration (optional).\n * @returns {string}\n * Serialized `value`.\n */\nexport function toString(value, options) {\n const settings = options || emptyOptions\n const includeImageAlt =\n typeof settings.includeImageAlt === 'boolean'\n ? settings.includeImageAlt\n : true\n const includeHtml =\n typeof settings.includeHtml === 'boolean' ? settings.includeHtml : true\n\n return one(value, includeImageAlt, includeHtml)\n}\n\n/**\n * One node or several nodes.\n *\n * @param {unknown} value\n * Thing to serialize.\n * @param {boolean} includeImageAlt\n * Include image `alt`s.\n * @param {boolean} includeHtml\n * Include HTML.\n * @returns {string}\n * Serialized node.\n */\nfunction one(value, includeImageAlt, includeHtml) {\n if (node(value)) {\n if ('value' in value) {\n return value.type === 'html' && !includeHtml ? 
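/*
 * Illustrative sketch only, not part of the bundled sources: toString (used by
 * `resume()` in the compiler above) flattens a node, or a list of nodes, to plain
 * text. It prefers `value`, then image `alt`, then serializes children.
 */
toString({ type: "strong", children: [{ type: "text", value: "hi" }] });      // "hi"
toString({ type: "image", url: "x.png", alt: "logo" });                       // "logo"
toString([{ type: "text", value: "a" }, { type: "inlineCode", value: "b" }]); // "ab"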
'' : value.value\n }\n\n if (includeImageAlt && 'alt' in value && value.alt) {\n return value.alt\n }\n\n if ('children' in value) {\n return all(value.children, includeImageAlt, includeHtml)\n }\n }\n\n if (Array.isArray(value)) {\n return all(value, includeImageAlt, includeHtml)\n }\n\n return ''\n}\n\n/**\n * Serialize a list of nodes.\n *\n * @param {Array} values\n * Thing to serialize.\n * @param {boolean} includeImageAlt\n * Include image `alt`s.\n * @param {boolean} includeHtml\n * Include HTML.\n * @returns {string}\n * Serialized nodes.\n */\nfunction all(values, includeImageAlt, includeHtml) {\n /** @type {Array} */\n const result = []\n let index = -1\n\n while (++index < values.length) {\n result[index] = one(values[index], includeImageAlt, includeHtml)\n }\n\n return result.join('')\n}\n\n/**\n * Check if `value` looks like a node.\n *\n * @param {unknown} value\n * Thing.\n * @returns {value is Node}\n * Whether `value` is a node.\n */\nfunction node(value) {\n return Boolean(value && typeof value === 'object')\n}\n","/**\n * @typedef {import('micromark-util-types').Create} Create\n * @typedef {import('micromark-util-types').FullNormalizedExtension} FullNormalizedExtension\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').ParseOptions} ParseOptions\n */\n\nimport {combineExtensions} from 'micromark-util-combine-extensions'\nimport {content} from './initialize/content.js'\nimport {document} from './initialize/document.js'\nimport {flow} from './initialize/flow.js'\nimport {text, string} from './initialize/text.js'\nimport {createTokenizer} from './create-tokenizer.js'\nimport * as defaultConstructs from './constructs.js'\n\n/**\n * @param {ParseOptions | null | undefined} [options]\n * @returns {ParseContext}\n */\nexport function parse(options) {\n const settings = options || {}\n const constructs =\n /** @type {FullNormalizedExtension} */\n combineExtensions([defaultConstructs, ...(settings.extensions || [])])\n\n /** @type {ParseContext} */\n const parser = {\n defined: [],\n lazy: {},\n constructs,\n content: create(content),\n document: create(document),\n flow: create(flow),\n string: create(string),\n text: create(text)\n }\n return parser\n\n /**\n * @param {InitialConstruct} initial\n */\n function create(initial) {\n return creator\n /** @type {Create} */\n function creator(from) {\n return createTokenizer(parser, initial, from)\n }\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n * @typedef {import('micromark-util-types').Handles} Handles\n * @typedef {import('micromark-util-types').HtmlExtension} HtmlExtension\n * @typedef {import('micromark-util-types').NormalizedExtension} NormalizedExtension\n */\n\nimport {splice} from 'micromark-util-chunked'\n\nconst hasOwnProperty = {}.hasOwnProperty\n\n/**\n * Combine multiple syntax extensions into one.\n *\n * @param {Array} extensions\n * List of syntax extensions.\n * @returns {NormalizedExtension}\n * A single combined extension.\n */\nexport function combineExtensions(extensions) {\n /** @type {NormalizedExtension} */\n const all = {}\n let index = -1\n\n while (++index < extensions.length) {\n syntaxExtension(all, extensions[index])\n }\n\n return all\n}\n\n/**\n * Merge `extension` into `all`.\n *\n * @param {NormalizedExtension} all\n * Extension to merge into.\n * @param {Extension} extension\n * Extension to merge.\n * @returns 
{void}\n */\nfunction syntaxExtension(all, extension) {\n /** @type {keyof Extension} */\n let hook\n\n for (hook in extension) {\n const maybe = hasOwnProperty.call(all, hook) ? all[hook] : undefined\n /** @type {Record} */\n const left = maybe || (all[hook] = {})\n /** @type {Record | undefined} */\n const right = extension[hook]\n /** @type {string} */\n let code\n\n if (right) {\n for (code in right) {\n if (!hasOwnProperty.call(left, code)) left[code] = []\n const value = right[code]\n constructs(\n // @ts-expect-error Looks like a list.\n left[code],\n Array.isArray(value) ? value : value ? [value] : []\n )\n }\n }\n }\n}\n\n/**\n * Merge `list` into `existing` (both lists of constructs).\n * Mutates `existing`.\n *\n * @param {Array} existing\n * @param {Array} list\n * @returns {void}\n */\nfunction constructs(existing, list) {\n let index = -1\n /** @type {Array} */\n const before = []\n\n while (++index < list.length) {\n // @ts-expect-error Looks like an object.\n ;(list[index].add === 'after' ? existing : before).push(list[index])\n }\n\n splice(existing, 0, 0, before)\n}\n\n/**\n * Combine multiple HTML extensions into one.\n *\n * @param {Array} htmlExtensions\n * List of HTML extensions.\n * @returns {HtmlExtension}\n * A single combined HTML extension.\n */\nexport function combineHtmlExtensions(htmlExtensions) {\n /** @type {HtmlExtension} */\n const handlers = {}\n let index = -1\n\n while (++index < htmlExtensions.length) {\n htmlExtension(handlers, htmlExtensions[index])\n }\n\n return handlers\n}\n\n/**\n * Merge `extension` into `all`.\n *\n * @param {HtmlExtension} all\n * Extension to merge into.\n * @param {HtmlExtension} extension\n * Extension to merge.\n * @returns {void}\n */\nfunction htmlExtension(all, extension) {\n /** @type {keyof HtmlExtension} */\n let hook\n\n for (hook in extension) {\n const maybe = hasOwnProperty.call(all, hook) ? all[hook] : undefined\n const left = maybe || (all[hook] = {})\n const right = extension[hook]\n /** @type {keyof Handles} */\n let type\n\n if (right) {\n for (type in right) {\n // @ts-expect-error assume document vs regular handler are managed correctly.\n left[type] = right[type]\n }\n }\n }\n}\n","/**\n * Like `Array#splice`, but smarter for giant arrays.\n *\n * `Array#splice` takes all items to be inserted as individual argument which\n * causes a stack overflow in V8 when trying to insert 100k items for instance.\n *\n * Otherwise, this does not return the removed items, and takes `items` as an\n * array instead of rest parameters.\n *\n * @template {unknown} T\n * Item type.\n * @param {Array} list\n * List to operate on.\n * @param {number} start\n * Index to remove/insert at (can be negative).\n * @param {number} remove\n * Number of items to remove.\n * @param {Array} items\n * Items to inject into `list`.\n * @returns {void}\n * Nothing.\n */\nexport function splice(list, start, remove, items) {\n const end = list.length\n let chunkStart = 0\n /** @type {Array} */\n let parameters\n\n // Make start between zero and `end` (included).\n if (start < 0) {\n start = -start > end ? 0 : end + start\n } else {\n start = start > end ? end : start\n }\n remove = remove > 0 ? 
remove : 0\n\n // No need to chunk the items if there’s only a couple (10k) items.\n if (items.length < 10000) {\n parameters = Array.from(items)\n parameters.unshift(start, remove)\n // @ts-expect-error Hush, it’s fine.\n list.splice(...parameters)\n } else {\n // Delete `remove` items starting from `start`\n if (remove) list.splice(start, remove)\n\n // Insert the items in chunks to not cause stack overflows.\n while (chunkStart < items.length) {\n parameters = items.slice(chunkStart, chunkStart + 10000)\n parameters.unshift(start, 0)\n // @ts-expect-error Hush, it’s fine.\n list.splice(...parameters)\n chunkStart += 10000\n start += 10000\n }\n }\n}\n\n/**\n * Append `items` (an array) at the end of `list` (another array).\n * When `list` was empty, returns `items` instead.\n *\n * This prevents a potentially expensive operation when `list` is empty,\n * and adds items in batches to prevent V8 from hanging.\n *\n * @template {unknown} T\n * Item type.\n * @param {Array} list\n * List to operate on.\n * @param {Array} items\n * Items to add to `list`.\n * @returns {Array}\n * Either `list` or `items`.\n */\nexport function push(list, items) {\n if (list.length > 0) {\n splice(list, list.length, 0, items)\n return list\n }\n return items\n}\n","/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {InitialConstruct} */\nexport const content = {\n tokenize: initializeContent\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeContent(effects) {\n const contentStart = effects.attempt(\n this.parser.constructs.contentInitial,\n afterContentStartConstruct,\n paragraphInitial\n )\n /** @type {Token} */\n let previous\n return contentStart\n\n /** @type {State} */\n function afterContentStartConstruct(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, contentStart, 'linePrefix')\n }\n\n /** @type {State} */\n function paragraphInitial(code) {\n effects.enter('paragraph')\n return lineStart(code)\n }\n\n /** @type {State} */\n function lineStart(code) {\n const token = effects.enter('chunkText', {\n contentType: 'text',\n previous\n })\n if (previous) {\n previous.next = token\n }\n previous = token\n return data(code)\n }\n\n /** @type {State} */\n function data(code) {\n if (code === null) {\n effects.exit('chunkText')\n effects.exit('paragraph')\n effects.consume(code)\n return\n }\n if (markdownLineEnding(code)) {\n effects.consume(code)\n effects.exit('chunkText')\n return lineStart\n }\n\n // Data.\n effects.consume(code)\n return data\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */\n\nimport {markdownSpace} from 'micromark-util-character'\n\n// To do: implement `spaceOrTab`, `spaceOrTabMinMax`, `spaceOrTabWithOptions`.\n\n/**\n * Parse spaces and tabs.\n *\n * There is no `nok` parameter:\n *\n * * spaces in markdown are often optional, in which case 
this factory can be\n * used and `ok` will be switched to whether spaces were found or not\n * * one line ending or space can be detected with `markdownSpace(code)` right\n * before using `factorySpace`\n *\n * ###### Examples\n *\n * Where `␉` represents a tab (plus how much it expands) and `␠` represents a\n * single space.\n *\n * ```markdown\n * ␉\n * ␠␠␠␠\n * ␉␠\n * ```\n *\n * @param {Effects} effects\n * Context.\n * @param {State} ok\n * State switched to when successful.\n * @param {TokenType} type\n * Type (`' \\t'`).\n * @param {number | undefined} [max=Infinity]\n * Max (exclusive).\n * @returns\n * Start state.\n */\nexport function factorySpace(effects, ok, type, max) {\n const limit = max ? max - 1 : Number.POSITIVE_INFINITY\n let size = 0\n return start\n\n /** @type {State} */\n function start(code) {\n if (markdownSpace(code)) {\n effects.enter(type)\n return prefix(code)\n }\n return ok(code)\n }\n\n /** @type {State} */\n function prefix(code) {\n if (markdownSpace(code) && size++ < limit) {\n effects.consume(code)\n return prefix\n }\n effects.exit(type)\n return ok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n */\n\nimport {unicodePunctuationRegex} from './lib/unicode-punctuation-regex.js'\n\n/**\n * Check whether the character code represents an ASCII alpha (`a` through `z`,\n * case insensitive).\n *\n * An **ASCII alpha** is an ASCII upper alpha or ASCII lower alpha.\n *\n * An **ASCII upper alpha** is a character in the inclusive range U+0041 (`A`)\n * to U+005A (`Z`).\n *\n * An **ASCII lower alpha** is a character in the inclusive range U+0061 (`a`)\n * to U+007A (`z`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAlpha = regexCheck(/[A-Za-z]/)\n\n/**\n * Check whether the character code represents an ASCII alphanumeric (`a`\n * through `z`, case insensitive, or `0` through `9`).\n *\n * An **ASCII alphanumeric** is an ASCII digit (see `asciiDigit`) or ASCII alpha\n * (see `asciiAlpha`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAlphanumeric = regexCheck(/[\\dA-Za-z]/)\n\n/**\n * Check whether the character code represents an ASCII atext.\n *\n * atext is an ASCII alphanumeric (see `asciiAlphanumeric`), or a character in\n * the inclusive ranges U+0023 NUMBER SIGN (`#`) to U+0027 APOSTROPHE (`'`),\n * U+002A ASTERISK (`*`), U+002B PLUS SIGN (`+`), U+002D DASH (`-`), U+002F\n * SLASH (`/`), U+003D EQUALS TO (`=`), U+003F QUESTION MARK (`?`), U+005E\n * CARET (`^`) to U+0060 GRAVE ACCENT (`` ` ``), or U+007B LEFT CURLY BRACE\n * (`{`) to U+007E TILDE (`~`).\n *\n * See:\n * **\\[RFC5322]**:\n * [Internet Message Format](https://tools.ietf.org/html/rfc5322).\n * P. 
Resnick.\n * IETF.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiAtext = regexCheck(/[#-'*+\\--9=?A-Z^-~]/)\n\n/**\n * Check whether a character code is an ASCII control character.\n *\n * An **ASCII control** is a character in the inclusive range U+0000 NULL (NUL)\n * to U+001F (US), or U+007F (DEL).\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function asciiControl(code) {\n return (\n // Special whitespace codes (which have negative values), C0 and Control\n // character DEL\n code !== null && (code < 32 || code === 127)\n )\n}\n\n/**\n * Check whether the character code represents an ASCII digit (`0` through `9`).\n *\n * An **ASCII digit** is a character in the inclusive range U+0030 (`0`) to\n * U+0039 (`9`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiDigit = regexCheck(/\\d/)\n\n/**\n * Check whether the character code represents an ASCII hex digit (`a` through\n * `f`, case insensitive, or `0` through `9`).\n *\n * An **ASCII hex digit** is an ASCII digit (see `asciiDigit`), ASCII upper hex\n * digit, or an ASCII lower hex digit.\n *\n * An **ASCII upper hex digit** is a character in the inclusive range U+0041\n * (`A`) to U+0046 (`F`).\n *\n * An **ASCII lower hex digit** is a character in the inclusive range U+0061\n * (`a`) to U+0066 (`f`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiHexDigit = regexCheck(/[\\dA-Fa-f]/)\n\n/**\n * Check whether the character code represents ASCII punctuation.\n *\n * An **ASCII punctuation** is a character in the inclusive ranges U+0021\n * EXCLAMATION MARK (`!`) to U+002F SLASH (`/`), U+003A COLON (`:`) to U+0040 AT\n * SIGN (`@`), U+005B LEFT SQUARE BRACKET (`[`) to U+0060 GRAVE ACCENT\n * (`` ` ``), or U+007B LEFT CURLY BRACE (`{`) to U+007E TILDE (`~`).\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/)\n\n/**\n * Check whether a character code is a markdown line ending.\n *\n * A **markdown line ending** is the virtual characters M-0003 CARRIAGE RETURN\n * LINE FEED (CRLF), M-0004 LINE FEED (LF) and M-0005 CARRIAGE RETURN (CR).\n *\n * In micromark, the actual character U+000A LINE FEED (LF) and U+000D CARRIAGE\n * RETURN (CR) are replaced by these virtual characters depending on whether\n * they occurred together.\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function markdownLineEnding(code) {\n return code !== null && code < -2\n}\n\n/**\n * Check whether a character code is a markdown line ending (see\n * `markdownLineEnding`) or markdown space (see `markdownSpace`).\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function markdownLineEndingOrSpace(code) {\n return code !== null && (code < 0 || code === 32)\n}\n\n/**\n * Check whether a character code is a markdown space.\n *\n * A **markdown space** is the concrete character U+0020 SPACE (SP) and the\n * virtual characters M-0001 VIRTUAL SPACE (VS) and M-0002 HORIZONTAL TAB (HT).\n *\n * In micromark, the actual character U+0009 CHARACTER TABULATION (HT) is\n * replaced by one M-0002 HORIZONTAL TAB (HT) and between 0 and 3 M-0001 VIRTUAL\n * SPACE (VS) characters, depending on the column at which the tab occurred.\n *\n * @param {Code} code\n * Code.\n * @returns {boolean}\n * Whether it matches.\n */\nexport function 
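/*
 * Illustrative sketch only, not part of the bundled sources: these predicates
 * operate on micromark character codes. The preprocessor earlier in this bundle
 * replaces CR, LF and CRLF with the virtual codes -5, -4 and -3, and a tab with
 * -2 followed by -1 virtual spaces, so the checks compare small negative numbers
 * as well as the real space code 32.
 */
markdownLineEnding(-4);        // true  (virtual LF)
markdownLineEnding(32);        // false (a space is not a line ending)
markdownLineEndingOrSpace(32); // true
asciiAlpha(65);                // true  ("A")
asciiControl(127);             // true  (DEL)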
markdownSpace(code) {\n return code === -2 || code === -1 || code === 32\n}\n\n// Size note: removing ASCII from the regex and using `asciiPunctuation` here\n// In fact adds to the bundle size.\n/**\n * Check whether the character code represents Unicode punctuation.\n *\n * A **Unicode punctuation** is a character in the Unicode `Pc` (Punctuation,\n * Connector), `Pd` (Punctuation, Dash), `Pe` (Punctuation, Close), `Pf`\n * (Punctuation, Final quote), `Pi` (Punctuation, Initial quote), `Po`\n * (Punctuation, Other), or `Ps` (Punctuation, Open) categories, or an ASCII\n * punctuation (see `asciiPunctuation`).\n *\n * See:\n * **\\[UNICODE]**:\n * [The Unicode Standard](https://www.unicode.org/versions/).\n * Unicode Consortium.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const unicodePunctuation = regexCheck(unicodePunctuationRegex)\n\n/**\n * Check whether the character code represents Unicode whitespace.\n *\n * Note that this does handle micromark specific markdown whitespace characters.\n * See `markdownLineEndingOrSpace` to check that.\n *\n * A **Unicode whitespace** is a character in the Unicode `Zs` (Separator,\n * Space) category, or U+0009 CHARACTER TABULATION (HT), U+000A LINE FEED (LF),\n * U+000C (FF), or U+000D CARRIAGE RETURN (CR) (**\\[UNICODE]**).\n *\n * See:\n * **\\[UNICODE]**:\n * [The Unicode Standard](https://www.unicode.org/versions/).\n * Unicode Consortium.\n *\n * @param code\n * Code.\n * @returns\n * Whether it matches.\n */\nexport const unicodeWhitespace = regexCheck(/\\s/)\n\n/**\n * Create a code check from a regex.\n *\n * @param {RegExp} regex\n * @returns {(code: Code) => boolean}\n */\nfunction regexCheck(regex) {\n return check\n\n /**\n * Check whether a code matches the bound regex.\n *\n * @param {Code} code\n * Character code.\n * @returns {boolean}\n * Whether the character code matches the bound regex.\n */\n function check(code) {\n return code !== null && regex.test(String.fromCharCode(code))\n }\n}\n","// This module is generated by `script/`.\n//\n// CommonMark handles attention (emphasis, strong) markers based on what comes\n// before or after them.\n// One such difference is if those characters are Unicode punctuation.\n// This script is generated from the Unicode data.\n\n/**\n * Regular expression that matches a unicode punctuation character.\n */\nexport const unicodePunctuationRegex =\n 
/[!-\\/:-@\\[-`\\{-~\\xA1\\xA7\\xAB\\xB6\\xB7\\xBB\\xBF\\u037E\\u0387\\u055A-\\u055F\\u0589\\u058A\\u05BE\\u05C0\\u05C3\\u05C6\\u05F3\\u05F4\\u0609\\u060A\\u060C\\u060D\\u061B\\u061D-\\u061F\\u066A-\\u066D\\u06D4\\u0700-\\u070D\\u07F7-\\u07F9\\u0830-\\u083E\\u085E\\u0964\\u0965\\u0970\\u09FD\\u0A76\\u0AF0\\u0C77\\u0C84\\u0DF4\\u0E4F\\u0E5A\\u0E5B\\u0F04-\\u0F12\\u0F14\\u0F3A-\\u0F3D\\u0F85\\u0FD0-\\u0FD4\\u0FD9\\u0FDA\\u104A-\\u104F\\u10FB\\u1360-\\u1368\\u1400\\u166E\\u169B\\u169C\\u16EB-\\u16ED\\u1735\\u1736\\u17D4-\\u17D6\\u17D8-\\u17DA\\u1800-\\u180A\\u1944\\u1945\\u1A1E\\u1A1F\\u1AA0-\\u1AA6\\u1AA8-\\u1AAD\\u1B5A-\\u1B60\\u1B7D\\u1B7E\\u1BFC-\\u1BFF\\u1C3B-\\u1C3F\\u1C7E\\u1C7F\\u1CC0-\\u1CC7\\u1CD3\\u2010-\\u2027\\u2030-\\u2043\\u2045-\\u2051\\u2053-\\u205E\\u207D\\u207E\\u208D\\u208E\\u2308-\\u230B\\u2329\\u232A\\u2768-\\u2775\\u27C5\\u27C6\\u27E6-\\u27EF\\u2983-\\u2998\\u29D8-\\u29DB\\u29FC\\u29FD\\u2CF9-\\u2CFC\\u2CFE\\u2CFF\\u2D70\\u2E00-\\u2E2E\\u2E30-\\u2E4F\\u2E52-\\u2E5D\\u3001-\\u3003\\u3008-\\u3011\\u3014-\\u301F\\u3030\\u303D\\u30A0\\u30FB\\uA4FE\\uA4FF\\uA60D-\\uA60F\\uA673\\uA67E\\uA6F2-\\uA6F7\\uA874-\\uA877\\uA8CE\\uA8CF\\uA8F8-\\uA8FA\\uA8FC\\uA92E\\uA92F\\uA95F\\uA9C1-\\uA9CD\\uA9DE\\uA9DF\\uAA5C-\\uAA5F\\uAADE\\uAADF\\uAAF0\\uAAF1\\uABEB\\uFD3E\\uFD3F\\uFE10-\\uFE19\\uFE30-\\uFE52\\uFE54-\\uFE61\\uFE63\\uFE68\\uFE6A\\uFE6B\\uFF01-\\uFF03\\uFF05-\\uFF0A\\uFF0C-\\uFF0F\\uFF1A\\uFF1B\\uFF1F\\uFF20\\uFF3B-\\uFF3D\\uFF3F\\uFF5B\\uFF5D\\uFF5F-\\uFF65]/\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\n/**\n * @typedef {[Construct, ContainerState]} StackItem\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {InitialConstruct} */\nexport const document = {\n tokenize: initializeDocument\n}\n\n/** @type {Construct} */\nconst containerConstruct = {\n tokenize: tokenizeContainer\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeDocument(effects) {\n const self = this\n /** @type {Array} */\n const stack = []\n let continued = 0\n /** @type {TokenizeContext | undefined} */\n let childFlow\n /** @type {Token | undefined} */\n let childToken\n /** @type {number} */\n let lineStartOffset\n return start\n\n /** @type {State} */\n function start(code) {\n // First we iterate through the open blocks, starting with the root\n // document, and descending through last children down to the last open\n // block.\n // Each block imposes a condition that the line must satisfy if the block is\n // to remain open.\n // For example, a block quote requires a `>` character.\n // A paragraph requires a non-blank line.\n // In this phase we may match all or just some of the open blocks.\n // But we cannot close unmatched blocks yet, because we may have a lazy\n // continuation line.\n if (continued < stack.length) {\n const item = stack[continued]\n self.containerState 
= item[1]\n return effects.attempt(\n item[0].continuation,\n documentContinue,\n checkNewContainers\n )(code)\n }\n\n // Done.\n return checkNewContainers(code)\n }\n\n /** @type {State} */\n function documentContinue(code) {\n continued++\n\n // Note: this field is called `_closeFlow` but it also closes containers.\n // Perhaps a good idea to rename it but it’s already used in the wild by\n // extensions.\n if (self.containerState._closeFlow) {\n self.containerState._closeFlow = undefined\n if (childFlow) {\n closeFlow()\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when dealing with lazy lines in `writeToChild`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {Point | undefined} */\n let point\n\n // Find the flow chunk.\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n let index = indexBeforeExits\n while (index < self.events.length) {\n self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n return checkNewContainers(code)\n }\n return start(code)\n }\n\n /** @type {State} */\n function checkNewContainers(code) {\n // Next, after consuming the continuation markers for existing blocks, we\n // look for new block starts (e.g. `>` for a block quote).\n // If we encounter a new block start, we close any blocks unmatched in\n // step 1 before creating the new block as a child of the last matched\n // block.\n if (continued === stack.length) {\n // No need to `check` whether there’s a container, of `exitContainers`\n // would be moot.\n // We can instead immediately `attempt` to parse one.\n if (!childFlow) {\n return documentContinued(code)\n }\n\n // If we have concrete content, such as block HTML or fenced code,\n // we can’t have containers “pierce” into them, so we can immediately\n // start.\n if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {\n return flowStart(code)\n }\n\n // If we do have flow, it could still be a blank line,\n // but we’d be interrupting it w/ a new container if there’s a current\n // construct.\n // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer\n // needed in [email protected]).\n self.interrupt = Boolean(\n childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack\n )\n }\n\n // Check if there is a new container.\n self.containerState = {}\n return effects.check(\n containerConstruct,\n thereIsANewContainer,\n thereIsNoNewContainer\n )(code)\n }\n\n /** @type {State} */\n function thereIsANewContainer(code) {\n if (childFlow) closeFlow()\n exitContainers(continued)\n return documentContinued(code)\n }\n\n /** @type {State} */\n function thereIsNoNewContainer(code) {\n self.parser.lazy[self.now().line] = continued !== stack.length\n lineStartOffset = self.now().offset\n return flowStart(code)\n }\n\n /** @type {State} */\n function documentContinued(code) {\n // Try new containers.\n self.containerState = {}\n return effects.attempt(\n containerConstruct,\n containerContinue,\n flowStart\n )(code)\n }\n\n /** @type {State} */\n function containerContinue(code) {\n 
continued++\n stack.push([self.currentConstruct, self.containerState])\n // Try another.\n return documentContinued(code)\n }\n\n /** @type {State} */\n function flowStart(code) {\n if (code === null) {\n if (childFlow) closeFlow()\n exitContainers(0)\n effects.consume(code)\n return\n }\n childFlow = childFlow || self.parser.flow(self.now())\n effects.enter('chunkFlow', {\n contentType: 'flow',\n previous: childToken,\n _tokenizer: childFlow\n })\n return flowContinue(code)\n }\n\n /** @type {State} */\n function flowContinue(code) {\n if (code === null) {\n writeToChild(effects.exit('chunkFlow'), true)\n exitContainers(0)\n effects.consume(code)\n return\n }\n if (markdownLineEnding(code)) {\n effects.consume(code)\n writeToChild(effects.exit('chunkFlow'))\n // Get ready for the next line.\n continued = 0\n self.interrupt = undefined\n return start\n }\n effects.consume(code)\n return flowContinue\n }\n\n /**\n * @param {Token} token\n * @param {boolean | undefined} [eof]\n * @returns {void}\n */\n function writeToChild(token, eof) {\n const stream = self.sliceStream(token)\n if (eof) stream.push(null)\n token.previous = childToken\n if (childToken) childToken.next = token\n childToken = token\n childFlow.defineSkip(token.start)\n childFlow.write(stream)\n\n // Alright, so we just added a lazy line:\n //\n // ```markdown\n // > a\n // b.\n //\n // Or:\n //\n // > ~~~c\n // d\n //\n // Or:\n //\n // > | e |\n // f\n // ```\n //\n // The construct in the second example (fenced code) does not accept lazy\n // lines, so it marked itself as done at the end of its first line, and\n // then the content construct parses `d`.\n // Most constructs in markdown match on the first line: if the first line\n // forms a construct, a non-lazy line can’t “unmake” it.\n //\n // The construct in the third example is potentially a GFM table, and\n // those are *weird*.\n // It *could* be a table, from the first line, if the following line\n // matches a condition.\n // In this case, that second line is lazy, which “unmakes” the first line\n // and turns the whole into one content block.\n //\n // We’ve now parsed the non-lazy and the lazy line, and can figure out\n // whether the lazy line started a new flow block.\n // If it did, we exit the current containers between the two flow blocks.\n if (self.parser.lazy[token.start.line]) {\n let index = childFlow.events.length\n while (index--) {\n if (\n // The token starts before the line ending…\n childFlow.events[index][1].start.offset < lineStartOffset &&\n // …and either is not ended yet…\n (!childFlow.events[index][1].end ||\n // …or ends after it.\n childFlow.events[index][1].end.offset > lineStartOffset)\n ) {\n // Exit: there’s still something open, which means it’s a lazy line\n // part of something.\n return\n }\n }\n\n // Note: this algorithm for moving events around is similar to the\n // algorithm when closing flow in `documentContinue`.\n const indexBeforeExits = self.events.length\n let indexBeforeFlow = indexBeforeExits\n /** @type {boolean | undefined} */\n let seen\n /** @type {Point | undefined} */\n let point\n\n // Find the previous chunk (the one before the lazy line).\n while (indexBeforeFlow--) {\n if (\n self.events[indexBeforeFlow][0] === 'exit' &&\n self.events[indexBeforeFlow][1].type === 'chunkFlow'\n ) {\n if (seen) {\n point = self.events[indexBeforeFlow][1].end\n break\n }\n seen = true\n }\n }\n exitContainers(continued)\n\n // Fix positions.\n index = indexBeforeExits\n while (index < self.events.length) {\n 
self.events[index][1].end = Object.assign({}, point)\n index++\n }\n\n // Inject the exits earlier (they’re still also at the end).\n splice(\n self.events,\n indexBeforeFlow + 1,\n 0,\n self.events.slice(indexBeforeExits)\n )\n\n // Discard the duplicate exits.\n self.events.length = index\n }\n }\n\n /**\n * @param {number} size\n * @returns {void}\n */\n function exitContainers(size) {\n let index = stack.length\n\n // Exit open containers.\n while (index-- > size) {\n const entry = stack[index]\n self.containerState = entry[1]\n entry[0].exit.call(self, effects)\n }\n stack.length = size\n }\n function closeFlow() {\n childFlow.write([null])\n childToken = undefined\n childFlow = undefined\n self.containerState._closeFlow = undefined\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContainer(effects, ok, nok) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n effects.attempt(this.parser.constructs.document, ok, nok),\n 'linePrefix',\n this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4\n )\n}\n","/**\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nimport {blankLine, content} from 'micromark-core-commonmark'\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {InitialConstruct} */\nexport const flow = {\n tokenize: initializeFlow\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\nfunction initializeFlow(effects) {\n const self = this\n const initial = effects.attempt(\n // Try to parse a blank line.\n blankLine,\n atBlankEnding,\n // Try to parse initial flow (essentially, only code).\n effects.attempt(\n this.parser.constructs.flowInitial,\n afterConstruct,\n factorySpace(\n effects,\n effects.attempt(\n this.parser.constructs.flow,\n afterConstruct,\n effects.attempt(content, afterConstruct)\n ),\n 'linePrefix'\n )\n )\n )\n return initial\n\n /** @type {State} */\n function atBlankEnding(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEndingBlank')\n effects.consume(code)\n effects.exit('lineEndingBlank')\n self.currentConstruct = undefined\n return initial\n }\n\n /** @type {State} */\n function afterConstruct(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n self.currentConstruct = undefined\n return initial\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blankLine = {\n tokenize: tokenizeBlankLine,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlankLine(effects, ok, nok) {\n return start\n\n /**\n * Start of blank line.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n 
function start(code) {\n return markdownSpace(code)\n ? factorySpace(effects, after, 'linePrefix')(code)\n : after(code)\n }\n\n /**\n * At eof/eol, after optional whitespace.\n *\n * > 👉 **Note**: `␠` represents a space character.\n *\n * ```markdown\n * > | ␠␠␊\n * ^\n * > | ␊\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return code === null || markdownLineEnding(code) ? ok(code) : nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {subtokenize} from 'micromark-util-subtokenize'\n/**\n * No name because it must not be turned off.\n * @type {Construct}\n */\nexport const content = {\n tokenize: tokenizeContent,\n resolve: resolveContent\n}\n\n/** @type {Construct} */\nconst continuationConstruct = {\n tokenize: tokenizeContinuation,\n partial: true\n}\n\n/**\n * Content is transparent: it’s parsed right now. That way, definitions are also\n * parsed right now: before text in paragraphs (specifically, media) are parsed.\n *\n * @type {Resolver}\n */\nfunction resolveContent(events) {\n subtokenize(events)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContent(effects, ok) {\n /** @type {Token | undefined} */\n let previous\n return chunkStart\n\n /**\n * Before a content chunk.\n *\n * ```markdown\n * > | abc\n * ^\n * ```\n *\n * @type {State}\n */\n function chunkStart(code) {\n effects.enter('content')\n previous = effects.enter('chunkContent', {\n contentType: 'content'\n })\n return chunkInside(code)\n }\n\n /**\n * In a content chunk.\n *\n * ```markdown\n * > | abc\n * ^^^\n * ```\n *\n * @type {State}\n */\n function chunkInside(code) {\n if (code === null) {\n return contentEnd(code)\n }\n\n // To do: in `markdown-rs`, each line is parsed on its own, and everything\n // is stitched together resolving.\n if (markdownLineEnding(code)) {\n return effects.check(\n continuationConstruct,\n contentContinue,\n contentEnd\n )(code)\n }\n\n // Data.\n effects.consume(code)\n return chunkInside\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentEnd(code) {\n effects.exit('chunkContent')\n effects.exit('content')\n return ok(code)\n }\n\n /**\n *\n *\n * @type {State}\n */\n function contentContinue(code) {\n effects.consume(code)\n effects.exit('chunkContent')\n previous.next = effects.enter('chunkContent', {\n contentType: 'content',\n previous\n })\n previous = previous.next\n return chunkInside\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeContinuation(effects, ok, nok) {\n const self = this\n return startLookahead\n\n /**\n *\n *\n * @type {State}\n */\n function startLookahead(code) {\n effects.exit('chunkContent')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, prefixed, 'linePrefix')\n }\n\n /**\n *\n *\n * @type {State}\n */\n function prefixed(code) {\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n\n // Always populated by defaults.\n\n const tail = self.events[self.events.length - 
1]\n if (\n !self.parser.constructs.disable.null.includes('codeIndented') &&\n tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ) {\n return ok(code)\n }\n return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Token} Token\n */\n\nimport {splice} from 'micromark-util-chunked'\n/**\n * Tokenize subcontent.\n *\n * @param {Array} events\n * List of events.\n * @returns {boolean}\n * Whether subtokens were found.\n */\nexport function subtokenize(events) {\n /** @type {Record} */\n const jumps = {}\n let index = -1\n /** @type {Event} */\n let event\n /** @type {number | undefined} */\n let lineIndex\n /** @type {number} */\n let otherIndex\n /** @type {Event} */\n let otherEvent\n /** @type {Array} */\n let parameters\n /** @type {Array} */\n let subevents\n /** @type {boolean | undefined} */\n let more\n while (++index < events.length) {\n while (index in jumps) {\n index = jumps[index]\n }\n event = events[index]\n\n // Add a hook for the GFM tasklist extension, which needs to know if text\n // is in the first content of a list item.\n if (\n index &&\n event[1].type === 'chunkFlow' &&\n events[index - 1][1].type === 'listItemPrefix'\n ) {\n subevents = event[1]._tokenizer.events\n otherIndex = 0\n if (\n otherIndex < subevents.length &&\n subevents[otherIndex][1].type === 'lineEndingBlank'\n ) {\n otherIndex += 2\n }\n if (\n otherIndex < subevents.length &&\n subevents[otherIndex][1].type === 'content'\n ) {\n while (++otherIndex < subevents.length) {\n if (subevents[otherIndex][1].type === 'content') {\n break\n }\n if (subevents[otherIndex][1].type === 'chunkText') {\n subevents[otherIndex][1]._isInFirstContentOfListItem = true\n otherIndex++\n }\n }\n }\n }\n\n // Enter.\n if (event[0] === 'enter') {\n if (event[1].contentType) {\n Object.assign(jumps, subcontent(events, index))\n index = jumps[index]\n more = true\n }\n }\n // Exit.\n else if (event[1]._container) {\n otherIndex = index\n lineIndex = undefined\n while (otherIndex--) {\n otherEvent = events[otherIndex]\n if (\n otherEvent[1].type === 'lineEnding' ||\n otherEvent[1].type === 'lineEndingBlank'\n ) {\n if (otherEvent[0] === 'enter') {\n if (lineIndex) {\n events[lineIndex][1].type = 'lineEndingBlank'\n }\n otherEvent[1].type = 'lineEnding'\n lineIndex = otherIndex\n }\n } else {\n break\n }\n }\n if (lineIndex) {\n // Fix position.\n event[1].end = Object.assign({}, events[lineIndex][1].start)\n\n // Switch container exit w/ line endings.\n parameters = events.slice(lineIndex, index)\n parameters.unshift(event)\n splice(events, lineIndex, index - lineIndex + 1, parameters)\n }\n }\n }\n return !more\n}\n\n/**\n * Tokenize embedded tokens.\n *\n * @param {Array} events\n * @param {number} eventIndex\n * @returns {Record}\n */\nfunction subcontent(events, eventIndex) {\n const token = events[eventIndex][1]\n const context = events[eventIndex][2]\n let startPosition = eventIndex - 1\n /** @type {Array} */\n const startPositions = []\n const tokenizer =\n token._tokenizer || context.parser[token.contentType](token.start)\n const childEvents = tokenizer.events\n /** @type {Array<[number, number]>} */\n const jumps = []\n /** @type {Record} */\n const gaps = {}\n /** @type {Array} */\n let stream\n /** @type {Token | undefined} */\n let previous\n let index = -1\n /** @type {Token | 
undefined} */\n let current = token\n let adjust = 0\n let start = 0\n const breaks = [start]\n\n // Loop forward through the linked tokens to pass them in order to the\n // subtokenizer.\n while (current) {\n // Find the position of the event for this token.\n while (events[++startPosition][1] !== current) {\n // Empty.\n }\n startPositions.push(startPosition)\n if (!current._tokenizer) {\n stream = context.sliceStream(current)\n if (!current.next) {\n stream.push(null)\n }\n if (previous) {\n tokenizer.defineSkip(current.start)\n }\n if (current._isInFirstContentOfListItem) {\n tokenizer._gfmTasklistFirstContentOfListItem = true\n }\n tokenizer.write(stream)\n if (current._isInFirstContentOfListItem) {\n tokenizer._gfmTasklistFirstContentOfListItem = undefined\n }\n }\n\n // Unravel the next token.\n previous = current\n current = current.next\n }\n\n // Now, loop back through all events (and linked tokens), to figure out which\n // parts belong where.\n current = token\n while (++index < childEvents.length) {\n if (\n // Find a void token that includes a break.\n childEvents[index][0] === 'exit' &&\n childEvents[index - 1][0] === 'enter' &&\n childEvents[index][1].type === childEvents[index - 1][1].type &&\n childEvents[index][1].start.line !== childEvents[index][1].end.line\n ) {\n start = index + 1\n breaks.push(start)\n // Help GC.\n current._tokenizer = undefined\n current.previous = undefined\n current = current.next\n }\n }\n\n // Help GC.\n tokenizer.events = []\n\n // If there’s one more token (which is the cases for lines that end in an\n // EOF), that’s perfect: the last point we found starts it.\n // If there isn’t then make sure any remaining content is added to it.\n if (current) {\n // Help GC.\n current._tokenizer = undefined\n current.previous = undefined\n } else {\n breaks.pop()\n }\n\n // Now splice the events from the subtokenizer into the current events,\n // moving back to front so that splice indices aren’t affected.\n index = breaks.length\n while (index--) {\n const slice = childEvents.slice(breaks[index], breaks[index + 1])\n const start = startPositions.pop()\n jumps.unshift([start, start + slice.length - 1])\n splice(events, start, 2, slice)\n }\n index = -1\n while (++index < jumps.length) {\n gaps[adjust + jumps[index][0]] = adjust + jumps[index][1]\n adjust += jumps[index][1] - jumps[index][0] - 1\n }\n return gaps\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').Initializer} Initializer\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\nexport const resolver = {\n resolveAll: createResolver()\n}\nexport const string = initializeFactory('string')\nexport const text = initializeFactory('text')\n\n/**\n * @param {'string' | 'text'} field\n * @returns {InitialConstruct}\n */\nfunction initializeFactory(field) {\n return {\n tokenize: initializeText,\n resolveAll: createResolver(\n field === 'text' ? resolveAllLineSuffixes : undefined\n )\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Initializer}\n */\n function initializeText(effects) {\n const self = this\n const constructs = this.parser.constructs[field]\n const text = effects.attempt(constructs, start, notText)\n return start\n\n /** @type {State} */\n function start(code) {\n return atBreak(code) ? 
text(code) : notText(code)\n }\n\n /** @type {State} */\n function notText(code) {\n if (code === null) {\n effects.consume(code)\n return\n }\n effects.enter('data')\n effects.consume(code)\n return data\n }\n\n /** @type {State} */\n function data(code) {\n if (atBreak(code)) {\n effects.exit('data')\n return text(code)\n }\n\n // Data.\n effects.consume(code)\n return data\n }\n\n /**\n * @param {Code} code\n * @returns {boolean}\n */\n function atBreak(code) {\n if (code === null) {\n return true\n }\n const list = constructs[code]\n let index = -1\n if (list) {\n // Always populated by defaults.\n\n while (++index < list.length) {\n const item = list[index]\n if (!item.previous || item.previous.call(self, self.previous)) {\n return true\n }\n }\n }\n return false\n }\n }\n}\n\n/**\n * @param {Resolver | undefined} [extraResolver]\n * @returns {Resolver}\n */\nfunction createResolver(extraResolver) {\n return resolveAllText\n\n /** @type {Resolver} */\n function resolveAllText(events, context) {\n let index = -1\n /** @type {number | undefined} */\n let enter\n\n // A rather boring computation (to merge adjacent `data` events) which\n // improves mm performance by 29%.\n while (++index <= events.length) {\n if (enter === undefined) {\n if (events[index] && events[index][1].type === 'data') {\n enter = index\n index++\n }\n } else if (!events[index] || events[index][1].type !== 'data') {\n // Don’t do anything if there is one data token.\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n index = enter + 2\n }\n enter = undefined\n }\n }\n return extraResolver ? extraResolver(events, context) : events\n }\n}\n\n/**\n * A rather ugly set of instructions which again looks at chunks in the input\n * stream.\n * The reason to do this here is that it is *much* faster to parse in reverse.\n * And that we can’t hook into `null` to split the line suffix before an EOF.\n * To do: figure out if we can make this into a clean utility, or even in core.\n * As it will be useful for GFMs literal autolink extension (and maybe even\n * tables?)\n *\n * @type {Resolver}\n */\nfunction resolveAllLineSuffixes(events, context) {\n let eventIndex = 0 // Skip first.\n\n while (++eventIndex <= events.length) {\n if (\n (eventIndex === events.length ||\n events[eventIndex][1].type === 'lineEnding') &&\n events[eventIndex - 1][1].type === 'data'\n ) {\n const data = events[eventIndex - 1][1]\n const chunks = context.sliceStream(data)\n let index = chunks.length\n let bufferIndex = -1\n let size = 0\n /** @type {boolean | undefined} */\n let tabs\n while (index--) {\n const chunk = chunks[index]\n if (typeof chunk === 'string') {\n bufferIndex = chunk.length\n while (chunk.charCodeAt(bufferIndex - 1) === 32) {\n size++\n bufferIndex--\n }\n if (bufferIndex) break\n bufferIndex = -1\n }\n // Number\n else if (chunk === -2) {\n tabs = true\n size++\n } else if (chunk === -1) {\n // Empty\n } else {\n // Replacement character, exit.\n index++\n break\n }\n }\n if (size) {\n const token = {\n type:\n eventIndex === events.length || tabs || size < 2\n ? 'lineSuffix'\n : 'hardBreakTrailing',\n start: {\n line: data.end.line,\n column: data.end.column - size,\n offset: data.end.offset - size,\n _index: data.start._index + index,\n _bufferIndex: index\n ? 
bufferIndex\n : data.start._bufferIndex + bufferIndex\n },\n end: Object.assign({}, data.end)\n }\n data.end = Object.assign({}, token.start)\n if (data.start.offset === data.end.offset) {\n Object.assign(data, token)\n } else {\n events.splice(\n eventIndex,\n 0,\n ['enter', token, context],\n ['exit', token, context]\n )\n eventIndex += 2\n }\n }\n eventIndex++\n }\n }\n return events\n}\n","/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct\n * @typedef {import('micromark-util-types').ParseContext} ParseContext\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenType} TokenType\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\n/**\n * @callback Restore\n * @returns {void}\n *\n * @typedef Info\n * @property {Restore} restore\n * @property {number} from\n *\n * @callback ReturnHandle\n * Handle a successful run.\n * @param {Construct} construct\n * @param {Info} info\n * @returns {void}\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\nimport {push, splice} from 'micromark-util-chunked'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/**\n * Create a tokenizer.\n * Tokenizers deal with one type of data (e.g., containers, flow, text).\n * The parser is the object dealing with it all.\n * `initialize` works like other constructs, except that only its `tokenize`\n * function is used, in which case it doesn’t receive an `ok` or `nok`.\n * `from` can be given to set the point before the first character, although\n * when further lines are indented, they must be set with `defineSkip`.\n *\n * @param {ParseContext} parser\n * @param {InitialConstruct} initialize\n * @param {Omit | undefined} [from]\n * @returns {TokenizeContext}\n */\nexport function createTokenizer(parser, initialize, from) {\n /** @type {Point} */\n let point = Object.assign(\n from\n ? 
Object.assign({}, from)\n : {\n line: 1,\n column: 1,\n offset: 0\n },\n {\n _index: 0,\n _bufferIndex: -1\n }\n )\n /** @type {Record} */\n const columnStart = {}\n /** @type {Array} */\n const resolveAllConstructs = []\n /** @type {Array} */\n let chunks = []\n /** @type {Array} */\n let stack = []\n /** @type {boolean | undefined} */\n let consumed = true\n\n /**\n * Tools used for tokenizing.\n *\n * @type {Effects}\n */\n const effects = {\n consume,\n enter,\n exit,\n attempt: constructFactory(onsuccessfulconstruct),\n check: constructFactory(onsuccessfulcheck),\n interrupt: constructFactory(onsuccessfulcheck, {\n interrupt: true\n })\n }\n\n /**\n * State and tools for resolving and serializing.\n *\n * @type {TokenizeContext}\n */\n const context = {\n previous: null,\n code: null,\n containerState: {},\n events: [],\n parser,\n sliceStream,\n sliceSerialize,\n now,\n defineSkip,\n write\n }\n\n /**\n * The state function.\n *\n * @type {State | void}\n */\n let state = initialize.tokenize.call(context, effects)\n\n /**\n * Track which character we expect to be consumed, to catch bugs.\n *\n * @type {Code}\n */\n let expectedCode\n if (initialize.resolveAll) {\n resolveAllConstructs.push(initialize)\n }\n return context\n\n /** @type {TokenizeContext['write']} */\n function write(slice) {\n chunks = push(chunks, slice)\n main()\n\n // Exit if we’re not done, resolve might change stuff.\n if (chunks[chunks.length - 1] !== null) {\n return []\n }\n addResult(initialize, 0)\n\n // Otherwise, resolve, and exit.\n context.events = resolveAll(resolveAllConstructs, context.events, context)\n return context.events\n }\n\n //\n // Tools.\n //\n\n /** @type {TokenizeContext['sliceSerialize']} */\n function sliceSerialize(token, expandTabs) {\n return serializeChunks(sliceStream(token), expandTabs)\n }\n\n /** @type {TokenizeContext['sliceStream']} */\n function sliceStream(token) {\n return sliceChunks(chunks, token)\n }\n\n /** @type {TokenizeContext['now']} */\n function now() {\n // This is a hot path, so we clone manually instead of `Object.assign({}, point)`\n const {line, column, offset, _index, _bufferIndex} = point\n return {\n line,\n column,\n offset,\n _index,\n _bufferIndex\n }\n }\n\n /** @type {TokenizeContext['defineSkip']} */\n function defineSkip(value) {\n columnStart[value.line] = value.column\n accountForPotentialSkip()\n }\n\n //\n // State management.\n //\n\n /**\n * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by\n * `consume`).\n * Here is where we walk through the chunks, which either include strings of\n * several characters, or numerical character codes.\n * The reason to do this in a loop instead of a call is so the stack can\n * drain.\n *\n * @returns {void}\n */\n function main() {\n /** @type {number} */\n let chunkIndex\n while (point._index < chunks.length) {\n const chunk = chunks[point._index]\n\n // If we’re in a buffer chunk, loop through it.\n if (typeof chunk === 'string') {\n chunkIndex = point._index\n if (point._bufferIndex < 0) {\n point._bufferIndex = 0\n }\n while (\n point._index === chunkIndex &&\n point._bufferIndex < chunk.length\n ) {\n go(chunk.charCodeAt(point._bufferIndex))\n }\n } else {\n go(chunk)\n }\n }\n }\n\n /**\n * Deal with one code.\n *\n * @param {Code} code\n * @returns {void}\n */\n function go(code) {\n consumed = undefined\n expectedCode = code\n state = state(code)\n }\n\n /** @type {Effects['consume']} */\n function consume(code) {\n if (markdownLineEnding(code)) {\n point.line++\n 
point.column = 1\n point.offset += code === -3 ? 2 : 1\n accountForPotentialSkip()\n } else if (code !== -1) {\n point.column++\n point.offset++\n }\n\n // Not in a string chunk.\n if (point._bufferIndex < 0) {\n point._index++\n } else {\n point._bufferIndex++\n\n // At end of string chunk.\n // @ts-expect-error Points w/ non-negative `_bufferIndex` reference\n // strings.\n if (point._bufferIndex === chunks[point._index].length) {\n point._bufferIndex = -1\n point._index++\n }\n }\n\n // Expose the previous character.\n context.previous = code\n\n // Mark as consumed.\n consumed = true\n }\n\n /** @type {Effects['enter']} */\n function enter(type, fields) {\n /** @type {Token} */\n // @ts-expect-error Patch instead of assign required fields to help GC.\n const token = fields || {}\n token.type = type\n token.start = now()\n context.events.push(['enter', token, context])\n stack.push(token)\n return token\n }\n\n /** @type {Effects['exit']} */\n function exit(type) {\n const token = stack.pop()\n token.end = now()\n context.events.push(['exit', token, context])\n return token\n }\n\n /**\n * Use results.\n *\n * @type {ReturnHandle}\n */\n function onsuccessfulconstruct(construct, info) {\n addResult(construct, info.from)\n }\n\n /**\n * Discard results.\n *\n * @type {ReturnHandle}\n */\n function onsuccessfulcheck(_, info) {\n info.restore()\n }\n\n /**\n * Factory to attempt/check/interrupt.\n *\n * @param {ReturnHandle} onreturn\n * @param {{interrupt?: boolean | undefined} | undefined} [fields]\n */\n function constructFactory(onreturn, fields) {\n return hook\n\n /**\n * Handle either an object mapping codes to constructs, a list of\n * constructs, or a single construct.\n *\n * @param {Array | Construct | ConstructRecord} constructs\n * @param {State} returnState\n * @param {State | undefined} [bogusState]\n * @returns {State}\n */\n function hook(constructs, returnState, bogusState) {\n /** @type {Array} */\n let listOfConstructs\n /** @type {number} */\n let constructIndex\n /** @type {Construct} */\n let currentConstruct\n /** @type {Info} */\n let info\n return Array.isArray(constructs) /* c8 ignore next 1 */\n ? handleListOfConstructs(constructs)\n : 'tokenize' in constructs\n ? // @ts-expect-error Looks like a construct.\n handleListOfConstructs([constructs])\n : handleMapOfConstructs(constructs)\n\n /**\n * Handle a list of construct.\n *\n * @param {ConstructRecord} map\n * @returns {State}\n */\n function handleMapOfConstructs(map) {\n return start\n\n /** @type {State} */\n function start(code) {\n const def = code !== null && map[code]\n const all = code !== null && map.null\n const list = [\n // To do: add more extension tests.\n /* c8 ignore next 2 */\n ...(Array.isArray(def) ? def : def ? [def] : []),\n ...(Array.isArray(all) ? all : all ? 
[all] : [])\n ]\n return handleListOfConstructs(list)(code)\n }\n }\n\n /**\n * Handle a list of construct.\n *\n * @param {Array} list\n * @returns {State}\n */\n function handleListOfConstructs(list) {\n listOfConstructs = list\n constructIndex = 0\n if (list.length === 0) {\n return bogusState\n }\n return handleConstruct(list[constructIndex])\n }\n\n /**\n * Handle a single construct.\n *\n * @param {Construct} construct\n * @returns {State}\n */\n function handleConstruct(construct) {\n return start\n\n /** @type {State} */\n function start(code) {\n // To do: not needed to store if there is no bogus state, probably?\n // Currently doesn’t work because `inspect` in document does a check\n // w/o a bogus, which doesn’t make sense. But it does seem to help perf\n // by not storing.\n info = store()\n currentConstruct = construct\n if (!construct.partial) {\n context.currentConstruct = construct\n }\n\n // Always populated by defaults.\n\n if (\n construct.name &&\n context.parser.constructs.disable.null.includes(construct.name)\n ) {\n return nok(code)\n }\n return construct.tokenize.call(\n // If we do have fields, create an object w/ `context` as its\n // prototype.\n // This allows a “live binding”, which is needed for `interrupt`.\n fields ? Object.assign(Object.create(context), fields) : context,\n effects,\n ok,\n nok\n )(code)\n }\n }\n\n /** @type {State} */\n function ok(code) {\n consumed = true\n onreturn(currentConstruct, info)\n return returnState\n }\n\n /** @type {State} */\n function nok(code) {\n consumed = true\n info.restore()\n if (++constructIndex < listOfConstructs.length) {\n return handleConstruct(listOfConstructs[constructIndex])\n }\n return bogusState\n }\n }\n }\n\n /**\n * @param {Construct} construct\n * @param {number} from\n * @returns {void}\n */\n function addResult(construct, from) {\n if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {\n resolveAllConstructs.push(construct)\n }\n if (construct.resolve) {\n splice(\n context.events,\n from,\n context.events.length - from,\n construct.resolve(context.events.slice(from), context)\n )\n }\n if (construct.resolveTo) {\n context.events = construct.resolveTo(context.events, context)\n }\n }\n\n /**\n * Store state.\n *\n * @returns {Info}\n */\n function store() {\n const startPoint = now()\n const startPrevious = context.previous\n const startCurrentConstruct = context.currentConstruct\n const startEventsIndex = context.events.length\n const startStack = Array.from(stack)\n return {\n restore,\n from: startEventsIndex\n }\n\n /**\n * Restore state.\n *\n * @returns {void}\n */\n function restore() {\n point = startPoint\n context.previous = startPrevious\n context.currentConstruct = startCurrentConstruct\n context.events.length = startEventsIndex\n stack = startStack\n accountForPotentialSkip()\n }\n }\n\n /**\n * Move the current point a bit forward in the line when it’s on a column\n * skip.\n *\n * @returns {void}\n */\n function accountForPotentialSkip() {\n if (point.line in columnStart && point.column < 2) {\n point.column = columnStart[point.line]\n point.offset += columnStart[point.line] - 1\n }\n }\n}\n\n/**\n * Get the chunks from a slice of chunks in the range of a token.\n *\n * @param {Array} chunks\n * @param {Pick} token\n * @returns {Array}\n */\nfunction sliceChunks(chunks, token) {\n const startIndex = token.start._index\n const startBufferIndex = token.start._bufferIndex\n const endIndex = token.end._index\n const endBufferIndex = token.end._bufferIndex\n /** @type 
{Array} */\n let view\n if (startIndex === endIndex) {\n // @ts-expect-error `_bufferIndex` is used on string chunks.\n view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)]\n } else {\n view = chunks.slice(startIndex, endIndex)\n if (startBufferIndex > -1) {\n const head = view[0]\n if (typeof head === 'string') {\n view[0] = head.slice(startBufferIndex)\n } else {\n view.shift()\n }\n }\n if (endBufferIndex > 0) {\n // @ts-expect-error `_bufferIndex` is used on string chunks.\n view.push(chunks[endIndex].slice(0, endBufferIndex))\n }\n }\n return view\n}\n\n/**\n * Get the string value of a slice of chunks.\n *\n * @param {Array} chunks\n * @param {boolean | undefined} [expandTabs=false]\n * @returns {string}\n */\nfunction serializeChunks(chunks, expandTabs) {\n let index = -1\n /** @type {Array} */\n const result = []\n /** @type {boolean | undefined} */\n let atTab\n while (++index < chunks.length) {\n const chunk = chunks[index]\n /** @type {string} */\n let value\n if (typeof chunk === 'string') {\n value = chunk\n } else\n switch (chunk) {\n case -5: {\n value = '\\r'\n break\n }\n case -4: {\n value = '\\n'\n break\n }\n case -3: {\n value = '\\r' + '\\n'\n break\n }\n case -2: {\n value = expandTabs ? ' ' : '\\t'\n break\n }\n case -1: {\n if (!expandTabs && atTab) continue\n value = ' '\n break\n }\n default: {\n // Currently only replacement character.\n value = String.fromCharCode(chunk)\n }\n }\n atTab = chunk === -2\n result.push(value)\n }\n return result.join('')\n}\n","/**\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n */\n\n/**\n * Call all `resolveAll`s.\n *\n * @param {Array<{resolveAll?: Resolver | undefined}>} constructs\n * List of constructs, optionally with `resolveAll`s.\n * @param {Array} events\n * List of events.\n * @param {TokenizeContext} context\n * Context used by `tokenize`.\n * @returns {Array}\n * Changed events.\n */\nexport function resolveAll(constructs, events, context) {\n /** @type {Array} */\n const called = []\n let index = -1\n\n while (++index < constructs.length) {\n const resolve = constructs[index].resolveAll\n\n if (resolve && !called.includes(resolve)) {\n events = resolve(events, context)\n called.push(resolve)\n }\n }\n\n return events\n}\n","/**\n * @typedef {import('micromark-util-types').Extension} Extension\n */\n\nimport {\n attention,\n autolink,\n blockQuote,\n characterEscape,\n characterReference,\n codeFenced,\n codeIndented,\n codeText,\n definition,\n hardBreakEscape,\n headingAtx,\n htmlFlow,\n htmlText,\n labelEnd,\n labelStartImage,\n labelStartLink,\n lineEnding,\n list,\n setextUnderline,\n thematicBreak\n} from 'micromark-core-commonmark'\nimport {resolver as resolveText} from './initialize/text.js'\n\n/** @satisfies {Extension['document']} */\nexport const document = {\n [42]: list,\n [43]: list,\n [45]: list,\n [48]: list,\n [49]: list,\n [50]: list,\n [51]: list,\n [52]: list,\n [53]: list,\n [54]: list,\n [55]: list,\n [56]: list,\n [57]: list,\n [62]: blockQuote\n}\n\n/** @satisfies {Extension['contentInitial']} */\nexport const contentInitial = {\n [91]: definition\n}\n\n/** @satisfies {Extension['flowInitial']} */\nexport const flowInitial = {\n [-2]: codeIndented,\n [-1]: codeIndented,\n [32]: codeIndented\n}\n\n/** @satisfies {Extension['flow']} */\nexport const flow = {\n [35]: headingAtx,\n [42]: thematicBreak,\n [45]: [setextUnderline, 
thematicBreak],\n [60]: htmlFlow,\n [61]: setextUnderline,\n [95]: thematicBreak,\n [96]: codeFenced,\n [126]: codeFenced\n}\n\n/** @satisfies {Extension['string']} */\nexport const string = {\n [38]: characterReference,\n [92]: characterEscape\n}\n\n/** @satisfies {Extension['text']} */\nexport const text = {\n [-5]: lineEnding,\n [-4]: lineEnding,\n [-3]: lineEnding,\n [33]: labelStartImage,\n [38]: characterReference,\n [42]: attention,\n [60]: [autolink, htmlText],\n [91]: labelStartLink,\n [92]: [hardBreakEscape, characterEscape],\n [93]: labelEnd,\n [95]: attention,\n [96]: codeText\n}\n\n/** @satisfies {Extension['insideSpan']} */\nexport const insideSpan = {\n null: [attention, resolveText]\n}\n\n/** @satisfies {Extension['attentionMarkers']} */\nexport const attentionMarkers = {\n null: [42, 95]\n}\n\n/** @satisfies {Extension['disable']} */\nexport const disable = {\n null: []\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Point} Point\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {push, splice} from 'micromark-util-chunked'\nimport {classifyCharacter} from 'micromark-util-classify-character'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/** @type {Construct} */\nexport const attention = {\n name: 'attention',\n tokenize: tokenizeAttention,\n resolveAll: resolveAllAttention\n}\n\n/**\n * Take all events and resolve attention to emphasis or strong.\n *\n * @type {Resolver}\n */\nfunction resolveAllAttention(events, context) {\n let index = -1\n /** @type {number} */\n let open\n /** @type {Token} */\n let group\n /** @type {Token} */\n let text\n /** @type {Token} */\n let openingSequence\n /** @type {Token} */\n let closingSequence\n /** @type {number} */\n let use\n /** @type {Array} */\n let nextEvents\n /** @type {number} */\n let offset\n\n // Walk through all events.\n //\n // Note: performance of this is fine on an mb of normal markdown, but it’s\n // a bottleneck for malicious stuff.\n while (++index < events.length) {\n // Find a token that can close.\n if (\n events[index][0] === 'enter' &&\n events[index][1].type === 'attentionSequence' &&\n events[index][1]._close\n ) {\n open = index\n\n // Now walk back to find an opener.\n while (open--) {\n // Find a token that can open the closer.\n if (\n events[open][0] === 'exit' &&\n events[open][1].type === 'attentionSequence' &&\n events[open][1]._open &&\n // If the markers are the same:\n context.sliceSerialize(events[open][1]).charCodeAt(0) ===\n context.sliceSerialize(events[index][1]).charCodeAt(0)\n ) {\n // If the opening can close or the closing can open,\n // and the close size *is not* a multiple of three,\n // but the sum of the opening and closing size *is* multiple of three,\n // then don’t match.\n if (\n (events[open][1]._close || events[index][1]._open) &&\n (events[index][1].end.offset - events[index][1].start.offset) % 3 &&\n !(\n (events[open][1].end.offset -\n events[open][1].start.offset +\n events[index][1].end.offset -\n events[index][1].start.offset) %\n 3\n )\n ) {\n continue\n }\n\n // Number of markers to use from 
the sequence.\n use =\n events[open][1].end.offset - events[open][1].start.offset > 1 &&\n events[index][1].end.offset - events[index][1].start.offset > 1\n ? 2\n : 1\n const start = Object.assign({}, events[open][1].end)\n const end = Object.assign({}, events[index][1].start)\n movePoint(start, -use)\n movePoint(end, use)\n openingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start,\n end: Object.assign({}, events[open][1].end)\n }\n closingSequence = {\n type: use > 1 ? 'strongSequence' : 'emphasisSequence',\n start: Object.assign({}, events[index][1].start),\n end\n }\n text = {\n type: use > 1 ? 'strongText' : 'emphasisText',\n start: Object.assign({}, events[open][1].end),\n end: Object.assign({}, events[index][1].start)\n }\n group = {\n type: use > 1 ? 'strong' : 'emphasis',\n start: Object.assign({}, openingSequence.start),\n end: Object.assign({}, closingSequence.end)\n }\n events[open][1].end = Object.assign({}, openingSequence.start)\n events[index][1].start = Object.assign({}, closingSequence.end)\n nextEvents = []\n\n // If there are more markers in the opening, add them before.\n if (events[open][1].end.offset - events[open][1].start.offset) {\n nextEvents = push(nextEvents, [\n ['enter', events[open][1], context],\n ['exit', events[open][1], context]\n ])\n }\n\n // Opening.\n nextEvents = push(nextEvents, [\n ['enter', group, context],\n ['enter', openingSequence, context],\n ['exit', openingSequence, context],\n ['enter', text, context]\n ])\n\n // Always populated by defaults.\n\n // Between.\n nextEvents = push(\n nextEvents,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + 1, index),\n context\n )\n )\n\n // Closing.\n nextEvents = push(nextEvents, [\n ['exit', text, context],\n ['enter', closingSequence, context],\n ['exit', closingSequence, context],\n ['exit', group, context]\n ])\n\n // If there are more markers in the closing, add them after.\n if (events[index][1].end.offset - events[index][1].start.offset) {\n offset = 2\n nextEvents = push(nextEvents, [\n ['enter', events[index][1], context],\n ['exit', events[index][1], context]\n ])\n } else {\n offset = 0\n }\n splice(events, open - 1, index - open + 3, nextEvents)\n index = open + nextEvents.length - offset - 2\n break\n }\n }\n }\n }\n\n // Remove remaining sequences.\n index = -1\n while (++index < events.length) {\n if (events[index][1].type === 'attentionSequence') {\n events[index][1].type = 'data'\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAttention(effects, ok) {\n const attentionMarkers = this.parser.constructs.attentionMarkers.null\n const previous = this.previous\n const before = classifyCharacter(previous)\n\n /** @type {NonNullable} */\n let marker\n return start\n\n /**\n * Before a sequence.\n *\n * ```markdown\n * > | **\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n marker = code\n effects.enter('attentionSequence')\n return inside(code)\n }\n\n /**\n * In a sequence.\n *\n * ```markdown\n * > | **\n * ^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker) {\n effects.consume(code)\n return inside\n }\n const token = effects.exit('attentionSequence')\n\n // To do: next major: move this to resolver, just like `markdown-rs`.\n const after = classifyCharacter(code)\n\n // Always populated by defaults.\n\n const open =\n !after || (after === 2 && before) || attentionMarkers.includes(code)\n const close =\n !before || (before 
=== 2 && after) || attentionMarkers.includes(previous)\n token._open = Boolean(marker === 42 ? open : open && (before || !close))\n token._close = Boolean(marker === 42 ? close : close && (after || !open))\n return ok(code)\n }\n}\n\n/**\n * Move a point a bit.\n *\n * Note: `move` only works inside lines! It’s not possible to move past other\n * chunks (replacement characters, tabs, or line endings).\n *\n * @param {Point} point\n * @param {number} offset\n * @returns {void}\n */\nfunction movePoint(point, offset) {\n point.column += offset\n point.offset += offset\n point._bufferIndex += offset\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n */\n\nimport {\n markdownLineEndingOrSpace,\n unicodePunctuation,\n unicodeWhitespace\n} from 'micromark-util-character'\n/**\n * Classify whether a code represents whitespace, punctuation, or something\n * else.\n *\n * Used for attention (emphasis, strong), whose sequences can open or close\n * based on the class of surrounding characters.\n *\n * > 👉 **Note**: eof (`null`) is seen as whitespace.\n *\n * @param {Code} code\n * Code.\n * @returns {typeof constants.characterGroupWhitespace | typeof constants.characterGroupPunctuation | undefined}\n * Group.\n */\nexport function classifyCharacter(code) {\n if (\n code === null ||\n markdownLineEndingOrSpace(code) ||\n unicodeWhitespace(code)\n ) {\n return 1\n }\n if (unicodePunctuation(code)) {\n return 2\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n asciiAtext,\n asciiControl\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const autolink = {\n name: 'autolink',\n tokenize: tokenizeAutolink\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeAutolink(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of an autolink.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('autolink')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.enter('autolinkProtocol')\n return open\n }\n\n /**\n * After `<`, at protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n return schemeOrEmailAtext\n }\n return emailAtext(code)\n }\n\n /**\n * At second byte of protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeOrEmailAtext(code) {\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) {\n // Count the previous alphabetical from `open` too.\n size = 1\n return schemeInsideOrEmailAtext(code)\n }\n return emailAtext(code)\n }\n\n /**\n * In ambiguous protocol or atext.\n *\n * ```markdown\n * > | ab\n * ^\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function schemeInsideOrEmailAtext(code) {\n if (code === 58) {\n effects.consume(code)\n size = 0\n return urlInside\n }\n\n // ASCII alphanumeric and `+`, `-`, and `.`.\n if (\n (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) &&\n size++ < 32\n ) {\n effects.consume(code)\n return 
schemeInsideOrEmailAtext\n }\n size = 0\n return emailAtext(code)\n }\n\n /**\n * After protocol, in URL.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function urlInside(code) {\n if (code === 62) {\n effects.exit('autolinkProtocol')\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n\n // ASCII control, space, or `<`.\n if (code === null || code === 32 || code === 60 || asciiControl(code)) {\n return nok(code)\n }\n effects.consume(code)\n return urlInside\n }\n\n /**\n * In email atext.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function emailAtext(code) {\n if (code === 64) {\n effects.consume(code)\n return emailAtSignOrDot\n }\n if (asciiAtext(code)) {\n effects.consume(code)\n return emailAtext\n }\n return nok(code)\n }\n\n /**\n * In label, after at-sign or dot.\n *\n * ```markdown\n * > | ab\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function emailAtSignOrDot(code) {\n return asciiAlphanumeric(code) ? emailLabel(code) : nok(code)\n }\n\n /**\n * In label, where `.` and `>` are allowed.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function emailLabel(code) {\n if (code === 46) {\n effects.consume(code)\n size = 0\n return emailAtSignOrDot\n }\n if (code === 62) {\n // Exit, then change the token type.\n effects.exit('autolinkProtocol').type = 'autolinkEmail'\n effects.enter('autolinkMarker')\n effects.consume(code)\n effects.exit('autolinkMarker')\n effects.exit('autolink')\n return ok\n }\n return emailValue(code)\n }\n\n /**\n * In label, where `.` and `>` are *not* allowed.\n *\n * Though, this is also used in `emailLabel` to parse other values.\n *\n * ```markdown\n * > | ab\n * ^\n * ```\n *\n * @type {State}\n */\n function emailValue(code) {\n // ASCII alphanumeric or `-`.\n if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {\n const next = code === 45 ? 
emailValue : emailLabel\n effects.consume(code)\n return next\n }\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const blockQuote = {\n name: 'blockQuote',\n tokenize: tokenizeBlockQuoteStart,\n continuation: {\n tokenize: tokenizeBlockQuoteContinuation\n },\n exit\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlockQuoteStart(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of block quote.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (code === 62) {\n const state = self.containerState\n if (!state.open) {\n effects.enter('blockQuote', {\n _container: true\n })\n state.open = true\n }\n effects.enter('blockQuotePrefix')\n effects.enter('blockQuoteMarker')\n effects.consume(code)\n effects.exit('blockQuoteMarker')\n return after\n }\n return nok(code)\n }\n\n /**\n * After `>`, before optional whitespace.\n *\n * ```markdown\n * > | > a\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownSpace(code)) {\n effects.enter('blockQuotePrefixWhitespace')\n effects.consume(code)\n effects.exit('blockQuotePrefixWhitespace')\n effects.exit('blockQuotePrefix')\n return ok\n }\n effects.exit('blockQuotePrefix')\n return ok(code)\n }\n}\n\n/**\n * Start of block quote continuation.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlockQuoteContinuation(effects, ok, nok) {\n const self = this\n return contStart\n\n /**\n * Start of block quote continuation.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contStart(code) {\n if (markdownSpace(code)) {\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n contBefore,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? 
undefined\n : 4\n )(code)\n }\n return contBefore(code)\n }\n\n /**\n * At `>`, after optional whitespace.\n *\n * Also used to parse the first block quote opening.\n *\n * ```markdown\n * | > a\n * > | > b\n * ^\n * ```\n *\n * @type {State}\n */\n function contBefore(code) {\n return effects.attempt(blockQuote, ok, nok)(code)\n }\n}\n\n/** @type {Exiter} */\nfunction exit(effects) {\n effects.exit('blockQuote')\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {asciiPunctuation} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterEscape = {\n name: 'characterEscape',\n tokenize: tokenizeCharacterEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of character escape.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterEscape')\n effects.enter('escapeMarker')\n effects.consume(code)\n effects.exit('escapeMarker')\n return inside\n }\n\n /**\n * After `\\`, at punctuation.\n *\n * ```markdown\n * > | a\\*b\n * ^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n // ASCII punctuation.\n if (asciiPunctuation(code)) {\n effects.enter('characterEscapeValue')\n effects.consume(code)\n effects.exit('characterEscapeValue')\n effects.exit('characterEscape')\n return ok\n }\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {decodeNamedCharacterReference} from 'decode-named-character-reference'\nimport {\n asciiAlphanumeric,\n asciiDigit,\n asciiHexDigit\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const characterReference = {\n name: 'characterReference',\n tokenize: tokenizeCharacterReference\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCharacterReference(effects, ok, nok) {\n const self = this\n let size = 0\n /** @type {number} */\n let max\n /** @type {(code: Code) => boolean} */\n let test\n return start\n\n /**\n * Start of character reference.\n *\n * ```markdown\n * > | a&b\n * ^\n * > | a{b\n * ^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('characterReference')\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n return open\n }\n\n /**\n * After `&`, at `#` for numeric references or alphanumeric for named\n * references.\n *\n * ```markdown\n * > | a&b\n * ^\n * > | a{b\n * ^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 35) {\n effects.enter('characterReferenceMarkerNumeric')\n effects.consume(code)\n effects.exit('characterReferenceMarkerNumeric')\n return numeric\n }\n effects.enter('characterReferenceValue')\n max = 31\n test = asciiAlphanumeric\n return value(code)\n }\n\n /**\n * After `#`, at `x` for hexadecimals or digit for decimals.\n *\n * ```markdown\n * > | a{b\n * ^\n * > | a b\n * ^\n * 
```\n *\n * @type {State}\n */\n function numeric(code) {\n if (code === 88 || code === 120) {\n effects.enter('characterReferenceMarkerHexadecimal')\n effects.consume(code)\n effects.exit('characterReferenceMarkerHexadecimal')\n effects.enter('characterReferenceValue')\n max = 6\n test = asciiHexDigit\n return value\n }\n effects.enter('characterReferenceValue')\n max = 7\n test = asciiDigit\n return value(code)\n }\n\n /**\n * After markers (``, ``, or `&`), in value, before `;`.\n *\n * The character reference kind defines what and how many characters are\n * allowed.\n *\n * ```markdown\n * > | a&b\n * ^^^\n * > | a{b\n * ^^^\n * > | a b\n * ^\n * ```\n *\n * @type {State}\n */\n function value(code) {\n if (code === 59 && size) {\n const token = effects.exit('characterReferenceValue')\n if (\n test === asciiAlphanumeric &&\n !decodeNamedCharacterReference(self.sliceSerialize(token))\n ) {\n return nok(code)\n }\n\n // To do: `markdown-rs` uses a different name:\n // `CharacterReferenceMarkerSemi`.\n effects.enter('characterReferenceMarker')\n effects.consume(code)\n effects.exit('characterReferenceMarker')\n effects.exit('characterReference')\n return ok\n }\n if (test(code) && size++ < max) {\n effects.consume(code)\n return value\n }\n return nok(code)\n }\n}\n","import {characterEntities} from 'character-entities'\n\nconst own = {}.hasOwnProperty\n\n/**\n * Decode a single character reference (without the `&` or `;`).\n * You probably only need this when you’re building parsers yourself that follow\n * different rules compared to HTML.\n * This is optimized to be tiny in browsers.\n *\n * @param {string} value\n * `notin` (named), `#123` (deci), `#x123` (hexa).\n * @returns {string|false}\n * Decoded reference.\n */\nexport function decodeNamedCharacterReference(value) {\n return own.call(characterEntities, value) ? 
characterEntities[value] : false\n}\n","/**\n * Map of named character references.\n *\n * @type {Record}\n */\nexport const characterEntities = {\n AElig: 'Æ',\n AMP: '&',\n Aacute: 'Á',\n Abreve: 'Ă',\n Acirc: 'Â',\n Acy: 'А',\n Afr: '𝔄',\n Agrave: 'À',\n Alpha: 'Α',\n Amacr: 'Ā',\n And: '⩓',\n Aogon: 'Ą',\n Aopf: '𝔸',\n ApplyFunction: '',\n Aring: 'Å',\n Ascr: '𝒜',\n Assign: '≔',\n Atilde: 'Ã',\n Auml: 'Ä',\n Backslash: '∖',\n Barv: '⫧',\n Barwed: '⌆',\n Bcy: 'Б',\n Because: '∵',\n Bernoullis: 'ℬ',\n Beta: 'Β',\n Bfr: '𝔅',\n Bopf: '𝔹',\n Breve: '˘',\n Bscr: 'ℬ',\n Bumpeq: '≎',\n CHcy: 'Ч',\n COPY: '©',\n Cacute: 'Ć',\n Cap: '⋒',\n CapitalDifferentialD: 'ⅅ',\n Cayleys: 'ℭ',\n Ccaron: 'Č',\n Ccedil: 'Ç',\n Ccirc: 'Ĉ',\n Cconint: '∰',\n Cdot: 'Ċ',\n Cedilla: '¸',\n CenterDot: '·',\n Cfr: 'ℭ',\n Chi: 'Χ',\n CircleDot: '⊙',\n CircleMinus: '⊖',\n CirclePlus: '⊕',\n CircleTimes: '⊗',\n ClockwiseContourIntegral: '∲',\n CloseCurlyDoubleQuote: '”',\n CloseCurlyQuote: '’',\n Colon: '∷',\n Colone: '⩴',\n Congruent: '≡',\n Conint: '∯',\n ContourIntegral: '∮',\n Copf: 'ℂ',\n Coproduct: '∐',\n CounterClockwiseContourIntegral: '∳',\n Cross: '⨯',\n Cscr: '𝒞',\n Cup: '⋓',\n CupCap: '≍',\n DD: 'ⅅ',\n DDotrahd: '⤑',\n DJcy: 'Ђ',\n DScy: 'Ѕ',\n DZcy: 'Џ',\n Dagger: '‡',\n Darr: '↡',\n Dashv: '⫤',\n Dcaron: 'Ď',\n Dcy: 'Д',\n Del: '∇',\n Delta: 'Δ',\n Dfr: '𝔇',\n DiacriticalAcute: '´',\n DiacriticalDot: '˙',\n DiacriticalDoubleAcute: '˝',\n DiacriticalGrave: '`',\n DiacriticalTilde: '˜',\n Diamond: '⋄',\n DifferentialD: 'ⅆ',\n Dopf: '𝔻',\n Dot: '¨',\n DotDot: '⃜',\n DotEqual: '≐',\n DoubleContourIntegral: '∯',\n DoubleDot: '¨',\n DoubleDownArrow: '⇓',\n DoubleLeftArrow: '⇐',\n DoubleLeftRightArrow: '⇔',\n DoubleLeftTee: '⫤',\n DoubleLongLeftArrow: '⟸',\n DoubleLongLeftRightArrow: '⟺',\n DoubleLongRightArrow: '⟹',\n DoubleRightArrow: '⇒',\n DoubleRightTee: '⊨',\n DoubleUpArrow: '⇑',\n DoubleUpDownArrow: '⇕',\n DoubleVerticalBar: '∥',\n DownArrow: '↓',\n DownArrowBar: '⤓',\n DownArrowUpArrow: '⇵',\n DownBreve: '̑',\n DownLeftRightVector: '⥐',\n DownLeftTeeVector: '⥞',\n DownLeftVector: '↽',\n DownLeftVectorBar: '⥖',\n DownRightTeeVector: '⥟',\n DownRightVector: '⇁',\n DownRightVectorBar: '⥗',\n DownTee: '⊤',\n DownTeeArrow: '↧',\n Downarrow: '⇓',\n Dscr: '𝒟',\n Dstrok: 'Đ',\n ENG: 'Ŋ',\n ETH: 'Ð',\n Eacute: 'É',\n Ecaron: 'Ě',\n Ecirc: 'Ê',\n Ecy: 'Э',\n Edot: 'Ė',\n Efr: '𝔈',\n Egrave: 'È',\n Element: '∈',\n Emacr: 'Ē',\n EmptySmallSquare: '◻',\n EmptyVerySmallSquare: '▫',\n Eogon: 'Ę',\n Eopf: '𝔼',\n Epsilon: 'Ε',\n Equal: '⩵',\n EqualTilde: '≂',\n Equilibrium: '⇌',\n Escr: 'ℰ',\n Esim: '⩳',\n Eta: 'Η',\n Euml: 'Ë',\n Exists: '∃',\n ExponentialE: 'ⅇ',\n Fcy: 'Ф',\n Ffr: '𝔉',\n FilledSmallSquare: '◼',\n FilledVerySmallSquare: '▪',\n Fopf: '𝔽',\n ForAll: '∀',\n Fouriertrf: 'ℱ',\n Fscr: 'ℱ',\n GJcy: 'Ѓ',\n GT: '>',\n Gamma: 'Γ',\n Gammad: 'Ϝ',\n Gbreve: 'Ğ',\n Gcedil: 'Ģ',\n Gcirc: 'Ĝ',\n Gcy: 'Г',\n Gdot: 'Ġ',\n Gfr: '𝔊',\n Gg: '⋙',\n Gopf: '𝔾',\n GreaterEqual: '≥',\n GreaterEqualLess: '⋛',\n GreaterFullEqual: '≧',\n GreaterGreater: '⪢',\n GreaterLess: '≷',\n GreaterSlantEqual: '⩾',\n GreaterTilde: '≳',\n Gscr: '𝒢',\n Gt: '≫',\n HARDcy: 'Ъ',\n Hacek: 'ˇ',\n Hat: '^',\n Hcirc: 'Ĥ',\n Hfr: 'ℌ',\n HilbertSpace: 'ℋ',\n Hopf: 'ℍ',\n HorizontalLine: '─',\n Hscr: 'ℋ',\n Hstrok: 'Ħ',\n HumpDownHump: '≎',\n HumpEqual: '≏',\n IEcy: 'Е',\n IJlig: 'IJ',\n IOcy: 'Ё',\n Iacute: 'Í',\n Icirc: 'Î',\n Icy: 'И',\n Idot: 'İ',\n Ifr: 'ℑ',\n Igrave: 'Ì',\n Im: 'ℑ',\n Imacr: 'Ī',\n ImaginaryI: 'ⅈ',\n Implies: '⇒',\n Int: '∬',\n 
Integral: '∫',\n Intersection: '⋂',\n InvisibleComma: '',\n InvisibleTimes: '',\n Iogon: 'Į',\n Iopf: '𝕀',\n Iota: 'Ι',\n Iscr: 'ℐ',\n Itilde: 'Ĩ',\n Iukcy: 'І',\n Iuml: 'Ï',\n Jcirc: 'Ĵ',\n Jcy: 'Й',\n Jfr: '𝔍',\n Jopf: '𝕁',\n Jscr: '𝒥',\n Jsercy: 'Ј',\n Jukcy: 'Є',\n KHcy: 'Х',\n KJcy: 'Ќ',\n Kappa: 'Κ',\n Kcedil: 'Ķ',\n Kcy: 'К',\n Kfr: '𝔎',\n Kopf: '𝕂',\n Kscr: '𝒦',\n LJcy: 'Љ',\n LT: '<',\n Lacute: 'Ĺ',\n Lambda: 'Λ',\n Lang: '⟪',\n Laplacetrf: 'ℒ',\n Larr: '↞',\n Lcaron: 'Ľ',\n Lcedil: 'Ļ',\n Lcy: 'Л',\n LeftAngleBracket: '⟨',\n LeftArrow: '←',\n LeftArrowBar: '⇤',\n LeftArrowRightArrow: '⇆',\n LeftCeiling: '⌈',\n LeftDoubleBracket: '⟦',\n LeftDownTeeVector: '⥡',\n LeftDownVector: '⇃',\n LeftDownVectorBar: '⥙',\n LeftFloor: '⌊',\n LeftRightArrow: '↔',\n LeftRightVector: '⥎',\n LeftTee: '⊣',\n LeftTeeArrow: '↤',\n LeftTeeVector: '⥚',\n LeftTriangle: '⊲',\n LeftTriangleBar: '⧏',\n LeftTriangleEqual: '⊴',\n LeftUpDownVector: '⥑',\n LeftUpTeeVector: '⥠',\n LeftUpVector: '↿',\n LeftUpVectorBar: '⥘',\n LeftVector: '↼',\n LeftVectorBar: '⥒',\n Leftarrow: '⇐',\n Leftrightarrow: '⇔',\n LessEqualGreater: '⋚',\n LessFullEqual: '≦',\n LessGreater: '≶',\n LessLess: '⪡',\n LessSlantEqual: '⩽',\n LessTilde: '≲',\n Lfr: '𝔏',\n Ll: '⋘',\n Lleftarrow: '⇚',\n Lmidot: 'Ŀ',\n LongLeftArrow: '⟵',\n LongLeftRightArrow: '⟷',\n LongRightArrow: '⟶',\n Longleftarrow: '⟸',\n Longleftrightarrow: '⟺',\n Longrightarrow: '⟹',\n Lopf: '𝕃',\n LowerLeftArrow: '↙',\n LowerRightArrow: '↘',\n Lscr: 'ℒ',\n Lsh: '↰',\n Lstrok: 'Ł',\n Lt: '≪',\n Map: '⤅',\n Mcy: 'М',\n MediumSpace: ' ',\n Mellintrf: 'ℳ',\n Mfr: '𝔐',\n MinusPlus: '∓',\n Mopf: '𝕄',\n Mscr: 'ℳ',\n Mu: 'Μ',\n NJcy: 'Њ',\n Nacute: 'Ń',\n Ncaron: 'Ň',\n Ncedil: 'Ņ',\n Ncy: 'Н',\n NegativeMediumSpace: '',\n NegativeThickSpace: '',\n NegativeThinSpace: '',\n NegativeVeryThinSpace: '',\n NestedGreaterGreater: '≫',\n NestedLessLess: '≪',\n NewLine: '\\n',\n Nfr: '𝔑',\n NoBreak: '',\n NonBreakingSpace: ' ',\n Nopf: 'ℕ',\n Not: '⫬',\n NotCongruent: '≢',\n NotCupCap: '≭',\n NotDoubleVerticalBar: '∦',\n NotElement: '∉',\n NotEqual: '≠',\n NotEqualTilde: '≂̸',\n NotExists: '∄',\n NotGreater: '≯',\n NotGreaterEqual: '≱',\n NotGreaterFullEqual: '≧̸',\n NotGreaterGreater: '≫̸',\n NotGreaterLess: '≹',\n NotGreaterSlantEqual: '⩾̸',\n NotGreaterTilde: '≵',\n NotHumpDownHump: '≎̸',\n NotHumpEqual: '≏̸',\n NotLeftTriangle: '⋪',\n NotLeftTriangleBar: '⧏̸',\n NotLeftTriangleEqual: '⋬',\n NotLess: '≮',\n NotLessEqual: '≰',\n NotLessGreater: '≸',\n NotLessLess: '≪̸',\n NotLessSlantEqual: '⩽̸',\n NotLessTilde: '≴',\n NotNestedGreaterGreater: '⪢̸',\n NotNestedLessLess: '⪡̸',\n NotPrecedes: '⊀',\n NotPrecedesEqual: '⪯̸',\n NotPrecedesSlantEqual: '⋠',\n NotReverseElement: '∌',\n NotRightTriangle: '⋫',\n NotRightTriangleBar: '⧐̸',\n NotRightTriangleEqual: '⋭',\n NotSquareSubset: '⊏̸',\n NotSquareSubsetEqual: '⋢',\n NotSquareSuperset: '⊐̸',\n NotSquareSupersetEqual: '⋣',\n NotSubset: '⊂⃒',\n NotSubsetEqual: '⊈',\n NotSucceeds: '⊁',\n NotSucceedsEqual: '⪰̸',\n NotSucceedsSlantEqual: '⋡',\n NotSucceedsTilde: '≿̸',\n NotSuperset: '⊃⃒',\n NotSupersetEqual: '⊉',\n NotTilde: '≁',\n NotTildeEqual: '≄',\n NotTildeFullEqual: '≇',\n NotTildeTilde: '≉',\n NotVerticalBar: '∤',\n Nscr: '𝒩',\n Ntilde: 'Ñ',\n Nu: 'Ν',\n OElig: 'Œ',\n Oacute: 'Ó',\n Ocirc: 'Ô',\n Ocy: 'О',\n Odblac: 'Ő',\n Ofr: '𝔒',\n Ograve: 'Ò',\n Omacr: 'Ō',\n Omega: 'Ω',\n Omicron: 'Ο',\n Oopf: '𝕆',\n OpenCurlyDoubleQuote: '“',\n OpenCurlyQuote: '‘',\n Or: '⩔',\n Oscr: '𝒪',\n Oslash: 'Ø',\n Otilde: 'Õ',\n Otimes: '⨷',\n Ouml: 'Ö',\n 
OverBar: '‾',\n OverBrace: '⏞',\n OverBracket: '⎴',\n OverParenthesis: '⏜',\n PartialD: '∂',\n Pcy: 'П',\n Pfr: '𝔓',\n Phi: 'Φ',\n Pi: 'Π',\n PlusMinus: '±',\n Poincareplane: 'ℌ',\n Popf: 'ℙ',\n Pr: '⪻',\n Precedes: '≺',\n PrecedesEqual: '⪯',\n PrecedesSlantEqual: '≼',\n PrecedesTilde: '≾',\n Prime: '″',\n Product: '∏',\n Proportion: '∷',\n Proportional: '∝',\n Pscr: '𝒫',\n Psi: 'Ψ',\n QUOT: '\"',\n Qfr: '𝔔',\n Qopf: 'ℚ',\n Qscr: '𝒬',\n RBarr: '⤐',\n REG: '®',\n Racute: 'Ŕ',\n Rang: '⟫',\n Rarr: '↠',\n Rarrtl: '⤖',\n Rcaron: 'Ř',\n Rcedil: 'Ŗ',\n Rcy: 'Р',\n Re: 'ℜ',\n ReverseElement: '∋',\n ReverseEquilibrium: '⇋',\n ReverseUpEquilibrium: '⥯',\n Rfr: 'ℜ',\n Rho: 'Ρ',\n RightAngleBracket: '⟩',\n RightArrow: '→',\n RightArrowBar: '⇥',\n RightArrowLeftArrow: '⇄',\n RightCeiling: '⌉',\n RightDoubleBracket: '⟧',\n RightDownTeeVector: '⥝',\n RightDownVector: '⇂',\n RightDownVectorBar: '⥕',\n RightFloor: '⌋',\n RightTee: '⊢',\n RightTeeArrow: '↦',\n RightTeeVector: '⥛',\n RightTriangle: '⊳',\n RightTriangleBar: '⧐',\n RightTriangleEqual: '⊵',\n RightUpDownVector: '⥏',\n RightUpTeeVector: '⥜',\n RightUpVector: '↾',\n RightUpVectorBar: '⥔',\n RightVector: '⇀',\n RightVectorBar: '⥓',\n Rightarrow: '⇒',\n Ropf: 'ℝ',\n RoundImplies: '⥰',\n Rrightarrow: '⇛',\n Rscr: 'ℛ',\n Rsh: '↱',\n RuleDelayed: '⧴',\n SHCHcy: 'Щ',\n SHcy: 'Ш',\n SOFTcy: 'Ь',\n Sacute: 'Ś',\n Sc: '⪼',\n Scaron: 'Š',\n Scedil: 'Ş',\n Scirc: 'Ŝ',\n Scy: 'С',\n Sfr: '𝔖',\n ShortDownArrow: '↓',\n ShortLeftArrow: '←',\n ShortRightArrow: '→',\n ShortUpArrow: '↑',\n Sigma: 'Σ',\n SmallCircle: '∘',\n Sopf: '𝕊',\n Sqrt: '√',\n Square: '□',\n SquareIntersection: '⊓',\n SquareSubset: '⊏',\n SquareSubsetEqual: '⊑',\n SquareSuperset: '⊐',\n SquareSupersetEqual: '⊒',\n SquareUnion: '⊔',\n Sscr: '𝒮',\n Star: '⋆',\n Sub: '⋐',\n Subset: '⋐',\n SubsetEqual: '⊆',\n Succeeds: '≻',\n SucceedsEqual: '⪰',\n SucceedsSlantEqual: '≽',\n SucceedsTilde: '≿',\n SuchThat: '∋',\n Sum: '∑',\n Sup: '⋑',\n Superset: '⊃',\n SupersetEqual: '⊇',\n Supset: '⋑',\n THORN: 'Þ',\n TRADE: '™',\n TSHcy: 'Ћ',\n TScy: 'Ц',\n Tab: '\\t',\n Tau: 'Τ',\n Tcaron: 'Ť',\n Tcedil: 'Ţ',\n Tcy: 'Т',\n Tfr: '𝔗',\n Therefore: '∴',\n Theta: 'Θ',\n ThickSpace: ' ',\n ThinSpace: ' ',\n Tilde: '∼',\n TildeEqual: '≃',\n TildeFullEqual: '≅',\n TildeTilde: '≈',\n Topf: '𝕋',\n TripleDot: '⃛',\n Tscr: '𝒯',\n Tstrok: 'Ŧ',\n Uacute: 'Ú',\n Uarr: '↟',\n Uarrocir: '⥉',\n Ubrcy: 'Ў',\n Ubreve: 'Ŭ',\n Ucirc: 'Û',\n Ucy: 'У',\n Udblac: 'Ű',\n Ufr: '𝔘',\n Ugrave: 'Ù',\n Umacr: 'Ū',\n UnderBar: '_',\n UnderBrace: '⏟',\n UnderBracket: '⎵',\n UnderParenthesis: '⏝',\n Union: '⋃',\n UnionPlus: '⊎',\n Uogon: 'Ų',\n Uopf: '𝕌',\n UpArrow: '↑',\n UpArrowBar: '⤒',\n UpArrowDownArrow: '⇅',\n UpDownArrow: '↕',\n UpEquilibrium: '⥮',\n UpTee: '⊥',\n UpTeeArrow: '↥',\n Uparrow: '⇑',\n Updownarrow: '⇕',\n UpperLeftArrow: '↖',\n UpperRightArrow: '↗',\n Upsi: 'ϒ',\n Upsilon: 'Υ',\n Uring: 'Ů',\n Uscr: '𝒰',\n Utilde: 'Ũ',\n Uuml: 'Ü',\n VDash: '⊫',\n Vbar: '⫫',\n Vcy: 'В',\n Vdash: '⊩',\n Vdashl: '⫦',\n Vee: '⋁',\n Verbar: '‖',\n Vert: '‖',\n VerticalBar: '∣',\n VerticalLine: '|',\n VerticalSeparator: '❘',\n VerticalTilde: '≀',\n VeryThinSpace: ' ',\n Vfr: '𝔙',\n Vopf: '𝕍',\n Vscr: '𝒱',\n Vvdash: '⊪',\n Wcirc: 'Ŵ',\n Wedge: '⋀',\n Wfr: '𝔚',\n Wopf: '𝕎',\n Wscr: '𝒲',\n Xfr: '𝔛',\n Xi: 'Ξ',\n Xopf: '𝕏',\n Xscr: '𝒳',\n YAcy: 'Я',\n YIcy: 'Ї',\n YUcy: 'Ю',\n Yacute: 'Ý',\n Ycirc: 'Ŷ',\n Ycy: 'Ы',\n Yfr: '𝔜',\n Yopf: '𝕐',\n Yscr: '𝒴',\n Yuml: 'Ÿ',\n ZHcy: 'Ж',\n Zacute: 'Ź',\n Zcaron: 'Ž',\n Zcy: 'З',\n Zdot: 'Ż',\n 
ZeroWidthSpace: '',\n Zeta: 'Ζ',\n Zfr: 'ℨ',\n Zopf: 'ℤ',\n Zscr: '𝒵',\n aacute: 'á',\n abreve: 'ă',\n ac: '∾',\n acE: '∾̳',\n acd: '∿',\n acirc: 'â',\n acute: '´',\n acy: 'а',\n aelig: 'æ',\n af: '',\n afr: '𝔞',\n agrave: 'à',\n alefsym: 'ℵ',\n aleph: 'ℵ',\n alpha: 'α',\n amacr: 'ā',\n amalg: '⨿',\n amp: '&',\n and: '∧',\n andand: '⩕',\n andd: '⩜',\n andslope: '⩘',\n andv: '⩚',\n ang: '∠',\n ange: '⦤',\n angle: '∠',\n angmsd: '∡',\n angmsdaa: '⦨',\n angmsdab: '⦩',\n angmsdac: '⦪',\n angmsdad: '⦫',\n angmsdae: '⦬',\n angmsdaf: '⦭',\n angmsdag: '⦮',\n angmsdah: '⦯',\n angrt: '∟',\n angrtvb: '⊾',\n angrtvbd: '⦝',\n angsph: '∢',\n angst: 'Å',\n angzarr: '⍼',\n aogon: 'ą',\n aopf: '𝕒',\n ap: '≈',\n apE: '⩰',\n apacir: '⩯',\n ape: '≊',\n apid: '≋',\n apos: \"'\",\n approx: '≈',\n approxeq: '≊',\n aring: 'å',\n ascr: '𝒶',\n ast: '*',\n asymp: '≈',\n asympeq: '≍',\n atilde: 'ã',\n auml: 'ä',\n awconint: '∳',\n awint: '⨑',\n bNot: '⫭',\n backcong: '≌',\n backepsilon: '϶',\n backprime: '‵',\n backsim: '∽',\n backsimeq: '⋍',\n barvee: '⊽',\n barwed: '⌅',\n barwedge: '⌅',\n bbrk: '⎵',\n bbrktbrk: '⎶',\n bcong: '≌',\n bcy: 'б',\n bdquo: '„',\n becaus: '∵',\n because: '∵',\n bemptyv: '⦰',\n bepsi: '϶',\n bernou: 'ℬ',\n beta: 'β',\n beth: 'ℶ',\n between: '≬',\n bfr: '𝔟',\n bigcap: '⋂',\n bigcirc: '◯',\n bigcup: '⋃',\n bigodot: '⨀',\n bigoplus: '⨁',\n bigotimes: '⨂',\n bigsqcup: '⨆',\n bigstar: '★',\n bigtriangledown: '▽',\n bigtriangleup: '△',\n biguplus: '⨄',\n bigvee: '⋁',\n bigwedge: '⋀',\n bkarow: '⤍',\n blacklozenge: '⧫',\n blacksquare: '▪',\n blacktriangle: '▴',\n blacktriangledown: '▾',\n blacktriangleleft: '◂',\n blacktriangleright: '▸',\n blank: '␣',\n blk12: '▒',\n blk14: '░',\n blk34: '▓',\n block: '█',\n bne: '=⃥',\n bnequiv: '≡⃥',\n bnot: '⌐',\n bopf: '𝕓',\n bot: '⊥',\n bottom: '⊥',\n bowtie: '⋈',\n boxDL: '╗',\n boxDR: '╔',\n boxDl: '╖',\n boxDr: '╓',\n boxH: '═',\n boxHD: '╦',\n boxHU: '╩',\n boxHd: '╤',\n boxHu: '╧',\n boxUL: '╝',\n boxUR: '╚',\n boxUl: '╜',\n boxUr: '╙',\n boxV: '║',\n boxVH: '╬',\n boxVL: '╣',\n boxVR: '╠',\n boxVh: '╫',\n boxVl: '╢',\n boxVr: '╟',\n boxbox: '⧉',\n boxdL: '╕',\n boxdR: '╒',\n boxdl: '┐',\n boxdr: '┌',\n boxh: '─',\n boxhD: '╥',\n boxhU: '╨',\n boxhd: '┬',\n boxhu: '┴',\n boxminus: '⊟',\n boxplus: '⊞',\n boxtimes: '⊠',\n boxuL: '╛',\n boxuR: '╘',\n boxul: '┘',\n boxur: '└',\n boxv: '│',\n boxvH: '╪',\n boxvL: '╡',\n boxvR: '╞',\n boxvh: '┼',\n boxvl: '┤',\n boxvr: '├',\n bprime: '‵',\n breve: '˘',\n brvbar: '¦',\n bscr: '𝒷',\n bsemi: '⁏',\n bsim: '∽',\n bsime: '⋍',\n bsol: '\\\\',\n bsolb: '⧅',\n bsolhsub: '⟈',\n bull: '•',\n bullet: '•',\n bump: '≎',\n bumpE: '⪮',\n bumpe: '≏',\n bumpeq: '≏',\n cacute: 'ć',\n cap: '∩',\n capand: '⩄',\n capbrcup: '⩉',\n capcap: '⩋',\n capcup: '⩇',\n capdot: '⩀',\n caps: '∩︀',\n caret: '⁁',\n caron: 'ˇ',\n ccaps: '⩍',\n ccaron: 'č',\n ccedil: 'ç',\n ccirc: 'ĉ',\n ccups: '⩌',\n ccupssm: '⩐',\n cdot: 'ċ',\n cedil: '¸',\n cemptyv: '⦲',\n cent: '¢',\n centerdot: '·',\n cfr: '𝔠',\n chcy: 'ч',\n check: '✓',\n checkmark: '✓',\n chi: 'χ',\n cir: '○',\n cirE: '⧃',\n circ: 'ˆ',\n circeq: '≗',\n circlearrowleft: '↺',\n circlearrowright: '↻',\n circledR: '®',\n circledS: 'Ⓢ',\n circledast: '⊛',\n circledcirc: '⊚',\n circleddash: '⊝',\n cire: '≗',\n cirfnint: '⨐',\n cirmid: '⫯',\n cirscir: '⧂',\n clubs: '♣',\n clubsuit: '♣',\n colon: ':',\n colone: '≔',\n coloneq: '≔',\n comma: ',',\n commat: '@',\n comp: '∁',\n compfn: '∘',\n complement: '∁',\n complexes: 'ℂ',\n cong: '≅',\n congdot: '⩭',\n conint: '∮',\n copf: '𝕔',\n coprod: '∐',\n 
copy: '©',\n copysr: '℗',\n crarr: '↵',\n cross: '✗',\n cscr: '𝒸',\n csub: '⫏',\n csube: '⫑',\n csup: '⫐',\n csupe: '⫒',\n ctdot: '⋯',\n cudarrl: '⤸',\n cudarrr: '⤵',\n cuepr: '⋞',\n cuesc: '⋟',\n cularr: '↶',\n cularrp: '⤽',\n cup: '∪',\n cupbrcap: '⩈',\n cupcap: '⩆',\n cupcup: '⩊',\n cupdot: '⊍',\n cupor: '⩅',\n cups: '∪︀',\n curarr: '↷',\n curarrm: '⤼',\n curlyeqprec: '⋞',\n curlyeqsucc: '⋟',\n curlyvee: '⋎',\n curlywedge: '⋏',\n curren: '¤',\n curvearrowleft: '↶',\n curvearrowright: '↷',\n cuvee: '⋎',\n cuwed: '⋏',\n cwconint: '∲',\n cwint: '∱',\n cylcty: '⌭',\n dArr: '⇓',\n dHar: '⥥',\n dagger: '†',\n daleth: 'ℸ',\n darr: '↓',\n dash: '‐',\n dashv: '⊣',\n dbkarow: '⤏',\n dblac: '˝',\n dcaron: 'ď',\n dcy: 'д',\n dd: 'ⅆ',\n ddagger: '‡',\n ddarr: '⇊',\n ddotseq: '⩷',\n deg: '°',\n delta: 'δ',\n demptyv: '⦱',\n dfisht: '⥿',\n dfr: '𝔡',\n dharl: '⇃',\n dharr: '⇂',\n diam: '⋄',\n diamond: '⋄',\n diamondsuit: '♦',\n diams: '♦',\n die: '¨',\n digamma: 'ϝ',\n disin: '⋲',\n div: '÷',\n divide: '÷',\n divideontimes: '⋇',\n divonx: '⋇',\n djcy: 'ђ',\n dlcorn: '⌞',\n dlcrop: '⌍',\n dollar: '$',\n dopf: '𝕕',\n dot: '˙',\n doteq: '≐',\n doteqdot: '≑',\n dotminus: '∸',\n dotplus: '∔',\n dotsquare: '⊡',\n doublebarwedge: '⌆',\n downarrow: '↓',\n downdownarrows: '⇊',\n downharpoonleft: '⇃',\n downharpoonright: '⇂',\n drbkarow: '⤐',\n drcorn: '⌟',\n drcrop: '⌌',\n dscr: '𝒹',\n dscy: 'ѕ',\n dsol: '⧶',\n dstrok: 'đ',\n dtdot: '⋱',\n dtri: '▿',\n dtrif: '▾',\n duarr: '⇵',\n duhar: '⥯',\n dwangle: '⦦',\n dzcy: 'џ',\n dzigrarr: '⟿',\n eDDot: '⩷',\n eDot: '≑',\n eacute: 'é',\n easter: '⩮',\n ecaron: 'ě',\n ecir: '≖',\n ecirc: 'ê',\n ecolon: '≕',\n ecy: 'э',\n edot: 'ė',\n ee: 'ⅇ',\n efDot: '≒',\n efr: '𝔢',\n eg: '⪚',\n egrave: 'è',\n egs: '⪖',\n egsdot: '⪘',\n el: '⪙',\n elinters: '⏧',\n ell: 'ℓ',\n els: '⪕',\n elsdot: '⪗',\n emacr: 'ē',\n empty: '∅',\n emptyset: '∅',\n emptyv: '∅',\n emsp13: ' ',\n emsp14: ' ',\n emsp: ' ',\n eng: 'ŋ',\n ensp: ' ',\n eogon: 'ę',\n eopf: '𝕖',\n epar: '⋕',\n eparsl: '⧣',\n eplus: '⩱',\n epsi: 'ε',\n epsilon: 'ε',\n epsiv: 'ϵ',\n eqcirc: '≖',\n eqcolon: '≕',\n eqsim: '≂',\n eqslantgtr: '⪖',\n eqslantless: '⪕',\n equals: '=',\n equest: '≟',\n equiv: '≡',\n equivDD: '⩸',\n eqvparsl: '⧥',\n erDot: '≓',\n erarr: '⥱',\n escr: 'ℯ',\n esdot: '≐',\n esim: '≂',\n eta: 'η',\n eth: 'ð',\n euml: 'ë',\n euro: '€',\n excl: '!',\n exist: '∃',\n expectation: 'ℰ',\n exponentiale: 'ⅇ',\n fallingdotseq: '≒',\n fcy: 'ф',\n female: '♀',\n ffilig: 'ffi',\n fflig: 'ff',\n ffllig: 'ffl',\n ffr: '𝔣',\n filig: 'fi',\n fjlig: 'fj',\n flat: '♭',\n fllig: 'fl',\n fltns: '▱',\n fnof: 'ƒ',\n fopf: '𝕗',\n forall: '∀',\n fork: '⋔',\n forkv: '⫙',\n fpartint: '⨍',\n frac12: '½',\n frac13: '⅓',\n frac14: '¼',\n frac15: '⅕',\n frac16: '⅙',\n frac18: '⅛',\n frac23: '⅔',\n frac25: '⅖',\n frac34: '¾',\n frac35: '⅗',\n frac38: '⅜',\n frac45: '⅘',\n frac56: '⅚',\n frac58: '⅝',\n frac78: '⅞',\n frasl: '⁄',\n frown: '⌢',\n fscr: '𝒻',\n gE: '≧',\n gEl: '⪌',\n gacute: 'ǵ',\n gamma: 'γ',\n gammad: 'ϝ',\n gap: '⪆',\n gbreve: 'ğ',\n gcirc: 'ĝ',\n gcy: 'г',\n gdot: 'ġ',\n ge: '≥',\n gel: '⋛',\n geq: '≥',\n geqq: '≧',\n geqslant: '⩾',\n ges: '⩾',\n gescc: '⪩',\n gesdot: '⪀',\n gesdoto: '⪂',\n gesdotol: '⪄',\n gesl: '⋛︀',\n gesles: '⪔',\n gfr: '𝔤',\n gg: '≫',\n ggg: '⋙',\n gimel: 'ℷ',\n gjcy: 'ѓ',\n gl: '≷',\n glE: '⪒',\n gla: '⪥',\n glj: '⪤',\n gnE: '≩',\n gnap: '⪊',\n gnapprox: '⪊',\n gne: '⪈',\n gneq: '⪈',\n gneqq: '≩',\n gnsim: '⋧',\n gopf: '𝕘',\n grave: '`',\n gscr: 'ℊ',\n gsim: '≳',\n gsime: '⪎',\n gsiml: '⪐',\n gt: 
'>',\n gtcc: '⪧',\n gtcir: '⩺',\n gtdot: '⋗',\n gtlPar: '⦕',\n gtquest: '⩼',\n gtrapprox: '⪆',\n gtrarr: '⥸',\n gtrdot: '⋗',\n gtreqless: '⋛',\n gtreqqless: '⪌',\n gtrless: '≷',\n gtrsim: '≳',\n gvertneqq: '≩︀',\n gvnE: '≩︀',\n hArr: '⇔',\n hairsp: ' ',\n half: '½',\n hamilt: 'ℋ',\n hardcy: 'ъ',\n harr: '↔',\n harrcir: '⥈',\n harrw: '↭',\n hbar: 'ℏ',\n hcirc: 'ĥ',\n hearts: '♥',\n heartsuit: '♥',\n hellip: '…',\n hercon: '⊹',\n hfr: '𝔥',\n hksearow: '⤥',\n hkswarow: '⤦',\n hoarr: '⇿',\n homtht: '∻',\n hookleftarrow: '↩',\n hookrightarrow: '↪',\n hopf: '𝕙',\n horbar: '―',\n hscr: '𝒽',\n hslash: 'ℏ',\n hstrok: 'ħ',\n hybull: '⁃',\n hyphen: '‐',\n iacute: 'í',\n ic: '',\n icirc: 'î',\n icy: 'и',\n iecy: 'е',\n iexcl: '¡',\n iff: '⇔',\n ifr: '𝔦',\n igrave: 'ì',\n ii: 'ⅈ',\n iiiint: '⨌',\n iiint: '∭',\n iinfin: '⧜',\n iiota: '℩',\n ijlig: 'ij',\n imacr: 'ī',\n image: 'ℑ',\n imagline: 'ℐ',\n imagpart: 'ℑ',\n imath: 'ı',\n imof: '⊷',\n imped: 'Ƶ',\n in: '∈',\n incare: '℅',\n infin: '∞',\n infintie: '⧝',\n inodot: 'ı',\n int: '∫',\n intcal: '⊺',\n integers: 'ℤ',\n intercal: '⊺',\n intlarhk: '⨗',\n intprod: '⨼',\n iocy: 'ё',\n iogon: 'į',\n iopf: '𝕚',\n iota: 'ι',\n iprod: '⨼',\n iquest: '¿',\n iscr: '𝒾',\n isin: '∈',\n isinE: '⋹',\n isindot: '⋵',\n isins: '⋴',\n isinsv: '⋳',\n isinv: '∈',\n it: '',\n itilde: 'ĩ',\n iukcy: 'і',\n iuml: 'ï',\n jcirc: 'ĵ',\n jcy: 'й',\n jfr: '𝔧',\n jmath: 'ȷ',\n jopf: '𝕛',\n jscr: '𝒿',\n jsercy: 'ј',\n jukcy: 'є',\n kappa: 'κ',\n kappav: 'ϰ',\n kcedil: 'ķ',\n kcy: 'к',\n kfr: '𝔨',\n kgreen: 'ĸ',\n khcy: 'х',\n kjcy: 'ќ',\n kopf: '𝕜',\n kscr: '𝓀',\n lAarr: '⇚',\n lArr: '⇐',\n lAtail: '⤛',\n lBarr: '⤎',\n lE: '≦',\n lEg: '⪋',\n lHar: '⥢',\n lacute: 'ĺ',\n laemptyv: '⦴',\n lagran: 'ℒ',\n lambda: 'λ',\n lang: '⟨',\n langd: '⦑',\n langle: '⟨',\n lap: '⪅',\n laquo: '«',\n larr: '←',\n larrb: '⇤',\n larrbfs: '⤟',\n larrfs: '⤝',\n larrhk: '↩',\n larrlp: '↫',\n larrpl: '⤹',\n larrsim: '⥳',\n larrtl: '↢',\n lat: '⪫',\n latail: '⤙',\n late: '⪭',\n lates: '⪭︀',\n lbarr: '⤌',\n lbbrk: '❲',\n lbrace: '{',\n lbrack: '[',\n lbrke: '⦋',\n lbrksld: '⦏',\n lbrkslu: '⦍',\n lcaron: 'ľ',\n lcedil: 'ļ',\n lceil: '⌈',\n lcub: '{',\n lcy: 'л',\n ldca: '⤶',\n ldquo: '“',\n ldquor: '„',\n ldrdhar: '⥧',\n ldrushar: '⥋',\n ldsh: '↲',\n le: '≤',\n leftarrow: '←',\n leftarrowtail: '↢',\n leftharpoondown: '↽',\n leftharpoonup: '↼',\n leftleftarrows: '⇇',\n leftrightarrow: '↔',\n leftrightarrows: '⇆',\n leftrightharpoons: '⇋',\n leftrightsquigarrow: '↭',\n leftthreetimes: '⋋',\n leg: '⋚',\n leq: '≤',\n leqq: '≦',\n leqslant: '⩽',\n les: '⩽',\n lescc: '⪨',\n lesdot: '⩿',\n lesdoto: '⪁',\n lesdotor: '⪃',\n lesg: '⋚︀',\n lesges: '⪓',\n lessapprox: '⪅',\n lessdot: '⋖',\n lesseqgtr: '⋚',\n lesseqqgtr: '⪋',\n lessgtr: '≶',\n lesssim: '≲',\n lfisht: '⥼',\n lfloor: '⌊',\n lfr: '𝔩',\n lg: '≶',\n lgE: '⪑',\n lhard: '↽',\n lharu: '↼',\n lharul: '⥪',\n lhblk: '▄',\n ljcy: 'љ',\n ll: '≪',\n llarr: '⇇',\n llcorner: '⌞',\n llhard: '⥫',\n lltri: '◺',\n lmidot: 'ŀ',\n lmoust: '⎰',\n lmoustache: '⎰',\n lnE: '≨',\n lnap: '⪉',\n lnapprox: '⪉',\n lne: '⪇',\n lneq: '⪇',\n lneqq: '≨',\n lnsim: '⋦',\n loang: '⟬',\n loarr: '⇽',\n lobrk: '⟦',\n longleftarrow: '⟵',\n longleftrightarrow: '⟷',\n longmapsto: '⟼',\n longrightarrow: '⟶',\n looparrowleft: '↫',\n looparrowright: '↬',\n lopar: '⦅',\n lopf: '𝕝',\n loplus: '⨭',\n lotimes: '⨴',\n lowast: '∗',\n lowbar: '_',\n loz: '◊',\n lozenge: '◊',\n lozf: '⧫',\n lpar: '(',\n lparlt: '⦓',\n lrarr: '⇆',\n lrcorner: '⌟',\n lrhar: '⇋',\n lrhard: '⥭',\n lrm: '',\n lrtri: '⊿',\n lsaquo: 
'‹',\n lscr: '𝓁',\n lsh: '↰',\n lsim: '≲',\n lsime: '⪍',\n lsimg: '⪏',\n lsqb: '[',\n lsquo: '‘',\n lsquor: '‚',\n lstrok: 'ł',\n lt: '<',\n ltcc: '⪦',\n ltcir: '⩹',\n ltdot: '⋖',\n lthree: '⋋',\n ltimes: '⋉',\n ltlarr: '⥶',\n ltquest: '⩻',\n ltrPar: '⦖',\n ltri: '◃',\n ltrie: '⊴',\n ltrif: '◂',\n lurdshar: '⥊',\n luruhar: '⥦',\n lvertneqq: '≨︀',\n lvnE: '≨︀',\n mDDot: '∺',\n macr: '¯',\n male: '♂',\n malt: '✠',\n maltese: '✠',\n map: '↦',\n mapsto: '↦',\n mapstodown: '↧',\n mapstoleft: '↤',\n mapstoup: '↥',\n marker: '▮',\n mcomma: '⨩',\n mcy: 'м',\n mdash: '—',\n measuredangle: '∡',\n mfr: '𝔪',\n mho: '℧',\n micro: 'µ',\n mid: '∣',\n midast: '*',\n midcir: '⫰',\n middot: '·',\n minus: '−',\n minusb: '⊟',\n minusd: '∸',\n minusdu: '⨪',\n mlcp: '⫛',\n mldr: '…',\n mnplus: '∓',\n models: '⊧',\n mopf: '𝕞',\n mp: '∓',\n mscr: '𝓂',\n mstpos: '∾',\n mu: 'μ',\n multimap: '⊸',\n mumap: '⊸',\n nGg: '⋙̸',\n nGt: '≫⃒',\n nGtv: '≫̸',\n nLeftarrow: '⇍',\n nLeftrightarrow: '⇎',\n nLl: '⋘̸',\n nLt: '≪⃒',\n nLtv: '≪̸',\n nRightarrow: '⇏',\n nVDash: '⊯',\n nVdash: '⊮',\n nabla: '∇',\n nacute: 'ń',\n nang: '∠⃒',\n nap: '≉',\n napE: '⩰̸',\n napid: '≋̸',\n napos: 'ʼn',\n napprox: '≉',\n natur: '♮',\n natural: '♮',\n naturals: 'ℕ',\n nbsp: ' ',\n nbump: '≎̸',\n nbumpe: '≏̸',\n ncap: '⩃',\n ncaron: 'ň',\n ncedil: 'ņ',\n ncong: '≇',\n ncongdot: '⩭̸',\n ncup: '⩂',\n ncy: 'н',\n ndash: '–',\n ne: '≠',\n neArr: '⇗',\n nearhk: '⤤',\n nearr: '↗',\n nearrow: '↗',\n nedot: '≐̸',\n nequiv: '≢',\n nesear: '⤨',\n nesim: '≂̸',\n nexist: '∄',\n nexists: '∄',\n nfr: '𝔫',\n ngE: '≧̸',\n nge: '≱',\n ngeq: '≱',\n ngeqq: '≧̸',\n ngeqslant: '⩾̸',\n nges: '⩾̸',\n ngsim: '≵',\n ngt: '≯',\n ngtr: '≯',\n nhArr: '⇎',\n nharr: '↮',\n nhpar: '⫲',\n ni: '∋',\n nis: '⋼',\n nisd: '⋺',\n niv: '∋',\n njcy: 'њ',\n nlArr: '⇍',\n nlE: '≦̸',\n nlarr: '↚',\n nldr: '‥',\n nle: '≰',\n nleftarrow: '↚',\n nleftrightarrow: '↮',\n nleq: '≰',\n nleqq: '≦̸',\n nleqslant: '⩽̸',\n nles: '⩽̸',\n nless: '≮',\n nlsim: '≴',\n nlt: '≮',\n nltri: '⋪',\n nltrie: '⋬',\n nmid: '∤',\n nopf: '𝕟',\n not: '¬',\n notin: '∉',\n notinE: '⋹̸',\n notindot: '⋵̸',\n notinva: '∉',\n notinvb: '⋷',\n notinvc: '⋶',\n notni: '∌',\n notniva: '∌',\n notnivb: '⋾',\n notnivc: '⋽',\n npar: '∦',\n nparallel: '∦',\n nparsl: '⫽⃥',\n npart: '∂̸',\n npolint: '⨔',\n npr: '⊀',\n nprcue: '⋠',\n npre: '⪯̸',\n nprec: '⊀',\n npreceq: '⪯̸',\n nrArr: '⇏',\n nrarr: '↛',\n nrarrc: '⤳̸',\n nrarrw: '↝̸',\n nrightarrow: '↛',\n nrtri: '⋫',\n nrtrie: '⋭',\n nsc: '⊁',\n nsccue: '⋡',\n nsce: '⪰̸',\n nscr: '𝓃',\n nshortmid: '∤',\n nshortparallel: '∦',\n nsim: '≁',\n nsime: '≄',\n nsimeq: '≄',\n nsmid: '∤',\n nspar: '∦',\n nsqsube: '⋢',\n nsqsupe: '⋣',\n nsub: '⊄',\n nsubE: '⫅̸',\n nsube: '⊈',\n nsubset: '⊂⃒',\n nsubseteq: '⊈',\n nsubseteqq: '⫅̸',\n nsucc: '⊁',\n nsucceq: '⪰̸',\n nsup: '⊅',\n nsupE: '⫆̸',\n nsupe: '⊉',\n nsupset: '⊃⃒',\n nsupseteq: '⊉',\n nsupseteqq: '⫆̸',\n ntgl: '≹',\n ntilde: 'ñ',\n ntlg: '≸',\n ntriangleleft: '⋪',\n ntrianglelefteq: '⋬',\n ntriangleright: '⋫',\n ntrianglerighteq: '⋭',\n nu: 'ν',\n num: '#',\n numero: '№',\n numsp: ' ',\n nvDash: '⊭',\n nvHarr: '⤄',\n nvap: '≍⃒',\n nvdash: '⊬',\n nvge: '≥⃒',\n nvgt: '>⃒',\n nvinfin: '⧞',\n nvlArr: '⤂',\n nvle: '≤⃒',\n nvlt: '<⃒',\n nvltrie: '⊴⃒',\n nvrArr: '⤃',\n nvrtrie: '⊵⃒',\n nvsim: '∼⃒',\n nwArr: '⇖',\n nwarhk: '⤣',\n nwarr: '↖',\n nwarrow: '↖',\n nwnear: '⤧',\n oS: 'Ⓢ',\n oacute: 'ó',\n oast: '⊛',\n ocir: '⊚',\n ocirc: 'ô',\n ocy: 'о',\n odash: '⊝',\n odblac: 'ő',\n odiv: '⨸',\n odot: '⊙',\n odsold: '⦼',\n oelig: 'œ',\n ofcir: 
'⦿',\n ofr: '𝔬',\n ogon: '˛',\n ograve: 'ò',\n ogt: '⧁',\n ohbar: '⦵',\n ohm: 'Ω',\n oint: '∮',\n olarr: '↺',\n olcir: '⦾',\n olcross: '⦻',\n oline: '‾',\n olt: '⧀',\n omacr: 'ō',\n omega: 'ω',\n omicron: 'ο',\n omid: '⦶',\n ominus: '⊖',\n oopf: '𝕠',\n opar: '⦷',\n operp: '⦹',\n oplus: '⊕',\n or: '∨',\n orarr: '↻',\n ord: '⩝',\n order: 'ℴ',\n orderof: 'ℴ',\n ordf: 'ª',\n ordm: 'º',\n origof: '⊶',\n oror: '⩖',\n orslope: '⩗',\n orv: '⩛',\n oscr: 'ℴ',\n oslash: 'ø',\n osol: '⊘',\n otilde: 'õ',\n otimes: '⊗',\n otimesas: '⨶',\n ouml: 'ö',\n ovbar: '⌽',\n par: '∥',\n para: '¶',\n parallel: '∥',\n parsim: '⫳',\n parsl: '⫽',\n part: '∂',\n pcy: 'п',\n percnt: '%',\n period: '.',\n permil: '‰',\n perp: '⊥',\n pertenk: '‱',\n pfr: '𝔭',\n phi: 'φ',\n phiv: 'ϕ',\n phmmat: 'ℳ',\n phone: '☎',\n pi: 'π',\n pitchfork: '⋔',\n piv: 'ϖ',\n planck: 'ℏ',\n planckh: 'ℎ',\n plankv: 'ℏ',\n plus: '+',\n plusacir: '⨣',\n plusb: '⊞',\n pluscir: '⨢',\n plusdo: '∔',\n plusdu: '⨥',\n pluse: '⩲',\n plusmn: '±',\n plussim: '⨦',\n plustwo: '⨧',\n pm: '±',\n pointint: '⨕',\n popf: '𝕡',\n pound: '£',\n pr: '≺',\n prE: '⪳',\n prap: '⪷',\n prcue: '≼',\n pre: '⪯',\n prec: '≺',\n precapprox: '⪷',\n preccurlyeq: '≼',\n preceq: '⪯',\n precnapprox: '⪹',\n precneqq: '⪵',\n precnsim: '⋨',\n precsim: '≾',\n prime: '′',\n primes: 'ℙ',\n prnE: '⪵',\n prnap: '⪹',\n prnsim: '⋨',\n prod: '∏',\n profalar: '⌮',\n profline: '⌒',\n profsurf: '⌓',\n prop: '∝',\n propto: '∝',\n prsim: '≾',\n prurel: '⊰',\n pscr: '𝓅',\n psi: 'ψ',\n puncsp: ' ',\n qfr: '𝔮',\n qint: '⨌',\n qopf: '𝕢',\n qprime: '⁗',\n qscr: '𝓆',\n quaternions: 'ℍ',\n quatint: '⨖',\n quest: '?',\n questeq: '≟',\n quot: '\"',\n rAarr: '⇛',\n rArr: '⇒',\n rAtail: '⤜',\n rBarr: '⤏',\n rHar: '⥤',\n race: '∽̱',\n racute: 'ŕ',\n radic: '√',\n raemptyv: '⦳',\n rang: '⟩',\n rangd: '⦒',\n range: '⦥',\n rangle: '⟩',\n raquo: '»',\n rarr: '→',\n rarrap: '⥵',\n rarrb: '⇥',\n rarrbfs: '⤠',\n rarrc: '⤳',\n rarrfs: '⤞',\n rarrhk: '↪',\n rarrlp: '↬',\n rarrpl: '⥅',\n rarrsim: '⥴',\n rarrtl: '↣',\n rarrw: '↝',\n ratail: '⤚',\n ratio: '∶',\n rationals: 'ℚ',\n rbarr: '⤍',\n rbbrk: '❳',\n rbrace: '}',\n rbrack: ']',\n rbrke: '⦌',\n rbrksld: '⦎',\n rbrkslu: '⦐',\n rcaron: 'ř',\n rcedil: 'ŗ',\n rceil: '⌉',\n rcub: '}',\n rcy: 'р',\n rdca: '⤷',\n rdldhar: '⥩',\n rdquo: '”',\n rdquor: '”',\n rdsh: '↳',\n real: 'ℜ',\n realine: 'ℛ',\n realpart: 'ℜ',\n reals: 'ℝ',\n rect: '▭',\n reg: '®',\n rfisht: '⥽',\n rfloor: '⌋',\n rfr: '𝔯',\n rhard: '⇁',\n rharu: '⇀',\n rharul: '⥬',\n rho: 'ρ',\n rhov: 'ϱ',\n rightarrow: '→',\n rightarrowtail: '↣',\n rightharpoondown: '⇁',\n rightharpoonup: '⇀',\n rightleftarrows: '⇄',\n rightleftharpoons: '⇌',\n rightrightarrows: '⇉',\n rightsquigarrow: '↝',\n rightthreetimes: '⋌',\n ring: '˚',\n risingdotseq: '≓',\n rlarr: '⇄',\n rlhar: '⇌',\n rlm: '',\n rmoust: '⎱',\n rmoustache: '⎱',\n rnmid: '⫮',\n roang: '⟭',\n roarr: '⇾',\n robrk: '⟧',\n ropar: '⦆',\n ropf: '𝕣',\n roplus: '⨮',\n rotimes: '⨵',\n rpar: ')',\n rpargt: '⦔',\n rppolint: '⨒',\n rrarr: '⇉',\n rsaquo: '›',\n rscr: '𝓇',\n rsh: '↱',\n rsqb: ']',\n rsquo: '’',\n rsquor: '’',\n rthree: '⋌',\n rtimes: '⋊',\n rtri: '▹',\n rtrie: '⊵',\n rtrif: '▸',\n rtriltri: '⧎',\n ruluhar: '⥨',\n rx: '℞',\n sacute: 'ś',\n sbquo: '‚',\n sc: '≻',\n scE: '⪴',\n scap: '⪸',\n scaron: 'š',\n sccue: '≽',\n sce: '⪰',\n scedil: 'ş',\n scirc: 'ŝ',\n scnE: '⪶',\n scnap: '⪺',\n scnsim: '⋩',\n scpolint: '⨓',\n scsim: '≿',\n scy: 'с',\n sdot: '⋅',\n sdotb: '⊡',\n sdote: '⩦',\n seArr: '⇘',\n searhk: '⤥',\n searr: '↘',\n searrow: '↘',\n sect: '§',\n semi: 
';',\n seswar: '⤩',\n setminus: '∖',\n setmn: '∖',\n sext: '✶',\n sfr: '𝔰',\n sfrown: '⌢',\n sharp: '♯',\n shchcy: 'щ',\n shcy: 'ш',\n shortmid: '∣',\n shortparallel: '∥',\n shy: '',\n sigma: 'σ',\n sigmaf: 'ς',\n sigmav: 'ς',\n sim: '∼',\n simdot: '⩪',\n sime: '≃',\n simeq: '≃',\n simg: '⪞',\n simgE: '⪠',\n siml: '⪝',\n simlE: '⪟',\n simne: '≆',\n simplus: '⨤',\n simrarr: '⥲',\n slarr: '←',\n smallsetminus: '∖',\n smashp: '⨳',\n smeparsl: '⧤',\n smid: '∣',\n smile: '⌣',\n smt: '⪪',\n smte: '⪬',\n smtes: '⪬︀',\n softcy: 'ь',\n sol: '/',\n solb: '⧄',\n solbar: '⌿',\n sopf: '𝕤',\n spades: '♠',\n spadesuit: '♠',\n spar: '∥',\n sqcap: '⊓',\n sqcaps: '⊓︀',\n sqcup: '⊔',\n sqcups: '⊔︀',\n sqsub: '⊏',\n sqsube: '⊑',\n sqsubset: '⊏',\n sqsubseteq: '⊑',\n sqsup: '⊐',\n sqsupe: '⊒',\n sqsupset: '⊐',\n sqsupseteq: '⊒',\n squ: '□',\n square: '□',\n squarf: '▪',\n squf: '▪',\n srarr: '→',\n sscr: '𝓈',\n ssetmn: '∖',\n ssmile: '⌣',\n sstarf: '⋆',\n star: '☆',\n starf: '★',\n straightepsilon: 'ϵ',\n straightphi: 'ϕ',\n strns: '¯',\n sub: '⊂',\n subE: '⫅',\n subdot: '⪽',\n sube: '⊆',\n subedot: '⫃',\n submult: '⫁',\n subnE: '⫋',\n subne: '⊊',\n subplus: '⪿',\n subrarr: '⥹',\n subset: '⊂',\n subseteq: '⊆',\n subseteqq: '⫅',\n subsetneq: '⊊',\n subsetneqq: '⫋',\n subsim: '⫇',\n subsub: '⫕',\n subsup: '⫓',\n succ: '≻',\n succapprox: '⪸',\n succcurlyeq: '≽',\n succeq: '⪰',\n succnapprox: '⪺',\n succneqq: '⪶',\n succnsim: '⋩',\n succsim: '≿',\n sum: '∑',\n sung: '♪',\n sup1: '¹',\n sup2: '²',\n sup3: '³',\n sup: '⊃',\n supE: '⫆',\n supdot: '⪾',\n supdsub: '⫘',\n supe: '⊇',\n supedot: '⫄',\n suphsol: '⟉',\n suphsub: '⫗',\n suplarr: '⥻',\n supmult: '⫂',\n supnE: '⫌',\n supne: '⊋',\n supplus: '⫀',\n supset: '⊃',\n supseteq: '⊇',\n supseteqq: '⫆',\n supsetneq: '⊋',\n supsetneqq: '⫌',\n supsim: '⫈',\n supsub: '⫔',\n supsup: '⫖',\n swArr: '⇙',\n swarhk: '⤦',\n swarr: '↙',\n swarrow: '↙',\n swnwar: '⤪',\n szlig: 'ß',\n target: '⌖',\n tau: 'τ',\n tbrk: '⎴',\n tcaron: 'ť',\n tcedil: 'ţ',\n tcy: 'т',\n tdot: '⃛',\n telrec: '⌕',\n tfr: '𝔱',\n there4: '∴',\n therefore: '∴',\n theta: 'θ',\n thetasym: 'ϑ',\n thetav: 'ϑ',\n thickapprox: '≈',\n thicksim: '∼',\n thinsp: ' ',\n thkap: '≈',\n thksim: '∼',\n thorn: 'þ',\n tilde: '˜',\n times: '×',\n timesb: '⊠',\n timesbar: '⨱',\n timesd: '⨰',\n tint: '∭',\n toea: '⤨',\n top: '⊤',\n topbot: '⌶',\n topcir: '⫱',\n topf: '𝕥',\n topfork: '⫚',\n tosa: '⤩',\n tprime: '‴',\n trade: '™',\n triangle: '▵',\n triangledown: '▿',\n triangleleft: '◃',\n trianglelefteq: '⊴',\n triangleq: '≜',\n triangleright: '▹',\n trianglerighteq: '⊵',\n tridot: '◬',\n trie: '≜',\n triminus: '⨺',\n triplus: '⨹',\n trisb: '⧍',\n tritime: '⨻',\n trpezium: '⏢',\n tscr: '𝓉',\n tscy: 'ц',\n tshcy: 'ћ',\n tstrok: 'ŧ',\n twixt: '≬',\n twoheadleftarrow: '↞',\n twoheadrightarrow: '↠',\n uArr: '⇑',\n uHar: '⥣',\n uacute: 'ú',\n uarr: '↑',\n ubrcy: 'ў',\n ubreve: 'ŭ',\n ucirc: 'û',\n ucy: 'у',\n udarr: '⇅',\n udblac: 'ű',\n udhar: '⥮',\n ufisht: '⥾',\n ufr: '𝔲',\n ugrave: 'ù',\n uharl: '↿',\n uharr: '↾',\n uhblk: '▀',\n ulcorn: '⌜',\n ulcorner: '⌜',\n ulcrop: '⌏',\n ultri: '◸',\n umacr: 'ū',\n uml: '¨',\n uogon: 'ų',\n uopf: '𝕦',\n uparrow: '↑',\n updownarrow: '↕',\n upharpoonleft: '↿',\n upharpoonright: '↾',\n uplus: '⊎',\n upsi: 'υ',\n upsih: 'ϒ',\n upsilon: 'υ',\n upuparrows: '⇈',\n urcorn: '⌝',\n urcorner: '⌝',\n urcrop: '⌎',\n uring: 'ů',\n urtri: '◹',\n uscr: '𝓊',\n utdot: '⋰',\n utilde: 'ũ',\n utri: '▵',\n utrif: '▴',\n uuarr: '⇈',\n uuml: 'ü',\n uwangle: '⦧',\n vArr: '⇕',\n vBar: '⫨',\n vBarv: '⫩',\n vDash: 
'⊨',\n vangrt: '⦜',\n varepsilon: 'ϵ',\n varkappa: 'ϰ',\n varnothing: '∅',\n varphi: 'ϕ',\n varpi: 'ϖ',\n varpropto: '∝',\n varr: '↕',\n varrho: 'ϱ',\n varsigma: 'ς',\n varsubsetneq: '⊊︀',\n varsubsetneqq: '⫋︀',\n varsupsetneq: '⊋︀',\n varsupsetneqq: '⫌︀',\n vartheta: 'ϑ',\n vartriangleleft: '⊲',\n vartriangleright: '⊳',\n vcy: 'в',\n vdash: '⊢',\n vee: '∨',\n veebar: '⊻',\n veeeq: '≚',\n vellip: '⋮',\n verbar: '|',\n vert: '|',\n vfr: '𝔳',\n vltri: '⊲',\n vnsub: '⊂⃒',\n vnsup: '⊃⃒',\n vopf: '𝕧',\n vprop: '∝',\n vrtri: '⊳',\n vscr: '𝓋',\n vsubnE: '⫋︀',\n vsubne: '⊊︀',\n vsupnE: '⫌︀',\n vsupne: '⊋︀',\n vzigzag: '⦚',\n wcirc: 'ŵ',\n wedbar: '⩟',\n wedge: '∧',\n wedgeq: '≙',\n weierp: '℘',\n wfr: '𝔴',\n wopf: '𝕨',\n wp: '℘',\n wr: '≀',\n wreath: '≀',\n wscr: '𝓌',\n xcap: '⋂',\n xcirc: '◯',\n xcup: '⋃',\n xdtri: '▽',\n xfr: '𝔵',\n xhArr: '⟺',\n xharr: '⟷',\n xi: 'ξ',\n xlArr: '⟸',\n xlarr: '⟵',\n xmap: '⟼',\n xnis: '⋻',\n xodot: '⨀',\n xopf: '𝕩',\n xoplus: '⨁',\n xotime: '⨂',\n xrArr: '⟹',\n xrarr: '⟶',\n xscr: '𝓍',\n xsqcup: '⨆',\n xuplus: '⨄',\n xutri: '△',\n xvee: '⋁',\n xwedge: '⋀',\n yacute: 'ý',\n yacy: 'я',\n ycirc: 'ŷ',\n ycy: 'ы',\n yen: '¥',\n yfr: '𝔶',\n yicy: 'ї',\n yopf: '𝕪',\n yscr: '𝓎',\n yucy: 'ю',\n yuml: 'ÿ',\n zacute: 'ź',\n zcaron: 'ž',\n zcy: 'з',\n zdot: 'ż',\n zeetrf: 'ℨ',\n zeta: 'ζ',\n zfr: '𝔷',\n zhcy: 'ж',\n zigrarr: '⇝',\n zopf: '𝕫',\n zscr: '𝓏',\n zwj: '',\n zwnj: ''\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nconst nonLazyContinuation = {\n tokenize: tokenizeNonLazyContinuation,\n partial: true\n}\n\n/** @type {Construct} */\nexport const codeFenced = {\n name: 'codeFenced',\n tokenize: tokenizeCodeFenced,\n concrete: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeFenced(effects, ok, nok) {\n const self = this\n /** @type {Construct} */\n const closeStart = {\n tokenize: tokenizeCloseStart,\n partial: true\n }\n let initialPrefix = 0\n let sizeOpen = 0\n /** @type {NonNullable} */\n let marker\n return start\n\n /**\n * Start of code.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse whitespace like `markdown-rs`.\n return beforeSequenceOpen(code)\n }\n\n /**\n * In opening fence, after prefix, at sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeSequenceOpen(code) {\n const tail = self.events[self.events.length - 1]\n initialPrefix =\n tail && tail[1].type === 'linePrefix'\n ? 
tail[2].sliceSerialize(tail[1], true).length\n : 0\n marker = code\n effects.enter('codeFenced')\n effects.enter('codeFencedFence')\n effects.enter('codeFencedFenceSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening fence sequence.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === marker) {\n sizeOpen++\n effects.consume(code)\n return sequenceOpen\n }\n if (sizeOpen < 3) {\n return nok(code)\n }\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, infoBefore, 'whitespace')(code)\n : infoBefore(code)\n }\n\n /**\n * In opening fence, after the sequence (and optional whitespace), before info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function infoBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return self.interrupt\n ? ok(code)\n : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFencedFenceInfo')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return info(code)\n }\n\n /**\n * In info.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function info(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return infoBefore(code)\n }\n if (markdownSpace(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceInfo')\n return factorySpace(effects, metaBefore, 'whitespace')(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return info\n }\n\n /**\n * In opening fence, after info and whitespace, before meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function metaBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n return infoBefore(code)\n }\n effects.enter('codeFencedFenceMeta')\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return meta(code)\n }\n\n /**\n * In meta.\n *\n * ```markdown\n * > | ~~~js eval\n * ^\n * | alert(1)\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function meta(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n effects.exit('codeFencedFenceMeta')\n return infoBefore(code)\n }\n if (code === 96 && code === marker) {\n return nok(code)\n }\n effects.consume(code)\n return meta\n }\n\n /**\n * At eol/eof in code, before a non-lazy closing fence or content.\n *\n * ```markdown\n * > | ~~~js\n * ^\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function atNonLazyBreak(code) {\n return effects.attempt(closeStart, after, contentBefore)(code)\n }\n\n /**\n * Before code content, not a closing fence, at eol.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return contentStart\n }\n\n /**\n * Before code content, not a closing fence.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentStart(code) {\n return initialPrefix > 0 && markdownSpace(code)\n ? 
factorySpace(\n effects,\n beforeContentChunk,\n 'linePrefix',\n initialPrefix + 1\n )(code)\n : beforeContentChunk(code)\n }\n\n /**\n * Before code content, after optional prefix.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function beforeContentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return contentChunk(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * | ~~~js\n * > | alert(1)\n * ^^^^^^^^\n * | ~~~\n * ```\n *\n * @type {State}\n */\n function contentChunk(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return beforeContentChunk(code)\n }\n effects.consume(code)\n return contentChunk\n }\n\n /**\n * After code.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n effects.exit('codeFenced')\n return ok(code)\n }\n\n /**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\n function tokenizeCloseStart(effects, ok, nok) {\n let size = 0\n return startBefore\n\n /**\n *\n *\n * @type {State}\n */\n function startBefore(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return start\n }\n\n /**\n * Before closing fence, at optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Always populated by defaults.\n\n // To do: `enter` here or in next state?\n effects.enter('codeFencedFence')\n return markdownSpace(code)\n ? factorySpace(\n effects,\n beforeSequenceClose,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4\n )(code)\n : beforeSequenceClose(code)\n }\n\n /**\n * In closing fence, after optional whitespace, at sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeSequenceClose(code) {\n if (code === marker) {\n effects.enter('codeFencedFenceSequence')\n return sequenceClose(code)\n }\n return nok(code)\n }\n\n /**\n * In closing fence sequence.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n if (code === marker) {\n size++\n effects.consume(code)\n return sequenceClose\n }\n if (size >= sizeOpen) {\n effects.exit('codeFencedFenceSequence')\n return markdownSpace(code)\n ? factorySpace(effects, sequenceCloseAfter, 'whitespace')(code)\n : sequenceCloseAfter(code)\n }\n return nok(code)\n }\n\n /**\n * After closing fence sequence, after optional whitespace.\n *\n * ```markdown\n * | ~~~js\n * | alert(1)\n * > | ~~~\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceCloseAfter(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFencedFence')\n return ok(code)\n }\n return nok(code)\n }\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeNonLazyContinuation(effects, ok, nok) {\n const self = this\n return start\n\n /**\n *\n *\n * @type {State}\n */\n function start(code) {\n if (code === null) {\n return nok(code)\n }\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return lineStart\n }\n\n /**\n *\n *\n * @type {State}\n */\n function lineStart(code) {\n return self.parser.lazy[self.now().line] ? 
nok(code) : ok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeIndented = {\n name: 'codeIndented',\n tokenize: tokenizeCodeIndented\n}\n\n/** @type {Construct} */\nconst furtherStart = {\n tokenize: tokenizeFurtherStart,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeIndented(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of code (indented).\n *\n * > **Parsing note**: it is not needed to check if this first line is a\n * > filled line (that it has a non-whitespace character), because blank lines\n * > are parsed already, so we never run into that.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: manually check if interrupting like `markdown-rs`.\n\n effects.enter('codeIndented')\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? atBreak(code)\n : nok(code)\n }\n\n /**\n * At a break.\n *\n * ```markdown\n * > | aaa\n * ^ ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === null) {\n return after(code)\n }\n if (markdownLineEnding(code)) {\n return effects.attempt(furtherStart, atBreak, after)(code)\n }\n effects.enter('codeFlowValue')\n return inside(code)\n }\n\n /**\n * In code content.\n *\n * ```markdown\n * > | aaa\n * ^^^^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('codeFlowValue')\n return atBreak(code)\n }\n effects.consume(code)\n return inside\n }\n\n /** @type {State} */\n function after(code) {\n effects.exit('codeIndented')\n // To do: allow interrupting like `markdown-rs`.\n // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeFurtherStart(effects, ok, nok) {\n const self = this\n return furtherStart\n\n /**\n * At eol, trying to parse another indent.\n *\n * ```markdown\n * > | aaa\n * ^\n * | bbb\n * ```\n *\n * @type {State}\n */\n function furtherStart(code) {\n // To do: improve `lazy` / `pierce` handling.\n // If this is a lazy line, it can’t be code.\n if (self.parser.lazy[self.now().line]) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return furtherStart\n }\n\n // To do: the code here in `micromark-js` is a bit different from\n // `markdown-rs` because there it can attempt spaces.\n // We can’t yet.\n //\n // To do: use an improved `space_or_tab` function like `markdown-rs`,\n // so that we can drop the next state.\n return factorySpace(effects, afterPrefix, 
'linePrefix', 4 + 1)(code)\n }\n\n /**\n * At start, after 1 or 4 spaces.\n *\n * ```markdown\n * > | aaa\n * ^\n * ```\n *\n * @type {State}\n */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'linePrefix' &&\n tail[2].sliceSerialize(tail[1], true).length >= 4\n ? ok(code)\n : markdownLineEnding(code)\n ? furtherStart(code)\n : nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Previous} Previous\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const codeText = {\n name: 'codeText',\n tokenize: tokenizeCodeText,\n resolve: resolveCodeText,\n previous\n}\n\n// To do: next major: don’t resolve, like `markdown-rs`.\n/** @type {Resolver} */\nfunction resolveCodeText(events) {\n let tailExitIndex = events.length - 4\n let headEnterIndex = 3\n /** @type {number} */\n let index\n /** @type {number | undefined} */\n let enter\n\n // If we start and end with an EOL or a space.\n if (\n (events[headEnterIndex][1].type === 'lineEnding' ||\n events[headEnterIndex][1].type === 'space') &&\n (events[tailExitIndex][1].type === 'lineEnding' ||\n events[tailExitIndex][1].type === 'space')\n ) {\n index = headEnterIndex\n\n // And we have data.\n while (++index < tailExitIndex) {\n if (events[index][1].type === 'codeTextData') {\n // Then we have padding.\n events[headEnterIndex][1].type = 'codeTextPadding'\n events[tailExitIndex][1].type = 'codeTextPadding'\n headEnterIndex += 2\n tailExitIndex -= 2\n break\n }\n }\n }\n\n // Merge adjacent spaces and data.\n index = headEnterIndex - 1\n tailExitIndex++\n while (++index <= tailExitIndex) {\n if (enter === undefined) {\n if (index !== tailExitIndex && events[index][1].type !== 'lineEnding') {\n enter = index\n }\n } else if (\n index === tailExitIndex ||\n events[index][1].type === 'lineEnding'\n ) {\n events[enter][1].type = 'codeTextData'\n if (index !== enter + 2) {\n events[enter][1].end = events[index - 1][1].end\n events.splice(enter + 2, index - enter - 2)\n tailExitIndex -= index - enter - 2\n index = enter + 2\n }\n enter = undefined\n }\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Previous}\n */\nfunction previous(code) {\n // If there is a previous code, there will always be a tail.\n return (\n code !== 96 ||\n this.events[this.events.length - 1][1].type === 'characterEscape'\n )\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeCodeText(effects, ok, nok) {\n const self = this\n let sizeOpen = 0\n /** @type {number} */\n let size\n /** @type {Token} */\n let token\n return start\n\n /**\n * Start of code (text).\n *\n * ```markdown\n * > | `a`\n * ^\n * > | \\`a`\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('codeText')\n effects.enter('codeTextSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 96) {\n effects.consume(code)\n sizeOpen++\n return sequenceOpen\n }\n 
effects.exit('codeTextSequence')\n return between(code)\n }\n\n /**\n * Between something and something else.\n *\n * ```markdown\n * > | `a`\n * ^^\n * ```\n *\n * @type {State}\n */\n function between(code) {\n // EOF.\n if (code === null) {\n return nok(code)\n }\n\n // To do: next major: don’t do spaces in resolve, but when compiling,\n // like `markdown-rs`.\n // Tabs don’t work, and virtual spaces don’t make sense.\n if (code === 32) {\n effects.enter('space')\n effects.consume(code)\n effects.exit('space')\n return between\n }\n\n // Closing fence? Could also be data.\n if (code === 96) {\n token = effects.enter('codeTextSequence')\n size = 0\n return sequenceClose(code)\n }\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return between\n }\n\n // Data.\n effects.enter('codeTextData')\n return data(code)\n }\n\n /**\n * In data.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (\n code === null ||\n code === 32 ||\n code === 96 ||\n markdownLineEnding(code)\n ) {\n effects.exit('codeTextData')\n return between(code)\n }\n effects.consume(code)\n return data\n }\n\n /**\n * In closing sequence.\n *\n * ```markdown\n * > | `a`\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceClose(code) {\n // More.\n if (code === 96) {\n effects.consume(code)\n size++\n return sequenceClose\n }\n\n // Done!\n if (size === sizeOpen) {\n effects.exit('codeTextSequence')\n effects.exit('codeText')\n return ok(code)\n }\n\n // More or less accents: mark as data.\n token.type = 'codeTextData'\n return data(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factorySpace} from 'micromark-factory-space'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\n/** @type {Construct} */\nexport const definition = {\n name: 'definition',\n tokenize: tokenizeDefinition\n}\n\n/** @type {Construct} */\nconst titleBefore = {\n tokenize: tokenizeTitleBefore,\n partial: true\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeDefinition(effects, ok, nok) {\n const self = this\n /** @type {string} */\n let identifier\n return start\n\n /**\n * At start of a definition.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // Do not interrupt paragraphs (but do follow definitions).\n // To do: do `interrupt` the way `markdown-rs` does.\n // To do: parse whitespace the way `markdown-rs` does.\n effects.enter('definition')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `[`.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n // To do: parse whitespace the way `markdown-rs` does.\n\n return factoryLabel.call(\n self,\n effects,\n labelAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 
'definitionLabel',\n 'definitionLabelMarker',\n 'definitionLabelString'\n )(code)\n }\n\n /**\n * After label.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function labelAfter(code) {\n identifier = normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n if (code === 58) {\n effects.enter('definitionMarker')\n effects.consume(code)\n effects.exit('definitionMarker')\n return markerAfter\n }\n return nok(code)\n }\n\n /**\n * After marker.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function markerAfter(code) {\n // Note: whitespace is optional.\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, destinationBefore)(code)\n : destinationBefore(code)\n }\n\n /**\n * Before destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationBefore(code) {\n return factoryDestination(\n effects,\n destinationAfter,\n // Note: we don’t need to reset the way `markdown-rs` does.\n nok,\n 'definitionDestination',\n 'definitionDestinationLiteral',\n 'definitionDestinationLiteralMarker',\n 'definitionDestinationRaw',\n 'definitionDestinationString'\n )(code)\n }\n\n /**\n * After destination.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function destinationAfter(code) {\n return effects.attempt(titleBefore, after, after)(code)\n }\n\n /**\n * After definition.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return markdownSpace(code)\n ? factorySpace(effects, afterWhitespace, 'whitespace')(code)\n : afterWhitespace(code)\n }\n\n /**\n * After definition, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function afterWhitespace(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('definition')\n\n // Note: we don’t care about uniqueness.\n // It’s likely that that doesn’t happen very frequently.\n // It is more likely that it wastes precious time.\n self.parser.defined.push(identifier)\n\n // To do: `markdown-rs` interrupt.\n // // You’d be interrupting.\n // tokenizer.interrupt = true\n return ok(code)\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeTitleBefore(effects, ok, nok) {\n return titleBefore\n\n /**\n * After destination, at whitespace.\n *\n * ```markdown\n * > | [a]: b\n * ^\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, beforeMarker)(code)\n : nok(code)\n }\n\n /**\n * At title.\n *\n * ```markdown\n * | [a]: b\n * > | \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function beforeMarker(code) {\n return factoryTitle(\n effects,\n titleAfter,\n nok,\n 'definitionTitle',\n 'definitionTitleMarker',\n 'definitionTitleString'\n )(code)\n }\n\n /**\n * After title.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfter(code) {\n return markdownSpace(code)\n ? 
factorySpace(effects, titleAfterOptionalWhitespace, 'whitespace')(code)\n : titleAfterOptionalWhitespace(code)\n }\n\n /**\n * After title, after optional whitespace.\n *\n * ```markdown\n * > | [a]: b \"c\"\n * ^\n * ```\n *\n * @type {State}\n */\n function titleAfterOptionalWhitespace(code) {\n return code === null || markdownLineEnding(code) ? ok(code) : nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */\n\nimport {\n asciiControl,\n markdownLineEndingOrSpace,\n markdownLineEnding\n} from 'micromark-util-character'\n/**\n * Parse destinations.\n *\n * ###### Examples\n *\n * ```markdown\n * \n * b>\n * \n * \n * a\n * a\\)b\n * a(b)c\n * a(b)\n * ```\n *\n * @param {Effects} effects\n * Context.\n * @param {State} ok\n * State switched to when successful.\n * @param {State} nok\n * State switched to when unsuccessful.\n * @param {TokenType} type\n * Type for whole (`` or `b`).\n * @param {TokenType} literalType\n * Type when enclosed (``).\n * @param {TokenType} literalMarkerType\n * Type for enclosing (`<` and `>`).\n * @param {TokenType} rawType\n * Type when not enclosed (`b`).\n * @param {TokenType} stringType\n * Type for the value (`a` or `b`).\n * @param {number | undefined} [max=Infinity]\n * Depth of nested parens (inclusive).\n * @returns {State}\n * Start state.\n */ // eslint-disable-next-line max-params\nexport function factoryDestination(\n effects,\n ok,\n nok,\n type,\n literalType,\n literalMarkerType,\n rawType,\n stringType,\n max\n) {\n const limit = max || Number.POSITIVE_INFINITY\n let balance = 0\n return start\n\n /**\n * Start of destination.\n *\n * ```markdown\n * > | \n * ^\n * > | aa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (code === 60) {\n effects.enter(type)\n effects.enter(literalType)\n effects.enter(literalMarkerType)\n effects.consume(code)\n effects.exit(literalMarkerType)\n return enclosedBefore\n }\n\n // ASCII control, space, closing paren.\n if (code === null || code === 32 || code === 41 || asciiControl(code)) {\n return nok(code)\n }\n effects.enter(type)\n effects.enter(rawType)\n effects.enter(stringType)\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return raw(code)\n }\n\n /**\n * After `<`, at an enclosed destination.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function enclosedBefore(code) {\n if (code === 62) {\n effects.enter(literalMarkerType)\n effects.consume(code)\n effects.exit(literalMarkerType)\n effects.exit(literalType)\n effects.exit(type)\n return ok\n }\n effects.enter(stringType)\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return enclosed(code)\n }\n\n /**\n * In enclosed destination.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function enclosed(code) {\n if (code === 62) {\n effects.exit('chunkString')\n effects.exit(stringType)\n return enclosedBefore(code)\n }\n if (code === null || code === 60 || markdownLineEnding(code)) {\n return nok(code)\n }\n effects.consume(code)\n return code === 92 ? 
enclosedEscape : enclosed\n }\n\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function enclosedEscape(code) {\n if (code === 60 || code === 62 || code === 92) {\n effects.consume(code)\n return enclosed\n }\n return enclosed(code)\n }\n\n /**\n * In raw destination.\n *\n * ```markdown\n * > | aa\n * ^\n * ```\n *\n * @type {State}\n */\n function raw(code) {\n if (\n !balance &&\n (code === null || code === 41 || markdownLineEndingOrSpace(code))\n ) {\n effects.exit('chunkString')\n effects.exit(stringType)\n effects.exit(rawType)\n effects.exit(type)\n return ok(code)\n }\n if (balance < limit && code === 40) {\n effects.consume(code)\n balance++\n return raw\n }\n if (code === 41) {\n effects.consume(code)\n balance--\n return raw\n }\n\n // ASCII control (but *not* `\\0`) and space and `(`.\n // Note: in `markdown-rs`, `\\0` exists in codes, in `micromark-js` it\n // doesn’t.\n if (code === null || code === 32 || code === 40 || asciiControl(code)) {\n return nok(code)\n }\n effects.consume(code)\n return code === 92 ? rawEscape : raw\n }\n\n /**\n * After `\\`, at special character.\n *\n * ```markdown\n * > | a\\*a\n * ^\n * ```\n *\n * @type {State}\n */\n function rawEscape(code) {\n if (code === 40 || code === 41 || code === 92) {\n effects.consume(code)\n return raw\n }\n return raw(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */\n\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/**\n * Parse labels.\n *\n * > 👉 **Note**: labels in markdown are capped at 999 characters in the string.\n *\n * ###### Examples\n *\n * ```markdown\n * [a]\n * [a\n * b]\n * [a\\]b]\n * ```\n *\n * @this {TokenizeContext}\n * Tokenize context.\n * @param {Effects} effects\n * Context.\n * @param {State} ok\n * State switched to when successful.\n * @param {State} nok\n * State switched to when unsuccessful.\n * @param {TokenType} type\n * Type of the whole label (`[a]`).\n * @param {TokenType} markerType\n * Type for the markers (`[` and `]`).\n * @param {TokenType} stringType\n * Type for the identifier (`a`).\n * @returns {State}\n * Start state.\n */ // eslint-disable-next-line max-params\nexport function factoryLabel(effects, ok, nok, type, markerType, stringType) {\n const self = this\n let size = 0\n /** @type {boolean} */\n let seen\n return start\n\n /**\n * Start of label.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter(type)\n effects.enter(markerType)\n effects.consume(code)\n effects.exit(markerType)\n effects.enter(stringType)\n return atBreak\n }\n\n /**\n * In label, at something, before something else.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (\n size > 999 ||\n code === null ||\n code === 91 ||\n (code === 93 && !seen) ||\n // To do: remove in the future once we’ve switched from\n // `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,\n // which doesn’t need this.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */\n (code === 94 &&\n !size &&\n '_hiddenFootnoteSupport' in self.parser.constructs)\n ) {\n return nok(code)\n }\n if (code === 93) {\n effects.exit(stringType)\n effects.enter(markerType)\n 
effects.consume(code)\n effects.exit(markerType)\n effects.exit(type)\n return ok\n }\n\n // To do: indent? Link chunks and EOLs together?\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return atBreak\n }\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return labelInside(code)\n }\n\n /**\n * In label, in text.\n *\n * ```markdown\n * > | [a]\n * ^\n * ```\n *\n * @type {State}\n */\n function labelInside(code) {\n if (\n code === null ||\n code === 91 ||\n code === 93 ||\n markdownLineEnding(code) ||\n size++ > 999\n ) {\n effects.exit('chunkString')\n return atBreak(code)\n }\n effects.consume(code)\n if (!seen) seen = !markdownSpace(code)\n return code === 92 ? labelEscape : labelInside\n }\n\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | [a\\*a]\n * ^\n * ```\n *\n * @type {State}\n */\n function labelEscape(code) {\n if (code === 91 || code === 92 || code === 93) {\n effects.consume(code)\n size++\n return labelInside\n }\n return labelInside(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenType} TokenType\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/**\n * Parse titles.\n *\n * ###### Examples\n *\n * ```markdown\n * \"a\"\n * 'b'\n * (c)\n * \"a\n * b\"\n * 'a\n * b'\n * (a\\)b)\n * ```\n *\n * @param {Effects} effects\n * Context.\n * @param {State} ok\n * State switched to when successful.\n * @param {State} nok\n * State switched to when unsuccessful.\n * @param {TokenType} type\n * Type of the whole title (`\"a\"`, `'b'`, `(c)`).\n * @param {TokenType} markerType\n * Type for the markers (`\"`, `'`, `(`, and `)`).\n * @param {TokenType} stringType\n * Type for the value (`a`).\n * @returns {State}\n * Start state.\n */ // eslint-disable-next-line max-params\nexport function factoryTitle(effects, ok, nok, type, markerType, stringType) {\n /** @type {NonNullable} */\n let marker\n return start\n\n /**\n * Start of title.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (code === 34 || code === 39 || code === 40) {\n effects.enter(type)\n effects.enter(markerType)\n effects.consume(code)\n effects.exit(markerType)\n marker = code === 40 ? 
41 : code\n return begin\n }\n return nok(code)\n }\n\n /**\n * After opening marker.\n *\n * This is also used at the closing marker.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */\n function begin(code) {\n if (code === marker) {\n effects.enter(markerType)\n effects.consume(code)\n effects.exit(markerType)\n effects.exit(type)\n return ok\n }\n effects.enter(stringType)\n return atBreak(code)\n }\n\n /**\n * At something, before something else.\n *\n * ```markdown\n * > | \"a\"\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === marker) {\n effects.exit(stringType)\n return begin(marker)\n }\n if (code === null) {\n return nok(code)\n }\n\n // Note: blank lines can’t exist in content.\n if (markdownLineEnding(code)) {\n // To do: use `space_or_tab_eol_with_options`, connect.\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, atBreak, 'linePrefix')\n }\n effects.enter('chunkString', {\n contentType: 'string'\n })\n return inside(code)\n }\n\n /**\n *\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker || code === null || markdownLineEnding(code)) {\n effects.exit('chunkString')\n return atBreak(code)\n }\n effects.consume(code)\n return code === 92 ? escape : inside\n }\n\n /**\n * After `\\`, at a special character.\n *\n * ```markdown\n * > | \"a\\*b\"\n * ^\n * ```\n *\n * @type {State}\n */\n function escape(code) {\n if (code === marker || code === 92) {\n effects.consume(code)\n return inside\n }\n return inside(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Effects} Effects\n * @typedef {import('micromark-util-types').State} State\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/**\n * Parse spaces and tabs.\n *\n * There is no `nok` parameter:\n *\n * * line endings or spaces in markdown are often optional, in which case this\n * factory can be used and `ok` will be switched to whether spaces were found\n * or not\n * * one line ending or space can be detected with\n * `markdownLineEndingOrSpace(code)` right before using `factoryWhitespace`\n *\n * @param {Effects} effects\n * Context.\n * @param {State} ok\n * State switched to when successful.\n * @returns\n * Start state.\n */\nexport function factoryWhitespace(effects, ok) {\n /** @type {boolean} */\n let seen\n return start\n\n /** @type {State} */\n function start(code) {\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n seen = true\n return start\n }\n if (markdownSpace(code)) {\n return factorySpace(\n effects,\n start,\n seen ? 
'linePrefix' : 'lineSuffix'\n )(code)\n }\n return ok(code)\n }\n}\n","/**\n * Normalize an identifier (as found in references, definitions).\n *\n * Collapses markdown whitespace, trim, and then lower- and uppercase.\n *\n * Some characters are considered “uppercase”, such as U+03F4 (`ϴ`), but if their\n * lowercase counterpart (U+03B8 (`θ`)) is uppercased will result in a different\n * uppercase character (U+0398 (`Θ`)).\n * So, to get a canonical form, we perform both lower- and uppercase.\n *\n * Using uppercase last makes sure keys will never interact with default\n * prototypal values (such as `constructor`): nothing in the prototype of\n * `Object` is uppercase.\n *\n * @param {string} value\n * Identifier to normalize.\n * @returns {string}\n * Normalized identifier.\n */\nexport function normalizeIdentifier(value) {\n return (\n value\n // Collapse markdown whitespace.\n .replace(/[\\t\\n\\r ]+/g, ' ')\n // Trim.\n .replace(/^ | $/g, '')\n // Some characters are considered “uppercase”, but if their lowercase\n // counterpart is uppercased will result in a different uppercase\n // character.\n // Hence, to get that form, we perform both lower- and uppercase.\n // Upper case makes sure keys will not interact with default prototypal\n // methods: no method is uppercase.\n .toLowerCase()\n .toUpperCase()\n )\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const hardBreakEscape = {\n name: 'hardBreakEscape',\n tokenize: tokenizeHardBreakEscape\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHardBreakEscape(effects, ok, nok) {\n return start\n\n /**\n * Start of a hard break (escape).\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('hardBreakEscape')\n effects.consume(code)\n return after\n }\n\n /**\n * After `\\`, at eol.\n *\n * ```markdown\n * > | a\\\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (markdownLineEnding(code)) {\n effects.exit('hardBreakEscape')\n return ok(code)\n }\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {splice} from 'micromark-util-chunked'\n/** @type {Construct} */\nexport const headingAtx = {\n name: 'headingAtx',\n tokenize: tokenizeHeadingAtx,\n resolve: resolveHeadingAtx\n}\n\n/** @type {Resolver} */\nfunction resolveHeadingAtx(events, context) {\n let contentEnd = events.length - 2\n let contentStart = 3\n /** @type {Token} */\n let content\n /** @type {Token} */\n let text\n\n // Prefix whitespace, part of the opening.\n if (events[contentStart][1].type === 'whitespace') {\n contentStart += 2\n }\n\n // Suffix whitespace, part of the 
closing.\n if (\n contentEnd - 2 > contentStart &&\n events[contentEnd][1].type === 'whitespace'\n ) {\n contentEnd -= 2\n }\n if (\n events[contentEnd][1].type === 'atxHeadingSequence' &&\n (contentStart === contentEnd - 1 ||\n (contentEnd - 4 > contentStart &&\n events[contentEnd - 2][1].type === 'whitespace'))\n ) {\n contentEnd -= contentStart + 1 === contentEnd ? 2 : 4\n }\n if (contentEnd > contentStart) {\n content = {\n type: 'atxHeadingText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end\n }\n text = {\n type: 'chunkText',\n start: events[contentStart][1].start,\n end: events[contentEnd][1].end,\n contentType: 'text'\n }\n splice(events, contentStart, contentEnd - contentStart + 1, [\n ['enter', content, context],\n ['enter', text, context],\n ['exit', text, context],\n ['exit', content, context]\n ])\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHeadingAtx(effects, ok, nok) {\n let size = 0\n return start\n\n /**\n * Start of a heading (atx).\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse indent like `markdown-rs`.\n effects.enter('atxHeading')\n return before(code)\n }\n\n /**\n * After optional whitespace, at `#`.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('atxHeadingSequence')\n return sequenceOpen(code)\n }\n\n /**\n * In opening sequence.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceOpen(code) {\n if (code === 35 && size++ < 6) {\n effects.consume(code)\n return sequenceOpen\n }\n\n // Always at least one `#`.\n if (code === null || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n return nok(code)\n }\n\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === 35) {\n effects.enter('atxHeadingSequence')\n return sequenceFurther(code)\n }\n if (code === null || markdownLineEnding(code)) {\n effects.exit('atxHeading')\n // To do: interrupt like `markdown-rs`.\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n return ok(code)\n }\n if (markdownSpace(code)) {\n return factorySpace(effects, atBreak, 'whitespace')(code)\n }\n\n // To do: generate `data` tokens, add the `text` token later.\n // Needs edit map, see: `markdown.rs`.\n effects.enter('atxHeadingText')\n return data(code)\n }\n\n /**\n * In further sequence (after whitespace).\n *\n * Could be normal “visible” hashes in the heading or a final sequence.\n *\n * ```markdown\n * > | ## aa ##\n * ^\n * ```\n *\n * @type {State}\n */\n function sequenceFurther(code) {\n if (code === 35) {\n effects.consume(code)\n return sequenceFurther\n }\n effects.exit('atxHeadingSequence')\n return atBreak(code)\n }\n\n /**\n * In text.\n *\n * ```markdown\n * > | ## aa\n * ^\n * ```\n *\n * @type {State}\n */\n function data(code) {\n if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {\n effects.exit('atxHeadingText')\n return atBreak(code)\n }\n effects.consume(code)\n return data\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef 
{import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\nimport {htmlBlockNames, htmlRawNames} from 'micromark-util-html-tag-name'\nimport {blankLine} from './blank-line.js'\n\n/** @type {Construct} */\nexport const htmlFlow = {\n name: 'htmlFlow',\n tokenize: tokenizeHtmlFlow,\n resolveTo: resolveToHtmlFlow,\n concrete: true\n}\n\n/** @type {Construct} */\nconst blankLineBefore = {\n tokenize: tokenizeBlankLineBefore,\n partial: true\n}\nconst nonLazyContinuationStart = {\n tokenize: tokenizeNonLazyContinuationStart,\n partial: true\n}\n\n/** @type {Resolver} */\nfunction resolveToHtmlFlow(events) {\n let index = events.length\n while (index--) {\n if (events[index][0] === 'enter' && events[index][1].type === 'htmlFlow') {\n break\n }\n }\n if (index > 1 && events[index - 2][1].type === 'linePrefix') {\n // Add the prefix start to the HTML token.\n events[index][1].start = events[index - 2][1].start\n // Add the prefix start to the HTML line token.\n events[index + 1][1].start = events[index - 2][1].start\n // Remove the line prefix.\n events.splice(index - 2, 2)\n }\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHtmlFlow(effects, ok, nok) {\n const self = this\n /** @type {number} */\n let marker\n /** @type {boolean} */\n let closingTag\n /** @type {string} */\n let buffer\n /** @type {number} */\n let index\n /** @type {Code} */\n let markerB\n return start\n\n /**\n * Start of HTML (flow).\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // To do: parse indent like `markdown-rs`.\n return before(code)\n }\n\n /**\n * At `<`, after optional whitespace.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('htmlFlow')\n effects.enter('htmlFlowData')\n effects.consume(code)\n return open\n }\n\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 33) {\n effects.consume(code)\n return declarationOpen\n }\n if (code === 47) {\n effects.consume(code)\n closingTag = true\n return tagCloseStart\n }\n if (code === 63) {\n effects.consume(code)\n marker = 3\n // To do:\n // tokenizer.concrete = true\n // To do: use `markdown-rs` style interrupt.\n // While we’re in an instruction instead of a declaration, we’re on a `?`\n // right now, so we do need to search for `>`, similar to declarations.\n return self.interrupt ? ok : continuationDeclarationInside\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * After ` | \n * ^\n * > | \n * ^\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code)\n marker = 2\n return commentOpenInside\n }\n if (code === 91) {\n effects.consume(code)\n marker = 5\n index = 0\n return cdataOpenInside\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n marker = 4\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? 
ok : continuationDeclarationInside\n }\n return nok(code)\n }\n\n /**\n * After ` | \n * ^\n * ```\n *\n * @type {State}\n */\n function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code)\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuationDeclarationInside\n }\n return nok(code)\n }\n\n /**\n * After ` | &<]]>\n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function cdataOpenInside(code) {\n const value = 'CDATA['\n if (code === value.charCodeAt(index++)) {\n effects.consume(code)\n if (index === value.length) {\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuation\n }\n return cdataOpenInside\n }\n return nok(code)\n }\n\n /**\n * After ``, in closing tag, at tag name.\n *\n * ```markdown\n * > |
\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseStart(code) {\n if (asciiAlpha(code)) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer = String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * In tag name.\n *\n * ```markdown\n * > | \n * ^^\n * > | \n * ^^\n * ```\n *\n * @type {State}\n */\n function tagName(code) {\n if (\n code === null ||\n code === 47 ||\n code === 62 ||\n markdownLineEndingOrSpace(code)\n ) {\n const slash = code === 47\n const name = buffer.toLowerCase()\n if (!slash && !closingTag && htmlRawNames.includes(name)) {\n marker = 1\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code)\n }\n if (htmlBlockNames.includes(buffer.toLowerCase())) {\n marker = 6\n if (slash) {\n effects.consume(code)\n return basicSelfClosing\n }\n\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok(code) : continuation(code)\n }\n marker = 7\n // Do not support complete HTML when interrupting.\n return self.interrupt && !self.parser.lazy[self.now().line]\n ? nok(code)\n : closingTag\n ? completeClosingTagAfter(code)\n : completeAttributeNameBefore(code)\n }\n\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n buffer += String.fromCharCode(code)\n return tagName\n }\n return nok(code)\n }\n\n /**\n * After closing slash of a basic tag name.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function basicSelfClosing(code) {\n if (code === 62) {\n effects.consume(code)\n // // Do not form containers.\n // tokenizer.concrete = true\n return self.interrupt ? ok : continuation\n }\n return nok(code)\n }\n\n /**\n * After closing slash of a complete tag name.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeClosingTagAfter(code) {\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeClosingTagAfter\n }\n return completeEnd(code)\n }\n\n /**\n * At an attribute name.\n *\n * At first, this state is used after a complete tag name, after whitespace,\n * where it expects optional attributes or the end of the tag.\n * It is also reused after attributes, when expecting more optional\n * attributes.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeNameBefore(code) {\n if (code === 47) {\n effects.consume(code)\n return completeEnd\n }\n\n // ASCII alphanumerical and `:` and `_`.\n if (code === 58 || code === 95 || asciiAlpha(code)) {\n effects.consume(code)\n return completeAttributeName\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeNameBefore\n }\n return completeEnd(code)\n }\n\n /**\n * In attribute name.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeName(code) {\n // ASCII alphanumerical and `-`, `.`, `:`, and `_`.\n if (\n code === 45 ||\n code === 46 ||\n code === 58 ||\n code === 95 ||\n asciiAlphanumeric(code)\n ) {\n effects.consume(code)\n return completeAttributeName\n }\n return completeAttributeNameAfter(code)\n }\n\n /**\n * After attribute name, at an optional initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeNameAfter(code) {\n if (code === 61) {\n 
effects.consume(code)\n return completeAttributeValueBefore\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeNameAfter\n }\n return completeAttributeNameBefore(code)\n }\n\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueBefore(code) {\n if (\n code === null ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 34 || code === 39) {\n effects.consume(code)\n markerB = code\n return completeAttributeValueQuoted\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAttributeValueBefore\n }\n return completeAttributeValueUnquoted(code)\n }\n\n /**\n * In double or single quoted attribute value.\n *\n * ```markdown\n * > | \n * ^\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueQuoted(code) {\n if (code === markerB) {\n effects.consume(code)\n markerB = null\n return completeAttributeValueQuotedAfter\n }\n if (code === null || markdownLineEnding(code)) {\n return nok(code)\n }\n effects.consume(code)\n return completeAttributeValueQuoted\n }\n\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueUnquoted(code) {\n if (\n code === null ||\n code === 34 ||\n code === 39 ||\n code === 47 ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96 ||\n markdownLineEndingOrSpace(code)\n ) {\n return completeAttributeNameAfter(code)\n }\n effects.consume(code)\n return completeAttributeValueUnquoted\n }\n\n /**\n * After double or single quoted attribute value, before whitespace or the\n * end of the tag.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || markdownSpace(code)) {\n return completeAttributeNameBefore(code)\n }\n return nok(code)\n }\n\n /**\n * In certain circumstances of a complete tag where only an `>` is allowed.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeEnd(code) {\n if (code === 62) {\n effects.consume(code)\n return completeAfter\n }\n return nok(code)\n }\n\n /**\n * After `>` in a complete tag.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function completeAfter(code) {\n if (code === null || markdownLineEnding(code)) {\n // // Do not form containers.\n // tokenizer.concrete = true\n return continuation(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return completeAfter\n }\n return nok(code)\n }\n\n /**\n * In continuation of any HTML kind.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function continuation(code) {\n if (code === 45 && marker === 2) {\n effects.consume(code)\n return continuationCommentInside\n }\n if (code === 60 && marker === 1) {\n effects.consume(code)\n return continuationRawTagOpen\n }\n if (code === 62 && marker === 4) {\n effects.consume(code)\n return continuationClose\n }\n if (code === 63 && marker === 3) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n if (code === 93 && marker === 5) {\n effects.consume(code)\n return continuationCdataInside\n }\n if (markdownLineEnding(code) && (marker === 6 || marker === 7)) {\n effects.exit('htmlFlowData')\n return effects.check(\n blankLineBefore,\n 
continuationAfter,\n continuationStart\n )(code)\n }\n if (code === null || markdownLineEnding(code)) {\n effects.exit('htmlFlowData')\n return continuationStart(code)\n }\n effects.consume(code)\n return continuation\n }\n\n /**\n * In continuation, at eol.\n *\n * ```markdown\n * > | \n * ^\n * | asd\n * ```\n *\n * @type {State}\n */\n function continuationStart(code) {\n return effects.check(\n nonLazyContinuationStart,\n continuationStartNonLazy,\n continuationAfter\n )(code)\n }\n\n /**\n * In continuation, at eol, before non-lazy content.\n *\n * ```markdown\n * > | \n * ^\n * | asd\n * ```\n *\n * @type {State}\n */\n function continuationStartNonLazy(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return continuationBefore\n }\n\n /**\n * In continuation, before non-lazy content.\n *\n * ```markdown\n * | \n * > | asd\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationBefore(code) {\n if (code === null || markdownLineEnding(code)) {\n return continuationStart(code)\n }\n effects.enter('htmlFlowData')\n return continuation(code)\n }\n\n /**\n * In comment continuation, after one `-`, expecting another.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function continuationCommentInside(code) {\n if (code === 45) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In raw continuation, after `<`, at `/`.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function continuationRawTagOpen(code) {\n if (code === 47) {\n effects.consume(code)\n buffer = ''\n return continuationRawEndTag\n }\n return continuation(code)\n }\n\n /**\n * In raw continuation, after ``, in a raw tag name.\n *\n * ```markdown\n * > | \n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function continuationRawEndTag(code) {\n if (code === 62) {\n const name = buffer.toLowerCase()\n if (htmlRawNames.includes(name)) {\n effects.consume(code)\n return continuationClose\n }\n return continuation(code)\n }\n if (asciiAlpha(code) && buffer.length < 8) {\n effects.consume(code)\n // @ts-expect-error: not null.\n buffer += String.fromCharCode(code)\n return continuationRawEndTag\n }\n return continuation(code)\n }\n\n /**\n * In cdata continuation, after `]`, expecting `]>`.\n *\n * ```markdown\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationCdataInside(code) {\n if (code === 93) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In declaration or instruction continuation, at `>`.\n *\n * ```markdown\n * > | \n * ^\n * > | >\n * ^\n * > | \n * ^\n * > | \n * ^\n * > | &<]]>\n * ^\n * ```\n *\n * @type {State}\n */\n function continuationDeclarationInside(code) {\n if (code === 62) {\n effects.consume(code)\n return continuationClose\n }\n\n // More dashes.\n if (code === 45 && marker === 2) {\n effects.consume(code)\n return continuationDeclarationInside\n }\n return continuation(code)\n }\n\n /**\n * In closed continuation: everything we get until the eol/eof is part of it.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function continuationClose(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('htmlFlowData')\n return continuationAfter(code)\n }\n effects.consume(code)\n return continuationClose\n }\n\n /**\n * Done.\n *\n * ```markdown\n * > | \n * ^\n * ```\n *\n * @type {State}\n */\n function continuationAfter(code) {\n 
effects.exit('htmlFlow')\n // // Feel free to interrupt.\n // tokenizer.interrupt = false\n // // No longer concrete.\n // tokenizer.concrete = false\n return ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeNonLazyContinuationStart(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * At eol, before continuation.\n *\n * ```markdown\n * > | * ```js\n * ^\n * | b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n if (markdownLineEnding(code)) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return after\n }\n return nok(code)\n }\n\n /**\n * A continuation.\n *\n * ```markdown\n * | * ```js\n * > | b\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n return self.parser.lazy[self.now().line] ? nok(code) : ok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeBlankLineBefore(effects, ok, nok) {\n return start\n\n /**\n * Before eol, expecting blank line.\n *\n * ```markdown\n * > | \n * ^\n * |\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return effects.attempt(blankLine, ok, nok)\n }\n}\n","/**\n * List of lowercase HTML “block” tag names.\n *\n * The list, when parsing HTML (flow), results in more relaxed rules (condition\n * 6).\n * Because they are known blocks, the HTML-like syntax doesn’t have to be\n * strictly parsed.\n * For tag names not in this list, a more strict algorithm (condition 7) is used\n * to detect whether the HTML-like syntax is seen as HTML (flow) or not.\n *\n * This is copied from:\n * .\n *\n * > 👉 **Note**: `search` was added in `[email protected]`.\n */\nexport const htmlBlockNames = [\n 'address',\n 'article',\n 'aside',\n 'base',\n 'basefont',\n 'blockquote',\n 'body',\n 'caption',\n 'center',\n 'col',\n 'colgroup',\n 'dd',\n 'details',\n 'dialog',\n 'dir',\n 'div',\n 'dl',\n 'dt',\n 'fieldset',\n 'figcaption',\n 'figure',\n 'footer',\n 'form',\n 'frame',\n 'frameset',\n 'h1',\n 'h2',\n 'h3',\n 'h4',\n 'h5',\n 'h6',\n 'head',\n 'header',\n 'hr',\n 'html',\n 'iframe',\n 'legend',\n 'li',\n 'link',\n 'main',\n 'menu',\n 'menuitem',\n 'nav',\n 'noframes',\n 'ol',\n 'optgroup',\n 'option',\n 'p',\n 'param',\n 'search',\n 'section',\n 'summary',\n 'table',\n 'tbody',\n 'td',\n 'tfoot',\n 'th',\n 'thead',\n 'title',\n 'tr',\n 'track',\n 'ul'\n]\n\n/**\n * List of lowercase HTML “raw” tag names.\n *\n * The list, when parsing HTML (flow), results in HTML that can include lines\n * without exiting, until a closing tag also in this list is found (condition\n * 1).\n *\n * This module is copied from:\n * .\n *\n * > 👉 **Note**: `textarea` was added in `[email protected]`.\n */\nexport const htmlRawNames = ['pre', 'script', 'style', 'textarea']\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {\n asciiAlpha,\n asciiAlphanumeric,\n markdownLineEnding,\n markdownLineEndingOrSpace,\n markdownSpace\n} from 'micromark-util-character'\n/** @type {Construct} */\nexport const htmlText = {\n name: 'htmlText',\n tokenize: tokenizeHtmlText\n}\n\n/**\n * @this 
{TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeHtmlText(effects, ok, nok) {\n const self = this\n /** @type {NonNullable | undefined} */\n let marker\n /** @type {number} */\n let index\n /** @type {State} */\n let returnState\n return start\n\n /**\n * Start of HTML (text).\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('htmlText')\n effects.enter('htmlTextData')\n effects.consume(code)\n return open\n }\n\n /**\n * After `<`, at tag name or other stuff.\n *\n * ```markdown\n * > | a c\n * ^\n * > | a c\n * ^\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 33) {\n effects.consume(code)\n return declarationOpen\n }\n if (code === 47) {\n effects.consume(code)\n return tagCloseStart\n }\n if (code === 63) {\n effects.consume(code)\n return instruction\n }\n\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n return tagOpen\n }\n return nok(code)\n }\n\n /**\n * After ` | a c\n * ^\n * > | a c\n * ^\n * > | a &<]]> c\n * ^\n * ```\n *\n * @type {State}\n */\n function declarationOpen(code) {\n if (code === 45) {\n effects.consume(code)\n return commentOpenInside\n }\n if (code === 91) {\n effects.consume(code)\n index = 0\n return cdataOpenInside\n }\n if (asciiAlpha(code)) {\n effects.consume(code)\n return declaration\n }\n return nok(code)\n }\n\n /**\n * In a comment, after ` | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentOpenInside(code) {\n if (code === 45) {\n effects.consume(code)\n return commentEnd\n }\n return nok(code)\n }\n\n /**\n * In comment.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function comment(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 45) {\n effects.consume(code)\n return commentClose\n }\n if (markdownLineEnding(code)) {\n returnState = comment\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return comment\n }\n\n /**\n * In comment, after `-`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentClose(code) {\n if (code === 45) {\n effects.consume(code)\n return commentEnd\n }\n return comment(code)\n }\n\n /**\n * In comment, after `--`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function commentEnd(code) {\n return code === 62\n ? end(code)\n : code === 45\n ? commentClose(code)\n : comment(code)\n }\n\n /**\n * After ` | a &<]]> b\n * ^^^^^^\n * ```\n *\n * @type {State}\n */\n function cdataOpenInside(code) {\n const value = 'CDATA['\n if (code === value.charCodeAt(index++)) {\n effects.consume(code)\n return index === value.length ? 
cdata : cdataOpenInside\n }\n return nok(code)\n }\n\n /**\n * In CDATA.\n *\n * ```markdown\n * > | a &<]]> b\n * ^^^\n * ```\n *\n * @type {State}\n */\n function cdata(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 93) {\n effects.consume(code)\n return cdataClose\n }\n if (markdownLineEnding(code)) {\n returnState = cdata\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return cdata\n }\n\n /**\n * In CDATA, after `]`, at another `]`.\n *\n * ```markdown\n * > | a &<]]> b\n * ^\n * ```\n *\n * @type {State}\n */\n function cdataClose(code) {\n if (code === 93) {\n effects.consume(code)\n return cdataEnd\n }\n return cdata(code)\n }\n\n /**\n * In CDATA, after `]]`, at `>`.\n *\n * ```markdown\n * > | a &<]]> b\n * ^\n * ```\n *\n * @type {State}\n */\n function cdataEnd(code) {\n if (code === 62) {\n return end(code)\n }\n if (code === 93) {\n effects.consume(code)\n return cdataEnd\n }\n return cdata(code)\n }\n\n /**\n * In declaration.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function declaration(code) {\n if (code === null || code === 62) {\n return end(code)\n }\n if (markdownLineEnding(code)) {\n returnState = declaration\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return declaration\n }\n\n /**\n * In instruction.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function instruction(code) {\n if (code === null) {\n return nok(code)\n }\n if (code === 63) {\n effects.consume(code)\n return instructionClose\n }\n if (markdownLineEnding(code)) {\n returnState = instruction\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return instruction\n }\n\n /**\n * In instruction, after `?`, at `>`.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function instructionClose(code) {\n return code === 62 ? 
end(code) : instruction(code)\n }\n\n /**\n * After ``, in closing tag, at tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseStart(code) {\n // ASCII alphabetical.\n if (asciiAlpha(code)) {\n effects.consume(code)\n return tagClose\n }\n return nok(code)\n }\n\n /**\n * After ` | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagClose(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n return tagClose\n }\n return tagCloseBetween(code)\n }\n\n /**\n * In closing tag, after tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagCloseBetween(code) {\n if (markdownLineEnding(code)) {\n returnState = tagCloseBetween\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagCloseBetween\n }\n return end(code)\n }\n\n /**\n * After ` | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpen(code) {\n // ASCII alphanumerical and `-`.\n if (code === 45 || asciiAlphanumeric(code)) {\n effects.consume(code)\n return tagOpen\n }\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n return nok(code)\n }\n\n /**\n * In opening tag, after tag name.\n *\n * ```markdown\n * > | a c\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenBetween(code) {\n if (code === 47) {\n effects.consume(code)\n return end\n }\n\n // ASCII alphabetical and `:` and `_`.\n if (code === 58 || code === 95 || asciiAlpha(code)) {\n effects.consume(code)\n return tagOpenAttributeName\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenBetween\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagOpenBetween\n }\n return end(code)\n }\n\n /**\n * In attribute name.\n *\n * ```markdown\n * > | a d\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeName(code) {\n // ASCII alphabetical and `-`, `.`, `:`, and `_`.\n if (\n code === 45 ||\n code === 46 ||\n code === 58 ||\n code === 95 ||\n asciiAlphanumeric(code)\n ) {\n effects.consume(code)\n return tagOpenAttributeName\n }\n return tagOpenAttributeNameAfter(code)\n }\n\n /**\n * After attribute name, before initializer, the end of the tag, or\n * whitespace.\n *\n * ```markdown\n * > | a d\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeNameAfter(code) {\n if (code === 61) {\n effects.consume(code)\n return tagOpenAttributeValueBefore\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeNameAfter\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagOpenAttributeNameAfter\n }\n return tagOpenBetween(code)\n }\n\n /**\n * Before unquoted, double quoted, or single quoted attribute value, allowing\n * whitespace.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueBefore(code) {\n if (\n code === null ||\n code === 60 ||\n code === 61 ||\n code === 62 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 34 || code === 39) {\n effects.consume(code)\n marker = code\n return tagOpenAttributeValueQuoted\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeValueBefore\n return lineEndingBefore(code)\n }\n if (markdownSpace(code)) {\n effects.consume(code)\n return tagOpenAttributeValueBefore\n }\n effects.consume(code)\n return tagOpenAttributeValueUnquoted\n }\n\n /**\n * In double or 
single quoted attribute value.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueQuoted(code) {\n if (code === marker) {\n effects.consume(code)\n marker = undefined\n return tagOpenAttributeValueQuotedAfter\n }\n if (code === null) {\n return nok(code)\n }\n if (markdownLineEnding(code)) {\n returnState = tagOpenAttributeValueQuoted\n return lineEndingBefore(code)\n }\n effects.consume(code)\n return tagOpenAttributeValueQuoted\n }\n\n /**\n * In unquoted attribute value.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueUnquoted(code) {\n if (\n code === null ||\n code === 34 ||\n code === 39 ||\n code === 60 ||\n code === 61 ||\n code === 96\n ) {\n return nok(code)\n }\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n effects.consume(code)\n return tagOpenAttributeValueUnquoted\n }\n\n /**\n * After double or single quoted attribute value, before whitespace or the end\n * of the tag.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */\n function tagOpenAttributeValueQuotedAfter(code) {\n if (code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {\n return tagOpenBetween(code)\n }\n return nok(code)\n }\n\n /**\n * In certain circumstances of a tag where only an `>` is allowed.\n *\n * ```markdown\n * > | a e\n * ^\n * ```\n *\n * @type {State}\n */\n function end(code) {\n if (code === 62) {\n effects.consume(code)\n effects.exit('htmlTextData')\n effects.exit('htmlText')\n return ok\n }\n return nok(code)\n }\n\n /**\n * At eol.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * > | a \n * ```\n *\n * @type {State}\n */\n function lineEndingBefore(code) {\n effects.exit('htmlTextData')\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return lineEndingAfter\n }\n\n /**\n * After eol, at optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a \n * ^\n * ```\n *\n * @type {State}\n */\n function lineEndingAfter(code) {\n // Always populated by defaults.\n\n return markdownSpace(code)\n ? factorySpace(\n effects,\n lineEndingAfterPrefix,\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? 
undefined\n : 4\n )(code)\n : lineEndingAfterPrefix(code)\n }\n\n /**\n * After eol, after optional whitespace.\n *\n * > 👉 **Note**: we can’t have blank lines in text, so no need to worry about\n * > empty tokens.\n *\n * ```markdown\n * | a \n * ^\n * ```\n *\n * @type {State}\n */\n function lineEndingAfterPrefix(code) {\n effects.enter('htmlTextData')\n return returnState(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Event} Event\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').Token} Token\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factoryDestination} from 'micromark-factory-destination'\nimport {factoryLabel} from 'micromark-factory-label'\nimport {factoryTitle} from 'micromark-factory-title'\nimport {factoryWhitespace} from 'micromark-factory-whitespace'\nimport {markdownLineEndingOrSpace} from 'micromark-util-character'\nimport {push, splice} from 'micromark-util-chunked'\nimport {normalizeIdentifier} from 'micromark-util-normalize-identifier'\nimport {resolveAll} from 'micromark-util-resolve-all'\n/** @type {Construct} */\nexport const labelEnd = {\n name: 'labelEnd',\n tokenize: tokenizeLabelEnd,\n resolveTo: resolveToLabelEnd,\n resolveAll: resolveAllLabelEnd\n}\n\n/** @type {Construct} */\nconst resourceConstruct = {\n tokenize: tokenizeResource\n}\n/** @type {Construct} */\nconst referenceFullConstruct = {\n tokenize: tokenizeReferenceFull\n}\n/** @type {Construct} */\nconst referenceCollapsedConstruct = {\n tokenize: tokenizeReferenceCollapsed\n}\n\n/** @type {Resolver} */\nfunction resolveAllLabelEnd(events) {\n let index = -1\n while (++index < events.length) {\n const token = events[index][1]\n if (\n token.type === 'labelImage' ||\n token.type === 'labelLink' ||\n token.type === 'labelEnd'\n ) {\n // Remove the marker.\n events.splice(index + 1, token.type === 'labelImage' ? 4 : 2)\n token.type = 'data'\n index++\n }\n }\n return events\n}\n\n/** @type {Resolver} */\nfunction resolveToLabelEnd(events, context) {\n let index = events.length\n let offset = 0\n /** @type {Token} */\n let token\n /** @type {number | undefined} */\n let open\n /** @type {number | undefined} */\n let close\n /** @type {Array} */\n let media\n\n // Find an opening.\n while (index--) {\n token = events[index][1]\n if (open) {\n // If we see another link, or inactive link label, we’ve been here before.\n if (\n token.type === 'link' ||\n (token.type === 'labelLink' && token._inactive)\n ) {\n break\n }\n\n // Mark other link openings as inactive, as we can’t have links in\n // links.\n if (events[index][0] === 'enter' && token.type === 'labelLink') {\n token._inactive = true\n }\n } else if (close) {\n if (\n events[index][0] === 'enter' &&\n (token.type === 'labelImage' || token.type === 'labelLink') &&\n !token._balanced\n ) {\n open = index\n if (token.type !== 'labelLink') {\n offset = 2\n break\n }\n }\n } else if (token.type === 'labelEnd') {\n close = index\n }\n }\n const group = {\n type: events[open][1].type === 'labelLink' ? 
'link' : 'image',\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n }\n const label = {\n type: 'label',\n start: Object.assign({}, events[open][1].start),\n end: Object.assign({}, events[close][1].end)\n }\n const text = {\n type: 'labelText',\n start: Object.assign({}, events[open + offset + 2][1].end),\n end: Object.assign({}, events[close - 2][1].start)\n }\n media = [\n ['enter', group, context],\n ['enter', label, context]\n ]\n\n // Opening marker.\n media = push(media, events.slice(open + 1, open + offset + 3))\n\n // Text open.\n media = push(media, [['enter', text, context]])\n\n // Always populated by defaults.\n\n // Between.\n media = push(\n media,\n resolveAll(\n context.parser.constructs.insideSpan.null,\n events.slice(open + offset + 4, close - 3),\n context\n )\n )\n\n // Text close, marker close, label close.\n media = push(media, [\n ['exit', text, context],\n events[close - 2],\n events[close - 1],\n ['exit', label, context]\n ])\n\n // Reference, resource, or so.\n media = push(media, events.slice(close + 1))\n\n // Media close.\n media = push(media, [['exit', group, context]])\n splice(events, open, events.length, media)\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelEnd(effects, ok, nok) {\n const self = this\n let index = self.events.length\n /** @type {Token} */\n let labelStart\n /** @type {boolean} */\n let defined\n\n // Find an opening.\n while (index--) {\n if (\n (self.events[index][1].type === 'labelImage' ||\n self.events[index][1].type === 'labelLink') &&\n !self.events[index][1]._balanced\n ) {\n labelStart = self.events[index][1]\n break\n }\n }\n return start\n\n /**\n * Start of label end.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ```\n *\n * @type {State}\n */\n function start(code) {\n // If there is not an okay opening.\n if (!labelStart) {\n return nok(code)\n }\n\n // If the corresponding label (link) start is marked as inactive,\n // it means we’d be wrapping a link, like this:\n //\n // ```markdown\n // > | a [b [c](d) e](f) g.\n // ^\n // ```\n //\n // We can’t have that, so it’s just balanced brackets.\n if (labelStart._inactive) {\n return labelEndNok(code)\n }\n defined = self.parser.defined.includes(\n normalizeIdentifier(\n self.sliceSerialize({\n start: labelStart.end,\n end: self.now()\n })\n )\n )\n effects.enter('labelEnd')\n effects.enter('labelMarker')\n effects.consume(code)\n effects.exit('labelMarker')\n effects.exit('labelEnd')\n return after\n }\n\n /**\n * After `]`.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n // Note: `markdown-rs` also parses GFM footnotes here, which for us is in\n // an extension.\n\n // Resource (`[asd](fgh)`)?\n if (code === 40) {\n return effects.attempt(\n resourceConstruct,\n labelEndOk,\n defined ? labelEndOk : labelEndNok\n )(code)\n }\n\n // Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?\n if (code === 91) {\n return effects.attempt(\n referenceFullConstruct,\n labelEndOk,\n defined ? referenceNotFull : labelEndNok\n )(code)\n }\n\n // Shortcut (`[asd]`) reference?\n return defined ? 
labelEndOk(code) : labelEndNok(code)\n }\n\n /**\n * After `]`, at `[`, but not at a full reference.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceNotFull(code) {\n return effects.attempt(\n referenceCollapsedConstruct,\n labelEndOk,\n labelEndNok\n )(code)\n }\n\n /**\n * Done, we found something.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * > | [a][b] c\n * ^\n * > | [a][] b\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function labelEndOk(code) {\n // Note: `markdown-rs` does a bunch of stuff here.\n return ok(code)\n }\n\n /**\n * Done, it’s nothing.\n *\n * There was an okay opening, but we didn’t match anything.\n *\n * ```markdown\n * > | [a](b c\n * ^\n * > | [a][b c\n * ^\n * > | [a] b\n * ^\n * ```\n *\n * @type {State}\n */\n function labelEndNok(code) {\n labelStart._balanced = true\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeResource(effects, ok, nok) {\n return resourceStart\n\n /**\n * At a resource.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceStart(code) {\n effects.enter('resource')\n effects.enter('resourceMarker')\n effects.consume(code)\n effects.exit('resourceMarker')\n return resourceBefore\n }\n\n /**\n * In resource, after `(`, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceBefore(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, resourceOpen)(code)\n : resourceOpen(code)\n }\n\n /**\n * In resource, after optional whitespace, at `)` or a destination.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceOpen(code) {\n if (code === 41) {\n return resourceEnd(code)\n }\n return factoryDestination(\n effects,\n resourceDestinationAfter,\n resourceDestinationMissing,\n 'resourceDestination',\n 'resourceDestinationLiteral',\n 'resourceDestinationLiteralMarker',\n 'resourceDestinationRaw',\n 'resourceDestinationString',\n 32\n )(code)\n }\n\n /**\n * In resource, after destination, at optional whitespace.\n *\n * ```markdown\n * > | [a](b) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceDestinationAfter(code) {\n return markdownLineEndingOrSpace(code)\n ? factoryWhitespace(effects, resourceBetween)(code)\n : resourceEnd(code)\n }\n\n /**\n * At invalid destination.\n *\n * ```markdown\n * > | [a](<<) b\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceDestinationMissing(code) {\n return nok(code)\n }\n\n /**\n * In resource, after destination and whitespace, at `(` or title.\n *\n * ```markdown\n * > | [a](b ) c\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceBetween(code) {\n if (code === 34 || code === 39 || code === 40) {\n return factoryTitle(\n effects,\n resourceTitleAfter,\n nok,\n 'resourceTitle',\n 'resourceTitleMarker',\n 'resourceTitleString'\n )(code)\n }\n return resourceEnd(code)\n }\n\n /**\n * In resource, after title, at optional whitespace.\n *\n * ```markdown\n * > | [a](b \"c\") d\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceTitleAfter(code) {\n return markdownLineEndingOrSpace(code)\n ? 
factoryWhitespace(effects, resourceEnd)(code)\n : resourceEnd(code)\n }\n\n /**\n * In resource, at `)`.\n *\n * ```markdown\n * > | [a](b) d\n * ^\n * ```\n *\n * @type {State}\n */\n function resourceEnd(code) {\n if (code === 41) {\n effects.enter('resourceMarker')\n effects.consume(code)\n effects.exit('resourceMarker')\n effects.exit('resource')\n return ok\n }\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeReferenceFull(effects, ok, nok) {\n const self = this\n return referenceFull\n\n /**\n * In a reference (full), at the `[`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFull(code) {\n return factoryLabel.call(\n self,\n effects,\n referenceFullAfter,\n referenceFullMissing,\n 'reference',\n 'referenceMarker',\n 'referenceString'\n )(code)\n }\n\n /**\n * In a reference (full), after `]`.\n *\n * ```markdown\n * > | [a][b] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFullAfter(code) {\n return self.parser.defined.includes(\n normalizeIdentifier(\n self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)\n )\n )\n ? ok(code)\n : nok(code)\n }\n\n /**\n * In reference (full) that was missing.\n *\n * ```markdown\n * > | [a][b d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceFullMissing(code) {\n return nok(code)\n }\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeReferenceCollapsed(effects, ok, nok) {\n return referenceCollapsedStart\n\n /**\n * In reference (collapsed), at `[`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceCollapsedStart(code) {\n // We only attempt a collapsed label if there’s a `[`.\n\n effects.enter('reference')\n effects.enter('referenceMarker')\n effects.consume(code)\n effects.exit('referenceMarker')\n return referenceCollapsedOpen\n }\n\n /**\n * In reference (collapsed), at `]`.\n *\n * > 👉 **Note**: we only get here if the label is defined.\n *\n * ```markdown\n * > | [a][] d\n * ^\n * ```\n *\n * @type {State}\n */\n function referenceCollapsedOpen(code) {\n if (code === 93) {\n effects.enter('referenceMarker')\n effects.consume(code)\n effects.exit('referenceMarker')\n effects.exit('reference')\n return ok\n }\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {labelEnd} from './label-end.js'\n\n/** @type {Construct} */\nexport const labelStartImage = {\n name: 'labelStartImage',\n tokenize: tokenizeLabelStartImage,\n resolveAll: labelEnd.resolveAll\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelStartImage(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of label (image) start.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('labelImage')\n effects.enter('labelImageMarker')\n effects.consume(code)\n effects.exit('labelImageMarker')\n return open\n }\n\n /**\n * After `!`, at `[`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function open(code) {\n if (code === 91) {\n effects.enter('labelMarker')\n effects.consume(code)\n 
effects.exit('labelMarker')\n effects.exit('labelImage')\n return after\n }\n return nok(code)\n }\n\n /**\n * After `![`.\n *\n * ```markdown\n * > | a ![b] c\n * ^\n * ```\n *\n * This is needed in because, when GFM footnotes are enabled, images never\n * form when started with a `^`.\n * Instead, links form:\n *\n * ```markdown\n * ![^a](b)\n *\n * ![^a][b]\n *\n * [b]: c\n * ```\n *\n * ```html\n * !^a
\n * <p>!<a href=\"c\">^a</a></p>
\n * ```\n *\n * @type {State}\n */\n function after(code) {\n // To do: use a new field to do this, this is still needed for\n // `micromark-extension-gfm-footnote`, but the `label-start-link`\n // behavior isn’t.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */\n return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs\n ? nok(code)\n : ok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {labelEnd} from './label-end.js'\n\n/** @type {Construct} */\nexport const labelStartLink = {\n name: 'labelStartLink',\n tokenize: tokenizeLabelStartLink,\n resolveAll: labelEnd.resolveAll\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLabelStartLink(effects, ok, nok) {\n const self = this\n return start\n\n /**\n * Start of label (link) start.\n *\n * ```markdown\n * > | a [b] c\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('labelLink')\n effects.enter('labelMarker')\n effects.consume(code)\n effects.exit('labelMarker')\n effects.exit('labelLink')\n return after\n }\n\n /** @type {State} */\n function after(code) {\n // To do: this isn’t needed in `micromark-extension-gfm-footnote`,\n // remove.\n // Hidden footnotes hook.\n /* c8 ignore next 3 */\n return code === 94 && '_hiddenFootnoteSupport' in self.parser.constructs\n ? nok(code)\n : ok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding} from 'micromark-util-character'\n/** @type {Construct} */\nexport const lineEnding = {\n name: 'lineEnding',\n tokenize: tokenizeLineEnding\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeLineEnding(effects, ok) {\n return start\n\n /** @type {State} */\n function start(code) {\n effects.enter('lineEnding')\n effects.consume(code)\n effects.exit('lineEnding')\n return factorySpace(effects, ok, 'linePrefix')\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').ContainerState} ContainerState\n * @typedef {import('micromark-util-types').Exiter} Exiter\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {asciiDigit, markdownSpace} from 'micromark-util-character'\nimport {blankLine} from './blank-line.js'\nimport {thematicBreak} from './thematic-break.js'\n\n/** @type {Construct} */\nexport const list = {\n name: 'list',\n tokenize: tokenizeListStart,\n continuation: {\n tokenize: tokenizeListContinuation\n },\n exit: tokenizeListEnd\n}\n\n/** @type {Construct} */\nconst listItemPrefixWhitespaceConstruct = {\n tokenize: tokenizeListItemPrefixWhitespace,\n partial: true\n}\n\n/** @type {Construct} */\nconst indentConstruct = {\n tokenize: tokenizeIndent,\n partial: true\n}\n\n// To 
do: `markdown-rs` parses list items on their own and later stitches them\n// together.\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListStart(effects, ok, nok) {\n const self = this\n const tail = self.events[self.events.length - 1]\n let initialSize =\n tail && tail[1].type === 'linePrefix'\n ? tail[2].sliceSerialize(tail[1], true).length\n : 0\n let size = 0\n return start\n\n /** @type {State} */\n function start(code) {\n const kind =\n self.containerState.type ||\n (code === 42 || code === 43 || code === 45\n ? 'listUnordered'\n : 'listOrdered')\n if (\n kind === 'listUnordered'\n ? !self.containerState.marker || code === self.containerState.marker\n : asciiDigit(code)\n ) {\n if (!self.containerState.type) {\n self.containerState.type = kind\n effects.enter(kind, {\n _container: true\n })\n }\n if (kind === 'listUnordered') {\n effects.enter('listItemPrefix')\n return code === 42 || code === 45\n ? effects.check(thematicBreak, nok, atMarker)(code)\n : atMarker(code)\n }\n if (!self.interrupt || code === 49) {\n effects.enter('listItemPrefix')\n effects.enter('listItemValue')\n return inside(code)\n }\n }\n return nok(code)\n }\n\n /** @type {State} */\n function inside(code) {\n if (asciiDigit(code) && ++size < 10) {\n effects.consume(code)\n return inside\n }\n if (\n (!self.interrupt || size < 2) &&\n (self.containerState.marker\n ? code === self.containerState.marker\n : code === 41 || code === 46)\n ) {\n effects.exit('listItemValue')\n return atMarker(code)\n }\n return nok(code)\n }\n\n /**\n * @type {State}\n **/\n function atMarker(code) {\n effects.enter('listItemMarker')\n effects.consume(code)\n effects.exit('listItemMarker')\n self.containerState.marker = self.containerState.marker || code\n return effects.check(\n blankLine,\n // Can’t be empty when interrupting.\n self.interrupt ? 
nok : onBlank,\n effects.attempt(\n listItemPrefixWhitespaceConstruct,\n endOfPrefix,\n otherPrefix\n )\n )\n }\n\n /** @type {State} */\n function onBlank(code) {\n self.containerState.initialBlankLine = true\n initialSize++\n return endOfPrefix(code)\n }\n\n /** @type {State} */\n function otherPrefix(code) {\n if (markdownSpace(code)) {\n effects.enter('listItemPrefixWhitespace')\n effects.consume(code)\n effects.exit('listItemPrefixWhitespace')\n return endOfPrefix\n }\n return nok(code)\n }\n\n /** @type {State} */\n function endOfPrefix(code) {\n self.containerState.size =\n initialSize +\n self.sliceSerialize(effects.exit('listItemPrefix'), true).length\n return ok(code)\n }\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListContinuation(effects, ok, nok) {\n const self = this\n self.containerState._closeFlow = undefined\n return effects.check(blankLine, onBlank, notBlank)\n\n /** @type {State} */\n function onBlank(code) {\n self.containerState.furtherBlankLines =\n self.containerState.furtherBlankLines ||\n self.containerState.initialBlankLine\n\n // We have a blank line.\n // Still, try to consume at most the items size.\n return factorySpace(\n effects,\n ok,\n 'listItemIndent',\n self.containerState.size + 1\n )(code)\n }\n\n /** @type {State} */\n function notBlank(code) {\n if (self.containerState.furtherBlankLines || !markdownSpace(code)) {\n self.containerState.furtherBlankLines = undefined\n self.containerState.initialBlankLine = undefined\n return notInCurrentItem(code)\n }\n self.containerState.furtherBlankLines = undefined\n self.containerState.initialBlankLine = undefined\n return effects.attempt(indentConstruct, ok, notInCurrentItem)(code)\n }\n\n /** @type {State} */\n function notInCurrentItem(code) {\n // While we do continue, we signal that the flow should be closed.\n self.containerState._closeFlow = true\n // As we’re closing flow, we’re no longer interrupting.\n self.interrupt = undefined\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n effects.attempt(list, ok, nok),\n 'linePrefix',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4\n )(code)\n }\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeIndent(effects, ok, nok) {\n const self = this\n return factorySpace(\n effects,\n afterPrefix,\n 'listItemIndent',\n self.containerState.size + 1\n )\n\n /** @type {State} */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return tail &&\n tail[1].type === 'listItemIndent' &&\n tail[2].sliceSerialize(tail[1], true).length === self.containerState.size\n ? ok(code)\n : nok(code)\n }\n}\n\n/**\n * @type {Exiter}\n * @this {TokenizeContext}\n */\nfunction tokenizeListEnd(effects) {\n effects.exit(this.containerState.type)\n}\n\n/**\n * @type {Tokenizer}\n * @this {TokenizeContext}\n */\nfunction tokenizeListItemPrefixWhitespace(effects, ok, nok) {\n const self = this\n\n // Always populated by defaults.\n\n return factorySpace(\n effects,\n afterPrefix,\n 'listItemPrefixWhitespace',\n self.parser.constructs.disable.null.includes('codeIndented')\n ? undefined\n : 4 + 1\n )\n\n /** @type {State} */\n function afterPrefix(code) {\n const tail = self.events[self.events.length - 1]\n return !markdownSpace(code) &&\n tail &&\n tail[1].type === 'listItemPrefixWhitespace'\n ? 
ok(code)\n : nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const thematicBreak = {\n name: 'thematicBreak',\n tokenize: tokenizeThematicBreak\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeThematicBreak(effects, ok, nok) {\n let size = 0\n /** @type {NonNullable} */\n let marker\n return start\n\n /**\n * Start of thematic break.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n effects.enter('thematicBreak')\n // To do: parse indent like `markdown-rs`.\n return before(code)\n }\n\n /**\n * After optional whitespace, at marker.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n marker = code\n return atBreak(code)\n }\n\n /**\n * After something, before something else.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function atBreak(code) {\n if (code === marker) {\n effects.enter('thematicBreakSequence')\n return sequence(code)\n }\n if (size >= 3 && (code === null || markdownLineEnding(code))) {\n effects.exit('thematicBreak')\n return ok(code)\n }\n return nok(code)\n }\n\n /**\n * In sequence.\n *\n * ```markdown\n * > | ***\n * ^\n * ```\n *\n * @type {State}\n */\n function sequence(code) {\n if (code === marker) {\n effects.consume(code)\n size++\n return sequence\n }\n effects.exit('thematicBreakSequence')\n return markdownSpace(code)\n ? 
factorySpace(effects, atBreak, 'whitespace')(code)\n : atBreak(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Construct} Construct\n * @typedef {import('micromark-util-types').Resolver} Resolver\n * @typedef {import('micromark-util-types').State} State\n * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext\n * @typedef {import('micromark-util-types').Tokenizer} Tokenizer\n */\n\nimport {factorySpace} from 'micromark-factory-space'\nimport {markdownLineEnding, markdownSpace} from 'micromark-util-character'\n/** @type {Construct} */\nexport const setextUnderline = {\n name: 'setextUnderline',\n tokenize: tokenizeSetextUnderline,\n resolveTo: resolveToSetextUnderline\n}\n\n/** @type {Resolver} */\nfunction resolveToSetextUnderline(events, context) {\n // To do: resolve like `markdown-rs`.\n let index = events.length\n /** @type {number | undefined} */\n let content\n /** @type {number | undefined} */\n let text\n /** @type {number | undefined} */\n let definition\n\n // Find the opening of the content.\n // It’ll always exist: we don’t tokenize if it isn’t there.\n while (index--) {\n if (events[index][0] === 'enter') {\n if (events[index][1].type === 'content') {\n content = index\n break\n }\n if (events[index][1].type === 'paragraph') {\n text = index\n }\n }\n // Exit\n else {\n if (events[index][1].type === 'content') {\n // Remove the content end (if needed we’ll add it later)\n events.splice(index, 1)\n }\n if (!definition && events[index][1].type === 'definition') {\n definition = index\n }\n }\n }\n const heading = {\n type: 'setextHeading',\n start: Object.assign({}, events[text][1].start),\n end: Object.assign({}, events[events.length - 1][1].end)\n }\n\n // Change the paragraph to setext heading text.\n events[text][1].type = 'setextHeadingText'\n\n // If we have definitions in the content, we’ll keep on having content,\n // but we need move it.\n if (definition) {\n events.splice(text, 0, ['enter', heading, context])\n events.splice(definition + 1, 0, ['exit', events[content][1], context])\n events[content][1].end = Object.assign({}, events[definition][1].end)\n } else {\n events[content][1] = heading\n }\n\n // Add the heading exit at the end.\n events.push(['exit', heading, context])\n return events\n}\n\n/**\n * @this {TokenizeContext}\n * @type {Tokenizer}\n */\nfunction tokenizeSetextUnderline(effects, ok, nok) {\n const self = this\n /** @type {NonNullable} */\n let marker\n return start\n\n /**\n * At start of heading (setext) underline.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function start(code) {\n let index = self.events.length\n /** @type {boolean | undefined} */\n let paragraph\n // Find an opening.\n while (index--) {\n // Skip enter/exit of line ending, line prefix, and content.\n // We can now either have a definition or a paragraph.\n if (\n self.events[index][1].type !== 'lineEnding' &&\n self.events[index][1].type !== 'linePrefix' &&\n self.events[index][1].type !== 'content'\n ) {\n paragraph = self.events[index][1].type === 'paragraph'\n break\n }\n }\n\n // To do: handle lazy/pierce like `markdown-rs`.\n // To do: parse indent like `markdown-rs`.\n if (!self.parser.lazy[self.now().line] && (self.interrupt || paragraph)) {\n effects.enter('setextHeadingLine')\n marker = code\n return before(code)\n }\n return nok(code)\n }\n\n /**\n * After optional whitespace, at `-` or `=`.\n *\n * ```markdown\n * | aa\n * > | ==\n * 
^\n * ```\n *\n * @type {State}\n */\n function before(code) {\n effects.enter('setextHeadingLineSequence')\n return inside(code)\n }\n\n /**\n * In sequence.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function inside(code) {\n if (code === marker) {\n effects.consume(code)\n return inside\n }\n effects.exit('setextHeadingLineSequence')\n return markdownSpace(code)\n ? factorySpace(effects, after, 'lineSuffix')(code)\n : after(code)\n }\n\n /**\n * After sequence, after optional whitespace.\n *\n * ```markdown\n * | aa\n * > | ==\n * ^\n * ```\n *\n * @type {State}\n */\n function after(code) {\n if (code === null || markdownLineEnding(code)) {\n effects.exit('setextHeadingLine')\n return ok(code)\n }\n return nok(code)\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Chunk} Chunk\n * @typedef {import('micromark-util-types').Code} Code\n * @typedef {import('micromark-util-types').Encoding} Encoding\n * @typedef {import('micromark-util-types').Value} Value\n */\n\n/**\n * @callback Preprocessor\n * @param {Value} value\n * @param {Encoding | null | undefined} [encoding]\n * @param {boolean | null | undefined} [end=false]\n * @returns {Array}\n */\n\nconst search = /[\\0\\t\\n\\r]/g\n\n/**\n * @returns {Preprocessor}\n */\nexport function preprocess() {\n let column = 1\n let buffer = ''\n /** @type {boolean | undefined} */\n let start = true\n /** @type {boolean | undefined} */\n let atCarriageReturn\n return preprocessor\n\n /** @type {Preprocessor} */\n function preprocessor(value, encoding, end) {\n /** @type {Array} */\n const chunks = []\n /** @type {RegExpMatchArray | null} */\n let match\n /** @type {number} */\n let next\n /** @type {number} */\n let startPosition\n /** @type {number} */\n let endPosition\n /** @type {Code} */\n let code\n\n // @ts-expect-error `Buffer` does allow an encoding.\n value = buffer + value.toString(encoding)\n startPosition = 0\n buffer = ''\n if (start) {\n // To do: `markdown-rs` actually parses BOMs (byte order mark).\n if (value.charCodeAt(0) === 65279) {\n startPosition++\n }\n start = undefined\n }\n while (startPosition < value.length) {\n search.lastIndex = startPosition\n match = search.exec(value)\n endPosition =\n match && match.index !== undefined ? 
match.index : value.length\n code = value.charCodeAt(endPosition)\n if (!match) {\n buffer = value.slice(startPosition)\n break\n }\n if (code === 10 && startPosition === endPosition && atCarriageReturn) {\n chunks.push(-3)\n atCarriageReturn = undefined\n } else {\n if (atCarriageReturn) {\n chunks.push(-5)\n atCarriageReturn = undefined\n }\n if (startPosition < endPosition) {\n chunks.push(value.slice(startPosition, endPosition))\n column += endPosition - startPosition\n }\n switch (code) {\n case 0: {\n chunks.push(65533)\n column++\n break\n }\n case 9: {\n next = Math.ceil(column / 4) * 4\n chunks.push(-2)\n while (column++ < next) chunks.push(-1)\n break\n }\n case 10: {\n chunks.push(-4)\n column = 1\n break\n }\n default: {\n atCarriageReturn = true\n column = 1\n }\n }\n }\n startPosition = endPosition + 1\n }\n if (end) {\n if (atCarriageReturn) chunks.push(-5)\n if (buffer) chunks.push(buffer)\n chunks.push(null)\n }\n return chunks\n }\n}\n","/**\n * @typedef {import('micromark-util-types').Event} Event\n */\n\nimport {subtokenize} from 'micromark-util-subtokenize'\n\n/**\n * @param {Array} events\n * @returns {Array}\n */\nexport function postprocess(events) {\n while (!subtokenize(events)) {\n // Empty\n }\n return events\n}\n","/**\n * Turn the number (in string form as either hexa- or plain decimal) coming from\n * a numeric character reference into a character.\n *\n * Sort of like `String.fromCharCode(Number.parseInt(value, base))`, but makes\n * non-characters and control characters safe.\n *\n * @param {string} value\n * Value to decode.\n * @param {number} base\n * Numeric base.\n * @returns {string}\n * Character.\n */\nexport function decodeNumericCharacterReference(value, base) {\n const code = Number.parseInt(value, base)\n if (\n // C0 except for HT, LF, FF, CR, space.\n code < 9 ||\n code === 11 ||\n (code > 13 && code < 32) ||\n // Control character (DEL) of C0, and C1 controls.\n (code > 126 && code < 160) ||\n // Lone high surrogates and low surrogates.\n (code > 55295 && code < 57344) ||\n // Noncharacters.\n (code > 64975 && code < 65008) /* eslint-disable no-bitwise */ ||\n (code & 65535) === 65535 ||\n (code & 65535) === 65534 /* eslint-enable no-bitwise */ ||\n // Out of range\n code > 1114111\n ) {\n return '\\uFFFD'\n }\n return String.fromCharCode(code)\n}\n","import {decodeNamedCharacterReference} from 'decode-named-character-reference'\nimport {decodeNumericCharacterReference} from 'micromark-util-decode-numeric-character-reference'\nconst characterEscapeOrReference =\n /\\\\([!-/:-@[-`{-~])|&(#(?:\\d{1,7}|x[\\da-f]{1,6})|[\\da-z]{1,31});/gi\n\n/**\n * Decode markdown strings (which occur in places such as fenced code info\n * strings, destinations, labels, and titles).\n *\n * The “string” content type allows character escapes and -references.\n * This decodes those.\n *\n * @param {string} value\n * Value to decode.\n * @returns {string}\n * Decoded value.\n */\nexport function decodeString(value) {\n return value.replace(characterEscapeOrReference, decode)\n}\n\n/**\n * @param {string} $0\n * @param {string} $1\n * @param {string} $2\n * @returns {string}\n */\nfunction decode($0, $1, $2) {\n if ($1) {\n // Escape.\n return $1\n }\n\n // Reference.\n const head = $2.charCodeAt(0)\n if (head === 35) {\n const head = $2.charCodeAt(1)\n const hex = head === 120 || head === 88\n return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 
16 : 10)\n }\n return decodeNamedCharacterReference($2) || $0\n}\n","/**\n * @typedef {import('unist').Node} Node\n * @typedef {import('unist').Point} Point\n * @typedef {import('unist').Position} Position\n */\n\n/**\n * @typedef NodeLike\n * @property {string} type\n * @property {PositionLike | null | undefined} [position]\n *\n * @typedef PositionLike\n * @property {PointLike | null | undefined} [start]\n * @property {PointLike | null | undefined} [end]\n *\n * @typedef PointLike\n * @property {number | null | undefined} [line]\n * @property {number | null | undefined} [column]\n * @property {number | null | undefined} [offset]\n */\n\n/**\n * Serialize the positional info of a point, position (start and end points),\n * or node.\n *\n * @param {Node | NodeLike | Position | PositionLike | Point | PointLike | null | undefined} [value]\n * Node, position, or point.\n * @returns {string}\n * Pretty printed positional info of a node (`string`).\n *\n * In the format of a range `ls:cs-le:ce` (when given `node` or `position`)\n * or a point `l:c` (when given `point`), where `l` stands for line, `c` for\n * column, `s` for `start`, and `e` for end.\n * An empty string (`''`) is returned if the given value is neither `node`,\n * `position`, nor `point`.\n */\nexport function stringifyPosition(value) {\n // Nothing.\n if (!value || typeof value !== 'object') {\n return ''\n }\n\n // Node.\n if ('position' in value || 'type' in value) {\n return position(value.position)\n }\n\n // Position.\n if ('start' in value || 'end' in value) {\n return position(value)\n }\n\n // Point.\n if ('line' in value || 'column' in value) {\n return point(value)\n }\n\n // ?\n return ''\n}\n\n/**\n * @param {Point | PointLike | null | undefined} point\n * @returns {string}\n */\nfunction point(point) {\n return index(point && point.line) + ':' + index(point && point.column)\n}\n\n/**\n * @param {Position | PositionLike | null | undefined} pos\n * @returns {string}\n */\nfunction position(pos) {\n return point(pos && pos.start) + '-' + point(pos && pos.end)\n}\n\n/**\n * @param {number | null | undefined} value\n * @returns {number}\n */\nfunction index(value) {\n return value && typeof value === 'number' ? 
value : 1\n}\n"],"names":["$parcel$export","e","n","v","s","Object","defineProperty","get","set","enumerable","configurable","parcelRequire","$parcel$global","globalThis","parcelRegister","register","module","exports","$b2f6560a6becbb8b$export$db3b6bfb95261072","$b2f6560a6becbb8b$export$407448d2b89b1813","$4jcZX","$32C5u","$eJNXH","$b2f6560a6becbb8b$var$createTspan","textElement","lineIndex","lineHeight","append","attr","parentNode","text","_a","testElement","testSpan","$b2f6560a6becbb8b$var$updateTextContentAndStyles","content","type","textDimension","node","getBoundingClientRect","remove","tspan","wrappedLine","forEach","word","index","innerTspan","el","style","isTitle","classes","useHtmlLabels","isNode","width","addSvgBackground","l","info","$b2f6560a6becbb8b$var$createFormattedText","g","structuredText","addBackground","labelGroup","bkg","insert","line","checkWidth","line2","$b2f6560a6becbb8b$var$computeWidthOfText","textLength","getComputedTextLength","preparedLine","$b2f6560a6becbb8b$var$splitLineToFitWidth","checkFit","some","includes","Error","$b2f6560a6becbb8b$var$splitLineToFitWidthRecursion","words","lines","newLine","length","push","joiner","shift","nextWord","lineWithNextWord","unshift","rest","$b2f6560a6becbb8b$var$splitWordToFitWidthRecursion","usedChars","remainingChars","join","nextChar","newWord","Intl","Segmenter","segment","map","bbox","getBBox","height","$b2f6560a6becbb8b$var$markdownToLines","markdown","preprocessedMarkdown","$b2f6560a6becbb8b$var$preprocessMarkdown","withoutMultipleNewlines","replace","dedent","children","fromMarkdown","currentLine","treeNode","contentNode","processNode","parentType","textLines","value","split","textLine","htmlText","$b2f6560a6becbb8b$var$markdownToHTML","output","$b2f6560a6becbb8b$var$addHtmlSpan","element","styleFn","fo","div","label","labelClass","html","labelStyle","dom","M","$236f1a66ed1dd212$export$d744d789c09bfde6","$apdnY","$2JaIP","$hFzKD","$6Jw5n","$34rZa","$1lC1m","$9HErY","$gxs7E","$f9WaX","$236f1a66ed1dd212$var$own","hasOwnProperty","encoding","options","undefined","$236f1a66ed1dd212$var$compiler","config","transforms","canContainEols","enter","autolink","opener","link","autolinkProtocol","onenterdata","autolinkEmail","atxHeading","heading","blockQuote","characterEscape","characterReference","codeFenced","codeFlow","codeFencedFenceInfo","buffer","codeFencedFenceMeta","codeIndented","codeText","codeTextData","data","codeFlowValue","definition","identifier","title","url","definitionDestinationString","definitionLabelString","definitionTitleString","emphasis","hardBreakEscape","hardBreak","hardBreakTrailing","htmlFlow","htmlFlowData","htmlTextData","image","alt","listItem","token","spread","_spread","checked","listItemValue","ancestor","stack","start","Number","parseInt","sliceSerialize","listOrdered","list","listUnordered","paragraph","reference","referenceString","resourceDestinationString","resourceTitleString","setextHeading","strong","thematicBreak","exit","closer","atxHeadingSequence","depth","onexitdata","call","characterEscapeValue","characterReferenceMarkerHexadecimal","onexitcharacterreferencemarker","characterReferenceMarkerNumeric","characterReferenceValue","decodeNumericCharacterReference","decodeNamedCharacterReference","tail","pop","position","end","$236f1a66ed1dd212$var$point","resume","codeFencedFence","lang","meta","normalizeIdentifier","toLowerCase","onexithardbreak","referenceType","getData","fragment","labelText","string","decodeString","lineEnding","context","resource","setextHeadingLineSequence","charC
odeAt","setextHeadingText","$236f1a66ed1dd212$var$configure","combined","extensions","Array","isArray","$236f1a66ed1dd212$var$extension","extension","key","right","assign","mdastExtensions","events","tree","tokenStack","setData","listStack","prepareList","firstBlankLineIndex","atMarker","containerBalance","listSpread","event","tailIndex","tailEvent","splice","handler","$236f1a66ed1dd212$var$defaultOnError","column","offset","create","and","errorHandler","parent","onExitError","open","stringifyPosition","toString","ordered","postprocess","parse","document","write","preprocess","d","left","$79369f5237095e86$export$f84e8e69fd4488a5","$79369f5237095e86$var$emptyOptions","settings","$79369f5237095e86$var$one","includeImageAlt","includeHtml","$79369f5237095e86$var$all","values","result","$1fc819ea965a3f81$export$98e6a39c04603d36","$1BzS9","$1YYHd","$4vIRz","$jugKX","$2JBld","$6oDSH","$8tRNW","parser","defined","lazy","constructs","combineExtensions","flow","initial","from","createTokenizer","$12b512a5203a7cce$export$86a865d89ef3c690","$Ux2lp","$12b512a5203a7cce$var$hasOwnProperty","all","$12b512a5203a7cce$var$syntaxExtension","hook","code","maybe","$12b512a5203a7cce$var$constructs","existing","before","add","$0a9edb7ff3ef159f$export$869882364835d202","items","parameters","chunkStart","slice","$0a9edb7ff3ef159f$export$4cbf152802aa238","$171a79fbb1402b9a$export$a7db06668cad9adb","$8GWoH","$5Lprs","tokenize","effects","previous","contentStart","attempt","contentInitial","consume","factorySpace","lineStart","contentType","next","markdownLineEnding","$653f8b80711d97d0$export$ae105c1eb063a0a2","ok","max","limit","POSITIVE_INFINITY","size","markdownSpace","prefix","$432513c48b12fdfc$export$d65d6b62c24d5436","$432513c48b12fdfc$export$75c76db11865a9f4","$432513c48b12fdfc$export$4397998b34fe597d","$432513c48b12fdfc$export$67dbf494fc8394df","$432513c48b12fdfc$export$ca8b5b1a6c320e6e","$432513c48b12fdfc$export$eca2752363989806","$432513c48b12fdfc$export$35794a7d1db99380","$432513c48b12fdfc$export$34a1dff1c0936953","$432513c48b12fdfc$export$a30284361b3814b7","$432513c48b12fdfc$export$2c6cf65c1127992a","$432513c48b12fdfc$export$aa04114dd888a7a0","$432513c48b12fdfc$export$a0ff789c034ffdf4","$1Vlly","$432513c48b12fdfc$var$regexCheck","unicodePunctuationRegex","regex","test","String","fromCharCode","$166bac51b6e2a8af$export$85b5101f24802e8c","$348cdb85d1aeadd1$export$5a7bfc01df82fcd1","childFlow","childToken","lineStartOffset","self","continued","item","containerState","continuation","documentContinue","checkNewContainers","_closeFlow","point","closeFlow","indexBeforeExits","indexBeforeFlow","exitContainers","documentContinued","currentConstruct","concrete","flowStart","interrupt","Boolean","_gfmTableDynamicInterruptHack","check","$348cdb85d1aeadd1$var$containerConstruct","thereIsANewContainer","thereIsNoNewContainer","now","containerContinue","_tokenizer","flowContinue","writeToChild","eof","stream","sliceStream","defineSkip","seen","entry","nok","disable","null","$e2fdd11cbe455c51$export$ccc7b0636abaffc3","$eRfeJ","$j7la5","blankLine","flowInitial","afterConstruct","$ad11d598ddddec2e$export$d50d28ce3ab2a612","after","partial","$deaf16079a1807fb$export$a7db06668cad9adb","$iNbDn","chunkInside","contentEnd","$deaf16079a1807fb$var$continuationConstruct","contentContinue","resolve","subtokenize","prefixed","$dae5d3de466990fa$export$12949d1dd00fddf4","otherIndex","otherEvent","subevents","more","jumps","_isInFirstContentOfListItem","$dae5d3de466990fa$var$subcontent","eventIndex","startPosition","startPositions","toke
nizer","childEvents","gaps","current","adjust","breaks","_gfmTasklistFirstContentOfListItem","_container","$1fdcbfee479a4092$export$50397835cbfdbc24","$1fdcbfee479a4092$export$22b082955e083ec3","$1fdcbfee479a4092$export$6f093cfa640b7166","resolveAll","$1fdcbfee479a4092$var$createResolver","$1fdcbfee479a4092$var$initializeFactory","field","notText","atBreak","$1fdcbfee479a4092$var$resolveAllLineSuffixes","extraResolver","tabs","chunks","bufferIndex","chunk","_index","_bufferIndex","$4a83eb91ec5d6b95$export$ae34f10ee4b29837","$8f02Z","initialize","columnStart","resolveAllConstructs","accountForPotentialSkip","fields","constructFactory","construct","addResult","onsuccessfulcheck","expandTabs","$4a83eb91ec5d6b95$var$serializeChunks","atTab","main","chunkIndex","go","state","$4a83eb91ec5d6b95$var$sliceChunks","view","startIndex","startBufferIndex","endIndex","endBufferIndex","head","_","restore","onreturn","returnState","bogusState","listOfConstructs","constructIndex","handleListOfConstructs","def","handleConstruct","store","startPoint","startPrevious","startCurrentConstruct","startEventsIndex","startStack","name","resolveTo","$5fffc2d235733349$export$3ff61ec196ff408b","called","$62cac499df3c25a0$export$5a7bfc01df82fcd1","$62cac499df3c25a0$export$5a2181fb44b58173","$62cac499df3c25a0$export$cf8bead395eff824","$62cac499df3c25a0$export$ccc7b0636abaffc3","$62cac499df3c25a0$export$22b082955e083ec3","$62cac499df3c25a0$export$6f093cfa640b7166","$62cac499df3c25a0$export$d44f260a3f9b69f5","$62cac499df3c25a0$export$b9c0b60d74426aea","$62cac499df3c25a0$export$e20fbacbb41798b","$eIIGZ","$807ia","$acnAX","$7ITXU","$hIELq","$2qoAc","$eHPOy","$1XgoW","$ix9Ua","$1d2Fb","$e7bPJ","$aMDJK","$fAOFU","$2yvw6","$l3YSx","$ifBjz","$9shNq","$7twjg","$cfK44","$eBJ0T","headingAtx","setextUnderline","labelStartImage","attention","labelStartLink","labelEnd","resolver","$ab77d7b684ccca7a$export$45b92471da762af7","$i5TSH","marker","attentionMarkers","classifyCharacter","inside","close","_open","_close","group","openingSequence","closingSequence","use","nextEvents","$ab77d7b684ccca7a$var$movePoint","insideSpan","$d2c40a75b3c1060a$export$e3902bc0d835cad0","markdownLineEndingOrSpace","unicodeWhitespace","unicodePunctuation","$5d33fad6991aa1ad$export$17ddf85e4c916ad6","asciiAlpha","schemeOrEmailAtext","emailAtext","asciiAlphanumeric","schemeInsideOrEmailAtext","urlInside","asciiControl","emailAtSignOrDot","asciiAtext","emailLabel","emailValue","$76cd53d8c5b717c4$export$200dcd0a5903c968","contBefore","$59f80dfce403f5de$export$2005478564e78d96","asciiPunctuation","$ce662a9402db28e3$export$e31905600aaf3d8e","numeric","asciiHexDigit","asciiDigit","$c0a58af43f6129ac$export$289b6a6320f709b4","$kmPXE","$c0a58af43f6129ac$var$own","characterEntities","$ed3e2b2347fac27f$export$ec810d1aafce79a7","AElig","AMP","Aacute","Abreve","Acirc","Acy","Afr","Agrave","Alpha","Amacr","And","Aogon","Aopf","ApplyFunction","Aring","Ascr","Assign","Atilde","Auml","Backslash","Barv","Barwed","Bcy","Because","Bernoullis","Beta","Bfr","Bopf","Breve","Bscr","Bumpeq","CHcy","COPY","Cacute","Cap","CapitalDifferentialD","Cayleys","Ccaron","Ccedil","Ccirc","Cconint","Cdot","Cedilla","CenterDot","Cfr","Chi","CircleDot","CircleMinus","CirclePlus","CircleTimes","ClockwiseContourIntegral","CloseCurlyDoubleQuote","CloseCurlyQuote","Colon","Colone","Congruent","Conint","ContourIntegral","Copf","Coproduct","CounterClockwiseContourIntegral","Cross","Cscr","Cup","CupCap","DD","DDotrahd","DJcy","DScy","DZcy","Dagger","Darr","Dashv","Dcaron","Dcy","Del","Delta","Dfr","Diacrit
icalAcute","DiacriticalDot","DiacriticalDoubleAcute","DiacriticalGrave","DiacriticalTilde","Diamond","DifferentialD","Dopf","Dot","DotDot","DotEqual","DoubleContourIntegral","DoubleDot","DoubleDownArrow","DoubleLeftArrow","DoubleLeftRightArrow","DoubleLeftTee","DoubleLongLeftArrow","DoubleLongLeftRightArrow","DoubleLongRightArrow","DoubleRightArrow","DoubleRightTee","DoubleUpArrow","DoubleUpDownArrow","DoubleVerticalBar","DownArrow","DownArrowBar","DownArrowUpArrow","DownBreve","DownLeftRightVector","DownLeftTeeVector","DownLeftVector","DownLeftVectorBar","DownRightTeeVector","DownRightVector","DownRightVectorBar","DownTee","DownTeeArrow","Downarrow","Dscr","Dstrok","ENG","ETH","Eacute","Ecaron","Ecirc","Ecy","Edot","Efr","Egrave","Element","Emacr","EmptySmallSquare","EmptyVerySmallSquare","Eogon","Eopf","Epsilon","Equal","EqualTilde","Equilibrium","Escr","Esim","Eta","Euml","Exists","ExponentialE","Fcy","Ffr","FilledSmallSquare","FilledVerySmallSquare","Fopf","ForAll","Fouriertrf","Fscr","GJcy","GT","Gamma","Gammad","Gbreve","Gcedil","Gcirc","Gcy","Gdot","Gfr","Gg","Gopf","GreaterEqual","GreaterEqualLess","GreaterFullEqual","GreaterGreater","GreaterLess","GreaterSlantEqual","GreaterTilde","Gscr","Gt","HARDcy","Hacek","Hat","Hcirc","Hfr","HilbertSpace","Hopf","HorizontalLine","Hscr","Hstrok","HumpDownHump","HumpEqual","IEcy","IJlig","IOcy","Iacute","Icirc","Icy","Idot","Ifr","Igrave","Im","Imacr","ImaginaryI","Implies","Int","Integral","Intersection","InvisibleComma","InvisibleTimes","Iogon","Iopf","Iota","Iscr","Itilde","Iukcy","Iuml","Jcirc","Jcy","Jfr","Jopf","Jscr","Jsercy","Jukcy","KHcy","KJcy","Kappa","Kcedil","Kcy","Kfr","Kopf","Kscr","LJcy","LT","Lacute","Lambda","Lang","Laplacetrf","Larr","Lcaron","Lcedil","Lcy","LeftAngleBracket","LeftArrow","LeftArrowBar","LeftArrowRightArrow","LeftCeiling","LeftDoubleBracket","LeftDownTeeVector","LeftDownVector","LeftDownVectorBar","LeftFloor","LeftRightArrow","LeftRightVector","LeftTee","LeftTeeArrow","LeftTeeVector","LeftTriangle","LeftTriangleBar","LeftTriangleEqual","LeftUpDownVector","LeftUpTeeVector","LeftUpVector","LeftUpVectorBar","LeftVector","LeftVectorBar","Leftarrow","Leftrightarrow","LessEqualGreater","LessFullEqual","LessGreater","LessLess","LessSlantEqual","LessTilde","Lfr","Ll","Lleftarrow","Lmidot","LongLeftArrow","LongLeftRightArrow","LongRightArrow","Longleftarrow","Longleftrightarrow","Longrightarrow","Lopf","LowerLeftArrow","LowerRightArrow","Lscr","Lsh","Lstrok","Lt","Map","Mcy","MediumSpace","Mellintrf","Mfr","MinusPlus","Mopf","Mscr","Mu","NJcy","Nacute","Ncaron","Ncedil","Ncy","NegativeMediumSpace","NegativeThickSpace","NegativeThinSpace","NegativeVeryThinSpace","NestedGreaterGreater","NestedLessLess","NewLine","Nfr","NoBreak","NonBreakingSpace","Nopf","Not","NotCongruent","NotCupCap","NotDoubleVerticalBar","NotElement","NotEqual","NotEqualTilde","NotExists","NotGreater","NotGreaterEqual","NotGreaterFullEqual","NotGreaterGreater","NotGreaterLess","NotGreaterSlantEqual","NotGreaterTilde","NotHumpDownHump","NotHumpEqual","NotLeftTriangle","NotLeftTriangleBar","NotLeftTriangleEqual","NotLess","NotLessEqual","NotLessGreater","NotLessLess","NotLessSlantEqual","NotLessTilde","NotNestedGreaterGreater","NotNestedLessLess","NotPrecedes","NotPrecedesEqual","NotPrecedesSlantEqual","NotReverseElement","NotRightTriangle","NotRightTriangleBar","NotRightTriangleEqual","NotSquareSubset","NotSquareSubsetEqual","NotSquareSuperset","NotSquareSupersetEqual","NotSubset","NotSubsetEqual","NotSucceeds","NotSucceedsEqual","NotSucceedsSlantEqual
","NotSucceedsTilde","NotSuperset","NotSupersetEqual","NotTilde","NotTildeEqual","NotTildeFullEqual","NotTildeTilde","NotVerticalBar","Nscr","Ntilde","Nu","OElig","Oacute","Ocirc","Ocy","Odblac","Ofr","Ograve","Omacr","Omega","Omicron","Oopf","OpenCurlyDoubleQuote","OpenCurlyQuote","Or","Oscr","Oslash","Otilde","Otimes","Ouml","OverBar","OverBrace","OverBracket","OverParenthesis","PartialD","Pcy","Pfr","Phi","Pi","PlusMinus","Poincareplane","Popf","Pr","Precedes","PrecedesEqual","PrecedesSlantEqual","PrecedesTilde","Prime","Product","Proportion","Proportional","Pscr","Psi","QUOT","Qfr","Qopf","Qscr","RBarr","REG","Racute","Rang","Rarr","Rarrtl","Rcaron","Rcedil","Rcy","Re","ReverseElement","ReverseEquilibrium","ReverseUpEquilibrium","Rfr","Rho","RightAngleBracket","RightArrow","RightArrowBar","RightArrowLeftArrow","RightCeiling","RightDoubleBracket","RightDownTeeVector","RightDownVector","RightDownVectorBar","RightFloor","RightTee","RightTeeArrow","RightTeeVector","RightTriangle","RightTriangleBar","RightTriangleEqual","RightUpDownVector","RightUpTeeVector","RightUpVector","RightUpVectorBar","RightVector","RightVectorBar","Rightarrow","Ropf","RoundImplies","Rrightarrow","Rscr","Rsh","RuleDelayed","SHCHcy","SHcy","SOFTcy","Sacute","Sc","Scaron","Scedil","Scirc","Scy","Sfr","ShortDownArrow","ShortLeftArrow","ShortRightArrow","ShortUpArrow","Sigma","SmallCircle","Sopf","Sqrt","Square","SquareIntersection","SquareSubset","SquareSubsetEqual","SquareSuperset","SquareSupersetEqual","SquareUnion","Sscr","Star","Sub","Subset","SubsetEqual","Succeeds","SucceedsEqual","SucceedsSlantEqual","SucceedsTilde","SuchThat","Sum","Sup","Superset","SupersetEqual","Supset","THORN","TRADE","TSHcy","TScy","Tab","Tau","Tcaron","Tcedil","Tcy","Tfr","Therefore","Theta","ThickSpace","ThinSpace","Tilde","TildeEqual","TildeFullEqual","TildeTilde","Topf","TripleDot","Tscr","Tstrok","Uacute","Uarr","Uarrocir","Ubrcy","Ubreve","Ucirc","Ucy","Udblac","Ufr","Ugrave","Umacr","UnderBar","UnderBrace","UnderBracket","UnderParenthesis","Union","UnionPlus","Uogon","Uopf","UpArrow","UpArrowBar","UpArrowDownArrow","UpDownArrow","UpEquilibrium","UpTee","UpTeeArrow","Uparrow","Updownarrow","UpperLeftArrow","UpperRightArrow","Upsi","Upsilon","Uring","Uscr","Utilde","Uuml","VDash","Vbar","Vcy","Vdash","Vdashl","Vee","Verbar","Vert","VerticalBar","VerticalLine","VerticalSeparator","VerticalTilde","VeryThinSpace","Vfr","Vopf","Vscr","Vvdash","Wcirc","Wedge","Wfr","Wopf","Wscr","Xfr","Xi","Xopf","Xscr","YAcy","YIcy","YUcy","Yacute","Ycirc","Ycy","Yfr","Yopf","Yscr","Yuml","ZHcy","Zacute","Zcaron","Zcy","Zdot","ZeroWidthSpace","Zeta","Zfr","Zopf","Zscr","aacute","abreve","ac","acE","acd","acirc","acute","acy","aelig","af","afr","agrave","alefsym","aleph","alpha","amacr","amalg","amp","andand","andd","andslope","andv","ang","ange","angle","angmsd","angmsdaa","angmsdab","angmsdac","angmsdad","angmsdae","angmsdaf","angmsdag","angmsdah","angrt","angrtvb","angrtvbd","angsph","angst","angzarr","aogon","aopf","ap","apE","apacir","ape","apid","apos","approx","approxeq","aring","ascr","ast","asymp","asympeq","atilde","auml","awconint","awint","bNot","backcong","backepsilon","backprime","backsim","backsimeq","barvee","barwed","barwedge","bbrk","bbrktbrk","bcong","bcy","bdquo","becaus","because","bemptyv","bepsi","bernou","beta","beth","between","bfr","bigcap","bigcirc","bigcup","bigodot","bigoplus","bigotimes","bigsqcup","bigstar","bigtriangledown","bigtriangleup","biguplus","bigvee","bigwedge","bkarow","blacklozenge","blacksquare","blacktriangle","b
lacktriangledown","blacktriangleleft","blacktriangleright","blank","blk12","blk14","blk34","block","bne","bnequiv","bnot","bopf","bot","bottom","bowtie","boxDL","boxDR","boxDl","boxDr","boxH","boxHD","boxHU","boxHd","boxHu","boxUL","boxUR","boxUl","boxUr","boxV","boxVH","boxVL","boxVR","boxVh","boxVl","boxVr","boxbox","boxdL","boxdR","boxdl","boxdr","boxh","boxhD","boxhU","boxhd","boxhu","boxminus","boxplus","boxtimes","boxuL","boxuR","boxul","boxur","boxv","boxvH","boxvL","boxvR","boxvh","boxvl","boxvr","bprime","breve","brvbar","bscr","bsemi","bsim","bsime","bsol","bsolb","bsolhsub","bull","bullet","bump","bumpE","bumpe","bumpeq","cacute","cap","capand","capbrcup","capcap","capcup","capdot","caps","caret","caron","ccaps","ccaron","ccedil","ccirc","ccups","ccupssm","cdot","cedil","cemptyv","cent","centerdot","cfr","chcy","checkmark","chi","cir","cirE","circ","circeq","circlearrowleft","circlearrowright","circledR","circledS","circledast","circledcirc","circleddash","cire","cirfnint","cirmid","cirscir","clubs","clubsuit","colon","colone","coloneq","comma","commat","comp","compfn","complement","complexes","cong","congdot","conint","copf","coprod","copy","copysr","crarr","cross","cscr","csub","csube","csup","csupe","ctdot","cudarrl","cudarrr","cuepr","cuesc","cularr","cularrp","cup","cupbrcap","cupcap","cupcup","cupdot","cupor","cups","curarr","curarrm","curlyeqprec","curlyeqsucc","curlyvee","curlywedge","curren","curvearrowleft","curvearrowright","cuvee","cuwed","cwconint","cwint","cylcty","dArr","dHar","dagger","daleth","darr","dash","dashv","dbkarow","dblac","dcaron","dcy","dd","ddagger","ddarr","ddotseq","deg","delta","demptyv","dfisht","dfr","dharl","dharr","diam","diamond","diamondsuit","diams","die","digamma","disin","divide","divideontimes","divonx","djcy","dlcorn","dlcrop","dollar","dopf","dot","doteq","doteqdot","dotminus","dotplus","dotsquare","doublebarwedge","downarrow","downdownarrows","downharpoonleft","downharpoonright","drbkarow","drcorn","drcrop","dscr","dscy","dsol","dstrok","dtdot","dtri","dtrif","duarr","duhar","dwangle","dzcy","dzigrarr","eDDot","eDot","eacute","easter","ecaron","ecir","ecirc","ecolon","ecy","edot","ee","efDot","efr","eg","egrave","egs","egsdot","elinters","ell","els","elsdot","emacr","empty","emptyset","emptyv","emsp13","emsp14","emsp","eng","ensp","eogon","eopf","epar","eparsl","eplus","epsi","epsilon","epsiv","eqcirc","eqcolon","eqsim","eqslantgtr","eqslantless","equals","equest","equiv","equivDD","eqvparsl","erDot","erarr","escr","esdot","esim","eta","eth","euml","euro","excl","exist","expectation","exponentiale","fallingdotseq","fcy","female","ffilig","fflig","ffllig","ffr","filig","fjlig","flat","fllig","fltns","fnof","fopf","forall","fork","forkv","fpartint","frac12","frac13","frac14","frac15","frac16","frac18","frac23","frac25","frac34","frac35","frac38","frac45","frac56","frac58","frac78","frasl","frown","fscr","gE","gEl","gacute","gamma","gammad","gap","gbreve","gcirc","gcy","gdot","ge","gel","geq","geqq","geqslant","ges","gescc","gesdot","gesdoto","gesdotol","gesl","gesles","gfr","gg","ggg","gimel","gjcy","gl","glE","gla","glj","gnE","gnap","gnapprox","gne","gneq","gneqq","gnsim","gopf","grave","gscr","gsim","gsime","gsiml","gt","gtcc","gtcir","gtdot","gtlPar","gtquest","gtrapprox","gtrarr","gtrdot","gtreqless","gtreqqless","gtrless","gtrsim","gvertneqq","gvnE","hArr","hairsp","half","hamilt","hardcy","harr","harrcir","harrw","hbar","hcirc","hearts","heartsuit","hellip","hercon","hfr","hksearow","hkswarow","hoarr","homtht","hookleftarrow","hoo
krightarrow","hopf","horbar","hscr","hslash","hstrok","hybull","hyphen","iacute","ic","icirc","icy","iecy","iexcl","iff","ifr","igrave","ii","iiiint","iiint","iinfin","iiota","ijlig","imacr","imagline","imagpart","imath","imof","imped","in","incare","infin","infintie","inodot","int","intcal","integers","intercal","intlarhk","intprod","iocy","iogon","iopf","iota","iprod","iquest","iscr","isin","isinE","isindot","isins","isinsv","isinv","it","itilde","iukcy","iuml","jcirc","jcy","jfr","jmath","jopf","jscr","jsercy","jukcy","kappa","kappav","kcedil","kcy","kfr","kgreen","khcy","kjcy","kopf","kscr","lAarr","lArr","lAtail","lBarr","lE","lEg","lHar","lacute","laemptyv","lagran","lambda","langd","langle","lap","laquo","larr","larrb","larrbfs","larrfs","larrhk","larrlp","larrpl","larrsim","larrtl","lat","latail","late","lates","lbarr","lbbrk","lbrace","lbrack","lbrke","lbrksld","lbrkslu","lcaron","lcedil","lceil","lcub","lcy","ldca","ldquo","ldquor","ldrdhar","ldrushar","ldsh","le","leftarrow","leftarrowtail","leftharpoondown","leftharpoonup","leftleftarrows","leftrightarrow","leftrightarrows","leftrightharpoons","leftrightsquigarrow","leftthreetimes","leg","leq","leqq","leqslant","les","lescc","lesdot","lesdoto","lesdotor","lesg","lesges","lessapprox","lessdot","lesseqgtr","lesseqqgtr","lessgtr","lesssim","lfisht","lfloor","lfr","lg","lgE","lhard","lharu","lharul","lhblk","ljcy","ll","llarr","llcorner","llhard","lltri","lmidot","lmoust","lmoustache","lnE","lnap","lnapprox","lne","lneq","lneqq","lnsim","loang","loarr","lobrk","longleftarrow","longleftrightarrow","longmapsto","longrightarrow","looparrowleft","looparrowright","lopar","lopf","loplus","lotimes","lowast","lowbar","loz","lozenge","lozf","lpar","lparlt","lrarr","lrcorner","lrhar","lrhard","lrm","lrtri","lsaquo","lscr","lsh","lsim","lsime","lsimg","lsqb","lsquo","lsquor","lstrok","lt","ltcc","ltcir","ltdot","lthree","ltimes","ltlarr","ltquest","ltrPar","ltri","ltrie","ltrif","lurdshar","luruhar","lvertneqq","lvnE","mDDot","macr","male","malt","maltese","mapsto","mapstodown","mapstoleft","mapstoup","mcomma","mcy","mdash","measuredangle","mfr","mho","micro","mid","midast","midcir","middot","minus","minusb","minusd","minusdu","mlcp","mldr","mnplus","models","mopf","mp","mscr","mstpos","mu","multimap","mumap","nGg","nGt","nGtv","nLeftarrow","nLeftrightarrow","nLl","nLt","nLtv","nRightarrow","nVDash","nVdash","nabla","nacute","nang","nap","napE","napid","napos","napprox","natur","natural","naturals","nbsp","nbump","nbumpe","ncap","ncaron","ncedil","ncong","ncongdot","ncup","ncy","ndash","ne","neArr","nearhk","nearr","nearrow","nedot","nequiv","nesear","nesim","nexist","nexists","nfr","ngE","nge","ngeq","ngeqq","ngeqslant","nges","ngsim","ngt","ngtr","nhArr","nharr","nhpar","ni","nis","nisd","niv","njcy","nlArr","nlE","nlarr","nldr","nle","nleftarrow","nleftrightarrow","nleq","nleqq","nleqslant","nles","nless","nlsim","nlt","nltri","nltrie","nmid","nopf","not","notin","notinE","notindot","notinva","notinvb","notinvc","notni","notniva","notnivb","notnivc","npar","nparallel","nparsl","npart","npolint","npr","nprcue","npre","nprec","npreceq","nrArr","nrarr","nrarrc","nrarrw","nrightarrow","nrtri","nrtrie","nsc","nsccue","nsce","nscr","nshortmid","nshortparallel","nsim","nsime","nsimeq","nsmid","nspar","nsqsube","nsqsupe","nsub","nsubE","nsube","nsubset","nsubseteq","nsubseteqq","nsucc","nsucceq","nsup","nsupE","nsupe","nsupset","nsupseteq","nsupseteqq","ntgl","ntilde","ntlg","ntriangleleft","ntrianglelefteq","ntriangleright","ntrianglerighteq","nu"
,"num","numero","numsp","nvDash","nvHarr","nvap","nvdash","nvge","nvgt","nvinfin","nvlArr","nvle","nvlt","nvltrie","nvrArr","nvrtrie","nvsim","nwArr","nwarhk","nwarr","nwarrow","nwnear","oS","oacute","oast","ocir","ocirc","ocy","odash","odblac","odiv","odot","odsold","oelig","ofcir","ofr","ogon","ograve","ogt","ohbar","ohm","oint","olarr","olcir","olcross","oline","olt","omacr","omega","omicron","omid","ominus","oopf","opar","operp","oplus","or","orarr","ord","order","orderof","ordf","ordm","origof","oror","orslope","orv","oscr","oslash","osol","otilde","otimes","otimesas","ouml","ovbar","par","para","parallel","parsim","parsl","part","pcy","percnt","period","permil","perp","pertenk","pfr","phi","phiv","phmmat","phone","pi","pitchfork","piv","planck","planckh","plankv","plus","plusacir","plusb","pluscir","plusdo","plusdu","pluse","plusmn","plussim","plustwo","pm","pointint","popf","pound","pr","prE","prap","prcue","pre","prec","precapprox","preccurlyeq","preceq","precnapprox","precneqq","precnsim","precsim","prime","primes","prnE","prnap","prnsim","prod","profalar","profline","profsurf","prop","propto","prsim","prurel","pscr","psi","puncsp","qfr","qint","qopf","qprime","qscr","quaternions","quatint","quest","questeq","quot","rAarr","rArr","rAtail","rBarr","rHar","race","racute","radic","raemptyv","rang","rangd","range","rangle","raquo","rarr","rarrap","rarrb","rarrbfs","rarrc","rarrfs","rarrhk","rarrlp","rarrpl","rarrsim","rarrtl","rarrw","ratail","ratio","rationals","rbarr","rbbrk","rbrace","rbrack","rbrke","rbrksld","rbrkslu","rcaron","rcedil","rceil","rcub","rcy","rdca","rdldhar","rdquo","rdquor","rdsh","real","realine","realpart","reals","rect","reg","rfisht","rfloor","rfr","rhard","rharu","rharul","rho","rhov","rightarrow","rightarrowtail","rightharpoondown","rightharpoonup","rightleftarrows","rightleftharpoons","rightrightarrows","rightsquigarrow","rightthreetimes","ring","risingdotseq","rlarr","rlhar","rlm","rmoust","rmoustache","rnmid","roang","roarr","robrk","ropar","ropf","roplus","rotimes","rpar","rpargt","rppolint","rrarr","rsaquo","rscr","rsh","rsqb","rsquo","rsquor","rthree","rtimes","rtri","rtrie","rtrif","rtriltri","ruluhar","rx","sacute","sbquo","sc","scE","scap","scaron","sccue","sce","scedil","scirc","scnE","scnap","scnsim","scpolint","scsim","scy","sdot","sdotb","sdote","seArr","searhk","searr","searrow","sect","semi","seswar","setminus","setmn","sext","sfr","sfrown","sharp","shchcy","shcy","shortmid","shortparallel","shy","sigma","sigmaf","sigmav","sim","simdot","sime","simeq","simg","simgE","siml","simlE","simne","simplus","simrarr","slarr","smallsetminus","smashp","smeparsl","smid","smile","smt","smte","smtes","softcy","sol","solb","solbar","sopf","spades","spadesuit","spar","sqcap","sqcaps","sqcup","sqcups","sqsub","sqsube","sqsubset","sqsubseteq","sqsup","sqsupe","sqsupset","sqsupseteq","squ","square","squarf","squf","srarr","sscr","ssetmn","ssmile","sstarf","star","starf","straightepsilon","straightphi","strns","sub","subE","subdot","sube","subedot","submult","subnE","subne","subplus","subrarr","subset","subseteq","subseteqq","subsetneq","subsetneqq","subsim","subsub","subsup","succ","succapprox","succcurlyeq","succeq","succnapprox","succneqq","succnsim","succsim","sum","sung","sup1","sup2","sup3","sup","supE","supdot","supdsub","supe","supedot","suphsol","suphsub","suplarr","supmult","supnE","supne","supplus","supset","supseteq","supseteqq","supsetneq","supsetneqq","supsim","supsub","supsup","swArr","swarhk","swarr","swarrow","swnwar","szlig","target","tau","tbrk"
,"tcaron","tcedil","tcy","tdot","telrec","tfr","there4","therefore","theta","thetasym","thetav","thickapprox","thicksim","thinsp","thkap","thksim","thorn","tilde","times","timesb","timesbar","timesd","tint","toea","top","topbot","topcir","topf","topfork","tosa","tprime","trade","triangle","triangledown","triangleleft","trianglelefteq","triangleq","triangleright","trianglerighteq","tridot","trie","triminus","triplus","trisb","tritime","trpezium","tscr","tscy","tshcy","tstrok","twixt","twoheadleftarrow","twoheadrightarrow","uArr","uHar","uacute","uarr","ubrcy","ubreve","ucirc","ucy","udarr","udblac","udhar","ufisht","ufr","ugrave","uharl","uharr","uhblk","ulcorn","ulcorner","ulcrop","ultri","umacr","uml","uogon","uopf","uparrow","updownarrow","upharpoonleft","upharpoonright","uplus","upsi","upsih","upsilon","upuparrows","urcorn","urcorner","urcrop","uring","urtri","uscr","utdot","utilde","utri","utrif","uuarr","uuml","uwangle","vArr","vBar","vBarv","vDash","vangrt","varepsilon","varkappa","varnothing","varphi","varpi","varpropto","varr","varrho","varsigma","varsubsetneq","varsubsetneqq","varsupsetneq","varsupsetneqq","vartheta","vartriangleleft","vartriangleright","vcy","vdash","vee","veebar","veeeq","vellip","verbar","vert","vfr","vltri","vnsub","vnsup","vopf","vprop","vrtri","vscr","vsubnE","vsubne","vsupnE","vsupne","vzigzag","wcirc","wedbar","wedge","wedgeq","weierp","wfr","wopf","wp","wr","wreath","wscr","xcap","xcirc","xcup","xdtri","xfr","xhArr","xharr","xi","xlArr","xlarr","xmap","xnis","xodot","xopf","xoplus","xotime","xrArr","xrarr","xscr","xsqcup","xuplus","xutri","xvee","xwedge","yacute","yacy","ycirc","ycy","yen","yfr","yicy","yopf","yscr","yucy","yuml","zacute","zcaron","zcy","zdot","zeetrf","zeta","zfr","zhcy","zigrarr","zopf","zscr","zwj","zwnj","$1c411422dc205d23$export$c23e4921f8d87e7c","$1c411422dc205d23$var$nonLazyContinuation","closeStart","beforeSequenceClose","sequenceClose","sizeOpen","sequenceCloseAfter","initialPrefix","beforeSequenceOpen","sequenceOpen","infoBefore","atNonLazyBreak","metaBefore","contentBefore","beforeContentChunk","contentChunk","$ab4d46130bbbc8bd$export$47910b7ab28d1853","afterPrefix","$ab4d46130bbbc8bd$var$furtherStart","furtherStart","$16c806108f267846$export$d24f93e715f9df88","tailExitIndex","headEnterIndex","$d7e2fdfd86ff4dd4$export$69f215ed977cdb73","$flE5d","$i8rQF","$kXx8i","$5Jc1R","factoryLabel","labelAfter","markerAfter","factoryWhitespace","destinationBefore","factoryDestination","destinationAfter","$d7e2fdfd86ff4dd4$var$titleBefore","afterWhitespace","beforeMarker","factoryTitle","titleAfter","titleAfterOptionalWhitespace","$b2c7e8deac525d60$export$2e6c8deaa96af245","literalType","literalMarkerType","rawType","stringType","balance","enclosedBefore","raw","enclosed","enclosedEscape","rawEscape","$d33e937ae1ea4ed4$export$7b768614d8ba97a7","markerType","labelInside","labelEscape","$f42304cc65ff93d0$export$f970569cc855e483","begin","escape","$42ba7c725dbf102d$export$1f27bd1aa33ce173","$710781c0cdec9f6d$export$806d55e226cfcd08","toUpperCase","$0e19123aa1bdeff9$export$86c573ab9e06f418","$a46ae5f97ae33c5d$export$3871e9deb360695c","sequenceFurther","$7d9d35fac2b6ebcc$export$476ac411cb7d0d8f","$9Lzi7","closingTag","markerB","declarationOpen","tagCloseStart","continuationDeclarationInside","tagName","commentOpenInside","cdataOpenInside","slash","htmlRawNames","htmlBlockNames","basicSelfClosing","completeClosingTagAfter","completeEnd","completeAttributeNameBefore","completeAttributeName","completeAttributeNameAfter","completeAttributeValueBefore"
,"completeAttributeValueQuoted","completeAttributeValueUnquoted","completeAttributeValueQuotedAfter","completeAfter","continuationCommentInside","continuationRawTagOpen","continuationClose","continuationCdataInside","$7d9d35fac2b6ebcc$var$blankLineBefore","continuationAfter","continuationStart","$7d9d35fac2b6ebcc$var$nonLazyContinuationStart","continuationStartNonLazy","continuationBefore","continuationRawEndTag","$71c3e0f388301604$export$7364aee1c59d1879","$71c3e0f388301604$export$948e66da505d080","$b5a186cf1fccfac7$export$398af27f284914fe","instruction","tagOpen","declaration","commentEnd","comment","commentClose","lineEndingBefore","cdata","cdataClose","cdataEnd","instructionClose","tagClose","tagCloseBetween","tagOpenBetween","tagOpenAttributeName","tagOpenAttributeNameAfter","tagOpenAttributeValueBefore","tagOpenAttributeValueQuoted","tagOpenAttributeValueUnquoted","tagOpenAttributeValueQuotedAfter","lineEndingAfter","lineEndingAfterPrefix","$1dc73455d0df3be9$export$470a5dafbbf62654","labelStart","_balanced","_inactive","labelEndNok","$1dc73455d0df3be9$var$resourceConstruct","labelEndOk","$1dc73455d0df3be9$var$referenceFullConstruct","referenceNotFull","$1dc73455d0df3be9$var$referenceCollapsedConstruct","media","resourceBefore","resourceOpen","resourceEnd","resourceDestinationAfter","resourceDestinationMissing","resourceBetween","resourceTitleAfter","referenceFullAfter","referenceFullMissing","referenceCollapsedOpen","$f5591a0a77ea393a$export$3d754936e25aa5f5","$d496929de41504d8$export$5c0cee0701a3b584","$6e24888a23d546e6$export$8e62e0ad51c97b2","$57144dde063f82cb$export$8837f4fc672e936d","initialSize","kind","onBlank","$57144dde063f82cb$var$listItemPrefixWhitespaceConstruct","endOfPrefix","otherPrefix","initialBlankLine","furtherBlankLines","notInCurrentItem","$57144dde063f82cb$var$indentConstruct","$aa27701f1509407c$export$ba7b13e047416c03","sequence","$8eba9e38644cd781$export$e104e2de391dfde9","$cdd1ff6afd8515e5$export$fc37fe19dfda43ee","$cdd1ff6afd8515e5$var$search","atCarriageReturn","match","endPosition","lastIndex","exec","Math","ceil","$4e6fd59d488c6494$export$bd0e6e1378a871d7","$23c7742930bb7617$export$15a69557afac2c20","base","$0fb53ec7385928f9$export$a0fb664af7d0cc44","$0fb53ec7385928f9$var$characterEscapeOrReference","$0fb53ec7385928f9$var$decode","$0","$1","$2","hex","$b094d2c4d032c594$export$c304dd45fe166145","$b094d2c4d032c594$var$position","$b094d2c4d032c594$var$point","$b094d2c4d032c594$var$index","pos"],"version":3,"file":"flowDiagram-b222e15a.f77f79b1.js.map"}