Please wait. This can take a few minutes...
Downloading a project requires significant server resources. Please understand that we need to cover our server costs. Thank you in advance.
Project price: only $1
You can buy this project and download or modify it as often as you want.
com.github.jknack.handlebars.internal.HbsLexer Maven / Gradle / Ivy
// Generated from com/github/jknack/handlebars/internal/HbsLexer.g4 by ANTLR 4.5.1
package com.github.jknack.handlebars.internal;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
// NOTE(review): This class is generated by ANTLR 4.5.1 from HbsLexer.g4. The
// token tables, sempred dispatch, and _serializedATN below are machine output
// and must NOT be hand-edited — regenerate from the grammar instead. The
// private helper methods (consumeUntil, comment, varEscape, startToken,
// endToken, tryToken, ahead, ...) come from the grammar's @members section and
// implement dynamic Handlebars delimiter matching via semantic predicates.
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class HbsLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.5.1", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
// Generated token-type constants (indexes into the vocabulary tables below).
public static final int
ESC_VAR=1, TEXT=2, COMMENT=3, START_AMP=4, END_RAW_BLOCK=5, START_RAW=6,
START_T=7, UNLESS=8, START_PARTIAL_BLOCK=9, START_BLOCK=10, START_DELIM=11,
START_PARTIAL=12, END_BLOCK=13, START=14, SPACE=15, NL=16, END_DELIM=17,
WS_DELIM=18, DELIM=19, END_RAW=20, END_T=21, END=22, DECORATOR=23, AS=24,
PIPE=25, DOUBLE_STRING=26, SINGLE_STRING=27, EQ=28, INT=29, BOOLEAN=30,
ELSE=31, QID=32, PATH=33, LP=34, RP=35, WS=36;
// Lexer mode constants: mode 0 is DEFAULT_MODE (inherited from Lexer).
public static final int SET_DELIMS = 1;
public static final int VAR = 2;
public static String[] modeNames = {
"DEFAULT_MODE", "SET_DELIMS", "VAR"
};
public static final String[] ruleNames = {
"ESC_VAR", "TEXT", "COMMENT", "START_AMP", "END_RAW_BLOCK", "START_RAW",
"START_T", "UNLESS", "START_PARTIAL_BLOCK", "START_BLOCK", "START_DELIM",
"START_PARTIAL", "END_BLOCK", "START", "SPACE", "NL", "END_DELIM", "WS_DELIM",
"DELIM", "END_RAW", "END_T", "END", "DECORATOR", "AS", "PIPE", "DOUBLE_STRING",
"SINGLE_STRING", "EQ", "INT", "BOOLEAN", "ELSE", "QID", "PATH", "PATH_SEGMENT",
"ID_SEPARATOR", "ID", "ID_START", "ID_SUFFIX", "ID_ESCAPE", "ID_PART",
"LP", "RP", "WS"
};
private static final String[] _LITERAL_NAMES = {
null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null, null, "'*'",
"'as'", "'|'", null, null, "'='", null, null, null, null, null, "'('",
"')'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "ESC_VAR", "TEXT", "COMMENT", "START_AMP", "END_RAW_BLOCK", "START_RAW",
"START_T", "UNLESS", "START_PARTIAL_BLOCK", "START_BLOCK", "START_DELIM",
"START_PARTIAL", "END_BLOCK", "START", "SPACE", "NL", "END_DELIM", "WS_DELIM",
"DELIM", "END_RAW", "END_T", "END", "DECORATOR", "AS", "PIPE", "DOUBLE_STRING",
"SINGLE_STRING", "EQ", "INT", "BOOLEAN", "ELSE", "QID", "PATH", "LP",
"RP", "WS"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
// Legacy token-name table: literal name if present, else symbolic name, else "".
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
// Some default values
// Current start/end delimiters; the default is the standard mustache pair,
// but callers may override them via the 3-arg constructor.
String start = "{{";
String end = "}}";
// Set to true by startToken/endToken when a "~" (whitespace-control) variant
// of a delimiter is matched; never reset here, so consumers read-and-clear it.
boolean whiteSpaceControl;
/**
* Creates a lexer with custom start/end delimiters (e.g. "<%" / "%>").
*
* @param input the character stream to tokenize
* @param start the opening delimiter to recognize instead of "{{"
* @param end the closing delimiter to recognize instead of "}}"
*/
public HbsLexer(CharStream input, String start, String end) {
this(input);
this.start = start;
this.end = end;
}
/** Returns true if {@code ch} is a space, tab, carriage return, or newline. */
private boolean isWhite(int ch) {
return ch == ' ' || ch == '\t' || ch == '\r' || ch == '\n';
}
/**
* Consumes plain TEXT: advances the stream over characters up to (but not
* including) the next occurrence of {@code token}, an escaped
* ("\" + token) occurrence, EOF, or any whitespace character.
* Returns false when zero characters were consumed (no TEXT token here).
*/
private boolean consumeUntil(final String token) {
int offset = 0;
while(!isEOF(offset) && !(ahead("\\" + token, offset) || ahead(token, offset)) &&
!isWhite(_input.LA(offset + 1))) {
offset+=1;
}
if (offset == 0) {
return false;
}
// Since we found the text, increase the CharStream's index.
_input.seek(_input.index() + offset - 1);
getInterpreter().setCharPositionInLine(_tokenStartCharPositionInLine + offset - 1);
return true;
}
/**
* Matches a Handlebars comment and consumes it from the stream. Supports
* both forms: "{{!-- ... --}}" (closed by "--" + end) and "{{! ... }}"
* (closed by end). Returns false if no comment opener is ahead.
* NOTE(review): if EOF is hit before the closer, the closer's length is
* still added to the offset — presumably the ATN/error handling copes with
* the overshoot; confirm against the grammar before changing.
*/
private boolean comment(final String start, final String end) {
String commentClose;
if (ahead(start + "!--")) {
commentClose = "--" + end;
} else if (ahead(start + "!")) {
commentClose = end;
} else {
return false;
}
int offset = 0;
while (!isEOF(offset)) {
if (ahead(commentClose, offset)) {
break;
}
offset += 1;
}
offset += commentClose.length();
// Since we found the text, increase the CharStream's index.
_input.seek(_input.index() + offset - 1);
getInterpreter().setCharPositionInLine(_tokenStartCharPositionInLine + offset - 1);
return true;
}
/**
* Matches an escaped variable like "\{{foo}}" and consumes it through the
* closing delimiter. Bails out (returns false) if another unescaped start
* delimiter appears before the end delimiter.
*/
private boolean varEscape(final String start, final String end) {
if (ahead("\\" + start)) {
int offset = start.length();
while (!isEOF(offset)) {
if (ahead(end, offset)) {
break;
}
if (ahead(start, offset)) {
return false;
}
offset += 1;
}
offset += end.length();
// Since we found the text, increase the CharStream's index.
_input.seek(_input.index() + offset - 1);
getInterpreter().setCharPositionInLine(_tokenStartCharPositionInLine + offset - 1);
return true;
}
return false;
}
/**
* Matches a start delimiter, preferring the whitespace-control variant
* (delim + "~"); sets {@link #whiteSpaceControl} when that variant matched.
*/
private boolean startToken(final String delim) {
boolean matches = tryToken(delim + "~");
if (matches) {
whiteSpaceControl = true;
}
return matches || tryToken(delim);
}
/**
* Matches a start delimiter followed by {@code subtype} (e.g. "#", ">",
* "&"), trying the plain form first and then the whitespace-control form
* (delim + "~" + subtype), which also sets {@link #whiteSpaceControl}.
*/
private boolean startToken(final String delim, String subtype) {
boolean matches = tryToken(delim + subtype);
if (!matches) {
matches = tryToken(delim + "~" + subtype);
if (matches) {
whiteSpaceControl = true;
}
}
return matches;
}
/** Matches a plain end delimiter (no subtype prefix). */
private boolean endToken(final String delim) {
return endToken(delim, "");
}
/**
* Matches {@code subtype} + end delimiter, trying the plain form first and
* then the whitespace-control form (subtype + "~" + delim), which also sets
* {@link #whiteSpaceControl}.
*/
private boolean endToken(final String delim, String subtype) {
boolean matches = tryToken(subtype + delim);
if (!matches) {
matches = tryToken(subtype + "~" + delim);
if (matches) {
whiteSpaceControl = true;
}
}
return matches;
}
/**
* If {@code text} is next in the stream, consumes it (advancing the
* CharStream index and the interpreter's column) and returns true.
*/
private boolean tryToken(final String text) {
if (ahead(text)) {
// Since we found the text, increase the CharStream's index.
_input.seek(_input.index() + text.length() - 1);
getInterpreter().setCharPositionInLine(_tokenStartCharPositionInLine + text.length() - 1);
return true;
}
return false;
}
/** Returns true if the character at lookahead {@code offset} is EOF. */
private boolean isEOF(final int offset) {
return _input.LA(offset + 1) == EOF;
}
/** Returns true if {@code text} is next in the stream (no consumption). */
private boolean ahead(final String text) {
return ahead(text, 0);
}
/**
* Returns true if {@code text} appears in the stream starting at lookahead
* position {@code offset}. Pure lookahead — never consumes input.
*/
private boolean ahead(final String text, int offset) {
// See if `text` is ahead in the CharStream.
for (int i = 0; i < text.length(); i++) {
int ch = _input.LA(i + offset + 1);
if (ch != text.charAt(i)) {
// Nope, we didn't find `text`.
return false;
}
}
return true;
}
public HbsLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "HbsLexer.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
// Generated semantic-predicate dispatch: routes each rule index to the
// corresponding *_sempred method, which in turn calls the helpers above.
@Override
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch (ruleIndex) {
case 0:
return ESC_VAR_sempred((RuleContext)_localctx, predIndex);
case 1:
return TEXT_sempred((RuleContext)_localctx, predIndex);
case 2:
return COMMENT_sempred((RuleContext)_localctx, predIndex);
case 3:
return START_AMP_sempred((RuleContext)_localctx, predIndex);
case 4:
return END_RAW_BLOCK_sempred((RuleContext)_localctx, predIndex);
case 5:
return START_RAW_sempred((RuleContext)_localctx, predIndex);
case 6:
return START_T_sempred((RuleContext)_localctx, predIndex);
case 7:
return UNLESS_sempred((RuleContext)_localctx, predIndex);
case 8:
return START_PARTIAL_BLOCK_sempred((RuleContext)_localctx, predIndex);
case 9:
return START_BLOCK_sempred((RuleContext)_localctx, predIndex);
case 10:
return START_DELIM_sempred((RuleContext)_localctx, predIndex);
case 11:
return START_PARTIAL_sempred((RuleContext)_localctx, predIndex);
case 12:
return END_BLOCK_sempred((RuleContext)_localctx, predIndex);
case 13:
return START_sempred((RuleContext)_localctx, predIndex);
case 16:
return END_DELIM_sempred((RuleContext)_localctx, predIndex);
case 19:
return END_RAW_sempred((RuleContext)_localctx, predIndex);
case 20:
return END_T_sempred((RuleContext)_localctx, predIndex);
case 21:
return END_sempred((RuleContext)_localctx, predIndex);
}
return true;
}
private boolean ESC_VAR_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 0:
return varEscape(start, end);
}
return true;
}
private boolean TEXT_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 1:
return consumeUntil(start);
}
return true;
}
private boolean COMMENT_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 2:
return comment(start, end);
}
return true;
}
private boolean START_AMP_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 3:
return startToken(start, "&");
}
return true;
}
private boolean END_RAW_BLOCK_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 4:
return startToken(start, "{{/");
}
return true;
}
private boolean START_RAW_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 5:
return startToken(start, "{{");
}
return true;
}
private boolean START_T_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 6:
return startToken(start, "{");
}
return true;
}
private boolean UNLESS_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 7:
return startToken(start, "^");
}
return true;
}
private boolean START_PARTIAL_BLOCK_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 8:
return startToken(start, "#>");
}
return true;
}
private boolean START_BLOCK_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 9:
return startToken(start, "#");
}
return true;
}
private boolean START_DELIM_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 10:
return startToken(start, "=");
}
return true;
}
private boolean START_PARTIAL_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 11:
return startToken(start, ">");
}
return true;
}
private boolean END_BLOCK_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 12:
return startToken(start, "/");
}
return true;
}
private boolean START_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 13:
return startToken(start);
}
return true;
}
private boolean END_DELIM_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 14:
return endToken("=" + end);
}
return true;
}
private boolean END_RAW_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 15:
return endToken(end, "}}");
}
return true;
}
private boolean END_T_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 16:
return endToken(end, "}");
}
return true;
}
private boolean END_sempred(RuleContext _localctx, int predIndex) {
switch (predIndex) {
case 17:
return endToken(end);
}
return true;
}
// Serialized ATN (generated). Opaque data decoded by ATNDeserializer below;
// any byte change here corrupts the lexer. Do not edit by hand.
public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2&\u0152\b\1\b\1\b"+
"\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n"+
"\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21"+
"\4\22\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30"+
"\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37"+
"\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t"+
"*\4+\t+\4,\t,\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5"+
"\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3"+
"\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3"+
"\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+
"\3\17\3\20\6\20\u009d\n\20\r\20\16\20\u009e\3\21\5\21\u00a2\n\21\3\21"+
"\3\21\5\21\u00a6\n\21\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\24\3\24\3\25"+
"\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27"+
"\3\30\3\30\3\31\3\31\3\31\3\32\3\32\3\33\3\33\3\33\3\33\7\33\u00cb\n\33"+
"\f\33\16\33\u00ce\13\33\3\33\3\33\3\34\3\34\3\34\3\34\7\34\u00d6\n\34"+
"\f\34\16\34\u00d9\13\34\3\34\3\34\3\35\3\35\3\36\5\36\u00e0\n\36\3\36"+
"\6\36\u00e3\n\36\r\36\16\36\u00e4\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3"+
"\37\3\37\5\37\u00f0\n\37\3 \5 \u00f3\n \3 \3 \3 \3 \3 \3 \5 \u00fb\n "+
"\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!\3!"+
"\3!\3!\3!\3!\5!\u0118\n!\3\"\3\"\3\"\3\"\3\"\5\"\u011f\n\"\3#\6#\u0122"+
"\n#\r#\16#\u0123\3$\3$\3%\3%\7%\u012a\n%\f%\16%\u012d\13%\3%\3%\7%\u0131"+
"\n%\f%\16%\u0134\13%\5%\u0136\n%\3&\3&\3\'\3\'\3\'\3\'\3\'\5\'\u013f\n"+
"\'\3(\3(\6(\u0143\n(\r(\16(\u0144\3(\3(\3)\3)\3*\3*\3+\3+\3,\3,\3,\3,"+
"\5\u00cc\u00d7\u0144\2-\5\3\7\4\t\5\13\6\r\7\17\b\21\t\23\n\25\13\27\f"+
"\31\r\33\16\35\17\37\20!\21#\22%\23\'\24)\25+\26-\27/\30\61\31\63\32\65"+
"\33\67\349\35;\36=\37? A!C\"E#G\2I\2K\2M\2O\2Q\2S\2U$W%Y&\5\2\3\4\n\4"+
"\2\13\13\"\"\5\2\13\f\17\17\"\"\3\2\f\f\3\2\62;\b\2&&))/\3\2\2\2\u00e6\u00e7"+
"\7v\2\2\u00e7\u00e8\7t\2\2\u00e8\u00e9\7w\2\2\u00e9\u00f0\7g\2\2\u00ea"+
"\u00eb\7h\2\2\u00eb\u00ec\7c\2\2\u00ec\u00ed\7n\2\2\u00ed\u00ee\7u\2\2"+
"\u00ee\u00f0\7g\2\2\u00ef\u00e6\3\2\2\2\u00ef\u00ea\3\2\2\2\u00f0@\3\2"+
"\2\2\u00f1\u00f3\7\u0080\2\2\u00f2\u00f1\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3"+
"\u00f4\3\2\2\2\u00f4\u00f5\7g\2\2\u00f5\u00f6\7n\2\2\u00f6\u00f7\7u\2"+
"\2\u00f7\u00f8\7g\2\2\u00f8\u00fa\3\2\2\2\u00f9\u00fb\7\u0080\2\2\u00fa"+
"\u00f9\3\2\2\2\u00fa\u00fb\3\2\2\2\u00fbB\3\2\2\2\u00fc\u00fd\7\60\2\2"+
"\u00fd\u00fe\7\60\2\2\u00fe\u00ff\7\61\2\2\u00ff\u0100\3\2\2\2\u0100\u0118"+
"\5C!\2\u0101\u0102\7\60\2\2\u0102\u0118\7\60\2\2\u0103\u0104\7\60\2\2"+
"\u0104\u0105\7\61\2\2\u0105\u0106\3\2\2\2\u0106\u0118\5C!\2\u0107\u0118"+
"\7\60\2\2\u0108\u0109\7]\2\2\u0109\u010a\5K%\2\u010a\u010b\7_\2\2\u010b"+
"\u010c\5I$\2\u010c\u010d\5C!\2\u010d\u0118\3\2\2\2\u010e\u010f\7]\2\2"+
"\u010f\u0110\5K%\2\u0110\u0111\7_\2\2\u0111\u0118\3\2\2\2\u0112\u0113"+
"\5K%\2\u0113\u0114\5I$\2\u0114\u0115\5C!\2\u0115\u0118\3\2\2\2\u0116\u0118"+
"\5K%\2\u0117\u00fc\3\2\2\2\u0117\u0101\3\2\2\2\u0117\u0103\3\2\2\2\u0117"+
"\u0107\3\2\2\2\u0117\u0108\3\2\2\2\u0117\u010e\3\2\2\2\u0117\u0112\3\2"+
"\2\2\u0117\u0116\3\2\2\2\u0118D\3\2\2\2\u0119\u011a\7]\2\2\u011a\u011b"+
"\5G#\2\u011b\u011c\7_\2\2\u011c\u011f\3\2\2\2\u011d\u011f\5G#\2\u011e"+
"\u0119\3\2\2\2\u011e\u011d\3\2\2\2\u011fF\3\2\2\2\u0120\u0122\t\6\2\2"+
"\u0121\u0120\3\2\2\2\u0122\u0123\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124"+
"\3\2\2\2\u0124H\3\2\2\2\u0125\u0126\4/\61\2\u0126J\3\2\2\2\u0127\u012b"+
"\5M&\2\u0128\u012a\5O\'\2\u0129\u0128\3\2\2\2\u012a\u012d\3\2\2\2\u012b"+
"\u0129\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0136\3\2\2\2\u012d\u012b\3\2"+
"\2\2\u012e\u0132\5Q(\2\u012f\u0131\5O\'\2\u0130\u012f\3\2\2\2\u0131\u0134"+
"\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2\2\2\u0133\u0136\3\2\2\2\u0134"+
"\u0132\3\2\2\2\u0135\u0127\3\2\2\2\u0135\u012e\3\2\2\2\u0136L\3\2\2\2"+
"\u0137\u0138\t\7\2\2\u0138N\3\2\2\2\u0139\u013a\7\60\2\2\u013a\u013f\5"+
"Q(\2\u013b\u013f\5M&\2\u013c\u013f\5S)\2\u013d\u013f\7/\2\2\u013e\u0139"+
"\3\2\2\2\u013e\u013b\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013d\3\2\2\2\u013f"+
"P\3\2\2\2\u0140\u0142\7]\2\2\u0141\u0143\n\b\2\2\u0142\u0141\3\2\2\2\u0143"+
"\u0144\3\2\2\2\u0144\u0145\3\2\2\2\u0144\u0142\3\2\2\2\u0145\u0146\3\2"+
"\2\2\u0146\u0147\7_\2\2\u0147R\3\2\2\2\u0148\u0149\t\t\2\2\u0149T\3\2"+
"\2\2\u014a\u014b\7*\2\2\u014bV\3\2\2\2\u014c\u014d\7+\2\2\u014dX\3\2\2"+
"\2\u014e\u014f\t\3\2\2\u014f\u0150\3\2\2\2\u0150\u0151\b,\6\2\u0151Z\3"+
"\2\2\2\31\2\3\4\u009e\u00a1\u00a5\u00ca\u00cc\u00d5\u00d7\u00df\u00e4"+
"\u00ef\u00f2\u00fa\u0117\u011e\u0123\u012b\u0132\u0135\u013e\u0144\7\7"+
"\4\2\7\3\2\6\2\2\4\2\2\b\2\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
// Populate the per-decision DFA cache once the ATN has been deserialized.
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}