/*
 * ai.vespa.schemals.parser.indexinglanguage.IndexingParserLexer (Maven / Gradle / Ivy artifact)
 *
 * NOTE(review): non-Java boilerplate injected by the code-download site was removed from
 * the top of this file — it was not valid Java and prevented compilation.
 */
/* Generated by: CongoCC Parser Generator. IndexingParserLexer.java */
package ai.vespa.schemals.parser.indexinglanguage;
import ai.vespa.schemals.parser.indexinglanguage.Token.TokenType;
import static ai.vespa.schemals.parser.indexinglanguage.Token.TokenType.*;
import java.util.*;
public class IndexingParserLexer extends TokenSource {
/**
 * Returns a defensive copy of the set of token types declared as regular
 * TOKENs, i.e. the types that participate in parsing.
 *
 * @return a fresh {@code EnumSet} copy of {@link #regularTokens}
 */
public static EnumSet<TokenType> getRegularTokens() {
    // Fix: the generic type argument <TokenType> had been stripped (raw type).
    return EnumSet.copyOf(regularTokens);
}
// Optional hook that can override the core matching machinery (e.g. for tooling).
private static MatcherHook MATCHER_HOOK;
// this cannot be initialized here, since the hook must be set afterwards
public enum LexicalState {
    DEFAULT
}

// Current lexical state; starts in the first declared state (DEFAULT).
LexicalState lexicalState = LexicalState.values()[0];
// The token types currently active; null means all types are active.
EnumSet<TokenType> activeTokenTypes = null;
// Token types that are "regular" tokens that participate in parsing,
// i.e. declared as TOKEN
static final EnumSet<TokenType> regularTokens = EnumSet.of(EOF, NL, ADD, SUB, MUL, DIV, MOD, EQ, NE, LT, LE, GT, GE, CHOICE, PIPE, LCURLY, RCURLY, LPAREN, RPAREN, DOT, COMMA, COLON, SCOLON, ATTRIBUTE, BASE64_DECODE, BASE64_ENCODE, BINARIZE, BUSY_WAIT, CASE, CASE_DEFAULT, CLEAR_STATE, CREATE_IF_NON_EXISTENT, ECHO, ELSE, EMBED, EXACT, FLATTEN, FOR_EACH, GET_FIELD, GET_VAR, GUARD, HASH, HEX_DECODE, HEX_ENCODE, HOST_NAME, IF, INDEX, INPUT, JOIN, LOWER_CASE, MAX_LENGTH, MAX_OCCURRENCES, MAX_TOKEN_LENGTH, NGRAM, NORMALIZE, NOW, OPTIMIZE_PREDICATE, PACK_BITS, PASSTHROUGH, RANDOM, REMOVE_IF_ZERO, SELECT_INPUT, SET_LANGUAGE, SET_VAR, SLEEP, SPLIT, STEM, SUBSTRING, SUMMARY, SWITCH, THIS, TOKENIZE, TO_ARRAY, TO_BYTE, TO_DOUBLE, TO_FLOAT, TO_INT, TO_LONG, TO_POS, TO_EPOCH_SECOND, TO_STRING, TO_WSET, TO_BOOL, TRIM, ZCURVE, TRUE, FALSE, UNDERSCORE, INTEGER, LONG, DOUBLE, FLOAT, STRING, IDENTIFIER);
// Token types that do not participate in parsing
// i.e. declared as UNPARSED (or SPECIAL_TOKEN)
static final EnumSet<TokenType> unparsedTokens = EnumSet.of(COMMENT);
// Tokens that are skipped, i.e. SKIP (whitespace: space, tab, CR, form feed)
static final EnumSet<TokenType> skippedTokens = EnumSet.of(_TOKEN_1, _TOKEN_2, _TOKEN_3, _TOKEN_4);
// Tokens that correspond to a MORE, i.e. that are pending
// additional input
static final EnumSet<TokenType> moreTokens = EnumSet.noneOf(TokenType.class);
/**
 * Convenience constructor that uses {@code "input"} as the reported
 * input-source name.
 *
 * @param input the character sequence to tokenize
 */
public IndexingParserLexer(CharSequence input) {
    this("input", input);
}
/**
 * Constructs a lexer starting in the default lexical state at line 1, column 1.
 *
 * @param inputSource just the name of the input source (typically the filename)
 *        that will be used in error messages and so on.
 * @param input the input
 */
public IndexingParserLexer(String inputSource, CharSequence input) {
    this(inputSource, input, LexicalState.DEFAULT, 1, 1);
}
/**
 * @param inputSource just the name of the input source (typically the filename) that
 *        will be used in error messages and so on.
 * @param input the input
 * @param lexState The starting lexical state, may be null to indicate the default
 *        starting state
 * @param startingLine The line number at which we are starting for the purposes of
 *        location/error messages. In most normal usage, this is 1.
 * @param startingColumn The column number at which we are starting for the purposes of
 *        location/error messages. In most normal usages this is 1.
 */
public IndexingParserLexer(String inputSource, CharSequence input, LexicalState lexState, int startingLine, int startingColumn) {
    super(inputSource, input, startingLine, startingColumn, 1, true, false, false, "");
    // Bug fix: the original tested the field 'lexicalState' (initialized to DEFAULT,
    // never null) instead of the 'lexState' parameter, so a null lexState would have
    // been passed straight into switchTo() and nulled out the state.
    if (lexState != null) switchTo(lexState);
}
/**
 * Returns the token following {@code tok}, restricted to this lexer's
 * currently active token types ({@code null} means all types are active).
 *
 * @param tok the token to advance from; null means start at offset 0
 * @return the next token
 */
public Token getNextToken(Token tok) {
    return getNextToken(tok, this.activeTokenTypes);
}
/**
 * The public method for getting the next token, that is
 * called by IndexingParser.
 * It checks whether we have already cached
 * the token after this one. If not, it finally goes
 * to the NFA machinery.
 *
 * @param tok the token to advance from; {@code null} means tokenize from offset 0
 * @param activeTokenTypes the token types currently active, or {@code null} for all
 * @return the next token
 */
public Token getNextToken(Token tok, EnumSet<TokenType> activeTokenTypes) {
    if (tok == null) {
        tok = tokenizeAt(0, null, activeTokenTypes);
        cacheToken(tok);
        return tok;
    }
    Token cachedToken = tok.nextCachedToken();
    // If the cached next token is not currently active, we
    // throw it away and go back to the IndexingParserLexer
    if (cachedToken != null && activeTokenTypes != null && !activeTokenTypes.contains(cachedToken.getType())) {
        reset(tok);
        cachedToken = null;
    }
    if (cachedToken == null) {
        Token token = tokenizeAt(tok.getEndOffset(), null, activeTokenTypes);
        cacheToken(token);
        return token;
    }
    return cachedToken;
}
/**
 * Simple value holder for the result of a match attempt: the token type
 * that matched and the length (in chars) of the match.
 */
static class MatchInfo {
    TokenType matchedType;
    int matchLength;

    @Override
    public int hashCode() {
        return Objects.hash(matchLength, matchedType);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        MatchInfo that = (MatchInfo) obj;
        return this.matchLength == that.matchLength
            && this.matchedType == that.matchedType;
    }
}
// Hook interface allowing external code to override (or fall through to)
// the core NFA matching; a null return means "use the default machinery".
@FunctionalInterface
private interface MatcherHook {
    MatchInfo apply(LexicalState lexicalState, CharSequence input, int position, EnumSet<TokenType> activeTokenTypes, NfaFunction[] nfaFunctions, BitSet currentStates, BitSet nextStates, MatchInfo matchInfo);
}
/**
 * Core tokenization method. Note that this can be called from a static context.
 * Hence the extra parameters that need to be passed in.
 *
 * @param input the character sequence being tokenized
 * @param position the offset at which to start matching
 * @param activeTokenTypes the token types currently active, or null for all
 * @param nfaFunctions the NFA state-acceptance table for the lexical state
 * @param currentStates scratch bit set of live NFA states (null to allocate)
 * @param nextStates scratch bit set of successor NFA states (null to allocate)
 * @param matchInfo holder to reuse for the result (null to allocate)
 * @return the match at {@code position}: EOF at end of input, INVALID with
 *         length 0 when nothing matches, otherwise the longest match found
 */
static MatchInfo getMatchInfo(CharSequence input, int position, EnumSet<TokenType> activeTokenTypes, NfaFunction[] nfaFunctions, BitSet currentStates, BitSet nextStates, MatchInfo matchInfo) {
    if (matchInfo == null) {
        matchInfo = new MatchInfo();
    }
    if (position >= input.length()) {
        matchInfo.matchedType = EOF;
        matchInfo.matchLength = 0;
        return matchInfo;
    }
    int start = position;
    int matchLength = 0;
    TokenType matchedType = TokenType.INVALID;
    EnumSet<TokenType> alreadyMatchedTypes = EnumSet.noneOf(TokenType.class);
    if (currentStates == null) currentStates = new BitSet(574);
    else currentStates.clear();
    if (nextStates == null) nextStates = new BitSet(574);
    else nextStates.clear();
    // the core NFA loop
    do {
        // Holder for the new type (if any) matched on this iteration
        if (position > start) {
            // What was nextStates on the last iteration
            // is now the currentStates!
            BitSet temp = currentStates;
            currentStates = nextStates;
            nextStates = temp;
            nextStates.clear();
        } else {
            currentStates.set(0);
        }
        if (position >= input.length()) {
            break;
        }
        int curChar = Character.codePointAt(input, position++);
        // A supplementary code point occupies two chars in the CharSequence.
        if (curChar > 0xFFFF) position++;
        int nextActive = currentStates.nextSetBit(0);
        while (nextActive != -1) {
            TokenType returnedType = nfaFunctions[nextActive].apply(curChar, nextStates, activeTokenTypes, alreadyMatchedTypes);
            // Longest match wins; on equal length, the lower TokenType ordinal wins.
            if (returnedType != null && (position - start > matchLength || returnedType.ordinal() < matchedType.ordinal())) {
                matchedType = returnedType;
                matchLength = position - start;
                alreadyMatchedTypes.add(returnedType);
            }
            nextActive = currentStates.nextSetBit(nextActive + 1);
        }
        if (position >= input.length()) break;
    } while (!nextStates.isEmpty());
    matchInfo.matchedType = matchedType;
    matchInfo.matchLength = matchLength;
    return matchInfo;
}
/**
 * @param position The position at which to tokenize.
 * @param lexicalState The lexical state in which to tokenize. If this is null, it is the instance variable #lexicalState
 * @param activeTokenTypes The active token types. If this is null, they are all active.
 * @return the Token at position
 */
final Token tokenizeAt(int position, LexicalState lexicalState, EnumSet<TokenType> activeTokenTypes) {
    if (lexicalState == null) lexicalState = this.lexicalState;
    int tokenBeginOffset = position;
    boolean inMore = false;
    // Start of a run of unmatched input, or -1 if none is pending.
    int invalidRegionStart = -1;
    Token matchedToken = null;
    TokenType matchedType = null;
    // The core tokenization loop
    MatchInfo matchInfo = new MatchInfo();
    BitSet currentStates = new BitSet(574);
    BitSet nextStates = new BitSet(574);
    while (matchedToken == null) {
        if (!inMore) tokenBeginOffset = position;
        // Let the hook (if installed) try first; a null result falls back
        // to the default NFA machinery.
        if (MATCHER_HOOK != null) {
            matchInfo = MATCHER_HOOK.apply(lexicalState, this, position, activeTokenTypes, nfaFunctions, currentStates, nextStates, matchInfo);
            if (matchInfo == null) {
                matchInfo = getMatchInfo(this, position, activeTokenTypes, nfaFunctions, currentStates, nextStates, matchInfo);
            }
        } else {
            matchInfo = getMatchInfo(this, position, activeTokenTypes, nfaFunctions, currentStates, nextStates, matchInfo);
        }
        matchedType = matchInfo.matchedType;
        inMore = moreTokens.contains(matchedType);
        position += matchInfo.matchLength;
        if (matchedType == TokenType.INVALID) {
            // Accumulate unmatched characters into one invalid region.
            if (invalidRegionStart == -1) {
                invalidRegionStart = tokenBeginOffset;
            }
            int cp = Character.codePointAt(this, position);
            ++position;
            if (cp > 0xFFFF) ++position;
            continue;
        }
        // A valid match ends any pending invalid region; report it first.
        if (invalidRegionStart != -1) {
            return new InvalidToken(this, invalidRegionStart, tokenBeginOffset);
        }
        if (skippedTokens.contains(matchedType)) {
            skipTokens(tokenBeginOffset, position);
        } else if (regularTokens.contains(matchedType) || unparsedTokens.contains(matchedType)) {
            matchedToken = Token.newToken(matchedType, this, tokenBeginOffset, position);
            matchedToken.setUnparsed(!regularTokens.contains(matchedType));
        }
    }
    return matchedToken;
}
/**
 * Switch to specified lexical state.
 * @param lexState the lexical state to switch to
 * @return whether we switched (i.e. we weren't already in the desired lexical state)
 */
public boolean switchTo(LexicalState lexState) {
    if (this.lexicalState == lexState) {
        return false;
    }
    this.lexicalState = lexState;
    return true;
}
/**
 * Reset the token source input to just after the Token passed in,
 * optionally switching lexical state.
 *
 * @param t the token after which tokenization should resume
 * @param state the lexical state to switch to, or null to keep the current one
 */
void reset(Token t, LexicalState state) {
    uncacheTokens(t);
    if (state != null) switchTo(state);
}
// Reset the token source input to just after the Token passed in,
// keeping the current lexical state.
void reset(Token t) {
    reset(t, null);
}
// NFA related code follows.
// The functional interface that represents
// the acceptance method of an NFA state
@FunctionalInterface
interface NfaFunction {
    TokenType apply(int ch, BitSet bs, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes);
}

// Table of NFA state-acceptance functions, indexed by state number.
private static NfaFunction[] nfaFunctions;
// Initialize the various NFA method tables
// (populates nfaFunctions for the DEFAULT lexical state).
static {
    DEFAULT.NFA_FUNCTIONS_init();
}
//The Nitty-gritty of the NFA code follows.
/**
* Holder class for NFA code related to DEFAULT lexical state
*/
private static class DEFAULT {
/**
 * NFA start state for the DEFAULT lexical state. Seeds {@code nextStates}
 * with the states reachable from the start on character {@code ch} (keyword,
 * string, number and identifier NFAs), and returns the token type matched by
 * {@code ch} alone, if any. {@code validTypes == null} means all types active.
 */
private static TokenType getNfaIndex0(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    // Seed multi-character NFAs keyed on the first character.
    if (ch == '!') {
        if (validTypes == null || validTypes.contains(NE)) {
            nextStates.set(47);
        }
    } else if (ch == '"') {
        if (validTypes == null || validTypes.contains(STRING)) {
            nextStates.set(58);
        }
    } else if (ch == '\'') {
        if (validTypes == null || validTypes.contains(STRING)) {
            nextStates.set(38);
        }
    } else if (ch == '0') {
        // Leading zero may start a hex literal (0x...).
        if (validTypes == null || validTypes.contains(LONG)) {
            nextStates.set(97);
        }
        if (validTypes == null || validTypes.contains(INTEGER)) {
            nextStates.set(226);
        }
    }
    if (ch >= '0' && ch <= '9') {
        if (validTypes == null || validTypes.contains(FLOAT)) {
            nextStates.set(73);
        }
        if (validTypes == null || validTypes.contains(LONG)) {
            nextStates.set(80);
        }
    } else if (ch == '<') {
        if (validTypes == null || validTypes.contains(LE)) {
            nextStates.set(76);
        }
    } else if (ch == '=') {
        if (validTypes == null || validTypes.contains(EQ)) {
            nextStates.set(55);
        }
    } else if (ch == '>') {
        if (validTypes == null || validTypes.contains(GE)) {
            nextStates.set(4);
        }
    } else if (ch == 'a') {
        if (validTypes == null || validTypes.contains(ATTRIBUTE)) {
            nextStates.set(348);
        }
    } else if (ch == 'b') {
        if (validTypes == null || validTypes.contains(BASE64_ENCODE)) {
            nextStates.set(237);
        }
        if (validTypes == null || validTypes.contains(BASE64_DECODE)) {
            nextStates.set(287);
        }
        if (validTypes == null || validTypes.contains(BUSY_WAIT)) {
            nextStates.set(355);
        }
        if (validTypes == null || validTypes.contains(BINARIZE)) {
            nextStates.set(379);
        }
    } else if (ch == 'c') {
        if (validTypes == null || validTypes.contains(CLEAR_STATE)) {
            nextStates.set(227);
        }
        if (validTypes == null || validTypes.contains(CREATE_IF_NON_EXISTENT)) {
            nextStates.set(249);
        }
        if (validTypes == null || validTypes.contains(CASE)) {
            nextStates.set(444);
        }
    } else if (ch == 'd') {
        if (validTypes == null || validTypes.contains(CASE_DEFAULT)) {
            nextStates.set(197);
        }
    } else if (ch == 'e') {
        if (validTypes == null || validTypes.contains(EXACT)) {
            nextStates.set(109);
        }
        if (validTypes == null || validTypes.contains(ECHO)) {
            nextStates.set(114);
        }
        if (validTypes == null || validTypes.contains(ELSE)) {
            nextStates.set(116);
        }
        if (validTypes == null || validTypes.contains(EMBED)) {
            nextStates.set(301);
        }
    } else if (ch == 'f') {
        if (validTypes == null || validTypes.contains(FOR_EACH)) {
            nextStates.set(99);
        }
        if (validTypes == null || validTypes.contains(FLATTEN)) {
            nextStates.set(154);
        }
        if (validTypes == null || validTypes.contains(FALSE)) {
            nextStates.set(335);
        }
    } else if (ch == 'g') {
        if (validTypes == null || validTypes.contains(GET_FIELD)) {
            nextStates.set(328);
        }
        if (validTypes == null || validTypes.contains(GET_VAR)) {
            nextStates.set(385);
        }
        if (validTypes == null || validTypes.contains(GUARD)) {
            nextStates.set(446);
        }
    } else if (ch == 'h') {
        if (validTypes == null || validTypes.contains(HEX_DECODE)) {
            nextStates.set(206);
        }
        if (validTypes == null || validTypes.contains(HOST_NAME)) {
            nextStates.set(281);
        }
        if (validTypes == null || validTypes.contains(HEX_ENCODE)) {
            nextStates.set(435);
        }
        if (validTypes == null || validTypes.contains(HASH)) {
            nextStates.set(452);
        }
    } else if (ch == 'i') {
        if (validTypes == null || validTypes.contains(IF)) {
            nextStates.set(9);
        }
        if (validTypes == null || validTypes.contains(INPUT)) {
            nextStates.set(189);
        }
        if (validTypes == null || validTypes.contains(INDEX)) {
            nextStates.set(449);
        }
    } else if (ch == 'j') {
        if (validTypes == null || validTypes.contains(JOIN)) {
            nextStates.set(112);
        }
    } else if (ch == 'l') {
        if (validTypes == null || validTypes.contains(LOWER_CASE)) {
            nextStates.set(390);
        }
    } else if (ch == 'm') {
        if (validTypes == null || validTypes.contains(MAX_LENGTH)) {
            nextStates.set(178);
        }
        if (validTypes == null || validTypes.contains(MAX_OCCURRENCES)) {
            nextStates.set(310);
        }
        if (validTypes == null || validTypes.contains(MAX_TOKEN_LENGTH)) {
            nextStates.set(408);
        }
    } else if (ch == 'n') {
        if (validTypes == null || validTypes.contains(NOW)) {
            nextStates.set(126);
        }
        if (validTypes == null || validTypes.contains(NGRAM)) {
            nextStates.set(186);
        }
        if (validTypes == null || validTypes.contains(NORMALIZE)) {
            nextStates.set(269);
        }
    } else if (ch == 'o') {
        if (validTypes == null || validTypes.contains(OPTIMIZE_PREDICATE)) {
            nextStates.set(129);
        }
    } else if (ch == 'p') {
        if (validTypes == null || validTypes.contains(PACK_BITS)) {
            nextStates.set(338);
        }
        if (validTypes == null || validTypes.contains(PASSTHROUGH)) {
            nextStates.set(397);
        }
    } else if (ch == 'r') {
        if (validTypes == null || validTypes.contains(REMOVE_IF_ZERO)) {
            nextStates.set(85);
        }
        if (validTypes == null || validTypes.contains(RANDOM)) {
            nextStates.set(105);
        }
    } else if (ch == 's') {
        if (validTypes == null || validTypes.contains(SPLIT)) {
            nextStates.set(118);
        }
        if (validTypes == null || validTypes.contains(SLEEP)) {
            nextStates.set(170);
        }
        if (validTypes == null || validTypes.contains(SET_VAR)) {
            nextStates.set(192);
        }
        if (validTypes == null || validTypes.contains(SWITCH)) {
            nextStates.set(297);
        }
        if (validTypes == null || validTypes.contains(SUMMARY)) {
            nextStates.set(323);
        }
        if (validTypes == null || validTypes.contains(SET_LANGUAGE)) {
            nextStates.set(362);
        }
        if (validTypes == null || validTypes.contains(SUBSTRING)) {
            nextStates.set(372);
        }
        if (validTypes == null || validTypes.contains(STEM)) {
            nextStates.set(406);
        }
        if (validTypes == null || validTypes.contains(SELECT_INPUT)) {
            nextStates.set(460);
        }
    } else if (ch == 't') {
        if (validTypes == null || validTypes.contains(TO_WSET)) {
            nextStates.set(121);
        }
        if (validTypes == null || validTypes.contains(THIS)) {
            nextStates.set(127);
        }
        if (validTypes == null || validTypes.contains(TO_BYTE)) {
            nextStates.set(145);
        }
        if (validTypes == null || validTypes.contains(TO_POS)) {
            nextStates.set(150);
        }
        if (validTypes == null || validTypes.contains(TO_STRING)) {
            nextStates.set(159);
        }
        if (validTypes == null || validTypes.contains(TO_INT)) {
            nextStates.set(166);
        }
        if (validTypes == null || validTypes.contains(TO_BOOL)) {
            nextStates.set(173);
        }
        if (validTypes == null || validTypes.contains(TO_EPOCH_SECOND)) {
            nextStates.set(213);
        }
        if (validTypes == null || validTypes.contains(TRIM)) {
            nextStates.set(247);
        }
        if (validTypes == null || validTypes.contains(TO_LONG)) {
            nextStates.set(276);
        }
        if (validTypes == null || validTypes.contains(TOKENIZE)) {
            nextStates.set(304);
        }
        if (validTypes == null || validTypes.contains(TRUE)) {
            nextStates.set(346);
        }
        if (validTypes == null || validTypes.contains(TO_ARRAY)) {
            nextStates.set(422);
        }
        if (validTypes == null || validTypes.contains(TO_DOUBLE)) {
            nextStates.set(428);
        }
        if (validTypes == null || validTypes.contains(TO_FLOAT)) {
            nextStates.set(454);
        }
    } else if (ch == 'z') {
        if (validTypes == null || validTypes.contains(ZCURVE)) {
            nextStates.set(202);
        }
    } else if (ch == '|') {
        if (validTypes == null || validTypes.contains(CHOICE)) {
            nextStates.set(18);
        }
    }
    // Identifier / numeric / comment starts (single char may already match).
    if ((ch >= 'A' && ch <= 'Z') || ((ch == '_') || (ch >= 'a' && ch <= 'z'))) {
        if (validTypes == null || validTypes.contains(IDENTIFIER)) {
            nextStates.set(46);
            type = IDENTIFIER;
        }
    } else if (ch >= '0' && ch <= '9') {
        if (validTypes == null || validTypes.contains(DOUBLE)) {
            nextStates.set(13);
        }
        if (validTypes == null || validTypes.contains(INTEGER)) {
            nextStates.set(39);
            type = INTEGER;
        }
    } else if (ch == '#') {
        if (validTypes == null || validTypes.contains(COMMENT)) {
            nextStates.set(77);
            type = COMMENT;
        }
    }
    // Single-character tokens (may override the type set above; e.g. '_' as
    // UNDERSCORE beats IDENTIFIER when both are valid).
    if (ch == '_') {
        if (validTypes == null || validTypes.contains(UNDERSCORE)) {
            type = UNDERSCORE;
        }
    } else if (ch == ';') {
        if (validTypes == null || validTypes.contains(SCOLON)) {
            type = SCOLON;
        }
    } else if (ch == ':') {
        if (validTypes == null || validTypes.contains(COLON)) {
            type = COLON;
        }
    } else if (ch == ',') {
        if (validTypes == null || validTypes.contains(COMMA)) {
            type = COMMA;
        }
    } else if (ch == '.') {
        if (validTypes == null || validTypes.contains(DOT)) {
            type = DOT;
        }
    } else if (ch == ')') {
        if (validTypes == null || validTypes.contains(RPAREN)) {
            type = RPAREN;
        }
    } else if (ch == '(') {
        if (validTypes == null || validTypes.contains(LPAREN)) {
            type = LPAREN;
        }
    } else if (ch == '}') {
        if (validTypes == null || validTypes.contains(RCURLY)) {
            type = RCURLY;
        }
    } else if (ch == '{') {
        if (validTypes == null || validTypes.contains(LCURLY)) {
            type = LCURLY;
        }
    } else if (ch == '|') {
        if (validTypes == null || validTypes.contains(PIPE)) {
            type = PIPE;
        }
    } else if (ch == '>') {
        if (validTypes == null || validTypes.contains(GT)) {
            type = GT;
        }
    } else if (ch == '<') {
        if (validTypes == null || validTypes.contains(LT)) {
            type = LT;
        }
    } else if (ch == '%') {
        if (validTypes == null || validTypes.contains(MOD)) {
            type = MOD;
        }
    } else if (ch == '/') {
        if (validTypes == null || validTypes.contains(DIV)) {
            type = DIV;
        }
    } else if (ch == '*') {
        if (validTypes == null || validTypes.contains(MUL)) {
            type = MUL;
        }
    } else if (ch == '-') {
        if (validTypes == null || validTypes.contains(SUB)) {
            type = SUB;
        }
    } else if (ch == '+') {
        if (validTypes == null || validTypes.contains(ADD)) {
            type = ADD;
        }
    } else if (ch == '\n') {
        if (validTypes == null || validTypes.contains(NL)) {
            type = NL;
        }
    } else if (ch == '\f') {
        if (validTypes == null || validTypes.contains(_TOKEN_4)) {
            type = _TOKEN_4;
        }
    } else if (ch == '\r') {
        if (validTypes == null || validTypes.contains(_TOKEN_3)) {
            type = _TOKEN_3;
        }
    } else if (ch == '\t') {
        if (validTypes == null || validTypes.contains(_TOKEN_2)) {
            type = _TOKEN_2;
        }
    } else if (ch == ' ') {
        if (validTypes == null || validTypes.contains(_TOKEN_1)) {
            type = _TOKEN_1;
        }
    }
    return type;
}
// NFA state 1: 'o' -> REMOVE_IF_ZERO
private static TokenType getNfaIndex1(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'o' ? REMOVE_IF_ZERO : null;
}

// NFA state 2: hex-digit loop; 'L'/'l' suffix -> LONG
private static TokenType getNfaIndex2(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch >= '0' && ch <= '9') || ((ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f'))) {
        nextStates.set(2);
    } else if ((ch == 'L') || (ch == 'l')) {
        type = LONG;
    }
    return type;
}

// NFA state 3: 'h' -> FOR_EACH
private static TokenType getNfaIndex3(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'h' ? FOR_EACH : null;
}

// NFA state 4: '=' -> GE
private static TokenType getNfaIndex4(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == '=' ? GE : null;
}

// NFA state 5: 'm' -> RANDOM
private static TokenType getNfaIndex5(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'm' ? RANDOM : null;
}

// NFA state 6: 't' -> EXACT
private static TokenType getNfaIndex6(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? EXACT : null;
}

// NFA state 7: 'n' -> JOIN
private static TokenType getNfaIndex7(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'n' ? JOIN : null;
}

// NFA state 8: 'o' -> ECHO
private static TokenType getNfaIndex8(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'o' ? ECHO : null;
}

// NFA state 9: 'f' -> IF
private static TokenType getNfaIndex9(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'f' ? IF : null;
}

// NFA state 10: 'e' -> ELSE
private static TokenType getNfaIndex10(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? ELSE : null;
}

// NFA state 11: 't' -> SPLIT
private static TokenType getNfaIndex11(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? SPLIT : null;
}

// NFA state 12: 't' -> TO_WSET
private static TokenType getNfaIndex12(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? TO_WSET : null;
}
// NFA state 13: DOUBLE literal — digit loop; '.' or exponent continues the NFA.
private static TokenType getNfaIndex13(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch == 'E') || (ch == 'e')) {
        nextStates.set(14);
    } else if (ch == '.') {
        nextStates.set(16);
        type = DOUBLE;
    } else if (ch >= '0' && ch <= '9') {
        nextStates.set(13);
        type = DOUBLE;
    }
    return type;
}

// NFA state 14: exponent start — optional sign, then digits.
private static TokenType getNfaIndex14(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch == '+') || (ch == '-')) {
        nextStates.set(15);
    } else if (ch >= '0' && ch <= '9') {
        nextStates.set(15);
        type = DOUBLE;
    }
    return type;
}

// NFA state 15: exponent digit loop -> DOUBLE.
private static TokenType getNfaIndex15(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(15);
        return DOUBLE;
    }
    return null;
}

// NFA state 16: fractional digit loop; exponent continues -> DOUBLE.
private static TokenType getNfaIndex16(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch == 'E') || (ch == 'e')) {
        nextStates.set(14);
    } else if (ch >= '0' && ch <= '9') {
        nextStates.set(16);
        type = DOUBLE;
    }
    return type;
}
// NFA state 17: 'w' -> NOW
private static TokenType getNfaIndex17(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'w' ? NOW : null;
}

// NFA state 18: '|' -> CHOICE
private static TokenType getNfaIndex18(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == '|' ? CHOICE : null;
}

// NFA state 19: 's' -> THIS
private static TokenType getNfaIndex19(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 's' ? THIS : null;
}

// NFA state 20: 'e' -> OPTIMIZE_PREDICATE
private static TokenType getNfaIndex20(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? OPTIMIZE_PREDICATE : null;
}

// NFA state 21: 'e' -> TO_BYTE
private static TokenType getNfaIndex21(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? TO_BYTE : null;
}

// NFA state 22: 's' -> TO_POS
private static TokenType getNfaIndex22(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 's' ? TO_POS : null;
}

// NFA state 23: 'n' -> FLATTEN
private static TokenType getNfaIndex23(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'n' ? FLATTEN : null;
}

// NFA state 24: 'g' -> TO_STRING
private static TokenType getNfaIndex24(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'g' ? TO_STRING : null;
}

// NFA state 25: 't' -> TO_INT
private static TokenType getNfaIndex25(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? TO_INT : null;
}

// NFA state 26: 'p' -> SLEEP
private static TokenType getNfaIndex26(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'p' ? SLEEP : null;
}

// NFA state 27: 'l' -> TO_BOOL
private static TokenType getNfaIndex27(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'l' ? TO_BOOL : null;
}

// NFA state 28: 'h' -> MAX_LENGTH
private static TokenType getNfaIndex28(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'h' ? MAX_LENGTH : null;
}

// NFA state 29: 'm' -> NGRAM
private static TokenType getNfaIndex29(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'm' ? NGRAM : null;
}

// NFA state 30: 't' -> INPUT
private static TokenType getNfaIndex30(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? INPUT : null;
}

// NFA state 31: 'r' -> SET_VAR
private static TokenType getNfaIndex31(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'r' ? SET_VAR : null;
}

// NFA state 32: 't' -> CASE_DEFAULT
private static TokenType getNfaIndex32(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? CASE_DEFAULT : null;
}

// NFA state 33: 'e' -> ZCURVE
private static TokenType getNfaIndex33(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? ZCURVE : null;
}

// NFA state 34: 'e' -> HEX_DECODE
private static TokenType getNfaIndex34(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? HEX_DECODE : null;
}

// NFA state 35: 'd' -> TO_EPOCH_SECOND
private static TokenType getNfaIndex35(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'd' ? TO_EPOCH_SECOND : null;
}

// NFA state 36: hex-digit loop -> INTEGER
private static TokenType getNfaIndex36(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    if ((ch >= '0' && ch <= '9') || ((ch >= 'A' && ch <= 'F') || (ch >= 'a' && ch <= 'f'))) {
        nextStates.set(36);
        return INTEGER;
    }
    return null;
}

// NFA state 37: 'e' -> CLEAR_STATE
private static TokenType getNfaIndex37(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? CLEAR_STATE : null;
}

// NFA state 38: single-quoted STRING body — any char but the closing quote
// loops; backslash enters the escape state; closing quote accepts.
private static TokenType getNfaIndex38(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch >= 0x0 && ch <= '&') || (ch >= '(')) {
        nextStates.set(38);
    }
    if (ch == '\\') {
        nextStates.set(236);
    } else if (ch == '\'') {
        type = STRING;
    }
    return type;
}
// NFA state 39: decimal-digit loop -> INTEGER
private static TokenType getNfaIndex39(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(39);
        return INTEGER;
    }
    return null;
}

// NFA state 40: 'e' -> BASE64_ENCODE
private static TokenType getNfaIndex40(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? BASE64_ENCODE : null;
}

// NFA state 41: 'm' -> TRIM
private static TokenType getNfaIndex41(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'm' ? TRIM : null;
}

// NFA state 42: 't' -> CREATE_IF_NON_EXISTENT
private static TokenType getNfaIndex42(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 't' ? CREATE_IF_NON_EXISTENT : null;
}

// NFA state 43: 'e' -> NORMALIZE
private static TokenType getNfaIndex43(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? NORMALIZE : null;
}

// NFA state 44: 'g' -> TO_LONG
private static TokenType getNfaIndex44(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'g' ? TO_LONG : null;
}

// NFA state 45: 'e' -> HOST_NAME
private static TokenType getNfaIndex45(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? HOST_NAME : null;
}

// NFA state 46: IDENTIFIER continuation loop ('-', digits, letters, '_').
private static TokenType getNfaIndex46(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    if ((ch == '-') || ((ch >= '0' && ch <= '9') || ((ch >= 'A' && ch <= 'Z') || ((ch == '_') || (ch >= 'a' && ch <= 'z'))))) {
        nextStates.set(46);
        return IDENTIFIER;
    }
    return null;
}

// NFA state 47: '=' -> NE
private static TokenType getNfaIndex47(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == '=' ? NE : null;
}

// NFA state 48: 'e' -> BASE64_DECODE
private static TokenType getNfaIndex48(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? BASE64_DECODE : null;
}

// NFA state 49: 'h' -> SWITCH
private static TokenType getNfaIndex49(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'h' ? SWITCH : null;
}

// NFA state 50: 'd' -> EMBED
private static TokenType getNfaIndex50(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'd' ? EMBED : null;
}

// NFA state 51: 'e' -> TOKENIZE
private static TokenType getNfaIndex51(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? TOKENIZE : null;
}

// NFA state 52: 's' -> MAX_OCCURRENCES
private static TokenType getNfaIndex52(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 's' ? MAX_OCCURRENCES : null;
}

// NFA state 53: 'y' -> SUMMARY
private static TokenType getNfaIndex53(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'y' ? SUMMARY : null;
}

// NFA state 54: 'd' -> GET_FIELD
private static TokenType getNfaIndex54(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'd' ? GET_FIELD : null;
}

// NFA state 55: '=' -> EQ
private static TokenType getNfaIndex55(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == '=' ? EQ : null;
}

// NFA state 56: 'e' -> FALSE
private static TokenType getNfaIndex56(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 'e' ? FALSE : null;
}

// NFA state 57: 's' -> PACK_BITS
private static TokenType getNfaIndex57(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    return ch == 's' ? PACK_BITS : null;
}

// NFA state 58: double-quoted STRING body — any char but the closing quote
// loops; backslash enters the escape state; closing quote accepts.
private static TokenType getNfaIndex58(int ch, BitSet nextStates, EnumSet<TokenType> validTypes, EnumSet<TokenType> alreadyMatchedTypes) {
    TokenType type = null;
    if ((ch >= 0x0 && ch <= '!') || (ch >= '#')) {
        nextStates.set(58);
    }
    if (ch == '\\') {
        nextStates.set(345);
    } else if (ch == '"') {
        type = STRING;
    }
    return type;
}
private static TokenType getNfaIndex59(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'e') {
return TRUE;
}
return null;
}
private static TokenType getNfaIndex60(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'e') {
return ATTRIBUTE;
}
return null;
}
private static TokenType getNfaIndex61(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 't') {
return BUSY_WAIT;
}
return null;
}
private static TokenType getNfaIndex62(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'e') {
return SET_LANGUAGE;
}
return null;
}
private static TokenType getNfaIndex63(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'g') {
return SUBSTRING;
}
return null;
}
private static TokenType getNfaIndex64(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'e') {
return BINARIZE;
}
return null;
}
private static TokenType getNfaIndex65(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'r') {
return GET_VAR;
}
return null;
}
private static TokenType getNfaIndex66(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'e') {
return LOWER_CASE;
}
return null;
}
private static TokenType getNfaIndex67(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
if (ch == 'h') {
return PASSTHROUGH;
}
return null;
}
// NFA state 68: terminal state — accept STEM on final 'm'.
private static TokenType getNfaIndex68(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'm') ? STEM : null;
}
// NFA state 69: terminal state — accept MAX_TOKEN_LENGTH on final 'h'.
private static TokenType getNfaIndex69(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'h') ? MAX_TOKEN_LENGTH : null;
}
// NFA state 70: terminal state — accept TO_ARRAY on final 'y'.
private static TokenType getNfaIndex70(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'y') ? TO_ARRAY : null;
}
// NFA state 71: terminal state — accept TO_DOUBLE on final 'e'.
private static TokenType getNfaIndex71(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'e') ? TO_DOUBLE : null;
}
// NFA state 72: terminal state — accept HEX_ENCODE on final 'e'.
private static TokenType getNfaIndex72(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'e') ? HEX_ENCODE : null;
}
// NFA state 73: digits of a numeric literal seen so far.
private static TokenType getNfaIndex73(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(73); // keep consuming digits
        return null;
    }
    switch (ch) {
        case '.':
            nextStates.set(75); // start of fraction part
            return null;
        case 'E':
        case 'e':
            nextStates.set(442); // start of exponent part
            return null;
        case 'F':
        case 'f':
            return FLOAT; // float suffix accepts immediately
        default:
            return null;
    }
}
// NFA state 74: trailing digits of a numeric literal; 'F'/'f' suffix accepts FLOAT.
private static TokenType getNfaIndex74(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(74);
        return null;
    }
    return (ch == 'F' || ch == 'f') ? FLOAT : null;
}
// NFA state 75: fraction digits of a numeric literal.
private static TokenType getNfaIndex75(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(75); // keep consuming fraction digits
        return null;
    }
    switch (ch) {
        case 'E':
        case 'e':
            nextStates.set(442); // start of exponent part
            return null;
        case 'F':
        case 'f':
            return FLOAT; // float suffix accepts immediately
        default:
            return null;
    }
}
// NFA state 76: terminal state — accept LE ("<=") on '='.
private static TokenType getNfaIndex76(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == '=') ? LE : null;
}
// NFA state 77: comment body — every non-negative char except LF (0xa) and CR (0xd)
// extends the COMMENT token and stays in this state.
private static TokenType getNfaIndex77(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch >= 0x0 && ch != '\n' && ch != '\r') {
        nextStates.set(77);
        return COMMENT;
    }
    return null;
}
// NFA state 78: terminal state — accept CASE on final 'e'.
private static TokenType getNfaIndex78(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'e') ? CASE : null;
}
// NFA state 79: terminal state — accept GUARD on final 'd'.
private static TokenType getNfaIndex79(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'd') ? GUARD : null;
}
// NFA state 80: digits of an integer literal; 'L'/'l' suffix accepts LONG.
private static TokenType getNfaIndex80(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch >= '0' && ch <= '9') {
        nextStates.set(80);
        return null;
    }
    return (ch == 'L' || ch == 'l') ? LONG : null;
}
// NFA state 81: terminal state — accept INDEX on final 'x'.
private static TokenType getNfaIndex81(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'x') ? INDEX : null;
}
// NFA state 82: terminal state — accept HASH on final 'h'.
private static TokenType getNfaIndex82(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 'h') ? HASH : null;
}
// NFA state 83: terminal state — accept TO_FLOAT on final 't'.
private static TokenType getNfaIndex83(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 't') ? TO_FLOAT : null;
}
// NFA state 84: terminal state — accept SELECT_INPUT on final 't'.
private static TokenType getNfaIndex84(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    return (ch == 't') ? SELECT_INPUT : null;
}
// NFA state 85: intermediate state — on 'e', advance to state 86; accepts nothing.
private static TokenType getNfaIndex85(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'e') nextStates.set(86);
    return null;
}
// NFA state 86: intermediate state — on 'm', advance to state 87; accepts nothing.
private static TokenType getNfaIndex86(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'm') nextStates.set(87);
    return null;
}
// NFA state 87: intermediate state — on 'o', advance to state 88; accepts nothing.
private static TokenType getNfaIndex87(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'o') nextStates.set(88);
    return null;
}
// NFA state 88: intermediate state — on 'v', advance to state 89; accepts nothing.
private static TokenType getNfaIndex88(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'v') nextStates.set(89);
    return null;
}
// NFA state 89: intermediate state — on 'e', advance to state 90; accepts nothing.
private static TokenType getNfaIndex89(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'e') nextStates.set(90);
    return null;
}
// NFA state 90: intermediate state — on '_', advance to state 91; accepts nothing.
private static TokenType getNfaIndex90(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == '_') nextStates.set(91);
    return null;
}
// NFA state 91: intermediate state — on 'i', advance to state 92; accepts nothing.
private static TokenType getNfaIndex91(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'i') nextStates.set(92);
    return null;
}
// NFA state 92: intermediate state — on 'f', advance to state 93; accepts nothing.
private static TokenType getNfaIndex92(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'f') nextStates.set(93);
    return null;
}
// NFA state 93: intermediate state — on '_', advance to state 94; accepts nothing.
private static TokenType getNfaIndex93(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == '_') nextStates.set(94);
    return null;
}
// NFA state 94: intermediate state — on 'z', advance to state 95; accepts nothing.
private static TokenType getNfaIndex94(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'z') nextStates.set(95);
    return null;
}
// NFA state 95: intermediate state — on 'e', advance to state 96; accepts nothing.
private static TokenType getNfaIndex95(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'e') nextStates.set(96);
    return null;
}
// NFA state 96: intermediate state — on 'r', advance to state 1; accepts nothing.
private static TokenType getNfaIndex96(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'r') nextStates.set(1);
    return null;
}
// NFA state 97: hex-literal prefix — on 'X' or 'x', advance to state 98.
private static TokenType getNfaIndex97(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'X' || ch == 'x') nextStates.set(98);
    return null;
}
// NFA state 98: first hex digit after the 0x/0X prefix — advance to state 2.
private static TokenType getNfaIndex98(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    boolean hexDigit = (ch >= '0' && ch <= '9')
            || (ch >= 'A' && ch <= 'F')
            || (ch >= 'a' && ch <= 'f');
    if (hexDigit) nextStates.set(2);
    return null;
}
// NFA state 99: intermediate state — on 'o', advance to state 100; accepts nothing.
private static TokenType getNfaIndex99(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'o') nextStates.set(100);
    return null;
}
// NFA state 100: intermediate state — on 'r', advance to state 101; accepts nothing.
private static TokenType getNfaIndex100(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'r') nextStates.set(101);
    return null;
}
// NFA state 101: intermediate state — on '_', advance to state 102; accepts nothing.
private static TokenType getNfaIndex101(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == '_') nextStates.set(102);
    return null;
}
// NFA state 102: intermediate state — on 'e', advance to state 103; accepts nothing.
private static TokenType getNfaIndex102(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'e') nextStates.set(103);
    return null;
}
// NFA state 103: intermediate state — on 'a', advance to state 104; accepts nothing.
private static TokenType getNfaIndex103(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'a') nextStates.set(104);
    return null;
}
// NFA state 104: intermediate state — on 'c', advance to state 3; accepts nothing.
private static TokenType getNfaIndex104(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'c') nextStates.set(3);
    return null;
}
// NFA state 105: intermediate state — on 'a', advance to state 106; accepts nothing.
private static TokenType getNfaIndex105(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'a') nextStates.set(106);
    return null;
}
// NFA state 106: intermediate state — on 'n', advance to state 107; accepts nothing.
private static TokenType getNfaIndex106(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'n') nextStates.set(107);
    return null;
}
// NFA state 107: intermediate state — on 'd', advance to state 108; accepts nothing.
private static TokenType getNfaIndex107(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'd') nextStates.set(108);
    return null;
}
// NFA state 108: intermediate state — on 'o', advance to state 5; accepts nothing.
private static TokenType getNfaIndex108(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'o') nextStates.set(5);
    return null;
}
// NFA state 109: intermediate state — on 'x', advance to state 110; accepts nothing.
private static TokenType getNfaIndex109(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'x') nextStates.set(110);
    return null;
}
// NFA state 110: intermediate state — on 'a', advance to state 111; accepts nothing.
private static TokenType getNfaIndex110(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'a') nextStates.set(111);
    return null;
}
// NFA state 111: intermediate state — on 'c', advance to state 6; accepts nothing.
private static TokenType getNfaIndex111(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'c') nextStates.set(6);
    return null;
}
// NFA state 112: intermediate state — on 'o', advance to state 113; accepts nothing.
private static TokenType getNfaIndex112(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'o') nextStates.set(113);
    return null;
}
// NFA state 113: intermediate state — on 'i', advance to state 7; accepts nothing.
private static TokenType getNfaIndex113(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'i') nextStates.set(7);
    return null;
}
// NFA state 114: intermediate state — on 'c', advance to state 115; accepts nothing.
private static TokenType getNfaIndex114(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'c') nextStates.set(115);
    return null;
}
// NFA state 115: intermediate state — on 'h', advance to state 8; accepts nothing.
private static TokenType getNfaIndex115(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'h') nextStates.set(8);
    return null;
}
// NFA state 116: intermediate state — on 'l', advance to state 117; accepts nothing.
private static TokenType getNfaIndex116(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 'l') nextStates.set(117);
    return null;
}
// NFA state 117: intermediate state — on 's', advance to state 10; accepts nothing.
private static TokenType getNfaIndex117(int ch, BitSet nextStates, EnumSet validTypes, EnumSet alreadyMatchedTypes) {
    if (ch == 's') nextStates.set(10);
    return null;
}
private static TokenType getNfaIndex118(int ch, BitSet nextStates, EnumSet validTypes, EnumSet