
// $ANTLR : "sql-script.g" -> "SqlScriptLexer.java"$

package org.hibernate.tool.schema.ast;

import java.util.Iterator;
import java.util.List;
import java.util.LinkedList;

import org.hibernate.hql.internal.ast.ErrorReporter;

import java.io.InputStream;
import org.hibernate.relocated.antlr.TokenStreamException;
import org.hibernate.relocated.antlr.TokenStreamIOException;
import org.hibernate.relocated.antlr.TokenStreamRecognitionException;
import org.hibernate.relocated.antlr.CharStreamException;
import org.hibernate.relocated.antlr.CharStreamIOException;
import org.hibernate.relocated.antlr.ANTLRException;
import java.io.Reader;
import java.util.Hashtable;
import org.hibernate.relocated.antlr.CharScanner;
import org.hibernate.relocated.antlr.InputBuffer;
import org.hibernate.relocated.antlr.ByteBuffer;
import org.hibernate.relocated.antlr.CharBuffer;
import org.hibernate.relocated.antlr.Token;
import org.hibernate.relocated.antlr.CommonToken;
import org.hibernate.relocated.antlr.RecognitionException;
import org.hibernate.relocated.antlr.NoViableAltForCharException;
import org.hibernate.relocated.antlr.MismatchedCharException;
import org.hibernate.relocated.antlr.TokenStream;
import org.hibernate.relocated.antlr.ANTLRHashString;
import org.hibernate.relocated.antlr.LexerSharedInputState;
import org.hibernate.relocated.antlr.collections.impl.BitSet;
import org.hibernate.relocated.antlr.SemanticException;

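/**
 * ANTLR 2 generated lexer for SQL scripts (grammar: sql-script.g).
 *
 * It breaks a script into DELIMITER (';'), NEWLINE, QUOTED_TEXT (backtick- or
 * single-quoted text, with '' as the escaped quote) and CHAR tokens, while
 * line comments ("--" or "//") and block comments are matched and skipped.
 */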
public class SqlScriptLexer extends org.hibernate.relocated.antlr.CharScanner implements GeneratedSqlScriptParserTokenTypes, TokenStream {
public SqlScriptLexer(InputStream in) {
	this(new ByteBuffer(in));
}
public SqlScriptLexer(Reader in) {
	this(new CharBuffer(in));
}
public SqlScriptLexer(InputBuffer ib) {
	this(new LexerSharedInputState(ib));
}
public SqlScriptLexer(LexerSharedInputState state) {
	super(state);
	caseSensitiveLiterals = true;
	setCaseSensitive(true);
	literals = new Hashtable();
}

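/**
 * Dispatches on one or two characters of lookahead (LA(1)/LA(2)) to the token
 * rules below. Comment rules set Token.SKIP and therefore produce no token,
 * so the loop simply retries until a real token or EOF is found.
 */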
public Token nextToken() throws TokenStreamException {
	Token theRetToken=null;
tryAgain:
	for (;;) {
		Token _token = null;
		int _ttype = Token.INVALID_TYPE;
		resetText();
		try {   // for char stream error handling
			try {   // for lexical error handling
				switch ( LA(1)) {
				case ';':
				{
					mDELIMITER(true);
					theRetToken=_returnToken;
					break;
				}
				case '\n':  case '\r':
				{
					mNEWLINE(true);
					theRetToken=_returnToken;
					break;
				}
				default:
					if ((LA(1)=='\''||LA(1)=='`') && ((LA(2) >= '\u0000' && LA(2) <= '\ufffe'))) {
						mQUOTED_TEXT(true);
						theRetToken=_returnToken;
					}
					else if ((LA(1)=='-'||LA(1)=='/') && (LA(2)=='-'||LA(2)=='/')) {
						mLINE_COMMENT(true);
						theRetToken=_returnToken;
					}
					else if ((LA(1)=='/') && (LA(2)=='*')) {
						mBLOCK_COMMENT(true);
						theRetToken=_returnToken;
					}
					else if ((_tokenSet_0.member(LA(1))) && (true)) {
						mCHAR(true);
						theRetToken=_returnToken;
					}
					else {
						if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}
						else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
					}
				}
				if ( _returnToken==null ) continue tryAgain; // found SKIP token
				_ttype = _returnToken.getType();
				_ttype = testLiteralsTable(_ttype);
				_returnToken.setType(_ttype);
				return _returnToken;
			}
			catch (RecognitionException e) {
				throw new TokenStreamRecognitionException(e);
			}
		}
		catch (CharStreamException cse) {
			if ( cse instanceof CharStreamIOException ) {
				throw new TokenStreamIOException(((CharStreamIOException)cse).io);
			}
			else {
				throw new TokenStreamException(cse.getMessage());
			}
		}
	}
}

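	/** Matches the statement delimiter ';'. */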
	public final void mDELIMITER(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = DELIMITER;
		int _saveIndex;
		
		match(';');
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
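	/**
	 * Matches backtick-quoted text (`...`) or single-quoted text ('...');
	 * inside single quotes a doubled quote ('') is recognized via ESCqs.
	 */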
	public final void mQUOTED_TEXT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = QUOTED_TEXT;
		int _saveIndex;
		
		switch ( LA(1)) {
		case '`':
		{
			match('`');
			{
			_loop812:
			do {
				if ((_tokenSet_1.member(LA(1)))) {
					{
					match(_tokenSet_1);
					}
				}
				else {
					break _loop812;
				}
				
			} while (true);
			}
			match('`');
			break;
		}
		case '\'':
		{
			match('\'');
			{
			_loop817:
			do {
				boolean synPredMatched815 = false;
				if (((LA(1)=='\'') && (LA(2)=='\''))) {
					int _m815 = mark();
					synPredMatched815 = true;
					inputState.guessing++;
					try {
						{
						mESCqs(false);
						}
					}
					catch (RecognitionException pe) {
						synPredMatched815 = false;
					}
					rewind(_m815);
					inputState.guessing--;
				}
				if ( synPredMatched815 ) {
					mESCqs(false);
				}
				else if ((_tokenSet_2.member(LA(1)))) {
					{
					match(_tokenSet_2);
					}
				}
				else {
					break _loop817;
				}
				
			} while (true);
			}
			match('\'');
			break;
		}
		default:
		{
			throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
		}
		}
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
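	/** Matches the escaped single quote: two consecutive ' characters. */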
	protected final void mESCqs(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = ESCqs;
		int _saveIndex;
		
		match('\'');
		match('\'');
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
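	/**
	 * Matches any single character other than ';', '\r' and '\n'; a leading
	 * space or tab is let through as an ordinary CHAR via a syntactic
	 * predicate.
	 */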
	public final void mCHAR(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = CHAR;
		int _saveIndex;
		
		boolean synPredMatched821 = false;
		if (((LA(1)=='\t'||LA(1)==' ') && (true))) {
			int _m821 = mark();
			synPredMatched821 = true;
			inputState.guessing++;
			try {
				{
				switch ( LA(1)) {
				case ' ':
				{
					match(' ');
					break;
				}
				case '\t':
				{
					match('\t');
					break;
				}
				default:
				{
					throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
				}
				}
				}
			}
			catch (RecognitionException pe) {
				synPredMatched821 = false;
			}
			rewind(_m821);
			inputState.guessing--;
		}
		if ( synPredMatched821 ) {
			{
			switch ( LA(1)) {
			case ' ':
			{
				match(' ');
				break;
			}
			case '\t':
			{
				match('\t');
				break;
			}
			default:
			{
				throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
			}
			}
			}
		}
		else if ((_tokenSet_0.member(LA(1))) && (true)) {
			{
			match(_tokenSet_0);
			}
		}
		else {
			throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
		}
		
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
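	/** Matches a line terminator: "\r\n", "\r" or "\n". */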
	public final void mNEWLINE(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = NEWLINE;
		int _saveIndex;
		
		{
		if ((LA(1)=='\r') && (LA(2)=='\n')) {
			match('\r');
			match('\n');
		}
		else if ((LA(1)=='\r') && (true)) {
			match('\r');
		}
		else if ((LA(1)=='\n')) {
			match('\n');
		}
		else {
			throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
		}
		
		}
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
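	/**
	 * Matches a line comment introduced by "--" or "//" up to (but not
	 * including) the end of the line; the matched text is skipped.
	 */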
	public final void mLINE_COMMENT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = LINE_COMMENT;
		int _saveIndex;
		
		{
		switch ( LA(1)) {
		case '/':
		{
			match("//");
			break;
		}
		case '-':
		{
			match("--");
			break;
		}
		default:
		{
			throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
		}
		}
		}
		{
		_loop830:
		do {
			if ((_tokenSet_3.member(LA(1)))) {
				{
				match(_tokenSet_3);
				}
			}
			else {
				break _loop830;
			}
			
		} while (true);
		}
		if ( inputState.guessing==0 ) {
			// skip the entire match from the lexer stream
			_ttype = Token.SKIP;
		}
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
/**
 * Matches a multi-line block comment and skips it.
 *
 * Note: this approach comes from Terence Parr, the author of ANTLR -
 * https://theantlrguy.atlassian.net/wiki/spaces/ANTLR3/pages/2687360/How+do+I+match+multi-line+comments
 */
	public final void mBLOCK_COMMENT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
		int _ttype; Token _token=null; int _begin=text.length();
		_ttype = BLOCK_COMMENT;
		int _saveIndex;
		
		match("/*");
		{
		_loop834:
		do {
			if (((LA(1)=='*') && ((LA(2) >= '\u0000' && LA(2) <= '\ufffe')))&&( LA(2)!='/' )) {
				match('*');
			}
			else if ((LA(1)=='\r') && (LA(2)=='\n')) {
				match('\r');
				match('\n');
				if ( inputState.guessing==0 ) {
					newline();
				}
			}
			else if ((LA(1)=='\r') && ((LA(2) >= '\u0000' && LA(2) <= '\ufffe'))) {
				match('\r');
				if ( inputState.guessing==0 ) {
					newline();
				}
			}
			else if ((LA(1)=='\n')) {
				match('\n');
				if ( inputState.guessing==0 ) {
					newline();
				}
			}
			else if ((_tokenSet_4.member(LA(1)))) {
				{
				match(_tokenSet_4);
				}
			}
			else {
				break _loop834;
			}
			
		} while (true);
		}
		match("*/");
		if ( inputState.guessing==0 ) {
			_ttype = Token.SKIP;
		}
		if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
			_token = makeToken(_ttype);
			_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
		}
		_returnToken = _token;
	}
	
	
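	/** Bit set for _tokenSet_0: any character except '\n', '\r' and ';' (used by CHAR). */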
	private static final long[] mk_tokenSet_0() {
		long[] data = new long[2048];
		data[0]=-576460752303432705L;
		for (int i = 1; i<=1022; i++) { data[i]=-1L; }
		data[1023]=9223372036854775807L;
		return data;
	}
	public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
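	/** Bit set for _tokenSet_1: any character except '`' (body of backtick-quoted text). */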
	private static final long[] mk_tokenSet_1() {
		long[] data = new long[2048];
		data[0]=-1L;
		data[1]=-4294967297L;
		for (int i = 2; i<=1022; i++) { data[i]=-1L; }
		data[1023]=9223372036854775807L;
		return data;
	}
	public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
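	/** Bit set for _tokenSet_2: any character except '\'' (body of single-quoted text). */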
	private static final long[] mk_tokenSet_2() {
		long[] data = new long[2048];
		data[0]=-549755813889L;
		for (int i = 1; i<=1022; i++) { data[i]=-1L; }
		data[1023]=9223372036854775807L;
		return data;
	}
	public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
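	/** Bit set for _tokenSet_3: any character except '\n' and '\r' (body of a line comment). */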
	private static final long[] mk_tokenSet_3() {
		long[] data = new long[2048];
		data[0]=-9217L;
		for (int i = 1; i<=1022; i++) { data[i]=-1L; }
		data[1023]=9223372036854775807L;
		return data;
	}
	public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
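	/** Bit set for _tokenSet_4: any character except '\n', '\r' and '*' (body of a block comment). */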
	private static final long[] mk_tokenSet_4() {
		long[] data = new long[2048];
		data[0]=-4398046520321L;
		for (int i = 1; i<=1022; i++) { data[i]=-1L; }
		data[1023]=9223372036854775807L;
		return data;
	}
	public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());
	
	}
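
/*
 * Illustrative only (not part of the generated source): a minimal sketch of
 * driving this lexer by hand, assuming the relocated ANTLR 2 runtime shown in
 * the imports above is on the classpath. The demo class name is hypothetical.
 * Comment tokens are skipped, so only DELIMITER, NEWLINE, QUOTED_TEXT and
 * CHAR tokens come back before EOF.
 *
 *     import java.io.StringReader;
 *     import org.hibernate.relocated.antlr.Token;
 *     import org.hibernate.relocated.antlr.TokenStreamException;
 *     import org.hibernate.tool.schema.ast.SqlScriptLexer;
 *
 *     public class SqlScriptLexerDemo {
 *         public static void main(String[] args) throws TokenStreamException {
 *             String script = "create table t ( id integer );\n-- a comment\n";
 *             SqlScriptLexer lexer = new SqlScriptLexer( new StringReader( script ) );
 *             for ( Token t = lexer.nextToken(); t.getType() != Token.EOF_TYPE; t = lexer.nextToken() ) {
 *                 System.out.println( t.getType() + " -> \"" + t.getText() + "\"" );
 *             }
 *         }
 *     }
 */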



