/*
 * NOTE(review): the lines below are residue from the Maven-repository web page
 * this file was scraped from (artifact: hibernate-core,
 * class: org.hibernate.tool.schema.ast.GeneratedSqlScriptParser). They are not
 * part of the original source and have been fenced into a comment so the file
 * remains valid Java.
 *
 *   org.hibernate.tool.schema.ast.GeneratedSqlScriptParser Maven / Gradle / Ivy
 *   Go to download
 *   Show more of this group Show more artifacts with this name
 *   Show all versions of hibernate-core Show documentation
 *   Show all versions of hibernate-core Show documentation
 *   JPMS Module-Info's for a few of the Jakarta Libraries just until they add them in themselves
 */
// $ANTLR : "sql-script.g" -> "GeneratedSqlScriptParser.java"$
package org.hibernate.tool.schema.ast;
import java.util.Iterator;
import java.util.List;
import java.util.LinkedList;
import org.hibernate.hql.internal.ast.ErrorReporter;
import org.hibernate.relocated.org.hibernate.relocated.antlr.TokenBuffer;
import org.hibernate.relocated.org.hibernate.relocated.antlr.TokenStreamException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.TokenStreamIOException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.ANTLRException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.LLkParser;
import org.hibernate.relocated.org.hibernate.relocated.antlr.Token;
import org.hibernate.relocated.org.hibernate.relocated.antlr.TokenStream;
import org.hibernate.relocated.org.hibernate.relocated.antlr.RecognitionException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.NoViableAltException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.MismatchedTokenException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.SemanticException;
import org.hibernate.relocated.org.hibernate.relocated.antlr.ParserSharedInputState;
import org.hibernate.relocated.org.hibernate.relocated.antlr.collections.impl.BitSet;
/**
* Lexer and parser used to extract single statements from import SQL script. Supports instructions/comments and quoted
* strings spread over multiple lines. Each statement must end with semicolon.
*
* @author Lukasz Antoniak (lukasz dot antoniak at gmail dot com)
*/
// NOTE(review): this class is ANTLR 2-generated from "sql-script.g" (see the
// $ANTLR header comment above). Do NOT hand-edit the parsing logic — change the
// grammar and regenerate instead. All comments below are review annotations
// explaining the generated code; no code tokens were altered.
//
// Grammar shape (as realized by the rule methods below):
//   script    : NEWLINE* statement* EOF
//   statement : ( statementPart NEWLINE* )* DELIMITER NEWLINE*
// where statementPart is either a QUOTED_TEXT token or a single CHAR token.
public class GeneratedSqlScriptParser extends org.hibernate.relocated.org.hibernate.relocated.antlr.LLkParser implements GeneratedSqlScriptParserTokenTypes
{
// ---------------------------------------------------------------------------
// Semantic-action hooks. All are intentional no-ops here; a subclass overrides
// them to collect statement text. The generated rule methods below invoke them
// at the corresponding grammar action points.
// ---------------------------------------------------------------------------

// Called with literal text to append to the current statement
// (e.g. the single space emitted for an in-statement newline).
protected void out(String stmt) {
// by default, nothing to do
}
// Called with a matched token (QUOTED_TEXT or CHAR) whose text belongs to the
// current statement.
protected void out(Token token) {
// by default, nothing to do
}
// Called when a new statement begins (before its first part is consumed).
protected void statementStarted() {
// by default, nothing to do
}
// Called after the statement's DELIMITER and any trailing newlines.
protected void statementEnded() {
// by default, nothing to do
}
// Called for tokens that are discarded entirely (newlines between statements).
protected void skip() {
// by default, nothing to do
}

// ---------------------------------------------------------------------------
// Constructors. All delegate to LLkParser with k=3 lookahead (the value the
// public constructors pass) and install the generated token-name table.
// ---------------------------------------------------------------------------
protected GeneratedSqlScriptParser(TokenBuffer tokenBuf, int k) {
super(tokenBuf,k);
tokenNames = _tokenNames;
}
public GeneratedSqlScriptParser(TokenBuffer tokenBuf) {
this(tokenBuf,3);
}
protected GeneratedSqlScriptParser(TokenStream lexer, int k) {
super(lexer,k);
tokenNames = _tokenNames;
}
public GeneratedSqlScriptParser(TokenStream lexer) {
this(lexer,3);
}
public GeneratedSqlScriptParser(ParserSharedInputState state) {
super(state,3);
tokenNames = _tokenNames;
}

// Entry rule: script : NEWLINE* statement* EOF.
// Leading newlines are skipped (skip() hook); then statements are parsed for
// as long as lookahead is a token that can start one (DELIMITER allows a
// statement that is only a delimiter); finally EOF is required.
// On RecognitionException: report, then recover by consuming up to a token in
// _tokenSet_0 (= { EOF }).
public final void script() throws RecognitionException, TokenStreamException {
traceIn("script");
try { // debugging
try { // for error handling
{
_loop793:
do {
if ((LA(1)==NEWLINE)) {
newLineToSkip();
}
else {
break _loop793;
}
} while (true);
}
{
_loop795:
do {
if ((LA(1)==DELIMITER||LA(1)==QUOTED_TEXT||LA(1)==CHAR)) {
statement();
}
else {
break _loop795;
}
} while (true);
}
match(Token.EOF_TYPE);
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_0);
}
} finally { // debugging
traceOut("script");
}
}

// Matches a NEWLINE that carries no statement content and notifies the
// skip() hook. Recovery set _tokenSet_1 = { EOF, DELIMITER, QUOTED_TEXT,
// NEWLINE, CHAR } (everything that may follow).
public final void newLineToSkip() throws RecognitionException, TokenStreamException {
traceIn("newLineToSkip");
try { // debugging
try { // for error handling
match(NEWLINE);
skip();
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_1);
}
} finally { // debugging
traceOut("newLineToSkip");
}
}

// statement : ( statementPart ( afterStatementPartNewline )* )* DELIMITER
//             ( newLineToSkip )*
// Brackets the whole rule with statementStarted()/statementEnded(). Newlines
// inside a statement go through afterStatementPartNewline (emitted as a
// space); newlines after the DELIMITER are skipped. Recovery set
// _tokenSet_2 = { EOF, DELIMITER, QUOTED_TEXT, CHAR }.
public final void statement() throws RecognitionException, TokenStreamException {
traceIn("statement");
try { // debugging
try { // for error handling
statementStarted();
{
_loop800:
do {
if ((LA(1)==QUOTED_TEXT||LA(1)==CHAR)) {
statementPart();
{
_loop799:
do {
if ((LA(1)==NEWLINE)) {
afterStatementPartNewline();
}
else {
break _loop799;
}
} while (true);
}
}
else {
break _loop800;
}
} while (true);
}
match(DELIMITER);
{
_loop802:
do {
if ((LA(1)==NEWLINE)) {
newLineToSkip();
}
else {
break _loop802;
}
} while (true);
}
statementEnded();
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_2);
}
} finally { // debugging
traceOut("statement");
}
}

// statementPart : quotedString | nonSkippedChar — dispatched on 1 token of
// lookahead; any other token is a NoViableAltException. Recovery set
// _tokenSet_3 = { DELIMITER, QUOTED_TEXT, NEWLINE, CHAR }.
public final void statementPart() throws RecognitionException, TokenStreamException {
traceIn("statementPart");
try { // debugging
try { // for error handling
switch ( LA(1)) {
case QUOTED_TEXT:
{
quotedString();
break;
}
case CHAR:
{
nonSkippedChar();
break;
}
default:
{
throw new NoViableAltException(LT(1), getFilename());
}
}
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
} finally { // debugging
traceOut("statementPart");
}
}

// A NEWLINE occurring between parts of the same statement: matched, then
// replaced in the output with a single space via out(" ") so multi-line
// statements collapse to one line. (The captured token `n` is unused by the
// generated action.)
public final void afterStatementPartNewline() throws RecognitionException, TokenStreamException {
traceIn("afterStatementPartNewline");
try { // debugging
Token n = null;
try { // for error handling
n = LT(1);
match(NEWLINE);
out( " " );
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
} finally { // debugging
traceOut("afterStatementPartNewline");
}
}

// Matches a QUOTED_TEXT token (a complete quoted string produced by the
// lexer, possibly spanning lines) and forwards it to the out(Token) hook.
public final void quotedString() throws RecognitionException, TokenStreamException {
traceIn("quotedString");
try { // debugging
Token q = null;
try { // for error handling
q = LT(1);
match(QUOTED_TEXT);
out( q );
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
} finally { // debugging
traceOut("quotedString");
}
}

// Matches a single CHAR token (any character the lexer did not classify as a
// delimiter/newline/quote) and forwards it to the out(Token) hook.
public final void nonSkippedChar() throws RecognitionException, TokenStreamException {
traceIn("nonSkippedChar");
try { // debugging
Token c = null;
try { // for error handling
c = LT(1);
match(CHAR);
out( c );
}
catch (RecognitionException ex) {
reportError(ex);
recover(ex,_tokenSet_3);
}
} finally { // debugging
traceOut("nonSkippedChar");
}
}

// Generated token-name table; array index == token type constant
// (EOF=1, DELIMITER=4, QUOTED_TEXT=5, NEWLINE=6, CHAR=7, ...).
public static final String[] _tokenNames = {
"<0>",
"EOF",
"<2>",
"NULL_TREE_LOOKAHEAD",
"DELIMITER",
"QUOTED_TEXT",
"NEWLINE",
"CHAR",
"ESCqs",
"LINE_COMMENT",
"BLOCK_COMMENT"
};

// ---------------------------------------------------------------------------
// Error-recovery follow sets. Each long encodes a bitmask of token types
// (bit i set => token type i is in the set), decoded here against
// _tokenNames above.
// ---------------------------------------------------------------------------

// 2L = bit 1 => { EOF }: follow set of the top-level script rule.
private static final long[] mk_tokenSet_0() {
long[] data = { 2L, 0L};
return data;
}
public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
// 242L = bits 1,4,5,6,7 => { EOF, DELIMITER, QUOTED_TEXT, NEWLINE, CHAR }.
private static final long[] mk_tokenSet_1() {
long[] data = { 242L, 0L};
return data;
}
public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
// 178L = bits 1,4,5,7 => { EOF, DELIMITER, QUOTED_TEXT, CHAR }.
private static final long[] mk_tokenSet_2() {
long[] data = { 178L, 0L};
return data;
}
public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
// 240L = bits 4,5,6,7 => { DELIMITER, QUOTED_TEXT, NEWLINE, CHAR }.
private static final long[] mk_tokenSet_3() {
long[] data = { 240L, 0L};
return data;
}
public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
}